Skip to content

Commit

Permalink
bug fixes + tests
Browse files Browse the repository at this point in the history
  • Loading branch information
Kelvinrr committed Oct 23, 2024
1 parent 4104331 commit 5ab4fdd
Show file tree
Hide file tree
Showing 6 changed files with 155 additions and 57 deletions.
2 changes: 2 additions & 0 deletions SpiceQL/include/inventory.h
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ namespace SpiceQL {
namespace Inventory {
nlohmann::json search_for_kernelset(std::string spiceql_mission, std::vector<std::string> types, double start_time=-std::numeric_limits<double>::max(), double stop_time=std::numeric_limits<double>::max(),
std::string ckQuality="smithed", std::string spkQuality="smithed", bool enforce_quality=false);

std::string getDbFilePath();

void create_database();
}
Expand Down
8 changes: 7 additions & 1 deletion SpiceQL/src/inventory.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

#include <nlohmann/json.hpp>
#include <spdlog/spdlog.h>
#include <ghc/fs_std.hpp>

#include "inventory.h"
#include "inventoryimpl.h"
Expand All @@ -26,7 +27,12 @@ namespace SpiceQL {

return impl.search_for_kernelset(instrument, enum_types, start_time, stop_time, enum_ck_quality, enum_spk_quality, enforce_quality);
}


string getDbFilePath() {
    // Resolve the inventory database location once and memoize it:
    // the DB HDF file lives directly under the SpiceQL cache directory.
    static const std::string cachedPath = (fs::path(getCacheDir()) / DB_HDF_FILE).string();
    return cachedPath;
}

void create_database() {
// force generate the database
InventoryImpl db(true);
Expand Down
104 changes: 55 additions & 49 deletions SpiceQL/src/inventoryimpl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -106,17 +106,17 @@ namespace SpiceQL {
fc::BTreePair<double, size_t> p;
p.first = sstimes.first;
p.second = index;
if(kernel_times->start_times.contains(p.first)) {
p.first-=0.0000001;
while(kernel_times->start_times.contains(p.first)) {
p.first-=0.001;
}
kernel_times->start_times.insert(p);

fc::BTreePair<double, size_t> p2;
p2.first = sstimes.second;
p2.second = index;

if(kernel_times->stop_times.contains(p2.first)) {
p2.first+=0.0000001;
while(kernel_times->stop_times.contains(p2.first)) {
p2.first+=0.001;
}
kernel_times->stop_times.insert(p2);

Expand Down Expand Up @@ -280,68 +280,74 @@ namespace SpiceQL {
vector<string> file_paths_v = getKey<vector<string>>(DB_SPICE_ROOT_KEY+"/"+key+"/"+DB_TIME_FILES_KEY);

time_indices->file_paths = file_paths_v;

SPDLOG_TRACE("Index, start time, stop time sizes: {}, {}, {}", file_index_v.size(), start_times_v.size(), stop_times_v.size());
// load start_times
for(size_t i = 0; i < start_times_v.size(); i++) {
time_indices->start_times[start_times_v[i]] = file_index_v[i];
time_indices->stop_times[stop_times_v[i]] = file_index_v[i];
}

found = true;
}
catch (runtime_error &e) {
// should probably replace with a more specific exception
SPDLOG_TRACE("Couldn't find "+DB_SPICE_ROOT_KEY+"/" + key+ ". " + e.what());
continue;
}
}
if (enforce_quality) break; // only interate once if quality is enforced
}

if (time_indices) {
SPDLOG_TRACE("NUMBER OF KERNELS: {}", time_indices->file_paths.size());
SPDLOG_TRACE("NUMBER OF START TIMES: {}", time_indices->start_times.size());
SPDLOG_TRACE("NUMBER OF STOP TIMES: {}", time_indices->stop_times.size());
} else {
// no kernels found
continue;
}
size_t iterations = 0;
if (time_indices) {
SPDLOG_TRACE("NUMBER OF KERNELS: {}", time_indices->file_paths.size());
SPDLOG_TRACE("NUMBER OF START TIMES: {}", time_indices->start_times.size());
SPDLOG_TRACE("NUMBER OF STOP TIMES: {}", time_indices->stop_times.size());
} else {
// no kernels found
continue;
}

size_t iterations = 0;

// init containers
unordered_set<size_t> start_time_kernels;
vector<string> final_time_kernels;

// Get everything starting before the stop_time;
auto start_upper_bound = time_indices->start_times.upper_bound(stop_time);
for(auto it = time_indices->start_times.begin() ;it != start_upper_bound; it++) {
iterations++;
start_time_kernels.insert(it->second);
}
// init containers
unordered_set<size_t> start_time_kernels;
vector<string> final_time_kernels;

// Get everything starting before the stop_time;
auto start_upper_bound = time_indices->start_times.upper_bound(stop_time);
if(start_upper_bound == time_indices->start_times.begin() && start_upper_bound->first <= start_time) {
iterations++;
start_time_kernels.insert(start_upper_bound->second);
}
for(auto it = time_indices->start_times.begin() ;it != start_upper_bound; it++) {
iterations++;
start_time_kernels.insert(it->second);
}

SPDLOG_TRACE("NUMBER OF KERNELS MATCHING START TIME: {}", start_time_kernels.size());
SPDLOG_TRACE("NUMBER OF KERNELS MATCHING START TIME: {}", start_time_kernels.size());

// Get everything stopping after the start_time;
auto stop_lower_bound = time_indices->stop_times.lower_bound(start_time);
if(time_indices->stop_times.end() == stop_lower_bound && start_time_kernels.contains(stop_lower_bound->second)) {
final_time_kernels.push_back(time_indices->file_paths.at(stop_lower_bound->second));
}
else {
for(auto &it = stop_lower_bound;it != time_indices->stop_times.end(); it++) {
// if it's also in the start_time set, add it to the list
iterations++;

if (start_time_kernels.contains(it->second)) {
final_time_kernels.push_back(data_dir / time_indices->file_paths.at(it->second));
}
}
}
if (final_time_kernels.size()) {
kernels[Kernel::translateType(type)] = final_time_kernels;
kernels[qkey] = Kernel::translateQuality(quality);
// Get everything stopping after the start_time;
auto stop_lower_bound = time_indices->stop_times.lower_bound(start_time);
SPDLOG_TRACE("IS {} in the array? {}", stop_lower_bound->second, start_time_kernels.contains(stop_lower_bound->second));
if(time_indices->stop_times.end() == stop_lower_bound && stop_lower_bound->first >= stop_time && start_time_kernels.contains(stop_lower_bound->second)) {
final_time_kernels.push_back(time_indices->file_paths.at(stop_lower_bound->second));
}
else {
for(auto &it = stop_lower_bound;it != time_indices->stop_times.end(); it++) {
// if it's also in the start_time set, add it to the list
iterations++;
SPDLOG_TRACE("IS {} in the array? {}", it->second, start_time_kernels.contains(it->second));
if (start_time_kernels.contains(it->second)) {
final_time_kernels.push_back(data_dir / time_indices->file_paths.at(it->second));
}
}
}
if (final_time_kernels.size()) {
found = true;
kernels[Kernel::translateType(type)] = final_time_kernels;
kernels[qkey] = Kernel::translateQuality(quality);
}
SPDLOG_TRACE("NUMBER OF ITERATIONS: {}", iterations);
SPDLOG_TRACE("NUMBER OF KERNELS FOUND: {}", final_time_kernels.size());

if (enforce_quality) break; // only interate once if quality is enforced
}
SPDLOG_TRACE("NUMBER OF ITERATIONS: {}", iterations);
SPDLOG_TRACE("NUMBER OF KERNELS FOUND: {}", final_time_kernels.size());
}
else { // text/non time based kernels
SPDLOG_DEBUG("Trying to search time independant kernels");
Expand Down
46 changes: 43 additions & 3 deletions SpiceQL/tests/Fixtures.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@ void TempTestingFiles::SetUp() {


void TempTestingFiles::TearDown() {
    // Remove the per-test scratch directory. Leaving it in place (the
    // commented-out variant) leaks temp files across test runs and lets
    // state from one test bleed into the next, so cleanup stays enabled.
    // fs::remove_all returns the number of entries deleted; 0 means the
    // directory was missing or nothing could be removed.
    if(!fs::remove_all(tempDir)) {
      throw runtime_error("Could not delete temporary files");
    }
}


Expand Down Expand Up @@ -171,6 +171,46 @@ void IsisDataDirectory::CompareKernelSets(vector<string> kVector, vector<string>
}
}

void KernelsWithQualities::SetUp() {
root = getenv("SPICEROOT");

fs::create_directory(root / "spk");

// we are using Mars odyssey here
int bodyCode = -83000;
std::string referenceFrame = "j2000";

std::vector<double> times1 = {110000000, 120000000};
std::vector<double> times2 = {130000000, 140000000};

// create predicted SPK

std::vector<std::vector<double>> velocities = {{1,1,1}, {2,2,2}};
std::vector<std::vector<double>> positions = {{1, 1, 1}, {2, 2, 2}};
spkPathPredict = root / "spk" / "m01_map.bsp";
writeSpk(spkPathPredict, positions, times1, bodyCode, 1, referenceFrame, "SPK ID 1", 1, velocities, "SPK 1");

// create reconstructed SPK
spkPathRecon = root / "spk" / "m01_ab_v2.bsp";

writeSpk(spkPathRecon, positions, times1, bodyCode, 1, referenceFrame, "SPK ID 1", 1, velocities, "SPK 1");

// create another reconstructed SPK with different times
spkPathRecon2 = root / "spk" / "m01_map_rec.bsp";

writeSpk(spkPathRecon2, positions, times2, bodyCode, 1, referenceFrame, "SPK ID 1", 1, velocities, "SPK 1");

spkPathSmithed = root / "spk" / "themis_dayir_merged_2018Jul13_spk.bsp";
writeSpk(spkPathSmithed, positions, times1, bodyCode, 1, referenceFrame, "SPK ID 1", 1, velocities, "SPK 1");

Inventory::create_database();
}


void KernelsWithQualities::TearDown() {
    // Intentionally empty: the SPK files written in SetUp are left on disk
    // (presumably cleaned up by the surrounding test environment — TODO confirm).

}


void LroKernelSet::SetUp() {
root = getenv("SPICEROOT");
Expand Down
12 changes: 12 additions & 0 deletions SpiceQL/tests/Fixtures.h
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,18 @@ class LroKernelSet : public ::testing::Test {
void TearDown() override;
};

// Fixture that generates Mars Odyssey SPK kernels of several qualities
// (predicted, reconstructed x2, smithed) under $SPICEROOT/spk and indexes
// them in the inventory database, for quality-selection tests.
class KernelsWithQualities : public ::testing::Test {
protected:
// SPICE data root, taken from the SPICEROOT environment variable in SetUp.
fs::path root;
// Absolute paths of the generated SPK files, one per quality level.
string spkPathPredict;
string spkPathRecon;
string spkPathRecon2;
string spkPathSmithed;

void SetUp() override;
void TearDown() override;
};

class TestConfig : public KernelDataDirectories {
protected:

Expand Down
40 changes: 36 additions & 4 deletions SpiceQL/tests/InventoryTests.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,20 +6,20 @@
#include "Fixtures.h"

#include "inventory.h"
#include "inventoryimpl.h"

#include <spdlog/spdlog.h>

#include <highfive/highfive.hpp>

TEST_F(LroKernelSet, TestInventorySmithed) {
  Inventory::create_database();
  // Search a window (110e6–140e6 ephemeris seconds) wide enough to cover
  // all fixture kernels. The diff artifact that left two conflicting
  // declarations of `kernels` (stop time 130000001 vs 140000000) is a
  // redefinition error; keep only the committed 140000000 version.
  nlohmann::json kernels = Inventory::search_for_kernelset("lroc", {"fk", "sclk", "spk", "ck"}, 110000000, 140000000);
  EXPECT_EQ(fs::path(kernels["fk"][0]).filename(), "lro_frames_1111111_v01.tf");
  EXPECT_EQ(fs::path(kernels["sclk"][0]).filename(), "lro_clkcor_2020184_v00.tsc");
  EXPECT_EQ(fs::path(kernels["ck"][0]).filename(), "soc31_1111111_1111111_v21.bc");

  EXPECT_EQ(kernels["spk"].size(), 3);
  EXPECT_EQ(kernels["ck"].size(), 2);

  EXPECT_EQ(kernels["ckQuality"], "reconstructed");
  EXPECT_EQ(kernels["spkQuality"], "smithed");
}
Expand Down Expand Up @@ -51,3 +51,35 @@ TEST_F(LroKernelSet, TestInventoryEmpty) {
EXPECT_TRUE(kernels.empty());
}


TEST_F(LroKernelSet, TestInventoryPortability) {
  fs::path dbfile = Inventory::getDbFilePath();
  HighFive::File file(dbfile, HighFive::File::ReadOnly);

  auto dataset = file.getDataSet("spice/lroc/sclk/kernels");
  // Read the dataset once; the original called dataset.read(data) again
  // immediately after, performing a redundant second HDF5 read.
  vector<string> data = dataset.read<vector<string>>();

  // assert that the path in the db is relative
  EXPECT_EQ(data.at(0), "clocks/lro_clkcor_2020184_v00.tsc");

  nlohmann::json kernels = Inventory::search_for_kernelset("lroc", {"fk", "sclk", "spk", "ck"});

  // these paths should be expanded (absolute), hence longer than the stored relative path
  EXPECT_TRUE(kernels["sclk"][0].get<string>().size() > data.at(0).size());
}


TEST_F(KernelsWithQualities, TestUnenforcedQuality) {
  // With enforce_quality=false the requested quality is only a preference:
  // no smithed SPKs cover this window, so the search should fall back
  // to the reconstructed kernels.
  auto result = Inventory::search_for_kernelset("odyssey", {"spk"}, 130000000, 140000000, "smithed", "smithed", false);
  EXPECT_EQ(result["spkQuality"].get<string>(), "reconstructed");
}


TEST_F(KernelsWithQualities, TestEnforcedQuality) {
  // With enforce_quality=true there is no fallback: since no smithed SPKs
  // exist for this window, the result must be empty (null json).
  auto result = Inventory::search_for_kernelset("odyssey", {"spk"}, 130000000, 140000000, "smithed", "smithed", true);
  EXPECT_TRUE(result.is_null());
}

0 comments on commit 5ab4fdd

Please sign in to comment.