Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,6 @@ resolver = "2"
[workspace.dependencies]
anyhow = "1.0.86"
serde = { version = "1.0.214", features = ["derive"] }
serde_json = { version = "1.0.125", features = ["unbounded_depth"] }
rust_am_lib = { git = "https://github.com/ku-sldg/rust-am-lib.git", version = "0.3.0" }
#rust_am_lib = { git = "file:///Users/adampetz/Documents/Summer_2025/rust-am-lib/", version = "0.2.0", branch="env_demo"}
serde_json = {version = "1.0.125", features = ["unbounded_depth"]}
rust_am_lib = { git = "https://github.com/ku-sldg/rust-am-lib.git", version = "0.3.0"}
#rust_am_lib = { git = "file://<local_dir>", version = "", branch=""}
2 changes: 2 additions & 0 deletions executables/readfile_range_many/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,5 @@ anyhow = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
rust_am_lib = { workspace = true }
serde_stacker = "0.1"
flate2 = "1.0"
73 changes: 61 additions & 12 deletions executables/readfile_range_many/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,44 +4,71 @@
// Common Packages
use anyhow::{Context, Result};

use std::fs;
use std::fs::File;
use std::io::{self, BufRead};
use std::io::{self, BufRead, Write};
use std::path::Path;
use std::collections::HashMap;

use flate2::write::GzEncoder;
//use flate2::read::GzDecoder;
use flate2::Compression;

use serde::{Deserialize, Serialize};
use serde_json::{Value, from_value};
//use serde::de::DeserializeOwned;
use serde_stacker::Deserializer;

use rust_am_lib::{
copland::{self, handle_body},
debug_print,
};
use serde::{Deserialize, Serialize};

// This ASP ("readfile_range_many") is a measurement ASP that reads the contents of the specified lines of text from a collection of files.
//
// INPUT:
// The ASP expects a JSON object with an "ASP_ARGS" field containing the following arguments:
// - "filepath": A string path to the file to be read.
// - "start_index": A number for the starting line index (starting at 1).
// - "end_index": A number for the ending line index.
// - "slices_file": A filepath(String) pointing to a JSON-encoded vector of File_Slice objects

//
// OUTPUT:
// The ASP returns a raw evidence package (`RawEv`) containing a vector of length 1 with the only member being a byte array (Vec<u8>),
// containing the encoded contents of the Slices_Map structure defined below. The keys in that HashMap structure are of the form: `<filepath>::<start_index>-<end_index>`, and
// the values are byte arrays (encoded Vec<u8>s) of the file contents at those line ranges. For simplicity, we chose not to preserve line boundaries
// of the contents because that would make the output evidence structure depend on the input file range.
// of the contents because that would make the output evidence structure depend on the input file range.

// NOTE: Additionally, we choose to gzip compress the Slices_Map structure to trim down the output evidence size.
// Any dual appraisal ASP will first need to decompress the raw data before decoding and proceeding with appraisal.


#[derive(Serialize, Deserialize, Debug, Clone)]
struct File_Slice {
filepath: String,
start_index: usize,
end_index: usize
filepath: String, // - "filepath": A string path to the file to be read.
start_index: usize, // - "start_index": A number for the starting line index (starting at 1).
end_index: usize // - "end_index": A number for the ending line index.
}

// ASP Arguments (JSON-decoded)
#[derive(Serialize, Deserialize, Debug, Clone)]
struct ASP_ARGS_ReadfileRangeMany {
slices: Vec<File_Slice>
slices_file: String
}

/// Gzip-compress the UTF-8 bytes of `s` at the default compression level,
/// returning the complete compressed byte stream.
fn compress_string(s: &str) -> io::Result<Vec<u8>> {
    let sink: Vec<u8> = Vec::new();
    let mut gz = GzEncoder::new(sink, Compression::default());
    gz.write_all(s.as_bytes())?;
    // finish() flushes the gzip trailer and hands back the underlying Vec.
    gz.finish()
}

/*
fn decompress_string(compressed_data: &[u8]) -> io::Result<String> {
let mut decoder = GzDecoder::new(compressed_data);
let mut s = String::new();
decoder.read_to_string(&mut s)?;
Ok(s)
}
*/

type Slices_Map = HashMap<String, Vec<u8>>;

fn read_line_range<P: AsRef<Path>>(
Expand Down Expand Up @@ -80,6 +107,19 @@ fn get_bytevec_fileslice (
Ok(res)
}

/// Decode a JSON string of arbitrary nesting depth into a `serde_json::Value`.
///
/// serde_json's built-in recursion limit is turned off (this requires the
/// crate's `unbounded_depth` feature, enabled in the workspace Cargo.toml),
/// and parsing is driven through serde_stacker's `Deserializer`, which grows
/// the call stack dynamically so deeply nested input cannot overflow it.
fn deserialize_deep_json_string(json_data: &str) -> serde_json::Result<Value> {
    let mut base = serde_json::de::Deserializer::from_str(json_data);
    base.disable_recursion_limit();
    Value::deserialize(Deserializer::new(&mut base))
}

// function where the work of the ASP is performed.
// May signal an error which will be handled in main.
fn body(_ev: copland::ASP_RawEv, args: copland::ASP_ARGS) -> Result<copland::ASP_RawEv> {
Expand All @@ -88,7 +128,12 @@ fn body(_ev: copland::ASP_RawEv, args: copland::ASP_ARGS) -> Result<copland::ASP
let myaspargs: ASP_ARGS_ReadfileRangeMany =
serde_json::from_value(args).context("Could not decode ASP_ARGS for ASP readfile_range_many")?;

let slices = myaspargs.slices;
let slices_file = myaspargs.slices_file;

let contents = fs::read_to_string(slices_file).expect("Couldn't read slices_file JSON file in readfile_range_many");
debug_print!{"\n\nAttempting to decode Vec<File_Slice>...\n\n"};
let my_contents_val = deserialize_deep_json_string(&contents)?;
let slices: Vec<File_Slice> = from_value(my_contents_val)?;

let mut m : Slices_Map = HashMap::new();

Expand All @@ -108,7 +153,11 @@ fn body(_ev: copland::ASP_RawEv, args: copland::ASP_ARGS) -> Result<copland::ASP
}
};

let res= serde_json::to_vec(&m)?;
let res_str = serde_json::to_string(&m)?;

let compressed_str = compress_string(&res_str)?;

let res = compressed_str;

Ok(vec![res])
}
Expand Down
1 change: 1 addition & 0 deletions executables/readfile_range_many_appr/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@ rust_am_lib = {workspace = true}
serde = {workspace = true}
serde_json = {workspace = true}
serde_stacker = "0.1"
flate2 = "1.0"
83 changes: 43 additions & 40 deletions executables/readfile_range_many_appr/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,18 +12,25 @@ use rust_am_lib::{
};
use serde::{Deserialize, Serialize};
use serde_json::{Value, from_value};
use serde::de::DeserializeOwned;
use serde_stacker::Deserializer;
use std::collections::HashMap;

use flate2::read::GzDecoder;
//use flate2::write::GzEncoder;
//use flate2::Compression;
use std::io::prelude::*;
use std::io::{self};

#[derive(Serialize, Deserialize, Debug, Clone)]
struct ASP_ARGS_ReadfileRangeMany_Appr {
env_var_golden: String,
filepath_golden: String,
outdir: String,
report_filepath: String,
asp_id_appr: String,
targ_id_appr: String
env_var_golden: String, // - "env_var_golden": A string for the ENV var to get the (optional) prefix path to filepath_golden
filepath_golden: String, // - "filepath_golden": A string path to the golden evidence file to be read.
outdir: String, // - "outdir": A string path to the output directory for the appraisal summary
report_filepath: String, // - "report_filepath": A string path to the input HAMR AttestationReport structure
asp_id_appr: String, // - "asp_id_appr": ASP_ID string set by the appraisal procedure internally.
// Used to index into golden evidence structure
targ_id_appr: String // - "targ_id_appr": TARG_ID string set by the appraisal procedure internally.
// Used to index into golden evidence structure
}

type Slices_Map = HashMap<String, Vec<u8>>;
Expand Down Expand Up @@ -91,19 +98,6 @@ pub struct ResoluteAppraisalSummaryResponse {
pub PAYLOAD: Vec<Resolute_Appsumm_Member>
}

fn deserialize_deep_json(json_data: &Vec<u8>) -> serde_json::Result<Value> {
let mut de = serde_json::de::Deserializer::from_slice(json_data);
de.disable_recursion_limit(); // This method is only available with the feature

// Wrap with serde_stacker's Deserializer to use a dynamically growing stack
let stacker_de = Deserializer::new(&mut de);

// Deserialize the data
let value = Value::deserialize(stacker_de)?;

Ok(value)
}

fn deserialize_deep_json_string(json_data: &str) -> serde_json::Result<Value> {
let mut de = serde_json::de::Deserializer::from_str(json_data);
de.disable_recursion_limit(); // This method is only available with the feature
Expand Down Expand Up @@ -140,22 +134,14 @@ fn get_slices_comp_map (golden_map:Slices_Map, candidate_map:Slices_Map) -> Slic

}

// Reads the file at `term_fp`, echoes its raw contents to stderr, JSON-decodes
// it into a `T`, and prints the decoded value (Debug form) before returning it.
// `type_string` is a human-readable type name used only in the diagnostics.
// NOTE(review): a read failure panics via `expect`; only JSON decode errors are
// returned to the caller as `Err`.
fn decode_from_file_and_print<T: DeserializeOwned + std::fmt::Debug + Clone>(term_fp:&Path, type_string:String) -> Result<T, serde_json::Error> {

    let err_string = format!("Couldn't read {type_string} JSON file");
    let term_contents = fs::read_to_string(term_fp).expect(err_string.as_str());
    eprintln!("\n{type_string} contents:\n{term_contents}");
    let term : T = serde_json::from_str(&term_contents)?;
    eprintln!("\nDecoded Term as:");
    eprintln!("{:?}", term);
    Ok(term)
}

pub fn get_attestation_report_json (hamr_report_fp:&Path) -> std::io::Result<HAMR_AttestationReport> {

let res: HAMR_AttestationReport = decode_from_file_and_print(hamr_report_fp, "HAMR_AttestationReport".to_string())?;
/* TODO: make this a library function to keep in-sync with client code */
fn get_attestation_report_json (hamr_report_fp:&Path) -> std::io::Result<HAMR_AttestationReport> {

Ok (res)
let type_string = "HAMR_AttestationReport".to_string();
let err_string = format!("Couldn't read {type_string} JSON file");
let term_contents = fs::read_to_string(hamr_report_fp).expect(err_string.as_str());
let term : HAMR_AttestationReport = serde_json::from_str(&term_contents)?;
Ok(term)
}

fn relpath_to_abspath (project_root_fp:&Path, relpath:&Path) -> String {
Expand Down Expand Up @@ -274,6 +260,21 @@ pub fn write_string_to_output_dir (maybe_out_dir:Option<String>, fp_suffix: &Pat
Ok(full_req_fp.as_path().to_str().unwrap().to_string())
}

/*
fn compress_string(s: &str) -> io::Result<Vec<u8>> {
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
encoder.write_all(s.as_bytes())?;
encoder.finish()
}
*/

/// Gzip-decompress `compressed_data` and return the recovered text as a
/// `String`; fails if the stream is not valid gzip or not valid UTF-8.
fn decompress_string(compressed_data: &[u8]) -> io::Result<String> {
    let mut out = String::new();
    GzDecoder::new(compressed_data).read_to_string(&mut out)?;
    Ok(out)
}

// function where the work of the ASP is performed.
// May signal an error which will be handled in main.
fn body(ev: copland::ASP_RawEv, args: copland::ASP_ARGS) -> Result<Result<()>> {
Expand Down Expand Up @@ -317,18 +318,20 @@ fn body(ev: copland::ASP_RawEv, args: copland::ASP_ARGS) -> Result<Result<()>> {
panic!("Evidence vectors have unexpected length in readfile_range_many_appr ASP");
}

let golden_map_encoded: &Vec<u8> = golden_bytes.first().unwrap();
let candidate_map_encoded: &Vec<u8> = evidence_in.first().unwrap();
let golden_map_encoded_compressed: &Vec<u8> = golden_bytes.first().unwrap();
let candidate_map_encoded_compressed: &Vec<u8> = evidence_in.first().unwrap();

let golden_map_json: Value = deserialize_deep_json(golden_map_encoded)?;
let golden_map_decompressed = decompress_string(golden_map_encoded_compressed)?;
let candidate_map_decompressed = decompress_string(candidate_map_encoded_compressed)?;

let golden_map_json: Value = deserialize_deep_json_string(&golden_map_decompressed)?;
let golden_map : Slices_Map = serde_json::from_value(golden_map_json)?;

let candidate_map_json: Value = deserialize_deep_json(candidate_map_encoded)?;
let candidate_map_json: Value = deserialize_deep_json_string(&candidate_map_decompressed)?;
let candidate_map : Slices_Map = serde_json::from_value(candidate_map_json)?;

let res_map: Slices_Comp_Map = get_slices_comp_map(golden_map, candidate_map);


let resolute_appsumm_response: ResoluteAppraisalSummaryResponse = generate_resolute_appsumm( myaspargs.report_filepath, res_map.clone())?;

let resolute_appsumm_resp_string = serde_json::to_string(&resolute_appsumm_response)?;
Expand Down