feat: add parse_raw_upload
we want to support parsing raw upload files instead of individual JUnit
XML files

the input of this new function is the raw upload in byte form

the output is a MessagePack-encoded binary payload containing the results of
the parsing, together with the raw upload in a readable format, both in byte form
joseph-sentry committed Dec 19, 2024
1 parent 79fc58d commit 1117e75
Showing 4 changed files with 468 additions and 7 deletions.
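
As the commit message describes, the input is a JSON raw upload in byte form: each entry in test_results_files carries zlib-compressed, base64-encoded JUnit XML, plus an optional network list of strings that is passed through to the JUnit parser. A minimal Rust sketch of building such a payload, using the same crates the parser itself uses (the helper name, filename, and network entry are illustrative, not part of this commit):

use base64::prelude::*;
use flate2::{write::ZlibEncoder, Compression};
use std::io::Write;

// Hypothetical helper: wrap one JUnit XML document in the raw-upload JSON
// shape that parse_raw_upload deserializes (RawTestResultUpload / TestResultFile).
fn build_raw_upload(junit_xml: &str) -> Vec<u8> {
    // zlib-compress the JUnit XML, then base64-encode the compressed bytes
    let mut encoder = ZlibEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(junit_xml.as_bytes()).unwrap();
    let compressed = encoder.finish().unwrap();
    let encoded = BASE64_STANDARD.encode(compressed);

    serde_json::json!({
        "network": ["tests/test_example.py"],
        "test_results_files": [
            { "filename": "junit.xml", "data": encoded }
        ]
    })
    .to_string()
    .into_bytes()
}

// Passing these bytes to parse_raw_upload returns a tuple of byte vectors:
// (MessagePack-encoded parsing results, readable legacy-format upload).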
2 changes: 2 additions & 0 deletions src/lib.rs
@@ -4,6 +4,7 @@ use pyo3::prelude::*;
mod compute_name;
mod failure_message;
mod junit;
mod raw_upload;
mod testrun;

pyo3::create_exception!(test_results_parser, ParserError, PyException);
@@ -18,6 +19,7 @@ fn test_results_parser(py: Python, m: &Bound<PyModule>) -> PyResult<()> {
    m.add_class::<testrun::Framework>()?;
    m.add_class::<testrun::ParsingInfo>()?;

    m.add_function(wrap_pyfunction!(raw_upload::parse_raw_upload, m)?)?;
    m.add_function(wrap_pyfunction!(junit::parse_junit_xml, m)?)?;
    m.add_function(wrap_pyfunction!(failure_message::build_message, m)?)?;
    m.add_function(wrap_pyfunction!(failure_message::escape_message, m)?)?;
101 changes: 101 additions & 0 deletions src/raw_upload.rs
@@ -0,0 +1,101 @@
use base64::prelude::*;
use pyo3::prelude::*;
use std::collections::HashSet;
use std::io::prelude::*;

use flate2::bufread::ZlibDecoder;

use quick_xml::reader::Reader;
use serde::Deserialize;

use crate::junit::{get_position_info, use_reader};
use crate::testrun::ParsingInfo;
use crate::ParserError;

#[derive(Deserialize, Debug, Clone)]
struct TestResultFile {
    filename: String,
    #[serde(skip_deserializing)]
    _format: String,
    data: String,
    #[serde(skip_deserializing)]
    _labels: Vec<String>,
}
#[derive(Deserialize, Debug, Clone)]
struct RawTestResultUpload {
    #[serde(default)]
    network: Option<Vec<String>>,
    test_results_files: Vec<TestResultFile>,
}

#[derive(Debug, Clone)]
struct ReadableFile {
    filename: Vec<u8>,
    data: Vec<u8>,
}

const LEGACY_FORMAT_PREFIX: &[u8] = b"# path=";
const LEGACY_FORMAT_SUFFIX: &[u8] = b"<<<<<< EOF";

fn serialize_to_legacy_format(readable_files: Vec<ReadableFile>) -> Vec<u8> {
    let mut res = Vec::new();
    for file in readable_files {
        res.extend_from_slice(LEGACY_FORMAT_PREFIX);
        res.extend_from_slice(&file.filename);
        res.extend_from_slice(b"\n");
        res.extend_from_slice(&file.data);
        res.extend_from_slice(b"\n");
        res.extend_from_slice(LEGACY_FORMAT_SUFFIX);
        res.extend_from_slice(b"\n");
    }
    res
}

#[pyfunction]
#[pyo3(signature = (raw_upload_bytes))]
pub fn parse_raw_upload(raw_upload_bytes: &[u8]) -> PyResult<(Vec<u8>, Vec<u8>)> {
    let upload: RawTestResultUpload = serde_json::from_slice(raw_upload_bytes)
        .map_err(|e| ParserError::new_err(format!("Error deserializing json: {}", e)))?;
    let network: Option<HashSet<String>> = upload.network.map(|v| v.into_iter().collect());

    let mut results: Vec<ParsingInfo> = Vec::new();
    let mut readable_files: Vec<ReadableFile> = Vec::new();

    for file in upload.test_results_files {
        let decoded_file_bytes = BASE64_STANDARD
            .decode(file.data)
            .map_err(|e| ParserError::new_err(format!("Error decoding base64: {}", e)))?;

        let mut decoder = ZlibDecoder::new(&decoded_file_bytes[..]);

        let mut decompressed_file_bytes = Vec::new();
        decoder
            .read_to_end(&mut decompressed_file_bytes)
            .map_err(|e| ParserError::new_err(format!("Error decompressing file: {}", e)))?;

        let mut reader = Reader::from_reader(&decompressed_file_bytes[..]);
        reader.config_mut().trim_text(true);
        let reader_result = use_reader(&mut reader, network.as_ref()).map_err(|e| {
            let pos = reader.buffer_position();
            let (line, col) = get_position_info(&decompressed_file_bytes, pos.try_into().unwrap());
            ParserError::new_err(format!(
                "Error parsing JUnit XML at {}:{}: {}",
                line, col, e
            ))
        })?;
        results.push(reader_result);

        let readable_file = ReadableFile {
            data: decompressed_file_bytes,
            filename: file.filename.into_bytes(),
        };
        readable_files.push(readable_file);
    }

    let results_bytes = rmp_serde::to_vec_named(&results)
        .map_err(|_| ParserError::new_err("Error serializing pr comment summary"))?;

    let readable_file = serialize_to_legacy_format(readable_files);

    Ok((results_bytes, readable_file))
}
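
For reference, the second element of the returned tuple is produced by serialize_to_legacy_format above: each file becomes a "# path=" header line, the decompressed contents, and a "<<<<<< EOF" terminator. With an illustrative filename and contents, the readable output would look like:

# path=junit.xml
<testsuites><testsuite name="pytest">...</testsuite></testsuites>
<<<<<< EOF

The first element is the Vec<ParsingInfo> results serialized with rmp_serde::to_vec_named, i.e. a MessagePack payload keyed by field names, which relies on the Serialize implementations added to src/testrun.rs below.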
25 changes: 18 additions & 7 deletions src/testrun.rs
@@ -3,6 +3,8 @@ use std::fmt::Display;
use pyo3::class::basic::CompareOp;
use pyo3::{prelude::*, pyclass};

use serde::Serialize;

#[derive(Clone, Copy, Debug, PartialEq)]
// See https://github.com/PyO3/pyo3/issues/4723
#[allow(ambiguous_associated_items)]
@@ -37,13 +39,22 @@ impl Outcome {
    }
}

impl Serialize for Outcome {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}

impl Display for Outcome {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self {
-            Outcome::Pass => write!(f, "Pass"),
-            Outcome::Failure => write!(f, "Failure"),
-            Outcome::Error => write!(f, "Error"),
-            Outcome::Skip => write!(f, "Skip"),
+            Outcome::Pass => write!(f, "pass"),
+            Outcome::Failure => write!(f, "failure"),
+            Outcome::Error => write!(f, "error"),
+            Outcome::Skip => write!(f, "skip"),
        }
    }
}
@@ -77,7 +88,7 @@ pub fn check_testsuites_name(testsuites_name: &str) -> Option<Framework> {
        .next()
}

-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Debug, PartialEq, Serialize)]
#[pyclass]
pub struct Testrun {
    #[pyo3(get, set)]
@@ -189,7 +200,7 @@ impl Testrun {
    }
}

-#[derive(Clone, Copy, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq, Serialize)]
#[pyclass(eq, eq_int)]
pub enum Framework {
    Pytest,
@@ -221,7 +232,7 @@ impl Display for Framework {
    }
}

-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Serialize)]
#[pyclass]
pub struct ParsingInfo {
    #[pyo3(get, set)]