#![allow(missing_docs)]
use std::{
    fs::File,
    io::prelude::*,
    path::PathBuf,
    time::{Duration, Instant},
};

use clap::Parser;
use colored::*;
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};

use crate::{
    config::{self, UnitTestResult},
    signal,
};

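// CLI options for the unit test subcommand: config files (optionally split by
// format), config directories, and optional JUnit report output paths.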
#[derive(Parser, Debug)]
#[command(rename_all = "kebab-case")]
pub struct Opts {
    #[arg(id = "config-toml", long, value_delimiter(','))]
    paths_toml: Vec<PathBuf>,

    #[arg(id = "config-json", long, value_delimiter(','))]
    paths_json: Vec<PathBuf>,

    #[arg(id = "config-yaml", long, value_delimiter(','))]
    paths_yaml: Vec<PathBuf>,

    #[arg(value_delimiter(','))]
    paths: Vec<PathBuf>,

    #[arg(
        id = "config-dir",
        short = 'C',
        long,
        env = "VECTOR_CONFIG_DIR",
        value_delimiter(',')
    )]
    pub config_dirs: Vec<PathBuf>,

    #[arg(id = "junit-report", long, value_delimiter(','))]
    junit_report_paths: Option<Vec<PathBuf>>,
}

impl Opts {
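    // Combine the per-format file paths, untyped file paths, and config
    // directories into a single list of `ConfigPath` values.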
    fn paths_with_formats(&self) -> Vec<config::ConfigPath> {
        config::merge_path_lists(vec![
            (&self.paths, None),
            (&self.paths_toml, Some(config::Format::Toml)),
            (&self.paths_json, Some(config::Format::Json)),
            (&self.paths_yaml, Some(config::Format::Yaml)),
        ])
        .map(|(path, hint)| config::ConfigPath::File(path, hint))
        .chain(
            self.config_dirs
                .iter()
                .map(|dir| config::ConfigPath::Dir(dir.to_path_buf())),
        )
        .collect()
    }
}

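// Collects per-test results and, when output paths were supplied, writes them
// out as a JUnit XML report.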
#[derive(Debug)]
pub struct JUnitReporter<'a> {
    report: Report,
    test_suite: TestSuite,
    output_paths: Option<&'a Vec<PathBuf>>,
}

impl<'a> JUnitReporter<'a> {
    fn new(paths: Option<&'a Vec<PathBuf>>) -> Self {
        Self {
            report: Report::new("Vector Unit Tests"),
            test_suite: TestSuite::new("Test Suite"),
            output_paths: paths,
        }
    }

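    // Record a single test's outcome as a JUnit test case; a no-op when no
    // report was requested.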
    fn add_test_result(&mut self, name: &str, errors: &[String], time: Duration) {
        if self.output_paths.is_none() {
            return;
        };

        if errors.is_empty() {
            let mut test_case = TestCase::new(name.to_owned(), TestCaseStatus::success());
            test_case.set_time(time);
            self.test_suite.add_test_case(test_case);
        } else {
            let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
            status.set_description(errors.join("\n"));
            let mut test_case = TestCase::new(name.to_owned(), status);
            test_case.set_time(time);
            self.test_suite.add_test_case(test_case);
        }
    }

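    // Serialize the accumulated report and write it to every requested path;
    // a no-op when no report was requested.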
    fn write_reports(mut self, time: Duration) -> Result<(), String> {
        if self.output_paths.is_none() {
            return Ok(());
        };

        self.test_suite.set_time(time);
        self.report.add_test_suite(self.test_suite);

        let report_bytes = match self.report.to_string() {
            Ok(report_string) => report_string.into_bytes(),
            Err(error) => return Err(error.to_string()),
        };

        for path in self.output_paths.unwrap() {
            match File::create(path) {
                Ok(mut file) => match file.write_all(&report_bytes) {
                    Ok(()) => {}
                    Err(error) => return Err(error.to_string()),
                },
                Err(error) => return Err(error.to_string()),
            }
        }

        Ok(())
    }
}

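/// Build unit tests from the given configuration, run them, and print the
/// results, writing JUnit reports when requested. Returns `exitcode::CONFIG`
/// if configuration handling fails or any test fails.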
pub async fn cmd(opts: &Opts, signal_handler: &mut signal::SignalHandler) -> exitcode::ExitCode {
    let mut aggregated_test_errors: Vec<(String, Vec<String>)> = Vec::new();

    let paths = opts.paths_with_formats();
    let paths = match config::process_paths(&paths) {
        Some(paths) => paths,
        None => return exitcode::CONFIG,
    };

    let mut junit_reporter = JUnitReporter::new(opts.junit_report_paths.as_ref());

    #[allow(clippy::print_stdout)]
    {
        println!("Running tests");
    }
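    // Build the unit test topology from the resolved config paths, then run
    // each test and record its result.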
    match config::build_unit_tests_main(&paths, signal_handler).await {
        Ok(tests) => {
            if tests.is_empty() {
                #[allow(clippy::print_stdout)]
                {
                    println!("{}", "No tests found.".yellow());
                }
            } else {
                let test_suite_start = Instant::now();

                for test in tests {
                    let name = test.name.clone();

                    let test_case_start = Instant::now();
                    let UnitTestResult { errors } = test.run().await;
                    let test_case_elapsed = test_case_start.elapsed();

                    junit_reporter.add_test_result(&name, &errors, test_case_elapsed);

                    if !errors.is_empty() {
                        #[allow(clippy::print_stdout)]
                        {
                            println!("test {} ... {}", name, "failed".red());
                        }
                        aggregated_test_errors.push((name, errors));
                    } else {
                        #[allow(clippy::print_stdout)]
                        {
                            println!("test {} ... {}", name, "passed".green());
                        }
                    }
                }

                let test_suite_elapsed = test_suite_start.elapsed();
                match junit_reporter.write_reports(test_suite_elapsed) {
                    Ok(()) => {}
                    Err(error) => {
                        error!("Failed to write JUnit report:\n{}.", error);
                        return exitcode::CONFIG;
                    }
                }
            }
        }
        Err(errors) => {
            error!("Failed to execute tests:\n{}.", errors.join("\n"));
            return exitcode::CONFIG;
        }
    }

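    // Print a summary of every failure and exit non-zero if any test failed.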
    if !aggregated_test_errors.is_empty() {
        #[allow(clippy::print_stdout)]
        {
            println!("\nfailures:");
        }
        for (test_name, fails) in aggregated_test_errors {
            #[allow(clippy::print_stdout)]
            {
                println!("\ntest {test_name}:\n");
            }
            for fail in fails {
                #[allow(clippy::print_stdout)]
                {
                    println!("{fail}\n");
                }
            }
        }

        exitcode::CONFIG
    } else {
        exitcode::OK
    }
}