#![allow(missing_docs)]
use std::fs::File;
use std::io::prelude::*;
use std::path::PathBuf;
use std::time::{Duration, Instant};

use clap::Parser;
use colored::*;
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};

use crate::config::{self, UnitTestResult};
use crate::signal;

#[derive(Parser, Debug)]
#[command(rename_all = "kebab-case")]
pub struct Opts {
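    /// Test configuration files in TOML format, comma-separated or repeated.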
    #[arg(id = "config-toml", long, value_delimiter(','))]
    paths_toml: Vec<PathBuf>,

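    /// Test configuration files in JSON format.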
    #[arg(id = "config-json", long, value_delimiter(','))]
    paths_json: Vec<PathBuf>,

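    /// Test configuration files in YAML format.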
    #[arg(id = "config-yaml", long, value_delimiter(','))]
    paths_yaml: Vec<PathBuf>,

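    /// Test configuration files in any supported format; no format hint is
    /// attached, so the format is inferred elsewhere (e.g. from the extension).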
    #[arg(value_delimiter(','))]
    paths: Vec<PathBuf>,

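    /// Directories to read test configuration files from; can also be set with
    /// the `VECTOR_CONFIG_DIR` environment variable.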
    #[arg(
        id = "config-dir",
        short = 'C',
        long,
        env = "VECTOR_CONFIG_DIR",
        value_delimiter(',')
    )]
    pub config_dirs: Vec<PathBuf>,

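    /// Paths to write JUnit XML test reports to; no reports are written when unset.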
    #[arg(id = "junit-report", long, value_delimiter(','))]
    junit_report_paths: Option<Vec<PathBuf>>,
}

impl Opts {
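    /// Pairs every configured file path with its format hint and appends the
    /// configured directories as directory config paths.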
    fn paths_with_formats(&self) -> Vec<config::ConfigPath> {
        config::merge_path_lists(vec![
            (&self.paths, None),
            (&self.paths_toml, Some(config::Format::Toml)),
            (&self.paths_json, Some(config::Format::Json)),
            (&self.paths_yaml, Some(config::Format::Yaml)),
        ])
        .map(|(path, hint)| config::ConfigPath::File(path, hint))
        .chain(
            self.config_dirs
                .iter()
                .map(|dir| config::ConfigPath::Dir(dir.to_path_buf())),
        )
        .collect()
    }
}

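/// Collects per-test results into a JUnit report and writes it to the
/// requested output paths.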
#[derive(Debug)]
pub struct JUnitReporter<'a> {
    report: Report,
    test_suite: TestSuite,
    output_paths: Option<&'a Vec<PathBuf>>,
}

impl<'a> JUnitReporter<'a> {
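    /// Creates a reporter; `paths` is `None` when JUnit output was not requested.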
    fn new(paths: Option<&'a Vec<PathBuf>>) -> Self {
        Self {
            report: Report::new("Vector Unit Tests"),
            test_suite: TestSuite::new("Test Suite"),
            output_paths: paths,
        }
    }

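    /// Records a single test case: a success when `errors` is empty, otherwise a
    /// failure carrying the joined error messages. No-op when JUnit output is
    /// disabled.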
    fn add_test_result(&mut self, name: &str, errors: &[String], time: Duration) {
        if self.output_paths.is_none() {
            return;
        }

        if errors.is_empty() {
            let mut test_case = TestCase::new(name.to_owned(), TestCaseStatus::success());
            test_case.set_time(time);
            self.test_suite.add_test_case(test_case);
        } else {
            let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
            status.set_description(errors.join("\n"));
            let mut test_case = TestCase::new(name.to_owned(), status);
            test_case.set_time(time);
            self.test_suite.add_test_case(test_case);
        }
    }

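    /// Finalizes the test suite, serializes the report, and writes it to every
    /// output path. No-op when JUnit output is disabled.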
    fn write_reports(mut self, time: Duration) -> Result<(), String> {
        if self.output_paths.is_none() {
            return Ok(());
        }

        self.test_suite.set_time(time);
        self.report.add_test_suite(self.test_suite);

        let report_bytes = match self.report.to_string() {
            Ok(report_string) => report_string.into_bytes(),
            Err(error) => return Err(error.to_string()),
        };

        for path in self.output_paths.unwrap() {
            match File::create(path) {
                Ok(mut file) => match file.write_all(&report_bytes) {
                    Ok(()) => {}
                    Err(error) => return Err(error.to_string()),
                },
                Err(error) => return Err(error.to_string()),
            }
        }

        Ok(())
    }
}

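/// Builds the unit tests defined in the given configs, runs each one, prints a
/// per-test pass/fail line, and writes JUnit reports when requested. Returns
/// `exitcode::OK` only when every test passes.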
pub async fn cmd(opts: &Opts, signal_handler: &mut signal::SignalHandler) -> exitcode::ExitCode {
    let mut aggregated_test_errors: Vec<(String, Vec<String>)> = Vec::new();

    let paths = opts.paths_with_formats();
    let paths = match config::process_paths(&paths) {
        Some(paths) => paths,
        None => return exitcode::CONFIG,
    };

    let mut junit_reporter = JUnitReporter::new(opts.junit_report_paths.as_ref());

    #[allow(clippy::print_stdout)]
    {
        println!("Running tests");
    }
    match config::build_unit_tests_main(&paths, signal_handler).await {
        Ok(tests) => {
            if tests.is_empty() {
                #[allow(clippy::print_stdout)]
                {
                    println!("{}", "No tests found.".yellow());
                }
            } else {
                let test_suite_start = Instant::now();

                for test in tests {
                    let name = test.name.clone();

                    let test_case_start = Instant::now();
                    let UnitTestResult { errors } = test.run().await;
                    let test_case_elapsed = test_case_start.elapsed();

                    junit_reporter.add_test_result(&name, &errors, test_case_elapsed);

                    if !errors.is_empty() {
                        #[allow(clippy::print_stdout)]
                        {
                            println!("test {} ... {}", name, "failed".red());
                        }
                        aggregated_test_errors.push((name, errors));
                    } else {
                        #[allow(clippy::print_stdout)]
                        {
                            println!("test {} ... {}", name, "passed".green());
                        }
                    }
                }

                let test_suite_elapsed = test_suite_start.elapsed();
                match junit_reporter.write_reports(test_suite_elapsed) {
                    Ok(()) => {}
                    Err(error) => {
                        error!("Failed to generate JUnit report:\n{}.", error);
                        return exitcode::CONFIG;
                    }
                }
            }
        }
        Err(errors) => {
            error!("Failed to execute tests:\n{}.", errors.join("\n"));
            return exitcode::CONFIG;
        }
    }

    if !aggregated_test_errors.is_empty() {
        #[allow(clippy::print_stdout)]
        {
            println!("\nfailures:");
        }
        for (test_name, fails) in aggregated_test_errors {
            #[allow(clippy::print_stdout)]
            {
                println!("\ntest {test_name}:\n");
            }
            for fail in fails {
                #[allow(clippy::print_stdout)]
                {
                    println!("{fail}\n");
                }
            }
        }

        exitcode::CONFIG
    } else {
        exitcode::OK
    }
}