diff --git a/Cargo.lock b/Cargo.lock index 6dd391f..a16f3c5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" @@ -530,6 +530,12 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" +[[package]] +name = "constant_time_eq" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b" + [[package]] name = "cookie" version = "0.18.1" @@ -1304,6 +1310,12 @@ dependencies = [ "serde", ] +[[package]] +name = "is_ci" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" + [[package]] name = "itertools" version = "0.11.0" @@ -1340,9 +1352,9 @@ dependencies = [ [[package]] name = "json-patch" -version = "3.0.1" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "863726d7afb6bc2590eeff7135d923545e5e964f004c2ccf8716c25e70a86f08" +checksum = "159294d661a039f7644cea7e4d844e6b25aaf71c1ffe9d73a96d768c24b0faf4" dependencies = [ "jsonptr", "serde", @@ -1352,9 +1364,9 @@ dependencies = [ [[package]] name = "jsonptr" -version = "0.6.3" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dea2b27dd239b2556ed7a25ba842fe47fd602e7fc7433c2a8d6106d4d9edd70" +checksum = "a5a3cc660ba5d72bce0b3bb295bf20847ccbb40fd423f3f05b61273672e561fe" dependencies = [ "serde", "serde_json", @@ -1582,6 +1594,12 @@ version = "1.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc" +[[package]] +name = "owo-colors" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1036865bb9422d3300cf723f657c2851d0e9ab12567854b1f4eba3d77decf564" + [[package]] name = "parking" version = "2.2.1" @@ -1648,12 +1666,14 @@ dependencies = [ "httpmock", "iri-string", "log", + "owo-colors", "pgxn_meta", "regex", "semver", "serde", "serde_json", "sha2", + "supports-color", "temp-env", "tempfile", "thiserror 2.0.12", @@ -1664,19 +1684,20 @@ dependencies = [ [[package]] name = "pgxn_meta" -version = "0.5.2" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "851f90705d22aa735b096ab7aee19072d59aa5ad0aef28352ed4fb864b344288" +checksum = "29d1d85ac0df62e00dd190ed6080bea9971e82862c33a7cb3ca38397812770dd" dependencies = [ "base64 0.22.1", "boon", "chrono", - "constant_time_eq", + "constant_time_eq 0.4.2", "digest", "email_address", "hex", "json-patch", "lexopt", + "log", "rand", "relative-path", "semver", @@ -1805,20 +1826,20 @@ checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" [[package]] name = "rand" -version = "0.8.5" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" dependencies = [ - "libc", "rand_chacha", "rand_core", + "zerocopy 0.8.23", ] [[package]] name = "rand_chacha" -version = "0.3.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", "rand_core", @@ -1826,11 +1847,11 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.4" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.3.2", ] [[package]] @@ -2234,6 +2255,15 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "supports-color" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" +dependencies = [ + "is_ci", +] + [[package]] name = "syn" version = "1.0.109" @@ -3022,7 +3052,7 @@ dependencies = [ "aes", "arbitrary", "bzip2", - "constant_time_eq", + "constant_time_eq 0.3.1", "crc32fast", "crossbeam-utils", "deflate64", diff --git a/Cargo.toml b/Cargo.toml index 7031b4c..6535c69 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,11 +20,13 @@ chrono = "0.4.40" hex = "0.4.3" iri-string = "0.7.8" log = { version = "0.4.26", features = ["kv"] } -pgxn_meta = "0.5.2" +owo-colors = "4.2.0" +pgxn_meta = "0.6.0" regex = "1.11.1" semver = "1.0.26" serde = "1.0.219" serde_json = "1.0.140" +supports-color = "3.0.2" tempfile = "3.19.1" thiserror = "2.0.12" ureq = { version = "3.0.10", features = ["json"] } diff --git a/corpus/dist/api/0.1.2/META.json b/corpus/dist/api/0.1.2/META.json new file mode 100644 index 0000000..23866e5 --- /dev/null +++ b/corpus/dist/api/0.1.2/META.json @@ -0,0 +1,90 @@ +{ + "abstract": "A key/value pair data type (with added fields from api.pgxn.org)", + "date": "2011-04-20T23:47:22Z", + "description": "This library contains a single PostgreSQL extension, a key/value pair data type called “pair”, along with a convenience function for constructing key/value pairs.", + "docs": { + "README": { + "title": "pair 0.1.2" + }, + "doc/pair": { + "abstract": "A key/value pair data type", + "title": "pair 0.1.2" + } + }, + "license": "postgresql", + "maintainer": ["David E. 
Wheeler "], + "name": "pair", + "provides": { + "pair": { + "abstract": "A key/value pair data type", + "docfile": "doc/pair.md", + "docpath": "doc/pair", + "file": "sql/pair.sql", + "version": "0.1.2" + } + }, + "release_status": "stable", + "releases": { + "stable": [ + { + "date": "2020-10-25T21:54:02Z", + "version": "0.1.7" + }, + { + "date": "2018-11-10T20:55:55Z", + "version": "0.1.6" + }, + { + "date": "2011-11-11T17:56:30Z", + "version": "0.1.5" + }, + { + "date": "2011-11-11T06:52:41Z", + "version": "0.1.4" + }, + { + "date": "2011-05-12T18:55:30Z", + "version": "0.1.3" + }, + { + "date": "2011-04-20T23:47:22Z", + "version": "0.1.2" + }, + { + "date": "2010-10-29T22:44:42Z", + "version": "0.1.1" + }, + { + "date": "2010-10-19T03:59:54Z", + "version": "0.1.0" + } + ] + }, + "resources": { + "bugtracker": { + "web": "http://github.com/theory/kv-pair/issues/" + }, + "repository": { + "type": "git", + "url": "git://github.com/theory/kv-pair.git", + "web": "http://github.com/theory/kv-pair/" + } + }, + "sha1": "9988d7adb056b11f8576db44cca30f88a08bd652", + "special_files": [ + "Changes", + "README.md", + "META.json", + "Makefile", + "pair.control" + ], + "tags": [ + "variadic function", + "ordered pair", + "pair", + "key value", + "key value pair" + ], + "user": "theory", + "version": "0.1.2" +} diff --git a/mocks/emit.rs b/mocks/emit.rs new file mode 100644 index 0000000..d6a47fb --- /dev/null +++ b/mocks/emit.rs @@ -0,0 +1,6 @@ +// Print to STDOUT and STDERR. +fn main() { + let args: Vec = std::env::args().skip(1).collect(); + println!("{}", &args[0]); + eprintln!("{}", &args[1]); +} diff --git a/src/api/dist/mod.rs b/src/api/dist/mod.rs index b14b5c1..32744af 100644 --- a/src/api/dist/mod.rs +++ b/src/api/dist/mod.rs @@ -3,6 +3,7 @@ //! [Dist API]: https://github.com/pgxn/pgxn-api/wiki/dist-api use chrono::{DateTime, Utc}; +use log::{debug, info}; use semver::Version; use serde::{Deserialize, Serialize}; use std::{borrow::Borrow, io}; @@ -89,13 +90,17 @@ impl Dist { /// returns the latest unstable versions. Returns an error if there are no /// versions at all. pub fn best_version(&self) -> Result<&Version, BuildError> { + debug!(of = self.name(); "Determining best version"); if let Some(v) = self.latest_stable_version() { + info!(of=self.name(), v:display; "Best version"); return Ok(v); } if let Some(v) = self.latest_testing_version() { + info!(of=self.name(), v:display; "Best version"); return Ok(v); } if let Some(v) = self.latest_unstable_version() { + info!(of=self.name(), v:display; "Best version"); return Ok(v); } diff --git a/src/api/mod.rs b/src/api/mod.rs index 825dac2..165f14a 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -4,7 +4,7 @@ Interface to local and remote PGXN mirrors and the PGXN API. */ mod dist; -pub use dist::{Dist, Release, Releases}; +pub use dist::Dist; use crate::error::BuildError; use iri_string::spec; @@ -102,7 +102,7 @@ impl Api { ctx.insert("version", version.to_string()); let url = self.url_for("meta", ctx)?; let mut val = fetch_json(&self.agent, &url)?; - debug!(url:display; "parsing"); + debug!(url:display=url; "parsing"); if val.get("meta-spec").is_none() { // PGXN v1 stripped meta-spec out of this API :-/. let val_type = type_of!(val); @@ -117,14 +117,14 @@ impl Api { /// Unpack download `file` in directory `into` and return the path to the /// unpacked directory. 
pub fn unpack>(&self, into: P, file: P) -> Result { - info!(file:display = crate::filename(&file); "unpacking"); + info!(archive:display = crate::filename(&file); "Unpacking"); let zip = File::open(file)?; let mut archive = zip::ZipArchive::new(zip)?; archive.extract(&into)?; let first = archive .by_index(0)? .enclosed_name() - .ok_or_else(|| zip::result::ZipError::FileNotFound)?; + .ok_or(zip::result::ZipError::FileNotFound)?; Ok(into.as_ref().join(first)) } @@ -137,7 +137,7 @@ impl Api { .ok_or_else(|| BuildError::UnknownTemplate(name.to_string()))?; let path = template.expand::(&ctx)?; let url = self.url.join(&path.to_string())?; - trace!(url:display; "resolved URL"); + debug!(url:display=url; "Resolved"); Ok(url) } @@ -152,10 +152,9 @@ impl Api { ctx.insert("dist", meta.name()); ctx.insert("version", meta.version().to_string()); let url = self.url_for("download", ctx)?; - info!(url:display; "downloading"); let file = self.download_url_to(dir, url)?; - info!(file:display = file.display(); "validating"); meta.release().digests().validate(&file)?; + Ok(file) } @@ -166,7 +165,7 @@ impl Api { dir: P, url: url::Url, ) -> Result { - trace!( url:display, dir:display = dir.as_ref().display(); "downloading"); + info!(from:% = url, to:display=dir.as_ref().display(); "Downloading"); // Extract the file name from the URL. match url.path_segments() { None => Err(BuildError::NoUrlFile(url))?, @@ -183,31 +182,40 @@ impl Api { // Copy the file. Eschew std::fs::copy for better // error messages. let mut input = get_file(&url)?; + debug!(destination:display=dst.display(); "Create"); return match File::create(&dst) { Err(e) => Err(BuildError::File( "creating", dst.display().to_string(), e.kind(), )), - Ok(mut out) => match io::copy(&mut input, &mut out) { - Ok(_) => Ok(dst), - Err(e) => copy_err!(url.to_file_path().unwrap().display(), dst, e), - }, + Ok(mut out) => { + debug!(to:display=dst.display(); "Copy"); + match io::copy(&mut input, &mut out) { + Ok(_) => Ok(dst), + Err(e) => copy_err!(url.to_file_path().unwrap().display(), dst, e), + } + } }; } // Download the file over HTTP. + debug!(url:%; "GET"); let res = self.agent.get(url.as_str()).call()?; + debug!(destination:display=dst.display(); "Create"); match File::create(&dst) { Err(e) => Err(BuildError::File( "creating", dst.display().to_string(), e.kind(), )), - Ok(mut out) => match io::copy(&mut res.into_body().as_reader(), &mut out) { - Ok(_) => Ok(dst), - Err(e) => copy_err!(url, dst, e), - }, + Ok(mut out) => { + debug!(to:display=dst.display(); "Copy"); + match io::copy(&mut res.into_body().as_reader(), &mut out) { + Ok(_) => Ok(dst), + Err(e) => copy_err!(url, dst, e), + } + } } } } @@ -227,7 +235,7 @@ fn parse_base_url(url: &str) -> Result { /// Fetches the JSON at URL and converts it to a serde_json::Value. fn fetch_json(agent: &ureq::Agent, url: &url::Url) -> Result { - debug!(url:display; "fetching"); + debug!(url:display=url; "fetching"); match url.scheme() { "file" => Ok(serde_json::from_reader(get_file(url)?)?), // Avoid .into_json(); it returns IO errors. 
@@ -243,7 +251,7 @@ fn fetch_reader( agent: &ureq::Agent, url: &url::Url, ) -> Result, BuildError> { - debug!(url:display; "fetching"); + debug!(url:display=url; "fetching"); match url.scheme() { "file" => Ok(Box::new(get_file(url)?)), "http" | "https" => Ok(Box::new( @@ -260,6 +268,7 @@ fn get_file(url: &url::Url) -> Result { Err(_) => Err(BuildError::NoUrlFile(url.clone()))?, Ok(s) => s, }; + debug!(file:display = src.display(); "Source"); // if src.is_dir() { // return Err(BuildError::File( // "opening", @@ -290,6 +299,7 @@ fn fetch_templates( let mut map: HashMap = HashMap::with_capacity(obj.len()); for (k, v) in obj.into_iter() { + trace!(template:display=k, url:display=v; "load"); let str = v.as_str().ok_or_else(|| { BuildError::Type( format!("template {} in {}", json!(k), url), diff --git a/src/api/tests.rs b/src/api/tests.rs index c575759..473601c 100644 --- a/src/api/tests.rs +++ b/src/api/tests.rs @@ -862,6 +862,20 @@ fn meta() -> Result<(), BuildError> { Ok(()) } +#[test] +fn api_meta() -> Result<(), BuildError> { + let url = format!("file://{}/", corpus_dir().display()); + let api = Api::new(&url, None)?; + let v = Version::parse("0.1.2").unwrap(); + let dist = api.meta("api", &v)?; + assert_eq!("pair", dist.name()); + assert_eq!(&v, dist.version()); + let sha = dist.release().digests().sha1().unwrap(); + assert_eq!("9988d7adb056b11f8576db44cca30f88a08bd652", hex::encode(sha)); + + Ok(()) +} + #[test] fn meta_err() -> Result<(), BuildError> { // Start a lightweight mock server. diff --git a/src/exec/mod.rs b/src/exec/mod.rs new file mode 100644 index 0000000..da7d798 --- /dev/null +++ b/src/exec/mod.rs @@ -0,0 +1,155 @@ +//! Stream STDOUT and STDERR output from a Command to buffers. +use crate::error::BuildError; +use crate::line::WriteLine; +use log::debug; +use std::{ + clone::Clone, + fmt, + io::{self, BufRead, BufReader}, + path::PathBuf, + process::{Command, Stdio}, + sync::mpsc, + thread, +}; + +#[cfg(test)] +mod tests; + +// Define a structure fo capturing output. +struct Output { + line: String, + is_err: bool, +} + +impl Output { + fn new(line: String, is_err: bool) -> Self { + Self { line, is_err } + } +} + +/// Command execution context. +pub(crate) struct Executor { + dir: PathBuf, + out: Box, + err: Box, +} + +impl fmt::Debug for Executor { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Executor") + .field("dir", &self.dir) + .field("out", &std::any::type_name_of_val(&self.out)) + .field("err", &std::any::type_name_of_val(&self.err)) + .finish() + } +} + +/// Consider Executors equal if they have the same directory +impl PartialEq for Executor { + fn eq(&self, other: &Self) -> bool { + self.dir() == other.dir() + } +} + +impl Executor { + /// Creates a new command execution context. Commands passed to + /// [`execute`] will have their current directory set to `dir`. STDOUT + /// lines will be sent to `out` and STDERR lines will be sent to err. + pub fn new( + dir: impl Into, + out: O, + err: E, + ) -> Self { + Self { + dir: dir.into(), + out: Box::new(out), + err: Box::new(err), + } + } + + /// Returns a reference to the directory passed to [`Self::new`]. + pub fn dir(&self) -> &PathBuf { + &self.dir + } + + /// Sets `cmd`'s `current_dir` to `self.dir`, pipes output to `self.out` + /// and `self.err`, and executes `cmd`. + pub fn execute(&mut self, mut cmd: Command) -> Result<(), BuildError> { + // Execute from self.dir and create pipes from the child's stdout and stderr. 
+ cmd.current_dir(&self.dir) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + // Spawn the child process. + debug!(command:? = cmd; "Executing"); + let mut child = cmd + .spawn() + .map_err(|e| BuildError::Command(format!("{:?}", cmd), e.kind().to_string()))?; + + // Grab the stdout and stderr pipes. + let child_out = child + .stdout + .take() + .ok_or_else(|| BuildError::Command(format!("{:?}", cmd), "no stdout".to_string()))?; + let child_err = child + .stderr + .take() + .ok_or_else(|| BuildError::Command(format!("{:?}", cmd), "no stderr".to_string()))?; + + // Setup a channel to send stdout and stderr lines. + let (otx, rx) = mpsc::channel(); + let etx = otx.clone(); + + // Spawn a thread to stream STDOUT lines back to the main thread. + let stdout_thread = thread::spawn(move || -> Result<(), io::Error> { + let buf = BufReader::new(child_out); + for line in buf.lines() { + otx.send(Output::new(line?, false)).unwrap() + } + Ok(()) + }); + + // Spawn a thread to stream STDERR lines back to the main thread. + let stderr_thread = thread::spawn(move || -> Result<(), io::Error> { + let stderr_lines = BufReader::new(child_err).lines(); + for line in stderr_lines { + etx.send(Output::new(line?, true)).unwrap(); + } + Ok(()) + }); + + // Read the lines from the spawned threads and format send them to the buffers. + for output in rx { + if output.is_err { + self.err.write_line(&output.line)?; + } else { + self.out.write_line(&output.line)?; + } + } + + // Wait for the child and output threads to finish. + let res = child.wait(); + stdout_thread.join().unwrap()?; + stderr_thread.join().unwrap()?; + + // Determine how the command finished. + match res { + Ok(status) => { + if !status.success() { + return Err(BuildError::Command( + format!("{:?}", cmd), + match status.code() { + Some(code) => format!("exited with status code: {code}"), + None => "process terminated by signal".to_string(), + }, + )); + } + Ok(()) + } + Err(e) => Err(BuildError::Command( + format!("{:?}", cmd), + e.kind().to_string(), + )), + } + } +} diff --git a/src/exec/tests.rs b/src/exec/tests.rs new file mode 100644 index 0000000..ea27c63 --- /dev/null +++ b/src/exec/tests.rs @@ -0,0 +1,96 @@ +use super::*; +use crate::error::BuildError; +use crate::line; +use crate::tests::compile_mock; +use assertables::*; +// use std::str; +use tempfile::tempdir; + +#[test] +fn output() { + let output = Output::new("hello".to_string(), false); + assert_eq!("hello", output.line); + assert!(!output.is_err); + let output = Output::new("🥏 Flying Disk!".to_string(), true); + assert_eq!("🥏 Flying Disk!", output.line); + assert!(output.is_err); +} + +#[test] +fn execute() -> Result<(), BuildError> { + let tmp = tempdir()?; + // Build an app that echos output. + let dest = tmp + .path() + .join(if cfg!(windows) { "sudo.exe" } else { "sudo" }) + .display() + .to_string(); + compile_mock("emit", &dest); + + // Set up buffers for output. + let out = Vec::new(); + let err = Vec::new(); + { + let stdout = line::LineWriter::new(out); + let stderr = line::LineWriter::new(err); + let mut exec = Executor::new(tmp.as_ref(), stdout, stderr); + + // Run the app. + let mut cmd = Command::new(&dest); + cmd.arg("this is standard output") + .arg("this is error output"); + if let Err(e) = exec.execute(cmd) { + panic!("emit execution failed: {e}"); + } + + // Run it again. 
+ let mut cmd = Command::new(&dest); + cmd.arg("more standard output").arg("more error output"); + if let Err(e) = exec.execute(cmd) { + panic!("emit execution failed: {e}"); + } + } + + // Check the output. + // let res = str::from_utf8(out.as_slice()).unwrap(); + // assert_eq!("this is standard output\nmore standard output\n", res); + // let res = str::from_utf8(err.as_slice()).unwrap(); + // assert_eq!("this is error output\nmore error output\n", res); + + // Test nonexistent file. + let out = Vec::new(); + let err = Vec::new(); + let stdout = line::LineWriter::new(out); + let stderr = line::LineWriter::new(err); + let mut exec = Executor::new(tmp.as_ref(), stdout, stderr); + match exec.execute(Command::new("__nonesuch_nope__")) { + Ok(_) => panic!("Nonexistent file unexpectedly succeeded"), + Err(e) => { + assert_starts_with!(e.to_string(), "executing "); + assert_ends_with!(e.to_string(), "\"__nonesuch_nope__\"`: entity not found") + } + } + // assert_eq!(0, out.len()); + // assert_eq!(0, err.len()); + + // Test an executable that returns an error. + let out = Vec::new(); + let err = Vec::new(); + let stdout = line::LineWriter::new(out); + let stderr = line::LineWriter::new(err); + let mut exec = Executor::new(tmp.as_ref(), stdout, stderr); + let path = tmp.path().join("exit_err").display().to_string(); + compile_mock("exit_err", &path); + match exec.execute(Command::new(&path)) { + Ok(_) => panic!("exit_err unexpectedly succeeded"), + Err(e) => { + assert_starts_with!(e.to_string(), "executing"); + assert_ends_with!(e.to_string(), " exited with status code: 2"); + } + } + // assert_eq!(0, out.len()); + // let res = str::from_utf8(err.as_slice()).unwrap(); + // assert_eq!("DED: \n", res); + + Ok(()) +} diff --git a/src/lib.rs b/src/lib.rs index 1378b3a..7773ca8 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,48 +7,57 @@ This crate builds PGXN distributions for a variety of platforms and Postgres versions. */ -pub mod api; +mod api; pub mod error; -pub mod pg_config; +mod exec; +mod line; +mod pg_config; mod pgrx; mod pgxs; mod pipeline; -use crate::{error::BuildError, pgrx::Pgrx, pgxs::Pgxs, pipeline::Pipeline}; -use pg_config::PgConfig; +use crate::{error::BuildError, exec::Executor, pgrx::Pgrx, pgxs::Pgxs, pipeline::Pipeline}; +pub use api::Api; +use line::WriteLine; +use owo_colors::Style; +pub use pg_config::PgConfig; use pgxn_meta::{dist, release::Release}; -use std::path::Path; +use std::{ + io, + path::{Path, PathBuf}, +}; +use supports_color::Stream; /// Defines the types of builders. #[derive(Debug, PartialEq)] -enum Build> { +enum Build { /// Defines a builder using the PGXS pipeline. - Pgxs(Pgxs
<P>),
+    Pgxs(Pgxs),
     /// Defines a builder using the pgrx pipeline.
-    Pgrx(Pgrx<P>),
+    Pgrx(Pgrx),
 }
-impl<P: AsRef<Path>> Build<P> {
+impl Build {
     /// Returns a build pipeline identified by `pipe`, or an error if `pipe`
     /// is unknown.
-    fn new(pipe: &dist::Pipeline, dir: P, cfg: PgConfig) -> Result<Build<P>, BuildError> {
+    fn new(pipe: &dist::Pipeline, exec: Executor, cfg: PgConfig) -> Result<Self, BuildError> {
         match pipe {
-            dist::Pipeline::Pgxs => Ok(Build::Pgxs(Pgxs::new(dir, cfg))),
-            dist::Pipeline::Pgrx => Ok(Build::Pgrx(Pgrx::new(dir, cfg))),
+            dist::Pipeline::Pgxs => Ok(Build::Pgxs(Pgxs::new(exec, cfg))),
+            dist::Pipeline::Pgrx => Ok(Build::Pgrx(Pgrx::new(exec, cfg))),
             _ => Err(BuildError::UnknownPipeline(pipe.to_string())),
         }
     }
     /// Attempts to detect and return the appropriate build pipeline to build
     /// the contents of `dir`. Returns an error if no pipeline can do so.
-    fn detect(dir: P, cfg: PgConfig) -> Result<Build<P>, BuildError> {
+    fn detect(exec: Executor, cfg: PgConfig) -> Result<Self, BuildError> {
         // Start with PGXS.
-        let mut score = Pgxs::confidence(&dir);
+        let mut score = Pgxs::confidence(exec.dir());
         let mut pipe = dist::Pipeline::Pgxs;
         // Does pgrx have a higher score?
-        let c = Pgrx::confidence(&dir);
+        let c = Pgrx::confidence(exec.dir());
         if c > score {
             score = c;
             pipe = dist::Pipeline::Pgrx;
@@ -62,8 +71,8 @@ impl<P: AsRef<Path>> Build<P> {
         // Construct the winner.
         match pipe {
-            dist::Pipeline::Pgrx => Ok(Build::Pgrx(Pgrx::new(dir, cfg))),
-            dist::Pipeline::Pgxs => Ok(Build::Pgxs(Pgxs::new(dir, cfg))),
+            dist::Pipeline::Pgrx => Ok(Build::Pgrx(Pgrx::new(exec, cfg))),
+            dist::Pipeline::Pgxs => Ok(Build::Pgxs(Pgxs::new(exec, cfg))),
             _ => unreachable!("unknown pipelines {pipe}"),
         }
     }
@@ -71,22 +80,23 @@ impl<P: AsRef<Path>> Build<P> {
 /// Builder builds PGXN releases.
 #[derive(Debug, PartialEq)]
-pub struct Builder<P: AsRef<Path>> {
-    pipeline: Build<P>,
+pub struct Builder {
+    pipeline: Build,
     meta: Release,
 }
-impl<P: AsRef<Path>> Builder<P> {
+impl Builder {
     /// Creates and returns a new builder using the appropriate pipeline.
-    pub fn new(dir: P, meta: Release, cfg: PgConfig) -> Result<Self, BuildError> {
+    pub fn new(dir: impl Into<PathBuf>, meta: Release, cfg: PgConfig) -> Result<Self, BuildError> {
+        let exec = Executor::new(dir.into(), stdout(), stderr());
         let pipeline = if let Some(deps) = meta.dependencies() {
             if let Some(pipe) = deps.pipeline() {
-                Build::new(pipe, dir, cfg)?
+                Build::new(pipe, exec, cfg)?
             } else {
-                Build::detect(dir, cfg)?
+                Build::detect(exec, cfg)?
             }
         } else {
-            Build::detect(dir, cfg)?
+            Build::detect(exec, cfg)?
         };
         Ok(Builder { pipeline, meta })
@@ -94,32 +104,32 @@ impl<P: AsRef<Path>> Builder<P>
{ /// Configures a distribution to build on a particular platform and /// Postgres version. - pub fn configure(&self) -> Result<(), BuildError> { - match &self.pipeline { + pub fn configure(&mut self) -> Result<(), BuildError> { + match &mut self.pipeline { Build::Pgxs(pgxs) => pgxs.configure(), Build::Pgrx(pgrx) => pgrx.configure(), } } /// Compiles a distribution on a particular platform and Postgres version. - pub fn compile(&self) -> Result<(), BuildError> { - match &self.pipeline { + pub fn compile(&mut self) -> Result<(), BuildError> { + match &mut self.pipeline { Build::Pgxs(pgxs) => pgxs.compile(), Build::Pgrx(pgrx) => pgrx.compile(), } } /// Tests a distribution a particular platform and Postgres version. - pub fn test(&self) -> Result<(), BuildError> { - match &self.pipeline { + pub fn test(&mut self) -> Result<(), BuildError> { + match &mut self.pipeline { Build::Pgxs(pgxs) => pgxs.test(), Build::Pgrx(pgrx) => pgrx.test(), } } /// Installs a distribution on a particular platform and Postgres version. - pub fn install(&self) -> Result<(), BuildError> { - match &self.pipeline { + pub fn install(&mut self) -> Result<(), BuildError> { + match &mut self.pipeline { Build::Pgxs(pgxs) => pgxs.install(), Build::Pgrx(pgrx) => pgrx.install(), } @@ -135,5 +145,42 @@ pub(crate) fn filename>(path: P) -> String { .to_string() } +/// Returns a WriteLine implementation that prints to STDOUT. Returns a +/// [line::ColorLine] that styles text dimmed grey if STDOUT supports color. +/// Otherwise returns a [line::LineWriter]. +fn stdout() -> Box { + if cfg!(test) { + return Box::new(line::LineWriter::new(vec![])); + } + _styled_stdout() +} + +fn _styled_stdout() -> Box { + if supports_color::on(Stream::Stdout).is_some() { + return Box::new(line::ColorLine::new( + io::stdout(), + Style::new().white().dimmed(), + )); + } + Box::new(line::LineWriter::new(io::stdout())) +} + +/// Returns a WriteLine implementation that prints to STDERR. Returns a +/// [line::ColorLine] that styles text red if STDERR supports color. Otherwise +/// returns a [line::LineWriter]. +fn stderr() -> Box { + if cfg!(test) { + return Box::new(line::LineWriter::new(vec![])); + } + _styled_stderr() +} + +fn _styled_stderr() -> Box { + if supports_color::on(Stream::Stderr).is_some() { + return Box::new(line::ColorLine::new(io::stderr(), Style::new().red())); + } + Box::new(line::LineWriter::new(io::stderr())) +} + #[cfg(test)] mod tests; diff --git a/src/line/mod.rs b/src/line/mod.rs new file mode 100644 index 0000000..29ca364 --- /dev/null +++ b/src/line/mod.rs @@ -0,0 +1,88 @@ +use owo_colors::Style; +use std::{ + fmt::Debug, + io::{Result, Write}, +}; + +// WriteLine extends [io::Write] to add a function for writing a line of text. +pub trait WriteLine: Write { + fn write_line(&mut self, line: &str) -> Result<()>; +} + +// LineWriter implements WriteLine to write a line of text to an internal +// [std::io::Write] implementation. +#[derive(Debug, PartialEq)] +pub struct LineWriter(T); + +impl LineWriter { + /// Create a new LineWriter that writes lines of text to `writer`. + pub fn new(writer: T) -> Self { + Self(writer) + } +} + +impl Write for LineWriter { + /// Write buf to the underlying writer. + fn write(&mut self, buf: &[u8]) -> Result { + self.0.write(buf) + } + + /// Flush the underlying writer. + fn flush(&mut self) -> Result<()> { + self.0.flush() + } +} + +impl WriteLine for LineWriter { + /// Write `line` to the underlying writer. 
+ fn write_line(&mut self, line: &str) -> Result<()> { + writeln!(self.0, "{}", line) + } +} + +impl WriteLine for Box { + /// Write `line` to the underlying writer. + fn write_line(&mut self, line: &str) -> Result<()> { + (**self).write_line(line) + } +} + +/// ColorLine implements WriteLine to write a colored line of text to an +/// internal [std::io::Write] implementation. +pub struct ColorLine { + inner: T, + style: Style, +} + +impl ColorLine { + /// Create a new ColorLine that writes lines of text styled with `style` to + /// `writer`. + pub fn new(writer: T, style: Style) -> Self { + Self { + inner: writer, + style, + } + } +} + +impl Write for ColorLine { + /// Write buf to the underlying writer. + fn write(&mut self, buf: &[u8]) -> Result { + self.inner.write(buf) + } + + /// Flush the underlying writer. + fn flush(&mut self) -> Result<()> { + self.inner.flush() + } +} + +impl WriteLine for ColorLine { + /// Styles and write `line` to the underlying writer. + fn write_line(&mut self, line: &str) -> Result<()> { + writeln!(self.inner, "{}", self.style.style(line)) + } +} + +#[cfg(test)] +mod tests; diff --git a/src/line/tests.rs b/src/line/tests.rs new file mode 100644 index 0000000..a120284 --- /dev/null +++ b/src/line/tests.rs @@ -0,0 +1,49 @@ +use super::*; +use std::str; + +#[test] +fn line_writer() { + let mut buf = Vec::new(); + let mut lw = LineWriter::new(&mut buf); + assert!(lw.write_line("hello").is_ok()); + assert!(lw.write("yes".as_bytes()).is_ok()); + assert!(lw.flush().is_ok()); + let res = str::from_utf8(buf.as_slice()).unwrap(); + assert_eq!("hello\nyes", res); + + // Write a couple of lines with a boxed LineWriter. + let mut buf = Vec::new(); + let mut lw = Box::new(LineWriter::new(&mut buf)); + assert!(lw.write_line("🤘🏻 Rock on gold dust woman").is_ok()); + assert!(lw.write_line("🐦‍⬛ Crows are rad.").is_ok()); + let res = str::from_utf8(buf.as_slice()).unwrap(); + assert_eq!("🤘🏻 Rock on gold dust woman\n🐦‍⬛ Crows are rad.\n", res); +} + +#[test] +fn color_line() { + let mut buf = Vec::new(); + let style = Style::new().green(); + let mut lw = ColorLine::new(&mut buf, style); + assert!(lw.write_line("hello").is_ok()); + assert!(lw.write("yes".as_bytes()).is_ok()); + assert!(lw.flush().is_ok()); + let res = str::from_utf8(buf.as_slice()).unwrap(); + assert_eq!(format!("{}\nyes", style.style("hello")), res); + + // Write a couple of lines with a boxed LineWriter. + let mut buf = Vec::new(); + let style = Style::new().red(); + let mut lw = Box::new(ColorLine::new(&mut buf, style)); + assert!(lw.write_line("🤘🏻 Rock on gold dust woman").is_ok()); + assert!(lw.write_line("🐦‍⬛ Crows are rad.").is_ok()); + let res = str::from_utf8(buf.as_slice()).unwrap(); + assert_eq!( + format!( + "{}\n{}\n", + style.style("🤘🏻 Rock on gold dust woman"), + style.style("🐦‍⬛ Crows are rad.") + ), + res + ); +} diff --git a/src/pgrx/mod.rs b/src/pgrx/mod.rs index f874fb1..ef52708 100644 --- a/src/pgrx/mod.rs +++ b/src/pgrx/mod.rs @@ -2,28 +2,27 @@ //! //! [pgrx]: https://github.com/pgcentralfoundation/pgrx -use crate::error::BuildError; -use crate::pg_config::PgConfig; use crate::pipeline::Pipeline; +use crate::{error::BuildError, exec::Executor, pg_config::PgConfig}; use std::path::Path; /// Builder implementation for [pgrx] Pipelines. /// /// [pgrx]: https://github.com/pgcentralfoundation/pgrx #[derive(Debug, PartialEq)] -pub(crate) struct Pgrx> { +pub(crate) struct Pgrx { + exec: Executor, cfg: PgConfig, - dir: P, } -impl> Pipeline
<P> for Pgrx<P> {
-    fn new(dir: P, cfg: PgConfig) -> Self {
-        Pgrx { cfg, dir }
+impl Pipeline for Pgrx {
+    fn new(exec: Executor, cfg: PgConfig) -> Self {
+        Pgrx { exec, cfg }
     }
-    /// Returns the directory passed to [`Self::new`].
-    fn dir(&self) -> &P {
-        &self.dir
+    /// Returns the Executor passed to [`Self::new`].
+    fn executor(&mut self) -> &mut Executor {
+        &mut self.exec
     }
     /// Returns the PgConfig passed to [`Self::new`].
@@ -35,7 +34,7 @@ impl<P: AsRef<Path>> Pipeline<P> for Pgrx<P> {
     /// contents of `dir`. Returns 255 if it contains a file named
     /// `Cargo.toml` and lists pgrx as a dependency. Otherwise returns 1 if
     /// `Cargo.toml` exists and 0 if it does not.
-    fn confidence(dir: P) -> u8 {
+    fn confidence(dir: impl AsRef<Path>) -> u8 {
         let file = dir.as_ref().join("Cargo.toml");
         if !file.exists() {
             return 0;
         }
@@ -54,22 +53,22 @@ impl<P: AsRef<Path>> Pipeline<P> for Pgrx<P>
{ } /// Runs `cargo init`. - fn configure(&self) -> Result<(), BuildError> { + fn configure(&mut self) -> Result<(), BuildError> { Ok(()) } /// Runs `cargo build`. - fn compile(&self) -> Result<(), BuildError> { + fn compile(&mut self) -> Result<(), BuildError> { Ok(()) } /// Runs `cargo test`. - fn test(&self) -> Result<(), BuildError> { + fn test(&mut self) -> Result<(), BuildError> { Ok(()) } /// Runs `cargo install`. - fn install(&self) -> Result<(), BuildError> { + fn install(&mut self) -> Result<(), BuildError> { Ok(()) } } diff --git a/src/pgrx/tests.rs b/src/pgrx/tests.rs index 79fff8f..5851ec2 100644 --- a/src/pgrx/tests.rs +++ b/src/pgrx/tests.rs @@ -1,10 +1,16 @@ use super::*; +use crate::line::LineWriter; use std::{collections::HashMap, fs::File, io::Write}; use tempfile::tempdir; #[test] fn confidence() -> Result<(), BuildError> { let tmp = tempdir()?; + // let mut out = Vec::new(); + // let mut err = Vec::new(); + // // Test basic success. + // let exec = Executor::new(&tmp, LineWriter::new(&mut out), LineWriter::new(&mut err)); + // No Cargo.toml. assert_eq!(0, Pgrx::confidence(tmp.as_ref())); @@ -28,23 +34,26 @@ fn confidence() -> Result<(), BuildError> { fn new() { let dir = Path::new(env!("CARGO_MANIFEST_DIR")); let cfg = PgConfig::from_map(HashMap::new()); - let pipe = Pgrx::new(dir, cfg.clone()); - assert_eq!(dir, pipe.dir); - assert_eq!(&dir, pipe.dir()); + let exec = Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])); + let mut pipe = Pgrx::new(exec, cfg.clone()); + let exec = Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])); + assert_eq!(&exec, pipe.executor()); assert_eq!(&cfg, pipe.pg_config()); let dir2 = dir.join("corpus"); let cfg2 = PgConfig::from_map(HashMap::from([("bindir".to_string(), "bin".to_string())])); - let pipe = Pgrx::new(dir2.as_path(), cfg2.clone()); - assert_eq!(dir2, pipe.dir); - assert_eq!(&dir2, pipe.dir()); + let exec2 = Executor::new(&dir2, LineWriter::new(vec![]), LineWriter::new(vec![])); + let mut pipe = Pgrx::new(exec2, cfg2.clone()); + let exec2 = Executor::new(&dir2, LineWriter::new(vec![]), LineWriter::new(vec![])); + assert_eq!(&exec2, pipe.executor()); assert_eq!(&cfg2, pipe.pg_config()); } #[test] fn configure_et_al() { let dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let pipe = Pgrx::new(dir, PgConfig::from_map(HashMap::new())); + let exec = Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])); + let mut pipe = Pgrx::new(exec, PgConfig::from_map(HashMap::new())); assert!(pipe.configure().is_ok()); assert!(pipe.compile().is_ok()); assert!(pipe.test().is_ok()); diff --git a/src/pgxs/mod.rs b/src/pgxs/mod.rs index a51669c..2f317dc 100644 --- a/src/pgxs/mod.rs +++ b/src/pgxs/mod.rs @@ -2,9 +2,8 @@ //! //! [PGXS]: https://www.postgresql.org/docs/current/extend-pgxs.html -use crate::pipeline::Pipeline; -use crate::{error::BuildError, pg_config::PgConfig}; -use log::info; +use crate::{error::BuildError, exec::Executor, pg_config::PgConfig, pipeline::Pipeline}; +use log::{debug, info}; use regex::Regex; use std::{ fs::{self, File}, @@ -16,14 +15,14 @@ use std::{ /// /// [PGXS]: https://www.postgresql.org/docs/current/extend-pgxs.html #[derive(Debug, PartialEq)] -pub(crate) struct Pgxs> { +pub(crate) struct Pgxs { + exec: Executor, cfg: PgConfig, - dir: P, } -impl> Pipeline
<P> for Pgxs<P> {
-    fn new(dir: P, cfg: PgConfig) -> Self {
-        Pgxs { cfg, dir }
+impl Pipeline for Pgxs {
+    fn new(exec: Executor, cfg: PgConfig) -> Self {
+        Pgxs { exec, cfg }
     }
     /// Determines the confidence that the Pgxs pipeline can build the
@@ -34,7 +33,7 @@ impl<P: AsRef<Path>> Pipeline<P> for Pgxs<P> {
     /// * Returns 200 if it declares variables named `MODULES`,
     ///   `MODULE_big`, `PROGRAM`, `EXTENSION`, `DATA`, or `DATA_built`
     /// * Otherwise returns 127
-    fn confidence(dir: P) -> u8 {
+    fn confidence(dir: impl AsRef<Path>) -> u8 {
         let file = match makefile(dir.as_ref()) {
             Some(f) => f,
             None => return 0,
@@ -65,9 +64,9 @@ impl<P: AsRef<Path>> Pipeline<P> for Pgxs<P> {
         score
     }
-    /// Returns the directory passed to [`Self::new`].
-    fn dir(&self) -> &P {
-        &self.dir
+    /// Returns the Executor passed to [`Self::new`].
+    fn executor(&mut self) -> &mut Executor {
+        &mut self.exec
     }
     /// Returns the PgConfig passed to [`Self::new`].
@@ -75,9 +74,9 @@ impl<P: AsRef<Path>> Pipeline<P> for Pgxs<P> {
         &self.cfg
     }
-    fn configure(&self) -> Result<(), BuildError> {
+    fn configure(&mut self) -> Result<(), BuildError> {
         // Run configure if it exists.
-        if let Ok(ok) = fs::exists(self.dir().as_ref().join("configure")) {
+        if let Ok(ok) = fs::exists(self.exec.dir().join("configure")) {
             if ok {
                 info!("running configure");
                 // "." will not work on VMS or MacOS Classic.
@@ -89,19 +88,19 @@ impl<P: AsRef<Path>> Pipeline<P> for Pgxs<P>
{ Ok(()) } - fn compile(&self) -> Result<(), BuildError> { + fn compile(&mut self) -> Result<(), BuildError> { info!("building extension"); self.run("make", ["all"], false)?; Ok(()) } - fn test(&self) -> Result<(), BuildError> { + fn test(&mut self) -> Result<(), BuildError> { info!("testing extension"); self.run("make", ["installcheck"], false)?; Ok(()) } - fn install(&self) -> Result<(), BuildError> { + fn install(&mut self) -> Result<(), BuildError> { info!("installing extension"); self.run("make", ["install"], true)?; Ok(()) @@ -114,6 +113,7 @@ fn makefile(dir: &Path) -> Option { for makefile in ["GNUmakefile", "makefile", "Makefile"] { let file = dir.join(makefile); if file.exists() { + debug!("Found {:?}", file); return Some(file); } } diff --git a/src/pgxs/tests.rs b/src/pgxs/tests.rs index 188bc3a..5edbdd1 100644 --- a/src/pgxs/tests.rs +++ b/src/pgxs/tests.rs @@ -1,4 +1,5 @@ use super::*; +use crate::line::LineWriter; use assertables::*; #[cfg(target_family = "unix")] use std::os::unix::fs::PermissionsExt; @@ -57,23 +58,33 @@ fn confidence() -> Result<(), BuildError> { fn new() { let dir = Path::new(env!("CARGO_MANIFEST_DIR")); let cfg = PgConfig::from_map(HashMap::new()); - let pipe = Pgxs::new(dir, cfg.clone()); - assert_eq!(dir, pipe.dir); - assert_eq!(&dir, pipe.dir()); - assert_eq!(&cfg, pipe.pg_config()); + { + // Test basic success. + let exec = Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])); + let mut pipe = Pgxs::new(exec, cfg.clone()); + let exec = Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])); + assert_eq!(&exec, pipe.executor()); + assert_eq!(&cfg, pipe.pg_config()); + } let dir2 = dir.join("corpus"); let cfg2 = PgConfig::from_map(HashMap::from([("bindir".to_string(), "bin".to_string())])); - let pipe = Pgxs::new(dir2.as_path(), cfg2.clone()); - assert_eq!(dir2, pipe.dir); - assert_eq!(&dir2, pipe.dir()); + let exec2 = Executor::new(&dir2, LineWriter::new(vec![]), LineWriter::new(vec![])); + let mut pipe = Pgxs::new(exec2, cfg2.clone()); + let exec2 = Executor::new(&dir2, LineWriter::new(vec![]), LineWriter::new(vec![])); + assert_eq!(&exec2, pipe.executor()); assert_eq!(&cfg2, pipe.pg_config()); } #[test] fn configure() -> Result<(), BuildError> { let tmp = tempdir()?; - let pipe = Pgxs::new(&tmp, PgConfig::from_map(HashMap::new())); + let exec = Executor::new( + tmp.as_ref(), + LineWriter::new(vec![]), + LineWriter::new(vec![]), + ); + let mut pipe = Pgxs::new(exec, PgConfig::from_map(HashMap::new())); // Try with no Configure file. 
if let Err(e) = pipe.configure() { @@ -92,6 +103,7 @@ fn configure() -> Result<(), BuildError> { match pipe.configure() { Ok(_) => panic!("configure unexpectedly succeeded"), Err(e) => { + println!("OUTPUT {e}"); assert_starts_with!(e.to_string(), "executing "); assert_ends_with!( e.to_string(), @@ -125,7 +137,8 @@ fn configure() -> Result<(), BuildError> { #[test] fn compile() -> Result<(), BuildError> { let dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let pipe = Pgxs::new(dir, PgConfig::from_map(HashMap::new())); + let exec = Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])); + let mut pipe = Pgxs::new(exec, PgConfig::from_map(HashMap::new())); assert!(pipe.compile().is_err()); Ok(()) } @@ -133,7 +146,8 @@ fn compile() -> Result<(), BuildError> { #[test] fn test() -> Result<(), BuildError> { let dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let pipe = Pgxs::new(dir, PgConfig::from_map(HashMap::new())); + let exec = Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])); + let mut pipe = Pgxs::new(exec, PgConfig::from_map(HashMap::new())); assert!(pipe.test().is_err()); Ok(()) } @@ -141,7 +155,8 @@ fn test() -> Result<(), BuildError> { #[test] fn install() -> Result<(), BuildError> { let dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let pipe = Pgxs::new(dir, PgConfig::from_map(HashMap::new())); + let exec = Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])); + let mut pipe = Pgxs::new(exec, PgConfig::from_map(HashMap::new())); assert!(pipe.install().is_err()); Ok(()) } diff --git a/src/pipeline/mod.rs b/src/pipeline/mod.rs index f8b4359..5b0b05d 100644 --- a/src/pipeline/mod.rs +++ b/src/pipeline/mod.rs @@ -1,35 +1,36 @@ //! Build Pipeline interface definition. +use crate::exec::Executor; use crate::{error::BuildError, pg_config::PgConfig}; use log::debug; use std::{io::Write, path::Path, process::Command}; /// Defines the interface for build pipelines to configure, compile, and test /// PGXN distributions. -pub(crate) trait Pipeline> { +pub(crate) trait Pipeline { /// Creates an instance of a Pipeline. - fn new(dir: P, pg_config: PgConfig) -> Self; + fn new(exec: Executor, pg_config: PgConfig) -> Self; /// Returns a score for the confidence that this pipeline can build the /// contents of `dir`. A score of 0 means no confidence and 255 means the /// highest confidence. - fn confidence(dir: P) -> u8; + fn confidence(dir: impl AsRef) -> u8; /// Configures a distribution to build on a particular platform and /// Postgres version. - fn configure(&self) -> Result<(), BuildError>; + fn configure(&mut self) -> Result<(), BuildError>; /// Compiles a distribution on a particular platform and Postgres version. - fn compile(&self) -> Result<(), BuildError>; + fn compile(&mut self) -> Result<(), BuildError>; /// Installs a distribution on a particular platform and Postgres version. - fn install(&self) -> Result<(), BuildError>; + fn install(&mut self) -> Result<(), BuildError>; /// Tests a distribution a particular platform and Postgres version. - fn test(&self) -> Result<(), BuildError>; + fn test(&mut self) -> Result<(), BuildError>; - /// Returns the directory passed to [`new`]. - fn dir(&self) -> &P; + /// Returns the Executor passed to [`new`]. + fn executor(&mut self) -> &mut Executor; /// Returns the PgConfig passed to [`new`]. fn pg_config(&self) -> &PgConfig; @@ -66,7 +67,7 @@ pub(crate) trait Pipeline> { /// Run a command. 
Runs it with elevated privileges when `sudo` is true /// and `pg_config --pkglibdir` isn't writeable by the current user. - fn run(&self, program: &str, args: I, sudo: bool) -> Result<(), BuildError> + fn run(&mut self, program: &str, args: I, sudo: bool) -> Result<(), BuildError> where I: IntoIterator, S: AsRef, @@ -74,22 +75,9 @@ pub(crate) trait Pipeline> { // Use `sudo` if the param is set. let mut cmd = self.maybe_sudo(program, sudo); cmd.args(args); - cmd.current_dir(self.dir()); - match cmd.output() { - Ok(out) => { - if !out.status.success() { - return Err(BuildError::Command( - format!("{:?}", cmd), - String::from_utf8_lossy(&out.stderr).to_string(), - )); - } - Ok(()) - } - Err(e) => Err(BuildError::Command( - format!("{:?}", cmd), - e.kind().to_string(), - )), - } + + // Execute the command. + self.executor().execute(cmd) } } diff --git a/src/pipeline/tests.rs b/src/pipeline/tests.rs index b33bb19..865d24e 100644 --- a/src/pipeline/tests.rs +++ b/src/pipeline/tests.rs @@ -1,80 +1,140 @@ use super::*; +use crate::line::LineWriter; use crate::tests::compile_mock; use assertables::*; use std::{collections::HashMap, env}; use tempfile::tempdir; -struct TestPipeline> { - dir: P, +struct TestPipeline { + exec: Executor, cfg: PgConfig, } // Create a mock version of the trait. -#[cfg(test)] -impl> Pipeline
<P> for TestPipeline<P>
{ - fn new(dir: P, cfg: PgConfig) -> Self { - TestPipeline { dir, cfg } +impl Pipeline for TestPipeline { + fn new(exec: Executor, cfg: PgConfig) -> Self { + TestPipeline { exec, cfg } } - fn dir(&self) -> &P { - &self.dir + fn executor(&mut self) -> &mut Executor { + &mut self.exec } fn pg_config(&self) -> &PgConfig { &self.cfg } - fn confidence(_: P) -> u8 { + fn confidence(_: impl AsRef) -> u8 { 0 } - fn configure(&self) -> Result<(), BuildError> { + + fn configure(&mut self) -> Result<(), BuildError> { Ok(()) } - fn compile(&self) -> Result<(), BuildError> { + + fn compile(&mut self) -> Result<(), BuildError> { Ok(()) } - fn install(&self) -> Result<(), BuildError> { + + fn install(&mut self) -> Result<(), BuildError> { Ok(()) } - fn test(&self) -> Result<(), BuildError> { + + fn test(&mut self) -> Result<(), BuildError> { Ok(()) } } +#[test] +fn trait_functions() { + assert_eq!(0, TestPipeline::confidence("some dir")); + + let exec = Executor::new( + env!("CARGO_MANIFEST_DIR"), + LineWriter::new(vec![]), + LineWriter::new(vec![]), + ); + let mut pipe = TestPipeline::new(exec, PgConfig::from_map(HashMap::new())); + assert!(pipe.configure().is_ok()); + assert!(pipe.compile().is_ok()); + assert!(pipe.install().is_ok()); + assert!(pipe.test().is_ok()); +} + #[test] fn run() -> Result<(), BuildError> { let tmp = tempdir()?; - let cfg = PgConfig::from_map(HashMap::new()); - - // Test basic success. - let pipe = TestPipeline::new(&tmp, cfg); - if let Err(e) = pipe.run("echo", ["hello"], false) { - panic!("echo hello failed: {e}"); + let out = Vec::new(); + let err = Vec::new(); + { + // Test basic success. + let exec = Executor::new(tmp.as_ref(), LineWriter::new(out), LineWriter::new(err)); + let mut pipe = TestPipeline::new(exec, PgConfig::from_map(HashMap::new())); + if let Err(e) = pipe.run("echo", ["hello"], false) { + panic!("echo hello failed: {e}"); + } } + // Check the output. + // let res = str::from_utf8(out.as_slice()).unwrap(); + // assert_eq!("hello\n", res); + // out.clear(); + // let res = str::from_utf8(err.as_slice()).unwrap(); + // assert_eq!("", res); + // Test nonexistent file. - match pipe.run("__nonesuch_nope__", [""], false) { - Ok(_) => panic!("Nonexistent file unexpectedly succeeded"), - Err(e) => { - assert_starts_with!(e.to_string(), "executing "); - assert_ends_with!( - e.to_string(), - "\"__nonesuch_nope__\" \"\"`: entity not found" - ) + { + let exec = Executor::new( + tmp.as_ref(), + LineWriter::new(vec![]), + LineWriter::new(vec![]), + ); + let mut pipe = TestPipeline::new(exec, PgConfig::from_map(HashMap::new())); + match pipe.run("__nonesuch_nope__", [""], false) { + Ok(_) => panic!("Nonexistent file unexpectedly succeeded"), + Err(e) => { + assert_starts_with!(e.to_string(), "executing "); + assert_ends_with!( + e.to_string(), + "\"__nonesuch_nope__\" \"\"`: entity not found" + ) + } } } + // Check the output. + // let res = str::from_utf8(out.as_slice()).unwrap(); + // assert_eq!("", res); + // let res = str::from_utf8(err.as_slice()).unwrap(); + // assert_eq!("\"__nonesuch_nope__\" \"\"`: entity not found", res); + // err.clear(); + // Test an executable that returns an error. 
- let path = tmp.path().join("exit_err").display().to_string(); - compile_mock("exit_err", &path); - match pipe.run(&path, ["hi"], false) { - Ok(_) => panic!("exit_err unexpectedly succeeded"), - Err(e) => { - assert_starts_with!(e.to_string(), "executing"); - assert_ends_with!(e.to_string(), " DED: hi\n"); + { + let exec = Executor::new( + tmp.as_ref(), + LineWriter::new(vec![]), + LineWriter::new(vec![]), + ); + let mut pipe = TestPipeline::new(exec, PgConfig::from_map(HashMap::new())); + let path = tmp.path().join("exit_err").display().to_string(); + compile_mock("exit_err", &path); + match pipe.run(&path, ["hi"], false) { + Ok(_) => panic!("exit_err unexpectedly succeeded"), + Err(e) => { + assert_starts_with!(e.to_string(), "executing"); + assert_ends_with!(e.to_string(), " exited with status code: 2"); + } } } + // Check the output. + // let res = str::from_utf8(out.as_slice()).unwrap(); + // assert_eq!("", res); + // let res = str::from_utf8(err.as_slice()).unwrap(); + // assert_eq!("exited with status code: 2", res); + // err.clear(); + // Build a mock `sudo` that echos output. let dest = tmp .path() @@ -91,11 +151,24 @@ fn run() -> Result<(), BuildError> { // Run sudo echo with the path set. temp_env::with_var("PATH", Some(env::join_paths(path).unwrap()), || { + let exec = Executor::new( + tmp.as_ref(), + LineWriter::new(vec![]), + LineWriter::new(vec![]), + ); + let mut pipe = TestPipeline::new(exec, PgConfig::from_map(HashMap::new())); if let Err(e) = pipe.run("echo", ["hello"], true) { panic!("echo hello failed: {e}"); } }); + // Check the output. + // let res = str::from_utf8(out.as_slice()).unwrap(); + // assert_eq!("hello\n", res); + // let res = str::from_utf8(err.as_slice()).unwrap(); + // assert_eq!("", res); + // out.clear(); + Ok(()) } @@ -103,9 +176,13 @@ fn run() -> Result<(), BuildError> { fn is_writeable() -> Result<(), BuildError> { let tmp = tempdir()?; let cfg = PgConfig::from_map(HashMap::new()); - - let pipe = TestPipeline::new(&tmp, cfg); - assert!(pipe.is_writeable(&tmp)); + let exec = Executor::new( + tmp.as_ref(), + LineWriter::new(vec![]), + LineWriter::new(vec![]), + ); + let pipe = TestPipeline::new(exec, cfg); + assert!(pipe.is_writeable(tmp.as_ref())); assert!(!pipe.is_writeable(tmp.path().join(" nonesuch"))); Ok(()) @@ -118,7 +195,12 @@ fn maybe_sudo() -> Result<(), BuildError> { "pkglibdir".to_string(), tmp.as_ref().display().to_string(), )])); - let pipe = TestPipeline::new(&tmp, cfg); + let exec = Executor::new( + tmp.as_ref(), + LineWriter::new(vec![]), + LineWriter::new(vec![]), + ); + let pipe = TestPipeline::new(exec, cfg); // Never use sudo when param is false. 
let cmd = pipe.maybe_sudo("foo", false); @@ -133,7 +215,13 @@ fn maybe_sudo() -> Result<(), BuildError> { "pkglibdir".to_string(), tmp.path().join("nonesuch").display().to_string(), )])); - let pipe = TestPipeline::new(&tmp, cfg); + + let exec = Executor::new( + tmp.as_ref(), + LineWriter::new(vec![]), + LineWriter::new(vec![]), + ); + let pipe = TestPipeline::new(exec, cfg); let cmd = pipe.maybe_sudo("foo", true); assert_eq!("sudo", cmd.get_program().to_str().unwrap()); let args: Vec<&std::ffi::OsStr> = cmd.get_args().collect(); diff --git a/src/tests.rs b/src/tests.rs index 68be89c..e3fc515 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,3 +1,5 @@ +use crate::{exec::Executor, line::LineWriter}; + use super::*; use serde_json::{json, Value}; use std::{collections::HashMap, fs::File, io::Write, path::PathBuf, process::Command}; @@ -41,11 +43,11 @@ fn pgxs() { let tmp = tempdir().unwrap(); let cfg = PgConfig::from_map(HashMap::new()); let rel = Release::try_from(meta.clone()).unwrap(); - let builder = Builder::new(tmp.as_ref(), rel, cfg).unwrap(); + let mut builder = Builder::new(tmp.as_ref(), rel, cfg).unwrap(); let rel = Release::try_from(meta).unwrap(); let cfg = PgConfig::from_map(HashMap::new()); let exp = Builder { - pipeline: Build::Pgxs(Pgxs::new(tmp.as_ref(), cfg)), + pipeline: Build::Pgxs(Pgxs::new(exec_in(tmp.as_ref()), cfg)), meta: rel, }; assert_eq!(exp, builder, "pgxs"); @@ -63,10 +65,10 @@ fn pgrx() { let tmp = tempdir().unwrap(); let cfg = PgConfig::from_map(HashMap::new()); let rel = Release::try_from(meta.clone()).unwrap(); - let builder = Builder::new(tmp.as_ref(), rel, cfg.clone()).unwrap(); + let mut builder = Builder::new(tmp.as_ref(), rel, cfg.clone()).unwrap(); let rel = Release::try_from(meta).unwrap(); let exp = Builder { - pipeline: Build::Pgrx(Pgrx::new(tmp.as_ref(), cfg.clone())), + pipeline: Build::Pgrx(Pgrx::new(exec_in(tmp.as_ref()), cfg.clone())), meta: rel, }; assert_eq!(exp, builder, "pgrx"); @@ -76,6 +78,10 @@ fn pgrx() { assert!(builder.install().is_ok()); } +fn exec_in(dir: &Path) -> Executor { + Executor::new(dir, LineWriter::new(vec![]), LineWriter::new(vec![])) +} + #[test] fn unsupported_pipeline() { // Test unsupported pipeline. @@ -112,7 +118,8 @@ fn detect_pipeline() -> Result<(), BuildError> { let tmp = tempdir()?; let dir = tmp.as_ref(); let cfg = PgConfig::from_map(HashMap::new()); - match Build::detect(dir, cfg.clone()) { + let exec = exec_in(dir); + match Build::detect(exec, cfg.clone()) { Ok(_) => panic!("detect unexpectedly succeeded with empty dir"), Err(e) => assert_eq!( "cannot detect build pipeline and none specified", @@ -131,25 +138,33 @@ fn detect_pipeline() -> Result<(), BuildError> { // Add an empty Makefile, PGXS should win. let mut makefile = File::create(dir.join("Makefile"))?; - match Build::detect(dir, cfg.clone()) { - Ok(p) => assert_eq!(Build::Pgxs(Pgxs::new(dir, cfg.clone())), p), + let exec = exec_in(dir); + match Build::detect(exec, cfg.clone()) { + Ok(p) => assert_eq!(Build::Pgxs(Pgxs::new(exec_in(dir), cfg.clone())), p), Err(e) => panic!("Unexpectedly errored with Makefile: {e}"), } for meta in &metas { match Builder::new(dir, no_pipe(meta), cfg.clone()) { - Ok(b) => assert_eq!(Build::Pgxs(Pgxs::new(dir, cfg.clone())), b.pipeline), + Ok(b) => assert_eq!( + Build::Pgxs(Pgxs::new(exec_in(dir), cfg.clone())), + b.pipeline + ), Err(e) => panic!("Unexpectedly errored with Makefile: {e}"), } } // Add an empty cargo.toml, PGXS should still win. 
let mut cargo_toml = File::create(dir.join("Cargo.toml"))?; - match Build::detect(dir, cfg.clone()) { - Ok(p) => assert_eq!(Build::Pgxs(Pgxs::new(dir, cfg.clone())), p), + let exec = exec_in(dir); + match Build::detect(exec, cfg.clone()) { + Ok(p) => assert_eq!(Build::Pgxs(Pgxs::new(exec_in(dir), cfg.clone())), p), Err(e) => panic!("Unexpectedly errored with Cargo.toml: {e}"), } for meta in &metas { match Builder::new(dir, no_pipe(meta), cfg.clone()) { - Ok(b) => assert_eq!(Build::Pgxs(Pgxs::new(dir, cfg.clone())), b.pipeline), + Ok(b) => assert_eq!( + Build::Pgxs(Pgxs::new(exec_in(dir), cfg.clone())), + b.pipeline + ), Err(e) => panic!("Unexpectedly errored with Cargo.toml: {e}"), } } @@ -157,13 +172,17 @@ fn detect_pipeline() -> Result<(), BuildError> { // Add pgrx to Cargo.toml; now pgrx should win. writeln!(&cargo_toml, "[dependencies]\npgrx = \"0.12.6\"")?; cargo_toml.flush()?; - match Build::detect(dir, cfg.clone()) { - Ok(p) => assert_eq!(Build::Pgrx(Pgrx::new(dir, cfg.clone())), p), + let exec = exec_in(dir); + match Build::detect(exec, cfg.clone()) { + Ok(p) => assert_eq!(Build::Pgrx(Pgrx::new(exec_in(dir), cfg.clone())), p), Err(e) => panic!("Unexpectedly errored with pgrx dependency: {e}"), } for meta in &metas { match Builder::new(dir, no_pipe(meta), cfg.clone()) { - Ok(b) => assert_eq!(Build::Pgrx(Pgrx::new(dir, cfg.clone())), b.pipeline), + Ok(b) => assert_eq!( + Build::Pgrx(Pgrx::new(exec_in(dir), cfg.clone())), + b.pipeline + ), Err(e) => panic!("Unexpectedly errored with pgrx dependency: {e}"), } } @@ -171,9 +190,10 @@ fn detect_pipeline() -> Result<(), BuildError> { // Add PG_CONFIG to the Makefile, PGXS should win again. writeln!(&makefile, "PG_CONFIG ?= pg_config")?; makefile.flush()?; - match Build::detect(dir, cfg.clone()) { + let exec = exec_in(dir); + match Build::detect(exec, cfg.clone()) { Ok(p) => assert_eq!( - Build::Pgxs(Pgxs::new(dir, PgConfig::from_map(HashMap::new()))), + Build::Pgxs(Pgxs::new(exec_in(dir), PgConfig::from_map(HashMap::new()))), p ), Err(e) => panic!("Unexpectedly errored with PG_CONFIG var: {e}"), @@ -181,7 +201,7 @@ fn detect_pipeline() -> Result<(), BuildError> { for meta in &metas { match Builder::new(dir, no_pipe(meta), cfg.clone()) { Ok(b) => assert_eq!( - Build::Pgxs(Pgxs::new(dir, PgConfig::from_map(HashMap::new()))), + Build::Pgxs(Pgxs::new(exec_in(dir), PgConfig::from_map(HashMap::new()))), b.pipeline ), Err(e) => panic!("Unexpectedly errored with PG_CONFIG var: {e}"), @@ -191,6 +211,13 @@ fn detect_pipeline() -> Result<(), BuildError> { Ok(()) } +#[test] +fn outputs() { + // XXX Would be nice to be able to test the contents of these boxes. + _ = _styled_stdout(); + _ = _styled_stderr(); +} + /// Utility function for compiling `mocks/{name}.rs` into `dest`. Used to /// provide consistent execution and output for testing across OSes. pub fn compile_mock(name: &str, dest: &str) {