use std::collections::{HashMap, HashSet};
use std::fs::{self, File};
use std::io::{prelude::*, SeekFrom};
use std::path::{Path, PathBuf};

use crate::support::git::{repo, Repository};
use crate::support::registry::alt_api_path;
use crate::support::{find_json_mismatch, paths};
use byteorder::{LittleEndian, ReadBytesExt};
use flate2::read::GzDecoder;
use tar::Archive;
use url::Url;

/// Sets up the on-disk state used by publish tests: writes a `.cargo/config`
/// and a credentials file pointing at a local file-based registry, creates the
/// upload API directory, and builds the registry index repository whose
/// `config.json` points `dl`/`api` at the local upload path.
pub fn setup() -> Repository {
    let config = paths::root().join(".cargo/config");
    t!(fs::create_dir_all(config.parent().unwrap()));
    t!(t!(File::create(&config)).write_all(
        format!(
            r#"
    [registry]
    token = "api-token"

    [registries.alternative]
    index = "{registry}"
"#,
            registry = registry().to_string()
        )
        .as_bytes()
    ));
    let credentials = paths::root().join("home/.cargo/credentials");
    t!(fs::create_dir_all(credentials.parent().unwrap()));
    t!(t!(File::create(&credentials)).write_all(
        br#"
    [registries.alternative]
    token = "api-token"
"#
    ));
    t!(fs::create_dir_all(&upload_path().join("api/v1/crates")));

    repo(&registry_path())
        .file(
            "config.json",
            &format!(
                r#"{{
            "dl": "{0}",
            "api": "{0}"
        }}"#,
                upload()
            ),
        )
        .build()
}

pub fn registry_path() -> PathBuf {
    paths::root().join("registry")
}
pub fn registry() -> Url {
    Url::from_file_path(&*registry_path()).ok().unwrap()
}
pub fn upload_path() -> PathBuf {
    paths::root().join("upload")
}
fn upload() -> Url {
    Url::from_file_path(&*upload_path()).ok().unwrap()
}

/// Check the result of a crate publish.
pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) {
    let new_path = upload_path().join("api/v1/crates/new");
    _validate_upload(
        &new_path,
        expected_json,
        expected_crate_name,
        expected_files,
    );
}

/// Check the result of a crate publish to an alternative registry.
pub fn validate_alt_upload(
    expected_json: &str,
    expected_crate_name: &str,
    expected_files: &[&str],
) {
    let new_path = alt_api_path().join("api/v1/crates/new");
    _validate_upload(
        &new_path,
        expected_json,
        expected_crate_name,
        expected_files,
    );
}

fn _validate_upload(
    new_path: &Path,
    expected_json: &str,
    expected_crate_name: &str,
    expected_files: &[&str],
) {
    let mut f = File::open(new_path).unwrap();
    // 32-bit little-endian integer of length of JSON data.
    let json_sz = f.read_u32::<LittleEndian>().expect("read json length");
    let mut json_bytes = vec![0; json_sz as usize];
    f.read_exact(&mut json_bytes).expect("read JSON data");
    let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid");
    let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse");
    find_json_mismatch(&expected_json, &actual_json)
        .expect("uploaded JSON did not match expected JSON");

    // 32-bit little-endian integer of length of crate file.
    let crate_sz = f.read_u32::<LittleEndian>().expect("read crate length");
    let mut krate_bytes = vec![0; crate_sz as usize];
    f.read_exact(&mut krate_bytes).expect("read crate data");

    // Check that we have reached the end of the upload body.
    let current = f.seek(SeekFrom::Current(0)).unwrap();
    assert_eq!(f.seek(SeekFrom::End(0)).unwrap(), current);

    // Verify the tarball.
    validate_crate_contents(&krate_bytes[..], expected_crate_name, expected_files, &[]);
}
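// A minimal usage sketch (not part of the original module): after a test has
// published against the local file registry created by `setup()`, it can check
// what was uploaded. The crate name, file list, and JSON metadata below are
// hypothetical placeholders, not values any real test asserts.
#[allow(dead_code)]
fn example_validate_publish() {
    validate_upload(
        r#"{"name": "foo", "vers": "0.0.1"}"#, // hypothetical expected metadata
        "foo-0.0.1.crate",
        &["Cargo.toml", "src/main.rs"],
    );
}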
/// Check the contents of a `.crate` file.
///
/// - `expected_crate_name` should be something like `foo-0.0.1.crate`.
/// - `expected_files` should be a complete list of files in the crate
///   (relative to `expected_crate_name`).
/// - `expected_contents` should be a list of `(file_name, contents)` tuples
///   to validate the contents of the given file. Only the listed files will
///   be checked (others will be ignored).
pub fn validate_crate_contents(
    reader: impl Read,
    expected_crate_name: &str,
    expected_files: &[&str],
    expected_contents: &[(&str, &str)],
) {
    let mut rdr = GzDecoder::new(reader);
    assert_eq!(
        rdr.header().unwrap().filename().unwrap(),
        expected_crate_name.as_bytes()
    );
    let mut contents = Vec::new();
    rdr.read_to_end(&mut contents).unwrap();
    let mut ar = Archive::new(&contents[..]);
    // Collect every entry in the tar archive as a path -> contents map.
    let files: HashMap<PathBuf, String> = ar
        .entries()
        .unwrap()
        .map(|entry| {
            let mut entry = entry.unwrap();
            let name = entry.path().unwrap().into_owned();
            let mut contents = String::new();
            entry.read_to_string(&mut contents).unwrap();
            (name, contents)
        })
        .collect();
    assert!(expected_crate_name.ends_with(".crate"));
    let base_crate_name = Path::new(&expected_crate_name[..expected_crate_name.len() - 6]);
    let actual_files: HashSet<PathBuf> = files.keys().cloned().collect();
    let expected_files: HashSet<PathBuf> = expected_files
        .iter()
        .map(|name| base_crate_name.join(name))
        .collect();
    let missing: Vec<&PathBuf> = expected_files.difference(&actual_files).collect();
    let extra: Vec<&PathBuf> = actual_files.difference(&expected_files).collect();
    if !missing.is_empty() || !extra.is_empty() {
        panic!(
            "uploaded archive does not match.\nMissing: {:?}\nExtra: {:?}\n",
            missing, extra
        );
    }
    if !expected_contents.is_empty() {
        for (e_file_name, e_file_contents) in expected_contents {
            let full_e_name = base_crate_name.join(e_file_name);
            let actual_contents = files
                .get(&full_e_name)
                .unwrap_or_else(|| panic!("file `{}` missing in archive", e_file_name));
            assert_eq!(actual_contents, e_file_contents);
        }
    }
}
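// A minimal usage sketch (not part of the original module):
// `validate_crate_contents` can also be called directly on a `.crate` file on
// disk, e.g. one produced by `cargo package`. The path, crate name, and file
// list here are hypothetical placeholders.
#[allow(dead_code)]
fn example_validate_package(crate_path: &Path) {
    let f = File::open(crate_path).unwrap();
    validate_crate_contents(
        f,
        "foo-0.0.1.crate",
        &["Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
        &[("src/main.rs", "fn main() {}")],
    );
}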