debian-watch-0.2.8/.cargo_vcs_info.json0000644000000001360000000000100133710ustar { "git": { "sha1": "5f91c8153d603edab050cf9282025aea36f16adc" }, "path_in_vcs": "" }debian-watch-0.2.8/.github/CODEOWNERS000064400000000000000000000000121046102023000151050ustar 00000000000000* @jelmer debian-watch-0.2.8/.github/FUNDING.yml000064400000000000000000000000171046102023000153340ustar 00000000000000github: jelmer debian-watch-0.2.8/.github/dependabot.yml000064400000000000000000000006251046102023000163540ustar 00000000000000# Please see the documentation for all configuration options: # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates version: 2 updates: - package-ecosystem: "cargo" directory: "/" schedule: interval: "weekly" rebase-strategy: "disabled" - package-ecosystem: "github-actions" directory: "/" schedule: interval: weekly debian-watch-0.2.8/.github/workflows/rust.yml000064400000000000000000000007141046102023000173000ustar 00000000000000name: Rust on: push: pull_request: env: CARGO_TERM_COLOR: always jobs: build: runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] fail-fast: false steps: - uses: actions/checkout@v4 - name: Build run: cargo build --verbose env: RUSTFLAGS: -Dwarnings - name: Run tests run: cargo test --verbose env: RUSTFLAGS: -Dwarnings debian-watch-0.2.8/.gitignore000064400000000000000000000000131046102023000141430ustar 00000000000000target .*~ debian-watch-0.2.8/Cargo.toml0000644000000022010000000000100113620ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). 
# See Cargo.toml.orig for the original contents. [package] edition = "2021" name = "debian-watch" version = "0.2.8" authors = ["Jelmer Vernooij "] build = false autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "parser for Debian watch files" homepage = "https://github.com/jelmer/debian-watch-rs" readme = "README.md" license = "Apache-2.0" repository = "https://github.com/jelmer/debian-watch-rs.git" [lib] name = "debian_watch" path = "src/lib.rs" [dependencies.debversion] version = ">=0.3" [dependencies.m_lexer] version = "0.0.4" [dependencies.rowan] version = "0.16" [dependencies.url] version = "2.5.3" [dev-dependencies.maplit] version = "1.0.2" debian-watch-0.2.8/Cargo.toml.orig000064400000000000000000000006501046102023000150510ustar 00000000000000[package] name = "debian-watch" version = "0.2.8" authors = [ "Jelmer Vernooij ",] edition = "2021" license = "Apache-2.0" description = "parser for Debian watch files" repository = "https://github.com/jelmer/debian-watch-rs.git" homepage = "https://github.com/jelmer/debian-watch-rs" [dependencies] rowan = "0.16" m_lexer = "0.0.4" debversion = ">=0.3" url = "2.5.3" [dev-dependencies] maplit = "1.0.2" debian-watch-0.2.8/README.md000064400000000000000000000021061046102023000134370ustar 00000000000000Format-preserving parser and editor for Debian watch files ========================================================== This crate supports reading, editing and writing Debian watch files, while preserving the original contents byte-for-byte. 
Example: ```rust let wf = debian_watch::WatchFile::new(None); assert_eq!(wf.version(), debian_watch::DEFAULT_VERSION); assert_eq!("", wf.to_string()); let wf = debian_watch::WatchFile::new(Some(4)); assert_eq!(wf.version(), 4); assert_eq!("version=4\n", wf.to_string()); let wf: debian_watch::WatchFile = r#"version=4 opts=foo=blah https://foo.com/bar .*/v?(\d\S+)\.tar\.gz "#.parse().unwrap(); assert_eq!(wf.version(), 4); assert_eq!(wf.entries().collect::>().len(), 1); let entry = wf.entries().next().unwrap(); assert_eq!(entry.opts(), maplit::hashmap! { "foo".to_string() => "blah".to_string(), }); assert_eq!(&entry.url(), "https://foo.com/bar"); assert_eq!(entry.matching_pattern().as_deref(), Some(".*/v?(\\d\\S+)\\.tar\\.gz")); ``` It also supports partial parsing (with some error nodes), which could be useful for e.g. IDEs. debian-watch-0.2.8/disperse.conf000064400000000000000000000000461046102023000146460ustar 00000000000000timeout_days: 5 tag_name: "v$VERSION" debian-watch-0.2.8/src/lex.rs000064400000000000000000000100501046102023000141020ustar 00000000000000use crate::SyntaxKind; use crate::SyntaxKind::*; /// Split the input string into a flat list of tokens pub(crate) fn lex(text: &str) -> Vec<(SyntaxKind, String)> { fn tok(t: SyntaxKind) -> m_lexer::TokenKind { let sk = rowan::SyntaxKind::from(t); m_lexer::TokenKind(sk.0) } fn kind(t: m_lexer::TokenKind) -> SyntaxKind { match t.0 { 0 => KEY, 1 => VALUE, 2 => EQUALS, 3 => QUOTE, 4 => COMMA, 5 => CONTINUATION, 6 => NEWLINE, 7 => WHITESPACE, 8 => COMMENT, 9 => ERROR, _ => unreachable!(), } } let lexer = m_lexer::LexerBuilder::new() .error_token(tok(ERROR)) .tokens(&[ (tok(KEY), r"[a-z]+"), (tok(QUOTE), "\""), (tok(VALUE), r#"[^\s=,"]*[^\s=\\,"]"#), (tok(CONTINUATION), r"\\\n"), (tok(EQUALS), r"="), (tok(COMMA), r","), (tok(NEWLINE), r"\n"), (tok(WHITESPACE), r"\s+"), (tok(COMMENT), r"#[^\n]*"), ]) .build(); lexer .tokenize(text) .into_iter() .map(|t| (t.len, kind(t.kind))) .scan(0usize, |start_offset, (len, kind)| 
{ let s: String = text[*start_offset..*start_offset + len].into(); *start_offset += len; Some((kind, s)) }) .collect() } #[cfg(test)] mod tests { use crate::SyntaxKind::*; #[test] fn test_empty() { assert_eq!(super::lex(""), vec![]); } #[test] fn test_simple() { assert_eq!( super::lex( r#"version=4 opts=bare,filenamemangle=s/.+\/v?(\d\S+)\.tar\.gz/syncthing-gtk-$1\.tar\.gz/ \ https://github.com/syncthing/syncthing-gtk/tags .*/v?(\d\S+)\.tar\.gz "# ), vec![ (KEY, "version".into()), (EQUALS, "=".into()), (VALUE, "4".into()), (NEWLINE, "\n".into()), (KEY, "opts".into()), (EQUALS, "=".into()), (KEY, "bare".into()), (COMMA, ",".into()), (KEY, "filenamemangle".into()), (EQUALS, "=".into()), ( VALUE, "s/.+\\/v?(\\d\\S+)\\.tar\\.gz/syncthing-gtk-$1\\.tar\\.gz/".into() ), (WHITESPACE, " ".into()), (CONTINUATION, "\\\n".into()), (WHITESPACE, " ".into()), ( VALUE, "https://github.com/syncthing/syncthing-gtk/tags".into() ), (WHITESPACE, " ".into()), (VALUE, ".*/v?(\\d\\S+)\\.tar\\.gz".into()), (NEWLINE, "\n".into()), ] ); } #[test] fn test_quoted() { assert_eq!( super::lex( r#"version=4 opts="bare, filenamemangle=foo" \ https://github.com/syncthing/syncthing-gtk/tags .*/v?(\d\S+)\.tar\.gz "# ), vec![ (KEY, "version".into()), (EQUALS, "=".into()), (VALUE, "4".into()), (NEWLINE, "\n".into()), (KEY, "opts".into()), (EQUALS, "=".into()), (QUOTE, "\"".into()), (KEY, "bare".into()), (COMMA, ",".into()), (WHITESPACE, " ".into()), (KEY, "filenamemangle".into()), (EQUALS, "=".into()), (KEY, "foo".into()), (QUOTE, "\"".into()), (WHITESPACE, " ".into()), (CONTINUATION, "\\\n".into()), (WHITESPACE, " ".into()), ( VALUE, "https://github.com/syncthing/syncthing-gtk/tags".into() ), (WHITESPACE, " ".into()), (VALUE, ".*/v?(\\d\\S+)\\.tar\\.gz".into()), (NEWLINE, "\n".into()), ] ); } } debian-watch-0.2.8/src/lib.rs000064400000000000000000000045301046102023000140660ustar 00000000000000#![deny(missing_docs)] //! Formatting-preserving parser and editor for Debian watch files //! //! # Example //! 
//! ```rust //! let wf = debian_watch::WatchFile::new(None); //! assert_eq!(wf.version(), debian_watch::DEFAULT_VERSION); //! assert_eq!("", wf.to_string()); //! //! let wf = debian_watch::WatchFile::new(Some(4)); //! assert_eq!(wf.version(), 4); //! assert_eq!("version=4\n", wf.to_string()); //! //! let wf: debian_watch::WatchFile = r#"version=4 //! opts=foo=blah https://foo.com/bar .*/v?(\d\S+)\.tar\.gz //! "#.parse().unwrap(); //! assert_eq!(wf.version(), 4); //! assert_eq!(wf.entries().collect::>().len(), 1); //! let entry = wf.entries().next().unwrap(); //! assert_eq!(entry.opts(), maplit::hashmap! { //! "foo".to_string() => "blah".to_string(), //! }); //! assert_eq!(&entry.url(), "https://foo.com/bar"); //! assert_eq!(entry.matching_pattern().as_deref(), Some(".*/v?(\\d\\S+)\\.tar\\.gz")); //! ``` mod lex; mod parse; /// Any watch files without a version are assumed to be /// version 1. pub const DEFAULT_VERSION: u32 = 1; mod types; pub use types::*; /// Let's start with defining all kinds of tokens and /// composite nodes. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[allow(non_camel_case_types, missing_docs, clippy::upper_case_acronyms)] #[repr(u16)] pub(crate) enum SyntaxKind { KEY = 0, VALUE, EQUALS, QUOTE, COMMA, CONTINUATION, NEWLINE, WHITESPACE, // whitespaces is explicit COMMENT, // comments ERROR, // as well as errors // composite nodes ROOT, // The entire file VERSION, // "version=x\n" ENTRY, // "opts=foo=blah https://foo.com/bar .*/v?(\d\S+)\.tar\.gz\n" OPTS_LIST, // "opts=foo=blah" OPTION, // "foo=blah" } /// Convert our `SyntaxKind` into the rowan `SyntaxKind`. 
impl From for rowan::SyntaxKind { fn from(kind: SyntaxKind) -> Self { Self(kind as u16) } } pub use crate::parse::Entry; pub use crate::parse::WatchFile; #[cfg(test)] mod tests { #[test] fn test_create_watchfile() { let wf = super::WatchFile::new(None); assert_eq!(wf.version(), super::DEFAULT_VERSION); assert_eq!("", wf.to_string()); let wf = super::WatchFile::new(Some(4)); assert_eq!(wf.version(), 4); assert_eq!("version=4\n", wf.to_string()); } } debian-watch-0.2.8/src/parse.rs000064400000000000000000000762021046102023000144370ustar 00000000000000use crate::lex::lex; use crate::types::*; use crate::SyntaxKind; use crate::SyntaxKind::*; use crate::DEFAULT_VERSION; use std::str::FromStr; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ParseError(Vec); impl std::fmt::Display for ParseError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { for err in &self.0 { writeln!(f, "{}", err)?; } Ok(()) } } impl std::error::Error for ParseError {} /// Second, implementing the `Language` trait teaches rowan to convert between /// these two SyntaxKind types, allowing for a nicer SyntaxNode API where /// "kinds" are values from our `enum SyntaxKind`, instead of plain u16 values. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] enum Lang {} impl rowan::Language for Lang { type Kind = SyntaxKind; fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind { unsafe { std::mem::transmute::(raw.0) } } fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind { kind.into() } } /// GreenNode is an immutable tree, which is cheap to change, /// but doesn't contain offsets and parent pointers. use rowan::GreenNode; /// You can construct GreenNodes by hand, but a builder /// is helpful for top-down parsers: it maintains a stack /// of currently in-progress nodes use rowan::GreenNodeBuilder; /// The parse results are stored as a "green tree". 
/// We'll discuss working with the results later struct Parse { green_node: GreenNode, #[allow(unused)] errors: Vec, #[allow(unused)] version: i32, } fn parse(text: &str) -> Parse { struct Parser { /// input tokens, including whitespace, /// in *reverse* order. tokens: Vec<(SyntaxKind, String)>, /// the in-progress tree. builder: GreenNodeBuilder<'static>, /// the list of syntax errors we've accumulated /// so far. errors: Vec, } impl Parser { fn parse_version(&mut self) -> Option { let mut version = None; if self.tokens.last() == Some(&(KEY, "version".to_string())) { self.builder.start_node(VERSION.into()); self.bump(); self.skip_ws(); if self.current() != Some(EQUALS) { self.builder.start_node(ERROR.into()); self.errors.push("expected `=`".to_string()); self.bump(); self.builder.finish_node(); } else { self.bump(); } if self.current() != Some(VALUE) { self.builder.start_node(ERROR.into()); self.errors .push(format!("expected value, got {:?}", self.current())); self.bump(); self.builder.finish_node(); } else { let version_str = self.tokens.last().unwrap().1.clone(); match version_str.parse() { Ok(v) => { version = Some(v); self.bump(); } Err(_) => { self.builder.start_node(ERROR.into()); self.errors .push(format!("invalid version: {}", version_str)); self.bump(); self.builder.finish_node(); } } } if self.current() != Some(NEWLINE) { self.builder.start_node(ERROR.into()); self.errors.push("expected newline".to_string()); self.bump(); self.builder.finish_node(); } else { self.bump(); } self.builder.finish_node(); } version } fn parse_watch_entry(&mut self) -> bool { self.skip_ws(); if self.current().is_none() { return false; } if self.current() == Some(NEWLINE) { self.bump(); return false; } self.builder.start_node(ENTRY.into()); self.parse_options_list(); for i in 0..4 { if self.current() == Some(NEWLINE) { break; } if self.current() == Some(CONTINUATION) { self.bump(); self.skip_ws(); continue; } if self.current() != Some(VALUE) && self.current() != Some(KEY) { 
self.builder.start_node(ERROR.into()); self.errors.push(format!( "expected value, got {:?} (i={})", self.current(), i )); if self.current().is_some() { self.bump(); } self.builder.finish_node(); } else { self.bump(); } self.skip_ws(); } if self.current() != Some(NEWLINE) && self.current().is_some() { self.builder.start_node(ERROR.into()); self.errors .push(format!("expected newline, not {:?}", self.current())); if self.current().is_some() { self.bump(); } self.builder.finish_node(); } else { self.bump(); } self.builder.finish_node(); true } fn parse_option(&mut self) -> bool { if self.current().is_none() { return false; } while self.current() == Some(CONTINUATION) { self.bump(); } if self.current() == Some(WHITESPACE) { return false; } self.builder.start_node(OPTION.into()); if self.current() != Some(KEY) { self.builder.start_node(ERROR.into()); self.errors.push("expected key".to_string()); self.bump(); self.builder.finish_node(); } else { self.bump(); } if self.current() == Some(EQUALS) { self.bump(); if self.current() != Some(VALUE) && self.current() != Some(KEY) { self.builder.start_node(ERROR.into()); self.errors .push(format!("expected value, got {:?}", self.current())); self.bump(); self.builder.finish_node(); } else { self.bump(); } } else if self.current() == Some(COMMA) { } else { self.builder.start_node(ERROR.into()); self.errors.push("expected `=`".to_string()); if self.current().is_some() { self.bump(); } self.builder.finish_node(); } self.builder.finish_node(); true } fn parse_options_list(&mut self) { self.skip_ws(); if self.tokens.last() == Some(&(KEY, "opts".to_string())) || self.tokens.last() == Some(&(KEY, "options".to_string())) { self.builder.start_node(OPTS_LIST.into()); self.bump(); self.skip_ws(); if self.current() != Some(EQUALS) { self.builder.start_node(ERROR.into()); self.errors.push("expected `=`".to_string()); if self.current().is_some() { self.bump(); } self.builder.finish_node(); } else { self.bump(); } let quoted = if self.current() 
== Some(QUOTE) { self.bump(); true } else { false }; loop { if quoted { if self.current() == Some(QUOTE) { self.bump(); break; } self.skip_ws(); } if !self.parse_option() { break; } if self.current() == Some(COMMA) { self.bump(); } else if !quoted { break; } } self.builder.finish_node(); self.skip_ws(); } } fn parse(mut self) -> Parse { let mut version = 1; // Make sure that the root node covers all source self.builder.start_node(ROOT.into()); if let Some(v) = self.parse_version() { version = v; } // TODO: use version to influence parsing loop { if !self.parse_watch_entry() { break; } } // Don't forget to eat *trailing* whitespace self.skip_ws(); // Close the root node. self.builder.finish_node(); // Turn the builder into a GreenNode Parse { green_node: self.builder.finish(), errors: self.errors, version, } } /// Advance one token, adding it to the current branch of the tree builder. fn bump(&mut self) { let (kind, text) = self.tokens.pop().unwrap(); self.builder.token(kind.into(), text.as_str()); } /// Peek at the first unprocessed token fn current(&self) -> Option { self.tokens.last().map(|(kind, _)| *kind) } fn skip_ws(&mut self) { while self.current() == Some(WHITESPACE) || self.current() == Some(CONTINUATION) || self.current() == Some(COMMENT) { self.bump() } } } let mut tokens = lex(text); tokens.reverse(); Parser { tokens, builder: GreenNodeBuilder::new(), errors: Vec::new(), } .parse() } /// To work with the parse results we need a view into the /// green tree - the Syntax tree. /// It is also immutable, like a GreenNode, /// but it contains parent pointers, offsets, and /// has identity semantics. type SyntaxNode = rowan::SyntaxNode; #[allow(unused)] type SyntaxToken = rowan::SyntaxToken; #[allow(unused)] type SyntaxElement = rowan::NodeOrToken; impl Parse { fn syntax(&self) -> SyntaxNode { SyntaxNode::new_root(self.green_node.clone()) } fn root(&self) -> WatchFile { WatchFile::cast(self.syntax()).unwrap() } } macro_rules! 
ast_node { ($ast:ident, $kind:ident) => { #[derive(PartialEq, Eq, Hash)] #[repr(transparent)] /// A node in the syntax tree for $ast pub struct $ast(SyntaxNode); impl $ast { #[allow(unused)] fn cast(node: SyntaxNode) -> Option { if node.kind() == $kind { Some(Self(node)) } else { None } } } impl ToString for $ast { fn to_string(&self) -> String { self.0.text().to_string() } } }; } ast_node!(WatchFile, ROOT); ast_node!(Version, VERSION); ast_node!(Entry, ENTRY); ast_node!(OptionList, OPTS_LIST); ast_node!(_Option, OPTION); impl WatchFile { /// Create a new watch file with specified version pub fn new(version: Option) -> WatchFile { let mut builder = GreenNodeBuilder::new(); builder.start_node(ROOT.into()); if let Some(version) = version { builder.start_node(VERSION.into()); builder.token(KEY.into(), "version"); builder.token(EQUALS.into(), "="); builder.token(VALUE.into(), version.to_string().as_str()); builder.token(NEWLINE.into(), "\n"); builder.finish_node(); } builder.finish_node(); WatchFile(SyntaxNode::new_root(builder.finish())) } /// Returns the version of the watch file. pub fn version(&self) -> u32 { self.0 .children() .find_map(Version::cast) .map(|it| it.version()) .unwrap_or(DEFAULT_VERSION) } /// Returns an iterator over all entries in the watch file. pub fn entries(&self) -> impl Iterator + '_ { self.0.children().filter_map(Entry::cast) } } impl FromStr for WatchFile { type Err = ParseError; fn from_str(s: &str) -> Result { let parsed = parse(s); if parsed.errors.is_empty() { Ok(parsed.root()) } else { Err(ParseError(parsed.errors)) } } } impl Version { /// Returns the version of the watch file. 
pub fn version(&self) -> u32 { self.0 .children_with_tokens() .find_map(|it| match it { SyntaxElement::Token(token) => { if token.kind() == VALUE { Some(token.text().parse().unwrap()) } else { None } } _ => None, }) .unwrap_or(DEFAULT_VERSION) } } impl Entry { /// List of options pub fn option_list(&self) -> Option { self.0.children().find_map(OptionList::cast) } /// Get the value of an option pub fn get_option(&self, key: &str) -> Option { self.option_list().and_then(|ol| ol.get_option(key)) } /// Check if an option is set pub fn has_option(&self, key: &str) -> bool { self.option_list().map_or(false, |ol| ol.has_option(key)) } /// The name of the secondary source tarball pub fn component(&self) -> Option { self.get_option("component") } /// Component type pub fn ctype(&self) -> Result, ()> { self.get_option("ctype").map(|s| s.parse()).transpose() } /// Compression method pub fn compression(&self) -> Result, ()> { self.get_option("compression") .map(|s| s.parse()) .transpose() } /// Repack the tarball pub fn repack(&self) -> bool { self.has_option("repack") } /// Repack suffix pub fn repacksuffix(&self) -> Option { self.get_option("repacksuffix") } /// Retrieve the mode of the watch file entry. pub fn mode(&self) -> Result { Ok(self .get_option("mode") .map(|s| s.parse()) .transpose()? .unwrap_or_default()) } /// Return the git pretty mode pub fn pretty(&self) -> Result { Ok(self .get_option("pretty") .map(|s| s.parse()) .transpose()? .unwrap_or_default()) } /// Set the date string used by the pretty option to an arbitrary format as an optional /// opts argument when the matching-pattern is HEAD or heads/branch for git mode. pub fn date(&self) -> String { self.get_option("date") .unwrap_or_else(|| "%Y%m%d".to_string()) } /// Return the git export mode pub fn gitexport(&self) -> Result { Ok(self .get_option("gitexport") .map(|s| s.parse()) .transpose()? 
.unwrap_or_default()) } /// Return the git mode pub fn gitmode(&self) -> Result { Ok(self .get_option("gitmode") .map(|s| s.parse()) .transpose()? .unwrap_or_default()) } /// Return the pgp mode pub fn pgpmode(&self) -> Result { Ok(self .get_option("pgpmode") .map(|s| s.parse()) .transpose()? .unwrap_or_default()) } /// Return the search mode pub fn searchmode(&self) -> Result { Ok(self .get_option("searchmode") .map(|s| s.parse()) .transpose()? .unwrap_or_default()) } /// Return the decompression mode pub fn decompress(&self) -> bool { self.has_option("decompress") } /// Whether to disable all site specific special case code such as URL director uses and page /// content alterations. pub fn bare(&self) -> bool { self.has_option("bare") } /// Set the user-agent string used to contact the HTTP(S) server as user-agent-string. (persistent) pub fn user_agent(&self) -> Option { self.get_option("user-agent") } /// Use PASV mode for the FTP connection. pub fn passive(&self) -> Option { if self.has_option("passive") || self.has_option("pasv") { Some(true) } else if self.has_option("active") || self.has_option("nopasv") { Some(false) } else { None } } /// Add the extra options to use with the unzip command, such as -a, -aa, and -b, when executed /// by mk-origtargz. pub fn unzipoptions(&self) -> Option { self.get_option("unzipopt") } /// Normalize the downloaded web page string. pub fn dversionmangle(&self) -> Option { self.get_option("dversionmangle") .or_else(|| self.get_option("versionmangle")) } /// Normalize the directory path string matching the regex in a set of parentheses of /// http://URL as the sortable version index string. This is used /// as the directory path sorting index only. pub fn dirversionmangle(&self) -> Option { self.get_option("dirversionmangle") } /// Normalize the downloaded web page string. 
pub fn pagemangle(&self) -> Option { self.get_option("pagemangle") } /// Normalize the candidate upstream version strings extracted from hrefs in the /// source of the web page. This is used as the version sorting index when selecting the /// latest upstream version. pub fn uversionmangle(&self) -> Option { self.get_option("uversionmangle") .or_else(|| self.get_option("versionmangle")) } /// Syntactic shorthand for uversionmangle=rules, dversionmangle=rules pub fn versionmangle(&self) -> Option { self.get_option("versionmangle") } /// Convert the selected upstream tarball href string from the percent-encoded hexadecimal /// string to the decoded normal URL string for obfuscated /// web sites. Only percent-encoding is available and it is decoded with /// s/%([A-Fa-f\d]{2})/chr hex $1/eg. pub fn hrefdecode(&self) -> bool { self.get_option("hrefdecode").is_some() } /// Convert the selected upstream tarball href string into the accessible URL for obfuscated /// web sites. This is run after hrefdecode. pub fn downloadurlmangle(&self) -> Option { self.get_option("downloadurlmangle") } /// Generate the upstream tarball filename from the selected href string if matching-pattern /// can extract the latest upstream version from the selected href string. /// Otherwise, generate the upstream tarball filename from its full URL string and set the /// missing from the generated upstream tarball filename. /// /// Without this option, the default upstream tarball filename is generated by taking the last /// component of the URL and removing everything after any '?' or '#'. pub fn filenamemangle(&self) -> Option { self.get_option("filenamemangle") } /// Generate the candidate upstream signature file URL string from the upstream tarball URL. pub fn pgpsigurlmangle(&self) -> Option { self.get_option("pgpsigurlmangle") } /// Generate the version string of the source tarball _.orig.tar.gz /// from . This should be used to add a suffix such as +dfsg to a MUT package. 
pub fn oversionmangle(&self) -> Option { self.get_option("oversionmangle") } /// Returns options set pub fn opts(&self) -> std::collections::HashMap { let mut options = std::collections::HashMap::new(); if let Some(ol) = self.option_list() { for opt in ol.children() { let key = opt.key(); let value = opt.value(); if let (Some(key), Some(value)) = (key, value) { options.insert(key.to_string(), value.to_string()); } } } options } fn items(&self) -> impl Iterator + '_ { self.0.children_with_tokens().filter_map(|it| match it { SyntaxElement::Token(token) => { if token.kind() == VALUE || token.kind() == KEY { Some(token.text().to_string()) } else { None } } _ => None, }) } /// Returns the URL of the entry. pub fn url(&self) -> String { self.items().next().unwrap() } /// Returns the matching pattern of the entry. pub fn matching_pattern(&self) -> Option { self.items().nth(1) } /// Returns the version policy pub fn version(&self) -> Result, String> { self.items().nth(2).map(|it| it.parse()).transpose() } /// Returns the script of the entry. pub fn script(&self) -> Option { self.items().nth(3) } /// Replace all substitutions and return the resulting URL. pub fn format_url(&self, package: impl FnOnce() -> String) -> url::Url { subst(self.url().as_str(), package).parse().unwrap() } } const SUBSTITUTIONS: &[(&str, &str)] = &[ // This is substituted with the source package name found in the first line // of the debian/changelog file. // "@PACKAGE@": None, // This is substituted by the legal upstream version regex (capturing). ("@ANY_VERSION@", r"[-_]?(\d[\-+\.:\~\da-zA-Z]*)"), // This is substituted by the typical archive file extension regex // (non-capturing). ( "@ARCHIVE_EXT@", r"(?i)\.(?:tar\.xz|tar\.bz2|tar\.gz|zip|tgz|tbz|txz)", ), // This is substituted by the typical signature file extension regex // (non-capturing). 
( "@SIGNATURE_EXT@", r"(?i)\.(?:tar\.xz|tar\.bz2|tar\.gz|zip|tgz|tbz|txz)\.(?:asc|pgp|gpg|sig|sign)", ), // This is substituted by the typical Debian extension regexp (capturing). ("@DEB_EXT@", r"[\+~](debian|dfsg|ds|deb)(\.)?(\d+)?$"), ]; pub fn subst(text: &str, package: impl FnOnce() -> String) -> String { let mut substs = SUBSTITUTIONS.to_vec(); let package_name; if text.contains("@PACKAGE@") { package_name = Some(package()); substs.push(("@PACKAGE@", package_name.as_deref().unwrap())); } let mut text = text.to_string(); for (k, v) in substs { text = text.replace(k, v); } text } #[test] fn test_subst() { assert_eq!( subst("@ANY_VERSION@", || unreachable!()), r"[-_]?(\d[\-+\.:\~\da-zA-Z]*)" ); assert_eq!(subst("@PACKAGE@", || "dulwich".to_string()), "dulwich"); } impl OptionList { fn children(&self) -> impl Iterator + '_ { self.0.children().filter_map(_Option::cast) } pub fn has_option(&self, key: &str) -> bool { self.children().any(|it| it.key().as_deref() == Some(key)) } pub fn get_option(&self, key: &str) -> Option { for child in self.children() { if child.key().as_deref() == Some(key) { return child.value(); } } None } } impl _Option { /// Returns the key of the option. pub fn key(&self) -> Option { self.0.children_with_tokens().find_map(|it| match it { SyntaxElement::Token(token) => { if token.kind() == KEY { Some(token.text().to_string()) } else { None } } _ => None, }) } /// Returns the value of the option. 
pub fn value(&self) -> Option { self.0 .children_with_tokens() .filter_map(|it| match it { SyntaxElement::Token(token) => { if token.kind() == VALUE || token.kind() == KEY { Some(token.text().to_string()) } else { None } } _ => None, }) .nth(1) } } #[test] fn test_parse_v1() { const WATCHV1: &str = r#"version=4 opts=bare,filenamemangle=s/.+\/v?(\d\S+)\.tar\.gz/syncthing-gtk-$1\.tar\.gz/ \ https://github.com/syncthing/syncthing-gtk/tags .*/v?(\d\S+)\.tar\.gz "#; let parsed = parse(WATCHV1); //assert_eq!(parsed.errors, Vec::::new()); let node = parsed.syntax(); assert_eq!( format!("{:#?}", node), r#"ROOT@0..161 VERSION@0..10 KEY@0..7 "version" EQUALS@7..8 "=" VALUE@8..9 "4" NEWLINE@9..10 "\n" ENTRY@10..161 OPTS_LIST@10..86 KEY@10..14 "opts" EQUALS@14..15 "=" OPTION@15..19 KEY@15..19 "bare" COMMA@19..20 "," OPTION@20..86 KEY@20..34 "filenamemangle" EQUALS@34..35 "=" VALUE@35..86 "s/.+\\/v?(\\d\\S+)\\.tar\\ ..." WHITESPACE@86..87 " " CONTINUATION@87..89 "\\\n" WHITESPACE@89..91 " " VALUE@91..138 "https://github.com/sy ..." WHITESPACE@138..139 " " VALUE@139..160 ".*/v?(\\d\\S+)\\.tar\\.gz" NEWLINE@160..161 "\n" "# ); let root = parsed.root(); assert_eq!(root.version(), 4); let entries = root.entries().collect::>(); assert_eq!(entries.len(), 1); let entry = &entries[0]; assert_eq!( entry.url(), "https://github.com/syncthing/syncthing-gtk/tags" ); assert_eq!( entry.matching_pattern(), Some(".*/v?(\\d\\S+)\\.tar\\.gz".into()) ); assert_eq!(entry.version(), Ok(None)); assert_eq!(entry.script(), None); assert_eq!(node.text(), WATCHV1); } #[test] fn test_parse_v2() { let parsed = parse( r#"version=4 https://github.com/syncthing/syncthing-gtk/tags .*/v?(\d\S+)\.tar\.gz # comment "#, ); assert_eq!(parsed.errors, Vec::::new()); let node = parsed.syntax(); assert_eq!( format!("{:#?}", node), r###"ROOT@0..90 VERSION@0..10 KEY@0..7 "version" EQUALS@7..8 "=" VALUE@8..9 "4" NEWLINE@9..10 "\n" ENTRY@10..80 VALUE@10..57 "https://github.com/sy ..." 
WHITESPACE@57..58 " " VALUE@58..79 ".*/v?(\\d\\S+)\\.tar\\.gz" NEWLINE@79..80 "\n" COMMENT@80..89 "# comment" NEWLINE@89..90 "\n" "### ); let root = parsed.root(); assert_eq!(root.version(), 4); let entries = root.entries().collect::>(); assert_eq!(entries.len(), 1); let entry = &entries[0]; assert_eq!( entry.url(), "https://github.com/syncthing/syncthing-gtk/tags" ); assert_eq!( entry.format_url(|| "syncthing-gtk".to_string()), "https://github.com/syncthing/syncthing-gtk/tags" .parse() .unwrap() ); } #[test] fn test_parse_v3() { let parsed = parse( r#"version=4 https://github.com/syncthing/@PACKAGE@/tags .*/v?(\d\S+)\.tar\.gz # comment "#, ); assert_eq!(parsed.errors, Vec::::new()); let root = parsed.root(); assert_eq!(root.version(), 4); let entries = root.entries().collect::>(); assert_eq!(entries.len(), 1); let entry = &entries[0]; assert_eq!(entry.url(), "https://github.com/syncthing/@PACKAGE@/tags"); assert_eq!( entry.format_url(|| "syncthing-gtk".to_string()), "https://github.com/syncthing/syncthing-gtk/tags" .parse() .unwrap() ); } #[test] fn test_parse_v4() { let cl: super::WatchFile = r#"version=4 opts=repack,compression=xz,dversionmangle=s/\+ds//,repacksuffix=+ds \ https://github.com/example/example-cat/tags \ (?:.*?/)?v?(\d[\d.]*)\.tar\.gz debian uupdate "# .parse() .unwrap(); assert_eq!(cl.version(), 4); let entries = cl.entries().collect::>(); assert_eq!(entries.len(), 1); let entry = &entries[0]; assert_eq!(entry.url(), "https://github.com/example/example-cat/tags"); assert_eq!( entry.matching_pattern(), Some("(?:.*?/)?v?(\\d[\\d.]*)\\.tar\\.gz".into()) ); assert!(entry.repack()); assert_eq!(entry.compression(), Ok(Some(Compression::Xz))); assert_eq!(entry.dversionmangle(), Some("s/\\+ds//".into())); assert_eq!(entry.repacksuffix(), Some("+ds".into())); assert_eq!(entry.script(), Some("uupdate".into())); assert_eq!( entry.format_url(|| "example-cat".to_string()), "https://github.com/example/example-cat/tags" .parse() .unwrap() ); 
assert_eq!(entry.version(), Ok(Some(VersionPolicy::Debian))); } #[test] fn test_git_mode() { let text = r#"version=3 opts="mode=git, gitmode=shallow, pgpmode=gittag" \ https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git \ refs/tags/(.*) debian "#; let parsed = parse(text); assert_eq!(parsed.errors, Vec::::new()); let cl = parsed.root(); assert_eq!(cl.version(), 3); let entries = cl.entries().collect::>(); assert_eq!(entries.len(), 1); let entry = &entries[0]; assert_eq!( entry.url(), "https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git" ); assert_eq!(entry.matching_pattern(), Some("refs/tags/(.*)".into())); assert_eq!(entry.version(), Ok(Some(VersionPolicy::Debian))); assert_eq!(entry.script(), None); assert_eq!(entry.gitmode(), Ok(GitMode::Shallow)); assert_eq!(entry.pgpmode(), Ok(PgpMode::GitTag)); assert_eq!(entry.mode(), Ok(Mode::Git)); } #[test] fn test_parse_quoted() { const WATCHV1: &str = r#"version=4 opts="bare, filenamemangle=blah" \ https://github.com/syncthing/syncthing-gtk/tags .*/v?(\d\S+)\.tar\.gz "#; let parsed = parse(WATCHV1); //assert_eq!(parsed.errors, Vec::::new()); let node = parsed.syntax(); let root = parsed.root(); assert_eq!(root.version(), 4); let entries = root.entries().collect::>(); assert_eq!(entries.len(), 1); let entry = &entries[0]; assert_eq!( entry.url(), "https://github.com/syncthing/syncthing-gtk/tags" ); assert_eq!( entry.matching_pattern(), Some(".*/v?(\\d\\S+)\\.tar\\.gz".into()) ); assert_eq!(entry.version(), Ok(None)); assert_eq!(entry.script(), None); assert_eq!(node.text(), WATCHV1); } debian-watch-0.2.8/src/types.rs000064400000000000000000000317001046102023000144630ustar 00000000000000use std::str::FromStr; /// The type of the component pub enum ComponentType { /// Perl component Perl, /// NodeJS component NodeJS, } impl std::fmt::Display for ComponentType { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "{}", match self { ComponentType::Perl 
=> "perl",
                ComponentType::NodeJS => "nodejs",
            }
        )
    }
}

impl FromStr for ComponentType {
    type Err = ();

    /// Parse a component type from its uscan keyword ("perl" or "nodejs").
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "perl" => Ok(ComponentType::Perl),
            "nodejs" => Ok(ComponentType::NodeJS),
            _ => Err(()),
        }
    }
}

/// Compression type for the repacked upstream tarball (uscan `compression` option).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum Compression {
    /// Gzip compression
    Gzip,
    /// Xz compression
    Xz,
    /// Bzip2 compression
    Bzip2,
    /// Lzma compression
    Lzma,
    /// Default compression
    #[default]
    Default,
}

impl std::fmt::Display for Compression {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Compression::Gzip => "gzip",
                Compression::Xz => "xz",
                Compression::Bzip2 => "bzip2",
                Compression::Lzma => "lzma",
                Compression::Default => "default",
            }
        )
    }
}

impl FromStr for Compression {
    type Err = ();

    /// Parse a compression keyword; both the short ("gz", "bz2") and long
    /// ("gzip", "bzip2") spellings are accepted.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "gz" | "gzip" => Ok(Compression::Gzip),
            "xz" => Ok(Compression::Xz),
            "bz2" | "bzip2" => Ok(Compression::Bzip2),
            "lzma" => Ok(Compression::Lzma),
            "default" => Ok(Compression::Default),
            _ => Err(()),
        }
    }
}

/// How to generate the upstream version string from git tags (uscan `pretty` option).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Pretty {
    /// Use `git describe` to generate the version string
    Describe,
    /// Use a custom pattern to generate the version string
    Pattern(String),
}

impl Default for Pretty {
    fn default() -> Self {
        // uscan(1) documents the default pretty rule as "0.0~git%cd.%h"
        // (%cd = commit date, %h = abbreviated commit hash).  The previous
        // value "0.0~git%cd.h%" had the '%' transposed, producing a literal
        // "h%" instead of the hash.
        Pretty::Pattern("0.0~git%cd.%h".to_string())
    }
}

impl std::fmt::Display for Pretty {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Pretty::Describe => "describe",
                Pretty::Pattern(pattern) => pattern,
            }
        )
    }
}

impl FromStr for Pretty {
    type Err = ();

    /// "describe" selects git-describe mode; any other string is taken
    /// verbatim as a pattern, so this parse is infallible.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "describe" {
            Ok(Pretty::Describe)
        } else {
            Ok(Pretty::Pattern(s.to_string()))
        }
    }
}

/// Git export mode (uscan `gitexport` option).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum GitExport {
    /// Export only files in the .orig.tar archive that are not ignored by the
    /// upstream.
    #[default]
    Default,
    /// Export all files in the .orig.tar archive, ignoring any export-ignore git attributes
    /// defined by the upstream.
    All,
}

impl std::fmt::Display for GitExport {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // &str arms instead of the previous per-call String allocations;
        // the rendered output is identical (and matches the Compression impl).
        write!(
            f,
            "{}",
            match self {
                GitExport::Default => "default",
                GitExport::All => "all",
            }
        )
    }
}

impl FromStr for GitExport {
    type Err = ();

    /// Parse a gitexport keyword; unknown keywords yield `Err(())`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "default" => Ok(GitExport::Default),
            "all" => Ok(GitExport::All),
            _ => Err(()),
        }
    }
}

/// Git clone operation mode (uscan `gitmode` option).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum GitMode {
    /// Clone the git repository in shallow mode
    #[default]
    Shallow,
    /// Clone the git repository in full mode
    Full,
}

impl std::fmt::Display for GitMode {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // &str arms instead of the previous per-call String allocations.
        write!(
            f,
            "{}",
            match self {
                GitMode::Shallow => "shallow",
                GitMode::Full => "full",
            }
        )
    }
}

impl FromStr for GitMode {
    type Err = ();

    /// Parse a gitmode keyword; unknown keywords yield `Err(())`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "shallow" => Ok(GitMode::Shallow),
            "full" => Ok(GitMode::Full),
            _ => Err(()),
        }
    }
}

/// PGP verification mode (uscan `pgpmode` option).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum PgpMode {
    /// Check possible URLs for the signature file and autogenerate a
    /// ``pgpsigurlmangle`` rule to use it.
    Auto,
    /// Use pgpsigurlmangle=rules to generate the candidate upstream signature file URL
    /// string from the upstream tarball URL.
    ///
    /// If the specified pgpsigurlmangle is missing, uscan checks possible URLs for the
    /// signature file and suggests adding a pgpsigurlmangle rule.
    #[default]
    Default,
    /// Use pgpsigurlmangle=rules to generate the candidate upstream signature file URL
    /// string from the upstream tarball URL.
    Mangle,
    /// Verify this downloaded tarball file with the signature file specified in the next watch
    /// line. The next watch line must be pgpmode=previous. Otherwise, no verification occurs.
Next, /// Verify the downloaded tarball file specified in the previous watch line with this signature /// file. The previous watch line must be pgpmode=next. Previous, /// Verify the downloaded file foo.ext with its self signature and extract its content tarball /// file as foo. SelfSignature, /// Verify tag signature if mode=git. GitTag, } impl std::fmt::Display for PgpMode { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "{}", match self { PgpMode::Auto => "auto", PgpMode::Default => "default", PgpMode::Mangle => "mangle", PgpMode::Next => "next", PgpMode::Previous => "previous", PgpMode::SelfSignature => "self", PgpMode::GitTag => "gittag", } ) } } impl FromStr for PgpMode { type Err = (); fn from_str(s: &str) -> Result { match s { "auto" => Ok(PgpMode::Auto), "default" => Ok(PgpMode::Default), "mangle" => Ok(PgpMode::Mangle), "next" => Ok(PgpMode::Next), "previous" => Ok(PgpMode::Previous), "self" => Ok(PgpMode::SelfSignature), "gittag" => Ok(PgpMode::GitTag), _ => Err(()), } } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] /// How to search for the upstream tarball pub enum SearchMode { #[default] /// Search for the upstream tarball in the HTML page Html, /// Search for the upstream tarball in the plain text page Plain, } impl std::fmt::Display for SearchMode { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "{}", match self { SearchMode::Html => "html", SearchMode::Plain => "plain", } ) } } impl FromStr for SearchMode { type Err = (); fn from_str(s: &str) -> Result { match s { "html" => Ok(SearchMode::Html), "plain" => Ok(SearchMode::Plain), _ => Err(()), } } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] /// Archive download mode pub enum Mode { #[default] /// downloads the specified tarball from the archive URL on the web. Automatically internal /// mode value is updated to either http or ftp by URL. 
LWP, /// Access the upstream git archive directly with the git command and packs the source tree /// with the specified tag via matching-pattern into spkg-version.tar.xz. Git, /// Access the upstream Subversion archive directly with the svn command and packs the source /// tree. Svn, } impl std::fmt::Display for Mode { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "{}", match self { Mode::LWP => "lwp", Mode::Git => "git", Mode::Svn => "svn", } ) } } impl FromStr for Mode { type Err = (); fn from_str(s: &str) -> Result { match s { "lwp" => Ok(Mode::LWP), "git" => Ok(Mode::Git), "svn" => Ok(Mode::Svn), _ => Err(()), } } } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] /// The version policy to use when downloading upstream tarballs pub enum VersionPolicy { #[default] /// Requires the downloading upstream tarball to be newer than the version obtained from debian/changelog Debian, /// Requires the upstream tarball to be newer than specified version Version(debversion::Version), /// Requires the downloaded version of the secondary tarballs to be exactly the same as the one for the first upstream tarball downloaded Same, /// Restricts the version of the seignature file (used with pgpmode=previous) Previous, /// Does not restrict the version of the secondary tarballs Ignore, /// Requires the downloading upstream tarball to be newer than the version obtained from /// debian/changelog. Package version is the concatenation of all "group" upstream version. Group, /// Requires the downloading upstream tarball to be newer than the version obtained from /// debian/changelog. Package version is the concatenation of the version of the main tarball, /// followed by a checksum of all the tarballs using the checksum version system. At least the /// main upstream source has to be declared as group. 
Checksum, } impl std::fmt::Display for VersionPolicy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { VersionPolicy::Debian => write!(f, "debian"), VersionPolicy::Version(v) => write!(f, "version-{}", v), VersionPolicy::Same => write!(f, "same"), VersionPolicy::Previous => write!(f, "previous"), VersionPolicy::Ignore => write!(f, "ignore"), VersionPolicy::Group => write!(f, "group"), VersionPolicy::Checksum => write!(f, "checksum"), } } } impl std::str::FromStr for VersionPolicy { type Err = String; fn from_str(s: &str) -> Result { match s { "debian" => Ok(VersionPolicy::Debian), "same" => Ok(VersionPolicy::Same), "previous" => Ok(VersionPolicy::Previous), "ignore" => Ok(VersionPolicy::Ignore), "group" => Ok(VersionPolicy::Group), "checksum" => Ok(VersionPolicy::Checksum), s if s.starts_with("version-") => { let v = s.trim_start_matches("version-"); Ok(VersionPolicy::Version( v.parse::() .map_err(|e| e.to_string())?, )) } _ => Err(format!("Unknown version policy: {}", s)), } } } #[cfg(test)] mod version_policy_tests { use super::VersionPolicy; use std::str::FromStr; #[test] fn test_version_policy_to_string() { assert_eq!("debian", VersionPolicy::Debian.to_string()); assert_eq!("same", VersionPolicy::Same.to_string()); assert_eq!("previous", VersionPolicy::Previous.to_string()); assert_eq!("ignore", VersionPolicy::Ignore.to_string()); assert_eq!("group", VersionPolicy::Group.to_string()); assert_eq!("checksum", VersionPolicy::Checksum.to_string()); assert_eq!( "version-1.2.3", VersionPolicy::Version("1.2.3".parse().unwrap()).to_string() ); } #[test] fn test_version_policy_from_str() { assert_eq!( VersionPolicy::Debian, VersionPolicy::from_str("debian").unwrap() ); assert_eq!( VersionPolicy::Same, VersionPolicy::from_str("same").unwrap() ); assert_eq!( VersionPolicy::Previous, VersionPolicy::from_str("previous").unwrap() ); assert_eq!( VersionPolicy::Ignore, VersionPolicy::from_str("ignore").unwrap() ); assert_eq!( VersionPolicy::Group, 
VersionPolicy::from_str("group").unwrap() ); assert_eq!( VersionPolicy::Checksum, VersionPolicy::from_str("checksum").unwrap() ); assert_eq!( VersionPolicy::Version("1.2.3".parse().unwrap()), VersionPolicy::from_str("version-1.2.3").unwrap() ); } }