Parses songs the usual way
This commit is contained in:
parent f487d2f23d
commit 529536199e

1 changed file with 70 additions and 31 deletions
@@ -59,16 +59,28 @@ impl FromStr for Song {
             return Err(SongParseError::EmptyString);
         }
+
+        // TODO: some way to encode comments in a song struct so that if/when we
+        // serialize it back into a string format they are preserved?
+        // would probably best be done with an actual AST
+
+        static COMMENT_REGEX: OnceLock<Regex> = OnceLock::new();
+        let comment_re = COMMENT_REGEX.get_or_init(|| Regex::new(r"(?s)#[^\n]*").unwrap());
+
+        let s = comment_re.replace_all(s, "").into_owned();
+
+        dbg!(&s);
+
         static HUNK_REGEX: OnceLock<Regex> = OnceLock::new();
-        let re = HUNK_REGEX.get_or_init(|| Regex::new(r"\s*[\n\r]\s*[\n\r]\s*").unwrap());
+        let hunk_re = HUNK_REGEX.get_or_init(|| Regex::new(r"\s*[\n\r]\s*[\n\r]\s*").unwrap());

         let mut hunks = VecDeque::new();
         let mut last_end: usize = 0;

-        for m in re.find_iter(s) {
-            hunks.push_back(&s[last_end..m.start()]);
+        for m in hunk_re.find_iter(&s) {
+            hunks.push_back(s[last_end..m.start()].trim());
             last_end = m.end();
         }
-        hunks.push_back(&s[last_end..s.len()]);
+        hunks.push_back(s[last_end..s.len()].trim());

         // process header
         let mut header_lines = hunks.pop_front().unwrap().lines().map(|s| s.trim());
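Not part of the commit: a minimal, self-contained sketch of the blank-line splitting the loop above performs, assuming only the regex crate (which this code already uses). The sample input string is made up for illustration.

use regex::Regex;

fn main() {
    // Same pattern as HUNK_REGEX above: a whitespace run containing at least
    // two line breaks, i.e. a blank-line separator between hunks.
    let hunk_re = Regex::new(r"\s*[\n\r]\s*[\n\r]\s*").unwrap();

    let s = "Song Title\n\nv1:\nv1content\n\n(v1)";

    // Track where the previous separator ended and trim each piece, as the
    // updated loop does.
    let mut hunks = Vec::new();
    let mut last_end = 0;
    for m in hunk_re.find_iter(s) {
        hunks.push(s[last_end..m.start()].trim());
        last_end = m.end();
    }
    hunks.push(s[last_end..].trim());

    assert_eq!(hunks, vec!["Song Title", "v1:\nv1content", "(v1)"]);
}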
@@ -104,10 +116,16 @@ impl FromStr for Song {

         // process verses
         for hunk in hunks {
-            let mut verse_contents = hunk;
+            if hunk.starts_with('(') {
+                if hunk.ends_with(')') && !hunk.contains('\n') {
+                    default_plan.push_back(hunk[1..hunk.len() - 1].to_owned());
+                    continue;
+                }
+            }
+            let mut verse_contents: &str = hunk;
             let end_i = hunk.find('\n').unwrap_or(hunk.len());
             let verse_name: String = if let Some(i) = &hunk[0..end_i].find(':') {
-                verse_contents = &hunk[end_i + 1..];
+                verse_contents = &&hunk[end_i + 1..];
                 String::from(&hunk[0..*i])
             } else {
                 format!("Generated Verse {}", verses.len() + 1).to_owned()
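Not part of the commit: a standalone sketch of the per-hunk classification the new branch introduces, using plain Vecs and made-up hunk strings instead of the Song struct's own fields.

fn main() {
    // A single-line "(name)" hunk is a plan entry; anything else is a verse,
    // optionally labelled "name:" on its first line.
    let input_hunks = ["(v1)", "v1:\nv1content", "unlabelled\nlines"];

    let mut default_plan: Vec<String> = Vec::new();
    let mut verses: Vec<(String, String)> = Vec::new();

    for hunk in input_hunks {
        if hunk.starts_with('(') && hunk.ends_with(')') && !hunk.contains('\n') {
            default_plan.push(hunk[1..hunk.len() - 1].to_owned());
            continue;
        }
        let end_i = hunk.find('\n').unwrap_or(hunk.len());
        let (name, contents) = match hunk[..end_i].find(':') {
            Some(i) => (hunk[..i].to_owned(), hunk[end_i + 1..].to_owned()),
            None => (format!("Generated Verse {}", verses.len() + 1), hunk.to_owned()),
        };
        verses.push((name, contents));
    }

    assert_eq!(default_plan, vec!["v1".to_owned()]);
    assert_eq!(verses[0], ("v1".to_owned(), "v1content".to_owned()));
    assert_eq!(verses[1].0, "Generated Verse 2");
}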
@@ -134,6 +152,26 @@ mod test {
     fn parses_simple_song() {
         let song: Song = r#"Song Title

+A verse"#
+            .parse()
+            .unwrap();
+
+        assert_eq!(song.name, "Song Title");
+        assert_eq!(
+            song.verses.get("Generated Verse 1"),
+            Some(&Verse {
+                content: "A verse".to_owned()
+            })
+        );
+        assert_eq!(song.verses.len(), 1);
+        assert_eq!(song.default_plan[0], "Generated Verse 1");
+        assert_eq!(song.default_plan.len(), 1);
+    }
+
+    #[test]
+    fn parses_song_with_comments() {
+        let song: Song = r#"Song Title
+# this is a comment
 A verse"#
             .parse()
             .unwrap();
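Not part of the commit: the new test replaces the blank separator line with a comment line, and after the added stripping its input is identical to the simple test's, so both tests can assert the same parsed result. A quick check of that, assuming the regex crate:

use regex::Regex;

fn main() {
    let comment_re = Regex::new(r"(?s)#[^\n]*").unwrap();
    let with_comment = "Song Title\n# this is a comment\nA verse";
    // Removing the comment leaves its newline behind, producing the blank
    // separator line the simple test already has.
    let stripped = comment_re.replace_all(with_comment, "").into_owned();
    assert_eq!(stripped, "Song Title\n\nA verse");
}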
@@ -187,7 +225,7 @@ mod test {


 v1:
-v1
+v1content


 v2:
@@ -199,24 +237,25 @@ mod test {
             .parse()
             .unwrap();

-        assert_eq!(song.name, "Song Title");
+        assert_eq!(song.name, "Title");
         assert_eq!(
-            song.verses.get("Generated Verse 1"),
+            song.verses.get("v1"),
             Some(&Verse {
-                content: "A verse".to_owned()
+                content: "v1content".to_owned()
             })
         );
-        assert_eq!(song.verses.len(), 1);
-        assert_eq!(song.default_plan[0], "Generated Verse 1");
-        assert_eq!(song.default_plan.len(), 1);
+        assert_eq!(song.verses.len(), 2);
+        assert_eq!(song.default_plan[0], "v1");
+        assert_eq!(song.default_plan.len(), 4);
         dbg!(&song.other_plans);
         assert_eq!(
-            song.other_plans.get("another_plan"),
-            Some(&VecDeque::from(vec![
-                "Generated Verse 1".to_owned(),
-                "Generated Verse 1".to_owned(),
-                "Generated Verse 1".to_owned()
-            ]))
+            song.default_plan,
+            VecDeque::from(vec![
+                "v1".to_owned(),
+                "v2".to_owned(),
+                "v2".to_owned(),
+                "v1".to_owned(),
+            ])
         );
     }
 }
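Not part of the commit: the full updated test input is not visible in this diff, so the text below is a hypothetical song written in the format the new assertions imply (a title header, single-line "(...)" plan entries, and "name:"-labelled verse blocks separated by blank lines). The checks only illustrate the intended shape of the format.

fn main() {
    let song_text = "\
Title

(v1)

(v2)

(v2)

(v1)

v1:
v1content

v2:
v2content";

    // Four single-line "(...)" hunks, in order, would become the default plan.
    let plan: Vec<&str> = song_text
        .lines()
        .filter(|l| l.starts_with('(') && l.ends_with(')'))
        .map(|l| &l[1..l.len() - 1])
        .collect();
    assert_eq!(plan, vec!["v1", "v2", "v2", "v1"]);

    // Two labelled verse blocks, matching verses.len() == 2 in the new test.
    let labels: Vec<&str> = song_text
        .lines()
        .filter_map(|l| l.strip_suffix(':'))
        .collect();
    assert_eq!(labels, vec!["v1", "v2"]);
}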