diff --git a/crates/gosub_css3/src/parser/selector.rs b/crates/gosub_css3/src/parser/selector.rs
index 3a446a308..97fd2e337 100644
--- a/crates/gosub_css3/src/parser/selector.rs
+++ b/crates/gosub_css3/src/parser/selector.rs
@@ -273,7 +273,6 @@ impl Css3<'_> {
                     self.tokenizer.reconsume();
                     self.parse_attribute_selector()?
                 }
-                TokenType::IDHash(value) => Node::new(NodeType::IdSelector { value }, t.location),
                 TokenType::Hash(value) => Node::new(NodeType::IdSelector { value }, t.location),
                 TokenType::Colon => {
                     let nt = self.tokenizer.lookahead(0);
diff --git a/crates/gosub_css3/src/parser/value.rs b/crates/gosub_css3/src/parser/value.rs
index af72cac3a..d7697b3c0 100644
--- a/crates/gosub_css3/src/parser/value.rs
+++ b/crates/gosub_css3/src/parser/value.rs
@@ -43,15 +43,6 @@ impl Css3<'_> {
         let t = self.consume_any()?;
 
         match t.token_type {
-            TokenType::IDHash(value) => {
-                let node = Node::new(
-                    NodeType::Ident {
-                        value: format!("#{}", value),
-                    },
-                    t.location,
-                );
-                Ok(Some(node))
-            }
             TokenType::Hash(value) => {
                 let node = Node::new(NodeType::Hash { value }, t.location);
                 Ok(Some(node))
diff --git a/crates/gosub_css3/src/stylesheet.rs b/crates/gosub_css3/src/stylesheet.rs
index 14f75fce0..32076dc93 100644
--- a/crates/gosub_css3/src/stylesheet.rs
+++ b/crates/gosub_css3/src/stylesheet.rs
@@ -1,10 +1,13 @@
-use crate::colors::RgbColor;
-use anyhow::anyhow;
 use core::fmt::Debug;
-use gosub_shared::types::Result;
 use std::cmp::Ordering;
 use std::fmt::Display;
 
+use anyhow::anyhow;
+
+use gosub_shared::types::Result;
+
+use crate::colors::RgbColor;
+
 /// Defines a complete stylesheet with all its rules and the location where it was found
 #[derive(Debug, PartialEq, Clone)]
 pub struct CssStylesheet {
@@ -353,7 +356,11 @@ impl CssValue {
             crate::node::NodeType::Percentage { value } => Ok(CssValue::Percentage(value)),
             crate::node::NodeType::Dimension { value, unit } => Ok(CssValue::Unit(value, unit)),
             crate::node::NodeType::String { value } => Ok(CssValue::String(value)),
-            crate::node::NodeType::Hash { value } => Ok(CssValue::String(value)),
+            crate::node::NodeType::Hash { mut value } => {
+                value.insert(0, '#');
+
+                Ok(CssValue::String(value))
+            }
             crate::node::NodeType::Operator(_) => Ok(CssValue::None),
             crate::node::NodeType::Calc { .. } => {
                 Ok(CssValue::Function("calc".to_string(), vec![]))
@@ -428,6 +435,8 @@ impl CssValue {
 
 #[cfg(test)]
 mod test {
+    use std::vec;
+
     use super::*;
 
     // #[test]
diff --git a/crates/gosub_css3/src/tokenizer.rs b/crates/gosub_css3/src/tokenizer.rs
index ca7b943e4..8dce822a8 100644
--- a/crates/gosub_css3/src/tokenizer.rs
+++ b/crates/gosub_css3/src/tokenizer.rs
@@ -1,9 +1,11 @@
-use crate::unicode::{get_unicode_char, UnicodeChar};
+use std::fmt;
+use std::fmt::Debug;
+
 use gosub_shared::byte_stream::Character::Ch;
 use gosub_shared::byte_stream::{ByteStream, Character};
 use gosub_shared::byte_stream::{Location, LocationHandler, Stream};
-use std::fmt;
-use std::fmt::Debug;
+
+use crate::unicode::{get_unicode_char, UnicodeChar};
 
 pub type Number = f32;
 
@@ -33,16 +35,10 @@ pub enum TokenType {
     BadString(String),
     /// A [`<whitespace-token>`](https://drafts.csswg.org/css-syntax/#whitespace-token-diagram)
    Whitespace(String),
-    /// A [`<hash-token>`](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "unrestricted"
+    /// A [`<hash-token>`](https://drafts.csswg.org/css-syntax/#hash-token-diagram) (with the type flag set to "unrestricted")
     ///
     /// The value does not include the `#` marker.
     Hash(String),
-    /// A [`<hash-token>`](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "id"
-    ///
-    /// The value does not include the `#` marker.
-    ///
-    /// Hash that is a valid ID selector.
-    IDHash(String),
     /// A `<delim-token>`
     Delim(char),
     /// A `<{-token>`
@@ -109,10 +105,6 @@ impl Token {
         Token::new(TokenType::Delim(c), location)
     }
 
-    fn new_id_hash(value: &str, location: Location) -> Token {
-        Token::new(TokenType::IDHash(value.to_string()), location)
-    }
-
     fn new_hash(value: &str, location: Location) -> Token {
         Token::new(TokenType::Hash(value.to_string()), location)
     }
@@ -204,7 +196,6 @@ impl fmt::Display for Token {
             | TokenType::Comment(val)
             | TokenType::BadUrl(val)
             | TokenType::Hash(val)
-            | TokenType::IDHash(val)
             | TokenType::Ident(val)
             | TokenType::Function(val)
             | TokenType::QuotedString(val)
@@ -384,11 +375,7 @@ impl<'stream> Tokenizer<'stream> {
                 self.next_char();
 
                 if self.is_ident_char(self.current_char().into()) || self.is_start_of_escape(0) {
-                    return if self.is_next_3_points_starts_ident_seq(0) {
-                        Token::new_id_hash(self.consume_ident().as_str(), loc)
-                    } else {
-                        Token::new_hash(self.consume_ident().as_str(), loc)
-                    };
+                    return Token::new_hash(self.consume_ident().as_str(), loc);
                 }
 
                 Token::new_delim(c, loc)
@@ -962,9 +949,10 @@ impl<'stream> Tokenizer<'stream> {
 
 #[cfg(test)]
 mod test {
-    use super::*;
     use gosub_shared::byte_stream::Encoding;
 
+    use super::*;
+
     macro_rules! assert_token_eq {
         ($t1:expr, $t2:expr) => {
             assert_eq!($t1.token_type, $t2.token_type)
@@ -1368,7 +1356,7 @@ mod test {
         let tokens = vec![
             // 1st css rule
             Token::new(TokenType::Whitespace("\n".into()), Location::default()),
-            Token::new_id_hash("header", Location::default()),
+            Token::new_hash("header", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
             Token::new_delim('.', Location::default()),
             Token::new_ident("nav", Location::default()),
@@ -1585,14 +1573,14 @@ mod test {
         stream.close();
 
         let tokens = vec![
-            Token::new_id_hash("red0", Location::default()),
+            Token::new_hash("red0", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
-            Token::new_id_hash("-Red", Location::default()),
+            Token::new_hash("-Red", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
-            Token::new_id_hash("--red", Location::default()),
+            Token::new_hash("--red", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
             // `#--\\red`
-            Token::new_id_hash("--red", Location::default()),
+            Token::new_hash("--red", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
             // `#0red` => 0red
             Token::new_hash("0red", Location::default()),
@@ -1601,7 +1589,7 @@ mod test {
             Token::new_hash("-0red", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
             // `#_Red`
-            Token::new_id_hash("_Red", Location::default()),
+            Token::new_hash("_Red", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
             // `#.red` => [#, ., red]
             Token::new_delim('#', Location::default()),
@@ -1609,13 +1597,13 @@ mod test {
             Token::new_ident("red", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
             // `#rêd`
-            Token::new_id_hash("rêd", Location::default()),
+            Token::new_hash("rêd", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
             // `#êrd`
-            Token::new_id_hash("êrd", Location::default()),
+            Token::new_hash("êrd", Location::default()),
             Token::new(TokenType::Whitespace(" ".into()), Location::default()),
             // `#\\.red\\`
-            Token::new_id_hash(".red\u{FFFD}", Location::default()),
+            Token::new_hash(".red\u{FFFD}", Location::default()),
             Token::new(TokenType::Eof, Location::default()),
         ];
         let mut tokenizer = Tokenizer::new(&mut stream, Location::default());