Skip to content

Commit

Permalink
remove TokenType::IDHash
Browse files Browse the repository at this point in the history
fixes an issue where some colors could not be used
  • Loading branch information
Sharktheone committed Sep 12, 2024
1 parent 049fb5d commit e27dfe7
Show file tree
Hide file tree
Showing 4 changed files with 31 additions and 44 deletions.
1 change: 0 additions & 1 deletion crates/gosub_css3/src/parser/selector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -273,7 +273,6 @@ impl Css3<'_> {
self.tokenizer.reconsume();
self.parse_attribute_selector()?
}
TokenType::IDHash(value) => Node::new(NodeType::IdSelector { value }, t.location),
TokenType::Hash(value) => Node::new(NodeType::IdSelector { value }, t.location),
TokenType::Colon => {
let nt = self.tokenizer.lookahead(0);
Expand Down
9 changes: 0 additions & 9 deletions crates/gosub_css3/src/parser/value.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,15 +43,6 @@ impl Css3<'_> {

let t = self.consume_any()?;
match t.token_type {
TokenType::IDHash(value) => {
let node = Node::new(
NodeType::Ident {
value: format!("#{}", value),
},
t.location,
);
Ok(Some(node))
}
TokenType::Hash(value) => {
let node = Node::new(NodeType::Hash { value }, t.location);
Ok(Some(node))
Expand Down
17 changes: 13 additions & 4 deletions crates/gosub_css3/src/stylesheet.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
use crate::colors::RgbColor;
use anyhow::anyhow;
use core::fmt::Debug;
use gosub_shared::types::Result;
use std::cmp::Ordering;
use std::fmt::Display;

use anyhow::anyhow;

use gosub_shared::types::Result;

use crate::colors::RgbColor;

/// Defines a complete stylesheet with all its rules and the location where it was found
#[derive(Debug, PartialEq, Clone)]
pub struct CssStylesheet {
Expand Down Expand Up @@ -353,7 +356,11 @@ impl CssValue {
crate::node::NodeType::Percentage { value } => Ok(CssValue::Percentage(value)),
crate::node::NodeType::Dimension { value, unit } => Ok(CssValue::Unit(value, unit)),
crate::node::NodeType::String { value } => Ok(CssValue::String(value)),
crate::node::NodeType::Hash { value } => Ok(CssValue::String(value)),
crate::node::NodeType::Hash { mut value } => {
value.insert(0, '#');

Ok(CssValue::String(value))
}
crate::node::NodeType::Operator(_) => Ok(CssValue::None),
crate::node::NodeType::Calc { .. } => {
Ok(CssValue::Function("calc".to_string(), vec![]))
Expand Down Expand Up @@ -428,6 +435,8 @@ impl CssValue {

#[cfg(test)]
mod test {
use std::vec;

use super::*;

// #[test]
Expand Down
48 changes: 18 additions & 30 deletions crates/gosub_css3/src/tokenizer.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
use crate::unicode::{get_unicode_char, UnicodeChar};
use std::fmt;
use std::fmt::Debug;

use gosub_shared::byte_stream::Character::Ch;
use gosub_shared::byte_stream::{ByteStream, Character};
use gosub_shared::byte_stream::{Location, LocationHandler, Stream};
use std::fmt;
use std::fmt::Debug;

use crate::unicode::{get_unicode_char, UnicodeChar};

pub type Number = f32;

Expand Down Expand Up @@ -33,16 +35,10 @@ pub enum TokenType {
BadString(String),
/// A [`<whitespace-token>`](https://drafts.csswg.org/css-syntax/#whitespace-token-diagram)
Whitespace(String),
/// A [`<hash-token>`](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "unrestricted"
/// A [`<hash-token>`](https://drafts.csswg.org/css-syntax/#hash-token-diagram) (with the type flag set to "unrestricted")
///
/// The value does not include the `#` marker.
Hash(String),
/// A [`<hash-token>`](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "id"
///
/// The value does not include the `#` marker.
///
/// Hash that is a valid ID selector.
IDHash(String),
/// A `<delim-token>`
Delim(char),
/// A `<{-token>`
Expand Down Expand Up @@ -109,10 +105,6 @@ impl Token {
Token::new(TokenType::Delim(c), location)
}

fn new_id_hash(value: &str, location: Location) -> Token {
Token::new(TokenType::IDHash(value.to_string()), location)
}

/// Builds an unrestricted `<hash-token>` ([`TokenType::Hash`]) at `location`;
/// `value` excludes the leading `#` marker. After this commit, all hash tokens
/// (including valid ID selectors) use this single constructor.
fn new_hash(value: &str, location: Location) -> Token {
Token::new(TokenType::Hash(value.to_string()), location)
}
Expand Down Expand Up @@ -204,7 +196,6 @@ impl fmt::Display for Token {
| TokenType::Comment(val)
| TokenType::BadUrl(val)
| TokenType::Hash(val)
| TokenType::IDHash(val)
| TokenType::Ident(val)
| TokenType::Function(val)
| TokenType::QuotedString(val)
Expand Down Expand Up @@ -384,11 +375,7 @@ impl<'stream> Tokenizer<'stream> {
self.next_char();

if self.is_ident_char(self.current_char().into()) || self.is_start_of_escape(0) {
return if self.is_next_3_points_starts_ident_seq(0) {
Token::new_id_hash(self.consume_ident().as_str(), loc)
} else {
Token::new_hash(self.consume_ident().as_str(), loc)
};
return Token::new_hash(self.consume_ident().as_str(), loc);
}

Token::new_delim(c, loc)
Expand Down Expand Up @@ -962,9 +949,10 @@ impl<'stream> Tokenizer<'stream> {

#[cfg(test)]
mod test {
use super::*;
use gosub_shared::byte_stream::Encoding;

use super::*;

macro_rules! assert_token_eq {
($t1:expr, $t2:expr) => {
assert_eq!($t1.token_type, $t2.token_type)
Expand Down Expand Up @@ -1368,7 +1356,7 @@ mod test {
let tokens = vec![
// 1st css rule
Token::new(TokenType::Whitespace("\n".into()), Location::default()),
Token::new_id_hash("header", Location::default()),
Token::new_hash("header", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
Token::new_delim('.', Location::default()),
Token::new_ident("nav", Location::default()),
Expand Down Expand Up @@ -1585,14 +1573,14 @@ mod test {
stream.close();

let tokens = vec![
Token::new_id_hash("red0", Location::default()),
Token::new_hash("red0", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
Token::new_id_hash("-Red", Location::default()),
Token::new_hash("-Red", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
Token::new_id_hash("--red", Location::default()),
Token::new_hash("--red", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
// `#--\\red`
Token::new_id_hash("--red", Location::default()),
Token::new_hash("--red", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
// `#0red` => 0red
Token::new_hash("0red", Location::default()),
Expand All @@ -1601,21 +1589,21 @@ mod test {
Token::new_hash("-0red", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
// `#_Red`
Token::new_id_hash("_Red", Location::default()),
Token::new_hash("_Red", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
// `#.red` => [#, ., red]
Token::new_delim('#', Location::default()),
Token::new_delim('.', Location::default()),
Token::new_ident("red", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
// `#rêd`
Token::new_id_hash("rêd", Location::default()),
Token::new_hash("rêd", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
// `#êrd`
Token::new_id_hash("êrd", Location::default()),
Token::new_hash("êrd", Location::default()),
Token::new(TokenType::Whitespace(" ".into()), Location::default()),
// `#\\.red\\`
Token::new_id_hash(".red\u{FFFD}", Location::default()),
Token::new_hash(".red\u{FFFD}", Location::default()),
Token::new(TokenType::Eof, Location::default()),
];
let mut tokenizer = Tokenizer::new(&mut stream, Location::default());
Expand Down

0 comments on commit e27dfe7

Please sign in to comment.