Diffstat (limited to 'src/tokeniser.rs')
-rw-r--r--  src/tokeniser.rs  16
1 file changed, 8 insertions, 8 deletions
diff --git a/src/tokeniser.rs b/src/tokeniser.rs
index 4ff3d0b..8e3cb15 100644
--- a/src/tokeniser.rs
+++ b/src/tokeniser.rs
@@ -14,11 +14,11 @@ pub struct Tokeniser {
/// Line where the embedded source file begins.
pub embedded_first_line: usize,
/// Position of the next character to be consumed.
- pub position: Position,
+ pub position: SourcePosition,
/// Position of the most recently consumed character.
- pub prev_position: Position,
+ pub prev_position: SourcePosition,
/// Position of the first character of the current token.
- pub start_position: Position,
+ pub start_position: SourcePosition,
/// The source characters consumed for the current token.
pub consumed: String,
/// List of characters that start a new token.
@@ -35,9 +35,9 @@ impl Tokeniser {
source_path: path.map(|p| p.into()),
embedded_path: None,
embedded_first_line: 0,
- position: Position::ZERO,
- prev_position: Position::ZERO,
- start_position: Position::ZERO,
+ position: SourcePosition::ZERO,
+ prev_position: SourcePosition::ZERO,
+ start_position: SourcePosition::ZERO,
consumed: String::new(),
delimiters: Vec::new(),
terminators: Vec::new(),
@@ -145,11 +145,11 @@ impl Tokeniser {
Some(
SourceLocation {
path: Some(embedded_path.to_owned()),
- start: Position {
+ start: SourcePosition {
line: in_merged.start.line.saturating_sub(offset),
column: in_merged.start.column,
},
- end: Position {
+ end: SourcePosition {
line: in_merged.end.line.saturating_sub(offset),
column: in_merged.end.column,
}
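
Note: the SourcePosition type that these hunks switch to is not defined in this file, and its definition is not part of this diff. The following is only a minimal sketch of what it plausibly looks like, inferred from how the diff uses it (the ZERO constant and the line/column fields); the field types and the shape of SourceLocation are assumptions, not the actual definitions.

/// Sketch only: inferred from the diff above, not the real definition.
/// Line/column position within a source file.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct SourcePosition {
    pub line: usize,
    pub column: usize,
}

impl SourcePosition {
    /// Starting position, used to initialise the Tokeniser fields.
    pub const ZERO: Self = Self { line: 0, column: 0 };
}

/// Shape of SourceLocation as constructed in the third hunk.
/// The path field type (Option<PathBuf>) is an assumption.
pub struct SourceLocation {
    pub path: Option<std::path::PathBuf>,
    pub start: SourcePosition,
    pub end: SourcePosition,
}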