Mirror of https://github.com/sharkdp/bat.git

Merge branch 'master' into 2783-setting-terminal-title
@@ -441,7 +441,7 @@ mod tests {
        fn new() -> Self {
            SyntaxDetectionTest {
                assets: HighlightingAssets::from_binary(),
                syntax_mapping: SyntaxMapping::builtin(),
                syntax_mapping: SyntaxMapping::new(),
                temp_dir: TempDir::new().expect("creation of temporary directory"),
            }
        }
@@ -3,7 +3,7 @@ use std::path::Path;
use std::time::SystemTime;

use semver::Version;
use serde::{Deserialize, Serialize};
use serde_derive::{Deserialize, Serialize};

use crate::error::*;
@@ -3,8 +3,7 @@ use super::*;
use std::collections::BTreeMap;
use std::convert::TryFrom;

use serde::Deserialize;
use serde::Serialize;
use serde_derive::{Deserialize, Serialize};

use once_cell::unsync::OnceCell;
@@ -121,7 +121,7 @@ impl App {
            _ => unreachable!("other values for --paging are not allowed"),
        };

        let mut syntax_mapping = SyntaxMapping::builtin();
        let mut syntax_mapping = SyntaxMapping::new();

        if let Some(values) = self.matches.get_many::<String>("ignored-suffix") {
            for suffix in values {
@@ -130,7 +130,9 @@ impl App {
        }

        if let Some(values) = self.matches.get_many::<String>("map-syntax") {
            for from_to in values {
            // later args take precedence over earlier ones, hence `.rev()`
            // see: https://github.com/sharkdp/bat/pull/2755#discussion_r1456416875
            for from_to in values.rev() {
                let parts: Vec<_> = from_to.split(':').collect();

                if parts.len() != 2 {
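The `.rev()` above leans on the precedence rule spelled out later in this diff: custom rules inserted earlier into a `SyntaxMapping` sit in front and win, and `get_syntax_for` returns the first match. A minimal sketch of that behaviour, mirroring the `custom_mappings_precedence` test below (the `bat::{MappingTarget, SyntaxMapping}` import path is an assumption, not shown in this diff):

```rust
use bat::{MappingTarget, SyntaxMapping};

fn main() {
    let mut map = SyntaxMapping::new();

    // Inserted first, so it sits in front and takes precedence.
    map.insert("/path/to/foo", MappingTarget::MapTo("alpha")).ok();
    // Inserted later; it loses to the rule above for the same glob.
    map.insert("/path/to/foo", MappingTarget::MapTo("bravo")).ok();

    assert_eq!(
        map.get_syntax_for("/path/to/foo"),
        Some(MappingTarget::MapTo("alpha"))
    );
}
```

Reversing the `--map-syntax` values therefore makes the last argument on the command line the one that ends up in front.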
@@ -78,9 +78,11 @@ fn run_cache_subcommand(
    Ok(())
}

fn get_syntax_mapping_to_paths<'a>(
    mappings: &[(GlobMatcher, MappingTarget<'a>)],
) -> HashMap<&'a str, Vec<String>> {
fn get_syntax_mapping_to_paths<'r, 't, I>(mappings: I) -> HashMap<&'t str, Vec<String>>
where
    I: IntoIterator<Item = (&'r GlobMatcher, &'r MappingTarget<'t>)>,
    't: 'r, // target text outlives rule
{
    let mut map = HashMap::new();
    for mapping in mappings {
        if let (matcher, MappingTarget::MapTo(s)) = mapping {
@@ -123,7 +125,7 @@ pub fn get_languages(config: &Config, cache_dir: &Path) -> Result<String> {

    languages.sort_by_key(|lang| lang.name.to_uppercase());

    let configured_languages = get_syntax_mapping_to_paths(config.syntax_mapping.mappings());
    let configured_languages = get_syntax_mapping_to_paths(config.syntax_mapping.all_mappings());

    for lang in &mut languages {
        if let Some(additional_paths) = configured_languages.get(lang.name.as_str()) {
@@ -47,7 +47,7 @@ impl<'b> Controller<'b> {
        &self,
        inputs: Vec<Input>,
        output_buffer: Option<&mut dyn std::fmt::Write>,
        handle_error: impl Fn(&Error, &mut dyn Write),
        mut handle_error: impl FnMut(&Error, &mut dyn Write),
    ) -> Result<bool> {
        let mut output_type;
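Relaxing the handler bound from `Fn` to `FnMut` lets callers hand the controller a closure that mutates captured state. A hand-written sketch of a handler that now type-checks (the surrounding controller call is omitted; only `bat::error::Error` is taken from the crate):

```rust
use std::io::Write;

use bat::error::Error;

fn make_handler() {
    let mut error_count = 0;
    // Mutating `error_count` makes this closure FnMut rather than Fn,
    // which the updated signature above now accepts.
    let mut handle_error = |err: &Error, out: &mut dyn Write| {
        error_count += 1;
        let _ = writeln!(out, "[bat error] {}", err);
    };
    let _ = &mut handle_error;
}
```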

src/printer.rs (136 lines changed)

@@ -7,8 +7,6 @@ use nu_ansi_term::Style;
|
||||
|
||||
use bytesize::ByteSize;
|
||||
|
||||
use console::AnsiCodeIterator;
|
||||
|
||||
use syntect::easy::HighlightLines;
|
||||
use syntect::highlighting::Color;
|
||||
use syntect::highlighting::Theme;
|
||||
@@ -33,9 +31,23 @@ use crate::line_range::RangeCheckResult;
|
||||
use crate::preprocessor::{expand_tabs, replace_nonprintable};
|
||||
use crate::style::StyleComponent;
|
||||
use crate::terminal::{as_terminal_escaped, to_ansi_color};
|
||||
use crate::vscreen::AnsiStyle;
|
||||
use crate::vscreen::{AnsiStyle, EscapeSequence, EscapeSequenceIterator};
|
||||
use crate::wrapping::WrappingMode;
|
||||
|
||||
const ANSI_UNDERLINE_ENABLE: EscapeSequence = EscapeSequence::CSI {
|
||||
raw_sequence: "\x1B[4m",
|
||||
parameters: "4",
|
||||
intermediates: "",
|
||||
final_byte: "m",
|
||||
};
|
||||
|
||||
const ANSI_UNDERLINE_DISABLE: EscapeSequence = EscapeSequence::CSI {
|
||||
raw_sequence: "\x1B[24m",
|
||||
parameters: "24",
|
||||
intermediates: "",
|
||||
final_byte: "m",
|
||||
};
|
||||
|
||||
pub enum OutputHandle<'a> {
|
||||
IoWrite(&'a mut dyn io::Write),
|
||||
FmtWrite(&'a mut dyn fmt::Write),
|
||||
@@ -287,6 +299,14 @@ impl<'a> InteractivePrinter<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_header_component_indent_length(&self) -> usize {
|
||||
if self.config.style_components.grid() && self.panel_width > 0 {
|
||||
self.panel_width + 2
|
||||
} else {
|
||||
self.panel_width
|
||||
}
|
||||
}
|
||||
|
||||
fn print_header_component_indent(&mut self, handle: &mut OutputHandle) -> Result<()> {
|
||||
if self.config.style_components.grid() {
|
||||
write!(
|
||||
@@ -302,6 +322,30 @@ impl<'a> InteractivePrinter<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn print_header_component_with_indent(
|
||||
&mut self,
|
||||
handle: &mut OutputHandle,
|
||||
content: &str,
|
||||
) -> Result<()> {
|
||||
self.print_header_component_indent(handle)?;
|
||||
writeln!(handle, "{}", content)
|
||||
}
|
||||
|
||||
fn print_header_multiline_component(
|
||||
&mut self,
|
||||
handle: &mut OutputHandle,
|
||||
content: &str,
|
||||
) -> Result<()> {
|
||||
let mut content = content;
|
||||
let content_width = self.config.term_width - self.get_header_component_indent_length();
|
||||
while content.len() > content_width {
|
||||
let (content_line, remaining) = content.split_at(content_width);
|
||||
self.print_header_component_with_indent(handle, content_line)?;
|
||||
content = remaining;
|
||||
}
|
||||
self.print_header_component_with_indent(handle, content)
|
||||
}
|
||||
|
||||
fn preprocess(&self, text: &str, cursor: &mut usize) -> String {
|
||||
if self.config.tab_width > 0 {
|
||||
return expand_tabs(text, self.config.tab_width, cursor);
|
||||
@@ -377,31 +421,32 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
header_components.iter().try_for_each(|component| {
|
||||
self.print_header_component_indent(handle)?;
|
||||
|
||||
match component {
|
||||
StyleComponent::HeaderFilename => writeln!(
|
||||
handle,
|
||||
"{}{}{}",
|
||||
description
|
||||
.kind()
|
||||
.map(|kind| format!("{}: ", kind))
|
||||
.unwrap_or_else(|| "".into()),
|
||||
self.colors.header_value.paint(description.title()),
|
||||
mode
|
||||
),
|
||||
|
||||
header_components
|
||||
.iter()
|
||||
.try_for_each(|component| match component {
|
||||
StyleComponent::HeaderFilename => {
|
||||
let header_filename = format!(
|
||||
"{}{}{}",
|
||||
description
|
||||
.kind()
|
||||
.map(|kind| format!("{}: ", kind))
|
||||
.unwrap_or_else(|| "".into()),
|
||||
self.colors.header_value.paint(description.title()),
|
||||
mode
|
||||
);
|
||||
self.print_header_multiline_component(handle, &header_filename)
|
||||
}
|
||||
StyleComponent::HeaderFilesize => {
|
||||
let bsize = metadata
|
||||
.size
|
||||
.map(|s| format!("{}", ByteSize(s)))
|
||||
.unwrap_or_else(|| "-".into());
|
||||
writeln!(handle, "Size: {}", self.colors.header_value.paint(bsize))
|
||||
let header_filesize =
|
||||
format!("Size: {}", self.colors.header_value.paint(bsize));
|
||||
self.print_header_multiline_component(handle, &header_filesize)
|
||||
}
|
||||
_ => Ok(()),
|
||||
}
|
||||
})?;
|
||||
})?;
|
||||
|
||||
if self.config.style_components.grid() {
|
||||
if self.content_type.map_or(false, |c| c.is_text()) || self.config.show_nonprintable {
|
||||
@@ -521,7 +566,7 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
self.config.highlighted_lines.0.check(line_number) == RangeCheckResult::InRange;
|
||||
|
||||
if highlight_this_line && self.config.theme == "ansi" {
|
||||
self.ansi_style.update("^[4m");
|
||||
self.ansi_style.update(ANSI_UNDERLINE_ENABLE);
|
||||
}
|
||||
|
||||
let background_color = self
|
||||
@@ -548,23 +593,17 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
let italics = self.config.use_italic_text;
|
||||
|
||||
for &(style, region) in ®ions {
|
||||
let ansi_iterator = AnsiCodeIterator::new(region);
|
||||
let ansi_iterator = EscapeSequenceIterator::new(region);
|
||||
for chunk in ansi_iterator {
|
||||
match chunk {
|
||||
// ANSI escape passthrough.
|
||||
(ansi, true) => {
|
||||
self.ansi_style.update(ansi);
|
||||
write!(handle, "{}", ansi)?;
|
||||
}
|
||||
|
||||
// Regular text.
|
||||
(text, false) => {
|
||||
let text = &*self.preprocess(text, &mut cursor_total);
|
||||
EscapeSequence::Text(text) => {
|
||||
let text = self.preprocess(text, &mut cursor_total);
|
||||
let text_trimmed = text.trim_end_matches(|c| c == '\r' || c == '\n');
|
||||
|
||||
write!(
|
||||
handle,
|
||||
"{}",
|
||||
"{}{}",
|
||||
as_terminal_escaped(
|
||||
style,
|
||||
&format!("{}{}", self.ansi_style, text_trimmed),
|
||||
@@ -572,9 +611,11 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
colored_output,
|
||||
italics,
|
||||
background_color
|
||||
)
|
||||
),
|
||||
self.ansi_style.to_reset_sequence(),
|
||||
)?;
|
||||
|
||||
// Pad the rest of the line.
|
||||
if text.len() != text_trimmed.len() {
|
||||
if let Some(background_color) = background_color {
|
||||
let ansi_style = Style {
|
||||
@@ -592,6 +633,12 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
write!(handle, "{}", &text[text_trimmed.len()..])?;
|
||||
}
|
||||
}
|
||||
|
||||
// ANSI escape passthrough.
|
||||
_ => {
|
||||
write!(handle, "{}", chunk.raw())?;
|
||||
self.ansi_style.update(chunk);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -601,17 +648,11 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
}
|
||||
} else {
|
||||
for &(style, region) in ®ions {
|
||||
let ansi_iterator = AnsiCodeIterator::new(region);
|
||||
let ansi_iterator = EscapeSequenceIterator::new(region);
|
||||
for chunk in ansi_iterator {
|
||||
match chunk {
|
||||
// ANSI escape passthrough.
|
||||
(ansi, true) => {
|
||||
self.ansi_style.update(ansi);
|
||||
write!(handle, "{}", ansi)?;
|
||||
}
|
||||
|
||||
// Regular text.
|
||||
(text, false) => {
|
||||
EscapeSequence::Text(text) => {
|
||||
let text = self.preprocess(
|
||||
text.trim_end_matches(|c| c == '\r' || c == '\n'),
|
||||
&mut cursor_total,
|
||||
@@ -654,7 +695,7 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
// It wraps.
|
||||
write!(
|
||||
handle,
|
||||
"{}\n{}",
|
||||
"{}{}\n{}",
|
||||
as_terminal_escaped(
|
||||
style,
|
||||
&format!("{}{}", self.ansi_style, line_buf),
|
||||
@@ -663,6 +704,7 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
self.config.use_italic_text,
|
||||
background_color
|
||||
),
|
||||
self.ansi_style.to_reset_sequence(),
|
||||
panel_wrap.clone().unwrap()
|
||||
)?;
|
||||
|
||||
@@ -691,6 +733,12 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
)
|
||||
)?;
|
||||
}
|
||||
|
||||
// ANSI escape passthrough.
|
||||
_ => {
|
||||
write!(handle, "{}", chunk.raw())?;
|
||||
self.ansi_style.update(chunk);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -711,8 +759,8 @@ impl<'a> Printer for InteractivePrinter<'a> {
|
||||
}
|
||||
|
||||
if highlight_this_line && self.config.theme == "ansi" {
|
||||
self.ansi_style.update("^[24m");
|
||||
write!(handle, "\x1B[24m")?;
|
||||
write!(handle, "{}", ANSI_UNDERLINE_DISABLE.raw())?;
|
||||
self.ansi_style.update(ANSI_UNDERLINE_DISABLE);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@@ -1,12 +1,23 @@
|
||||
use std::path::Path;
|
||||
|
||||
use crate::error::Result;
|
||||
use ignored_suffixes::IgnoredSuffixes;
|
||||
|
||||
use globset::{Candidate, GlobBuilder, GlobMatcher};
|
||||
|
||||
use crate::error::Result;
|
||||
use builtin::BUILTIN_MAPPINGS;
|
||||
use ignored_suffixes::IgnoredSuffixes;
|
||||
|
||||
mod builtin;
|
||||
pub mod ignored_suffixes;
|
||||
|
||||
fn make_glob_matcher(from: &str) -> Result<GlobMatcher> {
|
||||
let matcher = GlobBuilder::new(from)
|
||||
.case_insensitive(true)
|
||||
.literal_separator(true)
|
||||
.build()?
|
||||
.compile_matcher();
|
||||
Ok(matcher)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
#[non_exhaustive]
|
||||
pub enum MappingTarget<'a> {
|
||||
@@ -29,204 +40,72 @@ pub enum MappingTarget<'a> {
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct SyntaxMapping<'a> {
|
||||
mappings: Vec<(GlobMatcher, MappingTarget<'a>)>,
|
||||
/// User-defined mappings at run time.
|
||||
///
|
||||
/// Rules in front have precedence.
|
||||
custom_mappings: Vec<(GlobMatcher, MappingTarget<'a>)>,
|
||||
pub(crate) ignored_suffixes: IgnoredSuffixes<'a>,
|
||||
}
|
||||
|
||||
impl<'a> SyntaxMapping<'a> {
|
||||
pub fn empty() -> SyntaxMapping<'a> {
|
||||
pub fn new() -> SyntaxMapping<'a> {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
pub fn builtin() -> SyntaxMapping<'a> {
|
||||
let mut mapping = Self::empty();
|
||||
mapping.insert("*.h", MappingTarget::MapTo("C++")).unwrap();
|
||||
mapping
|
||||
.insert(".clang-format", MappingTarget::MapTo("YAML"))
|
||||
.unwrap();
|
||||
mapping.insert("*.fs", MappingTarget::MapTo("F#")).unwrap();
|
||||
mapping
|
||||
.insert("build", MappingTarget::MapToUnknown)
|
||||
.unwrap();
|
||||
mapping
|
||||
.insert("**/.ssh/config", MappingTarget::MapTo("SSH Config"))
|
||||
.unwrap();
|
||||
mapping
|
||||
.insert(
|
||||
"**/bat/config",
|
||||
MappingTarget::MapTo("Bourne Again Shell (bash)"),
|
||||
)
|
||||
.unwrap();
|
||||
mapping
|
||||
.insert(
|
||||
"/etc/profile",
|
||||
MappingTarget::MapTo("Bourne Again Shell (bash)"),
|
||||
)
|
||||
.unwrap();
|
||||
mapping
|
||||
.insert(
|
||||
"os-release",
|
||||
MappingTarget::MapTo("Bourne Again Shell (bash)"),
|
||||
)
|
||||
.unwrap();
|
||||
mapping
|
||||
.insert("*.pac", MappingTarget::MapTo("JavaScript (Babel)"))
|
||||
.unwrap();
|
||||
mapping
|
||||
.insert("fish_history", MappingTarget::MapTo("YAML"))
|
||||
.unwrap();
|
||||
|
||||
for glob in ["*.jsonl", "*.sarif"] {
|
||||
mapping.insert(glob, MappingTarget::MapTo("JSON")).unwrap();
|
||||
}
|
||||
|
||||
// See #2151, https://nmap.org/book/nse-language.html
|
||||
mapping
|
||||
.insert("*.nse", MappingTarget::MapTo("Lua"))
|
||||
.unwrap();
|
||||
|
||||
// See #1008
|
||||
mapping
|
||||
.insert("rails", MappingTarget::MapToUnknown)
|
||||
.unwrap();
|
||||
|
||||
mapping
|
||||
.insert("Containerfile", MappingTarget::MapTo("Dockerfile"))
|
||||
.unwrap();
|
||||
|
||||
mapping
|
||||
.insert("*.ksh", MappingTarget::MapTo("Bourne Again Shell (bash)"))
|
||||
.unwrap();
|
||||
|
||||
// Nginx and Apache syntax files both want to style all ".conf" files
|
||||
// see #1131 and #1137
|
||||
mapping
|
||||
.insert("*.conf", MappingTarget::MapExtensionToUnknown)
|
||||
.unwrap();
|
||||
|
||||
for glob in &[
|
||||
"/etc/nginx/**/*.conf",
|
||||
"/etc/nginx/sites-*/**/*",
|
||||
"nginx.conf",
|
||||
"mime.types",
|
||||
] {
|
||||
mapping.insert(glob, MappingTarget::MapTo("nginx")).unwrap();
|
||||
}
|
||||
|
||||
for glob in &[
|
||||
"/etc/apache2/**/*.conf",
|
||||
"/etc/apache2/sites-*/**/*",
|
||||
"httpd.conf",
|
||||
] {
|
||||
mapping
|
||||
.insert(glob, MappingTarget::MapTo("Apache Conf"))
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
for glob in &[
|
||||
"**/systemd/**/*.conf",
|
||||
"**/systemd/**/*.example",
|
||||
"*.automount",
|
||||
"*.device",
|
||||
"*.dnssd",
|
||||
"*.link",
|
||||
"*.mount",
|
||||
"*.netdev",
|
||||
"*.network",
|
||||
"*.nspawn",
|
||||
"*.path",
|
||||
"*.service",
|
||||
"*.scope",
|
||||
"*.slice",
|
||||
"*.socket",
|
||||
"*.swap",
|
||||
"*.target",
|
||||
"*.timer",
|
||||
] {
|
||||
mapping.insert(glob, MappingTarget::MapTo("INI")).unwrap();
|
||||
}
|
||||
|
||||
// unix mail spool
|
||||
for glob in &["/var/spool/mail/*", "/var/mail/*"] {
|
||||
mapping.insert(glob, MappingTarget::MapTo("Email")).unwrap()
|
||||
}
|
||||
|
||||
// pacman hooks
|
||||
mapping
|
||||
.insert("*.hook", MappingTarget::MapTo("INI"))
|
||||
.unwrap();
|
||||
|
||||
mapping
|
||||
.insert("*.ron", MappingTarget::MapTo("Rust"))
|
||||
.unwrap();
|
||||
|
||||
// Global git config files rooted in `$XDG_CONFIG_HOME/git/` or `$HOME/.config/git/`
|
||||
// See e.g. https://git-scm.com/docs/git-config#FILES
|
||||
match (
|
||||
std::env::var_os("XDG_CONFIG_HOME").filter(|val| !val.is_empty()),
|
||||
std::env::var_os("HOME")
|
||||
.filter(|val| !val.is_empty())
|
||||
.map(|home| Path::new(&home).join(".config")),
|
||||
) {
|
||||
(Some(xdg_config_home), Some(default_config_home))
|
||||
if xdg_config_home == default_config_home => {
|
||||
insert_git_config_global(&mut mapping, &xdg_config_home)
|
||||
}
|
||||
(Some(xdg_config_home), Some(default_config_home)) /* else guard */ => {
|
||||
insert_git_config_global(&mut mapping, &xdg_config_home);
|
||||
insert_git_config_global(&mut mapping, &default_config_home)
|
||||
}
|
||||
(Some(config_home), None) => insert_git_config_global(&mut mapping, &config_home),
|
||||
(None, Some(config_home)) => insert_git_config_global(&mut mapping, &config_home),
|
||||
(None, None) => (),
|
||||
};
|
||||
|
||||
fn insert_git_config_global(mapping: &mut SyntaxMapping, config_home: impl AsRef<Path>) {
|
||||
let git_config_path = config_home.as_ref().join("git");
|
||||
|
||||
mapping
|
||||
.insert(
|
||||
&git_config_path.join("config").to_string_lossy(),
|
||||
MappingTarget::MapTo("Git Config"),
|
||||
)
|
||||
.ok();
|
||||
|
||||
mapping
|
||||
.insert(
|
||||
&git_config_path.join("ignore").to_string_lossy(),
|
||||
MappingTarget::MapTo("Git Ignore"),
|
||||
)
|
||||
.ok();
|
||||
|
||||
mapping
|
||||
.insert(
|
||||
&git_config_path.join("attributes").to_string_lossy(),
|
||||
MappingTarget::MapTo("Git Attributes"),
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
|
||||
mapping
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, from: &str, to: MappingTarget<'a>) -> Result<()> {
|
||||
let glob = GlobBuilder::new(from)
|
||||
.case_insensitive(true)
|
||||
.literal_separator(true)
|
||||
.build()?;
|
||||
self.mappings.push((glob.compile_matcher(), to));
|
||||
let matcher = make_glob_matcher(from)?;
|
||||
self.custom_mappings.push((matcher, to));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn mappings(&self) -> &[(GlobMatcher, MappingTarget<'a>)] {
|
||||
&self.mappings
|
||||
/// Returns an iterator over all mappings. User-defined mappings are listed
|
||||
/// before builtin mappings; mappings in front have higher precedence.
|
||||
///
|
||||
/// Builtin mappings' `GlobMatcher`s are lazily compiled.
|
||||
///
|
||||
/// Note that this function only returns mappings that are valid under the
|
||||
/// current environment. For details see [`Self::builtin_mappings`].
|
||||
pub fn all_mappings(&self) -> impl Iterator<Item = (&GlobMatcher, &MappingTarget<'a>)> {
|
||||
self.custom_mappings()
|
||||
.iter()
|
||||
.map(|(matcher, target)| (matcher, target)) // as_ref
|
||||
.chain(
|
||||
// we need a map with a closure to "do" the lifetime variance
|
||||
// see: https://discord.com/channels/273534239310479360/1120124565591425034/1170543402870382653
|
||||
// also, clippy false positive:
|
||||
// see: https://github.com/rust-lang/rust-clippy/issues/9280
|
||||
#[allow(clippy::map_identity)]
|
||||
self.builtin_mappings().map(|rule| rule),
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn get_syntax_for(&self, path: impl AsRef<Path>) -> Option<MappingTarget<'a>> {
|
||||
/// Returns an iterator over all valid builtin mappings. Mappings in front
|
||||
/// have higher precedence.
|
||||
///
|
||||
/// The `GlobMatcher`s are lazily compiled.
|
||||
///
|
||||
/// Mappings that are invalid under the current environment (i.e. rule
|
||||
/// requires environment variable(s) that is unset, or the joined string
|
||||
/// after variable(s) replacement is not a valid glob expression) are
|
||||
/// ignored.
|
||||
pub fn builtin_mappings(
|
||||
&self,
|
||||
) -> impl Iterator<Item = (&'static GlobMatcher, &'static MappingTarget<'static>)> {
|
||||
BUILTIN_MAPPINGS
|
||||
.iter()
|
||||
.filter_map(|(matcher, target)| matcher.as_ref().map(|glob| (glob, target)))
|
||||
}
|
||||
|
||||
/// Returns all user-defined mappings.
|
||||
pub fn custom_mappings(&self) -> &[(GlobMatcher, MappingTarget<'a>)] {
|
||||
&self.custom_mappings
|
||||
}
|
||||
|
||||
pub fn get_syntax_for(&self, path: impl AsRef<Path>) -> Option<MappingTarget<'a>> {
|
||||
// Try matching on the file name as-is.
|
||||
let candidate = Candidate::new(&path);
|
||||
let candidate_filename = path.as_ref().file_name().map(Candidate::new);
|
||||
for (ref glob, ref syntax) in self.mappings.iter().rev() {
|
||||
for (glob, syntax) in self.all_mappings() {
|
||||
if glob.is_match_candidate(&candidate)
|
||||
|| candidate_filename
|
||||
.as_ref()
|
||||
@@ -252,9 +131,46 @@ impl<'a> SyntaxMapping<'a> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn basic() {
|
||||
let mut map = SyntaxMapping::empty();
|
||||
fn builtin_mappings_work() {
|
||||
let map = SyntaxMapping::new();
|
||||
|
||||
assert_eq!(
|
||||
map.get_syntax_for("/path/to/build"),
|
||||
Some(MappingTarget::MapToUnknown)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_fixed_builtin_mappings_can_compile() {
|
||||
let map = SyntaxMapping::new();
|
||||
|
||||
// collect call evaluates all lazy closures
|
||||
// fixed builtin mappings will panic if they fail to compile
|
||||
let _mappings = map.builtin_mappings().collect::<Vec<_>>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn builtin_mappings_matcher_only_compile_once() {
|
||||
let map = SyntaxMapping::new();
|
||||
|
||||
let two_iterations: Vec<_> = (0..2)
|
||||
.map(|_| {
|
||||
// addresses of every matcher
|
||||
map.builtin_mappings()
|
||||
.map(|(matcher, _)| matcher as *const _ as usize)
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect();
|
||||
|
||||
// if the matchers are only compiled once, their address should remain the same
|
||||
assert_eq!(two_iterations[0], two_iterations[1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn custom_mappings_work() {
|
||||
let mut map = SyntaxMapping::new();
|
||||
map.insert("/path/to/Cargo.lock", MappingTarget::MapTo("TOML"))
|
||||
.ok();
|
||||
map.insert("/path/to/.ignore", MappingTarget::MapTo("Git Ignore"))
|
||||
@@ -273,52 +189,32 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn user_can_override_builtin_mappings() {
|
||||
let mut map = SyntaxMapping::builtin();
|
||||
fn custom_mappings_override_builtin() {
|
||||
let mut map = SyntaxMapping::new();
|
||||
|
||||
assert_eq!(
|
||||
map.get_syntax_for("/etc/profile"),
|
||||
Some(MappingTarget::MapTo("Bourne Again Shell (bash)"))
|
||||
map.get_syntax_for("/path/to/httpd.conf"),
|
||||
Some(MappingTarget::MapTo("Apache Conf"))
|
||||
);
|
||||
map.insert("/etc/profile", MappingTarget::MapTo("My Syntax"))
|
||||
map.insert("httpd.conf", MappingTarget::MapTo("My Syntax"))
|
||||
.ok();
|
||||
assert_eq!(
|
||||
map.get_syntax_for("/etc/profile"),
|
||||
map.get_syntax_for("/path/to/httpd.conf"),
|
||||
Some(MappingTarget::MapTo("My Syntax"))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn builtin_mappings() {
|
||||
let map = SyntaxMapping::builtin();
|
||||
fn custom_mappings_precedence() {
|
||||
let mut map = SyntaxMapping::new();
|
||||
|
||||
map.insert("/path/to/foo", MappingTarget::MapTo("alpha"))
|
||||
.ok();
|
||||
map.insert("/path/to/foo", MappingTarget::MapTo("bravo"))
|
||||
.ok();
|
||||
assert_eq!(
|
||||
map.get_syntax_for("/path/to/build"),
|
||||
Some(MappingTarget::MapToUnknown)
|
||||
map.get_syntax_for("/path/to/foo"),
|
||||
Some(MappingTarget::MapTo("alpha"))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// verifies that SyntaxMapping::builtin() doesn't repeat `Glob`-based keys
|
||||
fn no_duplicate_builtin_keys() {
|
||||
let mappings = SyntaxMapping::builtin().mappings;
|
||||
for i in 0..mappings.len() {
|
||||
let tail = mappings[i + 1..].into_iter();
|
||||
let (dupl, _): (Vec<_>, Vec<_>) =
|
||||
tail.partition(|item| item.0.glob() == mappings[i].0.glob());
|
||||
|
||||
// emit repeats on failure
|
||||
assert_eq!(
|
||||
dupl.len(),
|
||||
0,
|
||||
"Glob pattern `{}` mapped to multiple: {:?}",
|
||||
mappings[i].0.glob().glob(),
|
||||
{
|
||||
let (_, mut dupl_targets): (Vec<GlobMatcher>, Vec<MappingTarget>) =
|
||||
dupl.into_iter().cloned().unzip();
|
||||
dupl_targets.push(mappings[i].1)
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|

src/syntax_mapping/builtin.rs (new file, 91 lines)

@@ -0,0 +1,91 @@
use std::env;

use globset::GlobMatcher;
use once_cell::sync::Lazy;

use crate::syntax_mapping::{make_glob_matcher, MappingTarget};

// Static syntax mappings generated from /src/syntax_mapping/builtins/ by the
// build script (/build/syntax_mapping.rs).
include!(concat!(
    env!("OUT_DIR"),
    "/codegen_static_syntax_mappings.rs"
));

// The defined matcher strings are analysed at compile time and converted into
// lazily-compiled `GlobMatcher`s. This is so that the string searches are moved
// from run time to compile time, thus improving startup performance.
//
// To any future maintainer (including possibly myself) wondering why there is
// not a `BuiltinMatcher` enum that looks like this:
//
// ```
// enum BuiltinMatcher {
//     Fixed(&'static str),
//     Dynamic(Lazy<Option<String>>),
// }
// ```
//
// Because there was. I tried it and threw it out.
//
// Naively looking at the problem from a distance, this may seem like a good
// design (strongly typed etc. etc.). It would also save on compiled size by
// extracting out common behaviour into functions. But while actually
// implementing the lazy matcher compilation logic, I realised that it's most
// convenient for `BUILTIN_MAPPINGS` to have the following type:
//
// `[(Lazy<Option<GlobMatcher>>, MappingTarget); N]`
//
// The benefit for this is that operations like listing all builtin mappings
// would be effectively memoised. The caller would not have to compile another
// `GlobMatcher` for rules that they have previously visited.
//
// Unfortunately, this means we are going to have to store a distinct closure
// for each rule anyway, which makes a `BuiltinMatcher` enum a pointless layer
// of indirection.
//
// In the current implementation, the closure within each generated rule simply
// calls either `build_matcher_fixed` or `build_matcher_dynamic`, depending on
// whether the defined matcher contains dynamic segments or not.
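The generated `codegen_static_syntax_mappings.rs` is not part of this diff. As a rough, hand-written sketch of the shape the comment above describes (array name, rule count, and rule contents here are illustrative, not copied from the build script), each generated entry pairs a lazily-compiled matcher with its target:

```rust
// Illustrative only; the real array is emitted by build/syntax_mapping.rs.
static BUILTIN_MAPPINGS_SKETCH: [(Lazy<Option<GlobMatcher>>, MappingTarget); 2] = [
    // Fixed rule: the glob string is fully known at compile time.
    (
        Lazy::new(|| Some(build_matcher_fixed("*.ron"))),
        MappingTarget::MapTo("Rust"),
    ),
    // Dynamic rule: an environment variable is substituted on first use,
    // and the whole rule silently degrades to `None` if that fails.
    (
        Lazy::new(|| {
            build_matcher_dynamic(&[
                MatcherSegment::Env("XDG_CONFIG_HOME"),
                MatcherSegment::Text("/git/config"),
            ])
        }),
        MappingTarget::MapTo("Git Config"),
    ),
];
```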
/// Compile a fixed glob string into a glob matcher.
///
/// A failure to compile is a fatal error.
///
/// Used internally by `Lazy<Option<GlobMatcher>>`'s lazy evaluation closure.
fn build_matcher_fixed(from: &str) -> GlobMatcher {
    make_glob_matcher(from).expect("A builtin fixed glob matcher failed to compile")
}

/// Join a list of matcher segments to create a glob string, replacing all
/// environment variables, then compile to a glob matcher.
///
/// Returns `None` if any replacement fails, or if the joined glob string fails
/// to compile.
///
/// Used internally by `Lazy<Option<GlobMatcher>>`'s lazy evaluation closure.
fn build_matcher_dynamic(segs: &[MatcherSegment]) -> Option<GlobMatcher> {
    // join segments
    let mut buf = String::new();
    for seg in segs {
        match seg {
            MatcherSegment::Text(s) => buf.push_str(s),
            MatcherSegment::Env(var) => {
                let replaced = env::var(var).ok()?;
                buf.push_str(&replaced);
            }
        }
    }
    // compile glob matcher
    let matcher = make_glob_matcher(&buf).ok()?;
    Some(matcher)
}

/// A segment of a dynamic builtin matcher.
///
/// Used internally by `Lazy<Option<GlobMatcher>>`'s lazy evaluation closure.
#[derive(Clone, Debug)]
enum MatcherSegment {
    Text(&'static str),
    Env(&'static str),
}

src/syntax_mapping/builtins/README.md (new file, 116 lines)

@@ -0,0 +1,116 @@
# `/src/syntax_mapping/builtins`

The files in this directory define path/name-based syntax mappings, which amend
and take precedence over the extension/content-based syntax mappings provided by
[syntect](https://github.com/trishume/syntect).

## File organisation

Each TOML file should describe the syntax mappings of a single application, or
otherwise a set of logically-related rules.

What defines "a single application" here is deliberately vague, since the
file-splitting is purely for maintainability reasons. (Technically, we could
just as well use a single TOML file.) So just use common sense.

TOML files should reside in the corresponding subdirectory of the platform(s)
that they intend to target. At compile time, the build script will go through
each subdirectory that is applicable to the compilation target, collect the
syntax mappings defined by all TOML files, and embed them into the binary.

## File syntax

Each TOML file should contain a single section named `mappings`, with each of
its keys being a language identifier (first column of `bat -L`; also referred to
as "target").

The value of each key should be an array of strings, with each item being a glob
matcher. We will call each of these items a "rule".

For example, if `foo-application` uses both TOML and YAML configuration files,
we could write something like this:

```toml
# 30-foo-application.toml
[mappings]
"TOML" = [
    # rules for TOML syntax go here
    "/usr/share/foo-application/toml-config/*.conf",
    "/etc/foo-application/toml-config/*.conf",
]
"YAML" = [
    # rules for YAML syntax go here
    # ...
]
```
### Dynamic environment variable replacement

In addition to the standard glob matcher syntax, rules also support dynamic
replacement of environment variables at runtime. This allows us to concisely
handle things like [XDG](https://specifications.freedesktop.org/basedir-spec/latest/).

All environment variables intended to be replaced at runtime must be enclosed in
`${}`, for example `"/foo/*/${YOUR_ENV}-suffix/*.log"`. Note that this is the
**only** admissible syntax; other variable substitution syntaxes are not
supported and will either cause a compile time error, or be treated as plain
text.

For example, if `foo-application` also supports per-user configuration files, we
could write something like this:

```toml
# 30-foo-application.toml
[mappings]
"TOML" = [
    # rules for TOML syntax go here
    "/usr/share/foo-application/toml-config/*.conf",
    "/etc/foo-application/toml-config/*.conf",
    "${XDG_CONFIG_HOME}/foo-application/toml-config/*.conf",
    "${HOME}/.config/foo-application/toml-config/*.conf",
]
"YAML" = [
    # rules for YAML syntax go here
    # ...
]
```

If any environment variable replacement in a rule fails (for example when a
variable is unset), or if the glob string after replacements is invalid, the
entire rule will be ignored.

### Explicitly mapping to unknown

Sometimes it may be necessary to "unset" a particular syntect mapping - perhaps
a syntax's matching rules are "too greedy" and claim files that they should
not. In this case, there are two special identifiers:
`MappingTarget::MapToUnknown` and `MappingTarget::MapExtensionToUnknown`
(corresponding to the two variants of the `syntax_mapping::MappingTarget` enum).

An example of this would be `*.conf` files in general. So we may write something
like this:

```toml
# 99-unset-ambiguous-extensions.toml
[mappings]
"MappingTarget::MapExtensionToUnknown" = [
    "*.conf",
]
```
## Ordering

At compile time, all TOML files applicable to the target are processed in
lexicographical filename order. So `00-foo.toml` takes precedence over
`10-bar.toml`, which takes precedence over `20-baz.toml`, and so on. Note that
**only** the filenames of the TOML files are taken into account; the
subdirectories they are placed in have no influence on ordering.

This behaviour can occasionally be useful for creating high/low priority rules,
such as in the aforementioned example of explicitly mapping `*.conf` files to
unknown. Generally this should not be much of a concern though, since rules
should be written as specifically as possible for each application.

Rules within each TOML file are processed (and therefore matched) in the order
in which they are defined. At runtime, the syntax selection algorithm will
short-circuit and return the target of the first matching rule.
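At the API level these compiled-in rules surface through `SyntaxMapping::new()`. As a small usage sketch (the `bat::{MappingTarget, SyntaxMapping}` import path is assumed), the rule in one of the hunks below that maps files named `build` to unknown resolves exactly as in the `builtin_mappings_work` test earlier in this diff:

```rust
use bat::{MappingTarget, SyntaxMapping};

fn main() {
    // `SyntaxMapping::new()` starts from the compiled-in builtin rules.
    let map = SyntaxMapping::new();

    // A file literally named "build" is explicitly mapped to unknown,
    // so syntect's "NAnt Build File" extension match never claims it.
    assert_eq!(
        map.get_syntax_for("/path/to/build"),
        Some(MappingTarget::MapToUnknown)
    );
}
```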
src/syntax_mapping/builtins/bsd-family/.gitkeep (new file, empty)

@@ -0,0 +1,2 @@
[mappings]
"Bourne Again Shell (bash)" = ["/etc/os-release", "/var/run/os-release"]

src/syntax_mapping/builtins/common/.gitkeep (new file, empty)

src/syntax_mapping/builtins/common/50-apache.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[mappings]
"Apache Conf" = ["httpd.conf"]

src/syntax_mapping/builtins/common/50-bat.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[mappings]
"Bourne Again Shell (bash)" = ["**/bat/config"]

src/syntax_mapping/builtins/common/50-container.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[mappings]
"Dockerfile" = ["Containerfile"]

src/syntax_mapping/builtins/common/50-cpp.toml (new file, 6 lines)
@@ -0,0 +1,6 @@
[mappings]
"C++" = [
    # probably better than the default Objective C mapping #877
    "*.h",
]
"YAML" = [".clang-format"]

src/syntax_mapping/builtins/common/50-f-sharp.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[mappings]
"F#" = ["*.fs"]
src/syntax_mapping/builtins/common/50-git.toml (new file, 10 lines)
@@ -0,0 +1,10 @@
# Global git config files rooted in `$XDG_CONFIG_HOME/git/` or `$HOME/.config/git/`
# See e.g. https://git-scm.com/docs/git-config#FILES

[mappings]
"Git Config" = ["${XDG_CONFIG_HOME}/git/config", "${HOME}/.config/git/config"]
"Git Ignore" = ["${XDG_CONFIG_HOME}/git/ignore", "${HOME}/.config/git/ignore"]
"Git Attributes" = [
    "${XDG_CONFIG_HOME}/git/attributes",
    "${HOME}/.config/git/attributes",
]

src/syntax_mapping/builtins/common/50-jsonl.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
# JSON Lines is a simple variation of JSON #2535
[mappings]
"JSON" = ["*.jsonl"]

src/syntax_mapping/builtins/common/50-nginx.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[mappings]
"nginx" = ["nginx.conf", "mime.types"]

src/syntax_mapping/builtins/common/50-nmap.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
[mappings]
# See #2151, https://nmap.org/book/nse-language.html
"Lua" = ["*.nse"]
@@ -0,0 +1,3 @@
# 1515
[mappings]
"JavaScript (Babel)" = ["*.pac"]

src/syntax_mapping/builtins/common/50-ron.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
# Rusty Object Notation #2427
[mappings]
"Rust" = ["*.ron"]

src/syntax_mapping/builtins/common/50-sarif.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
# SARIF is a format for reporting static analysis results #2695
[mappings]
"JSON" = ["*.sarif"]

src/syntax_mapping/builtins/common/50-ssh.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[mappings]
"SSH Config" = ["**/.ssh/config"]

@@ -0,0 +1,5 @@
[mappings]
"MappingTarget::MapExtensionToUnknown" = [
    # common extension used for all kinds of formats
    "*.conf",
]
@@ -0,0 +1,7 @@
[mappings]
"MappingTarget::MapToUnknown" = [
    # "NAnt Build File" should only match *.build files, not files named "build"
    "build",
    # "bin/rails" scripts in a Ruby project misidentified as HTML (Rails) #1008
    "rails",
]

src/syntax_mapping/builtins/linux/.gitkeep (new file, empty)

src/syntax_mapping/builtins/linux/50-os-release.toml (new file, 7 lines)
@@ -0,0 +1,7 @@
[mappings]
"Bourne Again Shell (bash)" = [
    "/etc/os-release",
    "/usr/lib/os-release",
    "/etc/initrd-release",
    "/usr/lib/extension-release.d/extension-release.*",
]

src/syntax_mapping/builtins/linux/50-pacman.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
[mappings]
# pacman hooks
"INI" = ["/usr/share/libalpm/hooks/*.hook", "/etc/pacman.d/hooks/*.hook"]

src/syntax_mapping/builtins/linux/50-systemd.toml (new file, 21 lines)
@@ -0,0 +1,21 @@
[mappings]
"INI" = [
    "**/systemd/**/*.conf",
    "**/systemd/**/*.example",
    "*.automount",
    "*.device",
    "*.dnssd",
    "*.link",
    "*.mount",
    "*.netdev",
    "*.network",
    "*.nspawn",
    "*.path",
    "*.service",
    "*.scope",
    "*.slice",
    "*.socket",
    "*.swap",
    "*.target",
    "*.timer",
]
src/syntax_mapping/builtins/macos/.gitkeep (new file, empty)

src/syntax_mapping/builtins/unix-family/.gitkeep (new file, empty)

src/syntax_mapping/builtins/unix-family/50-apache.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[mappings]
"Apache Conf" = ["/etc/apache2/**/*.conf", "/etc/apache2/sites-*/**/*"]

@@ -0,0 +1,2 @@
[mappings]
"YAML" = ["fish_history"]

@@ -0,0 +1,3 @@
# KornShell is backward-compatible with the Bourne shell #2633
[mappings]
"Bourne Again Shell (bash)" = ["*.ksh"]

@@ -0,0 +1,2 @@
[mappings]
"Email" = ["/var/spool/mail/*", "/var/mail/*"]

src/syntax_mapping/builtins/unix-family/50-nginx.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[mappings]
"nginx" = ["/etc/nginx/**/*.conf", "/etc/nginx/sites-*/**/*"]

src/syntax_mapping/builtins/unix-family/50-shell.toml (new file, 5 lines)
@@ -0,0 +1,5 @@
[mappings]
"Bourne Again Shell (bash)" = [
    # used by lots of shells
    "/etc/profile",
]

src/syntax_mapping/builtins/windows/.gitkeep (new file, empty)

src/vscreen.rs (735 lines changed)

@@ -1,4 +1,8 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::{
|
||||
fmt::{Display, Formatter},
|
||||
iter::Peekable,
|
||||
str::CharIndices,
|
||||
};
|
||||
|
||||
// Wrapper to avoid unnecessary branching when input doesn't have ANSI escape sequences.
|
||||
pub struct AnsiStyle {
|
||||
@@ -10,7 +14,7 @@ impl AnsiStyle {
|
||||
AnsiStyle { attributes: None }
|
||||
}
|
||||
|
||||
pub fn update(&mut self, sequence: &str) -> bool {
|
||||
pub fn update(&mut self, sequence: EscapeSequence) -> bool {
|
||||
match &mut self.attributes {
|
||||
Some(a) => a.update(sequence),
|
||||
None => {
|
||||
@@ -19,6 +23,13 @@ impl AnsiStyle {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_reset_sequence(&mut self) -> String {
|
||||
match &mut self.attributes {
|
||||
Some(a) => a.to_reset_sequence(),
|
||||
None => String::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for AnsiStyle {
|
||||
@@ -31,6 +42,8 @@ impl Display for AnsiStyle {
|
||||
}
|
||||
|
||||
struct Attributes {
|
||||
has_sgr_sequences: bool,
|
||||
|
||||
foreground: String,
|
||||
background: String,
|
||||
underlined: String,
|
||||
@@ -61,11 +74,20 @@ struct Attributes {
|
||||
/// ON: ^[9m
|
||||
/// OFF: ^[29m
|
||||
strike: String,
|
||||
|
||||
/// The hyperlink sequence.
|
||||
/// FORMAT: \x1B]8;{ID};{URL}\e\\
|
||||
///
|
||||
/// `\e\\` may be replaced with BEL `\x07`.
|
||||
/// Setting both {ID} and {URL} to an empty string represents no hyperlink.
|
||||
hyperlink: String,
|
||||
}
|
||||
|
||||
impl Attributes {
|
||||
pub fn new() -> Self {
|
||||
Attributes {
|
||||
has_sgr_sequences: false,
|
||||
|
||||
foreground: "".to_owned(),
|
||||
background: "".to_owned(),
|
||||
underlined: "".to_owned(),
|
||||
@@ -76,34 +98,56 @@ impl Attributes {
|
||||
underline: "".to_owned(),
|
||||
italic: "".to_owned(),
|
||||
strike: "".to_owned(),
|
||||
hyperlink: "".to_owned(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Update the attributes with an escape sequence.
|
||||
/// Returns `false` if the sequence is unsupported.
|
||||
pub fn update(&mut self, sequence: &str) -> bool {
|
||||
let mut chars = sequence.char_indices().skip(1);
|
||||
|
||||
if let Some((_, t)) = chars.next() {
|
||||
match t {
|
||||
'(' => self.update_with_charset('(', chars.map(|(_, c)| c)),
|
||||
')' => self.update_with_charset(')', chars.map(|(_, c)| c)),
|
||||
'[' => {
|
||||
if let Some((i, last)) = chars.last() {
|
||||
// SAFETY: Always starts with ^[ and ends with m.
|
||||
self.update_with_csi(last, &sequence[2..i])
|
||||
} else {
|
||||
false
|
||||
pub fn update(&mut self, sequence: EscapeSequence) -> bool {
|
||||
use EscapeSequence::*;
|
||||
match sequence {
|
||||
Text(_) => return false,
|
||||
Unknown(_) => { /* defer to update_with_unsupported */ }
|
||||
OSC {
|
||||
raw_sequence,
|
||||
command,
|
||||
..
|
||||
} => {
|
||||
if command.starts_with("8;") {
|
||||
return self.update_with_hyperlink(raw_sequence);
|
||||
}
|
||||
/* defer to update_with_unsupported */
|
||||
}
|
||||
CSI {
|
||||
final_byte,
|
||||
parameters,
|
||||
..
|
||||
} => {
|
||||
match final_byte {
|
||||
"m" => return self.update_with_sgr(parameters),
|
||||
_ => {
|
||||
// NOTE(eth-p): We might want to ignore these, since they involve cursor or buffer manipulation.
|
||||
/* defer to update_with_unsupported */
|
||||
}
|
||||
}
|
||||
_ => self.update_with_unsupported(sequence),
|
||||
}
|
||||
} else {
|
||||
false
|
||||
NF { nf_sequence, .. } => {
|
||||
let mut iter = nf_sequence.chars();
|
||||
match iter.next() {
|
||||
Some('(') => return self.update_with_charset('(', iter),
|
||||
Some(')') => return self.update_with_charset(')', iter),
|
||||
_ => { /* defer to update_with_unsupported */ }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.update_with_unsupported(sequence.raw())
|
||||
}
|
||||
|
||||
fn sgr_reset(&mut self) {
|
||||
self.has_sgr_sequences = false;
|
||||
|
||||
self.foreground.clear();
|
||||
self.background.clear();
|
||||
self.underlined.clear();
|
||||
@@ -121,6 +165,7 @@ impl Attributes {
|
||||
.map(|p| p.parse::<u16>())
|
||||
.map(|p| p.unwrap_or(0)); // Treat errors as 0.
|
||||
|
||||
self.has_sgr_sequences = true;
|
||||
while let Some(p) = iter.next() {
|
||||
match p {
|
||||
0 => self.sgr_reset(),
|
||||
@@ -149,19 +194,23 @@ impl Attributes {
|
||||
true
|
||||
}
|
||||
|
||||
fn update_with_csi(&mut self, finalizer: char, sequence: &str) -> bool {
|
||||
if finalizer == 'm' {
|
||||
self.update_with_sgr(sequence)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn update_with_unsupported(&mut self, sequence: &str) -> bool {
|
||||
self.unknown_buffer.push_str(sequence);
|
||||
false
|
||||
}
|
||||
|
||||
fn update_with_hyperlink(&mut self, sequence: &str) -> bool {
|
||||
if sequence == "8;;" {
|
||||
// Empty hyperlink ID and HREF -> end of hyperlink.
|
||||
self.hyperlink.clear();
|
||||
} else {
|
||||
self.hyperlink.clear();
|
||||
self.hyperlink.push_str(sequence);
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
fn update_with_charset(&mut self, kind: char, set: impl Iterator<Item = char>) -> bool {
|
||||
self.charset = format!("\x1B{}{}", kind, set.take(1).collect::<String>());
|
||||
true
|
||||
@@ -179,13 +228,35 @@ impl Attributes {
|
||||
_ => format!("\x1B[{}m", color),
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets an ANSI escape sequence to reset all the known attributes.
|
||||
pub fn to_reset_sequence(&self) -> String {
|
||||
let mut buf = String::with_capacity(17);
|
||||
|
||||
// TODO: Enable me in a later pull request.
|
||||
// if self.has_sgr_sequences {
|
||||
// buf.push_str("\x1B[m");
|
||||
// }
|
||||
|
||||
if !self.hyperlink.is_empty() {
|
||||
buf.push_str("\x1B]8;;\x1B\\"); // Disable hyperlink.
|
||||
}
|
||||
|
||||
// TODO: Enable me in a later pull request.
|
||||
// if !self.charset.is_empty() {
|
||||
// // https://espterm.github.io/docs/VT100%20escape%20codes.html
|
||||
// buf.push_str("\x1B(B\x1B)B"); // setusg0 and setusg1
|
||||
// }
|
||||
|
||||
buf
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Attributes {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}{}{}{}{}{}{}{}{}",
|
||||
"{}{}{}{}{}{}{}{}{}{}",
|
||||
self.foreground,
|
||||
self.background,
|
||||
self.underlined,
|
||||
@@ -195,6 +266,7 @@ impl Display for Attributes {
|
||||
self.underline,
|
||||
self.italic,
|
||||
self.strike,
|
||||
self.hyperlink,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -210,3 +282,612 @@ fn join(
|
||||
.collect::<Vec<String>>()
|
||||
.join(delimiter)
|
||||
}
|
||||
|
||||
/// A range of indices for a raw ANSI escape sequence.
|
||||
#[derive(Debug, PartialEq)]
|
||||
enum EscapeSequenceOffsets {
|
||||
Text {
|
||||
start: usize,
|
||||
end: usize,
|
||||
},
|
||||
Unknown {
|
||||
start: usize,
|
||||
end: usize,
|
||||
},
|
||||
NF {
|
||||
// https://en.wikipedia.org/wiki/ANSI_escape_code#nF_Escape_sequences
|
||||
start_sequence: usize,
|
||||
start: usize,
|
||||
end: usize,
|
||||
},
|
||||
OSC {
|
||||
// https://en.wikipedia.org/wiki/ANSI_escape_code#OSC_(Operating_System_Command)_sequences
|
||||
start_sequence: usize,
|
||||
start_command: usize,
|
||||
start_terminator: usize,
|
||||
end: usize,
|
||||
},
|
||||
CSI {
|
||||
// https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_(Control_Sequence_Introducer)_sequences
|
||||
start_sequence: usize,
|
||||
start_parameters: usize,
|
||||
start_intermediates: usize,
|
||||
start_final_byte: usize,
|
||||
end: usize,
|
||||
},
|
||||
}
|
||||
|
||||
/// An iterator over the offsets of ANSI/VT escape sequences within a string.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```ignore
|
||||
/// let iter = EscapeSequenceOffsetsIterator::new("\x1B[33mThis is yellow text.\x1B[m");
|
||||
/// ```
|
||||
struct EscapeSequenceOffsetsIterator<'a> {
|
||||
text: &'a str,
|
||||
chars: Peekable<CharIndices<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> EscapeSequenceOffsetsIterator<'a> {
|
||||
pub fn new(text: &'a str) -> EscapeSequenceOffsetsIterator<'a> {
|
||||
return EscapeSequenceOffsetsIterator {
|
||||
text,
|
||||
chars: text.char_indices().peekable(),
|
||||
};
|
||||
}
|
||||
|
||||
/// Takes values from the iterator while the predicate returns true.
|
||||
/// If the predicate returns false, that value is left.
|
||||
fn chars_take_while(&mut self, pred: impl Fn(char) -> bool) -> Option<(usize, usize)> {
|
||||
if self.chars.peek().is_none() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let start = self.chars.peek().unwrap().0;
|
||||
let mut end: usize = start;
|
||||
while let Some((i, c)) = self.chars.peek() {
|
||||
if !pred(*c) {
|
||||
break;
|
||||
}
|
||||
|
||||
end = *i + c.len_utf8();
|
||||
self.chars.next();
|
||||
}
|
||||
|
||||
Some((start, end))
|
||||
}
|
||||
|
||||
fn next_text(&mut self) -> Option<EscapeSequenceOffsets> {
|
||||
match self.chars_take_while(|c| c != '\x1B') {
|
||||
None => None,
|
||||
Some((start, end)) => Some(EscapeSequenceOffsets::Text { start, end }),
|
||||
}
|
||||
}
|
||||
|
||||
fn next_sequence(&mut self) -> Option<EscapeSequenceOffsets> {
|
||||
let (start_sequence, c) = self.chars.next().expect("to not be finished");
|
||||
match self.chars.peek() {
|
||||
None => Some(EscapeSequenceOffsets::Unknown {
|
||||
start: start_sequence,
|
||||
end: start_sequence + c.len_utf8(),
|
||||
}),
|
||||
|
||||
Some((_, ']')) => self.next_osc(start_sequence),
|
||||
Some((_, '[')) => self.next_csi(start_sequence),
|
||||
Some((i, c)) => match c {
|
||||
'\x20'..='\x2F' => self.next_nf(start_sequence),
|
||||
c => Some(EscapeSequenceOffsets::Unknown {
|
||||
start: start_sequence,
|
||||
end: i + c.len_utf8(),
|
||||
}),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn next_osc(&mut self, start_sequence: usize) -> Option<EscapeSequenceOffsets> {
|
||||
let (osc_open_index, osc_open_char) = self.chars.next().expect("to not be finished");
|
||||
debug_assert_eq!(osc_open_char, ']');
|
||||
|
||||
let mut start_terminator: usize;
|
||||
let mut end_sequence: usize;
|
||||
|
||||
loop {
|
||||
match self.chars_take_while(|c| !matches!(c, '\x07' | '\x1B')) {
|
||||
None => {
|
||||
start_terminator = self.text.len();
|
||||
end_sequence = start_terminator;
|
||||
break;
|
||||
}
|
||||
|
||||
Some((_, end)) => {
|
||||
start_terminator = end;
|
||||
end_sequence = end;
|
||||
}
|
||||
}
|
||||
|
||||
match self.chars.next() {
|
||||
Some((ti, '\x07')) => {
|
||||
end_sequence = ti + '\x07'.len_utf8();
|
||||
break;
|
||||
}
|
||||
|
||||
Some((ti, '\x1B')) => {
|
||||
match self.chars.next() {
|
||||
Some((i, '\\')) => {
|
||||
end_sequence = i + '\\'.len_utf8();
|
||||
break;
|
||||
}
|
||||
|
||||
None => {
|
||||
end_sequence = ti + '\x1B'.len_utf8();
|
||||
break;
|
||||
}
|
||||
|
||||
_ => {
|
||||
// Repeat, since `\\`(anything) isn't a valid ST.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None => {
|
||||
// Prematurely ends.
|
||||
break;
|
||||
}
|
||||
|
||||
Some((_, tc)) => {
|
||||
panic!("this should not be reached: char {:?}", tc)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Some(EscapeSequenceOffsets::OSC {
|
||||
start_sequence,
|
||||
start_command: osc_open_index + osc_open_char.len_utf8(),
|
||||
start_terminator: start_terminator,
|
||||
end: end_sequence,
|
||||
})
|
||||
}
|
||||
|
||||
fn next_csi(&mut self, start_sequence: usize) -> Option<EscapeSequenceOffsets> {
|
||||
let (csi_open_index, csi_open_char) = self.chars.next().expect("to not be finished");
|
||||
debug_assert_eq!(csi_open_char, '[');
|
||||
|
||||
let start_parameters: usize = csi_open_index + csi_open_char.len_utf8();
|
||||
|
||||
// Keep iterating while within the range of `0x30-0x3F`.
|
||||
let mut start_intermediates: usize = start_parameters;
|
||||
if let Some((_, end)) = self.chars_take_while(|c| matches!(c, '\x30'..='\x3F')) {
|
||||
start_intermediates = end;
|
||||
}
|
||||
|
||||
// Keep iterating while within the range of `0x20-0x2F`.
|
||||
let mut start_final_byte: usize = start_intermediates;
|
||||
if let Some((_, end)) = self.chars_take_while(|c| matches!(c, '\x20'..='\x2F')) {
|
||||
start_final_byte = end;
|
||||
}
|
||||
|
||||
// Take the last char.
|
||||
let end_of_sequence = match self.chars.next() {
|
||||
None => start_final_byte,
|
||||
Some((i, c)) => i + c.len_utf8(),
|
||||
};
|
||||
|
||||
Some(EscapeSequenceOffsets::CSI {
|
||||
start_sequence,
|
||||
start_parameters,
|
||||
start_intermediates,
|
||||
start_final_byte,
|
||||
end: end_of_sequence,
|
||||
})
|
||||
}
|
||||
|
||||
fn next_nf(&mut self, start_sequence: usize) -> Option<EscapeSequenceOffsets> {
|
||||
let (nf_open_index, nf_open_char) = self.chars.next().expect("to not be finished");
|
||||
debug_assert!(matches!(nf_open_char, '\x20'..='\x2F'));
|
||||
|
||||
let start: usize = nf_open_index;
|
||||
let mut end: usize = start;
|
||||
|
||||
// Keep iterating while within the range of `0x20-0x2F`.
|
||||
match self.chars_take_while(|c| matches!(c, '\x20'..='\x2F')) {
|
||||
Some((_, i)) => end = i,
|
||||
None => {
|
||||
return Some(EscapeSequenceOffsets::NF {
|
||||
start_sequence,
|
||||
start,
|
||||
end,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Get the final byte.
|
||||
match self.chars.next() {
|
||||
Some((i, c)) => end = i + c.len_utf8(),
|
||||
None => {}
|
||||
}
|
||||
|
||||
Some(EscapeSequenceOffsets::NF {
|
||||
start_sequence,
|
||||
start,
|
||||
end,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for EscapeSequenceOffsetsIterator<'a> {
|
||||
type Item = EscapeSequenceOffsets;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self.chars.peek() {
|
||||
Some((_, '\x1B')) => self.next_sequence(),
|
||||
Some((_, _)) => self.next_text(),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over ANSI/VT escape sequences within a string.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```ignore
|
||||
/// let iter = EscapeSequenceIterator::new("\x1B[33mThis is yellow text.\x1B[m");
|
||||
/// ```
|
||||
pub struct EscapeSequenceIterator<'a> {
|
||||
text: &'a str,
|
||||
offset_iter: EscapeSequenceOffsetsIterator<'a>,
|
||||
}
|
||||
|
||||
impl<'a> EscapeSequenceIterator<'a> {
|
||||
pub fn new(text: &'a str) -> EscapeSequenceIterator<'a> {
|
||||
return EscapeSequenceIterator {
|
||||
text,
|
||||
offset_iter: EscapeSequenceOffsetsIterator::new(text),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for EscapeSequenceIterator<'a> {
|
||||
type Item = EscapeSequence<'a>;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
use EscapeSequenceOffsets::*;
|
||||
self.offset_iter.next().map(|offsets| match offsets {
|
||||
Unknown { start, end } => EscapeSequence::Unknown(&self.text[start..end]),
|
||||
Text { start, end } => EscapeSequence::Text(&self.text[start..end]),
|
||||
NF {
|
||||
start_sequence,
|
||||
start,
|
||||
end,
|
||||
} => EscapeSequence::NF {
|
||||
raw_sequence: &self.text[start_sequence..end],
|
||||
nf_sequence: &self.text[start..end],
|
||||
},
|
||||
OSC {
|
||||
start_sequence,
|
||||
start_command,
|
||||
start_terminator,
|
||||
end,
|
||||
} => EscapeSequence::OSC {
|
||||
raw_sequence: &self.text[start_sequence..end],
|
||||
command: &self.text[start_command..start_terminator],
|
||||
terminator: &self.text[start_terminator..end],
|
||||
},
|
||||
CSI {
|
||||
start_sequence,
|
||||
start_parameters,
|
||||
start_intermediates,
|
||||
start_final_byte,
|
||||
end,
|
||||
} => EscapeSequence::CSI {
|
||||
raw_sequence: &self.text[start_sequence..end],
|
||||
parameters: &self.text[start_parameters..start_intermediates],
|
||||
intermediates: &self.text[start_intermediates..start_final_byte],
|
||||
final_byte: &self.text[start_final_byte..end],
|
||||
},
|
||||
})
|
||||
}
|
||||
}

/// A parsed ANSI/VT100 escape sequence.
#[derive(Debug, PartialEq)]
pub enum EscapeSequence<'a> {
    Text(&'a str),
    Unknown(&'a str),
    NF {
        raw_sequence: &'a str,
        nf_sequence: &'a str,
    },
    OSC {
        raw_sequence: &'a str,
        command: &'a str,
        terminator: &'a str,
    },
    CSI {
        raw_sequence: &'a str,
        parameters: &'a str,
        intermediates: &'a str,
        final_byte: &'a str,
    },
}

impl<'a> EscapeSequence<'a> {
    pub fn raw(&self) -> &'a str {
        use EscapeSequence::*;
        match *self {
            Text(raw) => raw,
            Unknown(raw) => raw,
            NF { raw_sequence, .. } => raw_sequence,
            OSC { raw_sequence, .. } => raw_sequence,
            CSI { raw_sequence, .. } => raw_sequence,
        }
    }
}
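
// Illustrative sketch (not part of the original file; the function name is hypothetical):
// `raw()` makes it straightforward to rebuild the input selectively, e.g. dropping OSC
// sequences (hyperlinks, window-title changes) while keeping the text and every other
// escape code intact.
#[allow(dead_code)]
fn example_strip_osc(input: &str) -> String {
    EscapeSequenceIterator::new(input)
        .filter(|seq| !matches!(seq, EscapeSequence::OSC { .. }))
        .map(|seq| seq.raw())
        .collect()
}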

#[cfg(test)]
mod tests {
    use crate::vscreen::{
        EscapeSequence, EscapeSequenceIterator, EscapeSequenceOffsets,
        EscapeSequenceOffsetsIterator,
    };

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_text() {
        let mut iter = EscapeSequenceOffsetsIterator::new("text");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::Text { start: 0, end: 4 })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_text_stops_at_esc() {
        let mut iter = EscapeSequenceOffsetsIterator::new("text\x1B[ming");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::Text { start: 0, end: 4 })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_osc_with_bel() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B]abc\x07");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::OSC {
                start_sequence: 0,
                start_command: 2,
                start_terminator: 5,
                end: 6,
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_osc_with_st() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B]abc\x1B\\");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::OSC {
                start_sequence: 0,
                start_command: 2,
                start_terminator: 5,
                end: 7,
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_osc_thats_broken() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B]ab");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::OSC {
                start_sequence: 0,
                start_command: 2,
                start_terminator: 4,
                end: 4,
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_csi() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B[m");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::CSI {
                start_sequence: 0,
                start_parameters: 2,
                start_intermediates: 2,
                start_final_byte: 2,
                end: 3
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_csi_with_parameters() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B[1;34m");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::CSI {
                start_sequence: 0,
                start_parameters: 2,
                start_intermediates: 6,
                start_final_byte: 6,
                end: 7
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_csi_with_intermediates() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B[$m");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::CSI {
                start_sequence: 0,
                start_parameters: 2,
                start_intermediates: 2,
                start_final_byte: 3,
                end: 4
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_csi_with_parameters_and_intermediates() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B[1$m");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::CSI {
                start_sequence: 0,
                start_parameters: 2,
                start_intermediates: 3,
                start_final_byte: 4,
                end: 5
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_csi_thats_broken() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B[");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::CSI {
                start_sequence: 0,
                start_parameters: 2,
                start_intermediates: 2,
                start_final_byte: 2,
                end: 2
            })
        );

        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B[1");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::CSI {
                start_sequence: 0,
                start_parameters: 2,
                start_intermediates: 3,
                start_final_byte: 3,
                end: 3
            })
        );

        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B[1$");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::CSI {
                start_sequence: 0,
                start_parameters: 2,
                start_intermediates: 3,
                start_final_byte: 4,
                end: 4
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_nf() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B($0");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::NF {
                start_sequence: 0,
                start: 1,
                end: 4
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_parses_nf_thats_broken() {
        let mut iter = EscapeSequenceOffsetsIterator::new("\x1B(");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::NF {
                start_sequence: 0,
                start: 1,
                end: 1
            })
        );
    }

    #[test]
    fn test_escape_sequence_offsets_iterator_iterates() {
        let mut iter = EscapeSequenceOffsetsIterator::new("text\x1B[33m\x1B]OSC\x07\x1B(0");
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::Text { start: 0, end: 4 })
        );
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::CSI {
                start_sequence: 4,
                start_parameters: 6,
                start_intermediates: 8,
                start_final_byte: 8,
                end: 9
            })
        );
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::OSC {
                start_sequence: 9,
                start_command: 11,
                start_terminator: 14,
                end: 15
            })
        );
        assert_eq!(
            iter.next(),
            Some(EscapeSequenceOffsets::NF {
                start_sequence: 15,
                start: 16,
                end: 18
            })
        );
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn test_escape_sequence_iterator_iterates() {
        let mut iter = EscapeSequenceIterator::new("text\x1B[33m\x1B]OSC\x07\x1B]OSC\x1B\\\x1B(0");
        assert_eq!(iter.next(), Some(EscapeSequence::Text("text")));
        assert_eq!(
            iter.next(),
            Some(EscapeSequence::CSI {
                raw_sequence: "\x1B[33m",
                parameters: "33",
                intermediates: "",
                final_byte: "m",
            })
        );
        assert_eq!(
            iter.next(),
            Some(EscapeSequence::OSC {
                raw_sequence: "\x1B]OSC\x07",
                command: "OSC",
                terminator: "\x07",
            })
        );
        assert_eq!(
            iter.next(),
            Some(EscapeSequence::OSC {
                raw_sequence: "\x1B]OSC\x1B\\",
                command: "OSC",
                terminator: "\x1B\\",
            })
        );
        assert_eq!(
            iter.next(),
            Some(EscapeSequence::NF {
                raw_sequence: "\x1B(0",
                nf_sequence: "(0",
            })
        );
        assert_eq!(iter.next(), None);
    }
}