package analyzers

Type Members

class Analyzer extends AnyRef
-
-
-
-
-
-
-
case class CommonGramsTokenFilter(name: String, commonWords: Iterable[String] = Set.empty, ignoreCase: Boolean = false, queryMode: Boolean = false) extends TokenFilterDefinition with Product with Serializable
-
case class CustomAnalyzer(name: String) extends Analyzer with Product with Serializable
-
-
case class CustomNormalizer(name: String) extends Normalizer with Product with Serializable
-
-
-
case class EdgeNGramTokenFilter(name: String, minGram: Int = 1, maxGram: Int = 2, side: String = "front") extends TokenFilterDefinition with Product with Serializable
-
case class EdgeNGramTokenizer(name: String, minGram: Int = 1, maxGram: Int = 2, tokenChars: Iterable[String] = Nil) extends CustomizedTokenizer with Product with Serializable
-
case class ElisionTokenFilter(name: String, articles: Seq[String] = Nil) extends TokenFilterDefinition with Product with Serializable
-
case class KeywordMarkerTokenFilter(name: String, keywords: Seq[String] = Nil, ignoreCase: Boolean = false) extends TokenFilterDefinition with Product with Serializable
-
case class KeywordTokenizer(name: String, bufferSize: Int = 256) extends CustomizedTokenizer with Product with Serializable
-
-
-
case class LengthTokenFilter(name: String, min: Int = 0, max: Int = Integer.MAX_VALUE) extends TokenFilterDefinition with Product with Serializable
-
case class LimitTokenFilter(name: String, maxTokenCount: Int = 1, consumeAllTokens: Boolean = false) extends TokenFilterDefinition with Product with Serializable
-
case class MappingCharFilter(name: String, mappings: (String, String)*) extends CharFilterDefinition with Product with Serializable
-
case class NGramTokenFilter(name: String, minGram: Int = 1, maxGram: Int = 2) extends TokenFilterDefinition with Product with Serializable
-
case class NGramTokenizer(name: String, minGram: Int = 1, maxGram: Int = 2, tokenChars: Iterable[String] = Nil) extends CustomizedTokenizer with Product with Serializable
-
abstract class Normalizer extends AnyRef
-
-
-
case class PathHierarchyTokenizer(name: String, delimiter: Char = '/', replacement: Char = '/', bufferSize: Int = 1024, reverse: Boolean = false, skip: Int = 0) extends CustomizedTokenizer with Product with Serializable
-
case class PatternAnalyzerDefinition(name: String, regex: String, lowercase: Boolean = true) extends AnalyzerDefinition with Product with Serializable
-
case class PatternCaptureTokenFilter(name: String, patterns: Seq[String] = Nil, preserveOriginal: Boolean = true) extends TokenFilterDefinition with Product with Serializable
-
case class PatternReplaceCharFilter(name: String, pattern: String, replacement: String) extends CharFilterDefinition with Product with Serializable
-
case class PatternReplaceTokenFilter(name: String, pattern: String, replacement: String) extends TokenFilterDefinition with Product with Serializable
-
case class PatternTokenizer(name: String, pattern: String = "\\W+", flags: String = "", group: Int = 1) extends CustomizedTokenizer with Product with Serializable
-
case class PredefinedCharFilter(name: String) extends CharFilter with Product with Serializable
-
case class PredefinedTokenFilter(name: String) extends TokenFilter with Product with Serializable
-
case class PredefinedTokenizer(name: String) extends Tokenizer with Product with Serializable
-
case class ShingleTokenFilter(name: String, max_shingle_size: Int = 2, min_shingle_size: Int = 2, output_unigrams: Boolean = true, output_unigrams_if_no_shingles: Boolean = false, token_separator: String = " ", filler_token: String = "_") extends TokenFilterDefinition with Product with Serializable
-
case class SnowballAnalyzerDefinition(name: String, lang: String = "English", stopwords: Iterable[String] = Nil) extends AnalyzerDefinition with Product with Serializable
-
case class SnowballTokenFilter(name: String, language: String = "English") extends TokenFilterDefinition with Product with Serializable
-
case class StandardAnalyzerDefinition(name: String, stopwords: Iterable[String] = Nil, maxTokenLength: Int = 255) extends AnalyzerDefinition with Product with Serializable
-
case class StandardTokenizer(name: String, maxTokenLength: Int = 255) extends CustomizedTokenizer with Product with Serializable
-
-
case class StemmerTokenFilter(name: String, lang: String = "English") extends TokenFilterDefinition with Product with Serializable
-
case class StopAnalyzerDefinition(name: String, stopwords: Iterable[String] = Nil) extends AnalyzerDefinition with Product with Serializable
-
case class StopTokenFilter(name: String, language: Option[String] = None, stopwords: Iterable[String] = Nil, stopwordsPath: Option[String] = None, enablePositionIncrements: Option[Boolean] = None, removeTrailing: Option[Boolean] = None, ignoreCase: Option[Boolean] = None) extends TokenFilterDefinition with Product with Serializable
-
case class StopTokenFilterPath(name: String, stopwords_path: String, enablePositionIncrements: Boolean = false, ignoreCase: Boolean = false) extends TokenFilterDefinition with Product with Serializable
-
case class SynonymTokenFilter(name: String, path: Option[String] = None, synonyms: Set[String] = Set.empty, ignoreCase: Option[Boolean] = None, format: Option[String] = None, expand: Option[Boolean] = None, tokenizer: Option[Tokenizer] = None) extends TokenFilterDefinition with Product with Serializable
-
-
-
-
abstract class Tokenizer extends AnyRef
-
-
case class TruncateTokenFilter(name: String, length: Int = 10) extends TokenFilterDefinition with Product with Serializable
-
case class UaxUrlEmailTokenizer(name: String, maxTokenLength: Int = 255) extends CustomizedTokenizer with Product with Serializable
-
case class UniqueTokenFilter(name: String, onlyOnSamePosition: Boolean = false) extends TokenFilterDefinition with Product with Serializable
-
case class WordDelimiterTokenFilter(name: String, generateWordParts: Option[Boolean] = None, generateNumberParts: Option[Boolean] = None, catenateWords: Option[Boolean] = None, catenateNumbers: Option[Boolean] = None, catenateAll: Option[Boolean] = None, splitOnCaseChange: Option[Boolean] = None, preserveOriginal: Option[Boolean] = None, splitOnNumerics: Option[Boolean] = None, stemEnglishPossesive: Option[Boolean] = None) extends TokenFilterDefinition with Product with Serializable
Deprecated Value Members

(Since version 5.0.0) Use the language-specific analyzer in modules/analysis instead.