object StanfordParser
Value Members
- final def !=(arg0: Any): Boolean
- final def ##: Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- val baseMweSubs: Vector[(Vector[String], TaggedWord)]
- val baseWordTags: Vector[(String, String)]
- def clone(): AnyRef
- def coreLabels(s: String): List[CoreLabel]
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: AnyRef): Boolean
- final def getClass(): Class[_ <: AnyRef]
- lazy val gsf: GrammaticalStructureFactory
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- val lp: LexicalizedParser
- def mergeSubs(mwe: Vector[String], tw: TaggedWord)(tws: Vector[TaggedWord]): Vector[TaggedWord]
- def mergeTag(mwe: Vector[String], tag: String)(tws: Vector[TaggedWord]): Vector[TaggedWord]
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def parse(s: String): Tree
- def proseTree(s: String, wordTags: Vector[(String, String)] = baseWordTags, mweSubs: Vector[(Vector[String], TaggedWord)] = baseMweSubs): ProseTree
- def reTag(word: String, tag: String)(tw: TaggedWord): TaggedWord
- final def synchronized[T0](arg0: => T0): T0
- val tagger: MaxentTagger
- def texDisplay(s: String): MatchIterator
- def texInline(s: String): MatchIterator
- def texParse(s: String, wordTags: Vector[(String, String)] = baseWordTags, mweSubs: Vector[(Vector[String], TaggedWord)] = baseMweSubs): Tree
- lazy val tlp: PennTreebankLanguagePack
- def toString(): String
- val tokenizerFactory: TokenizerFactory[CoreLabel]
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- final def wait(): Unit
- def wordNetTags(tws: Vector[TaggedWord], depth: Int): Vector[Vector[TaggedWord]]
- def words(s: String): Buffer[Word]
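
The members above are bare signatures, so a short usage sketch of the main entry points may help. The snippet below is a minimal sketch, not taken from the project's documentation: the method names, signatures, and default arguments (`parse`, `texParse`, `proseTree`, `baseWordTags`, `baseMweSubs`) come from the listing above, while the surrounding setup (the demo object, the example sentence, and the assumption that `StanfordParser` and the Stanford CoreNLP jars with their model files are on the classpath) is assumed for illustration.

```scala
// Minimal sketch: assumes StanfordParser and the Stanford CoreNLP jars
// (including the models used by `lp` and `tagger`) are on the classpath.
import edu.stanford.nlp.trees.Tree

object StanfordParserDemo {
  def main(args: Array[String]): Unit = {
    val sentence = "every natural number greater than one has a prime factor"

    // Plain phrase-structure parse of the sentence.
    val tree: Tree = StanfordParser.parse(sentence)
    println(tree.pennString())

    // Parse with the default word-tag overrides and multi-word substitutions
    // (wordTags = baseWordTags, mweSubs = baseMweSubs, per the signature).
    val texTree: Tree = StanfordParser.texParse(sentence)
    println(texTree.pennString())

    // Same defaults, but returning the project's ProseTree representation.
    val prose = StanfordParser.proseTree(sentence)
    println(prose)
  }
}
```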
Deprecated Value Members
- def finalize(): Unit
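
The curried helpers `reTag`, `mergeTag`, and `mergeSubs` listed above operate on CoreNLP `TaggedWord` values. The sketch below only shows how their signatures compose; the matching behaviour suggested in the comments (replace the tag when the word matches, collapse a multi-word expression into one token) is inferred from the names and signatures rather than documented here, and the example words and tags are purely illustrative.

```scala
// Hedged sketch of the tagging helpers; TaggedWord is Stanford CoreNLP's
// edu.stanford.nlp.ling.TaggedWord, built here by hand instead of via `tagger`.
import edu.stanford.nlp.ling.TaggedWord

object TaggingHelpersDemo {
  def main(args: Array[String]): Unit = {
    val tws: Vector[TaggedWord] = Vector(
      new TaggedWord("such", "JJ"),
      new TaggedWord("that", "IN"),
      new TaggedWord("x", "NN")
    )

    // reTag(word, tag) is applied to one TaggedWord at a time; presumably it
    // swaps in `tag` when the word matches, so mapping applies it sentence-wide.
    val retagged = tws.map(tw => StanfordParser.reTag("x", "NNP")(tw))

    // mergeTag presumably collapses the multi-word expression into a single
    // token carrying the given tag.
    val merged = StanfordParser.mergeTag(Vector("such", "that"), "IN")(tws)

    // mergeSubs presumably substitutes the expression by the supplied TaggedWord.
    val substituted =
      StanfordParser.mergeSubs(Vector("such", "that"), new TaggedWord("so-that", "IN"))(tws)

    println(retagged)
    println(merged)
    println(substituted)
  }
}
```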