case class JsonTable(name: String, sparkSession: SparkSession, options: CaseInsensitiveStringMap, paths: Seq[String], userSpecifiedSchema: Option[StructType], fallbackFileFormat: Class[_ <: FileFormat]) extends FileTable with Product with Serializable
- Alphabetic
- By Inheritance
- JsonTable
- Serializable
- Product
- Equals
- FileTable
- SupportsWrite
- SupportsRead
- Table
- AnyRef
- Any
- Hide All
- Show All
- Public
- Protected
Instance Constructors
- new JsonTable(name: String, sparkSession: SparkSession, options: CaseInsensitiveStringMap, paths: Seq[String], userSpecifiedSchema: Option[StructType], fallbackFileFormat: Class[_ <: FileFormat])
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def capabilities(): Set[TableCapability]
- Definition Classes
- FileTable → Table
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- lazy val dataSchema: StructType
- Definition Classes
- FileTable
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- val fallbackFileFormat: Class[_ <: FileFormat]
Returns a V1 FileFormat class of the same file data source. This is a solution for the following cases:
1. File data source V2 implementations cause a regression. Users can disable the problematic data source via SQL configuration and fall back to FileFormat.
2. Catalog support is required, which is still under development for data source V2.
- lazy val fileIndex: PartitioningAwareFileIndex
- Definition Classes
- FileTable
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- def formatName: String
The string that represents the format that this data source provider uses.
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def inferSchema(files: Seq[FileStatus]): Option[StructType]
When possible, this method should return the schema of the given files.
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- val name: String
- Definition Classes
- JsonTable → Table
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def newScanBuilder(options: CaseInsensitiveStringMap): JsonScanBuilder
- Definition Classes
- JsonTable → SupportsRead
- def newWriteBuilder(info: LogicalWriteInfo): WriteBuilder
- Definition Classes
- JsonTable → SupportsWrite
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- val options: CaseInsensitiveStringMap
- def partitioning(): Array[Transform]
- Definition Classes
- FileTable → Table
- val paths: Seq[String]
- def productElementNames: Iterator[String]
- Definition Classes
- Product
- def properties(): Map[String, String]
- Definition Classes
- FileTable → Table
- lazy val schema: StructType
- Definition Classes
- FileTable → Table
- val sparkSession: SparkSession
- def supportsDataType(dataType: DataType): Boolean
Returns whether this format supports the given DataType in the read/write path.
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- val userSpecifiedSchema: Option[StructType]
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()