Packages

case class MagpieRows(schema: StructType, rows: Array[Row] = Array(), totalCount: Option[Long] = None) extends MagpieResultSet with SparkSchemaUtils with Product with Serializable

A collection of rows returned from a magpie command, optionally including the total count of the source data set.

schema

the schema of the source data set

rows

the spark rows returned by the command, with columns converted to strings for display

totalCount

the total size of the source data set for the command

Linear Supertypes
Serializable, Serializable, Product, Equals, SparkSchemaUtils, FieldUtils, TableUtils, LazyLogging, MagpieResultSet, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. MagpieRows
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. SparkSchemaUtils
  7. FieldUtils
  8. TableUtils
  9. LazyLogging
  10. MagpieResultSet
  11. AnyRef
  12. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new MagpieRows(schema: StructType, rows: Array[Row] = Array(), totalCount: Option[Long] = None)

    schema

    the schema of the source data set

    rows

    the spark rows returned by the command, with columns converted to strings for display

    totalCount

    the total size of the source data set for the command

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.CloneNotSupportedException] ) @native()
  6. def count: Int

    The number of rows returned by the command

    returns

    number of rows

    Definition Classes
    MagpieRows → MagpieResultSet
  7. def createReadSchema(structure: DataStructure): StructType
    Attributes
    protected
    Definition Classes
    SparkSchemaUtils
  8. def createStructType(fields: Seq[Field]): StructType
    Attributes
    protected
    Definition Classes
    SparkSchemaUtils
  9. def createWriteSchema(table: Table): StructType
    Attributes
    protected
    Definition Classes
    SparkSchemaUtils
  10. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  11. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  12. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  13. def getFieldType(t: DataType): FieldType
    Definition Classes
    FieldUtils
  14. def getSparkType(fieldType: FieldType): DataType
    Attributes
    protected
    Definition Classes
    FieldUtils
  15. def getWritableFields(table: Table): Seq[Field]
    Attributes
    protected
    Definition Classes
    TableUtils
  16. def header: Seq[String]
    Attributes
    protected
    Definition Classes
    MagpieRows → MagpieResultSet
  17. def isDirectMap(mapping: Map[String, String]): Boolean
    Attributes
    protected
    Definition Classes
    SparkSchemaUtils
  18. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  19. lazy val logger: Logger
    Attributes
    protected
    Definition Classes
    LazyLogging
    Annotations
    @transient()
  20. def mapDfColumns(sourceDf: DataFrame, structure: DataStructure, mapping: Map[String, String]): DataFrame
    Attributes
    protected
    Definition Classes
    SparkSchemaUtils
  21. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  22. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  23. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  24. def readFieldMetadata(schema: StructType): Seq[Field]
    Attributes
    protected
    Definition Classes
    SparkSchemaUtils
  25. def reverseMapDfColumns(df: DataFrame, mapping: Map[String, String]): DataFrame
    Attributes
    protected
    Definition Classes
    SparkSchemaUtils
  26. val rows: Array[Row]
  27. val schema: StructType
  28. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  29. val totalCount: Option[Long]
    Definition Classes
    MagpieRows → MagpieResultSet
  30. def validateSchema(actual: StructType, expected: StructType): Seq[String]
    Attributes
    protected
    Definition Classes
    SparkSchemaUtils
  31. def validateSchema(actual: Seq[Field], expected: Seq[Field]): Seq[String]
    Attributes
    protected
    Definition Classes
    TableUtils
  32. def validateTableDelete(mapping: Option[PersistenceMapping]): Future[Unit]
    Attributes
    protected
    Definition Classes
    TableUtils
  33. def values: Seq[Seq[String]]
    Attributes
    protected
    Definition Classes
    MagpieRows → MagpieResultSet
  34. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.InterruptedException] )
  35. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.InterruptedException] )
  36. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.InterruptedException] ) @native()

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from SparkSchemaUtils

Inherited from FieldUtils

Inherited from TableUtils

Inherited from LazyLogging

Inherited from MagpieResultSet

Inherited from AnyRef

Inherited from Any

Ungrouped