class SparkExecutor extends QueryExecutor[DataFrame]
Linear Supertypes
QueryExecutor[DataFrame], AnyRef, Any
Instance Constructors
- new SparkExecutor(spark: SparkSession, mappingsFile: String)
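A minimal construction sketch, not taken from this page: any existing `SparkSession` works, and the mappings-file path is a placeholder. Later snippets on this page reuse `spark`, `executor`, and the placeholder `df` defined here.

```scala
import org.apache.spark.sql.{DataFrame, SparkSession}

// Placeholder SparkSession and mappings path; substitute real values.
val spark = SparkSession.builder()
  .appName("SparkExecutor-example")
  .master("local[*]")
  .getOrCreate()

val executor = new SparkExecutor(spark, "/path/to/mappings.ttl")

// Placeholder intermediate DataFrame reused by the snippets below; in practice
// it would come from `query`/`join` rather than being empty.
val df: DataFrame = spark.emptyDataFrame
```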
Value Members
- final def !=(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- final def ##(): Int
  Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  Definition Classes: Any
- def clone(): AnyRef
  Attributes: protected[lang]
  Definition Classes: AnyRef
  Annotations: @throws( ... ) @native()
- def count(jDF: DataFrame): Long
  Definition Classes: SparkExecutor → QueryExecutor
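A hedged usage sketch for `count`, reusing `executor` and the placeholder `df` from the constructor sketch above; that it returns the row count of the given DataFrame is an assumption based on the signature.

```scala
// Presumably returns the number of rows in the intermediate result.
val nbRows: Long = executor.count(df)
println(s"result rows: $nbRows")
```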
- final def eq(arg0: AnyRef): Boolean
  Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- def finalize(): Unit
  Attributes: protected[lang]
  Definition Classes: AnyRef
  Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  Definition Classes: AnyRef → Any
  Annotations: @native()
- def getType: DataFrame
- def groupBy(jDF: Any, groupBys: (ListBuffer[String], Set[(String, String)])): DataFrame
  Definition Classes: SparkExecutor → QueryExecutor
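A sketch of a `groupBy` call, reusing `executor` and `df` from above. Reading the tuple as (grouping variables, set of (column, aggregate-function) pairs) is an assumption, as is the order of elements inside each pair.

```scala
import scala.collection.mutable.ListBuffer

// Assumed encoding: grouping columns in the ListBuffer, aggregations as
// (column, function) pairs; verify against the implementation before relying on it.
val groupingVars = ListBuffer("city")
val aggregates   = Set(("population", "sum"))

val grouped = executor.groupBy(df, (groupingVars, aggregates))
```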
- def hashCode(): Int
  Definition Classes: AnyRef → Any
  Annotations: @native()
- final def isInstanceOf[T0]: Boolean
  Definition Classes: Any
- def join(joins: ArrayListMultimap[String, (String, String)], prefixes: Map[String, String], star_df: Map[String, DataFrame]): DataFrame
  Definition Classes: SparkExecutor → QueryExecutor
- def joinReordered(joins: ArrayListMultimap[String, (String, String)], prefixes: Map[String, String], star_df: Map[String, DataFrame], startingJoin: (String, (String, String)), starWeights: Map[String, Double]): DataFrame
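A shape-only sketch of `join`: the multimap keys, the (other star, joining predicate) reading of its values, and the two DataFrames are all assumptions used to illustrate the parameter types. `joinReordered` takes the same inputs plus an explicit starting join and per-star weights.

```scala
import com.google.common.collect.ArrayListMultimap
import org.apache.spark.sql.DataFrame

// Placeholder per-star DataFrames, e.g. obtained from `query`.
val personDf: DataFrame = ???
val cityDf: DataFrame   = ???

// Assumed encoding: star variable -> (other star variable, joining predicate).
val joins: ArrayListMultimap[String, (String, String)] = ArrayListMultimap.create()
joins.put("?person", ("?city", "ex:livesIn"))

val prefixes = Map("ex" -> "http://example.com/")
val starDfs  = Map("?person" -> personDf, "?city" -> cityDf)

val joined: DataFrame = executor.join(joins, prefixes, starDfs)
```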
- def limit(jDF: Any, limitValue: Int): DataFrame
  Definition Classes: SparkExecutor → QueryExecutor
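A one-line sketch corresponding to a LIMIT clause, reusing `executor` and `df` from above; the value 10 is arbitrary.

```scala
// Keep only the first 10 rows of the intermediate result.
val limited = executor.limit(df, 10)
```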
- val logger: Logger
  Definition Classes: SparkExecutor → QueryExecutor
- final def ne(arg0: AnyRef): Boolean
  Definition Classes: AnyRef
- final def notify(): Unit
  Definition Classes: AnyRef
  Annotations: @native()
- final def notifyAll(): Unit
  Definition Classes: AnyRef
  Annotations: @native()
- def orderBy(jDF: Any, direction: String, variable: String): DataFrame
  Definition Classes: SparkExecutor → QueryExecutor
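A sketch of `orderBy`, reusing `executor` and `df` from above. The exact strings expected for `direction`, and whether `variable` is a SPARQL variable or a column name, are not documented on this page, so both values below are assumptions.

```scala
// Assumed direction/variable tokens; check the implementation for the exact values.
val ordered = executor.orderBy(df, "asc", "age")
```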
- def project(jDF: Any, columnNames: Seq[String], distinct: Boolean): DataFrame
  Definition Classes: SparkExecutor → QueryExecutor
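A sketch of `project`, assuming `columnNames` are the selected columns and `distinct = true` additionally deduplicates rows; it reuses `executor` and `df` from above.

```scala
// Keep only the listed columns; deduplicate when `distinct` is true.
val projected = executor.project(df, Seq("name", "age"), distinct = true)
```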
- def query(sources: Set[(HashMap[String, String], String, String, HashMap[String, (String, Boolean)])], optionsMap_entity: HashMap[String, (Map[String, String], String)], toJoinWith: Boolean, star: String, prefixes: Map[String, String], select: List[String], star_predicate_var: HashMap[(String, String), String], neededPredicates: Set[String], filters: ArrayListMultimap[String, (String, String)], leftJoinTransformations: (String, Array[String]), rightJoinTransformations: Array[String], joinPairs: Map[(String, String), String]): (DataFrame, Integer, String)
  Definition Classes: SparkExecutor → QueryExecutor
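Because `query` takes the full per-star context produced by query parsing and the mappings, hand-writing realistic arguments is impractical; the sketch below only shows the parameter shapes (with empty placeholders) and the (DataFrame, Integer, String) result. Treating the `HashMap`s as `scala.collection.mutable.HashMap` is an assumption.

```scala
import scala.collection.mutable.HashMap
import com.google.common.collect.ArrayListMultimap

// All arguments are empty/placeholder stand-ins that only demonstrate the types;
// in practice they come from the parsed query and the mappings file.
val (starDf, nbFilters, parSetId) = executor.query(
  sources                  = Set.empty,
  optionsMap_entity        = HashMap.empty,
  toJoinWith               = true,
  star                     = "?person",
  prefixes                 = Map.empty,
  select                   = List("name"),
  star_predicate_var       = HashMap.empty,
  neededPredicates         = Set.empty,
  filters                  = ArrayListMultimap.create[String, (String, String)](),
  leftJoinTransformations  = null, // placeholder: no left-join transformation
  rightJoinTransformations = Array.empty,
  joinPairs                = Map.empty
)
```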
- def run(jDF: Any): Unit
  Definition Classes: SparkExecutor → QueryExecutor
- def schemaOf(jDF: DataFrame): Unit
- def show(jDF: Any): Unit
  Definition Classes: SparkExecutor → QueryExecutor
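A sketch of the inspection/execution helpers, reusing `executor` and `df` from above; what each method prints or triggers is an assumption based on its name and `Unit` return type.

```scala
// In practice `df` here would be the final joined/projected result.
executor.schemaOf(df)  // presumably prints the schema
executor.show(df)      // presumably prints a sample of rows
executor.run(df)       // presumably triggers execution of the final result
```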
- final def synchronized[T0](arg0: ⇒ T0): T0
  Definition Classes: AnyRef
- def toString(): String
  Definition Classes: AnyRef → Any
- def transform(df: Any, column: String, transformationsArray: Array[String]): DataFrame
  Definition Classes: SparkExecutor → QueryExecutor
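A sketch of `transform`, reusing `executor` and `df` from above; the tokens accepted in `transformationsArray` are defined by the implementation and the mappings, so the string below is purely a placeholder.

```scala
// Apply a (hypothetical) transformation token to the "name" column.
val transformed = executor.transform(df, "name", Array("toUpperCase"))
```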
- final def wait(): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... ) @native()