ai.chronon.spark.TableUtils
Related Doc: package spark

case class TableUtils(sparkSession: SparkSession) extends Product with Serializable
Linear Supertypes: Serializable, Serializable, Product, Equals, AnyRef, Any

Ordering: Alphabetic | By Inheritance
Inherited: TableUtils, Serializable, Serializable, Product, Equals, AnyRef, Any
Hide All | Show All
Visibility: Public | All
Instance Constructors

new TableUtils(sparkSession: SparkSession)
Value Members

final def !=(arg0: Any): Boolean
  Definition Classes: AnyRef → Any

final def ##(): Int
  Definition Classes: AnyRef → Any

final def ==(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
val aggregationParallelism: Int

def allPartitions(tableName: String, partitionColumnsFilter: Seq[String] = Seq.empty): Seq[Map[String, String]]
def archiveOrDropTableIfExists(tableName: String, timestamp: Option[Instant]): Unit

def archiveTableIfExists(tableName: String, timestamp: Option[Instant]): Unit
final def asInstanceOf[T0]: T0
  Definition Classes: Any

val backfillValidationEnforced: Boolean

val blockingCacheEviction: Boolean

val bloomFilterThreshold: Long

val cacheLevel: Option[StorageLevel]

val cacheLevelString: String
def checkTablePermission(tableName: String, fallbackPartition: String = ...): Boolean

def chunk(partitions: Set[String]): Seq[PartitionRange]
def clone(): AnyRef
  Attributes: protected[java.lang]
  Definition Classes: AnyRef
  Annotations: @throws(...)

def columnSizeEstimator(dataType: DataType): Long
def dropPartitionRange(tableName: String, startDate: String, endDate: String, subPartitionFilters: Map[String, String] = Map.empty): Unit

def dropPartitions(tableName: String, partitions: Seq[String], partitionColumn: String = partitionColumn, subPartitionFilters: Map[String, String] = Map.empty): Unit

def dropTableIfExists(tableName: String): Unit
final def eq(arg0: AnyRef): Boolean
  Definition Classes: AnyRef

def finalize(): Unit
  Attributes: protected[java.lang]
  Definition Classes: AnyRef
  Annotations: @throws(classOf[java.lang.Throwable])
def firstAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty): Option[String]

final def getClass(): Class[_]
  Definition Classes: AnyRef → Any
def getColumnsFromQuery(query: String): Seq[String]

def getFieldNames(schema: StructType): Seq[String]

def getSchemaFromTable(tableName: String): StructType

def getTableProperties(tableName: String): Option[Map[String, String]]

def ifPartitionExistsInTable(tableName: String, partition: String): Boolean
def insertPartitions(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, partitionColumns: Seq[String] = Seq(partitionColumn), saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET", autoExpand: Boolean = false, stats: Option[DfStats] = None): Unit

def insertUnPartitioned(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET"): Unit
final def isInstanceOf[T0]: Boolean
  Definition Classes: Any

def isPartitioned(tableName: String): Boolean

val joinPartParallelism: Int

def lastAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty): Option[String]

def loadEntireTable(tableName: String): DataFrame

lazy val logger: Logger

val maxWait: Int
final def ne(arg0: AnyRef): Boolean
  Definition Classes: AnyRef

final def notify(): Unit
  Definition Classes: AnyRef

final def notifyAll(): Unit
  Definition Classes: AnyRef

def parsePartition(pstring: String): Map[String, String]

val partitionColumn: String

val partitionSpec: PartitionSpec
def partitions(tableName: String, subPartitionsFilter: Map[String, String] = Map.empty): Seq[String]

def preAggRepartition(rdd: RDD[Row]): RDD[Row]

def preAggRepartition(df: DataFrame): DataFrame

val sparkSession: SparkSession

def sql(query: String): DataFrame
final def synchronized[T0](arg0: ⇒ T0): T0
  Definition Classes: AnyRef

def tableExists(tableName: String): Boolean

def unfilledRanges(outputTable: String, outputPartitionRange: PartitionRange, inputTables: Option[Seq[String]] = None, inputTableToSubPartitionFiltersMap: Map[String, Map[String, String]] = Map.empty, inputToOutputShift: Int = 0, skipFirstHole: Boolean = true): Option[Seq[PartitionRange]]
final def wait(): Unit
  Definition Classes: AnyRef
  Annotations: @throws(...)

final def wait(arg0: Long, arg1: Int): Unit
  Definition Classes: AnyRef
  Annotations: @throws(...)

final def wait(arg0: Long): Unit
  Definition Classes: AnyRef
  Annotations: @throws(...)

def wrapWithCache[T](opString: String, dataFrame: DataFrame)(func: ⇒ T): Try[T]
Deprecated Value Members

def dropPartitionsAfterHole(inputTable: String, outputTable: String, partitionRange: PartitionRange, subPartitionFilters: Map[String, String] = Map.empty): Option[String]
  Annotations: @deprecated
  Deprecated

Inherited from: Serializable, Serializable, Product, Equals, AnyRef, Any
Ungrouped