Class SparkSqlTestBase
- java.lang.Object
-
- org.projectnessie.spark.extensions.SparkSqlTestBase
-
- Direct Known Subclasses:
AbstractNessieSparkSqlExtensionTest
public abstract class SparkSqlTestBase extends java.lang.Object
-
-
Field Summary
Fields
Modifier and Type | Field | Description
protected java.lang.String
additionalRefName
protected org.projectnessie.client.api.NessieApiV1
api
protected static org.apache.spark.SparkConf
conf
protected boolean
first
protected org.projectnessie.model.Branch
initialDefaultBranch
protected static java.lang.String
NON_NESSIE_CATALOG
protected java.lang.String
refName
protected static org.apache.spark.sql.SparkSession
spark
protected static java.lang.String
url
-
Constructor Summary
Constructors
Constructor | Description
SparkSqlTestBase()
-
Method Summary
All Methods | Static Methods | Instance Methods | Abstract Methods | Concrete Methods
Modifier and Type | Method | Description
protected java.util.List<org.projectnessie.spark.extensions.SparkCommitLogEntry>
commitAndReturnLog(java.lang.String branch, java.lang.String initialHashOrBranch)
protected java.util.List<org.projectnessie.spark.extensions.SparkCommitLogEntry>
createBranchCommitAndReturnLog()
protected void
createBranchForTest(java.lang.String branchName)
protected void
createTagForTest(java.lang.String tagName)
protected java.lang.String
defaultBranch()
protected java.lang.String
defaultHash()
protected java.util.List<org.projectnessie.spark.extensions.SparkCommitLogEntry>
fetchLog(java.lang.String branch)
protected java.util.Map<java.lang.String,java.lang.String>
nessieParams()
protected boolean
requiresCommonAncestor()
protected static java.lang.Object[]
row(java.lang.Object... values)
This looks weird but it gives a clear semantic way to turn a list of objects into a 'row' for Spark assertions.
protected void
setupSparkAndApi(org.junit.jupiter.api.TestInfo testInfo)
protected java.util.Map<java.lang.String,java.lang.String>
sparkHadoop()
protected static java.util.List<java.lang.Object[]>
sql(java.lang.String query, java.lang.Object... args)
protected static java.util.List<java.lang.Object[]>
sqlWithEmptyCache(java.lang.String query, java.lang.Object... args)
protected static java.lang.Object[]
toJava(org.apache.spark.sql.Row row)
protected abstract java.lang.String
warehouseURI()
-
-
-
Field Detail
-
NON_NESSIE_CATALOG
protected static final java.lang.String NON_NESSIE_CATALOG
- See Also:
- Constant Field Values
-
conf
protected static org.apache.spark.SparkConf conf
-
spark
protected static org.apache.spark.sql.SparkSession spark
-
url
protected static java.lang.String url
-
first
protected boolean first
-
initialDefaultBranch
protected org.projectnessie.model.Branch initialDefaultBranch
-
refName
protected java.lang.String refName
-
additionalRefName
protected java.lang.String additionalRefName
-
api
protected org.projectnessie.client.api.NessieApiV1 api
-
-
Method Detail
-
warehouseURI
protected abstract java.lang.String warehouseURI()
-
sparkHadoop
protected java.util.Map<java.lang.String,java.lang.String> sparkHadoop()
-
nessieParams
protected java.util.Map<java.lang.String,java.lang.String> nessieParams()
-
requiresCommonAncestor
protected boolean requiresCommonAncestor()
-
setupSparkAndApi
@BeforeEach protected void setupSparkAndApi(org.junit.jupiter.api.TestInfo testInfo) throws org.projectnessie.error.NessieNotFoundException, org.projectnessie.error.NessieConflictException
- Throws:
org.projectnessie.error.NessieNotFoundException
org.projectnessie.error.NessieConflictException
-
defaultBranch
protected java.lang.String defaultBranch()
-
defaultHash
protected java.lang.String defaultHash()
-
sql
@FormatMethod protected static java.util.List<java.lang.Object[]> sql(java.lang.String query, java.lang.Object... args)
-
sqlWithEmptyCache
@FormatMethod protected static java.util.List<java.lang.Object[]> sqlWithEmptyCache(java.lang.String query, java.lang.Object... args)
-
toJava
protected static java.lang.Object[] toJava(org.apache.spark.sql.Row row)
-
row
protected static java.lang.Object[] row(java.lang.Object... values)
This looks weird but it gives a clear semantic way to turn a list of objects into a 'row' for Spark assertions.
-
fetchLog
protected java.util.List<org.projectnessie.spark.extensions.SparkCommitLogEntry> fetchLog(java.lang.String branch)
-
createBranchForTest
protected void createBranchForTest(java.lang.String branchName) throws org.projectnessie.error.NessieNotFoundException
- Throws:
org.projectnessie.error.NessieNotFoundException
-
createTagForTest
protected void createTagForTest(java.lang.String tagName) throws org.projectnessie.error.NessieNotFoundException
- Throws:
org.projectnessie.error.NessieNotFoundException
-
createBranchCommitAndReturnLog
protected java.util.List<org.projectnessie.spark.extensions.SparkCommitLogEntry> createBranchCommitAndReturnLog() throws org.projectnessie.error.NessieConflictException, org.projectnessie.error.NessieNotFoundException
- Throws:
org.projectnessie.error.NessieConflictException
org.projectnessie.error.NessieNotFoundException
-
commitAndReturnLog
protected java.util.List<org.projectnessie.spark.extensions.SparkCommitLogEntry> commitAndReturnLog(java.lang.String branch, java.lang.String initialHashOrBranch) throws org.projectnessie.error.NessieNotFoundException, org.projectnessie.error.NessieConflictException
- Throws:
org.projectnessie.error.NessieNotFoundException
org.projectnessie.error.NessieConflictException
-
-