object LakeFSContext
- Alphabetic
- By Inheritance
- LakeFSContext
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- val COMMITS_LOCATION_KEY: String
- val DEFAULT_GC_UNCOMMITTED_MIN_AGE_SECONDS: Int
- val DEFAULT_LAKEFS_CONF_GC_NUM_ADDRESS_PARTITIONS: Int
- val DEFAULT_LAKEFS_CONF_GC_NUM_COMMIT_PARTITIONS: Int
- val DEFAULT_LAKEFS_CONF_GC_NUM_RANGE_PARTITIONS: Int
- val LAKEFS_CONF_API_ACCESS_KEY_KEY: String
- val LAKEFS_CONF_API_CONNECTION_TIMEOUT_SEC_KEY: String
- val LAKEFS_CONF_API_READ_TIMEOUT_SEC_KEY: String
- val LAKEFS_CONF_API_SECRET_KEY_KEY: String
- val LAKEFS_CONF_API_URL_KEY: String
- val LAKEFS_CONF_DEBUG_GC_MAX_COMMIT_EPOCH_SECONDS_KEY: String
- val LAKEFS_CONF_DEBUG_GC_MAX_COMMIT_ISO_DATETIME_KEY: String
- val LAKEFS_CONF_DEBUG_GC_NO_DELETE_KEY: String
- val LAKEFS_CONF_DEBUG_GC_REPRODUCE_RUN_ID_KEY: String
- val LAKEFS_CONF_DEBUG_GC_UNCOMMITTED_MIN_AGE_SECONDS_KEY: String
- val LAKEFS_CONF_GC_DO_MARK: String
- val LAKEFS_CONF_GC_DO_SWEEP: String
- val LAKEFS_CONF_GC_MARK_ID: String
- val LAKEFS_CONF_GC_NUM_ADDRESS_PARTITIONS: String
- val LAKEFS_CONF_GC_NUM_COMMIT_PARTITIONS: String
- val LAKEFS_CONF_GC_NUM_RANGE_PARTITIONS: String
- val LAKEFS_CONF_JOB_COMMIT_ID_KEY: String
- val LAKEFS_CONF_JOB_REPO_NAME_KEY: String
- val LAKEFS_CONF_JOB_SOURCE_NAME_KEY: String
- val LAKEFS_CONF_JOB_STORAGE_NAMESPACE_KEY: String
- val MARK_ID_KEY: String
- val RUN_ID_KEY: String
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def newDF(spark: SparkSession, params: LakeFSJobParams): DataFrame
- def newRDD(sc: SparkContext, params: LakeFSJobParams): RDD[(Array[Byte], WithIdentifier[Entry])]
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
Deprecated Value Members
-
def
newDF(spark: SparkSession, repoName: String, commitID: String = ""): DataFrame
Returns all entries in all ranges of the given commit, as a DataFrame.
Returns all entries in all ranges of the given commit, as a DataFrame. If no commit is given, returns all entries in all ranges of the entire repository. The same entry may be found in multiple ranges.
- Deprecated
use LakeFSContext.newDF(SparkSession,LakeFSJobParams) instead.
-
def
newDF(spark: SparkSession, storageNamespace: String): DataFrame
Returns all entries in all ranges found in this storage namespace.
Returns all entries in all ranges found in this storage namespace. The same entry may be found in multiple ranges.
The storage namespace is expected to be a URI accessible by Hadoop.
- Deprecated
use LakeFSContext.newDF(SparkSession,LakeFSJobParams) instead.
-
def
newRDD(sc: SparkContext, repoName: String, commitID: String = ""): RDD[(Array[Byte], WithIdentifier[Entry])]
Returns all entries in all ranges of the given commit, as an RDD.
Returns all entries in all ranges of the given commit, as an RDD. If no commit is given, returns all entries in all ranges of the entire repository. The same entry may be found in multiple ranges.
- Deprecated
use LakeFSContext.newRDD(SparkContext,LakeFSJobParams) instead.