trait Implementation extends AnyRef
Linear Supertypes: AnyRef, Any
Abstract Value Members
- protected abstract def calcDatasetRow(cache: Cache, i: Int): Row
- protected abstract def dumpDatasetBytes(os: OutputStream, dataset: Dataset): Unit
- protected abstract def extractDatasetRow(dataset: Dataset, i: Int): Row
- protected abstract def hashimoto(seedBytes: Array[Byte], fullSize: Long, datasetAccessor: (Int) ⇒ Row): Hashimoto
- protected abstract def mkCache(cacheSize: Long, seed: Array[Byte]): Cache
- protected abstract def readDatasetBytes(is: InputStream, mbInitSize: Option[Long]): Dataset
- protected implicit abstract val rowClassTag: ClassTag[Row]
- protected abstract def toDataset(array: Array[Row]): Dataset
- protected abstract def writeRow(row: Row): Array[Byte]
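The abstract members above are the low-level hooks a concrete implementation must supply: how rows are computed from the cache, how they are packed to and from bytes, and how an array of rows becomes a Dataset. The skeleton below is only an illustrative sketch, not taken from the library: it assumes (as the signatures and the ClassTag requirement suggest) that Cache, Dataset, and Row are abstract type members of the trait, that the sketch lives in the enclosing ethash23 package so Implementation and Hashimoto are in scope, and that Array[Int] rows are merely one possible representation. Every body is left unimplemented.

    import java.io.{ InputStream, OutputStream }
    import scala.reflect.ClassTag

    // Hypothetical skeleton, for illustration only; not a real subclass from the library.
    object SketchImplementation extends Implementation {
      // Assumed abstract type members; the concrete representations chosen here are arbitrary.
      type Row     = Array[Int]
      type Cache   = Array[Array[Int]]
      type Dataset = Array[Array[Int]]

      // A ClassTag for Row is required so arrays of rows can be constructed generically.
      protected implicit val rowClassTag : ClassTag[Row] = implicitly[ClassTag[Array[Int]]]

      // Each stub below would hold the real ethash logic; ??? just marks it unimplemented.
      protected def mkCache( cacheSize : Long, seed : Array[Byte] ) : Cache = ???
      protected def calcDatasetRow( cache : Cache, i : Int ) : Row = ???
      protected def extractDatasetRow( dataset : Dataset, i : Int ) : Row = ???
      protected def toDataset( array : Array[Row] ) : Dataset = ???
      protected def writeRow( row : Row ) : Array[Byte] = ???
      protected def dumpDatasetBytes( os : OutputStream, dataset : Dataset ) : Unit = ???
      protected def readDatasetBytes( is : InputStream, mbInitSize : Option[Long] ) : Dataset = ???
      protected def hashimoto( seedBytes : Array[Byte], fullSize : Long, datasetAccessor : Int => Row ) : Hashimoto = ???
    }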
Concrete Value Members
- final def !=(arg0: Any): Boolean (Definition Classes: AnyRef → Any)
- final def ##(): Int (Definition Classes: AnyRef → Any)
- final def ==(arg0: Any): Boolean (Definition Classes: AnyRef → Any)
- final def asInstanceOf[T0]: T0 (Definition Classes: Any)
- def blocksRemainingInEpoch(blockNumber: Long): Long
- def cacheDataset(seed: Array[Byte], dataset: Dataset): Failable[Unit]
- def calcDataset(cache: Cache, fullSize: Long)(implicit mf: Factory): Dataset
- def calcDatasetForBlock(blockNumber: Long)(implicit mf: Factory): Dataset
- def calcDatasetForEpoch(epochNumber: Long)(implicit mf: Factory): Dataset
- protected[ethash23] final def calcDatasetParallel(cache: Cache, fullSize: Long)(mf: Factory): Dataset
- protected[ethash23] final def calcDatasetSequential(cache: Cache, fullSize: Long)(mf: Factory): Dataset
- protected[java.lang] def clone(): AnyRef (Definition Classes: AnyRef; Annotations: @throws( ... ) @native() @HotSpotIntrinsicCandidate())
- protected[ethash23] final def datasetLen(fullSize: Long): Int
- protected[ethash23] def doCalcDataset(cache: Cache, fullSize: Long)(mf: Factory): Dataset
- def epochFromBlock(blockNumber: Long): Long
- final def eq(arg0: AnyRef): Boolean (Definition Classes: AnyRef)
- def equals(arg0: Any): Boolean (Definition Classes: AnyRef → Any)
- def getCacheSizeForBlock(blockNumber: Long): Long
- def getCacheSizeForEpoch(epochNumber: Long): Long
- final def getClass(): Class[_] (Definition Classes: AnyRef → Any; Annotations: @native() @HotSpotIntrinsicCandidate())
- def getFullSizeForBlock(blockNumber: Long): Long
- def getFullSizeForEpoch(epochNumber: Long): Long
- def hashCode(): Int (Definition Classes: AnyRef → Any; Annotations: @native() @HotSpotIntrinsicCandidate())
- def hashimotoFull(header: Header, dataset: Dataset, nonce: Unsigned64): Hashimoto
- def hashimotoLight(header: Header, cache: Cache, nonce: Unsigned64): Hashimoto
- final def isInstanceOf[T0]: Boolean (Definition Classes: Any)
- protected[ethash23] val isParallel: Boolean
- def loadDagFile(seed: Array[Byte]): Failable[Dataset]
- def mkCacheForBlock(blockNumber: Long): Cache
- def mkCacheForEpoch(epochNumber: Long): Cache
- final def ne(arg0: AnyRef): Boolean (Definition Classes: AnyRef)
- final def notify(): Unit (Definition Classes: AnyRef; Annotations: @native() @HotSpotIntrinsicCandidate())
- final def notifyAll(): Unit (Definition Classes: AnyRef; Annotations: @native() @HotSpotIntrinsicCandidate())
- def precomputeCacheDatasetForBlockNumber(blockNumber: Long)(implicit mf: Factory): Failable[Unit]
- def precomputeCacheDatasetForEpochNumber(epochNumber: Long)(implicit mf: Factory): Failable[Unit]
- def readDagFile(is: InputStream, mbFileLength: Option[Long]): Dataset
- protected[ethash23] def requireValidInt(l: Long): Int
- protected[ethash23] def requireValidLong(bi: BigInt): Long
- def streamDagFileForBlockNumber(blockNumber: Long, file: Option[File])(implicit mf: Factory): Failable[Unit]
- def streamDagFileForBlockNumber(blockNumber: Long)(implicit mf: Factory): Failable[Unit]
- def streamDagFileForEpochNumber(epochNumber: Long, mbSeed: Option[Array[Byte]], mbCache: Option[Cache], mbFile: Option[File])(implicit mf: Factory): Failable[Unit]
- def streamDagFileForEpochNumber(epochNumber: Long, mbFile: Option[File])(implicit mf: Factory): Failable[Unit]
- def streamDagFileForEpochNumber(epochNumber: Long)(implicit mf: Factory): Failable[Unit]
- def streamDatasetAsDagFile(os: OutputStream, cache: Cache, fullSize: Long)(implicit mf: Factory): Unit
- final def synchronized[T0](arg0: ⇒ T0): T0 (Definition Classes: AnyRef)
- def toString(): String (Definition Classes: AnyRef → Any)
- def truncatedHeaderHash(header: Header): Keccak256
- final def wait(arg0: Long, arg1: Int): Unit (Definition Classes: AnyRef; Annotations: @throws( ... ))
- final def wait(arg0: Long): Unit (Definition Classes: AnyRef; Annotations: @throws( ... ) @native())
- final def wait(): Unit (Definition Classes: AnyRef; Annotations: @throws( ... ))
- def writeDagFile(os: OutputStream, dataset: Dataset): Unit
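The epoch helpers listed above (epochFromBlock, blocksRemainingInEpoch, getCacheSizeForEpoch, getFullSizeForEpoch) expose the usual per-epoch Ethash sizing. The following is a minimal usage sketch, assuming some concrete Implementation instance is available; it is not taken from the library's own documentation, and the byte interpretation of the sizes is an assumption.

    // Minimal sketch; `impl` is assumed to be a concrete Implementation instance.
    def describeEpoch( impl : Implementation, blockNumber : Long ) : String = {
      val epoch     = impl.epochFromBlock( blockNumber )          // epoch containing this block
      val remaining = impl.blocksRemainingInEpoch( blockNumber )  // blocks left before the epoch rolls over
      val cacheSize = impl.getCacheSizeForEpoch( epoch )          // light-verification cache size (presumably in bytes)
      val fullSize  = impl.getFullSizeForEpoch( epoch )           // full dataset (DAG) size (presumably in bytes)
      s"block $blockNumber: epoch $epoch, $remaining blocks remaining, cache $cacheSize, dataset $fullSize"
    }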
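hashimotoLight and hashimotoFull appear to mirror the standard Ethash split between light verification (cache only) and full evaluation (complete dataset). Below is a speculative sketch of both paths; the surrounding values (impl, blockNumber, header, nonce, and the implicit Factory required by calcDatasetForBlock) are assumptions, and the flow is illustrative rather than the library's documented usage.

    // Speculative sketch; `impl`, `blockNumber`, `header`, `nonce` and an implicit
    // Factory (needed by calcDatasetForBlock) are assumed to exist in the enclosing scope.
    val cache   = impl.mkCacheForBlock( blockNumber )           // small cache, cheap enough for verification
    val light   = impl.hashimotoLight( header, cache, nonce )   // evaluate via the cache only

    val dataset = impl.calcDatasetForBlock( blockNumber )       // full DAG, expensive to generate
    val full    = impl.hashimotoFull( header, dataset, nonce )  // evaluate against the full dataset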
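Several members deal with persisting the dataset as a DAG file (streamDagFileForEpochNumber, writeDagFile, readDagFile, loadDagFile) or pre-warming it (cacheDataset, precomputeCacheDatasetForEpochNumber). Below is a hedged sketch of precomputing an upcoming epoch's DAG; the epoch number and file path are hypothetical, and Failable is treated only as a success/failure wrapper, as its name suggests.

    // Hedged sketch; assumes `impl` is a concrete Implementation, an implicit Factory is
    // in scope, and Failable is a success/failure container (not verified here).
    val epochNumber = 300L  // hypothetical epoch, for illustration only

    // Stream the epoch's full dataset to the implementation's default DAG file location.
    val streamed : Failable[Unit] = impl.streamDagFileForEpochNumber( epochNumber )

    // Or target an explicit file (hypothetical path), using the Option[File] overload.
    val toFile = impl.streamDagFileForEpochNumber( epochNumber, Some( new java.io.File( "/tmp/ethash-dag.bin" ) ) )

    // Or precompute and retain both cache and dataset for the next epoch in one call.
    val precomputed : Failable[Unit] = impl.precomputeCacheDatasetForEpochNumber( epochNumber + 1 )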
Deprecated Value Members
- protected[java.lang] def finalize(): Unit (Definition Classes: AnyRef; Annotations: @throws( classOf[java.lang.Throwable] ) @Deprecated @deprecated; Deprecated: (Since version ) see corresponding Javadoc for more information.)