@@ -46,51 +46,51 @@ using namespace aria2::expr;
namespace aria2 {

BitfieldMan::BitfieldMan(size_t blockLength, uint64_t totalLength)
- :blockLength(blockLength),
- totalLength(totalLength),
- bitfieldLength(0),
- blocks(0),
- filterEnabled(false),
- bitfield(0),
- useBitfield(0),
- filterBitfield(0),
- cachedNumMissingBlock(0),
- cachedNumFilteredBlock(0),
- cachedCompletedLength(0),
- cachedFilteredComletedLength(0),
- cachedFilteredTotalLength(0)
+ :_blockLength(blockLength),
+ _totalLength(totalLength),
+ _bitfieldLength(0),
+ _blocks(0),
+ _filterEnabled(false),
+ _bitfield(0),
+ _useBitfield(0),
+ _filterBitfield(0),
+ _cachedNumMissingBlock(0),
+ _cachedNumFilteredBlock(0),
+ _cachedCompletedLength(0),
+ _cachedFilteredCompletedLength(0),
+ _cachedFilteredTotalLength(0)
{
- if(blockLength > 0 && totalLength > 0) {
- blocks = totalLength/blockLength+(totalLength%blockLength ? 1 : 0);
- bitfieldLength = blocks/8+(blocks%8 ? 1 : 0);
- bitfield = new unsigned char[bitfieldLength];
- useBitfield = new unsigned char[bitfieldLength];
- memset(bitfield, 0, bitfieldLength);
- memset(useBitfield, 0, bitfieldLength);
+ if(_blockLength > 0 && _totalLength > 0) {
+ _blocks = _totalLength/_blockLength+(_totalLength%_blockLength ? 1 : 0);
+ _bitfieldLength = _blocks/8+(_blocks%8 ? 1 : 0);
+ _bitfield = new unsigned char[_bitfieldLength];
+ _useBitfield = new unsigned char[_bitfieldLength];
+ memset(_bitfield, 0, _bitfieldLength);
+ memset(_useBitfield, 0, _bitfieldLength);
updateCache();
}
}

BitfieldMan::BitfieldMan(const BitfieldMan& bitfieldMan)
- :blockLength(bitfieldMan.blockLength),
- totalLength(bitfieldMan.totalLength),
- bitfieldLength(bitfieldMan.bitfieldLength),
- blocks(bitfieldMan.blocks),
- filterEnabled(bitfieldMan.filterEnabled),
- bitfield(new unsigned char[bitfieldLength]),
- useBitfield(new unsigned char[bitfieldLength]),
- filterBitfield(0),
- cachedNumMissingBlock(0),
- cachedNumFilteredBlock(0),
- cachedCompletedLength(0),
- cachedFilteredComletedLength(0),
- cachedFilteredTotalLength(0)
+ :_blockLength(bitfieldMan._blockLength),
+ _totalLength(bitfieldMan._totalLength),
+ _bitfieldLength(bitfieldMan._bitfieldLength),
+ _blocks(bitfieldMan._blocks),
+ _filterEnabled(bitfieldMan._filterEnabled),
+ _bitfield(new unsigned char[_bitfieldLength]),
+ _useBitfield(new unsigned char[_bitfieldLength]),
+ _filterBitfield(0),
+ _cachedNumMissingBlock(0),
+ _cachedNumFilteredBlock(0),
+ _cachedCompletedLength(0),
+ _cachedFilteredCompletedLength(0),
+ _cachedFilteredTotalLength(0)
{
- memcpy(bitfield, bitfieldMan.bitfield, bitfieldLength);
- memcpy(useBitfield, bitfieldMan.useBitfield, bitfieldLength);
- if(filterEnabled) {
- filterBitfield = new unsigned char[bitfieldLength];
- memcpy(filterBitfield, bitfieldMan.filterBitfield, bitfieldLength);
+ memcpy(_bitfield, bitfieldMan._bitfield, _bitfieldLength);
+ memcpy(_useBitfield, bitfieldMan._useBitfield, _bitfieldLength);
+ if(_filterEnabled) {
+ _filterBitfield = new unsigned char[_bitfieldLength];
+ memcpy(_filterBitfield, bitfieldMan._filterBitfield, _bitfieldLength);
}
updateCache();
}
@@ -98,26 +98,26 @@ BitfieldMan::BitfieldMan(const BitfieldMan& bitfieldMan)
BitfieldMan& BitfieldMan::operator=(const BitfieldMan& bitfieldMan)
{
if(this != &bitfieldMan) {
- blockLength = bitfieldMan.blockLength;
- totalLength = bitfieldMan.totalLength;
- blocks = bitfieldMan.blocks;
- bitfieldLength = bitfieldMan.bitfieldLength;
- filterEnabled = bitfieldMan.filterEnabled;
-
- delete [] bitfield;
- bitfield = new unsigned char[bitfieldLength];
- memcpy(bitfield, bitfieldMan.bitfield, bitfieldLength);
-
- delete [] useBitfield;
- useBitfield = new unsigned char[bitfieldLength];
- memcpy(useBitfield, bitfieldMan.useBitfield, bitfieldLength);
-
- delete [] filterBitfield;
- if(filterEnabled) {
- filterBitfield = new unsigned char[bitfieldLength];
- memcpy(filterBitfield, bitfieldMan.filterBitfield, bitfieldLength);
+ _blockLength = bitfieldMan._blockLength;
+ _totalLength = bitfieldMan._totalLength;
+ _blocks = bitfieldMan._blocks;
+ _bitfieldLength = bitfieldMan._bitfieldLength;
+ _filterEnabled = bitfieldMan._filterEnabled;
+
+ delete [] _bitfield;
+ _bitfield = new unsigned char[_bitfieldLength];
+ memcpy(_bitfield, bitfieldMan._bitfield, _bitfieldLength);
+
+ delete [] _useBitfield;
+ _useBitfield = new unsigned char[_bitfieldLength];
+ memcpy(_useBitfield, bitfieldMan._useBitfield, _bitfieldLength);
+
+ delete [] _filterBitfield;
+ if(_filterEnabled) {
+ _filterBitfield = new unsigned char[_bitfieldLength];
+ memcpy(_filterBitfield, bitfieldMan._filterBitfield, _bitfieldLength);
} else {
- filterBitfield = 0;
+ _filterBitfield = 0;
}

updateCache();
@@ -126,16 +126,16 @@ BitfieldMan& BitfieldMan::operator=(const BitfieldMan& bitfieldMan)
}

BitfieldMan::~BitfieldMan() {
- delete [] bitfield;
- delete [] useBitfield;
- delete [] filterBitfield;
+ delete [] _bitfield;
+ delete [] _useBitfield;
+ delete [] _filterBitfield;
}

size_t BitfieldMan::getBlockLength(size_t index) const
{
- if(index == blocks-1) {
+ if(index == _blocks-1) {
return getLastBlockLength();
- } else if(index < blocks-1) {
+ } else if(index < _blocks-1) {
return getBlockLength();
} else {
return 0;
@@ -145,14 +145,14 @@ size_t BitfieldMan::getBlockLength(size_t index) const
bool BitfieldMan::hasMissingPiece
(const unsigned char* peerBitfield, size_t length) const
{
- if(bitfieldLength != length) {
+ if(_bitfieldLength != length) {
return false;
}
bool retval = false;
- for(size_t i = 0; i < bitfieldLength; ++i) {
- unsigned char temp = peerBitfield[i] & ~bitfield[i];
- if(filterEnabled) {
- temp &= filterBitfield[i];
+ for(size_t i = 0; i < _bitfieldLength; ++i) {
+ unsigned char temp = peerBitfield[i] & ~_bitfield[i];
+ if(_filterEnabled) {
+ temp &= _filterBitfield[i];
}
if(temp&0xff) {
retval = true;
@@ -164,37 +164,37 @@ bool BitfieldMan::hasMissingPiece

bool BitfieldMan::getFirstMissingUnusedIndex(size_t& index) const
{
- if(filterEnabled) {
+ if(_filterEnabled) {
return bitfield::getFirstMissingIndex
- (index, ~array(bitfield)&~array(useBitfield)&array(filterBitfield),
- blocks);
+ (index, ~array(_bitfield)&~array(_useBitfield)&array(_filterBitfield),
+ _blocks);
} else {
return bitfield::getFirstMissingIndex
- (index, ~array(bitfield)&~array(useBitfield), blocks);
+ (index, ~array(_bitfield)&~array(_useBitfield), _blocks);
}
}

size_t BitfieldMan::getFirstNMissingUnusedIndex
(std::vector<size_t>& out, size_t n) const
{
- if(filterEnabled) {
+ if(_filterEnabled) {
return bitfield::getFirstNMissingIndex
(std::back_inserter(out), n,
- ~array(bitfield)&~array(useBitfield)&array(filterBitfield), blocks);
+ ~array(_bitfield)&~array(_useBitfield)&array(_filterBitfield), _blocks);
} else {
return bitfield::getFirstNMissingIndex
(std::back_inserter(out), n,
- ~array(bitfield)&~array(useBitfield), blocks);
+ ~array(_bitfield)&~array(_useBitfield), _blocks);
}
}

bool BitfieldMan::getFirstMissingIndex(size_t& index) const
{
- if(filterEnabled) {
+ if(_filterEnabled) {
return bitfield::getFirstMissingIndex
- (index, ~array(bitfield)&array(filterBitfield), blocks);
+ (index, ~array(_bitfield)&array(_filterBitfield), _blocks);
} else {
- return bitfield::getFirstMissingIndex(index, ~array(bitfield), blocks);
+ return bitfield::getFirstMissingIndex(index, ~array(_bitfield), _blocks);
}
}
@@ -262,14 +262,14 @@ bool BitfieldMan::getSparseMissingUnusedIndex
const unsigned char* ignoreBitfield,
size_t ignoreBitfieldLength) const
{
- if(filterEnabled) {
+ if(_filterEnabled) {
return aria2::getSparseMissingUnusedIndex
- (index, array(ignoreBitfield)|~array(filterBitfield)|array(bitfield)|array(useBitfield),
- useBitfield, blocks);
+ (index, array(ignoreBitfield)|~array(_filterBitfield)|array(_bitfield)|array(_useBitfield),
+ _useBitfield, _blocks);
} else {
return aria2::getSparseMissingUnusedIndex
- (index, array(ignoreBitfield)|array(bitfield)|array(useBitfield),
- useBitfield, blocks);
+ (index, array(ignoreBitfield)|array(_bitfield)|array(_useBitfield),
+ _useBitfield, _blocks);
}
}
@@ -290,12 +290,12 @@ static bool copyBitfield(unsigned char* dst, const Array& src, size_t blocks)
bool BitfieldMan::getAllMissingIndexes(unsigned char* misbitfield, size_t len)
const
{
- assert(len == bitfieldLength);
- if(filterEnabled) {
+ assert(len == _bitfieldLength);
+ if(_filterEnabled) {
return copyBitfield
- (misbitfield, ~array(bitfield)&array(filterBitfield), blocks);
+ (misbitfield, ~array(_bitfield)&array(_filterBitfield), _blocks);
} else {
- return copyBitfield(misbitfield, ~array(bitfield), blocks);
+ return copyBitfield(misbitfield, ~array(_bitfield), _blocks);
}
}
@@ -303,18 +303,19 @@ bool BitfieldMan::getAllMissingIndexes(unsigned char* misbitfield, size_t len,
const unsigned char* peerBitfield,
size_t peerBitfieldLength) const
{
- assert(len == bitfieldLength);
- if(bitfieldLength != peerBitfieldLength) {
+ assert(len == _bitfieldLength);
+ if(_bitfieldLength != peerBitfieldLength) {
return false;
}
- if(filterEnabled) {
+ if(_filterEnabled) {
return copyBitfield
- (misbitfield, ~array(bitfield)&array(peerBitfield)&array(filterBitfield),
- blocks);
+ (misbitfield,
+ ~array(_bitfield)&array(peerBitfield)&array(_filterBitfield),
+ _blocks);
} else {
return copyBitfield
- (misbitfield, ~array(bitfield)&array(peerBitfield),
- blocks);
+ (misbitfield, ~array(_bitfield)&array(peerBitfield),
+ _blocks);
}
}
@@ -323,51 +324,52 @@ bool BitfieldMan::getAllMissingUnusedIndexes(unsigned char* misbitfield,
const unsigned char* peerBitfield,
size_t peerBitfieldLength) const
{
- assert(len == bitfieldLength);
- if(bitfieldLength != peerBitfieldLength) {
+ assert(len == _bitfieldLength);
+ if(_bitfieldLength != peerBitfieldLength) {
return false;
}
- if(filterEnabled) {
+ if(_filterEnabled) {
return copyBitfield
(misbitfield,
- ~array(bitfield)&~array(useBitfield)&array(peerBitfield)&array(filterBitfield),
- blocks);
+ ~array(_bitfield)&~array(_useBitfield)&array(peerBitfield)&
+ array(_filterBitfield),
+ _blocks);
} else {
return copyBitfield
(misbitfield,
- ~array(bitfield)&~array(useBitfield)&array(peerBitfield),
- blocks);
+ ~array(_bitfield)&~array(_useBitfield)&array(peerBitfield),
+ _blocks);
}
}

size_t BitfieldMan::countMissingBlock() const {
- return cachedNumMissingBlock;
+ return _cachedNumMissingBlock;
}

size_t BitfieldMan::countMissingBlockNow() const {
- if(filterEnabled) {
- array_ptr<unsigned char> temp(new unsigned char[bitfieldLength]);
- for(size_t i = 0; i < bitfieldLength; ++i) {
- temp[i] = bitfield[i]&filterBitfield[i];
+ if(_filterEnabled) {
+ array_ptr<unsigned char> temp(new unsigned char[_bitfieldLength]);
+ for(size_t i = 0; i < _bitfieldLength; ++i) {
+ temp[i] = _bitfield[i]&_filterBitfield[i];
}
- size_t count = bitfield::countSetBit(filterBitfield, blocks)-
- bitfield::countSetBit(temp, blocks);
+ size_t count = bitfield::countSetBit(_filterBitfield, _blocks)-
+ bitfield::countSetBit(temp, _blocks);
return count;
} else {
- return blocks-bitfield::countSetBit(bitfield, blocks);
+ return _blocks-bitfield::countSetBit(_bitfield, _blocks);
}
}

size_t BitfieldMan::countFilteredBlockNow() const {
- if(filterEnabled) {
- return bitfield::countSetBit(filterBitfield, blocks);
+ if(_filterEnabled) {
+ return bitfield::countSetBit(_filterBitfield, _blocks);
} else {
return 0;
}
}

bool BitfieldMan::setBitInternal(unsigned char* bitfield, size_t index, bool on) {
- if(blocks <= index) { return false; }
+ if(_blocks <= index) { return false; }
unsigned char mask = 128 >> (index%8);
if(on) {
bitfield[index/8] |= mask;
@@ -378,29 +380,29 @@ bool BitfieldMan::setBitInternal(unsigned char* bitfield, size_t index, bool on)
}

bool BitfieldMan::setUseBit(size_t index) {
- return setBitInternal(useBitfield, index, true);
+ return setBitInternal(_useBitfield, index, true);
}

bool BitfieldMan::unsetUseBit(size_t index) {
- return setBitInternal(useBitfield, index, false);
+ return setBitInternal(_useBitfield, index, false);
}

bool BitfieldMan::setBit(size_t index) {
- bool b = setBitInternal(bitfield, index, true);
+ bool b = setBitInternal(_bitfield, index, true);
updateCache();
return b;
}

bool BitfieldMan::unsetBit(size_t index) {
- bool b = setBitInternal(bitfield, index, false);
+ bool b = setBitInternal(_bitfield, index, false);
updateCache();
return b;
}

bool BitfieldMan::isFilteredAllBitSet() const {
- if(filterEnabled) {
- for(size_t i = 0; i < bitfieldLength; ++i) {
- if((bitfield[i]&filterBitfield[i]) != filterBitfield[i]) {
+ if(_filterEnabled) {
+ for(size_t i = 0; i < _bitfieldLength; ++i) {
+ if((_bitfield[i]&_filterBitfield[i]) != _filterBitfield[i]) {
return false;
}
}
@@ -426,77 +428,77 @@ static bool testAllBitSet

bool BitfieldMan::isAllBitSet() const
{
- return testAllBitSet(bitfield, bitfieldLength, blocks);
+ return testAllBitSet(_bitfield, _bitfieldLength, _blocks);
}

bool BitfieldMan::isAllFilterBitSet() const
{
- if(!filterBitfield) {
+ if(!_filterBitfield) {
return false;
}
- return testAllBitSet(filterBitfield, bitfieldLength, blocks);
+ return testAllBitSet(_filterBitfield, _bitfieldLength, _blocks);
}

bool BitfieldMan::isBitSet(size_t index) const
{
- return bitfield::test(bitfield, blocks, index);
+ return bitfield::test(_bitfield, _blocks, index);
}

bool BitfieldMan::isUseBitSet(size_t index) const
{
- return bitfield::test(useBitfield, blocks, index);
+ return bitfield::test(_useBitfield, _blocks, index);
}

void BitfieldMan::setBitfield(const unsigned char* bitfield, size_t bitfieldLength) {
- if(this->bitfieldLength != bitfieldLength) {
+ if(_bitfieldLength != bitfieldLength) {
return;
}
- memcpy(this->bitfield, bitfield, this->bitfieldLength);
- memset(this->useBitfield, 0, this->bitfieldLength);
+ memcpy(_bitfield, bitfield, _bitfieldLength);
+ memset(_useBitfield, 0, _bitfieldLength);
updateCache();
}

void BitfieldMan::clearAllBit() {
- memset(this->bitfield, 0, this->bitfieldLength);
+ memset(_bitfield, 0, _bitfieldLength);
updateCache();
}

void BitfieldMan::setAllBit() {
- for(size_t i = 0; i < blocks; ++i) {
- setBitInternal(bitfield, i, true);
+ for(size_t i = 0; i < _blocks; ++i) {
+ setBitInternal(_bitfield, i, true);
}
updateCache();
}

void BitfieldMan::clearAllUseBit() {
- memset(this->useBitfield, 0, this->bitfieldLength);
+ memset(_useBitfield, 0, _bitfieldLength);
updateCache();
}

void BitfieldMan::setAllUseBit() {
- for(size_t i = 0; i < blocks; ++i) {
- setBitInternal(useBitfield, i, true);
+ for(size_t i = 0; i < _blocks; ++i) {
+ setBitInternal(_useBitfield, i, true);
}
}

bool BitfieldMan::setFilterBit(size_t index) {
- return setBitInternal(filterBitfield, index, true);
+ return setBitInternal(_filterBitfield, index, true);
}

void BitfieldMan::ensureFilterBitfield()
{
- if(!filterBitfield) {
- filterBitfield = new unsigned char[bitfieldLength];
- memset(filterBitfield, 0, bitfieldLength);
+ if(!_filterBitfield) {
+ _filterBitfield = new unsigned char[_bitfieldLength];
+ memset(_filterBitfield, 0, _bitfieldLength);
}
}

void BitfieldMan::addFilter(uint64_t offset, uint64_t length) {
ensureFilterBitfield();
if(length > 0) {
- size_t startBlock = offset/blockLength;
- size_t endBlock = (offset+length-1)/blockLength;
- for(size_t i = startBlock; i <= endBlock && i < blocks; i++) {
+ size_t startBlock = offset/_blockLength;
+ size_t endBlock = (offset+length-1)/_blockLength;
+ for(size_t i = startBlock; i <= endBlock && i < _blocks; i++) {
setFilterBit(i);
}
}
@@ -506,10 +508,10 @@ void BitfieldMan::addFilter(uint64_t offset, uint64_t length) {
void BitfieldMan::removeFilter(uint64_t offset, uint64_t length) {
ensureFilterBitfield();
if(length > 0) {
- size_t startBlock = offset/blockLength;
- size_t endBlock = (offset+length-1)/blockLength;
- for(size_t i = startBlock; i <= endBlock && i < blocks; i++) {
- setBitInternal(filterBitfield, i, false);
+ size_t startBlock = offset/_blockLength;
+ size_t endBlock = (offset+length-1)/_blockLength;
+ for(size_t i = startBlock; i <= endBlock && i < _blocks; i++) {
+ setBitInternal(_filterBitfield, i, false);
}
}
updateCache();
@@ -518,16 +520,16 @@ void BitfieldMan::removeFilter(uint64_t offset, uint64_t length) {
void BitfieldMan::addNotFilter(uint64_t offset, uint64_t length)
{
ensureFilterBitfield();
- if(length > 0 && blocks > 0) {
- size_t startBlock = offset/blockLength;
- if(blocks <= startBlock) {
- startBlock = blocks;
+ if(length > 0 && _blocks > 0) {
+ size_t startBlock = offset/_blockLength;
+ if(_blocks <= startBlock) {
+ startBlock = _blocks;
}
- size_t endBlock = (offset+length-1)/blockLength;
+ size_t endBlock = (offset+length-1)/_blockLength;
for(size_t i = 0; i < startBlock; ++i) {
setFilterBit(i);
}
- for(size_t i = endBlock+1; i < blocks; ++i) {
+ for(size_t i = endBlock+1; i < _blocks; ++i) {
setFilterBit(i);
}
}
@@ -536,61 +538,61 @@ void BitfieldMan::addNotFilter(uint64_t offset, uint64_t length)

void BitfieldMan::enableFilter() {
ensureFilterBitfield();
- filterEnabled = true;
+ _filterEnabled = true;
updateCache();
}

void BitfieldMan::disableFilter() {
- filterEnabled = false;
+ _filterEnabled = false;
updateCache();
}

void BitfieldMan::clearFilter() {
- if(filterBitfield) {
- delete [] filterBitfield;
- filterBitfield = 0;
+ if(_filterBitfield) {
+ delete [] _filterBitfield;
+ _filterBitfield = 0;
}
- filterEnabled = false;
+ _filterEnabled = false;
updateCache();
}

uint64_t BitfieldMan::getFilteredTotalLengthNow() const {
- if(!filterBitfield) {
+ if(!_filterBitfield) {
return 0;
}
- size_t filteredBlocks = bitfield::countSetBit(filterBitfield, blocks);
+ size_t filteredBlocks = bitfield::countSetBit(_filterBitfield, _blocks);
if(filteredBlocks == 0) {
return 0;
}
- if(bitfield::test(filterBitfield, blocks, blocks-1)) {
- return ((uint64_t)filteredBlocks-1)*blockLength+getLastBlockLength();
+ if(bitfield::test(_filterBitfield, _blocks, _blocks-1)) {
+ return ((uint64_t)filteredBlocks-1)*_blockLength+getLastBlockLength();
} else {
- return ((uint64_t)filteredBlocks)*blockLength;
+ return ((uint64_t)filteredBlocks)*_blockLength;
}
}

uint64_t BitfieldMan::getCompletedLength(bool useFilter) const {
unsigned char* temp;
if(useFilter) {
- temp = new unsigned char[bitfieldLength];
- for(size_t i = 0; i < bitfieldLength; ++i) {
- temp[i] = bitfield[i];
- if(filterEnabled) {
- temp[i] &= filterBitfield[i];
+ temp = new unsigned char[_bitfieldLength];
+ for(size_t i = 0; i < _bitfieldLength; ++i) {
+ temp[i] = _bitfield[i];
+ if(_filterEnabled) {
+ temp[i] &= _filterBitfield[i];
}
}
} else {
- temp = bitfield;
+ temp = _bitfield;
}
- size_t completedBlocks = bitfield::countSetBit(temp, blocks);
+ size_t completedBlocks = bitfield::countSetBit(temp, _blocks);
uint64_t completedLength = 0;
if(completedBlocks == 0) {
completedLength = 0;
} else {
- if(bitfield::test(temp, blocks, blocks-1)) {
- completedLength = ((uint64_t)completedBlocks-1)*blockLength+getLastBlockLength();
+ if(bitfield::test(temp, _blocks, _blocks-1)) {
+ completedLength = ((uint64_t)completedBlocks-1)*_blockLength+getLastBlockLength();
} else {
- completedLength = ((uint64_t)completedBlocks)*blockLength;
+ completedLength = ((uint64_t)completedBlocks)*_blockLength;
}
}
if(useFilter) {
@@ -609,11 +611,11 @@ uint64_t BitfieldMan::getFilteredCompletedLengthNow() const {

void BitfieldMan::updateCache()
{
- cachedNumMissingBlock = countMissingBlockNow();
- cachedNumFilteredBlock = countFilteredBlockNow();
- cachedFilteredTotalLength = getFilteredTotalLengthNow();
- cachedCompletedLength = getCompletedLengthNow();
- cachedFilteredComletedLength = getFilteredCompletedLengthNow();
+ _cachedNumMissingBlock = countMissingBlockNow();
+ _cachedNumFilteredBlock = countFilteredBlockNow();
+ _cachedFilteredTotalLength = getFilteredTotalLengthNow();
+ _cachedCompletedLength = getCompletedLengthNow();
+ _cachedFilteredCompletedLength = getFilteredCompletedLengthNow();
}

bool BitfieldMan::isBitRangeSet(size_t startIndex, size_t endIndex) const
@@ -647,14 +649,14 @@ bool BitfieldMan::isBitSetOffsetRange(uint64_t offset, uint64_t length) const
if(length <= 0) {
return false;
}
- if(totalLength <= offset) {
+ if(_totalLength <= offset) {
return false;
}
- if(totalLength < offset+length) {
- length = totalLength-offset;
+ if(_totalLength < offset+length) {
+ length = _totalLength-offset;
}
- size_t startBlock = offset/blockLength;
- size_t endBlock = (offset+length-1)/blockLength;
+ size_t startBlock = offset/_blockLength;
+ size_t endBlock = (offset+length-1)/_blockLength;
for(size_t i = startBlock; i <= endBlock; i++) {
if(!isBitSet(i)) {
return false;
@@ -665,11 +667,11 @@ bool BitfieldMan::isBitSetOffsetRange(uint64_t offset, uint64_t length) const

uint64_t BitfieldMan::getMissingUnusedLength(size_t startingIndex) const
{
- if(startingIndex < 0 || blocks <= startingIndex) {
+ if(startingIndex < 0 || _blocks <= startingIndex) {
return 0;
}
uint64_t length = 0;
- for(size_t i = startingIndex; i < blocks; ++i) {
+ for(size_t i = startingIndex; i < _blocks; ++i) {
if(isBitSet(i) || isUseBitSet(i)) {
break;
}
|