Excerpts from the XrdOssCsi page-checksum handling code (the leading numbers are source line numbers; gaps indicate omitted lines):

54 EPNAME("UpdateRangeHoleUntilPage");
57 static const std::vector<uint32_t> crc32Vec(stsize_, crczero);
58 static const std::vector<uint32_t> crc32VecZ(stsize_, 0u);
60 const off_t trackinglen = sizes.first;
62 if (until <= tracked_page) return 0;
71 TRACE(Warn, "Unexpected partially filled last page " << fn_);
76 const ssize_t rret = ts_->ReadTags(&prevtag, tracked_page, 1);
86 const ssize_t wret = ts_->WriteTags(&crc32c, tracked_page, 1);
94 const off_t nAllEmpty = (tracked_off>0) ? (until - tracked_page - 1) : (until - tracked_page);
95 const off_t firstEmpty = (tracked_off>0) ? (tracked_page + 1) : tracked_page;
97 off_t towrite = nAllEmpty;
102 const uint32_t *wpointer = 0;
111 wblks = (off_t)crc32Vec.size();
112 wpointer = &crc32Vec[0];
116 wblks = (off_t)crc32VecZ.size();
117 wpointer = &crc32VecZ[0];
119 const size_t nw = std::min(towrite, wblks);
120 const ssize_t wret = ts_->WriteTags(wpointer, firstEmpty+nwritten, nw);
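UpdateRangeHoleUntilPage brings the tag store up to date when a write lands beyond the current end of file: any partially filled last page has its tag recomputed first, and the wholly empty pages in between then receive tags in chunks of at most stsize_ entries, drawn from one of the two precomputed vectors above (crc32Vec holds the CRC-32C of an all-zero page, crc32VecZ holds zero tags). Below is a minimal sketch of that chunked write loop, assuming a WriteTags-style call that returns the number of tags written; ITagStore, FillHoleTags and kEmptyPageCrc are illustrative names, not part of the real code.

   #include <sys/types.h>
   #include <algorithm>
   #include <cstdint>
   #include <vector>

   // Hedged sketch, not the XrdOssCsi implementation.
   struct ITagStore {
      // Returns the number of tags written, or a negative errno on failure.
      virtual ssize_t WriteTags(const uint32_t *tags, off_t firstPage, size_t n) = 0;
      virtual ~ITagStore() = default;
   };

   static int FillHoleTags(ITagStore &ts, off_t firstEmpty, off_t nAllEmpty,
                           uint32_t kEmptyPageCrc)
   {
      const size_t kChunk = 1024;                             // plays the role of stsize_
      const std::vector<uint32_t> chunk(kChunk, kEmptyPageCrc);
      off_t towrite = nAllEmpty, nwritten = 0;
      while (towrite > 0) {
         const size_t nw = std::min<off_t>(towrite, (off_t)chunk.size());
         const ssize_t wret = ts.WriteTags(chunk.data(), firstEmpty + nwritten, nw);
         if (wret < 0) return (int)wret;                      // propagate the error
         towrite  -= wret;
         nwritten += wret;
      }
      return 0;
   }

Writing a fixed-size chunk repeatedly keeps memory use bounded no matter how large the hole is.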
149 const off_t offset, const off_t trackinglen,
150 const uint32_t *const csvec, uint32_t &prepageval)
152 EPNAME("StoreRangeUnaligned_preblock");
159 if (p1 > tracked_page)
176 if (p1 == tracked_page && p1_off >= tracked_off)
184 const ssize_t rret = ts_->ReadTags(&crc32v, p1, 1);
224 if (static_cast<size_t>(rlen) == tracked_off)
227 if (tracked_off==0 || crc32x == crc32v) break;
232 if ((tracked_off>0 || p1_off==0) && static_cast<size_t>(rlen) <= p1_off+blen)
235 if (tracked_off != 0)
244 " of file " <<
fn_ <<
" rlen=" << rlen <<
" (append)");
248 if (static_cast<size_t>(rlen) != tracked_off && rlen>0)
251 if (crc32x == crc32v)
259 memcpy(&b[p1_off], buff, blen);
261 if (crc32x == crc32v)
264 " of file " <<
fn_ <<
" (append)");
278 " of file " <<
fn_ <<
", offset-in-page=" << p1_off <<
" rlen=" << rlen <<
" (append)");
288 const size_t nz = p1_off - tracked_off;
308 assert(p1_off < bavail);
312 assert(p1_off !=0 || blen<bavail);
316 ssize_t rret = ts_->ReadTags(&crc32v, p1, 1);
344 if (static_cast<size_t>(rlen) == bavail && crc32c == crc32v) break;
351 const size_t rmin = (p1_off+blen < bavail) ? bavail : 0;
352 if (static_cast<size_t>(rlen) >= rmin && static_cast<size_t>(rlen) <= bavail)
361 if (static_cast<size_t>(rlen) != bavail && rlen > 0)
372 memcpy(&b[p1_off], buff, blen);
373 const size_t vl = std::max(bavail, p1_off+blen);
377 TRACE(Warn, "Recovered matching write at offset " << (XrdSys::PageSize * p1)+p1_off << " of file " << fn_ << " (overwrite)");
415 if (p1_off+blen < bavail)
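StoreRangeUnaligned_preblock handles the partial first page of an unaligned write: it reads back the bytes already on disk, checks them against the stored tag, splices the new data in at p1_off, and recomputes the tag over the possibly longer page (the std::max(bavail, p1_off+blen) at line 373 above). Below is a minimal sketch of that read-verify-merge step, leaving out the append/recovery branches shown in the excerpt; crc32c() here is a plain bitwise CRC-32C standing in for the Calc32C helper, and MergePrepage is an illustrative name.

   #include <cstddef>
   #include <cstdint>
   #include <cstring>

   // Plain bitwise CRC-32C (Castagnoli, reflected polynomial 0x82F63B78);
   // the real code presumably uses a table-driven or hardware-assisted routine.
   static uint32_t crc32c(uint32_t crc, const void *buf, size_t len)
   {
      const uint8_t *p = static_cast<const uint8_t*>(buf);
      crc = ~crc;
      while (len--) {
         crc ^= *p++;
         for (int k = 0; k < 8; ++k)
            crc = (crc >> 1) ^ (0x82F63B78U & (0U - (crc & 1U)));
      }
      return ~crc;
   }

   // b      : scratch buffer holding the bavail bytes read from the first page
   // crc32v : tag currently stored for that page
   // Returns the new tag, or sets *ok=false if the on-disk data fails verification.
   static uint32_t MergePrepage(uint8_t *b, size_t bavail,
                                const void *buff, size_t p1_off, size_t blen,
                                uint32_t crc32v, bool *ok)
   {
      const uint32_t crc32x = crc32c(0U, b, bavail);     // verify existing content
      if (crc32x != crc32v) { *ok = false; return 0U; }
      memcpy(&b[p1_off], buff, blen);                    // splice in the new bytes
      const size_t vl = (bavail > p1_off + blen) ? bavail : p1_off + blen;
      *ok = true;
      return crc32c(0U, b, vl);                          // tag over the merged page
   }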
432 const off_t offset, const off_t trackinglen,
433 const uint32_t *const csvec, uint32_t &lastpageval)
435 EPNAME("StoreRangeUnaligned_postblock");
437 const uint8_t *const p = (uint8_t*)buff;
451 const size_t bremain = (p2_off < bavail) ? bavail-p2_off : 0;
460 ssize_t rret = ts_->ReadTags(&crc32v, p2, 1);
499 if (crc32v != crc32prev)
534 EPNAME("StoreRangeUnaligned");
537 const off_t trackinglen = sizes.first;
538 if (offset > trackinglen)
543 TRACE(Warn, "Error updating tags for holes, error=" << ret);
551 bool hasprepage = false;
567 const off_t np = hasprepage ? p1+1 : p1;
577 const ssize_t wret = ts_->WriteTags(&prepageval, p1, 1);
587 const uint8_t *const p = (uint8_t*)buff;
588 const uint32_t *csp = csvec;
589 if (csp && hasprepage) csp++;
592 if (p2_off == 0 || (offset + blen >= static_cast<size_t>(trackinglen)))
598 TRACE(Warn, "Error updating tags, error=" << aret);
606 uint32_t lastpageval;
617 TRACE(Warn, "Error updating tags, error=" << aret);
640 const off_t trackinglen, uint32_t *const tbuf, uint32_t *const csvec, const uint64_t opts)
642 EPNAME("FetchRangeUnaligned_preblock");
651 const size_t bcommon = std::min(bavail - p1_off, blen);
654 const uint8_t *ub = (uint8_t*)buff;
668 if (memcmp(buff, &b[p1_off], bcommon))
671 for(badoff=0;badoff<bcommon;badoff++) { if (((uint8_t*)buff)[badoff] != b[p1_off+badoff]) break; }
672 badoff = (badoff < bcommon) ? badoff : 0;
683 if (tbuf[0] != crc32calc)
692 if (bavail>bcommon && csvec)
711 csvec[0] = CrcUtils.crc32c_split2(csvec[0], crc32c, bavail-p1_off);
715 csvec[0] = CrcUtils.crc32c_split1(csvec[0], crc32c, bavail-p1_off-bcommon);
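When the requested range covers only part of the first page, the code above adjusts the caller-visible checksum with CrcUtils.crc32c_split1/split2 so that csvec[0] covers just the bytes returned rather than the whole page. The exact split helpers belong to XrdOssCsiCrcUtils and are not reproduced here; they are presumably built on the same GF(2) "append n zero bytes" machinery as the classic crc32_combine. For reference only, here is a CRC-32C combine modeled on zlib's crc32_combine, which computes the CRC of A||B from the CRCs of A and B.

   #include <cstddef>
   #include <cstdint>

   // Multiply the GF(2) 32x32 matrix 'mat' by the vector 'vec'.
   static uint32_t gf2_matrix_times(const uint32_t *mat, uint32_t vec)
   {
      uint32_t sum = 0;
      while (vec) {
         if (vec & 1) sum ^= *mat;
         vec >>= 1;
         ++mat;
      }
      return sum;
   }

   static void gf2_matrix_square(uint32_t *square, const uint32_t *mat)
   {
      for (int n = 0; n < 32; ++n)
         square[n] = gf2_matrix_times(mat, mat[n]);
   }

   // crc32c_combine(crcA, crcB, lenB) returns the CRC-32C of A||B given the
   // CRCs of A and B and the length of B; same algorithm as zlib's
   // crc32_combine with the Castagnoli polynomial substituted.
   static uint32_t crc32c_combine(uint32_t crc1, uint32_t crc2, size_t len2)
   {
      if (len2 == 0) return crc1;
      uint32_t even[32], odd[32];
      odd[0] = 0x82F63B78U;                    // reflected CRC-32C polynomial
      uint32_t row = 1;
      for (int n = 1; n < 32; ++n) { odd[n] = row; row <<= 1; }
      gf2_matrix_square(even, odd);            // operator for two zero bits
      gf2_matrix_square(odd, even);            // operator for four zero bits
      do {                                     // apply len2 zero bytes to crc1
         gf2_matrix_square(even, odd);
         if (len2 & 1) crc1 = gf2_matrix_times(even, crc1);
         len2 >>= 1;
         if (!len2) break;
         gf2_matrix_square(odd, even);
         if (len2 & 1) crc1 = gf2_matrix_times(odd, crc1);
         len2 >>= 1;
      } while (len2);
      return crc1 ^ crc2;
   }

Splitting works in the reverse direction (removing a known prefix or suffix from a combined CRC), but it uses the same kind of zero-extension operators.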
728 const off_t trackinglen, uint32_t *const tbuf, uint32_t *const csvec, const size_t tidx, const uint64_t opts)
730 EPNAME("FetchRangeUnaligned_postblock");
739 const size_t bremain = (p2_off < bavail) ? bavail-p2_off : 0;
741 const uint8_t *ub = &((uint8_t*)buff)[blen-p2_off];
753 const uint8_t *const p = (uint8_t*)buff;
754 if (memcmp(&p[blen-p2_off], b, p2_off))
757 for(badoff=0;badoff<p2_off;badoff++) { if (p[blen-p2_off+badoff] != b[badoff]) break; }
758 badoff = (badoff < p2_off) ? badoff : 0;
768 if (tbuf[tidx] != crc32calc)
776 if (csvec && bremain>0)
790 csvec[tidx] = CrcUtils.crc32c_split1(csvec[tidx], crc32c, bremain);
805 EPNAME("FetchRangeUnaligned");
812 const off_t trackinglen = sizes.first;
814 size_t ntagstoread = (p2_off>0) ? p2-p1+1 : p2-p1;
815 size_t ntagsbase = p1;
816 uint32_t tbufint[stsize_], *tbuf=0;
821 tbufsz = sizeof(tbufint)/sizeof(uint32_t);
826 tbufsz = ntagstoread;
829 size_t tcnt = std::min(ntagstoread, tbufsz);
830 ssize_t rret = ts_->ReadTags(tbuf, ntagsbase, tcnt);
849 const off_t fp = (p1_off != 0) ? p1+1 : p1;
855 const uint8_t *const p = (uint8_t*)buff;
857 const size_t cbufsz = sizeof(calcbuf)/sizeof(uint32_t);
858 size_t toread = lp-fp;
862 const size_t ccnt = std::min(toread, cbufsz);
864 size_t tovalid = ccnt;
868 const size_t tidx=fp+nread+nvalid - ntagsbase;
869 const size_t nv = std::min(tovalid, tbufsz-tidx);
872 assert(csvec == NULL);
874 tcnt = std::min(ntagstoread, tbufsz);
875 rret = ts_->ReadTags(tbuf, ntagsbase, tcnt);
884 if (memcmp(&calcbuf[nvalid], &tbuf[tidx], 4*nv))
887 for(badpg=0;badpg<nv;badpg++) { if (memcmp(&calcbuf[nvalid+badpg], &tbuf[tidx+badpg],4)) break; }
889 (ntagsbase+tidx+badpg),
890 calcbuf[nvalid+badpg], tbuf[tidx+badpg]));
902 if (p2>p1 && p2_off > 0)
906 size_t tidx = p2 - ntagsbase;
909 assert(csvec == NULL);
912 rret = ts_->ReadTags(tbuf, ntagsbase, 1);
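For the fully aligned pages in the middle of the range, FetchRangeUnaligned verifies in batches: it computes CRC-32C tags for a run of pages into calcbuf, compares them against the stored tags in tbuf with a single memcmp, and only scans page by page when that comparison fails, in order to report the first corrupted page. A small sketch of that locate-first-mismatch step (FirstBadPage is an illustrative name, not part of the real code):

   #include <cstddef>
   #include <cstdint>
   #include <cstring>

   // Returns the index of the first differing tag, or 'n' if all match.
   // Mirrors the memcmp-then-scan pattern in the excerpt above.
   static size_t FirstBadPage(const uint32_t *calc, const uint32_t *stored, size_t n)
   {
      if (memcmp(calc, stored, 4 * n) == 0) return n;   // fast path: everything matches
      size_t badpg = 0;
      while (badpg < n && calc[badpg] == stored[badpg]) ++badpg;
      return badpg;                                     // first mismatching page
   }

Keeping the common, error-free path to one memcmp avoids per-page branching; the linear scan only runs when an error is about to be reported anyway.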
Declarations referenced by the excerpts above:

static XrdOssCsiCrcUtils CrcUtils
uint32_t crc32c(uint32_t crc, void const *buf, size_t len)
int StoreRangeUnaligned(XrdOssDF *, const void *, off_t, size_t, const Sizes_t &, const uint32_t *)
ssize_t apply_sequential_aligned_modify(const void *, off_t, size_t, const uint32_t *, bool, bool, uint32_t, uint32_t)
std::string ByteMismatchError(size_t blen, off_t off, uint8_t user, uint8_t page)
static ssize_t maxread(XrdOssDF *fd, void *buff, const off_t off, const size_t sz, size_t tg=0)
std::string TagsReadError(off_t start, size_t n, int ret)
std::unique_ptr<XrdOssCsiTagstore> ts_
int UpdateRangeUnaligned(XrdOssDF *, const void *, off_t, size_t, const Sizes_t &)
std::string TagsWriteError(off_t start, size_t n, int ret)
int FetchRangeUnaligned_preblock(XrdOssDF *, const void *, off_t, size_t, off_t, uint32_t *, uint32_t *, uint64_t)
int UpdateRangeHoleUntilPage(XrdOssDF *, off_t, const Sizes_t &)
static ssize_t fullread(XrdOssDF *fd, void *buff, const off_t off, const size_t sz)
std::pair<off_t, off_t> Sizes_t
int FetchRangeUnaligned(XrdOssDF *, const void *, off_t, size_t, const Sizes_t &, uint32_t *, uint64_t)
int FetchRangeUnaligned_postblock(XrdOssDF *, const void *, off_t, size_t, off_t, uint32_t *, uint32_t *, size_t, uint64_t)
int VerifyRangeUnaligned(XrdOssDF *, const void *, off_t, size_t, const Sizes_t &)
std::string CRCMismatchError(size_t blen, off_t pgnum, uint32_t got, uint32_t expected)
int StoreRangeUnaligned_preblock(XrdOssDF *, const void *, size_t, off_t, off_t, const uint32_t *, uint32_t &)
std::string PageReadError(size_t blen, off_t pgnum, int ret)
int StoreRangeUnaligned_postblock(XrdOssDF *, const void *, size_t, off_t, off_t, const uint32_t *, uint32_t &)
static const size_t stsize_
static const uint64_t Verify  // all: Verify checksums
static uint32_t Calc32C(const void *data, size_t count, uint32_t prevcs=0)
static const int PageSize