Lines Matching refs:rl

89  * @rl:		original runlist
90 * @old_size: number of runlist elements in the original runlist @rl
103 static runlist_element *ntfs_rl_realloc(runlist_element *rl,
109 return rl;
110 return realloc(rl, new_size);
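
The ntfs_rl_realloc() hits above show a thin wrapper around realloc() for resizing a runlist buffer. Below is a minimal, self-contained sketch of the same idea: only call realloc() when the rounded allocation size actually changes. The 4 KiB rounding granule and the simplified runlist_element layout are assumptions made for this sketch, not the library's own definitions.

#include <stdlib.h>

/* Simplified stand-ins for the library's types (illustration only). */
typedef long long s64;
typedef s64 VCN;
typedef s64 LCN;

typedef struct {
	VCN vcn;	/* first virtual cluster of the run */
	LCN lcn;	/* first logical cluster, or a negative special code */
	s64 length;	/* run length in clusters; 0 terminates the runlist */
} runlist_element;

/*
 * Reallocate a runlist to hold @new_count elements.  Sizes are rounded up
 * to a 4 KiB granule (an assumption of this sketch) so that small growth
 * steps do not trigger a realloc() every time.
 */
static runlist_element *rl_realloc_sketch(runlist_element *rl,
		int old_count, int new_count)
{
	size_t old_size = (old_count * sizeof(runlist_element) + 0xfff) &
			~(size_t)0xfff;
	size_t new_size = (new_count * sizeof(runlist_element) + 0xfff) &
			~(size_t)0xfff;

	if (old_size == new_size)
		return rl;		/* current allocation still fits */
	return realloc(rl, new_size);	/* may return NULL on failure */
}
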
501 int di, si; /* Current index into @[ds]rl. */
504 int dend, send; /* Last index into @[ds]rl. */
505 int dfinal, sfinal; /* The last index into @[ds]rl with
739 runlist_element *rl; /* The output runlist. */
770 rl = ntfs_malloc(rlsize);
771 if (!rl)
775 rl->vcn = (VCN)0;
776 rl->lcn = (LCN)LCN_RL_NOT_MAPPED;
777 rl->length = vcn;
789 rl2 = realloc(rl, rlsize);
792 free(rl);
796 rl = rl2;
799 rl[rlpos].vcn = vcn;
830 rl[rlpos].length = deltaxcn;
839 rl[rlpos].lcn = (LCN)LCN_HOLE;
872 rl[rlpos].lcn = lcn;
915 rl[rlpos].vcn = vcn;
916 vcn += rl[rlpos].length = max_cluster - deltaxcn;
917 rl[rlpos].lcn = (LCN)LCN_RL_NOT_MAPPED;
927 rl[rlpos].lcn = (LCN)LCN_ENOENT;
929 rl[rlpos].lcn = (LCN)LCN_RL_NOT_MAPPED;
932 rl[rlpos].vcn = vcn;
933 rl[rlpos].length = (s64)0;
937 ntfs_debug_runlist_dump(rl);
938 return rl;
941 old_rl = ntfs_runlists_merge(old_rl, rl);
945 free(rl);
952 free(rl);
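
The ntfs_mapping_pairs_decompress() hits show the decoded runlist being filled element by element and finished with a zero-length terminator whose lcn is a negative special code (LCN_ENOENT or LCN_RL_NOT_MAPPED). The sketch below builds such a runlist by hand and walks it the way the terminator convention implies; the numeric values of the LCN_* codes are assumptions here (the library's headers define them).

#include <stdio.h>

typedef long long s64;
typedef s64 VCN;
typedef s64 LCN;

/* Special LCN values; the exact numbers are assumed for this sketch. */
enum { LCN_HOLE = -1, LCN_RL_NOT_MAPPED = -2, LCN_ENOENT = -3 };

typedef struct {
	VCN vcn;
	LCN lcn;
	s64 length;	/* 0 marks the terminator element */
} runlist_element;

int main(void)
{
	/* Two allocated runs, a sparse hole in between, then the terminator. */
	runlist_element rl[] = {
		{ .vcn = 0,  .lcn = 1000,       .length = 16 },
		{ .vcn = 16, .lcn = LCN_HOLE,   .length = 8  },
		{ .vcn = 24, .lcn = 2048,       .length = 4  },
		{ .vcn = 28, .lcn = LCN_ENOENT, .length = 0  },
	};

	for (int i = 0; rl[i].length; i++)
		printf("vcn %lld -> lcn %lld, %lld clusters\n",
				(long long)rl[i].vcn, (long long)rl[i].lcn,
				(long long)rl[i].length);
	return 0;
}
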
959 * @rl: runlist to use for conversion
963 * cluster number (lcn) of a device using the runlist @rl to map vcns to their
976 LCN ntfs_rl_vcn_to_lcn(const runlist_element *rl, const VCN vcn)
983 * If rl is NULL, assume that we have found an unmapped runlist. The
987 if (!rl)
991 if (vcn < rl[0].vcn)
994 for (i = 0; rl[i].length; i++) {
995 if (vcn < rl[i+1].vcn) {
996 if (rl[i].lcn >= (LCN)0)
997 return rl[i].lcn + (vcn - rl[i].vcn);
998 return rl[i].lcn;
1005 if (rl[i].lcn < (LCN)0)
1006 return rl[i].lcn;
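
ntfs_rl_vcn_to_lcn() maps a virtual cluster number to a logical cluster number with a linear scan: find the run whose VCN range contains @vcn, then add the offset into the run if that run is actually allocated. A hedged re-statement of the scan shown above, using simplified typedefs and minimal error handling:

typedef long long s64;
typedef s64 VCN;
typedef s64 LCN;

enum { LCN_HOLE = -1, LCN_RL_NOT_MAPPED = -2, LCN_ENOENT = -3 };

typedef struct {
	VCN vcn;
	LCN lcn;
	s64 length;	/* 0 terminates the runlist */
} runlist_element;

/*
 * Map @vcn to an LCN by scanning the runlist linearly.  Negative return
 * values are the special LCN_* codes (hole, not mapped, past the end).
 */
static LCN rl_vcn_to_lcn_sketch(const runlist_element *rl, VCN vcn)
{
	int i;

	if (!rl)
		return LCN_RL_NOT_MAPPED;	/* no runlist: treat as unmapped */
	if (vcn < rl[0].vcn)
		return LCN_ENOENT;		/* before the first mapped vcn */

	for (i = 0; rl[i].length; i++) {
		/* rl[i].length != 0 guarantees rl[i + 1] exists. */
		if (vcn < rl[i + 1].vcn) {
			if (rl[i].lcn >= 0)
				return rl[i].lcn + (vcn - rl[i].vcn);
			return rl[i].lcn;	/* hole or unmapped code */
		}
	}
	/* Past the last run: the terminator's lcn says why the list ends. */
	if (rl[i].lcn < 0)
		return rl[i].lcn;
	return LCN_ENOENT;
}
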
1014 * @rl: runlist specifying where to read the data from
1015 * @pos: byte position within runlist @rl at which to begin the read
1020 * @b gathering the data as specified by the runlist @rl. The read begins at
1021 * offset @pos into the runlist @rl.
1035 s64 ntfs_rl_pread(const ntfs_volume *vol, const runlist_element *rl,
1041 if (!vol || !rl || pos < 0 || count < 0) {
1047 /* Seek in @rl to the run containing @pos. */
1048 for (ofs = 0; rl->length && (ofs + (rl->length <<
1049 vol->cluster_size_bits) <= pos); rl++)
1050 ofs += (rl->length << vol->cluster_size_bits);
1053 for (total = 0LL; count; rl++, ofs = 0) {
1054 if (!rl->length)
1056 if (rl->lcn < (LCN)0) {
1057 if (rl->lcn != (LCN)LCN_HOLE)
1060 to_read = min(count, (rl->length <<
1070 to_read = min(count, (rl->length << vol->cluster_size_bits) -
1073 bytes_read = ntfs_pread(vol->u.dev, (rl->lcn <<
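
ntfs_rl_pread() first seeks within the runlist to the run containing byte position @pos, then reads run by run, treating sparse holes as zero-filled data. The sketch below isolates the seek step, i.e. how a byte position is split into a run and the byte offset of that run's start; the cluster size is passed in directly instead of coming from an ntfs_volume. The same seek loop appears again in ntfs_rl_pwrite() below.

typedef long long s64;
typedef s64 VCN;
typedef s64 LCN;

typedef struct {
	VCN vcn;
	LCN lcn;
	s64 length;	/* clusters; 0 terminates */
} runlist_element;

/*
 * Advance through @rl until the run containing byte position @pos is found.
 * On return, *run_ofs holds the byte count of all earlier runs, so
 * (pos - *run_ofs) is the offset into the returned run.  Returns NULL if
 * @pos lies beyond the last run.
 */
static const runlist_element *rl_seek_sketch(const runlist_element *rl,
		s64 pos, unsigned cluster_size_bits, s64 *run_ofs)
{
	s64 ofs = 0;

	for (; rl->length; rl++) {
		s64 run_bytes = rl->length << cluster_size_bits;

		if (ofs + run_bytes > pos) {
			*run_ofs = ofs;
			return rl;	/* @pos falls inside this run */
		}
		ofs += run_bytes;
	}
	return NULL;		/* @pos is past the end of the runlist */
}

From there the read proceeds run by run, as the hits above show: a hole (lcn == LCN_HOLE) is satisfied by zero-filling the destination, while an allocated run is read from (lcn << cluster_size_bits) plus the intra-run offset via ntfs_pread().
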
1101 * @rl: runlist specifying where to write the data to
1102 * @pos: byte position within runlist @rl at which to begin the write
1107 * scattering the data as specified by the runlist @rl. The write begins at
1108 * offset @pos into the runlist @rl.
1119 s64 ntfs_rl_pwrite(const ntfs_volume *vol, const runlist_element *rl,
1125 if (!vol || !rl || pos < 0 || count < 0) {
1131 /* Seek in @rl to the run containing @pos. */
1132 for (ofs = 0; rl->length && (ofs + (rl->length <<
1133 vol->cluster_size_bits) <= pos); rl++)
1134 ofs += (rl->length << vol->cluster_size_bits);
1137 for (total = 0LL; count; rl++, ofs = 0) {
1138 if (!rl->length)
1140 if (rl->lcn < (LCN)0) {
1144 if (rl->lcn != (LCN)LCN_HOLE)
1150 to_write = min(count, (rl->length <<
1179 to_write = min(count, (rl->length << vol->cluster_size_bits) -
1183 written = ntfs_pwrite(vol->u.dev, (rl->lcn <<
1214 * @rl: runlist specifying where to write zeroes to
1215 * @pos: byte position within runlist @rl at which to begin the zeroing
1220 int ntfs_rl_fill_zero(const ntfs_volume *vol, const runlist *rl, s64 pos,
1230 if (!vol || !rl || pos < 0 || count < 0) {
1241 written = ntfs_rl_pwrite(vol, rl, pos, size, buf);
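
ntfs_rl_fill_zero() zeroes a byte range by repeatedly writing from a zero-filled buffer through ntfs_rl_pwrite(). A sketch of that chunking loop, with the positioned write abstracted behind a callback; the callback type and the 4 KiB chunk size are assumptions of this sketch.

#include <string.h>

typedef long long s64;

/* Stand-in for a positioned-write primitive such as ntfs_rl_pwrite(). */
typedef s64 (*pwrite_fn)(void *ctx, s64 pos, s64 count, const void *buf);

/*
 * Zero @count bytes starting at byte position @pos by repeatedly writing
 * from a zero-filled buffer.  Returns 0 on success, -1 on a short write.
 */
static int fill_zero_sketch(pwrite_fn writer, void *ctx, s64 pos, s64 count)
{
	char buf[4096];

	memset(buf, 0, sizeof(buf));
	while (count > 0) {
		s64 size = count < (s64)sizeof(buf) ? count : (s64)sizeof(buf);
		s64 written = writer(ctx, pos, size, buf);

		if (written != size)
			return -1;
		pos += size;
		count -= size;
	}
	return 0;
}
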
1287 * @rl: runlist for which to determine the size of the mapping pairs
1290 * Walk the runlist @rl and calculate the size in bytes of the mapping pairs
1291 * array corresponding to the runlist @rl, starting at vcn @start_vcn. This
1295 * If @rl is NULL, just return 1 (for the single terminator byte).
1305 const runlist_element *rl, const VCN start_vcn)
1316 if (!rl) {
1318 ntfs_log_trace("rl NULL, start_vcn %lld (should be > 0)\n",
1326 while (rl->length && start_vcn >= rl[1].vcn)
1327 rl++;
1328 if ((!rl->length && start_vcn > rl->vcn) || start_vcn < rl->vcn) {
1336 if (start_vcn > rl->vcn) {
1339 /* We know rl->length != 0 already. */
1340 if (rl->length < 0 || rl->lcn < LCN_HOLE)
1342 delta = start_vcn - rl->vcn;
1344 rls += 1 + ntfs_get_nr_significant_bytes(rl->length - delta);
1352 if (rl->lcn >= 0 || vol->major_ver < 3) {
1353 prev_lcn = rl->lcn;
1354 if (rl->lcn >= 0)
1360 rl++;
1363 for (; rl->length; rl++) {
1364 if (rl->length < 0 || rl->lcn < LCN_HOLE)
1367 rls += 1 + ntfs_get_nr_significant_bytes(rl->length);
1375 if (rl->lcn >= 0 || vol->major_ver < 3) {
1377 rls += ntfs_get_nr_significant_bytes(rl->lcn -
1379 prev_lcn = rl->lcn;
1384 if (rl->lcn == LCN_RL_NOT_MAPPED)
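
ntfs_get_size_for_mapping_pairs() sizes each mapping pair as one header byte plus the number of "significant" bytes needed to encode the run length and the LCN delta from the previous run; as the hits above show, the LCN part is only counted when the run is allocated (or on pre-3.0 volumes). A sketch of that byte counting for a signed value; the helper name is my own, standing in for ntfs_get_nr_significant_bytes().

typedef long long s64;

/*
 * Return how many bytes are needed to store the signed value @n in the
 * NTFS mapping pairs encoding (little endian, minimum width, sign
 * preserved).  This is a sketch, not the library routine.
 */
static int nr_significant_bytes_sketch(s64 n)
{
	int i;

	for (i = 1; i < 8; i++) {
		s64 hi = ((s64)1 << (8 * i - 1)) - 1;	/* largest value in i bytes */
		s64 lo = -hi - 1;			/* smallest value in i bytes */

		if (n >= lo && n <= hi)
			return i;
	}
	return 8;
}

For example, a run length of 200 clusters needs 2 bytes (0xC8 alone would read back as a negative value), while an LCN delta of -5 fits in a single byte.
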
1447 * @rl: runlist for which to build the mapping pairs array
1451 * Create the mapping pairs array from the runlist @rl, starting at vcn
1456 * If @rl is NULL, just write a single terminator byte to @dst.
1474 const int dst_len, const runlist_element *rl,
1483 if (!rl) {
1497 while (rl->length && start_vcn >= rl[1].vcn)
1498 rl++;
1499 if ((!rl->length && start_vcn > rl->vcn) || start_vcn < rl->vcn)
1508 if (start_vcn > rl->vcn) {
1511 /* We know rl->length != 0 already. */
1512 if (rl->length < 0 || rl->lcn < LCN_HOLE)
1514 delta = start_vcn - rl->vcn;
1517 rl->length - delta);
1529 if (rl->lcn >= 0 || vol->major_ver < 3) {
1530 prev_lcn = rl->lcn;
1531 if (rl->lcn >= 0)
1548 rl++;
1551 for (; rl->length; rl++) {
1552 if (rl->length < 0 || rl->lcn < LCN_HOLE)
1556 rl->length);
1568 if (rl->lcn >= 0 || vol->major_ver < 3) {
1571 len_len, dst_max, rl->lcn - prev_lcn);
1574 prev_lcn = rl->lcn;
1587 *stop_vcn = rl->vcn;
1594 *stop_vcn = rl->vcn;
1603 if (rl->lcn == LCN_RL_NOT_MAPPED)
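
ntfs_mapping_pairs_build() writes the pairs themselves: each pair starts with a header byte whose low nibble is the byte count of the length field and whose high nibble is the byte count of the LCN-delta field, followed by the two little-endian values; sparse runs omit the delta. A sketch of emitting one pair under those assumptions; the buffer layout and helpers here are mine, standing in for the library's write helpers.

#include <stdint.h>
#include <stddef.h>

typedef long long s64;

/* Store @n little endian in @len bytes at @dst (sketch helper). */
static void put_le_bytes(uint8_t *dst, s64 n, int len)
{
	for (int i = 0; i < len; i++)
		dst[i] = (uint8_t)(n >> (8 * i));
}

/* Minimum number of bytes needed for signed @n (see the earlier sketch). */
static int nr_bytes(s64 n)
{
	for (int i = 1; i < 8; i++) {
		s64 hi = ((s64)1 << (8 * i - 1)) - 1;

		if (n >= -hi - 1 && n <= hi)
			return i;
	}
	return 8;
}

/*
 * Emit one mapping pair: header byte (lcn-delta size in the high nibble,
 * length size in the low nibble), then the length, then the lcn delta.
 * Pass sparse != 0 for a hole, which omits the delta entirely and leaves
 * the high nibble zero.  Returns the number of bytes written, or 0 if
 * @dst_len is too small.
 */
static size_t emit_pair_sketch(uint8_t *dst, size_t dst_len,
		s64 length, s64 lcn_delta, int sparse)
{
	int len_nbytes = nr_bytes(length);
	int lcn_nbytes = sparse ? 0 : nr_bytes(lcn_delta);
	size_t need = 1 + len_nbytes + lcn_nbytes;

	if (dst_len < need)
		return 0;
	dst[0] = (uint8_t)((lcn_nbytes << 4) | len_nbytes);
	put_le_bytes(dst + 1, length, len_nbytes);
	if (!sparse)
		put_le_bytes(dst + 1 + len_nbytes, lcn_delta, lcn_nbytes);
	return need;
}
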
1625 runlist *rl;
1633 rl = *arl;
1635 if (start_vcn < rl->vcn) {
1642 while (rl->length) {
1643 if (start_vcn < rl[1].vcn)
1645 rl++;
1648 if (!rl->length) {
1655 rl->length = start_vcn - rl->vcn;
1662 if (rl->length) {
1663 ++rl;
1664 rl->vcn = start_vcn;
1665 rl->length = 0;
1667 rl->lcn = (LCN)LCN_ENOENT;
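
The truncation hits above shorten a runlist at @start_vcn: find the run containing @start_vcn, clip its length, and write a fresh LCN_ENOENT terminator right behind it. A minimal in-place version of that logic, with error handling reduced to return codes and without the reallocation the real function also performs.

typedef long long s64;
typedef s64 VCN;
typedef s64 LCN;

enum { LCN_ENOENT = -3 };	/* assumed value of the special code */

typedef struct {
	VCN vcn;
	LCN lcn;
	s64 length;
} runlist_element;

/*
 * Truncate @rl in place so that it ends at @start_vcn.  Returns 0 on
 * success, -1 if @start_vcn lies before the runlist or past its end.
 */
static int rl_truncate_sketch(runlist_element *rl, VCN start_vcn)
{
	if (start_vcn < rl->vcn)
		return -1;			/* before the mapped range */

	/* Find the run that contains @start_vcn. */
	while (rl->length) {
		if (start_vcn < rl[1].vcn)
			break;
		rl++;
	}
	if (!rl->length)
		return -1;			/* already ends before @start_vcn */

	/* Clip the run, then terminate the list right behind it. */
	rl->length = start_vcn - rl->vcn;
	if (rl->length) {
		++rl;
		rl->vcn = start_vcn;
		rl->length = 0;
	}
	rl->lcn = LCN_ENOENT;
	return 0;
}
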
1673 * @rl: runlist to check
1681 int ntfs_rl_sparse(runlist *rl)
1685 if (!rl) {
1691 for (rlc = rl; rlc->length; rlc++) {
1709 * @rl: runlist to calculate for
1713 s64 ntfs_rl_get_compressed_size(ntfs_volume *vol, runlist *rl)
1718 if (!rl) {
1724 for (rlc = rl; rlc->length; rlc++) {
1751 * @rl: runlist to dump
1757 static void test_rl_dump_runlist(const runlist_element *rl)
1764 if (!rl) {
1770 for (len = 0; rl[len].length; len++) ;
1773 for (i = 0; ; i++, rl++) {
1774 LCN lcn = rl->lcn;
1789 rl->vcn, lcn_str[ind], rl->length);
1792 rl->vcn, rl->lcn, rl->length);
1793 if (!rl->length)
1977 printf("rl pure [contig|noncontig] [single|multi]\n");
1985 printf("rl pure [contig|noncontig] [single|multi]\n");
2148 printf("rl [zero|frag|pure] {args}\n");