Lines Matching full:pointers

1004 		 * Both @faila and @failb should be valid pointers if any of  in get_rbio_veritical_errors()
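The comment at 1004 is the only match inside get_rbio_veritical_errors(); it tells the caller to pass valid @faila/@failb storage. A minimal, self-contained sketch of that kind of vertical-stripe error scan, with a hypothetical stripe_has_error() predicate standing in for the kernel's error-bitmap test:

#include <stdbool.h>

/*
 * Illustrative stand-in for get_rbio_veritical_errors(): count the
 * failed stripes in one vertical stripe and report the first two
 * failures through @faila/@failb (which must be valid pointers, as
 * the comment above requires).  stripe_has_error() is hypothetical;
 * the real helper consults the rbio's error bitmap.
 */
static int count_vertical_errors(int total_stripes, int sector_nr,
				 bool (*stripe_has_error)(int stripe, int sector_nr),
				 int *faila, int *failb)
{
	int found_errors = 0;

	*faila = -1;
	*failb = -1;

	for (int stripe = 0; stripe < total_stripes; stripe++) {
		if (!stripe_has_error(stripe, sector_nr))
			continue;
		if (found_errors == 0)
			*faila = stripe;
		else if (found_errors == 1)
			*failb = stripe;
		found_errors++;
	}
	return found_errors;
}
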
1187 void **pointers = rbio->finish_pointers; in generate_pq_vertical() local
1196 pointers[stripe] = kmap_local_page(sector->page) + in generate_pq_vertical()
1203 pointers[stripe++] = kmap_local_page(sector->page) + sector->pgoff; in generate_pq_vertical()
1212 pointers[stripe++] = kmap_local_page(sector->page) + in generate_pq_vertical()
1216 pointers); in generate_pq_vertical()
1219 memcpy(pointers[rbio->nr_data], pointers[0], sectorsize); in generate_pq_vertical()
1220 run_xor(pointers + 1, rbio->nr_data - 1, sectorsize); in generate_pq_vertical()
1223 kunmap_local(pointers[stripe]); in generate_pq_vertical()
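The generate_pq_vertical() matches above map one sector from every stripe into pointers[], then either hand the array to the RAID6 syndrome generator (the lone "pointers);" at 1216 is presumably the tail of that call) or, on the RAID5 path at 1219-1220, seed P with the first data sector and XOR the rest in. A minimal userspace sketch of that P-only branch, with xor_into_last() standing in for the kernel's run_xor():

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* XOR pointers[0..count-1] into pointers[count], like run_xor() does. */
static void xor_into_last(void **pointers, int count, size_t len)
{
	uint8_t *dst = pointers[count];

	for (int i = 0; i < count; i++) {
		const uint8_t *src = pointers[i];

		for (size_t b = 0; b < len; b++)
			dst[b] ^= src[b];
	}
}

/*
 * RAID5-style P parity, mirroring lines 1219-1220 above:
 * P = D0, then P ^= D1 ^ ... ^ D(nr_data - 1).
 * Data sectors live in pointers[0..nr_data-1], P in pointers[nr_data].
 */
static void generate_p(void **pointers, int nr_data, size_t sectorsize)
{
	memcpy(pointers[nr_data], pointers[0], sectorsize);
	xor_into_last(pointers + 1, nr_data - 1, sectorsize);
}

Because the XOR destination is the element just past the sources and keeps its prior contents, the memcpy of D0 into the parity buffer has to come first.
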
1715 * @*pointers are the pre-allocated pointers by the caller, so we don't
1716 * need to allocate/free the pointers again and again.
1719 void **pointers, void **unmap_array) in recover_vertical() argument
1751 * Setup our array of pointers with sectors from each stripe in recover_vertical()
1753 * NOTE: store a duplicate array of pointers to preserve the in recover_vertical()
1767 pointers[stripe_nr] = kmap_local_page(sector->page) + in recover_vertical()
1769 unmap_array[stripe_nr] = pointers[stripe_nr]; in recover_vertical()
1815 faila, pointers); in recover_vertical()
1818 faila, failb, pointers); in recover_vertical()
1827 memcpy(pointers[faila], pointers[rbio->nr_data], sectorsize); in recover_vertical()
1830 p = pointers[faila]; in recover_vertical()
1833 pointers[stripe_nr] = pointers[stripe_nr + 1]; in recover_vertical()
1834 pointers[rbio->nr_data - 1] = p; in recover_vertical()
1837 run_xor(pointers, rbio->nr_data - 1, sectorsize); in recover_vertical()
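In recover_vertical(), the matches at 1815/1818 are presumably the tails of the lib/raid6 recovery calls (raid6_datap_recov() / raid6_2data_recov()), while 1827-1837 are the single-data-failure branch: copy P over the failed sector, rotate that buffer to the end of the array, and run_xor() the surviving data back into it. The unmap_array duplicate (1753/1769) exists precisely because this rotation permutes pointers[], yet kunmap_local() must later be given the original mapping addresses. A self-contained sketch of the same computation, written without the rotation (mathematically it is just D_faila = P XOR the surviving data sectors):

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/*
 * Rebuild the single missing data sector:
 *   D_faila = P ^ D0 ^ ... ^ D(nr_data - 1), skipping D_faila itself.
 * Layout follows the listing: data in pointers[0..nr_data-1], P in
 * pointers[nr_data].  The plain loop is an illustrative stand-in for
 * the pointer rotation + run_xor() sequence the kernel uses.
 */
static void recover_one_data(void **pointers, int nr_data, int faila,
			     size_t sectorsize)
{
	uint8_t *dst = pointers[faila];

	/* Seed the failed sector with the P parity. */
	memcpy(dst, pointers[nr_data], sectorsize);

	/* XOR every surviving data sector back in. */
	for (int stripe = 0; stripe < nr_data; stripe++) {
		const uint8_t *src = pointers[stripe];

		if (stripe == faila)
			continue;
		for (size_t b = 0; b < sectorsize; b++)
			dst[b] ^= src[b];
	}
}
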
1876 void **pointers = NULL; in recover_sectors() local
1882 * @pointers array stores the pointer for each sector. in recover_sectors()
1884 * @unmap_array stores copy of pointers that does not get reordered in recover_sectors()
1887 pointers = kcalloc(rbio->real_stripes, sizeof(void *), GFP_NOFS); in recover_sectors()
1889 if (!pointers || !unmap_array) { in recover_sectors()
1903 ret = recover_vertical(rbio, sectornr, pointers, unmap_array); in recover_sectors()
1909 kfree(pointers); in recover_sectors()
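recover_sectors() allocates the two scratch arrays once, sized for every stripe, reuses them for each vertical stripe, and frees them at the end; recover_scrub_rbio() further down (2561-2638) follows the same pattern. A condensed sketch of that structure, assuming the surrounding raid56.c context (struct btrfs_raid_bio, recover_vertical()); the real function's error handling and labels differ:

static int recover_sectors_sketch(struct btrfs_raid_bio *rbio)
{
	void **pointers = NULL;
	void **unmap_array = NULL;
	int sectornr;
	int ret = 0;

	/*
	 * @pointers is the working array handed to recover_vertical();
	 * @unmap_array keeps the unreordered copies for kunmap_local().
	 */
	pointers = kcalloc(rbio->real_stripes, sizeof(void *), GFP_NOFS);
	unmap_array = kcalloc(rbio->real_stripes, sizeof(void *), GFP_NOFS);
	if (!pointers || !unmap_array) {
		ret = -ENOMEM;
		goto out;
	}

	for (sectornr = 0; sectornr < rbio->stripe_nsectors; sectornr++) {
		ret = recover_vertical(rbio, sectornr, pointers, unmap_array);
		if (ret < 0)
			break;
	}
out:
	kfree(pointers);
	kfree(unmap_array);
	return ret;
}
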
2404 void **pointers = rbio->finish_pointers; in finish_parity_scrub() local
2457 pointers[rbio->real_stripes - 1] = kmap_local_page(q_sector.page); in finish_parity_scrub()
2463 pointers[nr_data] = kmap_local_page(p_sector.page); in finish_parity_scrub()
2472 pointers[stripe] = kmap_local_page(sector->page) + in finish_parity_scrub()
2479 pointers); in finish_parity_scrub()
2482 memcpy(pointers[nr_data], pointers[0], sectorsize); in finish_parity_scrub()
2483 run_xor(pointers + 1, nr_data - 1, sectorsize); in finish_parity_scrub()
2489 if (memcmp(parity, pointers[rbio->scrubp], sectorsize) != 0) in finish_parity_scrub()
2490 memcpy(parity, pointers[rbio->scrubp], sectorsize); in finish_parity_scrub()
2497 kunmap_local(pointers[stripe]); in finish_parity_scrub()
2500 kunmap_local(pointers[nr_data]); in finish_parity_scrub()
2504 kunmap_local(pointers[rbio->real_stripes - 1]); in finish_parity_scrub()
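finish_parity_scrub() recomputes parity into scratch pages (p_sector/q_sector, mapped at 2457/2463), compares the result against the parity the rbio already holds for the stripe being scrubbed, and rewrites it only on mismatch (2489-2490); on a match the sector can be skipped for writeback. A small self-contained sketch of that verify-then-repair step, using a hypothetical wrapper name:

#include <stdbool.h>
#include <stddef.h>
#include <string.h>

/*
 * Compare the recomputed parity against the stored copy and repair it
 * in place only when they differ.  Returns true when a writeback is
 * needed, mirroring the memcmp()/memcpy() pair at 2489-2490 above.
 */
static bool check_and_repair_parity(void *stored_parity,
				    const void *computed_parity,
				    size_t sectorsize)
{
	if (memcmp(stored_parity, computed_parity, sectorsize) == 0)
		return false;	/* parity is already correct */

	memcpy(stored_parity, computed_parity, sectorsize);
	return true;
}
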
2561 void **pointers = NULL; in recover_scrub_rbio() local
2567 * @pointers array stores the pointer for each sector. in recover_scrub_rbio()
2569 * @unmap_array stores copy of pointers that does not get reordered in recover_scrub_rbio()
2572 pointers = kcalloc(rbio->real_stripes, sizeof(void *), GFP_NOFS); in recover_scrub_rbio()
2574 if (!pointers || !unmap_array) { in recover_scrub_rbio()
2633 ret = recover_vertical(rbio, sector_nr, pointers, unmap_array); in recover_scrub_rbio()
2638 kfree(pointers); in recover_scrub_rbio()