Lines matching refs: t
833 struct mmc_test_area *t = &test->area; in mmc_test_nonblock_transfer() local
834 struct scatterlist *sg = t->sg; in mmc_test_nonblock_transfer()
835 struct scatterlist *sg_areq = t->sg_areq; in mmc_test_nonblock_transfer()
849 mmc_test_prepare_mrq(test, mrq, sg, t->sg_len, dev_addr, in mmc_test_nonblock_transfer()
850 t->blocks, 512, write); in mmc_test_nonblock_transfer()
860 dev_addr += t->blocks; in mmc_test_nonblock_transfer()
1386 struct mmc_test_area *t = &test->area; in mmc_test_area_map() local
1390 t->blocks = sz >> 9; in mmc_test_area_map()
1393 err = mmc_test_map_sg_max_scatter(t->mem, sz, t->sg, in mmc_test_area_map()
1394 t->max_segs, t->max_seg_sz, in mmc_test_area_map()
1395 &t->sg_len); in mmc_test_area_map()
1397 err = mmc_test_map_sg(t->mem, sz, t->sg, 1, t->max_segs, in mmc_test_area_map()
1398 t->max_seg_sz, &t->sg_len, min_sg_len); in mmc_test_area_map()
1405 err = mmc_test_map_sg_max_scatter(t->mem, sz, t->sg_areq, in mmc_test_area_map()
1406 t->max_segs, t->max_seg_sz, in mmc_test_area_map()
1409 err = mmc_test_map_sg(t->mem, sz, t->sg_areq, 1, t->max_segs, in mmc_test_area_map()
1410 t->max_seg_sz, &sg_len, min_sg_len); in mmc_test_area_map()
1412 if (!err && sg_len != t->sg_len) in mmc_test_area_map()
1428 struct mmc_test_area *t = &test->area; in mmc_test_area_transfer() local
1430 return mmc_test_simple_transfer(test, t->sg, t->sg_len, dev_addr, in mmc_test_area_transfer()
1431 t->blocks, 512, write); in mmc_test_area_transfer()
1451 struct mmc_test_area *t = &test->area; in mmc_test_area_io_seq() local
1454 if (t->max_seg_sz >= PAGE_SIZE) in mmc_test_area_io_seq()
1455 max_tfr = t->max_segs * PAGE_SIZE; in mmc_test_area_io_seq()
1457 max_tfr = t->max_segs * t->max_seg_sz; in mmc_test_area_io_seq()
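For illustration (hypothetical host limits, not taken from the listing): with 128 segments of 64 KiB each and a 4 KiB PAGE_SIZE, max_seg_sz >= PAGE_SIZE holds, so the sequential-transfer ceiling becomes 128 * 4096 = 512 KiB rather than 128 * 65536 = 8 MiB.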
1501 struct mmc_test_area *t = &test->area; in mmc_test_area_fill() local
1503 return mmc_test_area_io(test, t->max_tfr, t->dev_addr, 1, 0, 0); in mmc_test_area_fill()
1511 struct mmc_test_area *t = &test->area; in mmc_test_area_erase() local
1516 return mmc_erase(test->card, t->dev_addr, t->max_sz >> 9, in mmc_test_area_erase()
1525 struct mmc_test_area *t = &test->area; in mmc_test_area_cleanup() local
1527 kfree(t->sg); in mmc_test_area_cleanup()
1528 kfree(t->sg_areq); in mmc_test_area_cleanup()
1529 mmc_test_free_mem(t->mem); in mmc_test_area_cleanup()
1543 struct mmc_test_area *t = &test->area; in mmc_test_area_init() local
1553 t->max_sz = sz; in mmc_test_area_init()
1554 while (t->max_sz < 4 * 1024 * 1024) in mmc_test_area_init()
1555 t->max_sz += sz; in mmc_test_area_init()
1556 while (t->max_sz > TEST_AREA_MAX_SIZE && t->max_sz > sz) in mmc_test_area_init()
1557 t->max_sz -= sz; in mmc_test_area_init()
1559 t->max_segs = test->card->host->max_segs; in mmc_test_area_init()
1560 t->max_seg_sz = test->card->host->max_seg_size; in mmc_test_area_init()
1561 t->max_seg_sz -= t->max_seg_sz % 512; in mmc_test_area_init()
1563 t->max_tfr = t->max_sz; in mmc_test_area_init()
1564 if (t->max_tfr >> 9 > test->card->host->max_blk_count) in mmc_test_area_init()
1565 t->max_tfr = test->card->host->max_blk_count << 9; in mmc_test_area_init()
1566 if (t->max_tfr > test->card->host->max_req_size) in mmc_test_area_init()
1567 t->max_tfr = test->card->host->max_req_size; in mmc_test_area_init()
1568 if (t->max_tfr / t->max_seg_sz > t->max_segs) in mmc_test_area_init()
1569 t->max_tfr = t->max_segs * t->max_seg_sz; in mmc_test_area_init()
1577 t->mem = mmc_test_alloc_mem(min_sz, t->max_tfr, t->max_segs, in mmc_test_area_init()
1578 t->max_seg_sz); in mmc_test_area_init()
1579 if (!t->mem) in mmc_test_area_init()
1582 t->sg = kmalloc_array(t->max_segs, sizeof(*t->sg), GFP_KERNEL); in mmc_test_area_init()
1583 if (!t->sg) { in mmc_test_area_init()
1588 t->sg_areq = kmalloc_array(t->max_segs, sizeof(*t->sg_areq), in mmc_test_area_init()
1590 if (!t->sg_areq) { in mmc_test_area_init()
1595 t->dev_addr = mmc_test_capacity(test->card) / 2; in mmc_test_area_init()
1596 t->dev_addr -= t->dev_addr % (t->max_sz >> 9); in mmc_test_area_init()
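Taken together, the mmc_test_area_init() lines above derive the test-area geometry from the host and card limits: max_sz grows in multiples of the preferred size to at least 4 MiB and is then trimmed back under TEST_AREA_MAX_SIZE, max_seg_sz is rounded down to a whole number of 512-byte blocks, max_tfr is clamped by max_blk_count, max_req_size and the segment budget, and dev_addr lands at half the card capacity aligned to max_sz sectors. A minimal user-space sketch of that arithmetic follows; the host limits, the capacity and the TEST_AREA_MAX_SIZE stand-in are all assumed values, not taken from any real host.

#include <stdio.h>

#define TEST_AREA_MAX_SIZE (128 * 1024 * 1024)  /* assumed cap, bytes */

int main(void)
{
	unsigned long sz = 512 * 1024;        /* assumed preferred size, bytes */
	unsigned int max_segs = 128;          /* assumed host->max_segs */
	unsigned int max_seg_sz = 65536;      /* assumed host->max_seg_size */
	unsigned int max_blk_count = 65535;   /* assumed host->max_blk_count */
	unsigned int max_req_size = 524288;   /* assumed host->max_req_size */
	unsigned long capacity = 16UL << 20;  /* assumed card size, 512-byte sectors */
	unsigned long max_sz, max_tfr, dev_addr;

	/* Grow max_sz to at least 4 MiB, then trim it back under the cap. */
	max_sz = sz;
	while (max_sz < 4 * 1024 * 1024)
		max_sz += sz;
	while (max_sz > TEST_AREA_MAX_SIZE && max_sz > sz)
		max_sz -= sz;

	/* Segment size must be a whole number of 512-byte blocks. */
	max_seg_sz -= max_seg_sz % 512;

	/* Clamp the single-transfer size by each host limit in turn. */
	max_tfr = max_sz;
	if (max_tfr >> 9 > max_blk_count)
		max_tfr = (unsigned long)max_blk_count << 9;
	if (max_tfr > max_req_size)
		max_tfr = max_req_size;
	if (max_tfr / max_seg_sz > max_segs)
		max_tfr = (unsigned long)max_segs * max_seg_sz;

	/* Start in the middle of the card, aligned to max_sz sectors. */
	dev_addr = capacity / 2;
	dev_addr -= dev_addr % (max_sz >> 9);

	printf("max_sz=%lu max_tfr=%lu dev_addr=%lu\n", max_sz, max_tfr, dev_addr);
	return 0;
}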
1652 struct mmc_test_area *t = &test->area; in mmc_test_best_performance() local
1654 return mmc_test_area_io(test, t->max_tfr, t->dev_addr, write, in mmc_test_best_performance()
1695 struct mmc_test_area *t = &test->area; in mmc_test_profile_read_perf() local
1700 for (sz = 512; sz < t->max_tfr; sz <<= 1) { in mmc_test_profile_read_perf()
1701 dev_addr = t->dev_addr + (sz >> 9); in mmc_test_profile_read_perf()
1706 sz = t->max_tfr; in mmc_test_profile_read_perf()
1707 dev_addr = t->dev_addr; in mmc_test_profile_read_perf()
1716 struct mmc_test_area *t = &test->area; in mmc_test_profile_write_perf() local
1724 for (sz = 512; sz < t->max_tfr; sz <<= 1) { in mmc_test_profile_write_perf()
1725 dev_addr = t->dev_addr + (sz >> 9); in mmc_test_profile_write_perf()
1733 sz = t->max_tfr; in mmc_test_profile_write_perf()
1734 dev_addr = t->dev_addr; in mmc_test_profile_write_perf()
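The read and write profile lines above share one sweep shape: transfer sizes double from 512 bytes up to, but not including, t->max_tfr, each run offset from the area base by the transfer size in sectors (sz >> 9), and a final run at t->max_tfr uses the base address itself. A small sketch of that loop with a hypothetical do_one_transfer() helper and assumed values for the maximum transfer size and base address:

#include <stdio.h>

/* Hypothetical helper standing in for mmc_test_area_io() in the listing. */
static int do_one_transfer(unsigned long sz, unsigned long dev_addr)
{
	printf("transfer %lu bytes at sector %lu\n", sz, dev_addr);
	return 0;
}

int main(void)
{
	unsigned long max_tfr = 512 * 1024;   /* assumed t->max_tfr */
	unsigned long base_addr = 8388608;    /* assumed t->dev_addr (sectors) */
	unsigned long sz;
	int ret;

	/* Power-of-two sizes below max_tfr, each offset by sz in sectors. */
	for (sz = 512; sz < max_tfr; sz <<= 1) {
		ret = do_one_transfer(sz, base_addr + (sz >> 9));
		if (ret)
			return ret;
	}

	/* Final run: the full maximum transfer at the area base address. */
	return do_one_transfer(max_tfr, base_addr);
}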
1743 struct mmc_test_area *t = &test->area; in mmc_test_profile_trim_perf() local
1755 for (sz = 512; sz < t->max_sz; sz <<= 1) { in mmc_test_profile_trim_perf()
1756 dev_addr = t->dev_addr + (sz >> 9); in mmc_test_profile_trim_perf()
1764 dev_addr = t->dev_addr; in mmc_test_profile_trim_perf()
1776 struct mmc_test_area *t = &test->area; in mmc_test_seq_read_perf() local
1781 cnt = t->max_sz / sz; in mmc_test_seq_read_perf()
1782 dev_addr = t->dev_addr; in mmc_test_seq_read_perf()
1800 struct mmc_test_area *t = &test->area; in mmc_test_profile_seq_read_perf() local
1804 for (sz = 512; sz < t->max_tfr; sz <<= 1) { in mmc_test_profile_seq_read_perf()
1809 sz = t->max_tfr; in mmc_test_profile_seq_read_perf()
1815 struct mmc_test_area *t = &test->area; in mmc_test_seq_write_perf() local
1823 cnt = t->max_sz / sz; in mmc_test_seq_write_perf()
1824 dev_addr = t->dev_addr; in mmc_test_seq_write_perf()
1842 struct mmc_test_area *t = &test->area; in mmc_test_profile_seq_write_perf() local
1846 for (sz = 512; sz < t->max_tfr; sz <<= 1) { in mmc_test_profile_seq_write_perf()
1851 sz = t->max_tfr; in mmc_test_profile_seq_write_perf()
1860 struct mmc_test_area *t = &test->area; in mmc_test_profile_seq_trim_perf() local
1872 for (sz = 512; sz <= t->max_sz; sz <<= 1) { in mmc_test_profile_seq_trim_perf()
1879 cnt = t->max_sz / sz; in mmc_test_profile_seq_trim_perf()
1880 dev_addr = t->dev_addr; in mmc_test_profile_seq_trim_perf()
1943 struct mmc_test_area *t = &test->area; in mmc_test_random_perf() local
1948 for (sz = 512; sz < t->max_tfr; sz <<= 1) { in mmc_test_random_perf()
1965 sz = t->max_tfr; in mmc_test_random_perf()
1995 struct mmc_test_area *t = &test->area; in mmc_test_seq_perf() local
2000 sz = t->max_tfr; in mmc_test_seq_perf()
2009 if (t->max_seg_sz >= PAGE_SIZE) in mmc_test_seq_perf()
2010 max_tfr = t->max_segs * PAGE_SIZE; in mmc_test_seq_perf()
2012 max_tfr = t->max_segs * t->max_seg_sz; in mmc_test_seq_perf()
2084 struct mmc_test_area *t = &test->area; in mmc_test_rw_multiple() local
2090 if (reqsize > t->max_tfr) in mmc_test_rw_multiple()
2091 reqsize = t->max_tfr; in mmc_test_rw_multiple()
2360 struct mmc_test_area *t = &test->area; in mmc_test_ongoing_transfer() local
2376 mmc_test_prepare_mrq(test, mrq, t->sg, t->sg_len, dev_addr, t->blocks, in mmc_test_ongoing_transfer()
2379 if (use_sbc && t->blocks > 1 && !mrq->sbc) { in mmc_test_ongoing_transfer()
2456 if (repeat_cmd && (t->blocks + 1) << 9 > t->max_tfr) in mmc_test_ongoing_transfer()
2458 mmc_hostname(test->card->host), count, t->blocks); in mmc_test_ongoing_transfer()
2472 struct mmc_test_area *t = &test->area; in __mmc_test_cmds_during_tfr() local
2482 ret = mmc_test_ongoing_transfer(test, t->dev_addr, use_sbc, 0, write, in __mmc_test_cmds_during_tfr()
2487 return mmc_test_ongoing_transfer(test, t->dev_addr, use_sbc, 1, write, in __mmc_test_cmds_during_tfr()
2494 struct mmc_test_area *t = &test->area; in mmc_test_cmds_during_tfr() local
2498 for (sz = 512; sz <= t->max_tfr; sz += 512) { in mmc_test_cmds_during_tfr()
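The final group sweeps linearly instead of doubling: every multiple of 512 bytes up to and including t->max_tfr is tried, so each block count gets a transfer with commands issued while it is in flight. A sketch of that loop shape, with a hypothetical cmds_during_tfr() helper and a deliberately small assumed maximum so the demo stays short:

#include <stdio.h>

/* Hypothetical helper standing in for __mmc_test_cmds_during_tfr(). */
static int cmds_during_tfr(unsigned long sz)
{
	printf("%lu-byte transfer with commands sent during it\n", sz);
	return 0;
}

int main(void)
{
	unsigned long max_tfr = 4096;   /* assumed t->max_tfr, kept small for the demo */
	unsigned long sz;
	int ret;

	/* Every block count from 1 up to max_tfr / 512, inclusive. */
	for (sz = 512; sz <= max_tfr; sz += 512) {
		ret = cmds_during_tfr(sz);
		if (ret)
			return ret;
	}
	return 0;
}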