/openbmc/linux/drivers/md/

  dm-cache-target.c
    298  struct cache {
    422  struct cache *cache;
    434  static bool writethrough_mode(struct cache *cache)
    436  return cache->features.io_mode == CM_IO_WRITETHROUGH;
    439  static bool writeback_mode(struct cache *cache)
    441  return cache->features.io_mode == CM_IO_WRITEBACK;
    444  static inline bool passthrough_mode(struct cache *cache)
    446  return unlikely(cache->features.io_mode == CM_IO_PASSTHROUGH);
    451  static void wake_deferred_bio_worker(struct cache *cache)
    453  queue_work(cache->wq, &cache->deferred_bio_worker);
    [all …]
/openbmc/linux/arch/powerpc/kernel/

  cacheinfo.c
     43  struct cache *cache;
    117  struct cache {
    124  struct cache *next_local;    /* next cache of >= level */
    139  static const char *cache_type_string(const struct cache *cache)
    141  return cache_type_info[cache->type].name;
    144  static void cache_init(struct cache *cache, int type, int level,
    147  cache->type = type;
    148  cache->level = level;
    149  cache->ofnode = of_node_get(ofnode);
    150  cache->group_id = group_id;
    [all …]
/openbmc/linux/fs/fscache/

  cache.c
    26  struct fscache_cache *cache;
    28  cache = kzalloc(sizeof(*cache), GFP_KERNEL);
    29  if (cache) {
    31  cache->name = kstrdup(name, GFP_KERNEL);
    32  if (!cache->name) {
    33  kfree(cache);
    37  refcount_set(&cache->ref, 1);
    38  INIT_LIST_HEAD(&cache->cache_link);
    39  cache->debug_id = atomic_inc_return(&fscache_cache_debug_id);
    41  return cache;
    [all …]
/openbmc/linux/fs/cachefiles/

  daemon.c
     62  int (*handler)(struct cachefiles_cache *cache, char *args);
     92  struct cachefiles_cache *cache;
    105  cache = kzalloc(sizeof(struct cachefiles_cache), GFP_KERNEL);
    106  if (!cache) {
    111  mutex_init(&cache->daemon_mutex);
    112  init_waitqueue_head(&cache->daemon_pollwq);
    113  INIT_LIST_HEAD(&cache->volumes);
    114  INIT_LIST_HEAD(&cache->object_list);
    115  spin_lock_init(&cache->object_list_lock);
    116  refcount_set(&cache->unbind_pincount, 1);
    [all …]

  cache.c
     17  int cachefiles_add_cache(struct cachefiles_cache *cache)
     28  cache_cookie = fscache_acquire_cache(cache->tag);
     33  ret = cachefiles_get_security_ID(cache);
     37  cachefiles_begin_secure(cache, &saved_cred);
     40  ret = kern_path(cache->rootdirname, LOOKUP_DIRECTORY, &path);
     44  cache->mnt = path.mnt;
     78  ret = cachefiles_determine_cache_security(cache, root, &saved_cred);
     95  cache->bsize = stats.f_bsize;
     96  cache->bshift = ilog2(stats.f_bsize);
     99  cache->bsize, cache->bshift);
    [all …]
/openbmc/linux/arch/arm64/boot/dts/amd/

  amd-seattle-cpus.dtsi
    49  i-cache-size = <0xC000>;
    50  i-cache-line-size = <64>;
    51  i-cache-sets = <256>;
    52  d-cache-size = <0x8000>;
    53  d-cache-line-size = <64>;
    54  d-cache-sets = <256>;
    55  l2-cache = <&L2_0>;
    65  i-cache-size = <0xC000>;
    66  i-cache-line-size = <64>;
    67  i-cache-sets = <256>;
    [all …]
/openbmc/linux/arch/arm64/boot/dts/amazon/

  alpine-v3.dtsi
    28  d-cache-size = <0x8000>;
    29  d-cache-line-size = <64>;
    30  d-cache-sets = <256>;
    31  i-cache-size = <0xc000>;
    32  i-cache-line-size = <64>;
    33  i-cache-sets = <256>;
    34  next-level-cache = <&cluster0_l2>;
    42  d-cache-size = <0x8000>;
    43  d-cache-line-size = <64>;
    44  d-cache-sets = <256>;
    [all …]
/openbmc/linux/mm/

  swap_slots.c
    115  struct swap_slots_cache *cache;
    136  cache = &per_cpu(swp_slots, cpu);
    137  if (cache->slots || cache->slots_ret) {
    147  if (!cache->lock_initialized) {
    148  mutex_init(&cache->alloc_lock);
    149  spin_lock_init(&cache->free_lock);
    150  cache->lock_initialized = true;
    152  cache->nr = 0;
    153  cache->cur = 0;
    154  cache->n_ret = 0;
    [all …]
/openbmc/linux/fs/btrfs/tests/

  free-space-tests.c
    20  static int test_extents(struct btrfs_block_group *cache)
    27  ret = btrfs_add_free_space(cache, 0, SZ_4M);
    33  ret = btrfs_remove_free_space(cache, 0, SZ_4M);
    39  if (test_check_exists(cache, 0, SZ_4M)) {
    45  ret = btrfs_add_free_space(cache, 0, SZ_4M);
    51  ret = btrfs_remove_free_space(cache, 3 * SZ_1M, SZ_1M);
    57  ret = btrfs_remove_free_space(cache, 0, SZ_1M);
    63  ret = btrfs_remove_free_space(cache, SZ_2M, 4096);
    69  if (test_check_exists(cache, 0, SZ_1M)) {
    74  if (test_check_exists(cache, SZ_2M, 4096)) {
    [all …]

  free-space-tree-tests.c
     22  struct btrfs_block_group *cache,
     35  info = search_free_space_info(trans, cache, path, 0);
     52  end = cache->start + cache->length;
     60  bit = free_space_test_bit(cache, path, offset);
    109  struct btrfs_block_group *cache,
    118  info = search_free_space_info(trans, cache, path, 0);
    127  ret = __check_free_space_extents(trans, fs_info, cache, path, extents,
    134  ret = convert_free_space_to_extents(trans, cache, path);
    140  ret = convert_free_space_to_bitmaps(trans, cache, path);
    146  return __check_free_space_extents(trans, fs_info, cache, path, extents,
    [all …]
/openbmc/linux/fs/

  mbcache.c
     47  static unsigned long mb_cache_shrink(struct mb_cache *cache,
     50  static inline struct hlist_bl_head *mb_cache_entry_head(struct mb_cache *cache,
     53  return &cache->c_hash[hash_32(key, cache->c_bucket_bits)];
     74  int mb_cache_entry_create(struct mb_cache *cache, gfp_t mask, u32 key,
     82  if (cache->c_entry_count >= cache->c_max_entries)
     83  schedule_work(&cache->c_shrink_work);
     85  if (cache->c_entry_count >= 2*cache->c_max_entries)
     86  mb_cache_shrink(cache, SYNC_SHRINK_BATCH);
    106  head = mb_cache_entry_head(cache, key);
    117  spin_lock(&cache->c_list_lock);
    [all …]
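The mb_cache_entry_head() excerpt above folds the key down to c_bucket_bits bits and uses the result to index a table of 2^c_bucket_bits hash-list heads. A minimal sketch of that bucket-selection pattern follows; the names (bucket, entry_head, hash32) are illustrative, not the mbcache API.

#include <stdint.h>

/* Illustrative types, not the mbcache structures. */
struct entry {
    struct entry *next;
    uint32_t key;
};

struct bucket {
    struct entry *first;        /* head of a singly linked chain */
};

/* Multiplicative hash folded down to 'bits' bits (1..31), in the spirit
 * of the kernel's hash_32(). */
static uint32_t hash32(uint32_t val, unsigned int bits)
{
    return (val * 0x61C88647u) >> (32 - bits);
}

static struct bucket *entry_head(struct bucket *table,
                                 unsigned int bucket_bits, uint32_t key)
{
    return &table[hash32(key, bucket_bits)];
}

Keeping the table a power of two lets the bucket index be computed with a single multiply and shift, which is the same trade-off hash_32() makes.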
/openbmc/linux/drivers/acpi/acpica/

  utcache.c
    36  struct acpi_memory_list *cache;
    46  cache = acpi_os_allocate(sizeof(struct acpi_memory_list));
    47  if (!cache) {
    53  memset(cache, 0, sizeof(struct acpi_memory_list));
    54  cache->list_name = cache_name;
    55  cache->object_size = object_size;
    56  cache->max_depth = max_depth;
    58  *return_cache = cache;
    74  acpi_status acpi_os_purge_cache(struct acpi_memory_list *cache)
    81  if (!cache) {
    [all …]
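acpi_os_create_cache() above allocates a small descriptor, zeroes it, records the cache parameters, and hands it back through an output argument. A minimal sketch of that create path follows, using hypothetical names (object_cache, create_cache) rather than the ACPICA types.

#include <stddef.h>
#include <stdlib.h>
#include <string.h>

/* Illustrative descriptor, not struct acpi_memory_list. */
struct object_cache {
    const char *name;           /* for diagnostics only */
    size_t object_size;         /* size of each cached object */
    unsigned int max_depth;     /* upper bound on objects held for reuse */
    void *free_list;            /* reusable objects (unused in this sketch) */
};

static int create_cache(const char *name, size_t object_size,
                        unsigned int max_depth, struct object_cache **out)
{
    struct object_cache *cache;

    if (!name || !object_size || !out)
        return -1;              /* invalid parameters */

    cache = malloc(sizeof(*cache));
    if (!cache)
        return -1;              /* out of memory */

    memset(cache, 0, sizeof(*cache));
    cache->name = name;
    cache->object_size = object_size;
    cache->max_depth = max_depth;

    *out = cache;
    return 0;
}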
/openbmc/linux/fs/squashfs/

  cache.c
    53  struct squashfs_cache *cache, u64 block, int length)
    58  spin_lock(&cache->lock);
    61  for (i = cache->curr_blk, n = 0; n < cache->entries; n++) {
    62  if (cache->entry[i].block == block) {
    63  cache->curr_blk = i;
    66  i = (i + 1) % cache->entries;
    69  if (n == cache->entries) {
    74  if (cache->unused == 0) {
    75  cache->num_waiters++;
    76  spin_unlock(&cache->lock);
    [all …]
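squashfs_cache_get() above scans a small, fully associative table of cached blocks, starting at the slot that hit last time, and falls back to waiting when nothing is free. A minimal sketch of just the lookup half follows, with illustrative names (blk_cache, blk_cache_lookup) and none of the locking or waiter handling.

#include <stddef.h>
#include <stdint.h>

/* Illustrative types, not the squashfs structures. */
struct blk_entry {
    uint64_t block;             /* on-disk address of the cached data */
    void *data;                 /* decompressed contents */
};

struct blk_cache {
    struct blk_entry *entry;    /* small array of slots */
    int entries;                /* number of slots */
    int curr_blk;               /* slot of the most recent hit */
};

static struct blk_entry *blk_cache_lookup(struct blk_cache *cache, uint64_t block)
{
    int i = cache->curr_blk;

    for (int n = 0; n < cache->entries; n++) {
        if (cache->entry[i].block == block) {
            cache->curr_blk = i;        /* remember the hit for next time */
            return &cache->entry[i];
        }
        i = (i + 1) % cache->entries;
    }
    return NULL;                /* miss: caller reads and decompresses the block */
}

Starting the scan at the last hit is cheap insurance for the common case of repeated reads from the same metadata block.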
/openbmc/qemu/migration/

  page_cache.c
    45  PageCache *cache;
    61  cache = g_try_malloc(sizeof(*cache));
    62  if (!cache) {
    66  cache->page_size = page_size;
    67  cache->num_items = 0;
    68  cache->max_num_items = num_pages;
    70  trace_migration_pagecache_init(cache->max_num_items);
    73  cache->page_cache = g_try_malloc((cache->max_num_items) *
    74  sizeof(*cache->page_cache));
    75  if (!cache->page_cache) {
    [all …]
/openbmc/linux/Documentation/devicetree/bindings/cache/

  freescale-l2cache.txt
     3  L2 cache is present in Freescale's QorIQ and QorIQ Qonverge platforms.
     4  The cache bindings explained below are Devicetree Specification compliant
     9  "fsl,b4420-l2-cache-controller"
    10  "fsl,b4860-l2-cache-controller"
    11  "fsl,bsc9131-l2-cache-controller"
    12  "fsl,bsc9132-l2-cache-controller"
    13  "fsl,c293-l2-cache-controller"
    14  "fsl,mpc8536-l2-cache-controller"
    15  "fsl,mpc8540-l2-cache-controller"
    16  "fsl,mpc8541-l2-cache-controller"
    [all …]
/openbmc/linux/arch/arm64/boot/dts/ti/

  k3-am654.dtsi
    41  i-cache-size = <0x8000>;
    42  i-cache-line-size = <64>;
    43  i-cache-sets = <256>;
    44  d-cache-size = <0x8000>;
    45  d-cache-line-size = <64>;
    46  d-cache-sets = <128>;
    47  next-level-cache = <&L2_0>;
    55  i-cache-size = <0x8000>;
    56  i-cache-line-size = <64>;
    57  i-cache-sets = <256>;
    [all …]

  k3-am62a7.dtsi
    44  i-cache-size = <0x8000>;
    45  i-cache-line-size = <64>;
    46  i-cache-sets = <256>;
    47  d-cache-size = <0x8000>;
    48  d-cache-line-size = <64>;
    49  d-cache-sets = <128>;
    50  next-level-cache = <&L2_0>;
    58  i-cache-size = <0x8000>;
    59  i-cache-line-size = <64>;
    60  i-cache-sets = <256>;
    [all …]

  k3-am62p5.dtsi
    43  i-cache-size = <0x8000>;
    44  i-cache-line-size = <64>;
    45  i-cache-sets = <256>;
    46  d-cache-size = <0x8000>;
    47  d-cache-line-size = <64>;
    48  d-cache-sets = <128>;
    49  next-level-cache = <&l2_0>;
    58  i-cache-size = <0x8000>;
    59  i-cache-line-size = <64>;
    60  i-cache-sets = <256>;
    [all …]

  k3-j784s4.dtsi
    70  i-cache-size = <0xc000>;
    71  i-cache-line-size = <64>;
    72  i-cache-sets = <256>;
    73  d-cache-size = <0x8000>;
    74  d-cache-line-size = <64>;
    75  d-cache-sets = <256>;
    76  next-level-cache = <&L2_0>;
    84  i-cache-size = <0xc000>;
    85  i-cache-line-size = <64>;
    86  i-cache-sets = <256>;
    [all …]
/openbmc/u-boot/arch/arm/dts/

  k3-am654.dtsi
    41  i-cache-size = <0x8000>;
    42  i-cache-line-size = <64>;
    43  i-cache-sets = <256>;
    44  d-cache-size = <0x8000>;
    45  d-cache-line-size = <64>;
    46  d-cache-sets = <128>;
    47  next-level-cache = <&L2_0>;
    55  i-cache-size = <0x8000>;
    56  i-cache-line-size = <64>;
    57  i-cache-sets = <256>;
    [all …]
/openbmc/linux/arch/arm64/boot/dts/marvell/

  armada-ap806-quad.dtsi
    25  i-cache-size = <0xc000>;
    26  i-cache-line-size = <64>;
    27  i-cache-sets = <256>;
    28  d-cache-size = <0x8000>;
    29  d-cache-line-size = <64>;
    30  d-cache-sets = <256>;
    31  next-level-cache = <&l2_0>;
    40  i-cache-size = <0xc000>;
    41  i-cache-line-size = <64>;
    42  i-cache-sets = <256>;
    [all …]

  armada-ap807-quad.dtsi
    25  i-cache-size = <0xc000>;
    26  i-cache-line-size = <64>;
    27  i-cache-sets = <256>;
    28  d-cache-size = <0x8000>;
    29  d-cache-line-size = <64>;
    30  d-cache-sets = <256>;
    31  next-level-cache = <&l2_0>;
    40  i-cache-size = <0xc000>;
    41  i-cache-line-size = <64>;
    42  i-cache-sets = <256>;
    [all …]
/openbmc/u-boot/arch/x86/lib/

  mrccache.c
    21  struct mrc_data_container *cache)
    24  u32 mrc_size = sizeof(*cache) + cache->data_size;
    25  u8 *region_ptr = (u8 *)cache;
    37  static int is_mrc_cache(struct mrc_data_container *cache)
    39  return cache && (cache->signature == MRC_DATA_SIGNATURE);
    44  struct mrc_data_container *cache, *next;
    50  cache = NULL;
    56  cache = next;
    68  if (cache->checksum != compute_ip_checksum(cache->data,
    69  cache->data_size)) {
    [all …]
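mrccache_find_current() above walks variable-length records laid out back to back in a flash region, each introduced by a signature, and keeps the last one whose checksum verifies. A minimal sketch of that scan follows, with hypothetical names, a toy additive checksum in place of compute_ip_checksum(), and the assumption that the region is suitably aligned for the record header.

#include <stddef.h>
#include <stdint.h>

/* Illustrative record layout, not the U-Boot MRC structures. */
#define REC_SIGNATURE 0x4d435242u       /* arbitrary example value */

struct rec_header {
    uint32_t signature;
    uint32_t data_size;
    uint32_t checksum;
    uint8_t data[];
};

static uint32_t simple_checksum(const uint8_t *p, uint32_t len)
{
    uint32_t sum = 0;

    while (len--)
        sum += *p++;
    return sum;
}

static struct rec_header *find_current(uint8_t *region, size_t region_size)
{
    uint8_t *p = region, *end = region + region_size;
    struct rec_header *latest = NULL;

    while (p + sizeof(struct rec_header) <= end) {
        struct rec_header *rec = (struct rec_header *)p;

        if (rec->signature != REC_SIGNATURE)
            break;              /* ran past the last record written */
        if (p + sizeof(*rec) + rec->data_size > end)
            break;              /* truncated record, stop scanning */
        if (rec->checksum == simple_checksum(rec->data, rec->data_size))
            latest = rec;       /* later records supersede earlier ones */
        p += sizeof(*rec) + rec->data_size;
    }
    return latest;
}

Appending new records and letting older ones go stale avoids rewriting flash on every update; the scan stays cheap because the signature check stops at the first unwritten block.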
/openbmc/linux/fs/nfs/

  nfs42xattr.c
     64  struct nfs4_xattr_cache *cache;
    106  nfs4_xattr_hash_init(struct nfs4_xattr_cache *cache)
    111  INIT_HLIST_HEAD(&cache->buckets[i].hlist);
    112  spin_lock_init(&cache->buckets[i].lock);
    113  cache->buckets[i].cache = cache;
    114  cache->buckets[i].draining = false;
    270  struct nfs4_xattr_cache *cache;
    273  cache = container_of(kref, struct nfs4_xattr_cache, ref);
    276  if (WARN_ON(!hlist_empty(&cache->buckets[i].hlist)))
    278  cache->buckets[i].draining = false;
    [all …]
/openbmc/linux/io_uring/

  alloc_cache.h
    13  static inline bool io_alloc_cache_put(struct io_alloc_cache *cache,
    16  if (cache->nr_cached < cache->max_cached) {
    17  cache->nr_cached++;
    18  wq_stack_add_head(&entry->node, &cache->list);
    26  static inline bool io_alloc_cache_empty(struct io_alloc_cache *cache)
    28  return !cache->list.next;
    31  static inline struct io_cache_entry *io_alloc_cache_get(struct io_alloc_cache *cache)
    33  if (cache->list.next) {
    36  entry = container_of(cache->list.next, struct io_cache_entry, node);
    37  kasan_unpoison_range(entry, cache->elem_size);
    [all …]
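alloc_cache.h above keeps a bounded, intrusive stack of reusable entries: put() caches an object unless the cache already holds max_cached entries, and get() pops one if any is available. A minimal standalone sketch of that pattern follows, with illustrative names rather than the io_uring API, and without the KASAN poisoning the real code does.

#include <stdbool.h>
#include <stddef.h>

/* Illustrative types, not the io_uring structures. */
struct cache_entry {
    struct cache_entry *next;
};

struct obj_cache {
    struct cache_entry *head;
    unsigned int nr_cached;
    unsigned int max_cached;
};

static bool obj_cache_put(struct obj_cache *cache, struct cache_entry *entry)
{
    if (cache->nr_cached >= cache->max_cached)
        return false;           /* full: caller frees the object instead */
    entry->next = cache->head;
    cache->head = entry;
    cache->nr_cached++;
    return true;
}

static struct cache_entry *obj_cache_get(struct obj_cache *cache)
{
    struct cache_entry *entry = cache->head;

    if (!entry)
        return NULL;            /* empty: caller allocates a fresh object */
    cache->head = entry->next;
    cache->nr_cached--;
    return entry;
}

Bounding the stack with max_cached keeps the memory held by an idle context predictable; anything over the limit simply falls back to the normal allocator.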