-rw-r--r--  kern/list.h  2
-rw-r--r--  kern/slab.c  8
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/kern/list.h b/kern/list.h
index 0341471..ad782a8 100644
--- a/kern/list.h
+++ b/kern/list.h
@@ -240,7 +240,7 @@ static inline void list_add(struct list *prev, struct list *next,
 /*
  * Insert a node at the head of a list.
  */
-static inline void list_insert(struct list *list, struct list *node)
+static inline void list_insert_head(struct list *list, struct list *node)
 {
     list_add(list, list->next, node);
 }
diff --git a/kern/slab.c b/kern/slab.c
index c8a2b7c..0a4dbdf 100644
--- a/kern/slab.c
+++ b/kern/slab.c
@@ -878,7 +878,7 @@ static int kmem_cache_grow(struct kmem_cache *cache)
     simple_lock(&cache->lock);
     if (slab != NULL) {
-        list_insert(&cache->free_slabs, &slab->list_node);
+        list_insert_head(&cache->free_slabs, &slab->list_node);
         cache->nr_bufs += cache->bufs_per_slab;
         cache->nr_slabs++;
         cache->nr_free_slabs++;
@@ -957,7 +957,7 @@ static void * kmem_cache_alloc_from_slab(struct kmem_cache *cache)
     } else if (slab->nr_refs == 1) {
         /* The slab has become partial */
         list_remove(&slab->list_node);
-        list_insert(&cache->partial_slabs, &slab->list_node);
+        list_insert_head(&cache->partial_slabs, &slab->list_node);
         cache->nr_free_slabs--;
     }
@@ -1010,11 +1010,11 @@ static void kmem_cache_free_to_slab(struct kmem_cache *cache, void *buf)
         if (cache->bufs_per_slab > 1)
             list_remove(&slab->list_node);
-        list_insert(&cache->free_slabs, &slab->list_node);
+        list_insert_head(&cache->free_slabs, &slab->list_node);
         cache->nr_free_slabs++;
     } else if (slab->nr_refs == (cache->bufs_per_slab - 1)) {
         /* The slab has become partial */
-        list_insert(&cache->partial_slabs, &slab->list_node);
+        list_insert_head(&cache->partial_slabs, &slab->list_node);
     }
 }
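
For reference, the rename only makes the head-insertion semantics explicit at the call sites; the body of list_insert_head is identical to the old list_insert. Below is a minimal, self-contained sketch of how the renamed interface behaves, assuming the usual intrusive doubly linked list layout behind struct list (the field names, list_init, the list_remove body and the structof macro are illustrative stand-ins here, not copied from kern/list.h):

#include <stdio.h>
#include <stddef.h>

/* Illustrative intrusive list; the real definitions live in kern/list.h. */
struct list {
    struct list *prev;
    struct list *next;
};

static inline void list_init(struct list *list)
{
    list->prev = list;
    list->next = list;
}

/* Link node between prev and next (same contract as the list_add shown above). */
static inline void list_add(struct list *prev, struct list *next, struct list *node)
{
    node->prev = prev;
    node->next = next;
    prev->next = node;
    next->prev = node;
}

/* Insert a node at the head of a list (the renamed list_insert). */
static inline void list_insert_head(struct list *list, struct list *node)
{
    list_add(list, list->next, node);
}

static inline void list_remove(struct list *node)
{
    node->prev->next = node->next;
    node->next->prev = node->prev;
}

/* Toy stand-in for a slab being moved between the cache lists. */
struct slab {
    int id;
    struct list list_node;
};

#define structof(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

int main(void)
{
    struct list free_slabs, partial_slabs;
    struct slab a = { .id = 1 }, b = { .id = 2 };

    list_init(&free_slabs);
    list_init(&partial_slabs);

    /* Both slabs start out free; the newest insertion sits at the head. */
    list_insert_head(&free_slabs, &a.list_node);
    list_insert_head(&free_slabs, &b.list_node);

    /* Same pattern as kmem_cache_alloc_from_slab: slab b has become partial. */
    list_remove(&b.list_node);
    list_insert_head(&partial_slabs, &b.list_node);

    struct slab *s = structof(free_slabs.next, struct slab, list_node);
    printf("head of free_slabs: slab %d\n", s->id);      /* slab 1 */
    s = structof(partial_slabs.next, struct slab, list_node);
    printf("head of partial_slabs: slab %d\n", s->id);   /* slab 2 */
    return 0;
}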