@@ -770,8 +770,6 @@ static inline int memcg_cache_id(struct mem_cgroup *memcg)
 struct kmem_cache *__memcg_kmem_get_cache(struct kmem_cache *cachep);
 void __memcg_kmem_put_cache(struct kmem_cache *cachep);
 
-struct mem_cgroup *__mem_cgroup_from_kmem(void *ptr);
-
 static inline bool __memcg_kmem_bypass(gfp_t gfp)
 {
 	if (!memcg_kmem_enabled())
@@ -830,13 +828,6 @@ static __always_inline void memcg_kmem_put_cache(struct kmem_cache *cachep)
 	if (memcg_kmem_enabled())
 		__memcg_kmem_put_cache(cachep);
 }
-
-static __always_inline struct mem_cgroup *mem_cgroup_from_kmem(void *ptr)
-{
-	if (!memcg_kmem_enabled())
-		return NULL;
-	return __mem_cgroup_from_kmem(ptr);
-}
 #else
 #define for_each_memcg_cache_index(_idx)	\
 	for (; NULL; )
@@ -882,11 +873,5 @@ memcg_kmem_get_cache(struct kmem_cache *cachep, gfp_t gfp)
 static inline void memcg_kmem_put_cache(struct kmem_cache *cachep)
 {
 }
-
-static inline struct mem_cgroup *mem_cgroup_from_kmem(void *ptr)
-{
-	return NULL;
-}
 #endif /* CONFIG_MEMCG_KMEM */
 #endif /* _LINUX_MEMCONTROL_H */
-