|
@@ -16,24 +16,41 @@
|
|
/*
 * Branch-prediction hints: tell the compiler which way a condition
 * usually goes so the hot path is laid out contiguously.
 */
#define unlikely(x) (__builtin_expect(!!(x), 0))
#define likely(x) (__builtin_expect(!!(x), 1))

/*
 * Round x up to the next multiple of a.  Divide/multiply form is used
 * deliberately so 'a' does not have to be a power of two.
 */
#define ALIGN(x, a) (((x) + (a) - 1) / (a) * (a))

/*
 * <stdint.h> already provides SIZE_MAX; redefining a standard macro
 * with a different expansion is a constraint violation and at minimum
 * draws a -Wmacro-redefined warning.  Only supply the fallback when
 * the header has not been included.
 */
#ifndef SIZE_MAX
#define SIZE_MAX (~(size_t)0)
#endif
|
/* Minimal userspace stand-ins for kernel locking/allocation types. */
typedef pthread_spinlock_t spinlock_t;

/* Allocation-flag word; only __GFP_ZERO is honoured by this shim. */
typedef int gfp_t;

#define __GFP_ZERO 0x1		/* zero the allocation before returning it */

/*
 * kmalloc - allocate @size bytes, 64-byte aligned (cache-line friendly).
 * @size: number of bytes to allocate
 * @gfp:  allocation flags; __GFP_ZERO requests zeroed memory
 *
 * Returns NULL on allocation failure.  The size parameter is size_t
 * (not unsigned) so that kmalloc_array()'s overflow-checked size_t
 * product is not silently truncated on LP64 targets; this widening is
 * backward-compatible for all existing callers.  'static inline'
 * matches the sibling helpers and avoids -Wunused-function noise.
 */
static inline void *kmalloc(size_t size, gfp_t gfp)
{
	void *p = memalign(64, size);

	if (!p)
		return p;
	if (gfp & __GFP_ZERO)
		memset(p, 0, size);
	return p;
}
|
+static inline void *kzalloc(unsigned size, gfp_t flags)
|
|
|
|
+{
|
|
|
|
+ return kmalloc(size, flags | __GFP_ZERO);
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+static inline void *kmalloc_array(size_t n, size_t size, gfp_t flags)
|
|
|
|
+{
|
|
|
|
+ if (size != 0 && n > SIZE_MAX / size)
|
|
|
|
+ return NULL;
|
|
|
|
+ return kmalloc(n * size, flags);
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+static inline void *kcalloc(size_t n, size_t size, gfp_t flags)
|
|
|
|
+{
|
|
|
|
+ return kmalloc_array(n, size, flags | __GFP_ZERO);
|
|
|
|
+}
|
|
|
|
+
|
|
static void kfree(void *p)
|
|
static void kfree(void *p)
|
|
{
|
|
{
|
|
if (p)
|
|
if (p)
|