@@ -40,6 +40,10 @@

 #ifdef CONFIG_RANDOM32_SELFTEST
 static void __init prandom_state_selftest(void);
+#else
+static inline void prandom_state_selftest(void)
+{
+}
 #endif

 static DEFINE_PER_CPU(struct rnd_state, net_rand_state);
@@ -53,8 +57,7 @@ static DEFINE_PER_CPU(struct rnd_state, net_rand_state);
  */
 u32 prandom_u32_state(struct rnd_state *state)
 {
-#define TAUSWORTHE(s,a,b,c,d) ((s&c)<<d) ^ (((s <<a) ^ s)>>b)
-
+#define TAUSWORTHE(s, a, b, c, d) ((s & c) << d) ^ (((s << a) ^ s) >> b)
 	state->s1 = TAUSWORTHE(state->s1,  6U, 13U, 4294967294U, 18U);
 	state->s2 = TAUSWORTHE(state->s2,  2U, 27U, 4294967288U,  2U);
 	state->s3 = TAUSWORTHE(state->s3, 13U, 21U, 4294967280U,  7U);
@@ -147,21 +150,25 @@ static void prandom_warmup(struct rnd_state *state)
 	prandom_u32_state(state);
 }

-static void prandom_seed_very_weak(struct rnd_state *state, u32 seed)
+static u32 __extract_hwseed(void)
 {
-	/* Note: This sort of seeding is ONLY used in test cases and
-	 * during boot at the time from core_initcall until late_initcall
-	 * as we don't have a stronger entropy source available yet.
-	 * After late_initcall, we reseed entire state, we have to (!),
-	 * otherwise an attacker just needs to search 32 bit space to
-	 * probe for our internal 128 bit state if he knows a couple
-	 * of prandom32 outputs!
-	 */
-#define LCG(x)	((x) * 69069U)	/* super-duper LCG */
-	state->s1 = __seed(LCG(seed),        2U);
-	state->s2 = __seed(LCG(state->s1),   8U);
-	state->s3 = __seed(LCG(state->s2),  16U);
-	state->s4 = __seed(LCG(state->s3), 128U);
+	u32 val = 0;
+
+	(void)(arch_get_random_seed_int(&val) ||
+	       arch_get_random_int(&val));
+
+	return val;
+}
+
+static void prandom_seed_early(struct rnd_state *state, u32 seed,
+			       bool mix_with_hwseed)
+{
+#define LCG(x)	 ((x) * 69069U)	/* super-duper LCG */
+#define HWSEED() (mix_with_hwseed ? __extract_hwseed() : 0)
+	state->s1 = __seed(HWSEED() ^ LCG(seed),        2U);
+	state->s2 = __seed(HWSEED() ^ LCG(state->s1),   8U);
+	state->s3 = __seed(HWSEED() ^ LCG(state->s2),  16U);
+	state->s4 = __seed(HWSEED() ^ LCG(state->s3), 128U);
 }

 /**
@@ -194,14 +201,13 @@ static int __init prandom_init(void)
 {
 	int i;

-#ifdef CONFIG_RANDOM32_SELFTEST
 	prandom_state_selftest();
-#endif

 	for_each_possible_cpu(i) {
 		struct rnd_state *state = &per_cpu(net_rand_state,i);
+		u32 weak_seed = (i + jiffies) ^ random_get_entropy();

-		prandom_seed_very_weak(state, (i + jiffies) ^ random_get_entropy());
+		prandom_seed_early(state, weak_seed, true);
 		prandom_warmup(state);
 	}

@@ -210,6 +216,7 @@ static int __init prandom_init(void)
 core_initcall(prandom_init);

 static void __prandom_timer(unsigned long dontcare);
+
 static DEFINE_TIMER(seed_timer, __prandom_timer, 0, 0);

 static void __prandom_timer(unsigned long dontcare)
@@ -419,7 +426,7 @@ static void __init prandom_state_selftest(void)
 	for (i = 0; i < ARRAY_SIZE(test1); i++) {
 		struct rnd_state state;

-		prandom_seed_very_weak(&state, test1[i].seed);
+		prandom_seed_early(&state, test1[i].seed, false);
 		prandom_warmup(&state);

 		if (test1[i].result != prandom_u32_state(&state))
@@ -434,7 +441,7 @@ static void __init prandom_state_selftest(void)
 	for (i = 0; i < ARRAY_SIZE(test2); i++) {
 		struct rnd_state state;

-		prandom_seed_very_weak(&state, test2[i].seed);
+		prandom_seed_early(&state, test2[i].seed, false);
 		prandom_warmup(&state);

 		for (j = 0; j < test2[i].iteration - 1; j++)