@@ -72,7 +72,7 @@ static struct vgic_irq *vgic_get_lpi(struct kvm *kvm, u32 intid)
 	struct vgic_irq *irq = NULL;
 	unsigned long flags;
 
-	spin_lock_irqsave(&dist->lpi_list_lock, flags);
+	raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
 
 	list_for_each_entry(irq, &dist->lpi_list_head, lpi_list) {
 		if (irq->intid != intid)
@@ -88,7 +88,7 @@ static struct vgic_irq *vgic_get_lpi(struct kvm *kvm, u32 intid)
 	irq = NULL;
 
 out_unlock:
-	spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
+	raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
 
 	return irq;
 }
@@ -138,15 +138,15 @@ void vgic_put_irq(struct kvm *kvm, struct vgic_irq *irq)
 	if (irq->intid < VGIC_MIN_LPI)
 		return;
 
-	spin_lock_irqsave(&dist->lpi_list_lock, flags);
+	raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
 	if (!kref_put(&irq->refcount, vgic_irq_release)) {
-		spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
+		raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
 		return;
 	};
 
 	list_del(&irq->lpi_list);
 	dist->lpi_list_count--;
-	spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
+	raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
 
 	kfree(irq);
 }
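
For reference, below is a minimal sketch of the locking pattern these hunks convert to; the lock name example_lock and the function around it are illustrative only, not part of the patch. On PREEMPT_RT kernels an ordinary spinlock_t becomes a sleeping lock, while raw_spinlock_t keeps the traditional busy-waiting behaviour, which is why a lock taken from contexts that must not sleep is switched to the raw variant:

#include <linux/spinlock.h>

/* Illustrative lock, not from the patch. */
static DEFINE_RAW_SPINLOCK(example_lock);

static void example_critical_section(void)
{
	unsigned long flags;

	/*
	 * raw_spin_lock_irqsave() disables local interrupts and busy-waits
	 * even on PREEMPT_RT, so it is safe in contexts that cannot sleep.
	 */
	raw_spin_lock_irqsave(&example_lock, flags);
	/* ... short, non-sleeping critical section ... */
	raw_spin_unlock_irqrestore(&example_lock, flags);
}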