/*
 * xen_evtchn_trymask(pic, pin)
 *
 *	Try to mask the event channel `pin`: set its mask bit, then
 *	check whether an event is already pending.  On success (masked,
 *	nothing pending) return true.  If an event is pending, restore
 *	the mask bit to its prior state and return false, so the caller
 *	can service the event instead.
 */
static bool xen_evtchn_trymask(struct pic *pic, int pin) {
	volatile struct shared_info *s = HYPERVISOR_shared_info;
	unsigned long was_masked;

	/* Set the mask bit, remembering whether it was already set. */
	was_masked = xen_atomic_test_and_set_bit(&s->evtchn_mask[0], pin);

	/*
	 * Check the pending bit.  No ordering barrier is needed: even
	 * if this weren't x86, we are concerned only with interruption,
	 * not with writes by different CPUs.
	 */
	if (!xen_atomic_test_bit(&s->evtchn_pending[0], pin)) {
		/* Masked and not pending -- success. */
		return true;
	}

	/*
	 * Pending: undo our change (only if we were the ones who set
	 * the mask bit) and report failure.
	 */
	if (!was_masked)
		xen_atomic_clear_bit(&s->evtchn_mask[0], pin);
	return false;
}