@@ -29,10 +29,10 @@ static ext_int_info_t ext_int_info_hwc;
 /* Lock to protect internal data consistency. */
 static DEFINE_SPINLOCK(sclp_lock);
 
-/* Mask of events that we can receive from the sclp interface. */
+/* Mask of events that we can send to the sclp interface. */
 static sccb_mask_t sclp_receive_mask;
 
-/* Mask of events that we can send to the sclp interface. */
+/* Mask of events that we can receive from the sclp interface. */
 static sccb_mask_t sclp_send_mask;
 
 /* List of registered event listeners and senders. */
@@ -380,7 +380,7 @@ sclp_interrupt_handler(__u16 code)
                 }
                 sclp_running_state = sclp_running_state_idle;
         }
-        if (evbuf_pending && sclp_receive_mask != 0 &&
+        if (evbuf_pending &&
             sclp_activation_state == sclp_activation_state_active)
                 __sclp_queue_read_req();
         spin_unlock(&sclp_lock);
@@ -459,8 +459,8 @@ sclp_dispatch_state_change(void)
                 reg = NULL;
                 list_for_each(l, &sclp_reg_list) {
                         reg = list_entry(l, struct sclp_register, list);
-                        receive_mask = reg->receive_mask & sclp_receive_mask;
-                        send_mask = reg->send_mask & sclp_send_mask;
+                        receive_mask = reg->send_mask & sclp_receive_mask;
+                        send_mask = reg->receive_mask & sclp_send_mask;
                         if (reg->sclp_receive_mask != receive_mask ||
                             reg->sclp_send_mask != send_mask) {
                                 reg->sclp_receive_mask = receive_mask;
@@ -615,8 +615,8 @@ struct init_sccb {
         u16 mask_length;
         sccb_mask_t receive_mask;
         sccb_mask_t send_mask;
-        sccb_mask_t sclp_send_mask;
         sccb_mask_t sclp_receive_mask;
+        sccb_mask_t sclp_send_mask;
 } __attribute__((packed));
 
 /* Prepare init mask request. Called while sclp_lock is locked. */