@@ -540,9 +540,9 @@ static int emulate_nm(struct x86_emulate_ctxt *ctxt)
 	return emulate_exception(ctxt, NM_VECTOR, 0, false);
 }
 
-static int linearize(struct x86_emulate_ctxt *ctxt,
+static int __linearize(struct x86_emulate_ctxt *ctxt,
 		     struct segmented_address addr,
-		     unsigned size, bool write,
+		     unsigned size, bool write, bool fetch,
 		     ulong *linear)
 {
 	struct decode_cache *c = &ctxt->decode;
@@ -569,7 +569,7 @@ static int linearize(struct x86_emulate_ctxt *ctxt,
 		if (((desc.type & 8) || !(desc.type & 2)) && write)
 			goto bad;
 		/* unreadable code segment */
-		if ((desc.type & 8) && !(desc.type & 2))
+		if (!fetch && (desc.type & 8) && !(desc.type & 2))
 			goto bad;
 		lim = desc_limit_scaled(&desc);
 		if ((desc.type & 8) || !(desc.type & 4)) {
@@ -602,7 +602,7 @@ static int linearize(struct x86_emulate_ctxt *ctxt,
 		}
 		break;
 	}
-	if (c->ad_bytes != 8)
+	if (fetch ? ctxt->mode != X86EMUL_MODE_PROT64 : c->ad_bytes != 8)
 		la &= (u32)-1;
 	*linear = la;
 	return X86EMUL_CONTINUE;
@@ -613,6 +613,15 @@ bad:
 		return emulate_gp(ctxt, addr.seg);
 }
 
+static int linearize(struct x86_emulate_ctxt *ctxt,
+		     struct segmented_address addr,
+		     unsigned size, bool write,
+		     ulong *linear)
+{
+	return __linearize(ctxt, addr, size, write, false, linear);
+}
+
+
 static int segmented_read_std(struct x86_emulate_ctxt *ctxt,
 			      struct segmented_address addr,
 			      void *data,
@@ -637,11 +646,13 @@ static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
 	int size, cur_size;
 
 	if (eip == fc->end) {
-		unsigned long linear = eip + ctxt->cs_base;
-		if (ctxt->mode != X86EMUL_MODE_PROT64)
-			linear &= (u32)-1;
+		unsigned long linear;
+		struct segmented_address addr = { .seg=VCPU_SREG_CS, .ea=eip};
 		cur_size = fc->end - fc->start;
 		size = min(15UL - cur_size, PAGE_SIZE - offset_in_page(eip));
+		rc = __linearize(ctxt, addr, size, false, true, &linear);
+		if (rc != X86EMUL_CONTINUE)
+			return rc;
 		rc = ops->fetch(linear, fc->data + cur_size,
 				size, ctxt->vcpu, &ctxt->exception);
 		if (rc != X86EMUL_CONTINUE)
@@ -3127,7 +3138,6 @@ x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len)
 	c->fetch.end = c->fetch.start + insn_len;
 	if (insn_len > 0)
 		memcpy(c->fetch.data, insn, insn_len);
-	ctxt->cs_base = seg_base(ctxt, ops, VCPU_SREG_CS);
 
 	switch (mode) {
 	case X86EMUL_MODE_REAL: