Mercurial > repos > blastem
comparison z80_to_x86.c @ 666:b68039895627
In theory, the Z80 core should work on 32-bit builds now; however, I suspect there is some code that cannot deal with most of the Z80 registers not having a native register, so more work will be needed
author | Michael Pavone <pavone@retrodev.com> |
---|---|
date | Fri, 02 Jan 2015 13:47:34 -0800 |
parents | d0943769353b |
children | 30ccf56842d6 |
comparison
equal
deleted
inserted
replaced
665:d0943769353b | 666:b68039895627 |
---|---|
122 } | 122 } |
123 } else if(opts->regs[inst->ea_reg] >= 0) { | 123 } else if(opts->regs[inst->ea_reg] >= 0) { |
124 ea->base = opts->regs[inst->ea_reg]; | 124 ea->base = opts->regs[inst->ea_reg]; |
125 if (ea->base >= AH && ea->base <= BH && inst->reg != Z80_UNUSED && inst->reg != Z80_USE_IMMED) { | 125 if (ea->base >= AH && ea->base <= BH && inst->reg != Z80_UNUSED && inst->reg != Z80_USE_IMMED) { |
126 uint8_t other_reg = opts->regs[inst->reg]; | 126 uint8_t other_reg = opts->regs[inst->reg]; |
127 #ifdef X86_64 | |
127 if (other_reg >= R8 || (other_reg >= RSP && other_reg <= RDI)) { | 128 if (other_reg >= R8 || (other_reg >= RSP && other_reg <= RDI)) { |
128 //we can't mix an *H reg with a register that requires the REX prefix | 129 //we can't mix an *H reg with a register that requires the REX prefix |
129 ea->base = opts->regs[z80_low_reg(inst->ea_reg)]; | 130 ea->base = opts->regs[z80_low_reg(inst->ea_reg)]; |
130 ror_ir(code, 8, ea->base, SZ_W); | 131 ror_ir(code, 8, ea->base, SZ_W); |
131 } | 132 } |
133 #endif | |
132 } | 134 } |
133 } else { | 135 } else { |
134 ea->mode = MODE_REG_DISPLACE8; | 136 ea->mode = MODE_REG_DISPLACE8; |
135 ea->base = opts->gen.context_reg; | 137 ea->base = opts->gen.context_reg; |
136 ea->disp = offsetof(z80_context, regs) + inst->ea_reg; | 138 ea->disp = offsetof(z80_context, regs) + inst->ea_reg; |
222 } else { | 224 } else { |
223 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W); | 225 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W); |
224 } | 226 } |
225 } else if (inst->reg != Z80_UNUSED && inst->reg != Z80_USE_IMMED && opts->regs[inst->ea_reg] >= AH && opts->regs[inst->ea_reg] <= BH) { | 227 } else if (inst->reg != Z80_UNUSED && inst->reg != Z80_USE_IMMED && opts->regs[inst->ea_reg] >= AH && opts->regs[inst->ea_reg] <= BH) { |
226 uint8_t other_reg = opts->regs[inst->reg]; | 228 uint8_t other_reg = opts->regs[inst->reg]; |
229 #ifdef X86_64 | |
227 if (other_reg >= R8 || (other_reg >= RSP && other_reg <= RDI)) { | 230 if (other_reg >= R8 || (other_reg >= RSP && other_reg <= RDI)) { |
228 //we can't mix an *H reg with a register that requires the REX prefix | 231 //we can't mix an *H reg with a register that requires the REX prefix |
229 ror_ir(code, 8, opts->regs[z80_low_reg(inst->ea_reg)], SZ_W); | 232 ror_ir(code, 8, opts->regs[z80_low_reg(inst->ea_reg)], SZ_W); |
230 } | 233 } |
234 #endif | |
231 } | 235 } |
232 } | 236 } |
233 } | 237 } |
234 | 238 |
235 void z80_save_result(z80_options *opts, z80inst * inst) | 239 void z80_save_result(z80_options *opts, z80inst * inst) |
1253 cycles(&opts->gen, 1); | 1257 cycles(&opts->gen, 1); |
1254 } | 1258 } |
1255 bts_ir(code, bit, src_op.base, size); | 1259 bts_ir(code, bit, src_op.base, size); |
1256 if (inst->reg != Z80_USE_IMMED) { | 1260 if (inst->reg != Z80_USE_IMMED) { |
1257 if (size == SZ_W) { | 1261 if (size == SZ_W) { |
1262 #ifdef X86_64 | |
1258 if (dst_op.base >= R8) { | 1263 if (dst_op.base >= R8) { |
1259 ror_ir(code, 8, src_op.base, SZ_W); | 1264 ror_ir(code, 8, src_op.base, SZ_W); |
1260 mov_rr(code, opts->regs[z80_low_reg(inst->ea_reg)], dst_op.base, SZ_B); | 1265 mov_rr(code, opts->regs[z80_low_reg(inst->ea_reg)], dst_op.base, SZ_B); |
1261 ror_ir(code, 8, src_op.base, SZ_W); | 1266 ror_ir(code, 8, src_op.base, SZ_W); |
1262 } else { | 1267 } else { |
1268 #endif | |
1263 mov_rr(code, opts->regs[inst->ea_reg], dst_op.base, SZ_B); | 1269 mov_rr(code, opts->regs[inst->ea_reg], dst_op.base, SZ_B); |
1270 #ifdef X86_64 | |
1264 } | 1271 } |
1272 #endif | |
1265 } else { | 1273 } else { |
1266 mov_rr(code, src_op.base, dst_op.base, SZ_B); | 1274 mov_rr(code, src_op.base, dst_op.base, SZ_B); |
1267 } | 1275 } |
1268 } | 1276 } |
1269 if ((inst->addr_mode & 0x1F) != Z80_REG) { | 1277 if ((inst->addr_mode & 0x1F) != Z80_REG) { |
1295 cycles(&opts->gen, 1); | 1303 cycles(&opts->gen, 1); |
1296 } | 1304 } |
1297 btr_ir(code, bit, src_op.base, size); | 1305 btr_ir(code, bit, src_op.base, size); |
1298 if (inst->reg != Z80_USE_IMMED) { | 1306 if (inst->reg != Z80_USE_IMMED) { |
1299 if (size == SZ_W) { | 1307 if (size == SZ_W) { |
1308 #ifdef X86_64 | |
1300 if (dst_op.base >= R8) { | 1309 if (dst_op.base >= R8) { |
1301 ror_ir(code, 8, src_op.base, SZ_W); | 1310 ror_ir(code, 8, src_op.base, SZ_W); |
1302 mov_rr(code, opts->regs[z80_low_reg(inst->ea_reg)], dst_op.base, SZ_B); | 1311 mov_rr(code, opts->regs[z80_low_reg(inst->ea_reg)], dst_op.base, SZ_B); |
1303 ror_ir(code, 8, src_op.base, SZ_W); | 1312 ror_ir(code, 8, src_op.base, SZ_W); |
1304 } else { | 1313 } else { |
1314 #endif | |
1305 mov_rr(code, opts->regs[inst->ea_reg], dst_op.base, SZ_B); | 1315 mov_rr(code, opts->regs[inst->ea_reg], dst_op.base, SZ_B); |
1316 #ifdef X86_64 | |
1306 } | 1317 } |
1318 #endif | |
1307 } else { | 1319 } else { |
1308 mov_rr(code, src_op.base, dst_op.base, SZ_B); | 1320 mov_rr(code, src_op.base, dst_op.base, SZ_B); |
1309 } | 1321 } |
1310 } | 1322 } |
1311 if (inst->addr_mode != Z80_REG) { | 1323 if (inst->addr_mode != Z80_REG) { |
1933 options->gen.mem_ptr_off = offsetof(z80_context, mem_pointers); | 1945 options->gen.mem_ptr_off = offsetof(z80_context, mem_pointers); |
1934 options->gen.ram_flags_off = offsetof(z80_context, ram_code_flags); | 1946 options->gen.ram_flags_off = offsetof(z80_context, ram_code_flags); |
1935 options->gen.ram_flags_shift = 7; | 1947 options->gen.ram_flags_shift = 7; |
1936 | 1948 |
1937 options->flags = 0; | 1949 options->flags = 0; |
1950 #ifdef X86_64 | |
1938 options->regs[Z80_B] = BH; | 1951 options->regs[Z80_B] = BH; |
1939 options->regs[Z80_C] = RBX; | 1952 options->regs[Z80_C] = RBX; |
1940 options->regs[Z80_D] = CH; | 1953 options->regs[Z80_D] = CH; |
1941 options->regs[Z80_E] = RCX; | 1954 options->regs[Z80_E] = RCX; |
1942 options->regs[Z80_H] = AH; | 1955 options->regs[Z80_H] = AH; |
1953 options->regs[Z80_HL] = RAX; | 1966 options->regs[Z80_HL] = RAX; |
1954 options->regs[Z80_SP] = R9; | 1967 options->regs[Z80_SP] = R9; |
1955 options->regs[Z80_AF] = -1; | 1968 options->regs[Z80_AF] = -1; |
1956 options->regs[Z80_IX] = RDX; | 1969 options->regs[Z80_IX] = RDX; |
1957 options->regs[Z80_IY] = R8; | 1970 options->regs[Z80_IY] = R8; |
1958 | 1971 |
1972 options->gen.scratch1 = R13; | |
1973 options->gen.scratch2 = R14; | |
1974 #else | |
1975 memset(options->regs, -1, sizeof(options->regs)); | |
1976 options->regs[Z80_A] = RAX; | |
1977 options->regx[Z80_SP] = RBX; | |
1978 | |
1979 options->gen.scratch1 = RCX; | |
1980 options->gen.scratch2 = RDX; | |
1981 #endif | |
1982 | |
1959 options->gen.context_reg = RSI; | 1983 options->gen.context_reg = RSI; |
1960 options->gen.cycles = RBP; | 1984 options->gen.cycles = RBP; |
1961 options->gen.limit = RDI; | 1985 options->gen.limit = RDI; |
1962 options->gen.scratch1 = R13; | |
1963 options->gen.scratch2 = R14; | |
1964 | 1986 |
1965 options->gen.native_code_map = malloc(sizeof(native_map_slot)); | 1987 options->gen.native_code_map = malloc(sizeof(native_map_slot)); |
1966 memset(options->gen.native_code_map, 0, sizeof(native_map_slot)); | 1988 memset(options->gen.native_code_map, 0, sizeof(native_map_slot)); |
1967 options->gen.deferred = NULL; | 1989 options->gen.deferred = NULL; |
1968 options->gen.ram_inst_sizes = malloc(sizeof(uint8_t) * 0x2000 + sizeof(uint8_t *)); | 1990 options->gen.ram_inst_sizes = malloc(sizeof(uint8_t) * 0x2000 + sizeof(uint8_t *)); |