/* SPDX-License-Identifier: GPL-2.0 */
/*
 *	Access to VGA videoram
 *
 *	(c) 1998 Martin Mares <mj@ucw.cz>
 */
#ifndef _ASM_VGA_H
#define _ASM_VGA_H

#include <linux/string.h>
#include <asm/addrspace.h>
#include <asm/byteorder.h>

/*
 *	On the PC, we can just recalculate addresses and then
 *	access the videoram directly without any black magic.
 */

#define VGA_MAP_MEM(x, s)	CKSEG1ADDR(0x10000000L + (unsigned long)(x))

#define vga_readb(x)	(*(x))
#define vga_writeb(x, y)	(*(y) = (x))

#define VT_BUF_HAVE_RW
/*
 *  These are only needed for supporting VGA or MDA text mode, which use little
 *  endian byte ordering.
 *  In other cases, we can optimize by using native byte ordering and
 *  <linux/vt_buffer.h> has already done the right job for us.
 */

#undef scr_writew
#undef scr_readw
/*
 * Store one 16-bit character/attribute cell into VGA text memory,
 * forcing little-endian byte order regardless of CPU endianness.
 */
static inline void scr_writew(u16 val, volatile u16 *addr)
{
	u16 le = cpu_to_le16(val);

	*addr = le;
}
/*
 * Fetch one 16-bit cell from VGA text memory, converting from the
 * little-endian on-screen format to CPU byte order.
 */
static inline u16 scr_readw(volatile const u16 *addr)
{
	u16 le = *addr;

	return le16_to_cpu(le);
}
/*
 * Fill @count *bytes* of VGA text memory with the 16-bit cell @v,
 * stored little-endian.  memset16() counts in u16 units, so the
 * byte count is converted to a cell count.
 */
static inline void scr_memsetw(u16 *s, u16 v, unsigned int count)
{
	unsigned int cells = count / 2;

	memset16(s, cpu_to_le16(v), cells);
}

#define scr_memcpyw(d, s, c) memcpy(d, s, c)
#define scr_memmovew(d, s, c) memmove(d, s, c)
#define VT_BUF_HAVE_MEMCPYW
#define VT_BUF_HAVE_MEMMOVEW
#define VT_BUF_HAVE_MEMSETW

#endif /* _ASM_VGA_H */