1*b2441318SGreg Kroah-Hartman /* SPDX-License-Identifier: GPL-2.0 */
2b8b572e1SStephen Rothwell #ifndef _ASM_POWERPC_VGA_H_
3b8b572e1SStephen Rothwell #define _ASM_POWERPC_VGA_H_
4b8b572e1SStephen Rothwell
5b8b572e1SStephen Rothwell #ifdef __KERNEL__
6b8b572e1SStephen Rothwell
7b8b572e1SStephen Rothwell /*
8b8b572e1SStephen Rothwell * Access to VGA videoram
9b8b572e1SStephen Rothwell *
10b8b572e1SStephen Rothwell * (c) 1998 Martin Mares <mj@ucw.cz>
11b8b572e1SStephen Rothwell */
12b8b572e1SStephen Rothwell
13b8b572e1SStephen Rothwell
14b8b572e1SStephen Rothwell #include <asm/io.h>
15b8b572e1SStephen Rothwell
16b8b572e1SStephen Rothwell
17b8b572e1SStephen Rothwell #if defined(CONFIG_VGA_CONSOLE) || defined(CONFIG_MDA_CONSOLE)
18b8b572e1SStephen Rothwell
19b8b572e1SStephen Rothwell #define VT_BUF_HAVE_RW
20b8b572e1SStephen Rothwell /*
21b8b572e1SStephen Rothwell * These are only needed for supporting VGA or MDA text mode, which use little
22b8b572e1SStephen Rothwell * endian byte ordering.
23b8b572e1SStephen Rothwell * In other cases, we can optimize by using native byte ordering and
24b8b572e1SStephen Rothwell * <linux/vt_buffer.h> has already done the right job for us.
25b8b572e1SStephen Rothwell */
26b8b572e1SStephen Rothwell
/*
 * Store one character/attribute cell into VGA/MDA text memory.
 *
 * The hardware text buffer is little endian regardless of host byte
 * order, so convert before the store.
 */
static inline void scr_writew(u16 val, volatile u16 *addr)
{
	u16 le_val = cpu_to_le16(val);

	*addr = le_val;
}
31b8b572e1SStephen Rothwell
/*
 * Read one character/attribute cell from VGA/MDA text memory and
 * convert it from the buffer's little-endian layout to host order.
 */
static inline u16 scr_readw(volatile const u16 *addr)
{
	u16 le_val = *addr;

	return le16_to_cpu(le_val);
}
36b8b572e1SStephen Rothwell
37ac036f95SMatthew Wilcox #define VT_BUF_HAVE_MEMSETW
scr_memsetw(u16 * s,u16 v,unsigned int n)38ac036f95SMatthew Wilcox static inline void scr_memsetw(u16 *s, u16 v, unsigned int n)
39ac036f95SMatthew Wilcox {
40ac036f95SMatthew Wilcox memset16(s, cpu_to_le16(v), n / 2);
41ac036f95SMatthew Wilcox }
42ac036f95SMatthew Wilcox
#define VT_BUF_HAVE_MEMCPYW
#define VT_BUF_HAVE_MEMMOVEW
/*
 * Bulk copies move cells verbatim, so byte order within each cell is
 * irrelevant and the plain string helpers suffice.
 */
#define scr_memcpyw memcpy
#define scr_memmovew memmove
47b8b572e1SStephen Rothwell
48b8b572e1SStephen Rothwell #endif /* !CONFIG_VGA_CONSOLE && !CONFIG_MDA_CONSOLE */
49b8b572e1SStephen Rothwell
#ifdef __powerpc64__
/* 64-bit: legacy VGA memory must be ioremap()ed before it is usable. */
#define VGA_MAP_MEM(x,s) ((unsigned long) ioremap((x), s))
#else
/*
 * 32-bit: the address is returned unchanged.
 * NOTE(review): presumably callers pass an address that is already
 * mapped/usable here — confirm against the 32-bit VGA console setup.
 */
#define VGA_MAP_MEM(x,s) (x)
#endif

/* Byte accessors for VGA memory; plain dereferences on powerpc. */
#define vga_readb(x) (*(x))
#define vga_writeb(x,y) (*(y) = (x))
58b8b572e1SStephen Rothwell
59b8b572e1SStephen Rothwell #endif /* __KERNEL__ */
60b8b572e1SStephen Rothwell #endif /* _ASM_POWERPC_VGA_H_ */
61