/*
 *  Generic fillrect for frame buffers in system RAM with packed pixels of
 *  any depth.
 *
 *  Based almost entirely on cfbfillrect.c (which is based almost entirely
 *  on Geert Uytterhoeven's fillrect routine)
 *
 *      Copyright (C)  2007 Antonino Daplas <adaplas@pol.net>
 *
 *  This file is subject to the terms and conditions of the GNU General Public
 *  License.  See the file COPYING in the main directory of this archive for
 *  more details.
 */
#include <linux/module.h>
#include <linux/string.h>
#include <linux/fb.h>
#include <asm/types.h>
#include "fb_draw.h"

    /*
     *  Aligned pattern fill using 32/64-bit memory accesses
     */

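/*
 * Outline of the aligned case (pixel size divides the word size, so the
 * expanded pattern is identical in every destination word and needs no
 * rotation):
 *   - "first" and "last" mask off the pixels of the leading and trailing
 *     partial words (FB_SHIFT_HIGH accounts for the framebuffer bit order),
 *   - the leading partial word is merged with comp(),
 *   - the aligned middle section is filled with memset_l(),
 *   - the trailing partial word is merged with comp() as well.
 */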
static void
bitfill_aligned(struct fb_info *p, unsigned long *dst, int dst_idx,
		unsigned long pat, unsigned n, int bits)
{
	unsigned long first, last;

	if (!n)
		return;

	first = FB_SHIFT_HIGH(p, ~0UL, dst_idx);
	last = ~(FB_SHIFT_HIGH(p, ~0UL, (dst_idx+n) % bits));

	if (dst_idx+n <= bits) {
		/* Single word */
		if (last)
			first &= last;
		*dst = comp(pat, *dst, first);
	} else {
		/* Multiple destination words */

		/* Leading bits */
		if (first != ~0UL) {
			*dst = comp(pat, *dst, first);
			dst++;
			n -= bits - dst_idx;
		}

		/* Main chunk */
		n /= bits;
		memset_l(dst, pat, n);
		dst += n;

		/* Trailing bits */
		if (last)
			*dst = comp(pat, *dst, last);
	}
}


    /*
     *  Unaligned generic pattern fill using 32/64-bit memory accesses
     *  The pattern must have been expanded to a full 32/64-bit value
     *  Left/right are the appropriate shifts to convert the pattern to the
     *  one to be used for the next 32/64-bit word
     */

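/*
 * Used when the pixel size does not divide the word size: the expanded
 * pattern's phase advances by (bits % bpp) bits from one destination word
 * to the next (for example, with 24 bpp on a 32-bit machine it advances by
 * 32 % 24 = 8 bits per word), so the pattern is rotated with the left/right
 * shifts before each new word.  Leading and trailing partial words are
 * merged with comp() as in the aligned case; the middle is written with a
 * 4x unrolled loop.
 */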
static void
bitfill_unaligned(struct fb_info *p, unsigned long *dst, int dst_idx,
		  unsigned long pat, int left, int right, unsigned n, int bits)
{
	unsigned long first, last;

	if (!n)
		return;

	first = FB_SHIFT_HIGH(p, ~0UL, dst_idx);
	last = ~(FB_SHIFT_HIGH(p, ~0UL, (dst_idx+n) % bits));

	if (dst_idx+n <= bits) {
		/* Single word */
		if (last)
			first &= last;
		*dst = comp(pat, *dst, first);
	} else {
		/* Multiple destination words */
		/* Leading bits */
		if (first) {
			*dst = comp(pat, *dst, first);
			dst++;
			pat = pat << left | pat >> right;
			n -= bits - dst_idx;
		}

		/* Main chunk */
		n /= bits;
		while (n >= 4) {
			*dst++ = pat;
			pat = pat << left | pat >> right;
			*dst++ = pat;
			pat = pat << left | pat >> right;
			*dst++ = pat;
			pat = pat << left | pat >> right;
			*dst++ = pat;
			pat = pat << left | pat >> right;
			n -= 4;
		}
		while (n--) {
			*dst++ = pat;
			pat = pat << left | pat >> right;
		}

		/* Trailing bits */
		if (last)
			*dst = comp(pat, *dst, last);
	}
}

    /*
     *  Aligned pattern invert using 32/64-bit memory accesses
     */
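/*
 * ROP_XOR variant of bitfill_aligned(): each destination word is XOR-ed
 * with the expanded pattern instead of being overwritten, with the
 * leading/trailing partial words again masked through comp().
 */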
static void
bitfill_aligned_rev(struct fb_info *p, unsigned long *dst, int dst_idx,
		    unsigned long pat, unsigned n, int bits)
{
	unsigned long val = pat;
	unsigned long first, last;

	if (!n)
		return;

	first = FB_SHIFT_HIGH(p, ~0UL, dst_idx);
	last = ~(FB_SHIFT_HIGH(p, ~0UL, (dst_idx+n) % bits));

	if (dst_idx+n <= bits) {
		/* Single word */
		if (last)
			first &= last;
		*dst = comp(*dst ^ val, *dst, first);
	} else {
		/* Multiple destination words */
		/* Leading bits */
		if (first != 0UL) {
			*dst = comp(*dst ^ val, *dst, first);
			dst++;
			n -= bits - dst_idx;
		}

		/* Main chunk */
		n /= bits;
		while (n >= 8) {
			*dst++ ^= val;
			*dst++ ^= val;
			*dst++ ^= val;
			*dst++ ^= val;
			*dst++ ^= val;
			*dst++ ^= val;
			*dst++ ^= val;
			*dst++ ^= val;
			n -= 8;
		}
		while (n--)
			*dst++ ^= val;
		/* Trailing bits */
		if (last)
			*dst = comp(*dst ^ val, *dst, last);
	}
}


    /*
     *  Unaligned generic pattern invert using 32/64-bit memory accesses
     *  The pattern must have been expanded to a full 32/64-bit value
     *  Left/right are the appropriate shifts to convert the pattern to the
     *  one to be used for the next 32/64-bit word
     */

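/*
 * ROP_XOR variant of bitfill_unaligned(): the rotated pattern is XOR-ed
 * into each destination word rather than overwriting it.
 */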
static void
bitfill_unaligned_rev(struct fb_info *p, unsigned long *dst, int dst_idx,
		      unsigned long pat, int left, int right, unsigned n,
		      int bits)
{
	unsigned long first, last;

	if (!n)
		return;

	first = FB_SHIFT_HIGH(p, ~0UL, dst_idx);
	last = ~(FB_SHIFT_HIGH(p, ~0UL, (dst_idx+n) % bits));

	if (dst_idx+n <= bits) {
		/* Single word */
		if (last)
			first &= last;
		*dst = comp(*dst ^ pat, *dst, first);
	} else {
		/* Multiple destination words */

		/* Leading bits */
		if (first != 0UL) {
			*dst = comp(*dst ^ pat, *dst, first);
			dst++;
			pat = pat << left | pat >> right;
			n -= bits - dst_idx;
		}

		/* Main chunk */
		n /= bits;
		while (n >= 4) {
			*dst++ ^= pat;
			pat = pat << left | pat >> right;
			*dst++ ^= pat;
			pat = pat << left | pat >> right;
			*dst++ ^= pat;
			pat = pat << left | pat >> right;
			*dst++ ^= pat;
			pat = pat << left | pat >> right;
			n -= 4;
		}
		while (n--) {
			/* advance to the next word, as in the unrolled loop above */
			*dst++ ^= pat;
			pat = pat << left | pat >> right;
		}

		/* Trailing bits */
		if (last)
			*dst = comp(*dst ^ pat, *dst, last);
	}
}

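/*
 * sys_fillrect - fill a rectangle in a frame buffer located in system RAM
 *
 * The foreground colour is looked up through the pseudo palette for
 * truecolor/directcolor visuals and expanded into a full-word pattern with
 * pixel_to_pat().  If the pixel size divides BITS_PER_LONG the aligned
 * routines are used; otherwise the pattern is rotated to the starting pixel
 * of each scanline and the unaligned routines take over.
 */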
void sys_fillrect(struct fb_info *p, const struct fb_fillrect *rect)
{
	unsigned long pat, pat2, fg;
	unsigned long width = rect->width, height = rect->height;
	int bits = BITS_PER_LONG, bytes = bits >> 3;
	u32 bpp = p->var.bits_per_pixel;
	unsigned long *dst;
	int dst_idx, left;

	if (p->state != FBINFO_STATE_RUNNING)
		return;

	if (p->fix.visual == FB_VISUAL_TRUECOLOR ||
	    p->fix.visual == FB_VISUAL_DIRECTCOLOR)
		fg = ((u32 *) (p->pseudo_palette))[rect->color];
	else
		fg = rect->color;

	pat = pixel_to_pat(bpp, fg);

	dst = (unsigned long *)((unsigned long)p->screen_base & ~(bytes-1));
	dst_idx = ((unsigned long)p->screen_base & (bytes - 1))*8;
	dst_idx += rect->dy*p->fix.line_length*8+rect->dx*bpp;
	/* FIXME For now we support 1-32 bpp only */
	left = bits % bpp;
	if (p->fbops->fb_sync)
		p->fbops->fb_sync(p);
	if (!left) {
		void (*fill_op32)(struct fb_info *p, unsigned long *dst,
				  int dst_idx, unsigned long pat, unsigned n,
				  int bits) = NULL;

		switch (rect->rop) {
		case ROP_XOR:
			fill_op32 = bitfill_aligned_rev;
			break;
		case ROP_COPY:
			fill_op32 = bitfill_aligned;
			break;
		default:
			printk(KERN_ERR "sys_fillrect(): unknown rop, "
				"defaulting to ROP_COPY\n");
			fill_op32 = bitfill_aligned;
			break;
		}
		while (height--) {
			dst += dst_idx >> (ffs(bits) - 1);
			dst_idx &= (bits - 1);
			fill_op32(p, dst, dst_idx, pat, width*bpp, bits);
			dst_idx += p->fix.line_length*8;
		}
	} else {
		int right, r;
		void (*fill_op)(struct fb_info *p, unsigned long *dst,
				int dst_idx, unsigned long pat, int left,
				int right, unsigned n, int bits) = NULL;
#ifdef __LITTLE_ENDIAN
		right = left;
		left = bpp - right;
#else
		right = bpp - left;
#endif
		switch (rect->rop) {
		case ROP_XOR:
			fill_op = bitfill_unaligned_rev;
			break;
		case ROP_COPY:
			fill_op = bitfill_unaligned;
			break;
		default:
			printk(KERN_ERR "sys_fillrect(): unknown rop, "
				"defaulting to ROP_COPY\n");
			fill_op = bitfill_unaligned;
			break;
		}
		while (height--) {
			dst += dst_idx / bits;
			dst_idx &= (bits - 1);
			r = dst_idx % bpp;
			/* rotate pattern to the correct start position */
			pat2 = le_long_to_cpu(rolx(cpu_to_le_long(pat), r, bpp));
			fill_op(p, dst, dst_idx, pat2, left, right,
				width*bpp, bits);
			dst_idx += p->fix.line_length*8;
		}
	}
}
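/*
 * Typical usage (illustrative sketch only; "foofb_ops" is a made-up name):
 * drivers whose framebuffer lives in system RAM point their fb_ops at the
 * sys_* drawing helpers, e.g.
 *
 *	static struct fb_ops foofb_ops = {
 *		.owner		= THIS_MODULE,
 *		.fb_fillrect	= sys_fillrect,
 *		.fb_copyarea	= sys_copyarea,
 *		.fb_imageblit	= sys_imageblit,
 *	};
 */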

EXPORT_SYMBOL(sys_fillrect);

MODULE_AUTHOR("Antonino Daplas <adaplas@pol.net>");
MODULE_DESCRIPTION("Generic fill rectangle (sys-to-sys)");
MODULE_LICENSE("GPL");