21 #include "../SDL_internal.h"
32 #define HAVE_FAST_WRITE_INT8 1
36 # undef HAVE_FAST_WRITE_INT8
37 # define HAVE_FAST_WRITE_INT8 0
50 #if SDL_ALTIVEC_BLITTERS
55 #include <sys/sysctl.h>
59 const char key[] =
"hw.l3cachesize";
61 size_t typeSize =
sizeof(
result);
79 #if (defined(__MACOSX__) && (__GNUC__ < 4))
/* Pre-GCC4 Apple compilers spell AltiVec vector literals with parentheses
 * rather than braces; these wrappers hide that syntax difference. */
#define VECUINT8_LITERAL(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p) \
    (vector unsigned char) ( a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p )
#define VECUINT16_LITERAL(a,b,c,d,e,f,g,h) \
    (vector unsigned short) ( a,b,c,d,e,f,g,h )
/* Standard AltiVec vector-literal syntax: brace-enclosed initializers. */
#define VECUINT8_LITERAL(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p) \
    (vector unsigned char) { a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p }
#define VECUINT16_LITERAL(a,b,c,d,e,f,g,h) \
    (vector unsigned short) { a,b,c,d,e,f,g,h }
/* Nonzero (the low nibble of the address) when x is not 16-byte aligned.
 * The argument is parenthesized so pointer-arithmetic expressions such as
 * UNALIGNED_PTR(p + n) expand with the intended precedence; the bare
 * (size_t) x form would cast only p and then add n afterwards. */
#define UNALIGNED_PTR(x) (((size_t)(x)) & 0x0000000F)
/* Build a vec_perm control vector that, for each of the four 32-bit words
 * in a vector, selects its bytes in the order a,b,c,d (byte indices within
 * the word).  Uses the parenthesized vector-literal syntax directly. */
#define VSWIZZLE32(a,b,c,d) (vector unsigned char) \
                                ( 0x00+a, 0x00+b, 0x00+c, 0x00+d, \
                                  0x04+a, 0x04+b, 0x04+c, 0x04+d, \
                                  0x08+a, 0x08+b, 0x08+c, 0x08+d, \
                                  0x0C+a, 0x0C+b, 0x0C+c, 0x0C+d )
/* Pack 8-bit r, g, b, a channel values into one 32-bit pixel using the
 * destination format's per-channel shifts and masks.  All arguments are
 * parenthesized so that expression arguments (e.g. sR >> 3) expand with
 * the intended precedence instead of being split by << and &. */
#define MAKE8888(dstfmt, r, g, b, a) \
    ( (((r) << (dstfmt)->Rshift) & (dstfmt)->Rmask) | \
      (((g) << (dstfmt)->Gshift) & (dstfmt)->Gmask) | \
      (((b) << (dstfmt)->Bshift) & (dstfmt)->Bmask) | \
      (((a) << (dstfmt)->Ashift) & (dstfmt)->Amask) )
/* Cache data-stream channel numbers for vec_dst/vec_dstt prefetching:
 * one channel for reads from the source, one for writes to the dest. */
#define DST_CHAN_SRC 1
#define DST_CHAN_DEST 2
/* Encode a vec_dst control word: block size in the top byte, block count
 * in the next byte, and the inter-block stride in the low 16 bits. */
#define DST_CTRL(size, count, stride) \
    (((size) << 24) | ((count) << 16) | (stride))
117 #define VEC_ALIGNER(src) ((UNALIGNED_PTR(src)) \
119 : vec_add(vec_lvsl(8, src), vec_splat_u8(8)))
122 static vector
unsigned char
134 0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000,
139 const vector
unsigned char plus = VECUINT8_LITERAL(0x00, 0x00, 0x00, 0x00,
140 0x04, 0x04, 0x04, 0x04,
141 0x08, 0x08, 0x08, 0x08,
144 vector
unsigned char vswiz;
145 vector
unsigned int srcvec;
146 Uint32 rmask, gmask, bmask, amask;
149 srcfmt = &default_pixel_format;
152 dstfmt = &default_pixel_format;
/* Map a channel shift amount (0, 8, 16, 24) to a byte position (3..0). */
#define RESHIFT(X) (3 - ((X) >> 3))
163 ((srcfmt->
Amask) ? RESHIFT(srcfmt->
172 ((
unsigned int *) (
char *) &srcvec)[0] = (rmask | gmask | bmask | amask);
173 vswiz = vec_add(plus, (vector
unsigned char) vec_splat(srcvec, 0));
177 #if defined(__powerpc__) && (SDL_BYTEORDER == SDL_LIL_ENDIAN)
179 static vector
unsigned char reorder_ppc64le_vec(vector
unsigned char vpermute)
193 const vector
unsigned char ppc64le_reorder = VECUINT8_LITERAL(
194 0x01, 0x00, 0x03, 0x02,
195 0x05, 0x04, 0x07, 0x06,
196 0x09, 0x08, 0x0B, 0x0A,
197 0x0D, 0x0C, 0x0F, 0x0E );
199 vector
unsigned char vswiz_ppc64le;
200 vswiz_ppc64le = vec_perm(vpermute, vpermute, ppc64le_reorder);
201 return(vswiz_ppc64le);
215 vector
unsigned char valpha = vec_splat_u8(0);
216 vector
unsigned char vpermute = calc_swizzle32(srcfmt,
NULL);
217 vector
unsigned char vgmerge = VECUINT8_LITERAL(0x00, 0x02, 0x00, 0x06,
218 0x00, 0x0a, 0x00, 0x0e,
219 0x00, 0x12, 0x00, 0x16,
220 0x00, 0x1a, 0x00, 0x1e);
221 vector
unsigned short v1 = vec_splat_u16(1);
222 vector
unsigned short v3 = vec_splat_u16(3);
223 vector
unsigned short v3f =
224 VECUINT16_LITERAL(0x003f, 0x003f, 0x003f, 0x003f,
225 0x003f, 0x003f, 0x003f, 0x003f);
226 vector
unsigned short vfc =
227 VECUINT16_LITERAL(0x00fc, 0x00fc, 0x00fc, 0x00fc,
228 0x00fc, 0x00fc, 0x00fc, 0x00fc);
229 vector
unsigned short vf800 = (vector
unsigned short) vec_splat_u8(-7);
230 vf800 = vec_sl(vf800, vec_splat_u16(8));
233 vector
unsigned char valigner;
234 vector
unsigned char voverflow;
235 vector
unsigned char vsrc;
241 #define ONE_PIXEL_BLEND(condition, widthvar) \
242 while (condition) { \
244 unsigned sR, sG, sB, sA; \
245 DISEMBLE_RGBA((Uint8 *)src, 4, srcfmt, Pixel, \
247 *(Uint16 *)(dst) = (((sR << 8) & 0x0000F800) | \
248 ((sG << 3) & 0x000007E0) | \
249 ((sB >> 3) & 0x0000001F)); \
258 extrawidth = (
width % 8);
260 vsrc = vec_ld(0,
src);
261 valigner = VEC_ALIGNER(
src);
264 vector
unsigned short vpixel, vrpixel, vgpixel, vbpixel;
265 vector
unsigned int vsrc1, vsrc2;
266 vector
unsigned char vdst;
268 voverflow = vec_ld(15,
src);
269 vsrc = vec_perm(vsrc, voverflow, valigner);
270 vsrc1 = (vector
unsigned int) vec_perm(vsrc, valpha, vpermute);
273 voverflow = vec_ld(15,
src);
274 vsrc = vec_perm(vsrc, voverflow, valigner);
275 vsrc2 = (vector
unsigned int) vec_perm(vsrc, valpha, vpermute);
277 vpixel = (vector
unsigned short) vec_packpx(vsrc1, vsrc2);
278 vgpixel = (vector
unsigned short) vec_perm(vsrc1, vsrc2, vgmerge);
279 vgpixel = vec_and(vgpixel, vfc);
280 vgpixel = vec_sl(vgpixel,
v3);
281 vrpixel = vec_sl(vpixel,
v1);
282 vrpixel = vec_and(vrpixel, vf800);
283 vbpixel = vec_and(vpixel, v3f);
285 vec_or((vector
unsigned char) vrpixel,
286 (vector
unsigned char) vgpixel);
288 vdst = vec_or(vdst, (vector
unsigned char) vbpixel);
289 vec_st(vdst, 0,
dst);
300 ONE_PIXEL_BLEND((extrawidth), extrawidth);
301 #undef ONE_PIXEL_BLEND
321 vector
unsigned char valpha;
322 vector
unsigned char vpermute;
323 vector
unsigned short vf800;
324 vector
unsigned int v8 = vec_splat_u32(8);
325 vector
unsigned int v16 = vec_add(v8, v8);
326 vector
unsigned short v2 = vec_splat_u16(2);
327 vector
unsigned short v3 = vec_splat_u16(3);
333 vector
unsigned char vredalpha1 = VECUINT8_LITERAL(0x10, 0x00, 0x01, 0x01,
334 0x10, 0x02, 0x01, 0x01,
335 0x10, 0x04, 0x01, 0x01,
338 vector
unsigned char vredalpha2 =
340 char) (vec_add((vector
unsigned int) vredalpha1, vec_sl(v8, v16))
346 vector
unsigned char vblue1 = VECUINT8_LITERAL(0x00, 0x01, 0x02, 0x11,
347 0x04, 0x05, 0x06, 0x13,
348 0x08, 0x09, 0x0a, 0x15,
349 0x0c, 0x0d, 0x0e, 0x17);
350 vector
unsigned char vblue2 =
351 (vector
unsigned char) (vec_add((vector
unsigned int) vblue1, v8)
357 vector
unsigned char vgreen1 = VECUINT8_LITERAL(0x00, 0x01, 0x10, 0x03,
358 0x04, 0x05, 0x12, 0x07,
359 0x08, 0x09, 0x14, 0x0b,
360 0x0c, 0x0d, 0x16, 0x0f);
361 vector
unsigned char vgreen2 =
363 char) (vec_add((vector
unsigned int) vgreen1, vec_sl(v8, v8))
369 vf800 = (vector
unsigned short) vec_splat_u8(-7);
370 vf800 = vec_sl(vf800, vec_splat_u16(8));
372 if (dstfmt->
Amask && info->
a) {
373 ((
unsigned char *) &valpha)[0] =
alpha = info->
a;
374 valpha = vec_splat(valpha, 0);
377 valpha = vec_splat_u8(0);
380 vpermute = calc_swizzle32(
NULL, dstfmt);
382 vector
unsigned char valigner;
383 vector
unsigned char voverflow;
384 vector
unsigned char vsrc;
390 #define ONE_PIXEL_BLEND(condition, widthvar) \
391 while (condition) { \
392 unsigned sR, sG, sB; \
393 unsigned short Pixel = *((unsigned short *)src); \
394 sR = (Pixel >> 8) & 0xf8; \
395 sG = (Pixel >> 3) & 0xfc; \
396 sB = (Pixel << 3) & 0xf8; \
397 ASSEMBLE_RGBA(dst, 4, dstfmt, sR, sG, sB, alpha); \
405 extrawidth = (
width % 8);
407 vsrc = vec_ld(0,
src);
408 valigner = VEC_ALIGNER(
src);
411 vector
unsigned short vR, vG, vB;
412 vector
unsigned char vdst1, vdst2;
414 voverflow = vec_ld(15,
src);
415 vsrc = vec_perm(vsrc, voverflow, valigner);
417 vR = vec_and((vector
unsigned short) vsrc, vf800);
418 vB = vec_sl((vector
unsigned short) vsrc,
v3);
422 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
424 vdst1 = vec_perm(vdst1, (vector
unsigned char) vB, vblue1);
425 vdst1 = vec_perm(vdst1, (vector
unsigned char) vG, vgreen1);
426 vdst1 = vec_perm(vdst1, valpha, vpermute);
427 vec_st(vdst1, 0,
dst);
430 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
432 vdst2 = vec_perm(vdst2, (vector
unsigned char) vB, vblue2);
433 vdst2 = vec_perm(vdst2, (vector
unsigned char) vG, vgreen2);
434 vdst2 = vec_perm(vdst2, valpha, vpermute);
435 vec_st(vdst2, 16,
dst);
447 ONE_PIXEL_BLEND((extrawidth), extrawidth);
448 #undef ONE_PIXEL_BLEND
468 vector
unsigned char valpha;
469 vector
unsigned char vpermute;
470 vector
unsigned short vf800;
471 vector
unsigned int v8 = vec_splat_u32(8);
472 vector
unsigned int v16 = vec_add(v8, v8);
473 vector
unsigned short v1 = vec_splat_u16(1);
474 vector
unsigned short v3 = vec_splat_u16(3);
480 vector
unsigned char vredalpha1 = VECUINT8_LITERAL(0x10, 0x00, 0x01, 0x01,
481 0x10, 0x02, 0x01, 0x01,
482 0x10, 0x04, 0x01, 0x01,
485 vector
unsigned char vredalpha2 =
487 char) (vec_add((vector
unsigned int) vredalpha1, vec_sl(v8, v16))
493 vector
unsigned char vblue1 = VECUINT8_LITERAL(0x00, 0x01, 0x02, 0x11,
494 0x04, 0x05, 0x06, 0x13,
495 0x08, 0x09, 0x0a, 0x15,
496 0x0c, 0x0d, 0x0e, 0x17);
497 vector
unsigned char vblue2 =
498 (vector
unsigned char) (vec_add((vector
unsigned int) vblue1, v8)
504 vector
unsigned char vgreen1 = VECUINT8_LITERAL(0x00, 0x01, 0x10, 0x03,
505 0x04, 0x05, 0x12, 0x07,
506 0x08, 0x09, 0x14, 0x0b,
507 0x0c, 0x0d, 0x16, 0x0f);
508 vector
unsigned char vgreen2 =
510 char) (vec_add((vector
unsigned int) vgreen1, vec_sl(v8, v8))
516 vf800 = (vector
unsigned short) vec_splat_u8(-7);
517 vf800 = vec_sl(vf800, vec_splat_u16(8));
519 if (dstfmt->
Amask && info->
a) {
520 ((
unsigned char *) &valpha)[0] =
alpha = info->
a;
521 valpha = vec_splat(valpha, 0);
524 valpha = vec_splat_u8(0);
527 vpermute = calc_swizzle32(
NULL, dstfmt);
529 vector
unsigned char valigner;
530 vector
unsigned char voverflow;
531 vector
unsigned char vsrc;
537 #define ONE_PIXEL_BLEND(condition, widthvar) \
538 while (condition) { \
539 unsigned sR, sG, sB; \
540 unsigned short Pixel = *((unsigned short *)src); \
541 sR = (Pixel >> 7) & 0xf8; \
542 sG = (Pixel >> 2) & 0xf8; \
543 sB = (Pixel << 3) & 0xf8; \
544 ASSEMBLE_RGBA(dst, 4, dstfmt, sR, sG, sB, alpha); \
552 extrawidth = (
width % 8);
554 vsrc = vec_ld(0,
src);
555 valigner = VEC_ALIGNER(
src);
558 vector
unsigned short vR, vG, vB;
559 vector
unsigned char vdst1, vdst2;
561 voverflow = vec_ld(15,
src);
562 vsrc = vec_perm(vsrc, voverflow, valigner);
564 vR = vec_and(vec_sl((vector
unsigned short) vsrc,
v1), vf800);
565 vB = vec_sl((vector
unsigned short) vsrc,
v3);
569 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
571 vdst1 = vec_perm(vdst1, (vector
unsigned char) vB, vblue1);
572 vdst1 = vec_perm(vdst1, (vector
unsigned char) vG, vgreen1);
573 vdst1 = vec_perm(vdst1, valpha, vpermute);
574 vec_st(vdst1, 0,
dst);
577 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
579 vdst2 = vec_perm(vdst2, (vector
unsigned char) vB, vblue2);
580 vdst2 = vec_perm(vdst2, (vector
unsigned char) vG, vgreen2);
581 vdst2 = vec_perm(vdst2, valpha, vpermute);
582 vec_st(vdst2, 16,
dst);
594 ONE_PIXEL_BLEND((extrawidth), extrawidth);
595 #undef ONE_PIXEL_BLEND
617 int copy_alpha = (srcfmt->
Amask && dstfmt->
Amask);
621 vector
unsigned int valpha;
622 vector
unsigned char vpermute;
623 vector
unsigned char vzero;
624 vector
unsigned int vckey;
625 vector
unsigned int vrgbmask;
626 vpermute = calc_swizzle32(srcfmt, dstfmt);
627 if (info->
dst_w < 16) {
635 vzero = vec_splat_u8(0);
637 ((
unsigned char *) &valpha)[0] = (
unsigned char)
alpha;
639 (vector
unsigned int) vec_splat((vector
unsigned char) valpha, 0);
641 valpha = (vector
unsigned int) vzero;
644 ((
unsigned int *) (
char *) &vckey)[0] = ckey;
645 vckey = vec_splat(vckey, 0);
646 ((
unsigned int *) (
char *) &vrgbmask)[0] = rgbmask;
647 vrgbmask = vec_splat(vrgbmask, 0);
650 #define ONE_PIXEL_BLEND(condition, widthvar) \
652 while (condition) { \
654 unsigned sR, sG, sB, sA; \
655 DISEMBLE_RGBA((Uint8 *)srcp, srcbpp, srcfmt, Pixel, \
657 if ( (Pixel & rgbmask) != ckey ) { \
658 ASSEMBLE_RGBA((Uint8 *)dstp, dstbpp, dstfmt, \
661 dstp = (Uint32 *) (((Uint8 *) dstp) + dstbpp); \
662 srcp = (Uint32 *) (((Uint8 *) srcp) + srcbpp); \
666 while (condition) { \
668 unsigned sR, sG, sB; \
669 RETRIEVE_RGB_PIXEL((Uint8 *)srcp, srcbpp, Pixel); \
670 if ( Pixel != ckey ) { \
671 RGB_FROM_PIXEL(Pixel, srcfmt, sR, sG, sB); \
672 ASSEMBLE_RGBA((Uint8 *)dstp, dstbpp, dstfmt, \
673 sR, sG, sB, alpha); \
675 dstp = (Uint32 *) (((Uint8 *)dstp) + dstbpp); \
676 srcp = (Uint32 *) (((Uint8 *)srcp) + srcbpp); \
681 ONE_PIXEL_BLEND((UNALIGNED_PTR(dstp)) && (
width),
width);
684 int extrawidth = (
width % 4);
685 vector
unsigned char valigner = VEC_ALIGNER(srcp);
686 vector
unsigned int vs = vec_ld(0, srcp);
690 vector
unsigned char vsel;
691 vector
unsigned int vd;
692 vector
unsigned int voverflow = vec_ld(15, srcp);
694 vs = vec_perm(vs, voverflow, valigner);
696 vsel = (vector
unsigned char) vec_and(vs, vrgbmask);
697 vsel = (vector
unsigned char) vec_cmpeq(vs, vckey);
698 #if defined(__powerpc__) && (SDL_BYTEORDER == SDL_LIL_ENDIAN)
700 vpermute = reorder_ppc64le_vec(vpermute);
703 vs = vec_perm(vs, valpha, vpermute);
705 vd = vec_ld(0, dstp);
707 vd = (vector
unsigned int) vec_sel((vector
unsigned char) vs,
708 (vector
unsigned char) vd,
717 ONE_PIXEL_BLEND((extrawidth), extrawidth);
718 #undef ONE_PIXEL_BLEND
737 vector
unsigned int vzero = vec_splat_u32(0);
738 vector
unsigned char vpermute = calc_swizzle32(srcfmt, dstfmt);
741 vector
unsigned char valpha;
742 ((
unsigned char *) &valpha)[0] = info->
a;
743 vzero = (vector
unsigned int) vec_splat(valpha, 0);
751 vector
unsigned char valigner;
752 vector
unsigned int vbits;
753 vector
unsigned int voverflow;
761 while ((UNALIGNED_PTR(
dst)) && (
width)) {
766 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
771 extrawidth = (
width % 4);
773 valigner = VEC_ALIGNER(
src);
774 vbits = vec_ld(0,
src);
777 voverflow = vec_ld(15,
src);
780 vbits = vec_perm(vbits, voverflow, valigner);
781 #if defined(__powerpc__) && (SDL_BYTEORDER == SDL_LIL_ENDIAN)
783 vpermute = reorder_ppc64le_vec(vpermute);
785 vbits = vec_perm(vbits, vzero, vpermute);
786 vec_st(vbits, 0,
dst);
799 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
814 const int scalar_dst_lead =
sizeof(
Uint32) * 4;
815 const int vector_dst_lead =
sizeof(
Uint32) * 16;
824 vector
unsigned int vzero = vec_splat_u32(0);
825 vector
unsigned char vpermute = calc_swizzle32(srcfmt, dstfmt);
828 vector
unsigned char valpha;
829 ((
unsigned char *) &valpha)[0] = info->
a;
830 vzero = (vector
unsigned int) vec_splat(valpha, 0);
838 vector
unsigned char valigner;
839 vector
unsigned int vbits;
840 vector
unsigned int voverflow;
848 while ((UNALIGNED_PTR(
dst)) && (
width)) {
849 vec_dstt(
src + scalar_dst_lead, DST_CTRL(2, 32, 1024),
851 vec_dstst(
dst + scalar_dst_lead, DST_CTRL(2, 32, 1024),
857 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
862 extrawidth = (
width % 4);
864 valigner = VEC_ALIGNER(
src);
865 vbits = vec_ld(0,
src);
868 vec_dstt(
src + vector_dst_lead, DST_CTRL(2, 32, 1024),
870 vec_dstst(
dst + vector_dst_lead, DST_CTRL(2, 32, 1024),
872 voverflow = vec_ld(15,
src);
875 vbits = vec_perm(vbits, voverflow, valigner);
876 #if defined(__powerpc__) && (SDL_BYTEORDER == SDL_LIL_ENDIAN)
878 vpermute = reorder_ppc64le_vec(vpermute);
880 vbits = vec_perm(vbits, vzero, vpermute);
881 vec_st(vbits, 0,
dst);
894 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
902 vec_dss(DST_CHAN_SRC);
903 vec_dss(DST_CHAN_DEST);
912 char *
override =
SDL_getenv(
"SDL_ALTIVEC_BLIT_FEATURES");
914 unsigned int features_as_uint = 0;
915 SDL_sscanf(
override,
"%u", &features_as_uint);
933 #pragma altivec_model off
/* Non-AltiVec fallback: available blit features reduce to the MMX and
 * ARM SIMD runtime checks. */
#define GetBlitFeatures() ((SDL_HasMMX() ? BLIT_FEATURE_HAS_MMX : 0) | (SDL_HasARMSIMD() ? BLIT_FEATURE_HAS_ARM_SIMD : 0))
940 #if SDL_ARM_SIMD_BLITTERS
953 Blit_BGR888_RGB888ARMSIMDAsm(
width,
height, dstp, dststride, srcp, srcstride);
968 Blit_RGB444_RGB888ARMSIMDAsm(
width,
height, dstp, dststride, srcp, srcstride);
973 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
982 #define RGB888_RGB332(dst, src) { \
983 dst = (Uint8)((((src)&0x00E00000)>>16)| \
984 (((src)&0x0000E000)>>11)| \
985 (((src)&0x000000C0)>>6)); \
990 #ifndef USE_DUFFS_LOOP
997 int srcskip, dstskip;
1010 #ifdef USE_DUFFS_LOOP
1027 switch (
width & 3) {
1046 #ifdef USE_DUFFS_LOOP
1070 switch (
width & 3) {
1092 #define RGB101010_RGB332(dst, src) { \
1093 dst = (Uint8)((((src)&0x38000000)>>22)| \
1094 (((src)&0x000E0000)>>15)| \
1095 (((src)&0x00000300)>>8)); \
1100 #ifndef USE_DUFFS_LOOP
1107 int srcskip, dstskip;
1120 #ifdef USE_DUFFS_LOOP
1137 switch (
width & 3) {
1156 #ifdef USE_DUFFS_LOOP
1180 switch (
width & 3) {
1202 #define RGB888_RGB555(dst, src) { \
1203 *(Uint16 *)(dst) = (Uint16)((((*src)&0x00F80000)>>9)| \
1204 (((*src)&0x0000F800)>>6)| \
1205 (((*src)&0x000000F8)>>3)); \
1207 #ifndef USE_DUFFS_LOOP
1208 #define RGB888_RGB555_TWO(dst, src) { \
1209 *(Uint32 *)(dst) = (((((src[HI])&0x00F80000)>>9)| \
1210 (((src[HI])&0x0000F800)>>6)| \
1211 (((src[HI])&0x000000F8)>>3))<<16)| \
1212 (((src[LO])&0x00F80000)>>9)| \
1213 (((src[LO])&0x0000F800)>>6)| \
1214 (((src[LO])&0x000000F8)>>3); \
1220 #ifndef USE_DUFFS_LOOP
1226 int srcskip, dstskip;
1236 #ifdef USE_DUFFS_LOOP
1250 if ((
long)
dst & 0x03) {
1265 RGB888_RGB555_TWO(
dst,
src);
1268 RGB888_RGB555_TWO(
dst,
src);
1273 switch (
width & 3) {
1279 RGB888_RGB555_TWO(
dst,
src);
1296 RGB888_RGB555_TWO(
dst,
src);
1299 RGB888_RGB555_TWO(
dst,
src);
1304 switch (
width & 3) {
1310 RGB888_RGB555_TWO(
dst,
src);
1328 #define RGB888_RGB565(dst, src) { \
1329 *(Uint16 *)(dst) = (Uint16)((((*src)&0x00F80000)>>8)| \
1330 (((*src)&0x0000FC00)>>5)| \
1331 (((*src)&0x000000F8)>>3)); \
1333 #ifndef USE_DUFFS_LOOP
1334 #define RGB888_RGB565_TWO(dst, src) { \
1335 *(Uint32 *)(dst) = (((((src[HI])&0x00F80000)>>8)| \
1336 (((src[HI])&0x0000FC00)>>5)| \
1337 (((src[HI])&0x000000F8)>>3))<<16)| \
1338 (((src[LO])&0x00F80000)>>8)| \
1339 (((src[LO])&0x0000FC00)>>5)| \
1340 (((src[LO])&0x000000F8)>>3); \
1346 #ifndef USE_DUFFS_LOOP
1352 int srcskip, dstskip;
1362 #ifdef USE_DUFFS_LOOP
1376 if ((
long)
dst & 0x03) {
1391 RGB888_RGB565_TWO(
dst,
src);
1394 RGB888_RGB565_TWO(
dst,
src);
1399 switch (
width & 3) {
1405 RGB888_RGB565_TWO(
dst,
src);
1422 RGB888_RGB565_TWO(
dst,
src);
1425 RGB888_RGB565_TWO(
dst,
src);
1430 switch (
width & 3) {
1436 RGB888_RGB565_TWO(
dst,
src);
1454 #if SDL_HAVE_BLIT_N_RGB565
/* Expand a 16-bit RGB565 pixel (read as two bytes, src[LO] and src[HI])
 * to 32 bits by summing two table lookups: even map entries are keyed by
 * the low byte, odd entries by the high byte. */
#define RGB565_32(dst, src, map) (map[src[LO]*2] + map[src[HI]*2+1])
1461 #ifndef USE_DUFFS_LOOP
1467 int srcskip, dstskip;
1504 switch (
width & 3) {
1524 0x00000000, 0xff000000, 0x00000008, 0xff002000,
1525 0x00000010, 0xff004000, 0x00000018, 0xff006100,
1526 0x00000020, 0xff008100, 0x00000029, 0xff00a100,
1527 0x00000031, 0xff00c200, 0x00000039, 0xff00e200,
1528 0x00000041, 0xff080000, 0x0000004a, 0xff082000,
1529 0x00000052, 0xff084000, 0x0000005a, 0xff086100,
1530 0x00000062, 0xff088100, 0x0000006a, 0xff08a100,
1531 0x00000073, 0xff08c200, 0x0000007b, 0xff08e200,
1532 0x00000083, 0xff100000, 0x0000008b, 0xff102000,
1533 0x00000094, 0xff104000, 0x0000009c, 0xff106100,
1534 0x000000a4, 0xff108100, 0x000000ac, 0xff10a100,
1535 0x000000b4, 0xff10c200, 0x000000bd, 0xff10e200,
1536 0x000000c5, 0xff180000, 0x000000cd, 0xff182000,
1537 0x000000d5, 0xff184000, 0x000000de, 0xff186100,
1538 0x000000e6, 0xff188100, 0x000000ee, 0xff18a100,
1539 0x000000f6, 0xff18c200, 0x000000ff, 0xff18e200,
1540 0x00000400, 0xff200000, 0x00000408, 0xff202000,
1541 0x00000410, 0xff204000, 0x00000418, 0xff206100,
1542 0x00000420, 0xff208100, 0x00000429, 0xff20a100,
1543 0x00000431, 0xff20c200, 0x00000439, 0xff20e200,
1544 0x00000441, 0xff290000, 0x0000044a, 0xff292000,
1545 0x00000452, 0xff294000, 0x0000045a, 0xff296100,
1546 0x00000462, 0xff298100, 0x0000046a, 0xff29a100,
1547 0x00000473, 0xff29c200, 0x0000047b, 0xff29e200,
1548 0x00000483, 0xff310000, 0x0000048b, 0xff312000,
1549 0x00000494, 0xff314000, 0x0000049c, 0xff316100,
1550 0x000004a4, 0xff318100, 0x000004ac, 0xff31a100,
1551 0x000004b4, 0xff31c200, 0x000004bd, 0xff31e200,
1552 0x000004c5, 0xff390000, 0x000004cd, 0xff392000,
1553 0x000004d5, 0xff394000, 0x000004de, 0xff396100,
1554 0x000004e6, 0xff398100, 0x000004ee, 0xff39a100,
1555 0x000004f6, 0xff39c200, 0x000004ff, 0xff39e200,
1556 0x00000800, 0xff410000, 0x00000808, 0xff412000,
1557 0x00000810, 0xff414000, 0x00000818, 0xff416100,
1558 0x00000820, 0xff418100, 0x00000829, 0xff41a100,
1559 0x00000831, 0xff41c200, 0x00000839, 0xff41e200,
1560 0x00000841, 0xff4a0000, 0x0000084a, 0xff4a2000,
1561 0x00000852, 0xff4a4000, 0x0000085a, 0xff4a6100,
1562 0x00000862, 0xff4a8100, 0x0000086a, 0xff4aa100,
1563 0x00000873, 0xff4ac200, 0x0000087b, 0xff4ae200,
1564 0x00000883, 0xff520000, 0x0000088b, 0xff522000,
1565 0x00000894, 0xff524000, 0x0000089c, 0xff526100,
1566 0x000008a4, 0xff528100, 0x000008ac, 0xff52a100,
1567 0x000008b4, 0xff52c200, 0x000008bd, 0xff52e200,
1568 0x000008c5, 0xff5a0000, 0x000008cd, 0xff5a2000,
1569 0x000008d5, 0xff5a4000, 0x000008de, 0xff5a6100,
1570 0x000008e6, 0xff5a8100, 0x000008ee, 0xff5aa100,
1571 0x000008f6, 0xff5ac200, 0x000008ff, 0xff5ae200,
1572 0x00000c00, 0xff620000, 0x00000c08, 0xff622000,
1573 0x00000c10, 0xff624000, 0x00000c18, 0xff626100,
1574 0x00000c20, 0xff628100, 0x00000c29, 0xff62a100,
1575 0x00000c31, 0xff62c200, 0x00000c39, 0xff62e200,
1576 0x00000c41, 0xff6a0000, 0x00000c4a, 0xff6a2000,
1577 0x00000c52, 0xff6a4000, 0x00000c5a, 0xff6a6100,
1578 0x00000c62, 0xff6a8100, 0x00000c6a, 0xff6aa100,
1579 0x00000c73, 0xff6ac200, 0x00000c7b, 0xff6ae200,
1580 0x00000c83, 0xff730000, 0x00000c8b, 0xff732000,
1581 0x00000c94, 0xff734000, 0x00000c9c, 0xff736100,
1582 0x00000ca4, 0xff738100, 0x00000cac, 0xff73a100,
1583 0x00000cb4, 0xff73c200, 0x00000cbd, 0xff73e200,
1584 0x00000cc5, 0xff7b0000, 0x00000ccd, 0xff7b2000,
1585 0x00000cd5, 0xff7b4000, 0x00000cde, 0xff7b6100,
1586 0x00000ce6, 0xff7b8100, 0x00000cee, 0xff7ba100,
1587 0x00000cf6, 0xff7bc200, 0x00000cff, 0xff7be200,
1588 0x00001000, 0xff830000, 0x00001008, 0xff832000,
1589 0x00001010, 0xff834000, 0x00001018, 0xff836100,
1590 0x00001020, 0xff838100, 0x00001029, 0xff83a100,
1591 0x00001031, 0xff83c200, 0x00001039, 0xff83e200,
1592 0x00001041, 0xff8b0000, 0x0000104a, 0xff8b2000,
1593 0x00001052, 0xff8b4000, 0x0000105a, 0xff8b6100,
1594 0x00001062, 0xff8b8100, 0x0000106a, 0xff8ba100,
1595 0x00001073, 0xff8bc200, 0x0000107b, 0xff8be200,
1596 0x00001083, 0xff940000, 0x0000108b, 0xff942000,
1597 0x00001094, 0xff944000, 0x0000109c, 0xff946100,
1598 0x000010a4, 0xff948100, 0x000010ac, 0xff94a100,
1599 0x000010b4, 0xff94c200, 0x000010bd, 0xff94e200,
1600 0x000010c5, 0xff9c0000, 0x000010cd, 0xff9c2000,
1601 0x000010d5, 0xff9c4000, 0x000010de, 0xff9c6100,
1602 0x000010e6, 0xff9c8100, 0x000010ee, 0xff9ca100,
1603 0x000010f6, 0xff9cc200, 0x000010ff, 0xff9ce200,
1604 0x00001400, 0xffa40000, 0x00001408, 0xffa42000,
1605 0x00001410, 0xffa44000, 0x00001418, 0xffa46100,
1606 0x00001420, 0xffa48100, 0x00001429, 0xffa4a100,
1607 0x00001431, 0xffa4c200, 0x00001439, 0xffa4e200,
1608 0x00001441, 0xffac0000, 0x0000144a, 0xffac2000,
1609 0x00001452, 0xffac4000, 0x0000145a, 0xffac6100,
1610 0x00001462, 0xffac8100, 0x0000146a, 0xffaca100,
1611 0x00001473, 0xffacc200, 0x0000147b, 0xfface200,
1612 0x00001483, 0xffb40000, 0x0000148b, 0xffb42000,
1613 0x00001494, 0xffb44000, 0x0000149c, 0xffb46100,
1614 0x000014a4, 0xffb48100, 0x000014ac, 0xffb4a100,
1615 0x000014b4, 0xffb4c200, 0x000014bd, 0xffb4e200,
1616 0x000014c5, 0xffbd0000, 0x000014cd, 0xffbd2000,
1617 0x000014d5, 0xffbd4000, 0x000014de, 0xffbd6100,
1618 0x000014e6, 0xffbd8100, 0x000014ee, 0xffbda100,
1619 0x000014f6, 0xffbdc200, 0x000014ff, 0xffbde200,
1620 0x00001800, 0xffc50000, 0x00001808, 0xffc52000,
1621 0x00001810, 0xffc54000, 0x00001818, 0xffc56100,
1622 0x00001820, 0xffc58100, 0x00001829, 0xffc5a100,
1623 0x00001831, 0xffc5c200, 0x00001839, 0xffc5e200,
1624 0x00001841, 0xffcd0000, 0x0000184a, 0xffcd2000,
1625 0x00001852, 0xffcd4000, 0x0000185a, 0xffcd6100,
1626 0x00001862, 0xffcd8100, 0x0000186a, 0xffcda100,
1627 0x00001873, 0xffcdc200, 0x0000187b, 0xffcde200,
1628 0x00001883, 0xffd50000, 0x0000188b, 0xffd52000,
1629 0x00001894, 0xffd54000, 0x0000189c, 0xffd56100,
1630 0x000018a4, 0xffd58100, 0x000018ac, 0xffd5a100,
1631 0x000018b4, 0xffd5c200, 0x000018bd, 0xffd5e200,
1632 0x000018c5, 0xffde0000, 0x000018cd, 0xffde2000,
1633 0x000018d5, 0xffde4000, 0x000018de, 0xffde6100,
1634 0x000018e6, 0xffde8100, 0x000018ee, 0xffdea100,
1635 0x000018f6, 0xffdec200, 0x000018ff, 0xffdee200,
1636 0x00001c00, 0xffe60000, 0x00001c08, 0xffe62000,
1637 0x00001c10, 0xffe64000, 0x00001c18, 0xffe66100,
1638 0x00001c20, 0xffe68100, 0x00001c29, 0xffe6a100,
1639 0x00001c31, 0xffe6c200, 0x00001c39, 0xffe6e200,
1640 0x00001c41, 0xffee0000, 0x00001c4a, 0xffee2000,
1641 0x00001c52, 0xffee4000, 0x00001c5a, 0xffee6100,
1642 0x00001c62, 0xffee8100, 0x00001c6a, 0xffeea100,
1643 0x00001c73, 0xffeec200, 0x00001c7b, 0xffeee200,
1644 0x00001c83, 0xfff60000, 0x00001c8b, 0xfff62000,
1645 0x00001c94, 0xfff64000, 0x00001c9c, 0xfff66100,
1646 0x00001ca4, 0xfff68100, 0x00001cac, 0xfff6a100,
1647 0x00001cb4, 0xfff6c200, 0x00001cbd, 0xfff6e200,
1648 0x00001cc5, 0xffff0000, 0x00001ccd, 0xffff2000,
1649 0x00001cd5, 0xffff4000, 0x00001cde, 0xffff6100,
1650 0x00001ce6, 0xffff8100, 0x00001cee, 0xffffa100,
1651 0x00001cf6, 0xffffc200, 0x00001cff, 0xffffe200
1662 0xff000000, 0x00000000, 0xff080000, 0x00002000,
1663 0xff100000, 0x00004000, 0xff180000, 0x00006100,
1664 0xff200000, 0x00008100, 0xff290000, 0x0000a100,
1665 0xff310000, 0x0000c200, 0xff390000, 0x0000e200,
1666 0xff410000, 0x00000008, 0xff4a0000, 0x00002008,
1667 0xff520000, 0x00004008, 0xff5a0000, 0x00006108,
1668 0xff620000, 0x00008108, 0xff6a0000, 0x0000a108,
1669 0xff730000, 0x0000c208, 0xff7b0000, 0x0000e208,
1670 0xff830000, 0x00000010, 0xff8b0000, 0x00002010,
1671 0xff940000, 0x00004010, 0xff9c0000, 0x00006110,
1672 0xffa40000, 0x00008110, 0xffac0000, 0x0000a110,
1673 0xffb40000, 0x0000c210, 0xffbd0000, 0x0000e210,
1674 0xffc50000, 0x00000018, 0xffcd0000, 0x00002018,
1675 0xffd50000, 0x00004018, 0xffde0000, 0x00006118,
1676 0xffe60000, 0x00008118, 0xffee0000, 0x0000a118,
1677 0xfff60000, 0x0000c218, 0xffff0000, 0x0000e218,
1678 0xff000400, 0x00000020, 0xff080400, 0x00002020,
1679 0xff100400, 0x00004020, 0xff180400, 0x00006120,
1680 0xff200400, 0x00008120, 0xff290400, 0x0000a120,
1681 0xff310400, 0x0000c220, 0xff390400, 0x0000e220,
1682 0xff410400, 0x00000029, 0xff4a0400, 0x00002029,
1683 0xff520400, 0x00004029, 0xff5a0400, 0x00006129,
1684 0xff620400, 0x00008129, 0xff6a0400, 0x0000a129,
1685 0xff730400, 0x0000c229, 0xff7b0400, 0x0000e229,
1686 0xff830400, 0x00000031, 0xff8b0400, 0x00002031,
1687 0xff940400, 0x00004031, 0xff9c0400, 0x00006131,
1688 0xffa40400, 0x00008131, 0xffac0400, 0x0000a131,
1689 0xffb40400, 0x0000c231, 0xffbd0400, 0x0000e231,
1690 0xffc50400, 0x00000039, 0xffcd0400, 0x00002039,
1691 0xffd50400, 0x00004039, 0xffde0400, 0x00006139,
1692 0xffe60400, 0x00008139, 0xffee0400, 0x0000a139,
1693 0xfff60400, 0x0000c239, 0xffff0400, 0x0000e239,
1694 0xff000800, 0x00000041, 0xff080800, 0x00002041,
1695 0xff100800, 0x00004041, 0xff180800, 0x00006141,
1696 0xff200800, 0x00008141, 0xff290800, 0x0000a141,
1697 0xff310800, 0x0000c241, 0xff390800, 0x0000e241,
1698 0xff410800, 0x0000004a, 0xff4a0800, 0x0000204a,
1699 0xff520800, 0x0000404a, 0xff5a0800, 0x0000614a,
1700 0xff620800, 0x0000814a, 0xff6a0800, 0x0000a14a,
1701 0xff730800, 0x0000c24a, 0xff7b0800, 0x0000e24a,
1702 0xff830800, 0x00000052, 0xff8b0800, 0x00002052,
1703 0xff940800, 0x00004052, 0xff9c0800, 0x00006152,
1704 0xffa40800, 0x00008152, 0xffac0800, 0x0000a152,
1705 0xffb40800, 0x0000c252, 0xffbd0800, 0x0000e252,
1706 0xffc50800, 0x0000005a, 0xffcd0800, 0x0000205a,
1707 0xffd50800, 0x0000405a, 0xffde0800, 0x0000615a,
1708 0xffe60800, 0x0000815a, 0xffee0800, 0x0000a15a,
1709 0xfff60800, 0x0000c25a, 0xffff0800, 0x0000e25a,
1710 0xff000c00, 0x00000062, 0xff080c00, 0x00002062,
1711 0xff100c00, 0x00004062, 0xff180c00, 0x00006162,
1712 0xff200c00, 0x00008162, 0xff290c00, 0x0000a162,
1713 0xff310c00, 0x0000c262, 0xff390c00, 0x0000e262,
1714 0xff410c00, 0x0000006a, 0xff4a0c00, 0x0000206a,
1715 0xff520c00, 0x0000406a, 0xff5a0c00, 0x0000616a,
1716 0xff620c00, 0x0000816a, 0xff6a0c00, 0x0000a16a,
1717 0xff730c00, 0x0000c26a, 0xff7b0c00, 0x0000e26a,
1718 0xff830c00, 0x00000073, 0xff8b0c00, 0x00002073,
1719 0xff940c00, 0x00004073, 0xff9c0c00, 0x00006173,
1720 0xffa40c00, 0x00008173, 0xffac0c00, 0x0000a173,
1721 0xffb40c00, 0x0000c273, 0xffbd0c00, 0x0000e273,
1722 0xffc50c00, 0x0000007b, 0xffcd0c00, 0x0000207b,
1723 0xffd50c00, 0x0000407b, 0xffde0c00, 0x0000617b,
1724 0xffe60c00, 0x0000817b, 0xffee0c00, 0x0000a17b,
1725 0xfff60c00, 0x0000c27b, 0xffff0c00, 0x0000e27b,
1726 0xff001000, 0x00000083, 0xff081000, 0x00002083,
1727 0xff101000, 0x00004083, 0xff181000, 0x00006183,
1728 0xff201000, 0x00008183, 0xff291000, 0x0000a183,
1729 0xff311000, 0x0000c283, 0xff391000, 0x0000e283,
1730 0xff411000, 0x0000008b, 0xff4a1000, 0x0000208b,
1731 0xff521000, 0x0000408b, 0xff5a1000, 0x0000618b,
1732 0xff621000, 0x0000818b, 0xff6a1000, 0x0000a18b,
1733 0xff731000, 0x0000c28b, 0xff7b1000, 0x0000e28b,
1734 0xff831000, 0x00000094, 0xff8b1000, 0x00002094,
1735 0xff941000, 0x00004094, 0xff9c1000, 0x00006194,
1736 0xffa41000, 0x00008194, 0xffac1000, 0x0000a194,
1737 0xffb41000, 0x0000c294, 0xffbd1000, 0x0000e294,
1738 0xffc51000, 0x0000009c, 0xffcd1000, 0x0000209c,
1739 0xffd51000, 0x0000409c, 0xffde1000, 0x0000619c,
1740 0xffe61000, 0x0000819c, 0xffee1000, 0x0000a19c,
1741 0xfff61000, 0x0000c29c, 0xffff1000, 0x0000e29c,
1742 0xff001400, 0x000000a4, 0xff081400, 0x000020a4,
1743 0xff101400, 0x000040a4, 0xff181400, 0x000061a4,
1744 0xff201400, 0x000081a4, 0xff291400, 0x0000a1a4,
1745 0xff311400, 0x0000c2a4, 0xff391400, 0x0000e2a4,
1746 0xff411400, 0x000000ac, 0xff4a1400, 0x000020ac,
1747 0xff521400, 0x000040ac, 0xff5a1400, 0x000061ac,
1748 0xff621400, 0x000081ac, 0xff6a1400, 0x0000a1ac,
1749 0xff731400, 0x0000c2ac, 0xff7b1400, 0x0000e2ac,
1750 0xff831400, 0x000000b4, 0xff8b1400, 0x000020b4,
1751 0xff941400, 0x000040b4, 0xff9c1400, 0x000061b4,
1752 0xffa41400, 0x000081b4, 0xffac1400, 0x0000a1b4,
1753 0xffb41400, 0x0000c2b4, 0xffbd1400, 0x0000e2b4,
1754 0xffc51400, 0x000000bd, 0xffcd1400, 0x000020bd,
1755 0xffd51400, 0x000040bd, 0xffde1400, 0x000061bd,
1756 0xffe61400, 0x000081bd, 0xffee1400, 0x0000a1bd,
1757 0xfff61400, 0x0000c2bd, 0xffff1400, 0x0000e2bd,
1758 0xff001800, 0x000000c5, 0xff081800, 0x000020c5,
1759 0xff101800, 0x000040c5, 0xff181800, 0x000061c5,
1760 0xff201800, 0x000081c5, 0xff291800, 0x0000a1c5,
1761 0xff311800, 0x0000c2c5, 0xff391800, 0x0000e2c5,
1762 0xff411800, 0x000000cd, 0xff4a1800, 0x000020cd,
1763 0xff521800, 0x000040cd, 0xff5a1800, 0x000061cd,
1764 0xff621800, 0x000081cd, 0xff6a1800, 0x0000a1cd,
1765 0xff731800, 0x0000c2cd, 0xff7b1800, 0x0000e2cd,
1766 0xff831800, 0x000000d5, 0xff8b1800, 0x000020d5,
1767 0xff941800, 0x000040d5, 0xff9c1800, 0x000061d5,
1768 0xffa41800, 0x000081d5, 0xffac1800, 0x0000a1d5,
1769 0xffb41800, 0x0000c2d5, 0xffbd1800, 0x0000e2d5,
1770 0xffc51800, 0x000000de, 0xffcd1800, 0x000020de,
1771 0xffd51800, 0x000040de, 0xffde1800, 0x000061de,
1772 0xffe61800, 0x000081de, 0xffee1800, 0x0000a1de,
1773 0xfff61800, 0x0000c2de, 0xffff1800, 0x0000e2de,
1774 0xff001c00, 0x000000e6, 0xff081c00, 0x000020e6,
1775 0xff101c00, 0x000040e6, 0xff181c00, 0x000061e6,
1776 0xff201c00, 0x000081e6, 0xff291c00, 0x0000a1e6,
1777 0xff311c00, 0x0000c2e6, 0xff391c00, 0x0000e2e6,
1778 0xff411c00, 0x000000ee, 0xff4a1c00, 0x000020ee,
1779 0xff521c00, 0x000040ee, 0xff5a1c00, 0x000061ee,
1780 0xff621c00, 0x000081ee, 0xff6a1c00, 0x0000a1ee,
1781 0xff731c00, 0x0000c2ee, 0xff7b1c00, 0x0000e2ee,
1782 0xff831c00, 0x000000f6, 0xff8b1c00, 0x000020f6,
1783 0xff941c00, 0x000040f6, 0xff9c1c00, 0x000061f6,
1784 0xffa41c00, 0x000081f6, 0xffac1c00, 0x0000a1f6,
1785 0xffb41c00, 0x0000c2f6, 0xffbd1c00, 0x0000e2f6,
1786 0xffc51c00, 0x000000ff, 0xffcd1c00, 0x000020ff,
1787 0xffd51c00, 0x000040ff, 0xffde1c00, 0x000061ff,
1788 0xffe61c00, 0x000081ff, 0xffee1c00, 0x0000a1ff,
1789 0xfff61c00, 0x0000c2ff, 0xffff1c00, 0x0000e2ff
1800 0x000000ff, 0x00000000, 0x000008ff, 0x00200000,
1801 0x000010ff, 0x00400000, 0x000018ff, 0x00610000,
1802 0x000020ff, 0x00810000, 0x000029ff, 0x00a10000,
1803 0x000031ff, 0x00c20000, 0x000039ff, 0x00e20000,
1804 0x000041ff, 0x08000000, 0x00004aff, 0x08200000,
1805 0x000052ff, 0x08400000, 0x00005aff, 0x08610000,
1806 0x000062ff, 0x08810000, 0x00006aff, 0x08a10000,
1807 0x000073ff, 0x08c20000, 0x00007bff, 0x08e20000,
1808 0x000083ff, 0x10000000, 0x00008bff, 0x10200000,
1809 0x000094ff, 0x10400000, 0x00009cff, 0x10610000,
1810 0x0000a4ff, 0x10810000, 0x0000acff, 0x10a10000,
1811 0x0000b4ff, 0x10c20000, 0x0000bdff, 0x10e20000,
1812 0x0000c5ff, 0x18000000, 0x0000cdff, 0x18200000,
1813 0x0000d5ff, 0x18400000, 0x0000deff, 0x18610000,
1814 0x0000e6ff, 0x18810000, 0x0000eeff, 0x18a10000,
1815 0x0000f6ff, 0x18c20000, 0x0000ffff, 0x18e20000,
1816 0x000400ff, 0x20000000, 0x000408ff, 0x20200000,
1817 0x000410ff, 0x20400000, 0x000418ff, 0x20610000,
1818 0x000420ff, 0x20810000, 0x000429ff, 0x20a10000,
1819 0x000431ff, 0x20c20000, 0x000439ff, 0x20e20000,
1820 0x000441ff, 0x29000000, 0x00044aff, 0x29200000,
1821 0x000452ff, 0x29400000, 0x00045aff, 0x29610000,
1822 0x000462ff, 0x29810000, 0x00046aff, 0x29a10000,
1823 0x000473ff, 0x29c20000, 0x00047bff, 0x29e20000,
1824 0x000483ff, 0x31000000, 0x00048bff, 0x31200000,
1825 0x000494ff, 0x31400000, 0x00049cff, 0x31610000,
1826 0x0004a4ff, 0x31810000, 0x0004acff, 0x31a10000,
1827 0x0004b4ff, 0x31c20000, 0x0004bdff, 0x31e20000,
1828 0x0004c5ff, 0x39000000, 0x0004cdff, 0x39200000,
1829 0x0004d5ff, 0x39400000, 0x0004deff, 0x39610000,
1830 0x0004e6ff, 0x39810000, 0x0004eeff, 0x39a10000,
1831 0x0004f6ff, 0x39c20000, 0x0004ffff, 0x39e20000,
1832 0x000800ff, 0x41000000, 0x000808ff, 0x41200000,
1833 0x000810ff, 0x41400000, 0x000818ff, 0x41610000,
1834 0x000820ff, 0x41810000, 0x000829ff, 0x41a10000,
1835 0x000831ff, 0x41c20000, 0x000839ff, 0x41e20000,
1836 0x000841ff, 0x4a000000, 0x00084aff, 0x4a200000,
1837 0x000852ff, 0x4a400000, 0x00085aff, 0x4a610000,
1838 0x000862ff, 0x4a810000, 0x00086aff, 0x4aa10000,
1839 0x000873ff, 0x4ac20000, 0x00087bff, 0x4ae20000,
1840 0x000883ff, 0x52000000, 0x00088bff, 0x52200000,
1841 0x000894ff, 0x52400000, 0x00089cff, 0x52610000,
1842 0x0008a4ff, 0x52810000, 0x0008acff, 0x52a10000,
1843 0x0008b4ff, 0x52c20000, 0x0008bdff, 0x52e20000,
1844 0x0008c5ff, 0x5a000000, 0x0008cdff, 0x5a200000,
1845 0x0008d5ff, 0x5a400000, 0x0008deff, 0x5a610000,
1846 0x0008e6ff, 0x5a810000, 0x0008eeff, 0x5aa10000,
1847 0x0008f6ff, 0x5ac20000, 0x0008ffff, 0x5ae20000,
1848 0x000c00ff, 0x62000000, 0x000c08ff, 0x62200000,
1849 0x000c10ff, 0x62400000, 0x000c18ff, 0x62610000,
1850 0x000c20ff, 0x62810000, 0x000c29ff, 0x62a10000,
1851 0x000c31ff, 0x62c20000, 0x000c39ff, 0x62e20000,
1852 0x000c41ff, 0x6a000000, 0x000c4aff, 0x6a200000,
1853 0x000c52ff, 0x6a400000, 0x000c5aff, 0x6a610000,
1854 0x000c62ff, 0x6a810000, 0x000c6aff, 0x6aa10000,
1855 0x000c73ff, 0x6ac20000, 0x000c7bff, 0x6ae20000,
1856 0x000c83ff, 0x73000000, 0x000c8bff, 0x73200000,
1857 0x000c94ff, 0x73400000, 0x000c9cff, 0x73610000,
1858 0x000ca4ff, 0x73810000, 0x000cacff, 0x73a10000,
1859 0x000cb4ff, 0x73c20000, 0x000cbdff, 0x73e20000,
1860 0x000cc5ff, 0x7b000000, 0x000ccdff, 0x7b200000,
1861 0x000cd5ff, 0x7b400000, 0x000cdeff, 0x7b610000,
1862 0x000ce6ff, 0x7b810000, 0x000ceeff, 0x7ba10000,
1863 0x000cf6ff, 0x7bc20000, 0x000cffff, 0x7be20000,
1864 0x001000ff, 0x83000000, 0x001008ff, 0x83200000,
1865 0x001010ff, 0x83400000, 0x001018ff, 0x83610000,
1866 0x001020ff, 0x83810000, 0x001029ff, 0x83a10000,
1867 0x001031ff, 0x83c20000, 0x001039ff, 0x83e20000,
1868 0x001041ff, 0x8b000000, 0x00104aff, 0x8b200000,
1869 0x001052ff, 0x8b400000, 0x00105aff, 0x8b610000,
1870 0x001062ff, 0x8b810000, 0x00106aff, 0x8ba10000,
1871 0x001073ff, 0x8bc20000, 0x00107bff, 0x8be20000,
1872 0x001083ff, 0x94000000, 0x00108bff, 0x94200000,
1873 0x001094ff, 0x94400000, 0x00109cff, 0x94610000,
1874 0x0010a4ff, 0x94810000, 0x0010acff, 0x94a10000,
1875 0x0010b4ff, 0x94c20000, 0x0010bdff, 0x94e20000,
1876 0x0010c5ff, 0x9c000000, 0x0010cdff, 0x9c200000,
1877 0x0010d5ff, 0x9c400000, 0x0010deff, 0x9c610000,
1878 0x0010e6ff, 0x9c810000, 0x0010eeff, 0x9ca10000,
1879 0x0010f6ff, 0x9cc20000, 0x0010ffff, 0x9ce20000,
1880 0x001400ff, 0xa4000000, 0x001408ff, 0xa4200000,
1881 0x001410ff, 0xa4400000, 0x001418ff, 0xa4610000,
1882 0x001420ff, 0xa4810000, 0x001429ff, 0xa4a10000,
1883 0x001431ff, 0xa4c20000, 0x001439ff, 0xa4e20000,
1884 0x001441ff, 0xac000000, 0x00144aff, 0xac200000,
1885 0x001452ff, 0xac400000, 0x00145aff, 0xac610000,
1886 0x001462ff, 0xac810000, 0x00146aff, 0xaca10000,
1887 0x001473ff, 0xacc20000, 0x00147bff, 0xace20000,
1888 0x001483ff, 0xb4000000, 0x00148bff, 0xb4200000,
1889 0x001494ff, 0xb4400000, 0x00149cff, 0xb4610000,
1890 0x0014a4ff, 0xb4810000, 0x0014acff, 0xb4a10000,
1891 0x0014b4ff, 0xb4c20000, 0x0014bdff, 0xb4e20000,
1892 0x0014c5ff, 0xbd000000, 0x0014cdff, 0xbd200000,
1893 0x0014d5ff, 0xbd400000, 0x0014deff, 0xbd610000,
1894 0x0014e6ff, 0xbd810000, 0x0014eeff, 0xbda10000,
1895 0x0014f6ff, 0xbdc20000, 0x0014ffff, 0xbde20000,
1896 0x001800ff, 0xc5000000, 0x001808ff, 0xc5200000,
1897 0x001810ff, 0xc5400000, 0x001818ff, 0xc5610000,
1898 0x001820ff, 0xc5810000, 0x001829ff, 0xc5a10000,
1899 0x001831ff, 0xc5c20000, 0x001839ff, 0xc5e20000,
1900 0x001841ff, 0xcd000000, 0x00184aff, 0xcd200000,
1901 0x001852ff, 0xcd400000, 0x00185aff, 0xcd610000,
1902 0x001862ff, 0xcd810000, 0x00186aff, 0xcda10000,
1903 0x001873ff, 0xcdc20000, 0x00187bff, 0xcde20000,
1904 0x001883ff, 0xd5000000, 0x00188bff, 0xd5200000,
1905 0x001894ff, 0xd5400000, 0x00189cff, 0xd5610000,
1906 0x0018a4ff, 0xd5810000, 0x0018acff, 0xd5a10000,
1907 0x0018b4ff, 0xd5c20000, 0x0018bdff, 0xd5e20000,
1908 0x0018c5ff, 0xde000000, 0x0018cdff, 0xde200000,
1909 0x0018d5ff, 0xde400000, 0x0018deff, 0xde610000,
1910 0x0018e6ff, 0xde810000, 0x0018eeff, 0xdea10000,
1911 0x0018f6ff, 0xdec20000, 0x0018ffff, 0xdee20000,
1912 0x001c00ff, 0xe6000000, 0x001c08ff, 0xe6200000,
1913 0x001c10ff, 0xe6400000, 0x001c18ff, 0xe6610000,
1914 0x001c20ff, 0xe6810000, 0x001c29ff, 0xe6a10000,
1915 0x001c31ff, 0xe6c20000, 0x001c39ff, 0xe6e20000,
1916 0x001c41ff, 0xee000000, 0x001c4aff, 0xee200000,
1917 0x001c52ff, 0xee400000, 0x001c5aff, 0xee610000,
1918 0x001c62ff, 0xee810000, 0x001c6aff, 0xeea10000,
1919 0x001c73ff, 0xeec20000, 0x001c7bff, 0xeee20000,
1920 0x001c83ff, 0xf6000000, 0x001c8bff, 0xf6200000,
1921 0x001c94ff, 0xf6400000, 0x001c9cff, 0xf6610000,
1922 0x001ca4ff, 0xf6810000, 0x001cacff, 0xf6a10000,
1923 0x001cb4ff, 0xf6c20000, 0x001cbdff, 0xf6e20000,
1924 0x001cc5ff, 0xff000000, 0x001ccdff, 0xff200000,
1925 0x001cd5ff, 0xff400000, 0x001cdeff, 0xff610000,
1926 0x001ce6ff, 0xff810000, 0x001ceeff, 0xffa10000,
1927 0x001cf6ff, 0xffc20000, 0x001cffff, 0xffe20000,
1938 0x00000000, 0x000000ff, 0x08000000, 0x002000ff,
1939 0x10000000, 0x004000ff, 0x18000000, 0x006100ff,
1940 0x20000000, 0x008100ff, 0x29000000, 0x00a100ff,
1941 0x31000000, 0x00c200ff, 0x39000000, 0x00e200ff,
1942 0x41000000, 0x000008ff, 0x4a000000, 0x002008ff,
1943 0x52000000, 0x004008ff, 0x5a000000, 0x006108ff,
1944 0x62000000, 0x008108ff, 0x6a000000, 0x00a108ff,
1945 0x73000000, 0x00c208ff, 0x7b000000, 0x00e208ff,
1946 0x83000000, 0x000010ff, 0x8b000000, 0x002010ff,
1947 0x94000000, 0x004010ff, 0x9c000000, 0x006110ff,
1948 0xa4000000, 0x008110ff, 0xac000000, 0x00a110ff,
1949 0xb4000000, 0x00c210ff, 0xbd000000, 0x00e210ff,
1950 0xc5000000, 0x000018ff, 0xcd000000, 0x002018ff,
1951 0xd5000000, 0x004018ff, 0xde000000, 0x006118ff,
1952 0xe6000000, 0x008118ff, 0xee000000, 0x00a118ff,
1953 0xf6000000, 0x00c218ff, 0xff000000, 0x00e218ff,
1954 0x00040000, 0x000020ff, 0x08040000, 0x002020ff,
1955 0x10040000, 0x004020ff, 0x18040000, 0x006120ff,
1956 0x20040000, 0x008120ff, 0x29040000, 0x00a120ff,
1957 0x31040000, 0x00c220ff, 0x39040000, 0x00e220ff,
1958 0x41040000, 0x000029ff, 0x4a040000, 0x002029ff,
1959 0x52040000, 0x004029ff, 0x5a040000, 0x006129ff,
1960 0x62040000, 0x008129ff, 0x6a040000, 0x00a129ff,
1961 0x73040000, 0x00c229ff, 0x7b040000, 0x00e229ff,
1962 0x83040000, 0x000031ff, 0x8b040000, 0x002031ff,
1963 0x94040000, 0x004031ff, 0x9c040000, 0x006131ff,
1964 0xa4040000, 0x008131ff, 0xac040000, 0x00a131ff,
1965 0xb4040000, 0x00c231ff, 0xbd040000, 0x00e231ff,
1966 0xc5040000, 0x000039ff, 0xcd040000, 0x002039ff,
1967 0xd5040000, 0x004039ff, 0xde040000, 0x006139ff,
1968 0xe6040000, 0x008139ff, 0xee040000, 0x00a139ff,
1969 0xf6040000, 0x00c239ff, 0xff040000, 0x00e239ff,
1970 0x00080000, 0x000041ff, 0x08080000, 0x002041ff,
1971 0x10080000, 0x004041ff, 0x18080000, 0x006141ff,
1972 0x20080000, 0x008141ff, 0x29080000, 0x00a141ff,
1973 0x31080000, 0x00c241ff, 0x39080000, 0x00e241ff,
1974 0x41080000, 0x00004aff, 0x4a080000, 0x00204aff,
1975 0x52080000, 0x00404aff, 0x5a080000, 0x00614aff,
1976 0x62080000, 0x00814aff, 0x6a080000, 0x00a14aff,
1977 0x73080000, 0x00c24aff, 0x7b080000, 0x00e24aff,
1978 0x83080000, 0x000052ff, 0x8b080000, 0x002052ff,
1979 0x94080000, 0x004052ff, 0x9c080000, 0x006152ff,
1980 0xa4080000, 0x008152ff, 0xac080000, 0x00a152ff,
1981 0xb4080000, 0x00c252ff, 0xbd080000, 0x00e252ff,
1982 0xc5080000, 0x00005aff, 0xcd080000, 0x00205aff,
1983 0xd5080000, 0x00405aff, 0xde080000, 0x00615aff,
1984 0xe6080000, 0x00815aff, 0xee080000, 0x00a15aff,
1985 0xf6080000, 0x00c25aff, 0xff080000, 0x00e25aff,
1986 0x000c0000, 0x000062ff, 0x080c0000, 0x002062ff,
1987 0x100c0000, 0x004062ff, 0x180c0000, 0x006162ff,
1988 0x200c0000, 0x008162ff, 0x290c0000, 0x00a162ff,
1989 0x310c0000, 0x00c262ff, 0x390c0000, 0x00e262ff,
1990 0x410c0000, 0x00006aff, 0x4a0c0000, 0x00206aff,
1991 0x520c0000, 0x00406aff, 0x5a0c0000, 0x00616aff,
1992 0x620c0000, 0x00816aff, 0x6a0c0000, 0x00a16aff,
1993 0x730c0000, 0x00c26aff, 0x7b0c0000, 0x00e26aff,
1994 0x830c0000, 0x000073ff, 0x8b0c0000, 0x002073ff,
1995 0x940c0000, 0x004073ff, 0x9c0c0000, 0x006173ff,
1996 0xa40c0000, 0x008173ff, 0xac0c0000, 0x00a173ff,
1997 0xb40c0000, 0x00c273ff, 0xbd0c0000, 0x00e273ff,
1998 0xc50c0000, 0x00007bff, 0xcd0c0000, 0x00207bff,
1999 0xd50c0000, 0x00407bff, 0xde0c0000, 0x00617bff,
2000 0xe60c0000, 0x00817bff, 0xee0c0000, 0x00a17bff,
2001 0xf60c0000, 0x00c27bff, 0xff0c0000, 0x00e27bff,
2002 0x00100000, 0x000083ff, 0x08100000, 0x002083ff,
2003 0x10100000, 0x004083ff, 0x18100000, 0x006183ff,
2004 0x20100000, 0x008183ff, 0x29100000, 0x00a183ff,
2005 0x31100000, 0x00c283ff, 0x39100000, 0x00e283ff,
2006 0x41100000, 0x00008bff, 0x4a100000, 0x00208bff,
2007 0x52100000, 0x00408bff, 0x5a100000, 0x00618bff,
2008 0x62100000, 0x00818bff, 0x6a100000, 0x00a18bff,
2009 0x73100000, 0x00c28bff, 0x7b100000, 0x00e28bff,
2010 0x83100000, 0x000094ff, 0x8b100000, 0x002094ff,
2011 0x94100000, 0x004094ff, 0x9c100000, 0x006194ff,
2012 0xa4100000, 0x008194ff, 0xac100000, 0x00a194ff,
2013 0xb4100000, 0x00c294ff, 0xbd100000, 0x00e294ff,
2014 0xc5100000, 0x00009cff, 0xcd100000, 0x00209cff,
2015 0xd5100000, 0x00409cff, 0xde100000, 0x00619cff,
2016 0xe6100000, 0x00819cff, 0xee100000, 0x00a19cff,
2017 0xf6100000, 0x00c29cff, 0xff100000, 0x00e29cff,
2018 0x00140000, 0x0000a4ff, 0x08140000, 0x0020a4ff,
2019 0x10140000, 0x0040a4ff, 0x18140000, 0x0061a4ff,
2020 0x20140000, 0x0081a4ff, 0x29140000, 0x00a1a4ff,
2021 0x31140000, 0x00c2a4ff, 0x39140000, 0x00e2a4ff,
2022 0x41140000, 0x0000acff, 0x4a140000, 0x0020acff,
2023 0x52140000, 0x0040acff, 0x5a140000, 0x0061acff,
2024 0x62140000, 0x0081acff, 0x6a140000, 0x00a1acff,
2025 0x73140000, 0x00c2acff, 0x7b140000, 0x00e2acff,
2026 0x83140000, 0x0000b4ff, 0x8b140000, 0x0020b4ff,
2027 0x94140000, 0x0040b4ff, 0x9c140000, 0x0061b4ff,
2028 0xa4140000, 0x0081b4ff, 0xac140000, 0x00a1b4ff,
2029 0xb4140000, 0x00c2b4ff, 0xbd140000, 0x00e2b4ff,
2030 0xc5140000, 0x0000bdff, 0xcd140000, 0x0020bdff,
2031 0xd5140000, 0x0040bdff, 0xde140000, 0x0061bdff,
2032 0xe6140000, 0x0081bdff, 0xee140000, 0x00a1bdff,
2033 0xf6140000, 0x00c2bdff, 0xff140000, 0x00e2bdff,
2034 0x00180000, 0x0000c5ff, 0x08180000, 0x0020c5ff,
2035 0x10180000, 0x0040c5ff, 0x18180000, 0x0061c5ff,
2036 0x20180000, 0x0081c5ff, 0x29180000, 0x00a1c5ff,
2037 0x31180000, 0x00c2c5ff, 0x39180000, 0x00e2c5ff,
2038 0x41180000, 0x0000cdff, 0x4a180000, 0x0020cdff,
2039 0x52180000, 0x0040cdff, 0x5a180000, 0x0061cdff,
2040 0x62180000, 0x0081cdff, 0x6a180000, 0x00a1cdff,
2041 0x73180000, 0x00c2cdff, 0x7b180000, 0x00e2cdff,
2042 0x83180000, 0x0000d5ff, 0x8b180000, 0x0020d5ff,
2043 0x94180000, 0x0040d5ff, 0x9c180000, 0x0061d5ff,
2044 0xa4180000, 0x0081d5ff, 0xac180000, 0x00a1d5ff,
2045 0xb4180000, 0x00c2d5ff, 0xbd180000, 0x00e2d5ff,
2046 0xc5180000, 0x0000deff, 0xcd180000, 0x0020deff,
2047 0xd5180000, 0x0040deff, 0xde180000, 0x0061deff,
2048 0xe6180000, 0x0081deff, 0xee180000, 0x00a1deff,
2049 0xf6180000, 0x00c2deff, 0xff180000, 0x00e2deff,
2050 0x001c0000, 0x0000e6ff, 0x081c0000, 0x0020e6ff,
2051 0x101c0000, 0x0040e6ff, 0x181c0000, 0x0061e6ff,
2052 0x201c0000, 0x0081e6ff, 0x291c0000, 0x00a1e6ff,
2053 0x311c0000, 0x00c2e6ff, 0x391c0000, 0x00e2e6ff,
2054 0x411c0000, 0x0000eeff, 0x4a1c0000, 0x0020eeff,
2055 0x521c0000, 0x0040eeff, 0x5a1c0000, 0x0061eeff,
2056 0x621c0000, 0x0081eeff, 0x6a1c0000, 0x00a1eeff,
2057 0x731c0000, 0x00c2eeff, 0x7b1c0000, 0x00e2eeff,
2058 0x831c0000, 0x0000f6ff, 0x8b1c0000, 0x0020f6ff,
2059 0x941c0000, 0x0040f6ff, 0x9c1c0000, 0x0061f6ff,
2060 0xa41c0000, 0x0081f6ff, 0xac1c0000, 0x00a1f6ff,
2061 0xb41c0000, 0x00c2f6ff, 0xbd1c0000, 0x00e2f6ff,
2062 0xc51c0000, 0x0000ffff, 0xcd1c0000, 0x0020ffff,
2063 0xd51c0000, 0x0040ffff, 0xde1c0000, 0x0061ffff,
2064 0xe61c0000, 0x0081ffff, 0xee1c0000, 0x00a1ffff,
2065 0xf61c0000, 0x00c2ffff, 0xff1c0000, 0x00e2ffff
2079 #ifndef USE_DUFFS_LOOP
2086 int srcskip, dstskip;
2105 #ifdef USE_DUFFS_LOOP
2112 *
dst = ((sR>>5)<<(3+2))|
2125 *
dst = ((sR >> 5) << (3 + 2)) |
2126 ((sG >> 5) << (2)) | ((sB >> 6) << (0));
2137 #ifdef USE_DUFFS_LOOP
2144 *
dst =
map[((sR>>5)<<(3+2))|
2157 *
dst =
map[((sR >> 5) << (3 + 2)) |
2158 ((sG >> 5) << (2)) | ((sB >> 6) << (0))];
2183 if (dstfmt->
Amask) {
2250 int *_p0 ,
int *_p1,
int *_p2,
int *_p3,
int *_alpha_channel)
2252 int alpha_channel = 0, p0, p1, p2, p3;
2253 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2254 int Pixel = 0x04030201;
2256 int Pixel = 0x01020304;
2261 if (srcfmt->
Amask) {
2268 if (dstfmt->
Amask) {
2269 if (srcfmt->
Amask) {
2278 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2280 p1 = (Pixel >> 8) & 0xFF;
2281 p2 = (Pixel >> 16) & 0xFF;
2282 p3 = (Pixel >> 24) & 0xFF;
2285 p2 = (Pixel >> 8) & 0xFF;
2286 p1 = (Pixel >> 16) & 0xFF;
2287 p0 = (Pixel >> 24) & 0xFF;
2293 }
else if (p1 == 0) {
2296 }
else if (p2 == 0) {
2299 }
else if (p3 == 0) {
2304 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2306 if (srcbpp == 3 && dstbpp == 4) {
2311 }
else if (srcbpp == 4 && dstbpp == 3) {
2322 if (_alpha_channel) {
2323 *_alpha_channel = alpha_channel;
2343 #if HAVE_FAST_WRITE_INT8
2345 if (srcbpp == 4 && dstbpp == 4 &&
2350 int alpha_channel, p0, p1, p2, p3;
2374 if (srcbpp == 4 && dstbpp == 3 &&
2398 #if HAVE_FAST_WRITE_INT8
2400 if (srcbpp == 3 && dstbpp == 4 &&
2404 int alpha_channel, p0, p1, p2, p3;
2462 #if HAVE_FAST_WRITE_INT8
2464 if (srcbpp == 4 && dstbpp == 4 &&
2494 unsigned sR, sG, sB, sA;
2520 unsigned sR, sG, sB;
2526 if (palmap ==
NULL) {
2533 if ( (Pixel & rgbmask) != ckey ) {
2554 if ( (Pixel & rgbmask) != ckey ) {
2556 *
dst = (
Uint8)palmap[((sR>>5)<<(3+2))|
2592 if ( (*srcp & rgbmask) != ckey ) {
2621 int sfmt = srcfmt->
format;
2622 int dfmt = dstfmt->
format;
2632 if (dstfmt->
Amask) {
2639 if ((*src32 & rgbmask) != ckey) {
2640 *dst32 = *src32 |
mask;
2657 if ((*src32 & rgbmask) != ckey) {
2658 *dst32 = *src32 &
mask;
2671 #if HAVE_FAST_WRITE_INT8
2673 if (srcbpp == 4 && dstbpp == 4 &&
2678 int alpha_channel, p0, p1, p2, p3;
2687 if ((*src32 & rgbmask) != ckey) {
2709 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2710 Uint8 k0 = ckey & 0xFF;
2711 Uint8 k1 = (ckey >> 8) & 0xFF;
2712 Uint8 k2 = (ckey >> 16) & 0xFF;
2714 Uint8 k0 = (ckey >> 16) & 0xFF;
2715 Uint8 k1 = (ckey >> 8) & 0xFF;
2716 Uint8 k2 = ckey & 0xFF;
2727 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2747 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2748 Uint8 k0 = ckey & 0xFF;
2749 Uint8 k1 = (ckey >> 8) & 0xFF;
2750 Uint8 k2 = (ckey >> 16) & 0xFF;
2752 Uint8 k0 = (ckey >> 16) & 0xFF;
2753 Uint8 k1 = (ckey >> 8) & 0xFF;
2754 Uint8 k2 = ckey & 0xFF;
2764 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2782 if (srcbpp == 4 && dstbpp == 3 &&
2794 if ((*src32 & rgbmask) != ckey) {
2809 #if HAVE_FAST_WRITE_INT8
2811 if (srcbpp == 3 && dstbpp == 4 &&
2814 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2815 Uint8 k0 = ckey & 0xFF;
2816 Uint8 k1 = (ckey >> 8) & 0xFF;
2817 Uint8 k2 = (ckey >> 16) & 0xFF;
2819 Uint8 k0 = (ckey >> 16) & 0xFF;
2820 Uint8 k1 = (ckey >> 8) & 0xFF;
2821 Uint8 k2 = ckey & 0xFF;
2825 int alpha_channel, p0, p1, p2, p3;
2836 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2863 if ( (Pixel & rgbmask) != ckey ) {
2894 unsigned sR, sG, sB, sA;
2915 if ((*src32 & rgbmask) != ckey) {
2930 #if HAVE_FAST_WRITE_INT8
2932 if (srcbpp == 4 && dstbpp == 4 &&
2945 if ((*src32 & rgbmask) != ckey) {
2967 if ( (Pixel & rgbmask) != ckey ) {
2993 unsigned sR, sG, sB, sA;
3025 unsigned sR, sG, sB, sA;
3059 if (dstfmt->
Amask) {
3062 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3063 int i0 = 0,
i1 = 1,
i2 = 2;
3065 int i0 = srcbpp - 1 - 0;
3066 int i1 = srcbpp - 1 - 1;
3067 int i2 = srcbpp - 1 - 2;
3077 *dst32 = (
s0) | (
s1 << 8) | (s2 << 16) |
mask;
3087 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3088 int i0 = 0,
i1 = 1,
i2 = 2;
3089 int j0 = 0,
j1 = 1, j2 = 2;
3091 int i0 = srcbpp - 1 - 0;
3092 int i1 = srcbpp - 1 - 1;
3093 int i2 = srcbpp - 1 - 2;
3094 int j0 = dstbpp - 1 - 0;
3095 int j1 = dstbpp - 1 - 1;
3096 int j2 = dstbpp - 1 - 2;
3133 if (dstfmt->
Amask) {
3134 if (srcfmt->
Amask) {
3138 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3139 int i0 = 0,
i1 = 1,
i2 = 2, i3 = 3;
3141 int i0 = 3,
i1 = 2,
i2 = 1, i3 = 0;
3152 *dst32 = (
s0 << 16) | (
s1 << 8) | (s2) | alphashift;
3163 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3164 int i0 = 0,
i1 = 1,
i2 = 2;
3166 int i0 = srcbpp - 1 - 0;
3167 int i1 = srcbpp - 1 - 1;
3168 int i2 = srcbpp - 1 - 2;
3179 *dst32 = (
s0 << 16) | (
s1 << 8) | (s2) |
mask;
3190 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3191 int i0 = 0,
i1 = 1,
i2 = 2;
3192 int j0 = 2,
j1 = 1, j2 = 0;
3194 int i0 = srcbpp - 1 - 0;
3195 int i1 = srcbpp - 1 - 1;
3196 int i2 = srcbpp - 1 - 2;
3197 int j0 = dstbpp - 1 - 2;
3198 int j1 = dstbpp - 1 - 1;
3199 int j2 = dstbpp - 1 - 0;
3225 #define COPY_ALPHA 4
3237 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3241 #if SDL_ALTIVEC_BLITTERS
3243 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x00000000, 0x00000000, 0x00000000,
3245 {0x00007C00, 0x000003E0, 0x0000001F, 4, 0x00000000, 0x00000000, 0x00000000,
3248 #if SDL_ARM_SIMD_BLITTERS
3249 {0x00000F00, 0x000000F0, 0x0000000F, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3252 #if SDL_HAVE_BLIT_N_RGB565
3253 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3255 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3257 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0xFF000000, 0x00FF0000, 0x0000FF00,
3259 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x0000FF00, 0x00FF0000, 0xFF000000,
3264 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3269 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3271 #if HAVE_FAST_WRITE_INT8
3275 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3277 #if HAVE_FAST_WRITE_INT8
3282 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3284 #if HAVE_FAST_WRITE_INT8
3288 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3290 #if HAVE_FAST_WRITE_INT8
3295 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3297 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3300 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3304 #if SDL_ALTIVEC_BLITTERS
3306 {0x00000000, 0x00000000, 0x00000000, 4, 0x00000000, 0x00000000, 0x00000000,
3309 {0x00000000, 0x00000000, 0x00000000, 4, 0x00000000, 0x00000000, 0x00000000,
3312 {0x00000000, 0x00000000, 0x00000000, 2, 0x0000F800, 0x000007E0, 0x0000001F,
3315 #if SDL_ARM_SIMD_BLITTERS
3316 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3320 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3322 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3325 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3327 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3330 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3332 #if HAVE_FAST_WRITE_INT8
3336 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3338 #if HAVE_FAST_WRITE_INT8
3343 {0x00FF0000, 0x0000FF00, 0x000000FF, 2, 0x0000F800, 0x000007E0, 0x0000001F,
3345 {0x00FF0000, 0x0000FF00, 0x000000FF, 2, 0x00007C00, 0x000003E0, 0x0000001F,
3348 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3356 #define MASKOK(x, y) (((x) == (y)) || ((y) == 0x00000000))
3381 (srcfmt->
Rmask == 0x00FF0000) &&
3382 (srcfmt->
Gmask == 0x0000FF00) &&
3383 (srcfmt->
Bmask == 0x000000FF)) {
3386 (srcfmt->
Rmask == 0x3FF00000) &&
3387 (srcfmt->
Gmask == 0x000FFC00) &&
3388 (srcfmt->
Bmask == 0x000003FF)) {
3399 for (which = 0;
table[which].dstbpp; ++which) {
3407 (a_need &
table[which].alpha) == a_need &&
3452 #if SDL_ALTIVEC_BLITTERS
3455 return Blit32to32KeyAltivec;
#define SDL_assert(condition)
#define ASSEMBLE_RGBA(buf, bpp, fmt, r, g, b, a)
#define DISEMBLE_RGB(buf, bpp, fmt, Pixel, r, g, b)
#define RGBA_FROM_8888(Pixel, fmt, r, g, b, a)
#define RETRIEVE_RGB_PIXEL(buf, bpp, Pixel)
#define SDL_COPY_RLE_MASK
#define RGB_FROM_PIXEL(Pixel, fmt, r, g, b)
#define DISEMBLE_RGBA(buf, bpp, fmt, Pixel, r, g, b, a)
#define ARGB2101010_FROM_RGBA(Pixel, r, g, b, a)
#define PIXEL_FROM_RGB(Pixel, fmt, r, g, b)
#define DUFFS_LOOP(pixel_copy_increment, width)
#define RGBA_FROM_PIXEL(Pixel, fmt, r, g, b, a)
#define SDL_COPY_COLORKEY
void(* SDL_BlitFunc)(SDL_BlitInfo *info)
#define RGBA_FROM_ARGB2101010(Pixel, r, g, b, a)
#define PIXEL_FROM_RGBA(Pixel, fmt, r, g, b, a)
static void BlitNtoN(SDL_BlitInfo *info)
#define RGB888_RGB555(dst, src)
#define RGB565_32(dst, src, map)
static void Blit_RGB565_ABGR8888(SDL_BlitInfo *info)
static void BlitNto1Key(SDL_BlitInfo *info)
static void Blit2to2Key(SDL_BlitInfo *info)
static void Blit_3or4_to_3or4__inversed_rgb(SDL_BlitInfo *info)
static void BlitNto1(SDL_BlitInfo *info)
SDL_BlitFunc SDL_CalculateBlitN(SDL_Surface *surface)
static void Blit_RGB888_RGB565(SDL_BlitInfo *info)
static void Blit4to4CopyAlpha(SDL_BlitInfo *info)
@ BLIT_FEATURE_ALTIVEC_DONT_USE_PREFETCH
@ BLIT_FEATURE_HAS_ARM_SIMD
@ BLIT_FEATURE_HAS_ALTIVEC
static const struct blit_table normal_blit_3[]
#define RGB888_RGB332(dst, src)
#define RGB101010_RGB332(dst, src)
#define RGB888_RGB565(dst, src)
static const Uint32 RGB565_ARGB8888_LUT[512]
static void Blit_RGB565_BGRA8888(SDL_BlitInfo *info)
static void Blit_RGB565_RGBA8888(SDL_BlitInfo *info)
static void Blit_3or4_to_3or4__same_rgb(SDL_BlitInfo *info)
static void Blit_RGB565_ARGB8888(SDL_BlitInfo *info)
#define GetBlitFeatures()
static void Blit_RGB888_RGB555(SDL_BlitInfo *info)
static void Blit4to4MaskAlpha(SDL_BlitInfo *info)
static void get_permutation(SDL_PixelFormat *srcfmt, SDL_PixelFormat *dstfmt, int *_p0, int *_p1, int *_p2, int *_p3, int *_alpha_channel)
static const struct blit_table normal_blit_1[]
static const struct blit_table normal_blit_2[]
static void Blit_RGB565_32(SDL_BlitInfo *info, const Uint32 *map)
static void Blit_RGB888_index8(SDL_BlitInfo *info)
static void Blit_RGB101010_index8(SDL_BlitInfo *info)
static const Uint32 RGB565_RGBA8888_LUT[512]
static const struct blit_table *const normal_blit[]
static void BlitNtoNKey(SDL_BlitInfo *info)
static const struct blit_table normal_blit_4[]
static void BlitNtoNCopyAlpha(SDL_BlitInfo *info)
static void BlitNto2101010(SDL_BlitInfo *info)
static void BlitNtoNKeyCopyAlpha(SDL_BlitInfo *info)
static const Uint32 RGB565_BGRA8888_LUT[512]
static const Uint32 RGB565_ABGR8888_LUT[512]
static void Blit2101010toN(SDL_BlitInfo *info)
const GLubyte GLuint GLuint GLuint GLuint alpha GLboolean GLboolean GLboolean GLboolean alpha GLint GLint GLsizei GLsizei GLenum type GLenum GLint GLenum GLint GLint GLsizei GLsizei GLint border GLenum GLint GLint GLint GLint GLint GLsizei GLsizei height GLsizei GLsizei GLenum GLenum const GLvoid *pixels GLenum GLint GLint GLint j1
const GLubyte GLuint GLuint GLuint GLuint alpha GLboolean GLboolean GLboolean GLboolean alpha GLint GLint GLsizei GLsizei GLenum type GLenum GLint GLenum GLint GLint GLsizei GLsizei GLint border GLenum GLint GLint GLint GLint GLint GLsizei GLsizei height GLsizei GLsizei GLenum GLenum const GLvoid *pixels GLenum GLint GLint GLint GLint j2 GLdouble GLdouble GLdouble GLdouble GLdouble GLdouble zFar GLenum GLenum GLint *params GLenum GLenum GLint *params GLenum GLenum GLint *params GLenum GLenum GLfloat *params GLenum GLint GLenum GLenum GLvoid *pixels GLenum GLint GLenum GLint *params GLenum GLenum GLint *params GLenum GLsizei const GLvoid *pointer GLenum GLenum const GLint *params GLenum GLfloat GLfloat GLint GLint const GLfloat *points GLenum GLfloat GLfloat GLint GLint GLfloat GLfloat GLint GLint const GLfloat *points GLint GLfloat GLfloat GLint GLfloat GLfloat v2 GLenum GLenum const GLint *params GLdouble GLdouble GLdouble GLdouble GLdouble GLdouble zFar GLenum map
const GLubyte GLuint GLuint GLuint GLuint alpha GLboolean GLboolean GLboolean GLboolean alpha GLint GLint GLsizei GLsizei GLenum type GLenum GLint GLenum GLint GLint GLsizei GLsizei GLint border GLenum GLint GLint GLint GLint GLint GLsizei GLsizei height GLsizei GLsizei GLenum GLenum const GLvoid *pixels GLenum GLint i1
const GLubyte GLuint GLuint GLuint GLuint alpha GLboolean GLboolean GLboolean GLboolean alpha GLint GLint GLsizei GLsizei GLenum type GLenum GLint GLenum GLint GLint GLsizei GLsizei GLint border GLenum GLint GLint GLint GLint GLint GLsizei GLsizei height GLsizei GLsizei GLenum GLenum const GLvoid *pixels GLenum GLint GLint i2
GLint GLint GLsizei width
GLdouble GLdouble GLdouble r
GLint GLint GLsizei GLsizei height
GLboolean GLboolean GLboolean b
GLuint GLfloat GLfloat GLfloat GLfloat GLfloat GLfloat GLfloat GLfloat s1
GLboolean GLboolean GLboolean GLboolean a
GLenum GLint GLenum GLsizei GLsizei GLsizei GLint GLsizei const void * bits
GLfloat GLfloat GLfloat alpha
GLfloat GLfloat GLfloat GLfloat v3
GLfloat GLfloat GLfloat v2
GLenum GLsizei GLenum GLenum const void * table
GLfloat GLfloat GLfloat GLfloat h
GLubyte GLubyte GLubyte GLubyte w
GLuint GLfloat GLfloat GLfloat GLfloat GLfloat GLfloat s0
@ SDL_PIXELFORMAT_RGBA8888
@ SDL_PIXELFORMAT_ABGR8888
@ SDL_PIXELFORMAT_BGRA8888
@ SDL_PIXELFORMAT_ARGB8888
@ SDL_PIXELFORMAT_ARGB2101010
set set set set set set set macro pixldst1 abits if abits op else op endif endm macro pixldst2 abits if abits op else op endif endm macro pixldst4 abits if abits op else op endif endm macro pixldst0 abits op endm macro pixldst3 mem_operand op endm macro pixldst30 mem_operand op endm macro pixldst abits if abits elseif abits elseif abits elseif abits elseif abits pixldst0 abits else pixldst0 abits pixldst0 abits pixldst0 abits pixldst0 abits endif elseif abits else pixldst0 abits pixldst0 abits endif elseif abits else error unsupported bpp *numpix else pixst endif endm macro pixld1_s mem_operand if asr adds SRC_WIDTH_FIXED bpl add asl mov asr adds SRC_WIDTH_FIXED bpl add asl mov asr adds SRC_WIDTH_FIXED bpl add asl mov asr adds SRC_WIDTH_FIXED bpl add asl elseif asr adds SRC_WIDTH_FIXED bpl add asl mov asr adds SRC_WIDTH_FIXED bpl add asl else error unsupported endif endm macro pixld2_s mem_operand if mov asr add asl add asl mov asr sub UNIT_X add asl mov asr add asl add asl mov asr add UNIT_X add asl else pixld1_s mem_operand pixld1_s mem_operand endif endm macro pixld0_s mem_operand if asr adds SRC_WIDTH_FIXED bpl add asl elseif asr adds SRC_WIDTH_FIXED bpl add asl endif endm macro pixld_s_internal mem_operand if mem_operand pixld2_s mem_operand pixdeinterleave basereg elseif mem_operand elseif mem_operand elseif mem_operand elseif mem_operand pixld0_s mem_operand else pixld0_s mem_operand pixld0_s mem_operand pixld0_s mem_operand pixld0_s mem_operand endif elseif mem_operand else pixld0_s mem_operand pixld0_s mem_operand endif elseif mem_operand else error unsupported mem_operand if bpp mem_operand endif endm macro vuzp8 reg2 vuzp d d ®2 endm macro vzip8 reg2 vzip d d ®2 endm macro pixdeinterleave basereg basereg basereg basereg basereg endif endm macro pixinterleave basereg basereg basereg basereg basereg endif endm macro PF boost_increment endif if endif PF tst PF addne PF subne PF cmp ORIG_W if endif if endif if endif PF subge ORIG_W PF subges if endif if 
endif if endif endif endm macro cache_preload_simple endif if dst_r_bpp pld[DST_R, #(PREFETCH_DISTANCE_SIMPLE *dst_r_bpp/8)] endif if mask_bpp pld if[MASK, #(PREFETCH_DISTANCE_SIMPLE *mask_bpp/8)] endif endif endm macro fetch_mask_pixblock pixld mask_basereg pixblock_size MASK endm macro ensure_destination_ptr_alignment process_pixblock_tail_head if beq irp skip1(dst_w_bpp<=(lowbit *8)) &&((lowbit *8)<(pixblock_size *dst_w_bpp)) .if lowbit< 16 tst DST_R
SDL_PixelFormat * src_fmt
SDL_PixelFormat * dst_fmt
A collection of pixels used in software blitting.
enum blit_features blit_features
typedef int(__stdcall *FARPROC)()