/*
 * Copyright © 2014-2018 Broadcom
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "util/format/u_format.h"

#include "v3d_context.h"
#include "broadcom/cle/v3dx_pack.h"
#include "broadcom/common/v3d_macros.h"
#include "v3d_format_table.h"

#define SWIZ(x,y,z,w) {          \
        PIPE_SWIZZLE_##x, \
        PIPE_SWIZZLE_##y, \
        PIPE_SWIZZLE_##z, \
        PIPE_SWIZZLE_##w  \
}

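/* Each FORMAT() entry below initializes a struct v3d_format, giving (in this
 * argument order) that the pipe format is supported, the TLB rendering
 * ("output image") format, the TMU texture data format, the swizzle applied
 * on top of that texture format, and the TMU return size / channel count
 * used when sampling it.
 */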
#define FORMAT(pipe, rt, tex, swiz, return_size, return_channels)       \
        [PIPE_FORMAT_##pipe] = {                                        \
                true,                                                   \
                V3D_OUTPUT_IMAGE_FORMAT_##rt,                           \
                TEXTURE_DATA_FORMAT_##tex,                              \
                swiz,                                                   \
                return_size,                                            \
                return_channels,                                        \
        }

#define SWIZ_X001	SWIZ(X, 0, 0, 1)
#define SWIZ_XY01	SWIZ(X, Y, 0, 1)
#define SWIZ_XYZ1	SWIZ(X, Y, Z, 1)
#define SWIZ_XYZW	SWIZ(X, Y, Z, W)
#define SWIZ_YZWX	SWIZ(Y, Z, W, X)
#define SWIZ_YZW1	SWIZ(Y, Z, W, 1)
#define SWIZ_ZYXW	SWIZ(Z, Y, X, W)
#define SWIZ_ZYX1	SWIZ(Z, Y, X, 1)
#define SWIZ_XXXY	SWIZ(X, X, X, Y)
#define SWIZ_XXX1	SWIZ(X, X, X, 1)
#define SWIZ_XXXX	SWIZ(X, X, X, X)
#define SWIZ_000X	SWIZ(0, 0, 0, X)

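/* The table is indexed directly by enum pipe_format, so formats without an
 * entry are left zero-initialized (present == false) and rejected by
 * v3dX(get_format_desc)() below.
 */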
static const struct v3d_format format_table[] = {
        FORMAT(B8G8R8A8_UNORM,    RGBA8,        RGBA8,       SWIZ_ZYXW, 16, 0),
        FORMAT(B8G8R8X8_UNORM,    RGBA8,        RGBA8,       SWIZ_ZYX1, 16, 0),
        FORMAT(B8G8R8A8_SRGB,     SRGB8_ALPHA8, RGBA8,       SWIZ_ZYXW, 16, 0),
        FORMAT(B8G8R8X8_SRGB,     SRGB8_ALPHA8, RGBA8,       SWIZ_ZYX1, 16, 0),
        FORMAT(R8G8B8A8_UNORM,    RGBA8,        RGBA8,       SWIZ_XYZW, 16, 0),
        FORMAT(R8G8B8X8_UNORM,    RGBA8,        RGBA8,       SWIZ_XYZ1, 16, 0),
        FORMAT(R8G8B8A8_SRGB,     SRGB8_ALPHA8, RGBA8,       SWIZ_XYZW, 16, 0),
        FORMAT(R8G8B8X8_SRGB,     SRGB8_ALPHA8, RGBA8,       SWIZ_XYZ1, 16, 0),
        FORMAT(R8G8B8A8_SNORM,    NO,           RGBA8_SNORM, SWIZ_XYZW, 16, 0),
        FORMAT(R8G8B8X8_SNORM,    NO,           RGBA8_SNORM, SWIZ_XYZ1, 16, 0),
        FORMAT(R10G10B10A2_UNORM, RGB10_A2,     RGB10_A2,    SWIZ_XYZW, 16, 0),
        FORMAT(R10G10B10A2_UINT,  RGB10_A2UI,   RGB10_A2UI,  SWIZ_XYZW, 16, 0),

        FORMAT(A4B4G4R4_UNORM,    ABGR4444,     RGBA4,       SWIZ_XYZW, 16, 0),

        FORMAT(A1B5G5R5_UNORM,    ABGR1555,     RGB5_A1,     SWIZ_XYZW, 16, 0),
        FORMAT(X1B5G5R5_UNORM,    ABGR1555,     RGB5_A1,     SWIZ_XYZ1, 16, 0),
        FORMAT(B5G6R5_UNORM,      BGR565,       RGB565,      SWIZ_XYZ1, 16, 0),

        FORMAT(R8_UNORM,          R8,           R8,          SWIZ_X001, 16, 0),
        FORMAT(R8_SNORM,          NO,           R8_SNORM,    SWIZ_X001, 16, 0),
        FORMAT(R8G8_UNORM,        RG8,          RG8,         SWIZ_XY01, 16, 0),
        FORMAT(R8G8_SNORM,        NO,           RG8_SNORM,   SWIZ_XY01, 16, 0),

        FORMAT(R16_UNORM,         NO,           R16,         SWIZ_X001, 32, 1),
        FORMAT(R16_SNORM,         NO,           R16_SNORM,   SWIZ_X001, 32, 1),
        FORMAT(R16_FLOAT,         R16F,         R16F,        SWIZ_X001, 16, 0),
        FORMAT(R32_FLOAT,         R32F,         R32F,        SWIZ_X001, 32, 1),

        FORMAT(R16G16_UNORM,      NO,           RG16,        SWIZ_XY01, 32, 2),
        FORMAT(R16G16_SNORM,      NO,           RG16_SNORM,  SWIZ_XY01, 32, 2),
        FORMAT(R16G16_FLOAT,      RG16F,        RG16F,       SWIZ_XY01, 16, 0),
        FORMAT(R32G32_FLOAT,      RG32F,        RG32F,       SWIZ_XY01, 32, 2),

        FORMAT(R16G16B16A16_UNORM, NO,          RGBA16,      SWIZ_XYZW, 32, 4),
        FORMAT(R16G16B16A16_SNORM, NO,          RGBA16_SNORM, SWIZ_XYZW, 32, 4),
        FORMAT(R16G16B16A16_FLOAT, RGBA16F,     RGBA16F,     SWIZ_XYZW, 16, 0),
        FORMAT(R32G32B32A32_FLOAT, RGBA32F,     RGBA32F,     SWIZ_XYZW, 32, 4),

        /* If we don't have L/A/LA16, mesa/st will fall back to RGBA16. */
        FORMAT(L16_UNORM,         NO,           R16,         SWIZ_XXX1, 32, 1),
        FORMAT(L16_SNORM,         NO,           R16_SNORM,   SWIZ_XXX1, 32, 1),
        FORMAT(I16_UNORM,         NO,           R16,         SWIZ_XXXX, 32, 1),
        FORMAT(I16_SNORM,         NO,           R16_SNORM,   SWIZ_XXXX, 32, 1),
        FORMAT(A16_UNORM,         NO,           R16,         SWIZ_000X, 32, 1),
        FORMAT(A16_SNORM,         NO,           R16_SNORM,   SWIZ_000X, 32, 1),
        FORMAT(L16A16_UNORM,      NO,           RG16,        SWIZ_XXXY, 32, 2),
        FORMAT(L16A16_SNORM,      NO,           RG16_SNORM,  SWIZ_XXXY, 32, 2),

        FORMAT(A8_UNORM,          NO,           R8,          SWIZ_000X, 16, 0),
        FORMAT(L8_UNORM,          NO,           R8,          SWIZ_XXX1, 16, 0),
        FORMAT(I8_UNORM,          NO,           R8,          SWIZ_XXXX, 16, 0),
        FORMAT(L8A8_UNORM,        NO,           RG8,         SWIZ_XXXY, 16, 0),

        FORMAT(R8_SINT,           R8I,          R8I,         SWIZ_X001, 16, 0),
        FORMAT(R8_UINT,           R8UI,         R8UI,        SWIZ_X001, 16, 0),
        FORMAT(R8G8_SINT,         RG8I,         RG8I,        SWIZ_XY01, 16, 0),
        FORMAT(R8G8_UINT,         RG8UI,        RG8UI,       SWIZ_XY01, 16, 0),
        FORMAT(R8G8B8A8_SINT,     RGBA8I,       RGBA8I,      SWIZ_XYZW, 16, 0),
        FORMAT(R8G8B8A8_UINT,     RGBA8UI,      RGBA8UI,     SWIZ_XYZW, 16, 0),

        FORMAT(R16_SINT,          R16I,         R16I,        SWIZ_X001, 16, 0),
        FORMAT(R16_UINT,          R16UI,        R16UI,       SWIZ_X001, 16, 0),
        FORMAT(R16G16_SINT,       RG16I,        RG16I,       SWIZ_XY01, 16, 0),
        FORMAT(R16G16_UINT,       RG16UI,       RG16UI,      SWIZ_XY01, 16, 0),
        FORMAT(R16G16B16A16_SINT, RGBA16I,      RGBA16I,     SWIZ_XYZW, 16, 0),
        FORMAT(R16G16B16A16_UINT, RGBA16UI,     RGBA16UI,    SWIZ_XYZW, 16, 0),

        FORMAT(R32_SINT,          R32I,         R32I,        SWIZ_X001, 32, 1),
        FORMAT(R32_UINT,          R32UI,        R32UI,       SWIZ_X001, 32, 1),
        FORMAT(R32G32_SINT,       RG32I,        RG32I,       SWIZ_XY01, 32, 2),
        FORMAT(R32G32_UINT,       RG32UI,       RG32UI,      SWIZ_XY01, 32, 2),
        FORMAT(R32G32B32A32_SINT, RGBA32I,      RGBA32I,     SWIZ_XYZW, 32, 4),
        FORMAT(R32G32B32A32_UINT, RGBA32UI,     RGBA32UI,    SWIZ_XYZW, 32, 4),

        FORMAT(A8_SINT,           R8I,          R8I,         SWIZ_000X, 16, 0),
        FORMAT(A8_UINT,           R8UI,         R8UI,        SWIZ_000X, 16, 0),
        FORMAT(A16_SINT,          R16I,         R16I,        SWIZ_000X, 16, 0),
        FORMAT(A16_UINT,          R16UI,        R16UI,       SWIZ_000X, 16, 0),
        FORMAT(A32_SINT,          R32I,         R32I,        SWIZ_000X, 32, 1),
        FORMAT(A32_UINT,          R32UI,        R32UI,       SWIZ_000X, 32, 1),

        FORMAT(R11G11B10_FLOAT,   R11F_G11F_B10F, R11F_G11F_B10F, SWIZ_XYZ1, 16, 0),
        FORMAT(R9G9B9E5_FLOAT,    NO,           RGB9_E5,     SWIZ_XYZ1, 16, 0),

#if V3D_VERSION >= 40
        FORMAT(S8_UINT_Z24_UNORM, D24S8,        DEPTH24_X8,  SWIZ_XXXX, 32, 1),
        FORMAT(X8Z24_UNORM,       D24S8,        DEPTH24_X8,  SWIZ_XXXX, 32, 1),
        FORMAT(S8X24_UINT,        S8,           RGBA8UI,     SWIZ_XXXX, 16, 1),
        FORMAT(Z32_FLOAT,         D32F,         DEPTH_COMP32F, SWIZ_XXXX, 32, 1),
        FORMAT(Z16_UNORM,         D16,          DEPTH_COMP16, SWIZ_XXXX, 32, 1),

        /* Pretend we support this, but it'll be separate Z32F depth and S8. */
        FORMAT(Z32_FLOAT_S8X24_UINT, D32F,      DEPTH_COMP32F, SWIZ_XXXX, 32, 1),
        FORMAT(X32_S8X24_UINT,    S8,           R8UI,          SWIZ_XXXX, 16, 1),
#else
        FORMAT(S8_UINT_Z24_UNORM, ZS_DEPTH24_STENCIL8, DEPTH24_X8, SWIZ_XXXX, 32, 1),
        FORMAT(X8Z24_UNORM,       ZS_DEPTH24_STENCIL8, DEPTH24_X8, SWIZ_XXXX, 32, 1),
        FORMAT(S8X24_UINT,        NO,           R32F,        SWIZ_XXXX, 32, 1),
        FORMAT(Z32_FLOAT,         ZS_DEPTH_COMPONENT32F, R32F, SWIZ_XXXX, 32, 1),
        FORMAT(Z16_UNORM,         ZS_DEPTH_COMPONENT16,  DEPTH_COMP16, SWIZ_XXXX, 32, 1),

        /* Pretend we support this, but it'll be separate Z32F depth and S8. */
        FORMAT(Z32_FLOAT_S8X24_UINT, ZS_DEPTH_COMPONENT32F, R32F, SWIZ_XXXX, 32, 1),
#endif

        FORMAT(ETC2_RGB8,         NO,           RGB8_ETC2,   SWIZ_XYZ1, 16, 0),
        FORMAT(ETC2_SRGB8,        NO,           RGB8_ETC2,   SWIZ_XYZ1, 16, 0),
        FORMAT(ETC2_RGB8A1,       NO,           RGB8_PUNCHTHROUGH_ALPHA1, SWIZ_XYZW, 16, 0),
        FORMAT(ETC2_SRGB8A1,      NO,           RGB8_PUNCHTHROUGH_ALPHA1, SWIZ_XYZW, 16, 0),
        FORMAT(ETC2_RGBA8,        NO,           RGBA8_ETC2_EAC, SWIZ_XYZW, 16, 0),
        FORMAT(ETC2_SRGBA8,       NO,           RGBA8_ETC2_EAC, SWIZ_XYZW, 16, 0),
        FORMAT(ETC2_R11_UNORM,    NO,           R11_EAC,     SWIZ_X001, 16, 0),
        FORMAT(ETC2_R11_SNORM,    NO,           SIGNED_R11_EAC, SWIZ_X001, 16, 0),
        FORMAT(ETC2_RG11_UNORM,   NO,           RG11_EAC,    SWIZ_XY01, 16, 0),
        FORMAT(ETC2_RG11_SNORM,   NO,           SIGNED_RG11_EAC, SWIZ_XY01, 16, 0),

        FORMAT(DXT1_RGB,          NO,           BC1,         SWIZ_XYZ1, 16, 0),
        FORMAT(DXT1_SRGB,         NO,           BC1,         SWIZ_XYZ1, 16, 0),
        FORMAT(DXT1_RGBA,         NO,           BC1,         SWIZ_XYZW, 16, 0),
        FORMAT(DXT1_SRGBA,        NO,           BC1,         SWIZ_XYZW, 16, 0),
        FORMAT(DXT3_RGBA,         NO,           BC2,         SWIZ_XYZW, 16, 0),
        FORMAT(DXT3_SRGBA,        NO,           BC2,         SWIZ_XYZW, 16, 0),
        FORMAT(DXT5_RGBA,         NO,           BC3,         SWIZ_XYZW, 16, 0),
        FORMAT(DXT5_SRGBA,        NO,           BC3,         SWIZ_XYZW, 16, 0),
};

const struct v3d_format *
v3dX(get_format_desc)(enum pipe_format f)
{
        if (f < ARRAY_SIZE(format_table) && format_table[f].present)
                return &format_table[f];
        else
                return NULL;
}

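/* Given a V3D_OUTPUT_IMAGE_FORMAT_* value, returns the internal type and bpp
 * the tile buffer gets configured with for that render target format.
 * Unknown formats fall through to the 8-bit/32bpp defaults below, since
 * (per the comment in the default case) we can be called for formats that
 * aren't actually supported.
 */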
void
v3dX(get_internal_type_bpp_for_output_format)(uint32_t format,
                                              uint32_t *type,
                                              uint32_t *bpp)
{
        switch (format) {
        case V3D_OUTPUT_IMAGE_FORMAT_RGBA8:
#if V3D_VERSION < 41
        case V3D_OUTPUT_IMAGE_FORMAT_RGBX8:
#endif
        case V3D_OUTPUT_IMAGE_FORMAT_RGB8:
        case V3D_OUTPUT_IMAGE_FORMAT_RG8:
        case V3D_OUTPUT_IMAGE_FORMAT_R8:
        case V3D_OUTPUT_IMAGE_FORMAT_ABGR4444:
        case V3D_OUTPUT_IMAGE_FORMAT_BGR565:
        case V3D_OUTPUT_IMAGE_FORMAT_ABGR1555:
                *type = V3D_INTERNAL_TYPE_8;
                *bpp = V3D_INTERNAL_BPP_32;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_RGBA8I:
        case V3D_OUTPUT_IMAGE_FORMAT_RG8I:
        case V3D_OUTPUT_IMAGE_FORMAT_R8I:
                *type = V3D_INTERNAL_TYPE_8I;
                *bpp = V3D_INTERNAL_BPP_32;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_RGBA8UI:
        case V3D_OUTPUT_IMAGE_FORMAT_RG8UI:
        case V3D_OUTPUT_IMAGE_FORMAT_R8UI:
                *type = V3D_INTERNAL_TYPE_8UI;
                *bpp = V3D_INTERNAL_BPP_32;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_SRGB8_ALPHA8:
        case V3D_OUTPUT_IMAGE_FORMAT_SRGB:
        case V3D_OUTPUT_IMAGE_FORMAT_RGB10_A2:
        case V3D_OUTPUT_IMAGE_FORMAT_R11F_G11F_B10F:
#if V3D_VERSION < 41
        case V3D_OUTPUT_IMAGE_FORMAT_SRGBX8:
#endif
        case V3D_OUTPUT_IMAGE_FORMAT_RGBA16F:
                /* Note that sRGB RTs are stored in the tile buffer at 16F,
                 * and the conversion to sRGB happens at tilebuffer
                 * load/store.
                 */
                *type = V3D_INTERNAL_TYPE_16F;
                *bpp = V3D_INTERNAL_BPP_64;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_RG16F:
        case V3D_OUTPUT_IMAGE_FORMAT_R16F:
                *type = V3D_INTERNAL_TYPE_16F;
                /* Use 64bpp to make sure the TLB doesn't throw away the alpha
                 * channel before alpha test happens.
                 */
                *bpp = V3D_INTERNAL_BPP_64;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_RGBA16I:
                *type = V3D_INTERNAL_TYPE_16I;
                *bpp = V3D_INTERNAL_BPP_64;
                break;
        case V3D_OUTPUT_IMAGE_FORMAT_RG16I:
        case V3D_OUTPUT_IMAGE_FORMAT_R16I:
                *type = V3D_INTERNAL_TYPE_16I;
                *bpp = V3D_INTERNAL_BPP_32;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_RGB10_A2UI:
        case V3D_OUTPUT_IMAGE_FORMAT_RGBA16UI:
                *type = V3D_INTERNAL_TYPE_16UI;
                *bpp = V3D_INTERNAL_BPP_64;
                break;
        case V3D_OUTPUT_IMAGE_FORMAT_RG16UI:
        case V3D_OUTPUT_IMAGE_FORMAT_R16UI:
                *type = V3D_INTERNAL_TYPE_16UI;
                *bpp = V3D_INTERNAL_BPP_32;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_RGBA32I:
                *type = V3D_INTERNAL_TYPE_32I;
                *bpp = V3D_INTERNAL_BPP_128;
                break;
        case V3D_OUTPUT_IMAGE_FORMAT_RG32I:
                *type = V3D_INTERNAL_TYPE_32I;
                *bpp = V3D_INTERNAL_BPP_64;
                break;
        case V3D_OUTPUT_IMAGE_FORMAT_R32I:
                *type = V3D_INTERNAL_TYPE_32I;
                *bpp = V3D_INTERNAL_BPP_32;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_RGBA32UI:
                *type = V3D_INTERNAL_TYPE_32UI;
                *bpp = V3D_INTERNAL_BPP_128;
                break;
        case V3D_OUTPUT_IMAGE_FORMAT_RG32UI:
                *type = V3D_INTERNAL_TYPE_32UI;
                *bpp = V3D_INTERNAL_BPP_64;
                break;
        case V3D_OUTPUT_IMAGE_FORMAT_R32UI:
                *type = V3D_INTERNAL_TYPE_32UI;
                *bpp = V3D_INTERNAL_BPP_32;
                break;

        case V3D_OUTPUT_IMAGE_FORMAT_RGBA32F:
                *type = V3D_INTERNAL_TYPE_32F;
                *bpp = V3D_INTERNAL_BPP_128;
                break;
        case V3D_OUTPUT_IMAGE_FORMAT_RG32F:
                *type = V3D_INTERNAL_TYPE_32F;
                *bpp = V3D_INTERNAL_BPP_64;
                break;
        case V3D_OUTPUT_IMAGE_FORMAT_R32F:
                *type = V3D_INTERNAL_TYPE_32F;
                *bpp = V3D_INTERNAL_BPP_32;
                break;

        default:
                /* Provide some default values, as we'll be called at RB
                 * creation time, even if an RB with this format isn't
                 * supported.
                 */
                *type = V3D_INTERNAL_TYPE_8;
                *bpp = V3D_INTERNAL_BPP_32;
                break;
        }
}

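/* The TFU (texture formatting unit) engine, which the driver uses for jobs
 * like layout conversion and mipmap generation, can only write a subset of
 * the texture data formats; this reports whether a given format is in that
 * subset, with callers expected to fall back to a rendering path otherwise.
 */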
bool
v3dX(tfu_supports_tex_format)(enum V3DX(Texture_Data_Formats) format)
{
        switch (format) {
        case TEXTURE_DATA_FORMAT_R8:
        case TEXTURE_DATA_FORMAT_R8_SNORM:
        case TEXTURE_DATA_FORMAT_RG8:
        case TEXTURE_DATA_FORMAT_RG8_SNORM:
        case TEXTURE_DATA_FORMAT_RGBA8:
        case TEXTURE_DATA_FORMAT_RGBA8_SNORM:
        case TEXTURE_DATA_FORMAT_RGB565:
        case TEXTURE_DATA_FORMAT_RGBA4:
        case TEXTURE_DATA_FORMAT_RGB5_A1:
        case TEXTURE_DATA_FORMAT_RGB10_A2:
        case TEXTURE_DATA_FORMAT_R16:
        case TEXTURE_DATA_FORMAT_R16_SNORM:
        case TEXTURE_DATA_FORMAT_RG16:
        case TEXTURE_DATA_FORMAT_RG16_SNORM:
        case TEXTURE_DATA_FORMAT_RGBA16:
        case TEXTURE_DATA_FORMAT_RGBA16_SNORM:
        case TEXTURE_DATA_FORMAT_R16F:
        case TEXTURE_DATA_FORMAT_RG16F:
        case TEXTURE_DATA_FORMAT_RGBA16F:
        case TEXTURE_DATA_FORMAT_R11F_G11F_B10F:
        case TEXTURE_DATA_FORMAT_R4:
                return true;
        default:
                return false;
        }
}