#include "nv50_context.h"
#include "os/os_time.h"

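/* Emit render target and depth/stencil setup for the currently bound
 * framebuffer state.
 */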
static void
nv50_validate_fb(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   struct pipe_framebuffer_state *fb = &nv50->framebuffer;
   unsigned i;
   unsigned ms_mode = NV50_3D_MULTISAMPLE_MODE_MS1;

   nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_FB);

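   /* RT_CONTROL: low 4 bits hold the colour buffer count, the upper bits keep
    * the identity RT -> cbuf mapping (octal digits 7..0).
    */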
   BEGIN_NV04(push, NV50_3D(RT_CONTROL), 1);
   PUSH_DATA (push, (076543210 << 4) | fb->nr_cbufs);
   BEGIN_NV04(push, NV50_3D(SCREEN_SCISSOR_HORIZ), 2);
   PUSH_DATA (push, fb->width << 16);
   PUSH_DATA (push, fb->height << 16);

   for (i = 0; i < fb->nr_cbufs; ++i) {
      struct nv50_miptree *mt = nv50_miptree(fb->cbufs[i]->texture);
      struct nv50_surface *sf = nv50_surface(fb->cbufs[i]);
      struct nouveau_bo *bo = mt->base.bo;

      BEGIN_NV04(push, NV50_3D(RT_ADDRESS_HIGH(i)), 5);
      PUSH_DATAh(push, bo->offset + sf->offset);
      PUSH_DATA (push, bo->offset + sf->offset);
      PUSH_DATA (push, nv50_format_table[sf->base.format].rt);
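      /* Tiled surfaces get a tile mode and layer stride; linear surfaces are
       * described by their pitch and cannot be combined with a depth buffer
       * or multisampling (see the asserts below).
       */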
      if (likely(nouveau_bo_memtype(bo))) {
         PUSH_DATA (push, mt->level[sf->base.u.tex.level].tile_mode);
         PUSH_DATA (push, mt->layer_stride >> 2);
         BEGIN_NV04(push, NV50_3D(RT_HORIZ(i)), 2);
         PUSH_DATA (push, sf->width);
         PUSH_DATA (push, sf->height);
         BEGIN_NV04(push, NV50_3D(RT_ARRAY_MODE), 1);
         PUSH_DATA (push, sf->depth);
      } else {
         PUSH_DATA (push, 0);
         PUSH_DATA (push, 0);
         BEGIN_NV04(push, NV50_3D(RT_HORIZ(i)), 2);
         PUSH_DATA (push, NV50_3D_RT_HORIZ_LINEAR | mt->level[0].pitch);
         PUSH_DATA (push, sf->height);
         BEGIN_NV04(push, NV50_3D(RT_ARRAY_MODE), 1);
         PUSH_DATA (push, 0);

         assert(!fb->zsbuf);
         assert(!mt->ms_mode);
      }

      ms_mode = mt->ms_mode;

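      /* If the GPU may still be reading this surface (e.g. as a texture),
       * request a serialization point before it is rendered to.
       */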
      if (mt->base.status & NOUVEAU_BUFFER_STATUS_GPU_READING)
         nv50->state.rt_serialize = TRUE;
      mt->base.status |= NOUVEAU_BUFFER_STATUS_GPU_WRITING;
      mt->base.status &= ~NOUVEAU_BUFFER_STATUS_GPU_READING;

      /* only register for writing, otherwise we'd always serialize here */
      BCTX_REFN(nv50->bufctx_3d, FB, &mt->base, WR);
   }

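   /* Set up the depth/stencil (zeta) buffer, or disable zeta if none is bound. */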
   if (fb->zsbuf) {
      struct nv50_miptree *mt = nv50_miptree(fb->zsbuf->texture);
      struct nv50_surface *sf = nv50_surface(fb->zsbuf);
      struct nouveau_bo *bo = mt->base.bo;
      int unk = mt->base.base.target == PIPE_TEXTURE_2D;

      BEGIN_NV04(push, NV50_3D(ZETA_ADDRESS_HIGH), 5);
      PUSH_DATAh(push, bo->offset + sf->offset);
      PUSH_DATA (push, bo->offset + sf->offset);
      PUSH_DATA (push, nv50_format_table[fb->zsbuf->format].rt);
      PUSH_DATA (push, mt->level[sf->base.u.tex.level].tile_mode);
      PUSH_DATA (push, mt->layer_stride >> 2);
      BEGIN_NV04(push, NV50_3D(ZETA_ENABLE), 1);
      PUSH_DATA (push, 1);
      BEGIN_NV04(push, NV50_3D(ZETA_HORIZ), 3);
      PUSH_DATA (push, sf->width);
      PUSH_DATA (push, sf->height);
      PUSH_DATA (push, (unk << 16) | sf->depth);

      ms_mode = mt->ms_mode;

      if (mt->base.status & NOUVEAU_BUFFER_STATUS_GPU_READING)
         nv50->state.rt_serialize = TRUE;
      mt->base.status |= NOUVEAU_BUFFER_STATUS_GPU_WRITING;
      mt->base.status &= ~NOUVEAU_BUFFER_STATUS_GPU_READING;

      BCTX_REFN(nv50->bufctx_3d, FB, &mt->base, WR);
   } else {
      BEGIN_NV04(push, NV50_3D(ZETA_ENABLE), 1);
      PUSH_DATA (push, 0);
   }

   BEGIN_NV04(push, NV50_3D(MULTISAMPLE_MODE), 1);
   PUSH_DATA (push, ms_mode);

   BEGIN_NV04(push, NV50_3D(VIEWPORT_HORIZ(0)), 2);
   PUSH_DATA (push, fb->width << 16);
   PUSH_DATA (push, fb->height << 16);
}

static void
nv50_validate_blend_colour(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   BEGIN_NV04(push, NV50_3D(BLEND_COLOR(0)), 4);
   PUSH_DATAf(push, nv50->blend_colour.color[0]);
   PUSH_DATAf(push, nv50->blend_colour.color[1]);
   PUSH_DATAf(push, nv50->blend_colour.color[2]);
   PUSH_DATAf(push, nv50->blend_colour.color[3]);
}

static void
nv50_validate_stencil_ref(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   BEGIN_NV04(push, NV50_3D(STENCIL_FRONT_FUNC_REF), 1);
   PUSH_DATA (push, nv50->stencil_ref.ref_value[0]);
   BEGIN_NV04(push, NV50_3D(STENCIL_BACK_FUNC_REF), 1);
   PUSH_DATA (push, nv50->stencil_ref.ref_value[1]);
}

static void
nv50_validate_stipple(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   unsigned i;

   BEGIN_NV04(push, NV50_3D(POLYGON_STIPPLE_PATTERN(0)), 32);
   for (i = 0; i < 32; ++i)
      PUSH_DATA(push, util_bswap32(nv50->stipple.stipple[i]));
}

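/* With NV50_SCISSORS_CLIPPING the scissor rectangle is also clamped to the
 * viewport, so it has to be revalidated on viewport and framebuffer changes,
 * not just when the scissor state itself changes.
 */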
static void
nv50_validate_scissor(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   struct pipe_scissor_state *s = &nv50->scissor;
#ifdef NV50_SCISSORS_CLIPPING
   struct pipe_viewport_state *vp = &nv50->viewport;
   int minx, maxx, miny, maxy;

   if (!(nv50->dirty &
         (NV50_NEW_SCISSOR | NV50_NEW_VIEWPORT | NV50_NEW_FRAMEBUFFER)) &&
       nv50->state.scissor == nv50->rast->pipe.scissor)
      return;
   nv50->state.scissor = nv50->rast->pipe.scissor;

   if (nv50->state.scissor) {
      minx = s->minx;
      maxx = s->maxx;
      miny = s->miny;
      maxy = s->maxy;
   } else {
      minx = 0;
      maxx = nv50->framebuffer.width;
      miny = 0;
      maxy = nv50->framebuffer.height;
   }

   minx = MAX2(minx, (int)(vp->translate[0] - fabsf(vp->scale[0])));
   maxx = MIN2(maxx, (int)(vp->translate[0] + fabsf(vp->scale[0])));
   miny = MAX2(miny, (int)(vp->translate[1] - fabsf(vp->scale[1])));
   maxy = MIN2(maxy, (int)(vp->translate[1] + fabsf(vp->scale[1])));

   BEGIN_NV04(push, NV50_3D(SCISSOR_HORIZ(0)), 2);
   PUSH_DATA (push, (maxx << 16) | minx);
   PUSH_DATA (push, (maxy << 16) | miny);
#else
   BEGIN_NV04(push, NV50_3D(SCISSOR_HORIZ(0)), 2);
   PUSH_DATA (push, (s->maxx << 16) | s->minx);
   PUSH_DATA (push, (s->maxy << 16) | s->miny);
#endif
}

static void
nv50_validate_viewport(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   float zmin, zmax;

   BEGIN_NV04(push, NV50_3D(VIEWPORT_TRANSLATE_X(0)), 3);
   PUSH_DATAf(push, nv50->viewport.translate[0]);
   PUSH_DATAf(push, nv50->viewport.translate[1]);
   PUSH_DATAf(push, nv50->viewport.translate[2]);
   BEGIN_NV04(push, NV50_3D(VIEWPORT_SCALE_X(0)), 3);
   PUSH_DATAf(push, nv50->viewport.scale[0]);
   PUSH_DATAf(push, nv50->viewport.scale[1]);
   PUSH_DATAf(push, nv50->viewport.scale[2]);

   zmin = nv50->viewport.translate[2] - fabsf(nv50->viewport.scale[2]);
   zmax = nv50->viewport.translate[2] + fabsf(nv50->viewport.scale[2]);

#ifdef NV50_SCISSORS_CLIPPING
   BEGIN_NV04(push, NV50_3D(DEPTH_RANGE_NEAR(0)), 2);
   PUSH_DATAf(push, zmin);
   PUSH_DATAf(push, zmax);
#endif
}

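/* Ensure the last vertex stage exports enough clip distances for the enabled
 * user clip planes; growing the count requires rebuilding the program.
 */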
static INLINE void
nv50_check_program_ucps(struct nv50_context *nv50,
                        struct nv50_program *vp, uint8_t mask)
{
   const unsigned n = util_logbase2(mask) + 1;

   if (vp->vp.clpd_nr >= n)
      return;
   nv50_program_destroy(nv50, vp);

   vp->vp.clpd_nr = n;
   if (likely(vp == nv50->vertprog)) {
      nv50->dirty |= NV50_NEW_VERTPROG;
      nv50_vertprog_validate(nv50);
   } else {
      nv50->dirty |= NV50_NEW_GMTYPROG;
      nv50_gmtyprog_validate(nv50);
   }
   nv50_fp_linkage_validate(nv50);
}

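/* Upload user clip planes to the AUX constant buffer and enable the
 * corresponding hardware clip distances.
 */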
static void
nv50_validate_clip(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   struct nv50_program *vp;
   uint8_t clip_enable;

   if (nv50->dirty & NV50_NEW_CLIP) {
      BEGIN_NV04(push, NV50_3D(CB_ADDR), 1);
      PUSH_DATA (push, (0 << 8) | NV50_CB_AUX);
      BEGIN_NI04(push, NV50_3D(CB_DATA(0)), PIPE_MAX_CLIP_PLANES * 4);
      PUSH_DATAp(push, &nv50->clip.ucp[0][0], PIPE_MAX_CLIP_PLANES * 4);
   }

   vp = nv50->gmtyprog;
   if (likely(!vp))
      vp = nv50->vertprog;

   clip_enable = nv50->rast->pipe.clip_plane_enable;

   BEGIN_NV04(push, NV50_3D(CLIP_DISTANCE_ENABLE), 1);
   PUSH_DATA (push, clip_enable);

   if (clip_enable)
      nv50_check_program_ucps(nv50, vp, clip_enable);
}

static void
nv50_validate_blend(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   PUSH_SPACE(push, nv50->blend->size);
   PUSH_DATAp(push, nv50->blend->state, nv50->blend->size);
}

static void
nv50_validate_zsa(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   PUSH_SPACE(push, nv50->zsa->size);
   PUSH_DATAp(push, nv50->zsa->state, nv50->zsa->size);
}

static void
nv50_validate_rasterizer(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   PUSH_SPACE(push, nv50->rast->size);
   PUSH_DATAp(push, nv50->rast->state, nv50->rast->size);
}

static void
nv50_validate_sample_mask(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   unsigned mask[4] =
   {
      nv50->sample_mask & 0xffff,
      nv50->sample_mask & 0xffff,
      nv50->sample_mask & 0xffff,
      nv50->sample_mask & 0xffff
   };

   BEGIN_NV04(push, NV50_3D(MSAA_MASK(0)), 4);
   PUSH_DATA (push, mask[0]);
   PUSH_DATA (push, mask[1]);
   PUSH_DATA (push, mask[2]);
   PUSH_DATA (push, mask[3]);
}

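/* Switching to another context on the same screen: inherit the current
 * hardware state tracking and mark everything dirty, except for state
 * objects that are not bound in the new context.
 */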
static void
nv50_switch_pipe_context(struct nv50_context *ctx_to)
{
   struct nv50_context *ctx_from = ctx_to->screen->cur_ctx;

   if (ctx_from)
      ctx_to->state = ctx_from->state;

   ctx_to->dirty = ~0;

   if (!ctx_to->vertex)
      ctx_to->dirty &= ~(NV50_NEW_VERTEX | NV50_NEW_ARRAYS);

   if (!ctx_to->vertprog)
      ctx_to->dirty &= ~NV50_NEW_VERTPROG;
   if (!ctx_to->fragprog)
      ctx_to->dirty &= ~NV50_NEW_FRAGPROG;

   if (!ctx_to->blend)
      ctx_to->dirty &= ~NV50_NEW_BLEND;
   if (!ctx_to->rast)
#ifdef NV50_SCISSORS_CLIPPING
      ctx_to->dirty &= ~(NV50_NEW_RASTERIZER | NV50_NEW_SCISSOR);
#else
      ctx_to->dirty &= ~NV50_NEW_RASTERIZER;
#endif
   if (!ctx_to->zsa)
      ctx_to->dirty &= ~NV50_NEW_ZSA;

   ctx_to->screen->cur_ctx = ctx_to;
}

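/* State validation table: each entry runs, in the order listed here, when any
 * of its dirty bits is set.
 */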
static struct state_validate {
   void (*func)(struct nv50_context *);
   uint32_t states;
} validate_list[] = {
   { nv50_validate_fb,            NV50_NEW_FRAMEBUFFER },
   { nv50_validate_blend,         NV50_NEW_BLEND },
   { nv50_validate_zsa,           NV50_NEW_ZSA },
   { nv50_validate_sample_mask,   NV50_NEW_SAMPLE_MASK },
   { nv50_validate_rasterizer,    NV50_NEW_RASTERIZER },
   { nv50_validate_blend_colour,  NV50_NEW_BLEND_COLOUR },
   { nv50_validate_stencil_ref,   NV50_NEW_STENCIL_REF },
   { nv50_validate_stipple,       NV50_NEW_STIPPLE },
#ifdef NV50_SCISSORS_CLIPPING
   { nv50_validate_scissor,       NV50_NEW_SCISSOR | NV50_NEW_VIEWPORT |
                                  NV50_NEW_RASTERIZER |
                                  NV50_NEW_FRAMEBUFFER },
#else
   { nv50_validate_scissor,       NV50_NEW_SCISSOR },
#endif
   { nv50_validate_viewport,      NV50_NEW_VIEWPORT },
   { nv50_vertprog_validate,      NV50_NEW_VERTPROG },
   { nv50_gmtyprog_validate,      NV50_NEW_GMTYPROG },
   { nv50_fragprog_validate,      NV50_NEW_FRAGPROG },
   { nv50_fp_linkage_validate,    NV50_NEW_FRAGPROG | NV50_NEW_VERTPROG |
                                  NV50_NEW_GMTYPROG | NV50_NEW_RASTERIZER },
   { nv50_gp_linkage_validate,    NV50_NEW_GMTYPROG | NV50_NEW_VERTPROG },
   { nv50_validate_derived_rs,    NV50_NEW_FRAGPROG | NV50_NEW_RASTERIZER |
                                  NV50_NEW_VERTPROG | NV50_NEW_GMTYPROG },
   { nv50_validate_clip,          NV50_NEW_CLIP | NV50_NEW_RASTERIZER |
                                  NV50_NEW_VERTPROG | NV50_NEW_GMTYPROG },
   { nv50_constbufs_validate,     NV50_NEW_CONSTBUF },
   { nv50_validate_textures,      NV50_NEW_TEXTURES },
   { nv50_validate_samplers,      NV50_NEW_SAMPLERS },
   { nv50_stream_output_validate, NV50_NEW_STRMOUT |
                                  NV50_NEW_VERTPROG | NV50_NEW_GMTYPROG },
   { nv50_vertex_arrays_validate, NV50_NEW_VERTEX | NV50_NEW_ARRAYS }
};
#define validate_list_len (sizeof(validate_list) / sizeof(validate_list[0]))

boolean
nv50_state_validate(struct nv50_context *nv50, uint32_t mask, unsigned words)
{
   uint32_t state_mask;
   int ret;
   unsigned i;

   if (nv50->screen->cur_ctx != nv50)
      nv50_switch_pipe_context(nv50);

   state_mask = nv50->dirty & mask;

   if (state_mask) {
      for (i = 0; i < validate_list_len; ++i) {
         struct state_validate *validate = &validate_list[i];

         if (state_mask & validate->states)
            validate->func(nv50);
      }
      nv50->dirty &= ~state_mask;

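      /* A buffer that was being read is now bound as a render target: emit a
       * serialization command to order the old reads before the new writes.
       */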
      if (nv50->state.rt_serialize) {
         nv50->state.rt_serialize = FALSE;
         BEGIN_NV04(nv50->base.pushbuf, SUBC_3D(NV50_GRAPH_SERIALIZE), 1);
         PUSH_DATA (nv50->base.pushbuf, 0);
      }

      nv50_bufctx_fence(nv50->bufctx_3d, FALSE);
   }
   nouveau_pushbuf_bufctx(nv50->base.pushbuf, nv50->bufctx_3d);
   ret = nouveau_pushbuf_validate(nv50->base.pushbuf);

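   /* If the push buffer was flushed during validation, update the buffer
    * fences for the 3D buffer context.
    */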
   if (unlikely(nv50->state.flushed)) {
      nv50->state.flushed = FALSE;
      nv50_bufctx_fence(nv50->bufctx_3d, TRUE);
   }
   return !ret;
}