/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */
#include "drmP.h"
#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_pageflip_irq(struct amdgpu_device *adev,
				    struct amdgpu_irq_src *source,
				    struct amdgpu_iv_entry *entry);

/**
 * dce_virtual_vblank_wait - vblank wait asic callback.
 *
 * @adev: amdgpu_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 * No-op for the virtual display engine.
 */
static void dce_virtual_vblank_wait(struct amdgpu_device *adev, int crtc)
{
	return;
}

static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	if (crtc >= adev->mode_info.num_crtc)
		return 0;
	else
		return adev->ddev->vblank[crtc].count;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
{
	return;
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	if ((crtc < 0) || (crtc >= adev->mode_info.num_crtc))
		return -EINVAL;

	*vbl = 0;
	*position = 0;

	return 0;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
{
	return;
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}

static bool dce_virtual_is_display_hung(struct amdgpu_device *adev)
{
	return false;
}

void dce_virtual_stop_mc_access(struct amdgpu_device *adev,
				struct amdgpu_mode_mc_save *save)
{
	switch (adev->asic_type) {
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
#ifdef CONFIG_DRM_AMDGPU_CIK
		dce_v8_0_disable_dce(adev);
#endif
		break;
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS11:
	case CHIP_POLARIS10:
		dce_v11_0_disable_dce(adev);
		break;
	default:
		DRM_ERROR("Unsupported ASIC type: 0x%X\n", adev->asic_type);
	}

	return;
}

void dce_virtual_resume_mc_access(struct amdgpu_device *adev,
				  struct amdgpu_mode_mc_save *save)
{
	return;
}

void dce_virtual_set_vga_render_state(struct amdgpu_device *adev,
				      bool render)
{
	return;
}

/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * Calculate and program the display watermarks and line
 * buffer allocation (CIK).
 * No-op for the virtual display engine.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
	return;
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	int i;

	/* userspace palettes are always correct as is */
	for (i = 0; i < size; i++) {
		amdgpu_crtc->lut_r[i] = red[i] >> 6;
		amdgpu_crtc->lut_g[i] = green[i] >> 6;
		amdgpu_crtc->lut_b[i] = blue[i] >> 6;
	}

	return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip = amdgpu_crtc_page_flip,
};

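/*
 * DPMS for the virtual CRTC: there is no real hardware to power up or
 * down, so this only tracks the enabled state and keeps the software
 * vblank/pageflip interrupt sources and DRM vblank bookkeeping in sync.
 */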
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK and PFLIP interrupts are still enabled */
		type = amdgpu_crtc_idx_to_irq_type(adev, amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		amdgpu_irq_update(adev, &adev->pageflip_irq, type);
		drm_vblank_on(dev, amdgpu_crtc->crtc_id);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_vblank_off(dev, amdgpu_crtc->crtc_id);
		amdgpu_crtc->enabled = false;
		break;
	}
}

static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	if (crtc->primary->fb) {
		int r;
		struct amdgpu_framebuffer *amdgpu_fb;
		struct amdgpu_bo *rbo;

		amdgpu_fb = to_amdgpu_framebuffer(crtc->primary->fb);
		rbo = gem_to_amdgpu_bo(amdgpu_fb->obj);
		r = amdgpu_bo_reserve(rbo, false);
		if (unlikely(r))
			DRM_ERROR("failed to reserve rbo before unpin\n");
		else {
			amdgpu_bo_unpin(rbo);
			amdgpu_bo_unreserve(rbo);
		}
	}

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* update the hw version for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}

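/*
 * Mode fixup for the virtual CRTC: no timing adjustment is needed, but
 * this is a convenient place to cache the encoder/connector pair routed
 * to this CRTC so later code does not have to look it up again.
 */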
static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_encoder *encoder;

	/* assign the encoder to the amdgpu crtc to avoid repeated lookups later */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			amdgpu_crtc->encoder = encoder;
			amdgpu_crtc->connector = amdgpu_get_connector_for_encoder(encoder);
			break;
		}
	}
	if ((amdgpu_crtc->encoder == NULL) || (amdgpu_crtc->connector == NULL)) {
		amdgpu_crtc->encoder = NULL;
		amdgpu_crtc->connector = NULL;
		return false;
	}

	return true;
}

static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}

static void dce_virtual_crtc_load_lut(struct drm_crtc *crtc)
{
	return;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.load_lut = dce_virtual_crtc_load_lut,
	.disable = dce_virtual_crtc_disable,
};

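/*
 * Allocate and register one virtual CRTC: set up the DRM CRTC object,
 * a 256-entry gamma LUT with a linear default ramp, and the helper
 * callbacks above.  adev->mode_info.crtcs[index] keeps the back-pointer
 * used by the interrupt and pageflip paths.
 */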
static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;
	int i;

	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	for (i = 0; i < 256; i++) {
		amdgpu_crtc->lut_r[i] = i << 2;
		amdgpu_crtc->lut_g[i] = i << 2;
		amdgpu_crtc->lut_b[i] = i << 2;
	}

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	return 0;
}

static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_crtc = 1;
	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}

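/*
 * Register the fixed virtual display topology: one VIRTUAL connector
 * with no DDC bus or HPD line, one virtual encoder, and link the two
 * together, instead of parsing the topology from the VBIOS as the real
 * DCE blocks do.
 */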
static bool dce_virtual_get_connector_info(struct amdgpu_device *adev)
{
	struct amdgpu_i2c_bus_rec ddc_bus;
	struct amdgpu_router router;
	struct amdgpu_hpd hpd;

	/* look up gpio for ddc, hpd */
	ddc_bus.valid = false;
	hpd.hpd = AMDGPU_HPD_NONE;
	/* needed for aux chan transactions */
	ddc_bus.hpd = hpd.hpd;

	memset(&router, 0, sizeof(router));
	router.ddc_valid = false;
	router.cd_valid = false;
	amdgpu_display_add_connector(adev,
				     0,
				     ATOM_DEVICE_CRT1_SUPPORT,
				     DRM_MODE_CONNECTOR_VIRTUAL, &ddc_bus,
				     CONNECTOR_OBJECT_ID_VIRTUAL,
				     &hpd,
				     &router);

	amdgpu_display_add_encoder(adev, ENCODER_VIRTUAL_ENUM_VIRTUAL,
				   ATOM_DEVICE_CRT1_SUPPORT,
				   0);

	amdgpu_link_encoder_connector(adev->ddev);

	return true;
}

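/*
 * Software init: register the CRTC interrupt source (id 229), set the
 * global mode_config limits, then create the virtual CRTC(s) and the
 * virtual connector/encoder pair.
 */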
static int dce_virtual_sw_init(void *handle)
{
	int r, i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	r = amdgpu_irq_add_id(adev, 229, &adev->crtc_irq);
	if (r)
		return r;

	adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	adev->ddev->mode_config.preferred_depth = 24;
	adev->ddev->mode_config.prefer_shadow = 1;

	adev->ddev->mode_config.fb_base = adev->mc.aper_base;

	r = amdgpu_modeset_create_props(adev);
	if (r)
		return r;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	/* allocate crtcs */
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_virtual_crtc_init(adev, i);
		if (r)
			return r;
	}

	dce_virtual_get_connector_info(adev);
	amdgpu_print_display_setup(adev->ddev);

	drm_kms_helper_poll_init(adev->ddev);

	adev->mode_info.mode_config_initialized = true;
	return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev->ddev);

	drm_mode_config_cleanup(adev->ddev);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}

static int dce_virtual_hw_init(void *handle)
{
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	return 0;
}

static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	int ret;

	ret = dce_virtual_hw_init(handle);

	return ret;
}

static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}

const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};

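/*
 * dce_virtual_ip_funcs is exported so an ASIC's IP block table can use
 * it in place of the real DCE entry when no physical display hardware
 * should be programmed (e.g. under virtualization).  A table entry
 * would look roughly like the sketch below (illustrative only, not
 * taken from this file; field names follow amdgpu_ip_block_version of
 * this era):
 *
 *	{
 *		.type = AMD_IP_BLOCK_TYPE_DCE,
 *		.major = 11,
 *		.minor = 0,
 *		.rev = 0,
 *		.funcs = &dce_virtual_ip_funcs,
 *	},
 */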
/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
	return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	/* set the active encoder to connector routing */
	amdgpu_encoder_set_active_device(encoder);

	return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	kfree(amdgpu_encoder->enc_priv);
	drm_encoder_cleanup(encoder);
	kfree(amdgpu_encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};

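/*
 * Add (or reuse) a virtual encoder for the given encoder enum.  If an
 * encoder with the same enum already exists, only its supported-device
 * mask is extended; otherwise a new DRM_MODE_ENCODER_VIRTUAL encoder is
 * created, restricted to CRTC 0, and hooked up to the helper funcs above.
 */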
static void dce_virtual_encoder_add(struct amdgpu_device *adev,
				    uint32_t encoder_enum,
				    uint32_t supported_device,
				    u16 caps)
{
	struct drm_device *dev = adev->ddev;
	struct drm_encoder *encoder;
	struct amdgpu_encoder *amdgpu_encoder;

	/* see if we already added it */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		amdgpu_encoder = to_amdgpu_encoder(encoder);
		if (amdgpu_encoder->encoder_enum == encoder_enum) {
			amdgpu_encoder->devices |= supported_device;
			return;
		}
	}

	/* add a new one */
	amdgpu_encoder = kzalloc(sizeof(struct amdgpu_encoder), GFP_KERNEL);
	if (!amdgpu_encoder)
		return;

	encoder = &amdgpu_encoder->base;
	encoder->possible_crtcs = 0x1;
	amdgpu_encoder->enc_priv = NULL;
	amdgpu_encoder->encoder_enum = encoder_enum;
	amdgpu_encoder->encoder_id = (encoder_enum & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	amdgpu_encoder->devices = supported_device;
	amdgpu_encoder->rmx_type = RMX_OFF;
	amdgpu_encoder->underscan_type = UNDERSCAN_OFF;
	amdgpu_encoder->is_ext_encoder = false;
	amdgpu_encoder->caps = caps;

	drm_encoder_init(dev, encoder, &dce_virtual_encoder_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);
	DRM_INFO("[FM]encoder: %d is VIRTUAL\n", amdgpu_encoder->encoder_id);
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.set_vga_render_state = &dce_virtual_set_vga_render_state,
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.vblank_wait = &dce_virtual_vblank_wait,
	.is_display_hung = &dce_virtual_is_display_hung,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = &dce_virtual_encoder_add,
	.add_connector = &amdgpu_connector_add,
	.stop_mc_access = &dce_virtual_stop_mc_access,
	.resume_mc_access = &dce_virtual_resume_mc_access,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	if (adev->mode_info.funcs == NULL)
		adev->mode_info.funcs = &dce_virtual_display_funcs;
}

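/*
 * hrtimer callback that emulates the vblank interrupt: bump the vblank
 * counter, deliver the DRM vblank event, complete any pending page flip
 * and re-arm the timer for the next period.
 */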
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_mode_info *mode_info = container_of(vblank_timer, struct amdgpu_mode_info, vblank_timer);
	struct amdgpu_device *adev = container_of(mode_info, struct amdgpu_device, mode_info);
	unsigned crtc = 0;

	adev->ddev->vblank[0].count++;
	drm_handle_vblank(adev->ddev, crtc);
	dce_virtual_pageflip_irq(adev, NULL, NULL);
	hrtimer_start(vblank_timer, ktime_set(0, DCE_VIRTUAL_VBLANK_PERIOD), HRTIMER_MODE_REL);
	return HRTIMER_NORESTART;
}

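/*
 * Enable/disable the software vblank source: start the hrtimer with a
 * DCE_VIRTUAL_VBLANK_PERIOD period when the source is enabled and no
 * timer is running yet, cancel it when the source is disabled, and zero
 * the vblank counter whenever the source is turned off or freshly
 * enabled.
 */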
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	if (state && !adev->mode_info.vsync_timer_enabled) {
		DRM_DEBUG("Enable software vsync timer\n");
		hrtimer_init(&adev->mode_info.vblank_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
		hrtimer_set_expires(&adev->mode_info.vblank_timer, ktime_set(0, DCE_VIRTUAL_VBLANK_PERIOD));
		adev->mode_info.vblank_timer.function = dce_virtual_vblank_timer_handle;
		hrtimer_start(&adev->mode_info.vblank_timer, ktime_set(0, DCE_VIRTUAL_VBLANK_PERIOD), HRTIMER_MODE_REL);
	} else if (!state && adev->mode_info.vsync_timer_enabled) {
		DRM_DEBUG("Disable software vsync timer\n");
		hrtimer_cancel(&adev->mode_info.vblank_timer);
	}

	if (!state || (state && !adev->mode_info.vsync_timer_enabled))
		adev->ddev->vblank[0].count = 0;
	adev->mode_info.vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}

static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	switch (type) {
	case AMDGPU_CRTC_IRQ_VBLANK1:
		dce_virtual_set_crtc_vblank_interrupt_state(adev, 0, state);
		break;
	default:
		break;
	}
	return 0;
}

static void dce_virtual_crtc_vblank_int_ack(struct amdgpu_device *adev,
					    int crtc)
{
	if (crtc >= adev->mode_info.num_crtc) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}
}

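/*
 * Process a CRTC interrupt for the (single) virtual CRTC: advance the
 * vblank counter, hand the vblank to DRM if the interrupt source is
 * enabled, and run the page-flip completion path.
 */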
static int dce_virtual_crtc_irq(struct amdgpu_device *adev,
				struct amdgpu_irq_src *source,
				struct amdgpu_iv_entry *entry)
{
	unsigned crtc = 0;
	unsigned irq_type = AMDGPU_CRTC_IRQ_VBLANK1;

	adev->ddev->vblank[crtc].count++;
	dce_virtual_crtc_vblank_int_ack(adev, crtc);

	if (amdgpu_irq_enabled(adev, source, irq_type)) {
		drm_handle_vblank(adev->ddev, crtc);
	}
	dce_virtual_pageflip_irq(adev, NULL, NULL);
	DRM_DEBUG("IH: D%d vblank\n", crtc + 1);
	return 0;
}

static int dce_virtual_set_pageflip_irq_state(struct amdgpu_device *adev,
					      struct amdgpu_irq_src *src,
					      unsigned type,
					      enum amdgpu_interrupt_state state)
{
	if (type >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", type);
		return -EINVAL;
	}
	DRM_DEBUG("[FM]set pageflip irq type %d state %d\n", type, state);

	return 0;
}

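/*
 * Complete a pending page flip on CRTC 0: under the event lock, check
 * that a flip has actually been submitted, clear the flip state, send
 * the vblank event to userspace and schedule the unpin work for the old
 * framebuffer.
 */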
static int dce_virtual_pageflip_irq(struct amdgpu_device *adev,
				    struct amdgpu_irq_src *source,
				    struct amdgpu_iv_entry *entry)
{
	unsigned long flags;
	unsigned crtc_id = 0;
	struct amdgpu_crtc *amdgpu_crtc;
	struct amdgpu_flip_work *works;

	crtc_id = 0;
	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}

	/* IRQ could occur when in initial stage */
	if (amdgpu_crtc == NULL)
		return 0;

	spin_lock_irqsave(&adev->ddev->event_lock, flags);
	works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
			"AMDGPU_FLIP_SUBMITTED(%d)\n",
			amdgpu_crtc->pflip_status,
			AMDGPU_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
		return 0;
	}

	/* page flip completed. clean up */
	amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
	amdgpu_crtc->pflip_works = NULL;

	/* wake up userspace */
	if (works->event)
		drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

	spin_unlock_irqrestore(&adev->ddev->event_lock, flags);

	drm_crtc_vblank_put(&amdgpu_crtc->base);
	schedule_work(&works->unpin_work);

	return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = dce_virtual_crtc_irq,
};

static const struct amdgpu_irq_src_funcs dce_virtual_pageflip_irq_funcs = {
	.set = dce_virtual_set_pageflip_irq_state,
	.process = dce_virtual_pageflip_irq,
};

static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_LAST;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;

	adev->pageflip_irq.num_types = AMDGPU_PAGEFLIP_IRQ_LAST;
	adev->pageflip_irq.funcs = &dce_virtual_pageflip_irq_funcs;
}