dce_virtual.c

/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */
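
/*
 * Virtual display engine (DCE): every hardware callback below is a stub,
 * so a display pipe with one virtual CRTC, encoder and connector can be
 * exposed even when no physical display hardware is driven (e.g. headless
 * or virtualized setups).
 */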

#include "drmP.h"
#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_atombios.h"
#include "atombios_crtc.h"
#include "atombios_encoders.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);

/**
 * dce_virtual_vblank_wait - vblank wait asic callback.
 *
 * @adev: amdgpu_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 * A no-op for the virtual display engine.
 */
static void dce_virtual_vblank_wait(struct amdgpu_device *adev, int crtc)
{
        return;
}

static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
        if (crtc >= adev->mode_info.num_crtc)
                return 0;
        else
                return adev->ddev->vblank[crtc].count;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
                                  int crtc_id, u64 crtc_base, bool async)
{
        return;
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
                                           u32 *vbl, u32 *position)
{
        if ((crtc < 0) || (crtc >= adev->mode_info.num_crtc))
                return -EINVAL;

        *vbl = 0;
        *position = 0;

        return 0;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
                                  enum amdgpu_hpd_id hpd)
{
        return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
                                         enum amdgpu_hpd_id hpd)
{
        return;
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
        return 0;
}

static bool dce_virtual_is_display_hung(struct amdgpu_device *adev)
{
        return false;
}

void dce_virtual_stop_mc_access(struct amdgpu_device *adev,
                                struct amdgpu_mode_mc_save *save)
{
        return;
}

void dce_virtual_resume_mc_access(struct amdgpu_device *adev,
                                  struct amdgpu_mode_mc_save *save)
{
        return;
}

void dce_virtual_set_vga_render_state(struct amdgpu_device *adev,
                                      bool render)
{
        return;
}

/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * Calculate and program the display watermarks and line
 * buffer allocation (CIK). A no-op for the virtual display engine.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
        return;
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
                                      u16 *green, u16 *blue, uint32_t size)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
        int i;

        /* userspace palettes are always correct as is */
        for (i = 0; i < size; i++) {
                amdgpu_crtc->lut_r[i] = red[i] >> 6;
                amdgpu_crtc->lut_g[i] = green[i] >> 6;
                amdgpu_crtc->lut_b[i] = blue[i] >> 6;
        }

        return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

        drm_crtc_cleanup(crtc);
        kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
        .cursor_set2 = NULL,
        .cursor_move = NULL,
        .gamma_set = dce_virtual_crtc_gamma_set,
        .set_config = amdgpu_crtc_set_config,
        .destroy = dce_virtual_crtc_destroy,
        .page_flip = amdgpu_crtc_page_flip,
};
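
/*
 * DPMS on the virtual CRTC only has to keep the software state and the
 * VBLANK/PFLIP interrupt sources consistent; there is no display hardware
 * to power up or down behind it.
 */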
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
        struct drm_device *dev = crtc->dev;
        struct amdgpu_device *adev = dev->dev_private;
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
        unsigned type;

        switch (mode) {
        case DRM_MODE_DPMS_ON:
                amdgpu_crtc->enabled = true;
                /* Make sure VBLANK and PFLIP interrupts are still enabled */
                type = amdgpu_crtc_idx_to_irq_type(adev, amdgpu_crtc->crtc_id);
                amdgpu_irq_update(adev, &adev->crtc_irq, type);
                amdgpu_irq_update(adev, &adev->pageflip_irq, type);
                drm_vblank_on(dev, amdgpu_crtc->crtc_id);
                break;
        case DRM_MODE_DPMS_STANDBY:
        case DRM_MODE_DPMS_SUSPEND:
        case DRM_MODE_DPMS_OFF:
                drm_vblank_off(dev, amdgpu_crtc->crtc_id);
                amdgpu_crtc->enabled = false;
                break;
        }
}

static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
        dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
        dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

        dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
        if (crtc->primary->fb) {
                int r;
                struct amdgpu_framebuffer *amdgpu_fb;
                struct amdgpu_bo *rbo;

                amdgpu_fb = to_amdgpu_framebuffer(crtc->primary->fb);
                rbo = gem_to_amdgpu_bo(amdgpu_fb->obj);
                r = amdgpu_bo_reserve(rbo, false);
                if (unlikely(r))
                        DRM_ERROR("failed to reserve rbo before unpin\n");
                else {
                        amdgpu_bo_unpin(rbo);
                        amdgpu_bo_unreserve(rbo);
                }
        }

        amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
        amdgpu_crtc->encoder = NULL;
        amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
                                     struct drm_display_mode *mode,
                                     struct drm_display_mode *adjusted_mode,
                                     int x, int y, struct drm_framebuffer *old_fb)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

        /* update the hw version for dpm */
        amdgpu_crtc->hw_mode = *adjusted_mode;

        return 0;
}

static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
                                        const struct drm_display_mode *mode,
                                        struct drm_display_mode *adjusted_mode)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct drm_encoder *encoder;

        /* assign the encoder to the amdgpu crtc to avoid repeated lookups later */
        list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
                if (encoder->crtc == crtc) {
                        amdgpu_crtc->encoder = encoder;
                        amdgpu_crtc->connector = amdgpu_get_connector_for_encoder(encoder);
                        break;
                }
        }
        if ((amdgpu_crtc->encoder == NULL) || (amdgpu_crtc->connector == NULL)) {
                amdgpu_crtc->encoder = NULL;
                amdgpu_crtc->connector = NULL;
                return false;
        }

        return true;
}

static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
                                     struct drm_framebuffer *old_fb)
{
        return 0;
}

static void dce_virtual_crtc_load_lut(struct drm_crtc *crtc)
{
        return;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
                                            struct drm_framebuffer *fb,
                                            int x, int y, enum mode_set_atomic state)
{
        return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
        .dpms = dce_virtual_crtc_dpms,
        .mode_fixup = dce_virtual_crtc_mode_fixup,
        .mode_set = dce_virtual_crtc_mode_set,
        .mode_set_base = dce_virtual_crtc_set_base,
        .mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
        .prepare = dce_virtual_crtc_prepare,
        .commit = dce_virtual_crtc_commit,
        .load_lut = dce_virtual_crtc_load_lut,
        .disable = dce_virtual_crtc_disable,
};
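
/*
 * Allocate and register a software-only CRTC: init a 256-entry gamma table
 * with a linear LUT and attach the virtual CRTC helper functions.  No PLL
 * is ever assigned (ATOM_PPLL_INVALID).
 */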
static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
        struct amdgpu_crtc *amdgpu_crtc;
        int i;

        amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
                              (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
        if (amdgpu_crtc == NULL)
                return -ENOMEM;

        drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

        drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
        amdgpu_crtc->crtc_id = index;
        adev->mode_info.crtcs[index] = amdgpu_crtc;

        for (i = 0; i < 256; i++) {
                amdgpu_crtc->lut_r[i] = i << 2;
                amdgpu_crtc->lut_g[i] = i << 2;
                amdgpu_crtc->lut_b[i] = i << 2;
        }

        amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
        amdgpu_crtc->encoder = NULL;
        amdgpu_crtc->connector = NULL;
        drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

        return 0;
}

static int dce_virtual_early_init(void *handle)
{
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

        dce_virtual_set_display_funcs(adev);
        dce_virtual_set_irq_funcs(adev);

        adev->mode_info.num_crtc = 1;
        adev->mode_info.num_hpd = 1;
        adev->mode_info.num_dig = 1;
        return 0;
}
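
/*
 * Register one virtual connector/encoder pair: a DRM_MODE_CONNECTOR_VIRTUAL
 * connector with no DDC bus or HPD pin, linked to a virtual encoder, so the
 * rest of the display stack has something to bind to.
 */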
static bool dce_virtual_get_connector_info(struct amdgpu_device *adev)
{
        struct amdgpu_i2c_bus_rec ddc_bus;
        struct amdgpu_router router;
        struct amdgpu_hpd hpd;

        /* look up gpio for ddc, hpd */
        ddc_bus.valid = false;
        hpd.hpd = AMDGPU_HPD_NONE;
        /* needed for aux chan transactions */
        ddc_bus.hpd = hpd.hpd;

        memset(&router, 0, sizeof(router));
        router.ddc_valid = false;
        router.cd_valid = false;
        amdgpu_display_add_connector(adev,
                                     0,
                                     ATOM_DEVICE_CRT1_SUPPORT,
                                     DRM_MODE_CONNECTOR_VIRTUAL, &ddc_bus,
                                     CONNECTOR_OBJECT_ID_VIRTUAL,
                                     &hpd,
                                     &router);

        amdgpu_display_add_encoder(adev, ENCODER_VIRTUAL_ENUM_VIRTUAL,
                                   ATOM_DEVICE_CRT1_SUPPORT,
                                   0);

        amdgpu_link_encoder_connector(adev->ddev);

        return true;
}
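
/*
 * Software init: register the CRTC interrupt source (src id 229), fill in
 * the DRM mode_config limits and properties, create the CRTCs and the
 * virtual connector/encoder, and start connector polling.
 */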
static int dce_virtual_sw_init(void *handle)
{
        int r, i;
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

        r = amdgpu_irq_add_id(adev, 229, &adev->crtc_irq);
        if (r)
                return r;

        adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;

        adev->ddev->mode_config.max_width = 16384;
        adev->ddev->mode_config.max_height = 16384;

        adev->ddev->mode_config.preferred_depth = 24;
        adev->ddev->mode_config.prefer_shadow = 1;

        adev->ddev->mode_config.fb_base = adev->mc.aper_base;

        r = amdgpu_modeset_create_props(adev);
        if (r)
                return r;

        adev->ddev->mode_config.max_width = 16384;
        adev->ddev->mode_config.max_height = 16384;

        /* allocate crtcs */
        for (i = 0; i < adev->mode_info.num_crtc; i++) {
                r = dce_virtual_crtc_init(adev, i);
                if (r)
                        return r;
        }

        dce_virtual_get_connector_info(adev);
        amdgpu_print_display_setup(adev->ddev);

        drm_kms_helper_poll_init(adev->ddev);

        adev->mode_info.mode_config_initialized = true;
        return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

        kfree(adev->mode_info.bios_hardcoded_edid);

        drm_kms_helper_poll_fini(adev->ddev);

        drm_mode_config_cleanup(adev->ddev);
        adev->mode_info.mode_config_initialized = false;
        return 0;
}

static int dce_virtual_hw_init(void *handle)
{
        return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
        return 0;
}

static int dce_virtual_suspend(void *handle)
{
        return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
        int ret;

        ret = dce_virtual_hw_init(handle);

        return ret;
}

static bool dce_virtual_is_idle(void *handle)
{
        return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
        return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
        return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
                                             enum amd_clockgating_state state)
{
        return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
                                             enum amd_powergating_state state)
{
        return 0;
}

const struct amd_ip_funcs dce_virtual_ip_funcs = {
        .name = "dce_virtual",
        .early_init = dce_virtual_early_init,
        .late_init = NULL,
        .sw_init = dce_virtual_sw_init,
        .sw_fini = dce_virtual_sw_fini,
        .hw_init = dce_virtual_hw_init,
        .hw_fini = dce_virtual_hw_fini,
        .suspend = dce_virtual_suspend,
        .resume = dce_virtual_resume,
        .is_idle = dce_virtual_is_idle,
        .wait_for_idle = dce_virtual_wait_for_idle,
        .soft_reset = dce_virtual_soft_reset,
        .set_clockgating_state = dce_virtual_set_clockgating_state,
        .set_powergating_state = dce_virtual_set_powergating_state,
};

/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
        return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
        return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
                             struct drm_display_mode *mode,
                             struct drm_display_mode *adjusted_mode)
{
        return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
        return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
        return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
                                           const struct drm_display_mode *mode,
                                           struct drm_display_mode *adjusted_mode)
{
        /* set the active encoder to connector routing */
        amdgpu_encoder_set_active_device(encoder);

        return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
        .dpms = dce_virtual_encoder_dpms,
        .mode_fixup = dce_virtual_encoder_mode_fixup,
        .prepare = dce_virtual_encoder_prepare,
        .mode_set = dce_virtual_encoder_mode_set,
        .commit = dce_virtual_encoder_commit,
        .disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
        struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

        kfree(amdgpu_encoder->enc_priv);
        drm_encoder_cleanup(encoder);
        kfree(amdgpu_encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
        .destroy = dce_virtual_encoder_destroy,
};
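
/*
 * Add a DRM_MODE_ENCODER_VIRTUAL encoder for the given encoder enum, or
 * extend the supported-device mask of one that is already registered.
 * The encoder is restricted to CRTC 0 (possible_crtcs = 0x1).
 */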
static void dce_virtual_encoder_add(struct amdgpu_device *adev,
                                    uint32_t encoder_enum,
                                    uint32_t supported_device,
                                    u16 caps)
{
        struct drm_device *dev = adev->ddev;
        struct drm_encoder *encoder;
        struct amdgpu_encoder *amdgpu_encoder;

        /* see if we already added it */
        list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
                amdgpu_encoder = to_amdgpu_encoder(encoder);
                if (amdgpu_encoder->encoder_enum == encoder_enum) {
                        amdgpu_encoder->devices |= supported_device;
                        return;
                }
        }

        /* add a new one */
        amdgpu_encoder = kzalloc(sizeof(struct amdgpu_encoder), GFP_KERNEL);
        if (!amdgpu_encoder)
                return;

        encoder = &amdgpu_encoder->base;
        encoder->possible_crtcs = 0x1;
        amdgpu_encoder->enc_priv = NULL;
        amdgpu_encoder->encoder_enum = encoder_enum;
        amdgpu_encoder->encoder_id = (encoder_enum & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
        amdgpu_encoder->devices = supported_device;
        amdgpu_encoder->rmx_type = RMX_OFF;
        amdgpu_encoder->underscan_type = UNDERSCAN_OFF;
        amdgpu_encoder->is_ext_encoder = false;
        amdgpu_encoder->caps = caps;
        drm_encoder_init(dev, encoder, &dce_virtual_encoder_funcs,
                         DRM_MODE_ENCODER_VIRTUAL, NULL);
        drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);
        DRM_INFO("[FM]encoder: %d is VIRTUAL\n", amdgpu_encoder->encoder_id);
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
        .set_vga_render_state = &dce_virtual_set_vga_render_state,
        .bandwidth_update = &dce_virtual_bandwidth_update,
        .vblank_get_counter = &dce_virtual_vblank_get_counter,
        .vblank_wait = &dce_virtual_vblank_wait,
        .is_display_hung = &dce_virtual_is_display_hung,
        .backlight_set_level = NULL,
        .backlight_get_level = NULL,
        .hpd_sense = &dce_virtual_hpd_sense,
        .hpd_set_polarity = &dce_virtual_hpd_set_polarity,
        .hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
        .page_flip = &dce_virtual_page_flip,
        .page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
        .add_encoder = &dce_virtual_encoder_add,
        .add_connector = &amdgpu_connector_add,
        .stop_mc_access = &dce_virtual_stop_mc_access,
        .resume_mc_access = &dce_virtual_resume_mc_access,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
        if (adev->mode_info.funcs == NULL)
                adev->mode_info.funcs = &dce_virtual_display_funcs;
}

static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
                                                        int crtc,
                                                        enum amdgpu_interrupt_state state)
{
        if (crtc >= adev->mode_info.num_crtc) {
                DRM_DEBUG("invalid crtc %d\n", crtc);
                return;
        }
}

static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
                                          struct amdgpu_irq_src *source,
                                          unsigned type,
                                          enum amdgpu_interrupt_state state)
{
        switch (type) {
        case AMDGPU_CRTC_IRQ_VBLANK1:
                dce_virtual_set_crtc_vblank_interrupt_state(adev, 0, state);
                break;
        default:
                break;
        }
        return 0;
}

static void dce_virtual_crtc_vblank_int_ack(struct amdgpu_device *adev,
                                            int crtc)
{
        if (crtc >= adev->mode_info.num_crtc) {
                DRM_DEBUG("invalid crtc %d\n", crtc);
                return;
        }
}
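
/*
 * The virtual CRTC has no real vblank interrupt; when the CRTC interrupt
 * source fires, the software vblank counter is advanced by hand and the
 * event is forwarded to DRM via drm_handle_vblank().
 */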
static int dce_virtual_crtc_irq(struct amdgpu_device *adev,
                                struct amdgpu_irq_src *source,
                                struct amdgpu_iv_entry *entry)
{
        unsigned crtc = 0;
        unsigned irq_type = AMDGPU_CRTC_IRQ_VBLANK1;

        adev->ddev->vblank[crtc].count++;
        dce_virtual_crtc_vblank_int_ack(adev, crtc);

        if (amdgpu_irq_enabled(adev, source, irq_type)) {
                drm_handle_vblank(adev->ddev, crtc);
        }
        DRM_DEBUG("IH: D%d vblank\n", crtc + 1);
        return 0;
}

static int dce_virtual_set_pageflip_irq_state(struct amdgpu_device *adev,
                                              struct amdgpu_irq_src *src,
                                              unsigned type,
                                              enum amdgpu_interrupt_state state)
{
        if (type >= adev->mode_info.num_crtc) {
                DRM_ERROR("invalid pageflip crtc %d\n", type);
                return -EINVAL;
        }
        DRM_DEBUG("[FM]set pageflip irq type %d state %d\n", type, state);

        return 0;
}
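
/*
 * Complete a submitted page flip on CRTC 0: clear the pending flip state
 * under the event lock, deliver the vblank event to userspace and hand the
 * old buffer to the unpin worker.
 */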
static int dce_virtual_pageflip_irq(struct amdgpu_device *adev,
                                    struct amdgpu_irq_src *source,
                                    struct amdgpu_iv_entry *entry)
{
        unsigned long flags;
        unsigned crtc_id = 0;
        struct amdgpu_crtc *amdgpu_crtc;
        struct amdgpu_flip_work *works;

        crtc_id = 0;
        amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

        if (crtc_id >= adev->mode_info.num_crtc) {
                DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
                return -EINVAL;
        }

        /* IRQ could occur when in initial stage */
        if (amdgpu_crtc == NULL)
                return 0;

        spin_lock_irqsave(&adev->ddev->event_lock, flags);
        works = amdgpu_crtc->pflip_works;
        if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
                DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
                                 "AMDGPU_FLIP_SUBMITTED(%d)\n",
                                 amdgpu_crtc->pflip_status,
                                 AMDGPU_FLIP_SUBMITTED);
                spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
                return 0;
        }

        /* page flip completed. clean up */
        amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
        amdgpu_crtc->pflip_works = NULL;

        /* wake up userspace */
        if (works->event)
                drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

        spin_unlock_irqrestore(&adev->ddev->event_lock, flags);

        drm_crtc_vblank_put(&amdgpu_crtc->base);
        schedule_work(&works->unpin_work);

        return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
        .set = dce_virtual_set_crtc_irq_state,
        .process = dce_virtual_crtc_irq,
};

static const struct amdgpu_irq_src_funcs dce_virtual_pageflip_irq_funcs = {
        .set = dce_virtual_set_pageflip_irq_state,
        .process = dce_virtual_pageflip_irq,
};
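
/*
 * Hook the virtual CRTC and pageflip interrupt callbacks into
 * adev->crtc_irq and adev->pageflip_irq; called from early_init.
 */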
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
        adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_LAST;
        adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
        adev->pageflip_irq.num_types = AMDGPU_PAGEFLIP_IRQ_LAST;
        adev->pageflip_irq.funcs = &dce_virtual_pageflip_irq_funcs;
}