  1. /*
  2. * Copyright 2007-8 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. * Jerome Glisse
  26. */
  27. #include "drmP.h"
  28. #include "radeon_drm.h"
  29. #include "radeon.h"
  30. #include "atom.h"
  31. #include "atom-bits.h"
  32. #include "drm_dp_helper.h"
  33. /* move these to drm_dp_helper.c/h */
  34. #define DP_LINK_CONFIGURATION_SIZE 9
  35. #define DP_LINK_STATUS_SIZE 6
  36. #define DP_DPCD_SIZE 8
  37. static char *voltage_names[] = {
  38. "0.4V", "0.6V", "0.8V", "1.2V"
  39. };
  40. static char *pre_emph_names[] = {
  41. "0dB", "3.5dB", "6dB", "9.5dB"
  42. };
  43. /***** radeon AUX functions *****/
/* Atom needs data in little endian format so swap as appropriate when
 * copying data to or from atom.  Note that atom operates on dw (32-bit)
 * units.
 *
 * @dst: destination buffer
 * @src: source buffer
 * @num_bytes: bytes to copy; bounced through 20-byte temporaries, so
 *             callers must stay within that limit
 * @to_le: true = CPU -> atom (to little endian), false = atom -> CPU
 */
void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
{
#ifdef __BIG_ENDIAN
	u8 src_tmp[20], dst_tmp[20]; /* used for byteswapping */
	u32 *dst32, *src32;
	int i;

	/* bounce through aligned temporaries so we can swap in u32 units */
	memcpy(src_tmp, src, num_bytes);
	src32 = (u32 *)src_tmp;
	dst32 = (u32 *)dst_tmp;
	if (to_le) {
		for (i = 0; i < ((num_bytes + 3) / 4); i++)
			dst32[i] = cpu_to_le32(src32[i]);
		memcpy(dst, dst_tmp, num_bytes);
	} else {
		u8 dws = num_bytes & ~3; /* count of whole-dword bytes */
		for (i = 0; i < ((num_bytes + 3) / 4); i++)
			dst32[i] = le32_to_cpu(src32[i]);
		/* copy the whole dwords, then the swapped tail bytes */
		memcpy(dst, dst_tmp, dws);
		if (num_bytes % 4) {
			for (i = 0; i < (num_bytes % 4); i++)
				dst[dws+i] = dst_tmp[dws+i];
		}
	}
#else
	/* little-endian hosts already match atom's layout */
	memcpy(dst, src, num_bytes);
#endif
}
/* Parameter-block layouts for the ProcessAuxChannelTransaction atom
 * command table; v2 adds the HPD pin id used on DCE4+ parts.
 */
union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};
/* Execute one raw DP AUX transaction through the atom
 * ProcessAuxChannelTransaction command table.
 *
 * @chan: AUX channel; its i2c record carries the channel and HPD ids
 * @send: request bytes (AUX header plus any write payload)
 * @send_bytes: length of @send
 * @recv: buffer for reply payload, may be NULL
 * @recv_size: capacity of @recv
 * @delay: delay in usec (atom takes it in 10 usec units)
 * @ack: out param, raw AUX reply status byte reported by atom
 *
 * Returns the number of payload bytes received, or a negative errno
 * for timeout/busy/error reply statuses.
 */
static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
				 u8 *send, int send_bytes,
				 u8 *recv, int recv_size,
				 u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct radeon_device *rdev = dev->dev_private;
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;

	memset(&args, 0, sizeof(args));

	/* atom scratch memory holds the request/reply buffers; note the
	 * +1 offset on base, matching the lp* offsets programmed below
	 */
	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);

	radeon_atom_copy_swap(base, send, send_bytes, true);

	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
	args.v1.ucDataOutLen = 0;
	args.v1.ucChannelID = chan->rec.i2c_id;
	args.v1.ucDelay = delay / 10;
	if (ASIC_IS_DCE4(rdev))
		args.v2.ucHPD_ID = chan->rec.hpd;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	*ack = args.v1.ucReplyStatus;

	/* timeout */
	if (args.v1.ucReplyStatus == 1) {
		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
		return -ETIMEDOUT;
	}

	/* flags not zero */
	if (args.v1.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		return -EBUSY;
	}

	/* error */
	if (args.v1.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		return -EIO;
	}

	recv_bytes = args.v1.ucDataOutLen;
	/* clamp to the caller's buffer */
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	if (recv && recv_size)
		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);

	return recv_bytes;
}
/* Native (non-i2c) AUX write of up to 16 bytes to a DPCD address.
 * Retries up to 4 times on busy/defer replies.
 *
 * Returns the number of bytes written on success, or a negative
 * errno (-1 for an oversized request).
 */
static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
				      u16 address, u8 *send, u8 send_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	int ret;
	u8 msg[20];
	int msg_bytes = send_bytes + 4; /* 4-byte AUX header + payload */
	u8 ack;
	unsigned retry;

	if (send_bytes > 16)
		return -1;

	/* build the AUX request header, then append the payload */
	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_WRITE << 4;
	msg[3] = (msg_bytes << 4) | (send_bytes - 1);
	memcpy(&msg[4], send, send_bytes);

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, NULL, 0, delay, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return send_bytes;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400); /* sink asked us to retry later */
		else
			return -EIO; /* NACK or unrecognized reply */
	}

	return -EIO;
}
/* Native (non-i2c) AUX read of @recv_bytes from a DPCD address.
 * Retries up to 4 times on busy/defer replies.
 *
 * Returns the number of bytes actually read, -EPROTO for an ACK with
 * no data, or another negative errno on failure.
 */
static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
				     u16 address, u8 *recv, int recv_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[4];
	int msg_bytes = 4; /* header-only request */
	u8 ack;
	int ret;
	unsigned retry;

	/* AUX request header: address, command, length-1 */
	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_READ << 4;
	msg[3] = (msg_bytes << 4) | (recv_bytes - 1);

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, recv, recv_bytes, delay, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return ret;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400); /* sink asked us to retry later */
		else if (ret == 0)
			return -EPROTO; /* ACKed but returned no data */
		else
			return -EIO; /* NACK or unrecognized reply */
	}

	return -EIO;
}
/* Write a single DPCD register, ignoring errors (best-effort). */
static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
				  u16 reg, u8 val)
{
	radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
}
/* Read a single DPCD register; returns 0 if the AUX read fails,
 * which callers cannot distinguish from a register that reads 0.
 */
static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
			       u16 reg)
{
	u8 val = 0;

	radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);
	return val;
}
/* Single-byte i2c-over-AUX transfer; serves as the i2c algorithm
 * backend for DDC over DP (EDID reads etc.).
 *
 * @adapter: i2c adapter (embedded in a radeon_i2c_chan)
 * @mode: MODE_I2C_{START,READ,WRITE,STOP} flags from the dp-aux i2c algo
 * @write_byte: byte to send for writes
 * @read_byte: out param receiving the byte for reads
 *
 * Returns the AUX transfer result (>= 0) on success or a negative
 * errno; retries up to 4 times on busy/defer replies.
 */
int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
			 u8 write_byte, u8 *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
	u16 address = algo_data->address;
	u8 msg[5];
	u8 reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes = 1;
	int ret;
	u8 ack;

	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[2] = AUX_I2C_READ << 4;
	else
		msg[2] = AUX_I2C_WRITE << 4;

	/* MOT (middle-of-transaction) keeps the i2c transfer open */
	if (!(mode & MODE_I2C_STOP))
		msg[2] |= AUX_I2C_MOT << 4;

	msg[0] = address;
	msg[1] = address >> 8;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg_bytes = 5;
		msg[3] = msg_bytes << 4;
		msg[4] = write_byte;
		break;
	case MODE_I2C_READ:
		msg_bytes = 4;
		msg[3] = msg_bytes << 4;
		break;
	default:
		/* address-only transaction (start/stop) */
		msg_bytes = 4;
		msg[3] = 3 << 4;
		break;
	}

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(auxch,
					    msg, msg_bytes, reply, reply_bytes, 0, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		/* first check the native AUX reply ... */
		switch (ack & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_ch native defer\n");
			udelay(400);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}

		/* ... then the nested i2c-over-AUX reply */
		switch (ack & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ)
				*read_byte = reply[0];
			return ret;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(400);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}
	}

	DRM_DEBUG_KMS("aux i2c too many retries, giving up\n");
	return -EREMOTEIO;
}
  283. /***** general DP utility functions *****/
/* Return the DPCD link-status byte for address @r from a cached
 * DP_LINK_STATUS_SIZE block that starts at DP_LANE0_1_STATUS.
 */
static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}
  288. static u8 dp_get_lane_status(u8 link_status[DP_LINK_STATUS_SIZE],
  289. int lane)
  290. {
  291. int i = DP_LANE0_1_STATUS + (lane >> 1);
  292. int s = (lane & 1) * 4;
  293. u8 l = dp_link_status(link_status, i);
  294. return (l >> s) & 0xf;
  295. }
  296. static bool dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],
  297. int lane_count)
  298. {
  299. int lane;
  300. u8 lane_status;
  301. for (lane = 0; lane < lane_count; lane++) {
  302. lane_status = dp_get_lane_status(link_status, lane);
  303. if ((lane_status & DP_LANE_CR_DONE) == 0)
  304. return false;
  305. }
  306. return true;
  307. }
  308. static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
  309. int lane_count)
  310. {
  311. u8 lane_align;
  312. u8 lane_status;
  313. int lane;
  314. lane_align = dp_link_status(link_status,
  315. DP_LANE_ALIGN_STATUS_UPDATED);
  316. if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
  317. return false;
  318. for (lane = 0; lane < lane_count; lane++) {
  319. lane_status = dp_get_lane_status(link_status, lane);
  320. if ((lane_status & DP_CHANNEL_EQ_BITS) != DP_CHANNEL_EQ_BITS)
  321. return false;
  322. }
  323. return true;
  324. }
  325. static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
  326. int lane)
  327. {
  328. int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
  329. int s = ((lane & 1) ?
  330. DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
  331. DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
  332. u8 l = dp_link_status(link_status, i);
  333. return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
  334. }
  335. static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
  336. int lane)
  337. {
  338. int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
  339. int s = ((lane & 1) ?
  340. DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
  341. DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
  342. u8 l = dp_link_status(link_status, i);
  343. return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
  344. }
#define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200
#define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5

/* Compute the next per-lane training settings from the sink's adjust
 * requests: take the maximum voltage/pre-emphasis requested by any
 * lane and apply it to all four lanes, flagging the MAX_*_REACHED
 * bits when the caps are hit.
 */
static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
				int lane_count,
				u8 train_set[4])
{
	u8 v = 0;
	u8 p = 0;
	int lane;

	for (lane = 0; lane < lane_count; lane++) {
		u8 this_v = dp_get_adjust_request_voltage(link_status, lane);
		u8 this_p = dp_get_adjust_request_pre_emphasis(link_status, lane);

		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
			      lane,
			      voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
			      pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

		/* keep the strongest request seen across lanes */
		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	if (v >= DP_VOLTAGE_MAX)
		v |= DP_TRAIN_MAX_SWING_REACHED;

	if (p >= DP_PRE_EMPHASIS_MAX)
		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
		      voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
		      pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

	/* all four entries are written regardless of lane_count */
	for (lane = 0; lane < 4; lane++)
		train_set[lane] = v | p;
}
  376. /* convert bits per color to bits per pixel */
  377. /* get bpc from the EDID */
  378. static int convert_bpc_to_bpp(int bpc)
  379. {
  380. #if 0
  381. if (bpc == 0)
  382. return 24;
  383. else
  384. return bpc * 3;
  385. #endif
  386. return 24;
  387. }
  388. /* get the max pix clock supported by the link rate and lane num */
  389. static int dp_get_max_dp_pix_clock(int link_rate,
  390. int lane_num,
  391. int bpp)
  392. {
  393. return (link_rate * lane_num * 8) / bpp;
  394. }
  395. static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
  396. {
  397. switch (dpcd[DP_MAX_LINK_RATE]) {
  398. case DP_LINK_BW_1_62:
  399. default:
  400. return 162000;
  401. case DP_LINK_BW_2_7:
  402. return 270000;
  403. case DP_LINK_BW_5_4:
  404. return 540000;
  405. }
  406. }
/* Max lane count advertised by the sink's DPCD; the mask strips the
 * capability flags that share this register.
 */
static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
{
	return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
}
  411. static u8 dp_get_dp_link_rate_coded(int link_rate)
  412. {
  413. switch (link_rate) {
  414. case 162000:
  415. default:
  416. return DP_LINK_BW_1_62;
  417. case 270000:
  418. return DP_LINK_BW_2_7;
  419. case 540000:
  420. return DP_LINK_BW_5_4;
  421. }
  422. }
  423. /***** radeon specific DP functions *****/
/* First get the min lane# when low rate is used according to pixel clock
 * (prefer low rate), second check max lane# supported by DP panel,
 * if the max lane# < low rate lane# then use max lane# instead.
 */
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
					u8 dpcd[DP_DPCD_SIZE],
					int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int max_link_rate = dp_get_max_link_rate(dpcd);
	int max_lane_num = dp_get_max_lane_number(dpcd);
	int lane_num;
	int max_dp_pix_clock;

	/* lane counts go 1, 2, 4; pick the first whose bandwidth at the
	 * sink's max link rate covers the requested pixel clock.  If
	 * none does, the loop exits with lane_num == max_lane_num.
	 */
	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
		if (pix_clock <= max_dp_pix_clock)
			break;
	}

	return lane_num;
}
/* Pick the lowest standard link rate (162/270/540 MHz) that can carry
 * @pix_clock at the lane count chosen for this mode.  DP-to-VGA
 * (Nutmeg) bridges are fixed at 270 MHz; 540 MHz is only considered
 * on DP 1.2 capable hardware.  Falls back to the sink's max rate if
 * nothing lower suffices.
 */
static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
				       u8 dpcd[DP_DPCD_SIZE],
				       int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int lane_num, max_pix_clock;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		return 270000;

	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 162000;
	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 270000;
	if (radeon_connector_is_dp12_capable(connector)) {
		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
		if (pix_clock <= max_pix_clock)
			return 540000;
	}

	return dp_get_max_link_rate(dpcd);
}
/* Execute the legacy DPEncoderService atom command table.
 *
 * @action: ATOM_DP_ACTION_* command
 * @dp_clock: link clock in kHz (atom takes it in 10 kHz units)
 * @ucconfig: encoder/link config bits (or i2c id for sink-type query)
 * @lane_num: lane count (or training pattern for pattern select)
 *
 * Returns the status byte reported by the table.
 */
static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
				    int action, int dp_clock,
				    u8 ucconfig, u8 lane_num)
{
	DP_ENCODER_SERVICE_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

	memset(&args, 0, sizeof(args));
	args.ucLinkClock = dp_clock / 10;
	args.ucConfig = ucconfig;
	args.ucAction = action;
	args.ucLaneNum = lane_num;
	args.ucStatus = 0;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	return args.ucStatus;
}
/* Query the sink type (DP/eDP/passive adapter...) for this connector
 * via the DPEncoderService GET_SINK_TYPE action; returns the raw
 * status byte from atom.
 */
u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;

	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
					 dig_connector->dp_i2c_bus->rec.i2c_id, 0);
}
  490. bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
  491. {
  492. struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
  493. u8 msg[25];
  494. int ret, i;
  495. ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
  496. if (ret > 0) {
  497. memcpy(dig_connector->dpcd, msg, 8);
  498. DRM_DEBUG_KMS("DPCD: ");
  499. for (i = 0; i < 8; i++)
  500. DRM_DEBUG_KMS("%02x ", msg[i]);
  501. DRM_DEBUG_KMS("\n");
  502. return true;
  503. }
  504. dig_connector->dpcd[0] = 0;
  505. return false;
  506. }
/* Determine which DP panel mode to program for this encoder/connector:
 * external DP (default), or one of the internal modes used for
 * DP-to-LVDS bridge chips (Nutmeg/Travis) and eDP panels.  Only
 * meaningful on DCE4+.
 */
int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;

	if (!ASIC_IS_DCE4(rdev))
		return panel_mode;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
	else if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
		 ENCODER_OBJECT_ID_TRAVIS) {
		/* Travis needs a signature check at DPCD 0x503..0x508 to
		 * pick between DP1 and DP2 internal mode; the expected
		 * bytes are ASCII 's','i','v','a','r','T' ("Travis"
		 * reversed) -- presumably a branch-device id string.
		 */
		u8 id[6];
		int i;

		for (i = 0; i < 6; i++)
			id[i] = radeon_read_dpcd_reg(radeon_connector, 0x503 + i);
		if (id[0] == 0x73 &&
		    id[1] == 0x69 &&
		    id[2] == 0x76 &&
		    id[3] == 0x61 &&
		    id[4] == 0x72 &&
		    id[5] == 0x54)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
		else
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		/* eDP panels that support the alternate framing use DP2 mode */
		u8 tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);

		if (tmp & 1)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	}

	return panel_mode;
}
/* Cache the link rate and lane count to use for @mode in the
 * connector's dig state, for later use by link training.  Only
 * applies to real DP/eDP sinks.
 */
void radeon_dp_set_link_config(struct drm_connector *connector,
			       struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;

	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
		dig_connector->dp_clock =
			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
		dig_connector->dp_lane_count =
			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
	}
}
/* Mode-validation helper: reject modes that would need the 5.4 GHz
 * link rate on hardware that is not DP 1.2 capable.
 */
int radeon_dp_mode_valid_helper(struct drm_connector *connector,
				struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;
	int dp_clock;

	if (!radeon_connector->con_priv)
		return MODE_CLOCK_HIGH;
	dig_connector = radeon_connector->con_priv;

	dp_clock =
		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);

	if ((dp_clock == 540000) &&
	    (!radeon_connector_is_dp12_capable(connector)))
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}
/* Read the six link-status bytes (DP_LANE0_1_STATUS onward) from the
 * sink into @link_status.  Returns false if the AUX read fails.
 */
static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
				      u8 link_status[DP_LINK_STATUS_SIZE])
{
	int ret;

	ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
					link_status, DP_LINK_STATUS_SIZE, 100);
	if (ret <= 0) {
		return false;
	}

	DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
		      link_status[0], link_status[1], link_status[2],
		      link_status[3], link_status[4], link_status[5]);
	return true;
}
  587. bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
  588. {
  589. u8 link_status[DP_LINK_STATUS_SIZE];
  590. struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
  591. if (!radeon_dp_get_link_status(radeon_connector, link_status))
  592. return false;
  593. if (dp_channel_eq_ok(link_status, dig->dp_lane_count))
  594. return false;
  595. return true;
  596. }
/* Scratch state shared by the link-training helper functions below. */
struct radeon_dp_link_train_info {
	struct radeon_device *rdev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	int enc_id;		/* ATOM_DP_CONFIG_* encoder/link selection bits */
	int dp_clock;		/* link rate in kHz */
	int dp_lane_count;	/* active lane count */
	int rd_interval;	/* DPCD TRAINING_AUX_RD_INTERVAL value */
	bool tp3_supported;	/* sink + ASIC support training pattern 3 */
	u8 dpcd[8];		/* cached first 8 bytes of the DPCD */
	u8 train_set[4];	/* current per-lane vs/pre-emph settings */
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 tries;		/* retry counter for the training loops */
	bool use_dpencoder;	/* use legacy DPEncoderService table */
};
/* Push the current train_set voltage-swing/pre-emphasis values to
 * both the source transmitter and the sink's TRAINING_LANEx_SET regs.
 */
static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
	/* set the initial vs/emph on the source */
	atombios_dig_transmitter_setup(dp_info->encoder,
				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
				       0, dp_info->train_set[0]); /* sets all lanes at once */

	/* set the vs/emph on the sink */
	radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
				   dp_info->train_set, dp_info->dp_lane_count, 0);
}
/* Select training pattern @tp (DP_TRAINING_PATTERN_1/2/3) on both the
 * source encoder and the sink.
 */
static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
{
	int rtp = 0;

	/* set training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
		/* DCE4+ / newer tables: program via DIGxEncoderControl */
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
			break;
		case DP_TRAINING_PATTERN_3:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
			break;
		}
		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
	} else {
		/* legacy DPEncoderService path; it only knows patterns 1/2 */
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = 0;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = 1;
			break;
		}
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
					  dp_info->dp_clock, dp_info->enc_id, rtp);
	}

	/* enable training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
}
/* Prepare source and sink for link training: power up the sink,
 * program downspread, panel mode, lane count and link rate on the
 * sink, start training on the source and make sure the sink's
 * training pattern is disabled.  Always returns 0.
 */
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
	u8 tmp;

	/* power up the sink (dpcd[0] is the DPCD revision; SET_POWER
	 * only exists on DPCD 1.1+)
	 */
	if (dp_info->dpcd[0] >= 0x11)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_SET_POWER, DP_SET_POWER_D0);

	/* possibly enable downspread on the sink (dpcd[3] is MAX_DOWNSPREAD) */
	if (dp_info->dpcd[3] & 0x1)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, 0);

	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
		radeon_write_dpcd_reg(dp_info->radeon_connector, DP_EDP_CONFIGURATION_SET, 1);

	/* set the lane count on the sink */
	tmp = dp_info->dp_lane_count;
	if (dp_info->dpcd[DP_DPCD_REV] >= 0x11 &&
	    dp_info->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	return 0;
}
/* Tear down training: disable the training pattern on the sink and
 * then on the source.  Always returns 0.
 */
static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
	udelay(400);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	return 0;
}
/* Clock-recovery phase of link training (training pattern 1): adjust
 * voltage swing / pre-emphasis per the sink's requests until every
 * lane reports CR done.  Returns 0 on success, -1 on failure.
 */
static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
	bool clock_recovery;
	u8 voltage;
	int i;

	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	memset(dp_info->train_set, 0, 4);
	radeon_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;	/* sentinel so the first pass never matches */
	while (1) {
		/* wait the DPCD-specified interval before re-reading status */
		if (dp_info->rd_interval == 0)
			udelay(100);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		/* give up if every lane is already at max voltage swing */
		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		/* at most 5 attempts at the same voltage level */
		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			      DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}
/* Channel-equalization phase of link training (pattern 2, or 3 when
 * supported): keep adjusting until all lanes report EQ/symbol lock
 * and inter-lane alignment, with at most 5 retries.  Returns 0 on
 * success, -1 on failure.
 */
static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		/* wait the DPCD-specified interval before re-reading status */
		if (dp_info->rd_interval == 0)
			udelay(400);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			      >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}
/* Run the full DP link-training sequence (init, clock recovery,
 * channel equalization, finish) for this encoder/connector pair.
 * No-op for connectors that are not DP/eDP or lack dig state.
 */
void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	int index;
	u8 tmp, frev, crev;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	/* only train real DP/eDP sinks */
	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* DPEncoderService newer than 1.1 can't program properly the
	 * training pattern. When facing such version use the
	 * DIGXEncoderControl (X== 1 | 2)
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
		if (crev > 1) {
			dp_info.use_dpencoder = false;
		}
	}

	/* encoder/link selection bits for the legacy table */
	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
	tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
	/* training pattern 3 needs DCE5+ hardware and sink support */
	if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
		dp_info.tp3_supported = true;
	else
		dp_info.tp3_supported = false;

	memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.radeon_connector = radeon_connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	/* always tear the training pattern back down, even on failure */
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}
  874. done:
  875. if (radeon_dp_link_train_finish(&dp_info))
  876. return;
  877. }