// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
/* Copyright(c) 2022-2023 Realtek Corporation
 */

#include "coex.h"
#include "efuse.h"
#include "fw.h"
#include "mac.h"
#include "phy.h"
#include "reg.h"
#include "rtw8851b.h"
#include "rtw8851b_rfk.h"
#include "rtw8851b_rfk_table.h"
#include "rtw8851b_table.h"
#include "txrx.h"
#include "util.h"

#define RTW8851B_FW_FORMAT_MAX 0
#define RTW8851B_FW_BASENAME "rtw89/rtw8851b_fw"
#define RTW8851B_MODULE_FIRMWARE \
	RTW8851B_FW_BASENAME ".bin"

static const struct rtw89_hfc_ch_cfg rtw8851b_hfc_chcfg_pcie[] = {
	{5, 343, grp_0}, /* ACH 0 */
	{5, 343, grp_0}, /* ACH 1 */
	{5, 343, grp_0}, /* ACH 2 */
	{5, 343, grp_0}, /* ACH 3 */
	{0, 0, grp_0}, /* ACH 4 */
	{0, 0, grp_0}, /* ACH 5 */
	{0, 0, grp_0}, /* ACH 6 */
	{0, 0, grp_0}, /* ACH 7 */
	{4, 344, grp_0}, /* B0MGQ */
	{4, 344, grp_0}, /* B0HIQ */
	{0, 0, grp_0}, /* B1MGQ */
	{0, 0, grp_0}, /* B1HIQ */
	{40, 0, 0} /* FWCMDQ */
};

static const struct rtw89_hfc_pub_cfg rtw8851b_hfc_pubcfg_pcie = {
	448, /* Group 0 */
	0, /* Group 1 */
	448, /* Public Max */
	0 /* WP threshold */
};

static const struct rtw89_hfc_param_ini rtw8851b_hfc_param_ini_pcie[] = {
	[RTW89_QTA_SCC] = {rtw8851b_hfc_chcfg_pcie, &rtw8851b_hfc_pubcfg_pcie,
			   &rtw89_mac_size.hfc_preccfg_pcie, RTW89_HCIFC_POH},
	[RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_preccfg_pcie,
			    RTW89_HCIFC_POH},
	[RTW89_QTA_INVALID] = {NULL},
};

static const struct rtw89_dle_mem rtw8851b_dle_mem_pcie[] = {
	[RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size6,
			   &rtw89_mac_size.ple_size6, &rtw89_mac_size.wde_qt6,
			   &rtw89_mac_size.wde_qt6, &rtw89_mac_size.ple_qt18,
			   &rtw89_mac_size.ple_qt58},
	[RTW89_QTA_WOW] = {RTW89_QTA_WOW, &rtw89_mac_size.wde_size6,
			   &rtw89_mac_size.ple_size6, &rtw89_mac_size.wde_qt6,
			   &rtw89_mac_size.wde_qt6, &rtw89_mac_size.ple_qt18,
			   &rtw89_mac_size.ple_qt_51b_wow},
	[RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size9,
			    &rtw89_mac_size.ple_size8, &rtw89_mac_size.wde_qt4,
			    &rtw89_mac_size.wde_qt4, &rtw89_mac_size.ple_qt13,
			    &rtw89_mac_size.ple_qt13},
	[RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
			       NULL},
};

static const struct rtw89_reg3_def rtw8851b_btc_preagc_en_defs[] = {
	{0x46D0, GENMASK(1, 0), 0x3},
	{0x4AD4, GENMASK(31, 0), 0xf},
	{0x4688, GENMASK(23, 16), 0x80},
	{0x4688, GENMASK(31, 24), 0x80},
	{0x4694, GENMASK(7, 0), 0x80},
	{0x4694, GENMASK(15, 8), 0x80},
	{0x4AE4, GENMASK(11, 6), 0x34},
	{0x4AE4, GENMASK(17, 12), 0x0},
	{0x469C, GENMASK(31, 26), 0x34},
};

static DECLARE_PHY_REG3_TBL(rtw8851b_btc_preagc_en_defs);

static const struct rtw89_reg3_def rtw8851b_btc_preagc_dis_defs[] = {
	{0x46D0, GENMASK(1, 0), 0x0},
	{0x4AD4, GENMASK(31, 0), 0x60},
	{0x4688, GENMASK(23, 16), 0x10},
	{0x4690, GENMASK(31, 24), 0x2a},
	{0x4694, GENMASK(15, 8), 0x2a},
	{0x4AE4, GENMASK(11, 6), 0x26},
	{0x4AE4, GENMASK(17, 12), 0x1e},
	{0x469C, GENMASK(31, 26), 0x26},
};

static DECLARE_PHY_REG3_TBL(rtw8851b_btc_preagc_dis_defs);

static const u32 rtw8851b_h2c_regs[RTW89_H2CREG_MAX] = {
	R_AX_H2CREG_DATA0, R_AX_H2CREG_DATA1, R_AX_H2CREG_DATA2,
	R_AX_H2CREG_DATA3
};

static const u32 rtw8851b_c2h_regs[RTW89_C2HREG_MAX] = {
	R_AX_C2HREG_DATA0, R_AX_C2HREG_DATA1, R_AX_C2HREG_DATA2,
	R_AX_C2HREG_DATA3
};

static const struct rtw89_page_regs rtw8851b_page_regs = {
	.hci_fc_ctrl = R_AX_HCI_FC_CTRL,
	.ch_page_ctrl = R_AX_CH_PAGE_CTRL,
	.ach_page_ctrl = R_AX_ACH0_PAGE_CTRL,
	.ach_page_info = R_AX_ACH0_PAGE_INFO,
	.pub_page_info3 = R_AX_PUB_PAGE_INFO3,
	.pub_page_ctrl1 = R_AX_PUB_PAGE_CTRL1,
	.pub_page_ctrl2 = R_AX_PUB_PAGE_CTRL2,
	.pub_page_info1 = R_AX_PUB_PAGE_INFO1,
	.pub_page_info2 = R_AX_PUB_PAGE_INFO2,
	.wp_page_ctrl1 = R_AX_WP_PAGE_CTRL1,
	.wp_page_ctrl2 = R_AX_WP_PAGE_CTRL2,
	.wp_page_info1 = R_AX_WP_PAGE_INFO1,
};

static const struct rtw89_reg_def rtw8851b_dcfo_comp = {
	R_DCFO_COMP_S0_V2, B_DCFO_COMP_S0_MSK_V2
};

static const struct rtw89_imr_info rtw8851b_imr_info = {
	.wdrls_imr_set = B_AX_WDRLS_IMR_SET,
	.wsec_imr_reg = R_AX_SEC_DEBUG,
	.wsec_imr_set = B_AX_IMR_ERROR,
	.mpdu_tx_imr_set = 0,
	.mpdu_rx_imr_set = 0,
	.sta_sch_imr_set = B_AX_STA_SCHEDULER_IMR_SET,
	.txpktctl_imr_b0_reg = R_AX_TXPKTCTL_ERR_IMR_ISR,
	.txpktctl_imr_b0_clr = B_AX_TXPKTCTL_IMR_B0_CLR,
	.txpktctl_imr_b0_set = B_AX_TXPKTCTL_IMR_B0_SET,
	.txpktctl_imr_b1_reg = R_AX_TXPKTCTL_ERR_IMR_ISR_B1,
	.txpktctl_imr_b1_clr = B_AX_TXPKTCTL_IMR_B1_CLR,
	.txpktctl_imr_b1_set = B_AX_TXPKTCTL_IMR_B1_SET,
	.wde_imr_clr = B_AX_WDE_IMR_CLR,
	.wde_imr_set = B_AX_WDE_IMR_SET,
	.ple_imr_clr = B_AX_PLE_IMR_CLR,
	.ple_imr_set = B_AX_PLE_IMR_SET,
	.host_disp_imr_clr = B_AX_HOST_DISP_IMR_CLR,
	.host_disp_imr_set = B_AX_HOST_DISP_IMR_SET,
	.cpu_disp_imr_clr = B_AX_CPU_DISP_IMR_CLR,
	.cpu_disp_imr_set = B_AX_CPU_DISP_IMR_SET,
	.other_disp_imr_clr = B_AX_OTHER_DISP_IMR_CLR,
	.other_disp_imr_set = 0,
	.bbrpt_com_err_imr_reg = R_AX_BBRPT_COM_ERR_IMR_ISR,
	.bbrpt_chinfo_err_imr_reg = R_AX_BBRPT_CHINFO_ERR_IMR_ISR,
	.bbrpt_err_imr_set = 0,
	.bbrpt_dfs_err_imr_reg = R_AX_BBRPT_DFS_ERR_IMR_ISR,
	.ptcl_imr_clr = B_AX_PTCL_IMR_CLR_ALL,
	.ptcl_imr_set = B_AX_PTCL_IMR_SET,
	.cdma_imr_0_reg = R_AX_DLE_CTRL,
	.cdma_imr_0_clr = B_AX_DLE_IMR_CLR,
	.cdma_imr_0_set = B_AX_DLE_IMR_SET,
	.cdma_imr_1_reg = 0,
	.cdma_imr_1_clr = 0,
	.cdma_imr_1_set = 0,
	.phy_intf_imr_reg = R_AX_PHYINFO_ERR_IMR,
	.phy_intf_imr_clr = 0,
	.phy_intf_imr_set = 0,
	.rmac_imr_reg = R_AX_RMAC_ERR_ISR,
	.rmac_imr_clr = B_AX_RMAC_IMR_CLR,
	.rmac_imr_set = B_AX_RMAC_IMR_SET,
	.tmac_imr_reg = R_AX_TMAC_ERR_IMR_ISR,
	.tmac_imr_clr = B_AX_TMAC_IMR_CLR,
	.tmac_imr_set = B_AX_TMAC_IMR_SET,
};

static const struct rtw89_xtal_info rtw8851b_xtal_info = {
	.xcap_reg = R_AX_XTAL_ON_CTRL3,
	.sc_xo_mask = B_AX_XTAL_SC_XO_A_BLOCK_MASK,
	.sc_xi_mask = B_AX_XTAL_SC_XI_A_BLOCK_MASK,
};

static const struct rtw89_rrsr_cfgs rtw8851b_rrsr_cfgs = {
	.ref_rate = {R_AX_TRXPTCL_RRSR_CTL_0, B_AX_WMAC_RESP_REF_RATE_SEL, 0},
	.rsc = {R_AX_TRXPTCL_RRSR_CTL_0, B_AX_WMAC_RESP_RSC_MASK, 2},
};

static const struct rtw89_dig_regs rtw8851b_dig_regs = {
	.seg0_pd_reg = R_SEG0R_PD_V1,
	.pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
	.pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK_V1,
	.bmode_pd_reg = R_BMODE_PDTH_EN_V1,
	.bmode_cca_rssi_limit_en = B_BMODE_PDTH_LIMIT_EN_MSK_V1,
	.bmode_pd_lower_bound_reg = R_BMODE_PDTH_V1,
	.bmode_rssi_nocca_low_th_mask = B_BMODE_PDTH_LOWER_BOUND_MSK_V1,
	.p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
	.p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
	.p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
	.p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
	.p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
	.p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
	.p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V2,
			      B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V2,
			      B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V2,
			      B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V2,
			      B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
};

static const struct rtw89_btc_rf_trx_para rtw89_btc_8851b_rf_ul[] = {
	{255, 0, 0, 7}, /* 0 -> original */
	{255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */
	{255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
	{255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */
	{6, 1, 0, 7},
	{13, 1, 0, 7},
	{13, 1, 0, 7}
};

static const struct rtw89_btc_rf_trx_para rtw89_btc_8851b_rf_dl[] = {
	{255, 0, 0, 7}, /* 0 -> original */
	{255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
	{255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */
	{255, 1, 0, 7},
	{255, 1, 0, 7},
	{255, 1, 0, 7}
};

static const struct rtw89_btc_fbtc_mreg rtw89_btc_8851b_mon_reg[] = {
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda24),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda28),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda2c),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda30),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda4c),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda10),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda20),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda34),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xcef4),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x8424),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xd200),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xd220),
	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980),
	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x4738),
	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x4688),
	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x4694),
};

static const u8 rtw89_btc_8851b_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {70, 60, 50, 40};
static const u8 rtw89_btc_8851b_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {50, 40, 30, 20};
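
/* Power-on: disable the auto power-state machines, request system power and
 * wait for B_AX_RDY_SYSPWR, switch the MAC on via B_AX_APFN_ONMAC and poll
 * for completion, program the XTAL SI interface, then enable the DMAC/CMAC
 * function blocks.
 */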
static int rtw8851b_pwr_on_func(struct rtw89_dev *rtwdev)
{
	u32 val32;
	u8 val8;
	u32 ret;

	rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_AFSM_WLSUS_EN |
						    B_AX_AFSM_PCIE_SUS_EN);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_DIS_WLBT_PDNSUSEN_SOPC);
	rtw89_write32_set(rtwdev, R_AX_WLLPS_CTRL, B_AX_DIS_WLBT_LPSEN_LOPC);
	rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APDM_HPDN);
	rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_SWLPS);

	ret = read_poll_timeout(rtw89_read32, val32, val32 & B_AX_RDY_SYSPWR,
				1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
	if (ret)
		return ret;

	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_EN_WLON);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFN_ONMAC);

	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_AX_APFN_ONMAC),
				1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
	if (ret)
		return ret;

	rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
	rtw89_write8_clr(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
	rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
	rtw89_write8_clr(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
	rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);

	rtw89_write32_clr(rtwdev, R_AX_SYS_SDIO_CTRL, B_AX_PCIE_CALIB_EN_V1);

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_OFF_WEI,
				      XTAL_SI_OFF_WEI);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_OFF_EI,
				      XTAL_SI_OFF_EI);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_RFC2RF);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_PON_WEI,
				      XTAL_SI_PON_WEI);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_PON_EI,
				      XTAL_SI_PON_EI);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_SRAM2RFC);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_SRAM_CTRL, 0, XTAL_SI_SRAM_DIS);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_XMD_2, 0, XTAL_SI_LDO_LPS);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_XMD_4, 0, XTAL_SI_LPS_CAP);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_DRV, 0, XTAL_SI_DRV_LATCH);
	if (ret)
		return ret;

	rtw89_write32_set(rtwdev, R_AX_PMC_DBG_CTRL2, B_AX_SYSON_DIS_PMCR_AX_WRMSK);
	rtw89_write32_set(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_ISO_EB2CORE);
	rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_PWC_EV2EF_B15);

	fsleep(1000);

	rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_PWC_EV2EF_B14);
	rtw89_write32_clr(rtwdev, R_AX_PMC_DBG_CTRL2, B_AX_SYSON_DIS_PMCR_AX_WRMSK);
	rtw89_write32_set(rtwdev, R_AX_GPIO0_16_EECS_EESK_LED1_PULL_LOW_EN,
			  B_AX_GPIO10_PULL_LOW_EN | B_AX_GPIO16_PULL_LOW_EN_V1);

	if (rtwdev->hal.cv == CHIP_CAV) {
		ret = rtw89_read_efuse_ver(rtwdev, &val8);
		if (!ret)
			rtwdev->hal.cv = val8;
	}

	rtw89_write32_clr(rtwdev, R_AX_WLAN_XTAL_SI_CONFIG,
			  B_AX_XTAL_SI_ADDR_NOT_CHK);

	if (rtwdev->hal.cv != CHIP_CAV) {
		rtw89_write32_set(rtwdev, R_AX_SPSLDO_ON_CTRL1, B_AX_FPWMDELAY);
		rtw89_write32_set(rtwdev, R_AX_SPSANA_ON_CTRL1, B_AX_FPWMDELAY);
	}

	rtw89_write32_set(rtwdev, R_AX_DMAC_FUNC_EN,
			  B_AX_MAC_FUNC_EN | B_AX_DMAC_FUNC_EN | B_AX_MPDU_PROC_EN |
			  B_AX_WD_RLS_EN | B_AX_DLE_WDE_EN | B_AX_TXPKT_CTRL_EN |
			  B_AX_STA_SCH_EN | B_AX_DLE_PLE_EN | B_AX_PKT_BUF_EN |
			  B_AX_DMAC_TBL_EN | B_AX_PKT_IN_EN | B_AX_DLE_CPUIO_EN |
			  B_AX_DISPATCHER_EN | B_AX_BBRPT_EN | B_AX_MAC_SEC_EN |
			  B_AX_DMACREG_GCKEN);
	rtw89_write32_set(rtwdev, R_AX_CMAC_FUNC_EN,
			  B_AX_CMAC_EN | B_AX_CMAC_TXEN | B_AX_CMAC_RXEN |
			  B_AX_FORCE_CMACREG_GCKEN | B_AX_PHYINTF_EN | B_AX_CMAC_DMA_EN |
			  B_AX_PTCLTOP_EN | B_AX_SCHEDULER_EN | B_AX_TMAC_EN |
			  B_AX_RMAC_EN);

	rtw89_write32_mask(rtwdev, R_AX_EECS_EESK_FUNC_SEL, B_AX_PINMUX_EESK_FUNC_SEL_MASK,
			   PINMUX_EESK_FUNC_SEL_BT_LOG);

	return 0;
}

static void rtw8851b_patch_swr_pfm2pwm(struct rtw89_dev *rtwdev)
{
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_SOP_PWMM_DSWR);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_SOP_ASWRM);
	rtw89_write32_set(rtwdev, R_AX_WLLPS_CTRL, B_AX_LPSOP_DSWRM);
	rtw89_write32_set(rtwdev, R_AX_WLLPS_CTRL, B_AX_LPSOP_ASWRM);
}
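
/* Power-off: undo the XTAL SI configuration, disable the BB function blocks,
 * switch the MAC off via B_AX_APFM_OFFMAC, then request SW LPS, forcing the
 * switching regulators from PFM to PWM mode on CAV parts.
 */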
static int rtw8851b_pwr_off_func(struct rtw89_dev *rtwdev)
{
	u32 val32;
	u32 ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_RFC2RF,
				      XTAL_SI_RFC2RF);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_OFF_EI);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_OFF_WEI);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0, XTAL_SI_RF00);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_SRAM2RFC,
				      XTAL_SI_SRAM2RFC);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_PON_EI);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_PON_WEI);
	if (ret)
		return ret;

	rtw89_write32_set(rtwdev, R_AX_WLAN_XTAL_SI_CONFIG,
			  B_AX_XTAL_SI_ADDR_NOT_CHK);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_EN_WLON);
	rtw89_write32_clr(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
	rtw89_write8_clr(rtwdev, R_AX_SYS_FUNC_EN, B_AX_FEN_BB_GLB_RSTN | B_AX_FEN_BBRSTB);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_OFFMAC);

	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_AX_APFM_OFFMAC),
				1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
	if (ret)
		return ret;

	rtw89_write32(rtwdev, R_AX_WLLPS_CTRL, SW_LPS_OPTION);

	if (rtwdev->hal.cv == CHIP_CAV) {
		rtw8851b_patch_swr_pfm2pwm(rtwdev);
	} else {
		rtw89_write32_set(rtwdev, R_AX_SPSLDO_ON_CTRL1, B_AX_FPWMDELAY);
		rtw89_write32_set(rtwdev, R_AX_SPSANA_ON_CTRL1, B_AX_FPWMDELAY);
	}

	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_SWLPS);

	return 0;
}

static void rtw8851b_efuse_parsing(struct rtw89_efuse *efuse,
				   struct rtw8851b_efuse *map)
{
	ether_addr_copy(efuse->addr, map->e.mac_addr);
	efuse->rfe_type = map->rfe_type;
	efuse->xtal_cap = map->xtal_k;
}

static void rtw8851b_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
					struct rtw8851b_efuse *map)
{
	struct rtw89_tssi_info *tssi = &rtwdev->tssi;
	struct rtw8851b_tssi_offset *ofst[] = {&map->path_a_tssi};
	u8 i, j;

	tssi->thermal[RF_PATH_A] = map->path_a_therm;

	for (i = 0; i < RF_PATH_NUM_8851B; i++) {
		memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
		       sizeof(ofst[i]->cck_tssi));

		for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
			rtw89_debug(rtwdev, RTW89_DBG_TSSI,
				    "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
				    i, j, tssi->tssi_cck[i][j]);

		memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
		       sizeof(ofst[i]->bw40_tssi));
		memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
		       ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));

		for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
			rtw89_debug(rtwdev, RTW89_DBG_TSSI,
				    "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
				    i, j, tssi->tssi_mcs[i][j]);
	}
}
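
/* Each efuse gain byte packs two signed 4-bit values (high and low nibble).
 * Returns false when the byte reads back as 0xff, i.e. not programmed.
 */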
static bool _decode_efuse_gain(u8 data, s8 *high, s8 *low)
{
	if (high)
		*high = sign_extend32(u8_get_bits(data, GENMASK(7, 4)), 3);
	if (low)
		*low = sign_extend32(u8_get_bits(data, GENMASK(3, 0)), 3);

	return data != 0xff;
}

static void rtw8851b_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev,
					       struct rtw8851b_efuse *map)
{
	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
	bool valid = false;

	valid |= _decode_efuse_gain(map->rx_gain_2g_cck,
				    &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK],
				    NULL);
	valid |= _decode_efuse_gain(map->rx_gain_2g_ofdm,
				    &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM],
				    NULL);
	valid |= _decode_efuse_gain(map->rx_gain_5g_low,
				    &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW],
				    NULL);
	valid |= _decode_efuse_gain(map->rx_gain_5g_mid,
				    &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID],
				    NULL);
	valid |= _decode_efuse_gain(map->rx_gain_5g_high,
				    &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH],
				    NULL);

	gain->offset_valid = valid;
}

static int rtw8851b_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map)
{
	struct rtw89_efuse *efuse = &rtwdev->efuse;
	struct rtw8851b_efuse *map;

	map = (struct rtw8851b_efuse *)log_map;

	efuse->country_code[0] = map->country_code[0];
	efuse->country_code[1] = map->country_code[1];
	rtw8851b_efuse_parsing_tssi(rtwdev, map);
	rtw8851b_efuse_parsing_gain_offset(rtwdev, map);

	switch (rtwdev->hci.type) {
	case RTW89_HCI_TYPE_PCIE:
		rtw8851b_efuse_parsing(efuse, map);
		break;
	default:
		return -EOPNOTSUPP;
	}

	rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);

	return 0;
}

static void rtw8851b_phycap_parsing_tssi(struct rtw89_dev *rtwdev, u8 *phycap_map)
{
	struct rtw89_tssi_info *tssi = &rtwdev->tssi;
	static const u32 tssi_trim_addr[RF_PATH_NUM_8851B] = {0x5D6};
	u32 addr = rtwdev->chip->phycap_addr;
	bool pg = false;
	u32 ofst;
	u8 i, j;

	for (i = 0; i < RF_PATH_NUM_8851B; i++) {
		for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM; j++) {
			/* addrs are in decreasing order */
			ofst = tssi_trim_addr[i] - addr - j;
			tssi->tssi_trim[i][j] = phycap_map[ofst];

			if (phycap_map[ofst] != 0xff)
				pg = true;
		}
	}

	if (!pg) {
		memset(tssi->tssi_trim, 0, sizeof(tssi->tssi_trim));
		rtw89_debug(rtwdev, RTW89_DBG_TSSI,
			    "[TSSI][TRIM] no PG, set all trim info to 0\n");
	}

	for (i = 0; i < RF_PATH_NUM_8851B; i++)
		for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM; j++)
			rtw89_debug(rtwdev, RTW89_DBG_TSSI,
				    "[TSSI] path=%d idx=%d trim=0x%x addr=0x%x\n",
				    i, j, tssi->tssi_trim[i][j],
				    tssi_trim_addr[i] - j);
}

static void rtw8851b_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
						 u8 *phycap_map)
{
	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
	static const u32 thm_trim_addr[RF_PATH_NUM_8851B] = {0x5DF};
	u32 addr = rtwdev->chip->phycap_addr;
	u8 i;

	for (i = 0; i < RF_PATH_NUM_8851B; i++) {
		info->thermal_trim[i] = phycap_map[thm_trim_addr[i] - addr];

		rtw89_debug(rtwdev, RTW89_DBG_RFK,
			    "[THERMAL][TRIM] path=%d thermal_trim=0x%x\n",
			    i, info->thermal_trim[i]);

		if (info->thermal_trim[i] != 0xff)
			info->pg_thermal_trim = true;
	}
}

static void rtw8851b_thermal_trim(struct rtw89_dev *rtwdev)
{
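/* __thm_setting(): remap the raw efuse trim byte into the RR_TM2_OFF field:
 * bit 0 is placed at bit 3 and bits [4:1] are shifted down by one.
 */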
#define __thm_setting(raw) \
({ \
	u8 __v = (raw); \
	((__v & 0x1) << 3) | ((__v & 0x1f) >> 1); \
})
	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
	u8 i, val;

	if (!info->pg_thermal_trim) {
		rtw89_debug(rtwdev, RTW89_DBG_RFK,
			    "[THERMAL][TRIM] no PG, do nothing\n");

		return;
	}

	for (i = 0; i < RF_PATH_NUM_8851B; i++) {
		val = __thm_setting(info->thermal_trim[i]);
		rtw89_write_rf(rtwdev, i, RR_TM2, RR_TM2_OFF, val);

		rtw89_debug(rtwdev, RTW89_DBG_RFK,
			    "[THERMAL][TRIM] path=%d thermal_setting=0x%x\n",
			    i, val);
	}
#undef __thm_setting
}

static void rtw8851b_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
						 u8 *phycap_map)
{
	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
	static const u32 pabias_trim_addr[] = {0x5DE};
	u32 addr = rtwdev->chip->phycap_addr;
	u8 i;

	for (i = 0; i < RF_PATH_NUM_8851B; i++) {
		info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];

		rtw89_debug(rtwdev, RTW89_DBG_RFK,
			    "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
			    i, info->pa_bias_trim[i]);

		if (info->pa_bias_trim[i] != 0xff)
			info->pg_pa_bias_trim = true;
	}
}

static void rtw8851b_pa_bias_trim(struct rtw89_dev *rtwdev)
{
	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
	u8 pabias_2g, pabias_5g;
	u8 i;

	if (!info->pg_pa_bias_trim) {
		rtw89_debug(rtwdev, RTW89_DBG_RFK,
			    "[PA_BIAS][TRIM] no PG, do nothing\n");

		return;
	}

	for (i = 0; i < RF_PATH_NUM_8851B; i++) {
		pabias_2g = u8_get_bits(info->pa_bias_trim[i], GENMASK(3, 0));
		pabias_5g = u8_get_bits(info->pa_bias_trim[i], GENMASK(7, 4));

		rtw89_debug(rtwdev, RTW89_DBG_RFK,
			    "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
			    i, pabias_2g, pabias_5g);

		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG, pabias_2g);
		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA, pabias_5g);
	}
}

static void rtw8851b_phycap_parsing_gain_comp(struct rtw89_dev *rtwdev, u8 *phycap_map)
{
	static const u32 comp_addrs[][RTW89_SUBBAND_2GHZ_5GHZ_NR] = {
		{0x5BB, 0x5BA, 0, 0x5B9, 0x5B8},
	};
	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
	u32 phycap_addr = rtwdev->chip->phycap_addr;
	bool valid = false;
	int path, i;
	u8 data;

	for (path = 0; path < BB_PATH_NUM_8851B; path++)
		for (i = 0; i < RTW89_SUBBAND_2GHZ_5GHZ_NR; i++) {
			if (comp_addrs[path][i] == 0)
				continue;

			data = phycap_map[comp_addrs[path][i] - phycap_addr];
			valid |= _decode_efuse_gain(data, NULL,
						    &gain->comp[path][i]);
		}

	gain->comp_valid = valid;
}

static int rtw8851b_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
{
	rtw8851b_phycap_parsing_tssi(rtwdev, phycap_map);
	rtw8851b_phycap_parsing_thermal_trim(rtwdev, phycap_map);
	rtw8851b_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
	rtw8851b_phycap_parsing_gain_comp(rtwdev, phycap_map);

	return 0;
}

static void rtw8851b_set_bb_gpio(struct rtw89_dev *rtwdev, u8 gpio_idx, bool inv,
				 u8 src_sel)
{
	u32 addr, mask;

	if (gpio_idx >= 32)
		return;

	/* 2 consecutive 32-bit registers for 32 GPIOs, and each GPIO occupies 2 bits */
	addr = R_RFE_SEL0_A2 + (gpio_idx / 16) * sizeof(u32);
	mask = B_RFE_SEL0_MASK << (gpio_idx % 16) * 2;

	rtw89_phy_write32_mask(rtwdev, addr, mask, RF_PATH_A);
	rtw89_phy_write32_mask(rtwdev, R_RFE_INV0, BIT(gpio_idx), inv);

	/* 4 consecutive 32-bit registers for 32 GPIOs, and each GPIO occupies 4 bits */
	addr = R_RFE_SEL0_BASE + (gpio_idx / 8) * sizeof(u32);
	mask = B_RFE_SEL0_SRC_MASK << (gpio_idx % 8) * 4;

	rtw89_phy_write32_mask(rtwdev, addr, mask, src_sel);
}

static void rtw8851b_set_mac_gpio(struct rtw89_dev *rtwdev, u8 func)
{
	static const struct rtw89_reg3_def func16 = {
		R_AX_GPIO16_23_FUNC_SEL, B_AX_PINMUX_GPIO16_FUNC_SEL_MASK, BIT(3)
	};
	static const struct rtw89_reg3_def func17 = {
		R_AX_GPIO16_23_FUNC_SEL, B_AX_PINMUX_GPIO17_FUNC_SEL_MASK, BIT(7) >> 4,
	};
	const struct rtw89_reg3_def *def;

	switch (func) {
	case 16:
		def = &func16;
		break;
	case 17:
		def = &func17;
		break;
	default:
		rtw89_warn(rtwdev, "undefined gpio func %d\n", func);
		return;
	}

	rtw89_write8_mask(rtwdev, def->addr, def->mask, def->data);
}

static void rtw8851b_rfe_gpio(struct rtw89_dev *rtwdev)
{
	u8 rfe_type = rtwdev->efuse.rfe_type;

	if (rfe_type > 50)
		return;

	if (rfe_type % 3 == 2) {
		rtw8851b_set_bb_gpio(rtwdev, 16, true, RFE_SEL0_SRC_ANTSEL_0);
		rtw8851b_set_bb_gpio(rtwdev, 17, false, RFE_SEL0_SRC_ANTSEL_0);

		rtw8851b_set_mac_gpio(rtwdev, 16);
		rtw8851b_set_mac_gpio(rtwdev, 17);
	}
}

static void rtw8851b_power_trim(struct rtw89_dev *rtwdev)
{
	rtw8851b_thermal_trim(rtwdev);
	rtw8851b_pa_bias_trim(rtwdev);
}

static void rtw8851b_set_channel_mac(struct rtw89_dev *rtwdev,
				     const struct rtw89_chan *chan,
				     u8 mac_idx)
{
	u32 sub_carr = rtw89_mac_reg_by_idx(rtwdev, R_AX_TX_SUB_CARRIER_VALUE, mac_idx);
	u32 chk_rate = rtw89_mac_reg_by_idx(rtwdev, R_AX_TXRATE_CHK, mac_idx);
	u32 rf_mod = rtw89_mac_reg_by_idx(rtwdev, R_AX_WMAC_RFMOD, mac_idx);
	u8 txsc20 = 0, txsc40 = 0;

	switch (chan->band_width) {
	case RTW89_CHANNEL_WIDTH_80:
		txsc40 = rtw89_phy_get_txsc(rtwdev, chan, RTW89_CHANNEL_WIDTH_40);
		fallthrough;
	case RTW89_CHANNEL_WIDTH_40:
		txsc20 = rtw89_phy_get_txsc(rtwdev, chan, RTW89_CHANNEL_WIDTH_20);
		break;
	default:
		break;
	}

	switch (chan->band_width) {
	case RTW89_CHANNEL_WIDTH_80:
		rtw89_write8_mask(rtwdev, rf_mod, B_AX_WMAC_RFMOD_MASK, BIT(1));
		rtw89_write32(rtwdev, sub_carr, txsc20 | (txsc40 << 4));
		break;
	case RTW89_CHANNEL_WIDTH_40:
		rtw89_write8_mask(rtwdev, rf_mod, B_AX_WMAC_RFMOD_MASK, BIT(0));
		rtw89_write32(rtwdev, sub_carr, txsc20);
		break;
	case RTW89_CHANNEL_WIDTH_20:
		rtw89_write8_clr(rtwdev, rf_mod, B_AX_WMAC_RFMOD_MASK);
		rtw89_write32(rtwdev, sub_carr, 0);
		break;
	default:
		break;
	}

	if (chan->channel > 14) {
		rtw89_write8_clr(rtwdev, chk_rate, B_AX_BAND_MODE);
		rtw89_write8_set(rtwdev, chk_rate,
				 B_AX_CHECK_CCK_EN | B_AX_RTS_LIMIT_IN_OFDM6);
	} else {
		rtw89_write8_set(rtwdev, chk_rate, B_AX_BAND_MODE);
		rtw89_write8_clr(rtwdev, chk_rate,
				 B_AX_CHECK_CCK_EN | B_AX_RTS_LIMIT_IN_OFDM6);
	}
}
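
/* SCO compensation thresholds for Barker/CCK reception, indexed by
 * 2.4 GHz channel number (1-14).
 */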
static const u32 rtw8851b_sco_barker_threshold[14] = {
	0x1cfea, 0x1d0e1, 0x1d1d7, 0x1d2cd, 0x1d3c3, 0x1d4b9, 0x1d5b0, 0x1d6a6,
	0x1d79c, 0x1d892, 0x1d988, 0x1da7f, 0x1db75, 0x1ddc4
};

static const u32 rtw8851b_sco_cck_threshold[14] = {
	0x27de3, 0x27f35, 0x28088, 0x281da, 0x2832d, 0x2847f, 0x285d2, 0x28724,
	0x28877, 0x289c9, 0x28b1c, 0x28c6e, 0x28dc1, 0x290ed
};

static void rtw8851b_ctrl_sco_cck(struct rtw89_dev *rtwdev, u8 primary_ch)
{
	u8 ch_element = primary_ch - 1;

	rtw89_phy_write32_mask(rtwdev, R_RXSCOBC, B_RXSCOBC_TH,
			       rtw8851b_sco_barker_threshold[ch_element]);
	rtw89_phy_write32_mask(rtwdev, R_RXSCOCCK, B_RXSCOCCK_TH,
			       rtw8851b_sco_cck_threshold[ch_element]);
}
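
/* Map the center channel to the SCO compensation value programmed into
 * B_FC0_BW_INV; returns 0 when no mapping is defined for the channel.
 */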
static u8 rtw8851b_sco_mapping(u8 central_ch)
{
	if (central_ch == 1)
		return 109;
	else if (central_ch >= 2 && central_ch <= 6)
		return 108;
	else if (central_ch >= 7 && central_ch <= 10)
		return 107;
	else if (central_ch >= 11 && central_ch <= 14)
		return 106;
	else if (central_ch == 36 || central_ch == 38)
		return 51;
	else if (central_ch >= 40 && central_ch <= 58)
		return 50;
	else if (central_ch >= 60 && central_ch <= 64)
		return 49;
	else if (central_ch == 100 || central_ch == 102)
		return 48;
	else if (central_ch >= 104 && central_ch <= 126)
		return 47;
	else if (central_ch >= 128 && central_ch <= 151)
		return 46;
	else if (central_ch >= 153 && central_ch <= 177)
		return 45;
	else
		return 0;
}

struct rtw8851b_bb_gain {
	u32 gain_g[BB_PATH_NUM_8851B];
	u32 gain_a[BB_PATH_NUM_8851B];
	u32 gain_mask;
};

static const struct rtw8851b_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
	{ .gain_g = {0x4678}, .gain_a = {0x45DC},
	  .gain_mask = 0x00ff0000 },
	{ .gain_g = {0x4678}, .gain_a = {0x45DC},
	  .gain_mask = 0xff000000 },
	{ .gain_g = {0x467C}, .gain_a = {0x4660},
	  .gain_mask = 0x000000ff },
	{ .gain_g = {0x467C}, .gain_a = {0x4660},
	  .gain_mask = 0x0000ff00 },
	{ .gain_g = {0x467C}, .gain_a = {0x4660},
	  .gain_mask = 0x00ff0000 },
	{ .gain_g = {0x467C}, .gain_a = {0x4660},
	  .gain_mask = 0xff000000 },
	{ .gain_g = {0x4680}, .gain_a = {0x4664},
	  .gain_mask = 0x000000ff },
};

static const struct rtw8851b_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
	{ .gain_g = {0x4680}, .gain_a = {0x4664},
	  .gain_mask = 0x00ff0000 },
	{ .gain_g = {0x4680}, .gain_a = {0x4664},
	  .gain_mask = 0xff000000 },
};

static void rtw8851b_set_gain_error(struct rtw89_dev *rtwdev,
				    enum rtw89_subband subband,
				    enum rtw89_rf_path path)
{
	const struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain;
	u8 gain_band = rtw89_subband_to_bb_gain_band(subband);
	s32 val;
	u32 reg;
	u32 mask;
	int i;

	for (i = 0; i < LNA_GAIN_NUM; i++) {
		if (subband == RTW89_CH_2G)
			reg = bb_gain_lna[i].gain_g[path];
		else
			reg = bb_gain_lna[i].gain_a[path];

		mask = bb_gain_lna[i].gain_mask;
		val = gain->lna_gain[gain_band][path][i];
		rtw89_phy_write32_mask(rtwdev, reg, mask, val);
	}

	for (i = 0; i < TIA_GAIN_NUM; i++) {
		if (subband == RTW89_CH_2G)
			reg = bb_gain_tia[i].gain_g[path];
		else
			reg = bb_gain_tia[i].gain_a[path];

		mask = bb_gain_tia[i].gain_mask;
		val = gain->tia_gain[gain_band][path][i];
		rtw89_phy_write32_mask(rtwdev, reg, mask, val);
	}
}

static void rtw8851b_set_gain_offset(struct rtw89_dev *rtwdev,
				     enum rtw89_subband subband,
				     enum rtw89_phy_idx phy_idx)
{
	static const u32 rssi_ofst_addr[] = {R_PATH0_G_TIA1_LNA6_OP1DB_V1};
	static const u32 gain_err_addr[] = {R_P0_AGC_RSVD};
	struct rtw89_phy_efuse_gain *efuse_gain = &rtwdev->efuse_gain;
	enum rtw89_gain_offset gain_ofdm_band;
	s32 offset_ofdm, offset_cck;
	s32 offset_a;
	s32 tmp;
	u8 path;

	if (!efuse_gain->comp_valid)
		goto next;

	for (path = RF_PATH_A; path < BB_PATH_NUM_8851B; path++) {
		tmp = efuse_gain->comp[path][subband];
		tmp = clamp_t(s32, tmp << 2, S8_MIN, S8_MAX);
		rtw89_phy_write32_mask(rtwdev, gain_err_addr[path], MASKBYTE0, tmp);
	}

next:
	if (!efuse_gain->offset_valid)
		return;

	gain_ofdm_band = rtw89_subband_to_gain_offset_band_of_ofdm(subband);

	offset_a = -efuse_gain->offset[RF_PATH_A][gain_ofdm_band];

	tmp = -((offset_a << 2) + (efuse_gain->offset_base[RTW89_PHY_0] >> 2));
	tmp = clamp_t(s32, tmp, S8_MIN, S8_MAX);
	rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[RF_PATH_A], B_PATH0_R_G_OFST_MASK, tmp);

	offset_ofdm = -efuse_gain->offset[RF_PATH_A][gain_ofdm_band];
	offset_cck = -efuse_gain->offset[RF_PATH_A][0];

	tmp = (offset_ofdm << 4) + efuse_gain->offset_base[RTW89_PHY_0];
	tmp = clamp_t(s32, tmp, S8_MIN, S8_MAX);
	rtw89_phy_write32_idx(rtwdev, R_P0_RPL1, B_P0_RPL1_BIAS_MASK, tmp, phy_idx);

	tmp = (offset_ofdm << 4) + efuse_gain->rssi_base[RTW89_PHY_0];
	tmp = clamp_t(s32, tmp, S8_MIN, S8_MAX);
	rtw89_phy_write32_idx(rtwdev, R_P1_RPL1, B_P0_RPL1_BIAS_MASK, tmp, phy_idx);

	if (subband == RTW89_CH_2G) {
		tmp = (offset_cck << 3) + (efuse_gain->offset_base[RTW89_PHY_0] >> 1);
		tmp = clamp_t(s32, tmp, S8_MIN >> 1, S8_MAX >> 1);
		rtw89_phy_write32_mask(rtwdev, R_RX_RPL_OFST,
				       B_RX_RPL_OFST_CCK_MASK, tmp);
	}
}

static
void rtw8851b_set_rxsc_rpl_comp(struct rtw89_dev *rtwdev, enum rtw89_subband subband)
{
	const struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain;
	u8 band = rtw89_subband_to_bb_gain_band(subband);
	u32 val;

	val = u32_encode_bits(gain->rpl_ofst_20[band][RF_PATH_A], B_P0_RPL1_20_MASK) |
	      u32_encode_bits(gain->rpl_ofst_40[band][RF_PATH_A][0], B_P0_RPL1_40_MASK) |
	      u32_encode_bits(gain->rpl_ofst_40[band][RF_PATH_A][1], B_P0_RPL1_41_MASK);
	val >>= B_P0_RPL1_SHIFT;
	rtw89_phy_write32_mask(rtwdev, R_P0_RPL1, B_P0_RPL1_MASK, val);
	rtw89_phy_write32_mask(rtwdev, R_P1_RPL1, B_P0_RPL1_MASK, val);

	val = u32_encode_bits(gain->rpl_ofst_40[band][RF_PATH_A][2], B_P0_RTL2_42_MASK) |
	      u32_encode_bits(gain->rpl_ofst_80[band][RF_PATH_A][0], B_P0_RTL2_80_MASK) |
	      u32_encode_bits(gain->rpl_ofst_80[band][RF_PATH_A][1], B_P0_RTL2_81_MASK) |
	      u32_encode_bits(gain->rpl_ofst_80[band][RF_PATH_A][10], B_P0_RTL2_8A_MASK);
	rtw89_phy_write32(rtwdev, R_P0_RPL2, val);
	rtw89_phy_write32(rtwdev, R_P1_RPL2, val);

	val = u32_encode_bits(gain->rpl_ofst_80[band][RF_PATH_A][2], B_P0_RTL3_82_MASK) |
	      u32_encode_bits(gain->rpl_ofst_80[band][RF_PATH_A][3], B_P0_RTL3_83_MASK) |
	      u32_encode_bits(gain->rpl_ofst_80[band][RF_PATH_A][4], B_P0_RTL3_84_MASK) |
	      u32_encode_bits(gain->rpl_ofst_80[band][RF_PATH_A][9], B_P0_RTL3_89_MASK);
	rtw89_phy_write32(rtwdev, R_P0_RPL3, val);
	rtw89_phy_write32(rtwdev, R_P1_RPL3, val);
}
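
/* Per-channel BB setup: 2G/5G band select, SCO compensation for the FC
 * setting, CCK TX FIR coefficients (channel 14 uses a dedicated set), and
 * gain error/offset plus RXSC RPL compensation.
 */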
static void rtw8851b_ctrl_ch(struct rtw89_dev *rtwdev,
			     const struct rtw89_chan *chan,
			     enum rtw89_phy_idx phy_idx)
{
	u8 subband = chan->subband_type;
	u8 central_ch = chan->channel;
	bool is_2g = central_ch <= 14;
	u8 sco_comp;

	if (is_2g)
		rtw89_phy_write32_idx(rtwdev, R_PATH0_BAND_SEL_V1,
				      B_PATH0_BAND_SEL_MSK_V1, 1, phy_idx);
	else
		rtw89_phy_write32_idx(rtwdev, R_PATH0_BAND_SEL_V1,
				      B_PATH0_BAND_SEL_MSK_V1, 0, phy_idx);

	/* SCO compensate FC setting */
	sco_comp = rtw8851b_sco_mapping(central_ch);
	rtw89_phy_write32_idx(rtwdev, R_FC0_BW_V1, B_FC0_BW_INV, sco_comp, phy_idx);

	if (chan->band_type == RTW89_BAND_6G)
		return;

	/* CCK parameters */
	if (central_ch == 14) {
		rtw89_phy_write32_mask(rtwdev, R_TXFIR0, B_TXFIR_C01, 0x3b13ff);
		rtw89_phy_write32_mask(rtwdev, R_TXFIR2, B_TXFIR_C23, 0x1c42de);
		rtw89_phy_write32_mask(rtwdev, R_TXFIR4, B_TXFIR_C45, 0xfdb0ad);
		rtw89_phy_write32_mask(rtwdev, R_TXFIR6, B_TXFIR_C67, 0xf60f6e);
		rtw89_phy_write32_mask(rtwdev, R_TXFIR8, B_TXFIR_C89, 0xfd8f92);
		rtw89_phy_write32_mask(rtwdev, R_TXFIRA, B_TXFIR_CAB, 0x2d011);
		rtw89_phy_write32_mask(rtwdev, R_TXFIRC, B_TXFIR_CCD, 0x1c02c);
		rtw89_phy_write32_mask(rtwdev, R_TXFIRE, B_TXFIR_CEF, 0xfff00a);
	} else {
		rtw89_phy_write32_mask(rtwdev, R_TXFIR0, B_TXFIR_C01, 0x3d23ff);
		rtw89_phy_write32_mask(rtwdev, R_TXFIR2, B_TXFIR_C23, 0x29b354);
		rtw89_phy_write32_mask(rtwdev, R_TXFIR4, B_TXFIR_C45, 0xfc1c8);
		rtw89_phy_write32_mask(rtwdev, R_TXFIR6, B_TXFIR_C67, 0xfdb053);
		rtw89_phy_write32_mask(rtwdev, R_TXFIR8, B_TXFIR_C89, 0xf86f9a);
		rtw89_phy_write32_mask(rtwdev, R_TXFIRA, B_TXFIR_CAB, 0xfaef92);
		rtw89_phy_write32_mask(rtwdev, R_TXFIRC, B_TXFIR_CCD, 0xfe5fcc);
		rtw89_phy_write32_mask(rtwdev, R_TXFIRE, B_TXFIR_CEF, 0xffdff5);
	}

	rtw8851b_set_gain_error(rtwdev, subband, RF_PATH_A);
	rtw8851b_set_gain_offset(rtwdev, subband, phy_idx);
	rtw8851b_set_rxsc_rpl_comp(rtwdev, subband);
}

static void rtw8851b_bw_setting(struct rtw89_dev *rtwdev, u8 bw)
{
	rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0, B_P0_CFCH_CTL, 0x8);
	rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0, B_P0_CFCH_EN, 0x2);
	rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0, B_P0_CFCH_BW0, 0x2);
	rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1, B_P0_CFCH_BW1, 0x4);
	rtw89_phy_write32_mask(rtwdev, R_DRCK, B_DRCK_MUL, 0xf);
	rtw89_phy_write32_mask(rtwdev, R_ADCMOD, B_ADCMOD_LP, 0xa);
	rtw89_phy_write32_mask(rtwdev, R_P0_RXCK, B_P0_RXCK_ADJ, 0x92);

	switch (bw) {
	case RTW89_CHANNEL_WIDTH_5:
		rtw89_phy_write32_mask(rtwdev, R_DCIM, B_DCIM_FR, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_WDADC, B_WDADC_SEL, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_ADDCK0D, B_ADDCK_DS, 0x1);
		break;
	case RTW89_CHANNEL_WIDTH_10:
		rtw89_phy_write32_mask(rtwdev, R_DCIM, B_DCIM_FR, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_WDADC, B_WDADC_SEL, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_ADDCK0D, B_ADDCK_DS, 0x0);
		break;
	case RTW89_CHANNEL_WIDTH_20:
		rtw89_phy_write32_mask(rtwdev, R_DCIM, B_DCIM_FR, 0x2);
		rtw89_phy_write32_mask(rtwdev, R_WDADC, B_WDADC_SEL, 0x2);
		rtw89_phy_write32_mask(rtwdev, R_ADDCK0D, B_ADDCK_DS, 0x0);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		rtw89_phy_write32_mask(rtwdev, R_DCIM, B_DCIM_FR, 0x2);
		rtw89_phy_write32_mask(rtwdev, R_WDADC, B_WDADC_SEL, 0x2);
		rtw89_phy_write32_mask(rtwdev, R_ADDCK0D, B_ADDCK_DS, 0x0);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		rtw89_phy_write32_mask(rtwdev, R_DCIM, B_DCIM_FR, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_WDADC, B_WDADC_SEL, 0x2);
		rtw89_phy_write32_mask(rtwdev, R_ADDCK0D, B_ADDCK_DS, 0x0);
		break;
	default:
		rtw89_warn(rtwdev, "Fail to set ADC\n");
	}
}

static void rtw8851b_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_ch, u8 bw,
			     enum rtw89_phy_idx phy_idx)
{
	switch (bw) {
	case RTW89_CHANNEL_WIDTH_5:
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW_V1, B_FC0_BW_SET, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_SBW, 0x1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_PRICH, 0x0, phy_idx);
		break;
	case RTW89_CHANNEL_WIDTH_10:
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW_V1, B_FC0_BW_SET, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_SBW, 0x2, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_PRICH, 0x0, phy_idx);
		break;
	case RTW89_CHANNEL_WIDTH_20:
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW_V1, B_FC0_BW_SET, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_SBW, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_PRICH, 0x0, phy_idx);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW_V1, B_FC0_BW_SET, 0x1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_SBW, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_PRICH,
				      pri_ch, phy_idx);

		/* CCK primary channel */
		if (pri_ch == RTW89_SC_20_UPPER)
			rtw89_phy_write32_mask(rtwdev, R_RXSC, B_RXSC_EN, 1);
		else
			rtw89_phy_write32_mask(rtwdev, R_RXSC, B_RXSC_EN, 0);

		break;
	case RTW89_CHANNEL_WIDTH_80:
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW_V1, B_FC0_BW_SET, 0x2, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_SBW, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD_V1, B_CHBW_MOD_PRICH,
				      pri_ch, phy_idx);
		break;
	default:
		rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri ch:%d)\n", bw,
			   pri_ch);
	}

	rtw8851b_bw_setting(rtwdev, bw);
}

static void rtw8851b_ctrl_cck_en(struct rtw89_dev *rtwdev, bool cck_en)
{
	if (cck_en) {
		rtw89_phy_write32_mask(rtwdev, R_RXCCA, B_RXCCA_DIS, 0);
		rtw89_phy_write32_mask(rtwdev, R_PD_ARBITER_OFF,
				       B_PD_ARBITER_OFF, 0);
		rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1);
	} else {
		rtw89_phy_write32_mask(rtwdev, R_RXCCA, B_RXCCA_DIS, 1);
		rtw89_phy_write32_mask(rtwdev, R_PD_ARBITER_OFF,
				       B_PD_ARBITER_OFF, 1);
		rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0);
	}
}
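
/* Return the spur frequency in MHz for center channels known to be affected,
 * or 0 when no spur handling is needed.
 */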
static u32 rtw8851b_spur_freq(struct rtw89_dev *rtwdev,
			      const struct rtw89_chan *chan)
{
	u8 center_chan = chan->channel;

	switch (chan->band_type) {
	case RTW89_BAND_5G:
		if (center_chan == 151 || center_chan == 153 ||
		    center_chan == 155 || center_chan == 163)
			return 5760;
		else if (center_chan == 54 || center_chan == 58)
			return 5280;
		break;
	default:
		break;
	}

	return 0;
}

#define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
#define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
#define MAX_TONE_NUM 2048

static void rtw8851b_set_csi_tone_idx(struct rtw89_dev *rtwdev,
				      const struct rtw89_chan *chan,
				      enum rtw89_phy_idx phy_idx)
{
	u32 spur_freq;
	s32 freq_diff, csi_idx, csi_tone_idx;

	spur_freq = rtw8851b_spur_freq(rtwdev, chan);
	if (spur_freq == 0) {
		rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_EN_V1, B_SEG0CSI_EN,
				      0, phy_idx);
		return;
	}

	freq_diff = (spur_freq - chan->freq) * 1000000;
	csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125);
	s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx);

	rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_V1, B_SEG0CSI_IDX,
			      csi_tone_idx, phy_idx);
	rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_EN_V1, B_SEG0CSI_EN, 1, phy_idx);
}

static const struct rtw89_nbi_reg_def rtw8851b_nbi_reg_def = {
	.notch1_idx = {0x46E4, 0xFF},
	.notch1_frac_idx = {0x46E4, 0xC00},
	.notch1_en = {0x46E4, 0x1000},
	.notch2_idx = {0x47A4, 0xFF},
	.notch2_frac_idx = {0x47A4, 0xC00},
	.notch2_en = {0x47A4, 0x1000},
};
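
/* Program the NBI notch filter with the integer and fractional tone index of
 * the spur, or disable both notches when the channel has no known spur.
 */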
static void rtw8851b_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
				      const struct rtw89_chan *chan)
{
	const struct rtw89_nbi_reg_def *nbi = &rtw8851b_nbi_reg_def;
	s32 nbi_frac_idx, nbi_frac_tone_idx;
	s32 nbi_idx, nbi_tone_idx;
	bool notch2_chk = false;
	u32 spur_freq, fc;
	s32 freq_diff;

	spur_freq = rtw8851b_spur_freq(rtwdev, chan);
	if (spur_freq == 0) {
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr,
				       nbi->notch1_en.mask, 0);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr,
				       nbi->notch2_en.mask, 0);
		return;
	}

	fc = chan->freq;
	if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
		fc = (spur_freq > fc) ? fc + 40 : fc - 40;
		if ((fc > spur_freq &&
		     chan->channel < chan->primary_channel) ||
		    (fc < spur_freq &&
		     chan->channel > chan->primary_channel))
			notch2_chk = true;
	}

	freq_diff = (spur_freq - fc) * 1000000;
	nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5,
					 &nbi_frac_idx);

	if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
		s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx);
	} else {
		u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
				128 : 256;

		s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx);
	}
	nbi_frac_tone_idx = s32_div_u32_round_closest(nbi_frac_idx,
						      CARRIER_SPACING_78_125);

	if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) {
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_idx.addr,
				       nbi->notch2_idx.mask, nbi_tone_idx);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_frac_idx.addr,
				       nbi->notch2_frac_idx.mask, nbi_frac_tone_idx);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr,
				       nbi->notch2_en.mask, 0);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr,
				       nbi->notch2_en.mask, 1);
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr,
				       nbi->notch1_en.mask, 0);
	} else {
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_idx.addr,
				       nbi->notch1_idx.mask, nbi_tone_idx);
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_frac_idx.addr,
				       nbi->notch1_frac_idx.mask, nbi_frac_tone_idx);
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr,
				       nbi->notch1_en.mask, 0);
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr,
				       nbi->notch1_en.mask, 1);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr,
				       nbi->notch2_en.mask, 0);
	}
}
  1105. static void rtw8851b_set_cfr(struct rtw89_dev *rtwdev, const struct rtw89_chan *chan)
  1106. {
  1107. if (chan->band_type == RTW89_BAND_2G &&
  1108. chan->band_width == RTW89_CHANNEL_WIDTH_20 &&
  1109. (chan->channel == 1 || chan->channel == 13)) {
  1110. rtw89_phy_write32_mask(rtwdev, R_PATH0_TX_CFR,
  1111. B_PATH0_TX_CFR_LGC0, 0xf8);
  1112. rtw89_phy_write32_mask(rtwdev, R_PATH0_TX_CFR,
  1113. B_PATH0_TX_CFR_LGC1, 0x120);
  1114. rtw89_phy_write32_mask(rtwdev, R_PATH0_TX_POLAR_CLIPPING,
  1115. B_PATH0_TX_POLAR_CLIPPING_LGC0, 0x0);
  1116. rtw89_phy_write32_mask(rtwdev, R_PATH0_TX_POLAR_CLIPPING,
  1117. B_PATH0_TX_POLAR_CLIPPING_LGC1, 0x3);
  1118. } else {
  1119. rtw89_phy_write32_mask(rtwdev, R_PATH0_TX_CFR,
  1120. B_PATH0_TX_CFR_LGC0, 0x120);
  1121. rtw89_phy_write32_mask(rtwdev, R_PATH0_TX_CFR,
  1122. B_PATH0_TX_CFR_LGC1, 0x3ff);
  1123. rtw89_phy_write32_mask(rtwdev, R_PATH0_TX_POLAR_CLIPPING,
  1124. B_PATH0_TX_POLAR_CLIPPING_LGC0, 0x3);
  1125. rtw89_phy_write32_mask(rtwdev, R_PATH0_TX_POLAR_CLIPPING,
  1126. B_PATH0_TX_POLAR_CLIPPING_LGC1, 0x7);
  1127. }
  1128. }
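/* 5 MHz edge mask control: for 40 MHz the mask is always enabled and its
 * side (high/low) follows the primary 20 MHz location; for 80 MHz it is
 * enabled only when the primary channel is the upmost or lowest 20 MHz
 * sub-channel; all other bandwidths leave the mask disabled.
 */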
  1129. static void rtw8851b_5m_mask(struct rtw89_dev *rtwdev, const struct rtw89_chan *chan,
  1130. enum rtw89_phy_idx phy_idx)
  1131. {
  1132. u8 pri_ch = chan->pri_ch_idx;
  1133. bool mask_5m_low;
  1134. bool mask_5m_en;
  1135. switch (chan->band_width) {
  1136. case RTW89_CHANNEL_WIDTH_40:
  1137. /* Prich=1: Mask 5M High, Prich=2: Mask 5M Low */
  1138. mask_5m_en = true;
  1139. mask_5m_low = pri_ch == RTW89_SC_20_LOWER;
  1140. break;
  1141. case RTW89_CHANNEL_WIDTH_80:
  1142. /* Prich=3: Mask 5M High, Prich=4: Mask 5M Low, Else: Disable */
  1143. mask_5m_en = pri_ch == RTW89_SC_20_UPMOST ||
  1144. pri_ch == RTW89_SC_20_LOWEST;
  1145. mask_5m_low = pri_ch == RTW89_SC_20_LOWEST;
  1146. break;
  1147. default:
  1148. mask_5m_en = false;
  1149. break;
  1150. }
  1151. if (!mask_5m_en) {
  1152. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_EN, 0x0);
  1153. rtw89_phy_write32_idx(rtwdev, R_ASSIGN_SBD_OPT_V1,
  1154. B_ASSIGN_SBD_OPT_EN_V1, 0x0, phy_idx);
  1155. return;
  1156. }
  1157. if (mask_5m_low) {
  1158. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_TH, 0x5);
  1159. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_EN, 0x1);
  1160. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_SB2, 0x0);
  1161. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_SB0, 0x1);
  1162. } else {
  1163. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_TH, 0x5);
  1164. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_EN, 0x1);
  1165. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_SB2, 0x1);
  1166. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET_V1, B_PATH0_5MDET_SB0, 0x0);
  1167. }
  1168. rtw89_phy_write32_idx(rtwdev, R_ASSIGN_SBD_OPT_V1,
  1169. B_ASSIGN_SBD_OPT_EN_V1, 0x1, phy_idx);
  1170. }
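/* Full BB reset: block HW SI read/write access (0x7), wait 1 us, toggle
 * B_RSTB_ASYNC_ALL high then low, re-enable SI access and finally release
 * the async reset by writing 1 again.
 */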
  1171. static void rtw8851b_bb_reset_all(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
  1172. {
  1173. rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS, B_S0_HW_SI_DIS_W_R_TRIG, 0x7, phy_idx);
  1174. fsleep(1);
  1175. rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
  1176. rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0, phy_idx);
  1177. rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS, B_S0_HW_SI_DIS_W_R_TRIG, 0x0, phy_idx);
  1178. rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
  1179. }
  1180. static void rtw8851b_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
  1181. enum rtw89_phy_idx phy_idx, bool en)
  1182. {
  1183. if (en) {
  1184. rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS,
  1185. B_S0_HW_SI_DIS_W_R_TRIG, 0x0, phy_idx);
  1186. rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
  1187. if (band == RTW89_BAND_2G)
  1188. rtw89_phy_write32_mask(rtwdev, R_RXCCA, B_RXCCA_DIS, 0x0);
  1189. rtw89_phy_write32_mask(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0);
  1190. } else {
  1191. rtw89_phy_write32_mask(rtwdev, R_RXCCA, B_RXCCA_DIS, 0x1);
  1192. rtw89_phy_write32_mask(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1);
  1193. rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS,
  1194. B_S0_HW_SI_DIS_W_R_TRIG, 0x7, phy_idx);
  1195. fsleep(1);
  1196. rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0, phy_idx);
  1197. }
  1198. }
  1199. static void rtw8851b_bb_reset(struct rtw89_dev *rtwdev,
  1200. enum rtw89_phy_idx phy_idx)
  1201. {
  1202. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
  1203. B_P0_TXPW_RSTB_MANON | B_P0_TXPW_RSTB_TSSI, 0x1);
  1204. rtw89_phy_write32_set(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_TRK_EN);
  1205. rtw8851b_bb_reset_all(rtwdev, phy_idx);
  1206. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
  1207. B_P0_TXPW_RSTB_MANON | B_P0_TXPW_RSTB_TSSI, 0x3);
  1208. rtw89_phy_write32_clr(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_TRK_EN);
  1209. }
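/* TRSW GPIO mapping helper.  One (trsw_a, trsw_b) pair is written into
 * R_P0_TRSW at mask_ofst = 16 + (tx_path_en * 4 + trsw_tx * 2 + trsw_rx) * 2,
 * so each of the eight combinations programmed by rtw8851b_bb_gpio_init()
 * gets its own slot (assuming B_P0_TRSW_A/B are the two low bits).  Only
 * RF_PATH_A is handled on this single-path chip.
 */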
  1210. static
  1211. void rtw8851b_bb_gpio_trsw(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
  1212. u8 tx_path_en, u8 trsw_tx,
  1213. u8 trsw_rx, u8 trsw_a, u8 trsw_b)
  1214. {
  1215. u32 mask_ofst = 16;
  1216. u32 val;
  1217. if (path != RF_PATH_A)
  1218. return;
  1219. mask_ofst += (tx_path_en * 4 + trsw_tx * 2 + trsw_rx) * 2;
  1220. val = u32_encode_bits(trsw_a, B_P0_TRSW_A) |
  1221. u32_encode_bits(trsw_b, B_P0_TRSW_B);
  1222. rtw89_phy_write32_mask(rtwdev, R_P0_TRSW,
  1223. (B_P0_TRSW_A | B_P0_TRSW_B) << mask_ofst, val);
  1224. }
  1225. static void rtw8851b_bb_gpio_init(struct rtw89_dev *rtwdev)
  1226. {
  1227. rtw89_phy_write32_set(rtwdev, R_P0_TRSW, B_P0_TRSW_A);
  1228. rtw89_phy_write32_clr(rtwdev, R_P0_TRSW, B_P0_TRSW_X);
  1229. rtw89_phy_write32_clr(rtwdev, R_P0_TRSW, B_P0_TRSW_SO_A2);
  1230. rtw89_phy_write32(rtwdev, R_RFE_SEL0_BASE, 0x77777777);
  1231. rtw89_phy_write32(rtwdev, R_RFE_SEL32_BASE, 0x77777777);
  1232. rtw89_phy_write32(rtwdev, R_RFE_E_A2, 0xffffffff);
  1233. rtw89_phy_write32(rtwdev, R_RFE_O_SEL_A2, 0);
  1234. rtw89_phy_write32(rtwdev, R_RFE_SEL0_A2, 0);
  1235. rtw89_phy_write32(rtwdev, R_RFE_SEL32_A2, 0);
  1236. rtw8851b_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 0, 0, 0, 1);
  1237. rtw8851b_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 0, 1, 1, 0);
  1238. rtw8851b_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 1, 0, 1, 0);
  1239. rtw8851b_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 1, 1, 1, 0);
  1240. rtw8851b_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 0, 0, 0, 1);
  1241. rtw8851b_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 0, 1, 1, 0);
  1242. rtw8851b_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 1, 0, 1, 0);
  1243. rtw8851b_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 1, 1, 1, 0);
  1244. }
  1245. static void rtw8851b_bb_macid_ctrl_init(struct rtw89_dev *rtwdev,
  1246. enum rtw89_phy_idx phy_idx)
  1247. {
  1248. u32 addr;
  1249. for (addr = R_AX_PWR_MACID_LMT_TABLE0;
  1250. addr <= R_AX_PWR_MACID_LMT_TABLE127; addr += 4)
  1251. rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, 0);
  1252. }
  1253. static void rtw8851b_bb_sethw(struct rtw89_dev *rtwdev)
  1254. {
  1255. struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
  1256. rtw89_phy_write32_clr(rtwdev, R_P0_EN_SOUND_WO_NDP, B_P0_EN_SOUND_WO_NDP);
  1257. rtw8851b_bb_macid_ctrl_init(rtwdev, RTW89_PHY_0);
  1258. rtw8851b_bb_gpio_init(rtwdev);
  1259. rtw89_write32_clr(rtwdev, R_AX_PWR_NORM_FORCE1, B_AX_FORCE_NTX_VALUE);
  1260. rtw89_write32_set(rtwdev, R_AX_PWR_NORM_FORCE1, B_AX_FORCE_NTX_EN);
  1261. /* read these registers after loading BB parameters */
  1262. gain->offset_base[RTW89_PHY_0] =
  1263. rtw89_phy_read32_mask(rtwdev, R_P0_RPL1, B_P0_RPL1_BIAS_MASK);
  1264. gain->rssi_base[RTW89_PHY_0] =
  1265. rtw89_phy_read32_mask(rtwdev, R_P1_RPL1, B_P0_RPL1_BIAS_MASK);
  1266. }
  1267. static void rtw8851b_set_channel_bb(struct rtw89_dev *rtwdev, const struct rtw89_chan *chan,
  1268. enum rtw89_phy_idx phy_idx)
  1269. {
  1270. u8 band = chan->band_type, chan_idx;
  1271. bool cck_en = chan->channel <= 14;
  1272. u8 pri_ch_idx = chan->pri_ch_idx;
  1273. if (cck_en)
  1274. rtw8851b_ctrl_sco_cck(rtwdev, chan->primary_channel);
  1275. rtw8851b_ctrl_ch(rtwdev, chan, phy_idx);
  1276. rtw8851b_ctrl_bw(rtwdev, pri_ch_idx, chan->band_width, phy_idx);
  1277. rtw8851b_ctrl_cck_en(rtwdev, cck_en);
  1278. rtw8851b_set_nbi_tone_idx(rtwdev, chan);
  1279. rtw8851b_set_csi_tone_idx(rtwdev, chan, phy_idx);
  1280. if (chan->band_type == RTW89_BAND_5G) {
  1281. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
  1282. B_PATH0_BT_SHARE_V1, 0x0);
  1283. rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
  1284. B_PATH0_BTG_PATH_V1, 0x0);
  1285. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD_V1, B_BT_SHARE, 0x0);
  1286. rtw89_phy_write32_mask(rtwdev, R_FC0_BW_V1, B_ANT_RX_BT_SEG0, 0x0);
  1287. rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN_V1,
  1288. B_BT_DYN_DC_EST_EN_MSK, 0x0);
  1289. rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN, 0x0);
  1290. }
  1291. chan_idx = rtw89_encode_chan_idx(rtwdev, chan->primary_channel, band);
  1292. rtw89_phy_write32_mask(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx);
  1293. rtw8851b_5m_mask(rtwdev, chan, phy_idx);
  1294. rtw8851b_set_cfr(rtwdev, chan);
  1295. rtw8851b_bb_reset_all(rtwdev, phy_idx);
  1296. }
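/* Channel switch entry point: program the MAC, then the BB, then the RF
 * side for the new channel descriptor.
 */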
  1297. static void rtw8851b_set_channel(struct rtw89_dev *rtwdev,
  1298. const struct rtw89_chan *chan,
  1299. enum rtw89_mac_idx mac_idx,
  1300. enum rtw89_phy_idx phy_idx)
  1301. {
  1302. rtw8851b_set_channel_mac(rtwdev, chan, mac_idx);
  1303. rtw8851b_set_channel_bb(rtwdev, chan, phy_idx);
  1304. rtw8851b_set_channel_rf(rtwdev, chan, phy_idx);
  1305. }
  1306. static void rtw8851b_tssi_cont_en(struct rtw89_dev *rtwdev, bool en,
  1307. enum rtw89_rf_path path)
  1308. {
  1309. if (en) {
  1310. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, B_P0_TXPW_RSTB_MANON, 0x0);
  1311. rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_TRK_EN, 0x0);
  1312. } else {
  1313. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, B_P0_TXPW_RSTB_MANON, 0x1);
  1314. rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_TRK_EN, 0x1);
  1315. }
  1316. }
  1317. static void rtw8851b_tssi_cont_en_phyidx(struct rtw89_dev *rtwdev, bool en,
  1318. u8 phy_idx)
  1319. {
  1320. rtw8851b_tssi_cont_en(rtwdev, en, RF_PATH_A);
  1321. }
  1322. static void rtw8851b_adc_en(struct rtw89_dev *rtwdev, bool en)
  1323. {
  1324. if (en)
  1325. rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RST, 0x0);
  1326. else
  1327. rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RST, 0xf);
  1328. }
  1329. static void rtw8851b_set_channel_help(struct rtw89_dev *rtwdev, bool enter,
  1330. struct rtw89_channel_help_params *p,
  1331. const struct rtw89_chan *chan,
  1332. enum rtw89_mac_idx mac_idx,
  1333. enum rtw89_phy_idx phy_idx)
  1334. {
  1335. if (enter) {
  1336. rtw89_chip_stop_sch_tx(rtwdev, RTW89_MAC_0, &p->tx_en, RTW89_SCH_TX_SEL_ALL);
  1337. rtw89_mac_cfg_ppdu_status(rtwdev, RTW89_MAC_0, false);
  1338. rtw8851b_tssi_cont_en_phyidx(rtwdev, false, RTW89_PHY_0);
  1339. rtw8851b_adc_en(rtwdev, false);
  1340. fsleep(40);
  1341. rtw8851b_bb_reset_en(rtwdev, chan->band_type, phy_idx, false);
  1342. } else {
  1343. rtw89_mac_cfg_ppdu_status(rtwdev, RTW89_MAC_0, true);
  1344. rtw8851b_adc_en(rtwdev, true);
  1345. rtw8851b_tssi_cont_en_phyidx(rtwdev, true, RTW89_PHY_0);
  1346. rtw8851b_bb_reset_en(rtwdev, chan->band_type, phy_idx, true);
  1347. rtw89_chip_resume_sch_tx(rtwdev, RTW89_MAC_0, p->tx_en);
  1348. }
  1349. }
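/* One-time RF calibrations at init: LCK/DPK state init followed by AACK,
 * RCK, DACK and an initial RX DCK on PHY 0.  Per-channel calibrations
 * (RX DCK, IQK, TSSI, DPK) are re-run from rtw8851b_rfk_channel() below.
 */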
  1350. static void rtw8851b_rfk_init(struct rtw89_dev *rtwdev)
  1351. {
  1352. rtwdev->is_tssi_mode[RF_PATH_A] = false;
  1353. rtwdev->is_tssi_mode[RF_PATH_B] = false;
  1354. rtw8851b_lck_init(rtwdev);
  1355. rtw8851b_dpk_init(rtwdev);
  1356. rtw8851b_aack(rtwdev);
  1357. rtw8851b_rck(rtwdev);
  1358. rtw8851b_dack(rtwdev);
  1359. rtw8851b_rx_dck(rtwdev, RTW89_PHY_0);
  1360. }
  1361. static void rtw8851b_rfk_channel(struct rtw89_dev *rtwdev)
  1362. {
  1363. enum rtw89_phy_idx phy_idx = RTW89_PHY_0;
  1364. rtw8851b_rx_dck(rtwdev, phy_idx);
  1365. rtw8851b_iqk(rtwdev, phy_idx);
  1366. rtw8851b_tssi(rtwdev, phy_idx, true);
  1367. rtw8851b_dpk(rtwdev, phy_idx);
  1368. }
  1369. static void rtw8851b_rfk_band_changed(struct rtw89_dev *rtwdev,
  1370. enum rtw89_phy_idx phy_idx)
  1371. {
  1372. rtw8851b_tssi_scan(rtwdev, phy_idx);
  1373. }
  1374. static void rtw8851b_rfk_scan(struct rtw89_dev *rtwdev, bool start)
  1375. {
  1376. rtw8851b_wifi_scan_notify(rtwdev, start, RTW89_PHY_0);
  1377. }
  1378. static void rtw8851b_rfk_track(struct rtw89_dev *rtwdev)
  1379. {
  1380. rtw8851b_dpk_track(rtwdev);
  1381. rtw8851b_lck_track(rtwdev);
  1382. }
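/* Tx power reference codeword calculation.  The reference is expanded into
 * a fixed-point value pwr_s10_3 = (ref << 1) + ofst_int + (base_cw_0db << 3);
 * bits [2:0] become the BB codeword, bits [8:3] the RF codeword (clamped to
 * 15..63), and tssi_ofst_cw = tssi_16dbm_cw + (ref << 1) - (16 << 3).
 * Worked example for the ref = 0 case used by rtw8851b_set_txpwr_ref():
 * pwr_s10_3 = 0x27 << 3 = 312, so bb_cw = 0, rf_cw = 0x27, pwr_cw = 0x138
 * and tssi_ofst_cw = 0x12c - 128 = 172.
 */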
  1383. static u32 rtw8851b_bb_cal_txpwr_ref(struct rtw89_dev *rtwdev,
  1384. enum rtw89_phy_idx phy_idx, s16 ref)
  1385. {
  1386. const u16 tssi_16dbm_cw = 0x12c;
  1387. const u8 base_cw_0db = 0x27;
  1388. const s8 ofst_int = 0;
  1389. s16 pwr_s10_3;
  1390. s16 rf_pwr_cw;
  1391. u16 bb_pwr_cw;
  1392. u32 pwr_cw;
  1393. u32 tssi_ofst_cw;
  1394. pwr_s10_3 = (ref << 1) + (s16)(ofst_int) + (s16)(base_cw_0db << 3);
  1395. bb_pwr_cw = u16_get_bits(pwr_s10_3, GENMASK(2, 0));
  1396. rf_pwr_cw = u16_get_bits(pwr_s10_3, GENMASK(8, 3));
  1397. rf_pwr_cw = clamp_t(s16, rf_pwr_cw, 15, 63);
  1398. pwr_cw = (rf_pwr_cw << 3) | bb_pwr_cw;
  1399. tssi_ofst_cw = (u32)((s16)tssi_16dbm_cw + (ref << 1) - (16 << 3));
  1400. rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
  1401. "[TXPWR] tssi_ofst_cw=%d rf_cw=0x%x bb_cw=0x%x\n",
  1402. tssi_ofst_cw, rf_pwr_cw, bb_pwr_cw);
  1403. return u32_encode_bits(tssi_ofst_cw, B_DPD_TSSI_CW) |
  1404. u32_encode_bits(pwr_cw, B_DPD_PWR_CW) |
  1405. u32_encode_bits(ref, B_DPD_REF);
  1406. }
  1407. static void rtw8851b_set_txpwr_ref(struct rtw89_dev *rtwdev,
  1408. enum rtw89_phy_idx phy_idx)
  1409. {
  1410. static const u32 addr[RF_PATH_NUM_8851B] = {0x5800};
  1411. const u32 mask = B_DPD_TSSI_CW | B_DPD_PWR_CW | B_DPD_REF;
  1412. const u8 ofst_ofdm = 0x4;
  1413. const u8 ofst_cck = 0x8;
  1414. const s16 ref_ofdm = 0;
  1415. const s16 ref_cck = 0;
  1416. u32 val;
  1417. u8 i;
  1418. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n");
  1419. rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_AX_PWR_RATE_CTRL,
  1420. B_AX_PWR_REF, 0x0);
  1421. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set bb ofdm txpwr ref\n");
  1422. val = rtw8851b_bb_cal_txpwr_ref(rtwdev, phy_idx, ref_ofdm);
  1423. for (i = 0; i < RF_PATH_NUM_8851B; i++)
  1424. rtw89_phy_write32_idx(rtwdev, addr[i] + ofst_ofdm, mask, val,
  1425. phy_idx);
  1426. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set bb cck txpwr ref\n");
  1427. val = rtw8851b_bb_cal_txpwr_ref(rtwdev, phy_idx, ref_cck);
  1428. for (i = 0; i < RF_PATH_NUM_8851B; i++)
  1429. rtw89_phy_write32_idx(rtwdev, addr[i] + ofst_cck, mask, val,
  1430. phy_idx);
  1431. }
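/* 2 GHz Tx shaping DFIR: tx_shape_idx 0 selects the "flat" coefficient set,
 * anything else selects "sharp", and channel 14 always uses the dedicated
 * sharp_14 set.  Each set is eight 32-bit words written to consecutive
 * R_TXFIR0 registers.
 */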
  1432. static void rtw8851b_bb_set_tx_shape_dfir(struct rtw89_dev *rtwdev,
  1433. const struct rtw89_chan *chan,
  1434. u8 tx_shape_idx,
  1435. enum rtw89_phy_idx phy_idx)
  1436. {
  1437. #define __DFIR_CFG_ADDR(i) (R_TXFIR0 + ((i) << 2))
  1438. #define __DFIR_CFG_MASK 0xffffffff
  1439. #define __DFIR_CFG_NR 8
  1440. #define __DECL_DFIR_PARAM(_name, _val...) \
  1441. static const u32 param_ ## _name[] = {_val}; \
  1442. static_assert(ARRAY_SIZE(param_ ## _name) == __DFIR_CFG_NR)
  1443. __DECL_DFIR_PARAM(flat,
  1444. 0x023D23FF, 0x0029B354, 0x000FC1C8, 0x00FDB053,
  1445. 0x00F86F9A, 0x06FAEF92, 0x00FE5FCC, 0x00FFDFF5);
  1446. __DECL_DFIR_PARAM(sharp,
  1447. 0x023D83FF, 0x002C636A, 0x0013F204, 0x00008090,
  1448. 0x00F87FB0, 0x06F99F83, 0x00FDBFBA, 0x00003FF5);
  1449. __DECL_DFIR_PARAM(sharp_14,
  1450. 0x023B13FF, 0x001C42DE, 0x00FDB0AD, 0x00F60F6E,
  1451. 0x00FD8F92, 0x0602D011, 0x0001C02C, 0x00FFF00A);
  1452. u8 ch = chan->channel;
  1453. const u32 *param;
  1454. u32 addr;
  1455. int i;
  1456. if (ch > 14) {
  1457. rtw89_warn(rtwdev,
  1458. "set tx shape dfir by unknown ch: %d on 2G\n", ch);
  1459. return;
  1460. }
  1461. if (ch == 14)
  1462. param = param_sharp_14;
  1463. else
  1464. param = tx_shape_idx == 0 ? param_flat : param_sharp;
  1465. for (i = 0; i < __DFIR_CFG_NR; i++) {
  1466. addr = __DFIR_CFG_ADDR(i);
  1467. rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
  1468. "set tx shape dfir: 0x%x: 0x%x\n", addr, param[i]);
  1469. rtw89_phy_write32_idx(rtwdev, addr, __DFIR_CFG_MASK, param[i],
  1470. phy_idx);
  1471. }
  1472. #undef __DECL_DFIR_PARAM
  1473. #undef __DFIR_CFG_NR
  1474. #undef __DFIR_CFG_MASK
1475. #undef __DFIR_CFG_ADDR
  1476. }
  1477. static void rtw8851b_set_tx_shape(struct rtw89_dev *rtwdev,
  1478. const struct rtw89_chan *chan,
  1479. enum rtw89_phy_idx phy_idx)
  1480. {
  1481. const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms;
  1482. u8 band = chan->band_type;
  1483. u8 regd = rtw89_regd_get(rtwdev, band);
  1484. u8 tx_shape_cck = (*rfe_parms->tx_shape.lmt)[band][RTW89_RS_CCK][regd];
  1485. u8 tx_shape_ofdm = (*rfe_parms->tx_shape.lmt)[band][RTW89_RS_OFDM][regd];
  1486. if (band == RTW89_BAND_2G)
  1487. rtw8851b_bb_set_tx_shape_dfir(rtwdev, chan, tx_shape_cck, phy_idx);
  1488. rtw89_phy_write32_mask(rtwdev, R_DCFO_OPT, B_TXSHAPE_TRIANGULAR_CFG,
  1489. tx_shape_ofdm);
  1490. }
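/* Tx power programming order: by-rate table, rate offsets, Tx shaping,
 * regulatory limits and finally RU limits.
 */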
  1491. static void rtw8851b_set_txpwr(struct rtw89_dev *rtwdev,
  1492. const struct rtw89_chan *chan,
  1493. enum rtw89_phy_idx phy_idx)
  1494. {
  1495. rtw89_phy_set_txpwr_byrate(rtwdev, chan, phy_idx);
  1496. rtw89_phy_set_txpwr_offset(rtwdev, chan, phy_idx);
  1497. rtw8851b_set_tx_shape(rtwdev, chan, phy_idx);
  1498. rtw89_phy_set_txpwr_limit(rtwdev, chan, phy_idx);
  1499. rtw89_phy_set_txpwr_limit_ru(rtwdev, chan, phy_idx);
  1500. }
  1501. static void rtw8851b_set_txpwr_ctrl(struct rtw89_dev *rtwdev,
  1502. enum rtw89_phy_idx phy_idx)
  1503. {
  1504. rtw8851b_set_txpwr_ref(rtwdev, phy_idx);
  1505. }
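/* UL trigger-based Tx power offset: pw_ofst must fit the signed 5-bit range
 * [-16, 15].  The 1T field takes the offset as-is; the 2T field is written
 * 3 steps (presumably dB) lower, clamped at -16.
 */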
  1506. static
  1507. void rtw8851b_set_txpwr_ul_tb_offset(struct rtw89_dev *rtwdev,
  1508. s8 pw_ofst, enum rtw89_mac_idx mac_idx)
  1509. {
  1510. u32 reg;
  1511. if (pw_ofst < -16 || pw_ofst > 15) {
  1512. rtw89_warn(rtwdev, "[ULTB] Err pwr_offset=%d\n", pw_ofst);
  1513. return;
  1514. }
  1515. reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PWR_UL_TB_CTRL, mac_idx);
  1516. rtw89_write32_set(rtwdev, reg, B_AX_PWR_UL_TB_CTRL_EN);
  1517. reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PWR_UL_TB_1T, mac_idx);
  1518. rtw89_write32_mask(rtwdev, reg, B_AX_PWR_UL_TB_1T_MASK, pw_ofst);
  1519. pw_ofst = max_t(s8, pw_ofst - 3, -16);
  1520. reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PWR_UL_TB_2T, mac_idx);
  1521. rtw89_write32_mask(rtwdev, reg, B_AX_PWR_UL_TB_2T_MASK, pw_ofst);
  1522. }
  1523. static int
  1524. rtw8851b_init_txpwr_unit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
  1525. {
  1526. int ret;
  1527. ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_UL_CTRL2, 0x07763333);
  1528. if (ret)
  1529. return ret;
  1530. ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_COEXT_CTRL, 0x01ebf000);
  1531. if (ret)
  1532. return ret;
  1533. ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_UL_CTRL0, 0x0002f8ff);
  1534. if (ret)
  1535. return ret;
  1536. rtw8851b_set_txpwr_ul_tb_offset(rtwdev, 0, phy_idx == RTW89_PHY_1 ?
  1537. RTW89_MAC_1 : RTW89_MAC_0);
  1538. return 0;
  1539. }
  1540. static void rtw8851b_bb_ctrl_btc_preagc(struct rtw89_dev *rtwdev, bool bt_en)
  1541. {
  1542. const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
  1543. rtw89_phy_write_reg3_tbl(rtwdev, bt_en ? &rtw8851b_btc_preagc_en_defs_tbl :
  1544. &rtw8851b_btc_preagc_dis_defs_tbl);
  1545. if (!bt_en) {
  1546. if (chan->band_type == RTW89_BAND_2G) {
  1547. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  1548. B_PATH0_G_LNA6_OP1DB_V1, 0x20);
  1549. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  1550. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x30);
  1551. } else {
  1552. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  1553. B_PATH0_G_LNA6_OP1DB_V1, 0x1a);
  1554. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  1555. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x2a);
  1556. }
  1557. }
  1558. }
  1559. static void rtw8851b_ctrl_btg(struct rtw89_dev *rtwdev, bool btg)
  1560. {
  1561. const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
  1562. if (btg) {
  1563. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
  1564. B_PATH0_BT_SHARE_V1, 0x1);
  1565. rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
  1566. B_PATH0_BTG_PATH_V1, 0x1);
  1567. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  1568. B_PATH0_G_LNA6_OP1DB_V1, 0x20);
  1569. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  1570. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x30);
  1571. rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0);
  1572. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD_V1, B_BT_SHARE, 0x1);
  1573. rtw89_phy_write32_mask(rtwdev, R_FC0_BW_V1, B_ANT_RX_BT_SEG0, 0x1);
  1574. rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN_V1,
  1575. B_BT_DYN_DC_EST_EN_MSK, 0x1);
  1576. rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN, 0x1);
  1577. } else {
  1578. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
  1579. B_PATH0_BT_SHARE_V1, 0x0);
  1580. rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
  1581. B_PATH0_BTG_PATH_V1, 0x0);
  1582. if (chan->band_type == RTW89_BAND_2G) {
  1583. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  1584. B_PATH0_G_LNA6_OP1DB_V1, 0x80);
  1585. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  1586. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x80);
  1587. } else {
  1588. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  1589. B_PATH0_G_LNA6_OP1DB_V1, 0x1a);
  1590. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  1591. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x2a);
  1592. }
  1593. rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xc);
  1594. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD_V1, B_BT_SHARE, 0x0);
  1595. rtw89_phy_write32_mask(rtwdev, R_FC0_BW_V1, B_ANT_RX_BT_SEG0, 0x0);
  1596. rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN_V1,
  1597. B_BT_DYN_DC_EST_EN_MSK, 0x1);
  1598. rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN, 0x0);
  1599. }
  1600. }
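/* RX path control for the single-path (RF_A) case: route both CCA segments
 * to path A, cap the HT/VHT MCS and HE NSS limit fields for 1SS operation,
 * refresh the gain offset for the current sub-band and pulse the path-0
 * Tx power reset bits (1 then 3).
 */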
  1601. static void rtw8851b_bb_ctrl_rx_path(struct rtw89_dev *rtwdev,
  1602. enum rtw89_rf_path_bit rx_path)
  1603. {
  1604. const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
  1605. u32 rst_mask0;
  1606. if (rx_path == RF_A) {
  1607. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD_V1, B_ANT_RX_SEG0, 1);
  1608. rtw89_phy_write32_mask(rtwdev, R_FC0_BW_V1, B_ANT_RX_1RCCA_SEG0, 1);
  1609. rtw89_phy_write32_mask(rtwdev, R_FC0_BW_V1, B_ANT_RX_1RCCA_SEG1, 1);
  1610. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT, B_RXHT_MCS_LIMIT, 0);
  1611. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT, B_RXVHT_MCS_LIMIT, 0);
  1612. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_USER_MAX, 4);
  1613. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0);
  1614. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0);
  1615. }
  1616. rtw8851b_set_gain_offset(rtwdev, chan->subband_type, RTW89_PHY_0);
  1617. rst_mask0 = B_P0_TXPW_RSTB_MANON | B_P0_TXPW_RSTB_TSSI;
  1618. if (rx_path == RF_A) {
  1619. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, rst_mask0, 1);
  1620. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, rst_mask0, 3);
  1621. }
  1622. }
  1623. static void rtw8851b_bb_cfg_txrx_path(struct rtw89_dev *rtwdev)
  1624. {
  1625. rtw8851b_bb_ctrl_rx_path(rtwdev, RF_A);
  1626. if (rtwdev->hal.rx_nss == 1) {
  1627. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT, B_RXHT_MCS_LIMIT, 0);
  1628. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT, B_RXVHT_MCS_LIMIT, 0);
  1629. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0);
  1630. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0);
  1631. }
  1632. rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL_MOD, 0x0, RTW89_PHY_0);
  1633. }
  1634. static u8 rtw8851b_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path)
  1635. {
  1636. if (rtwdev->is_tssi_mode[rf_path]) {
  1637. u32 addr = R_TSSI_THER + (rf_path << 13);
  1638. return rtw89_phy_read32_mask(rtwdev, addr, B_TSSI_THER);
  1639. }
  1640. rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
  1641. rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0);
  1642. rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
  1643. fsleep(200);
  1644. return rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL);
  1645. }
  1646. static void rtw8851b_btc_set_rfe(struct rtw89_dev *rtwdev)
  1647. {
  1648. struct rtw89_btc *btc = &rtwdev->btc;
  1649. struct rtw89_btc_module *module = &btc->mdinfo;
  1650. module->rfe_type = rtwdev->efuse.rfe_type;
  1651. module->cv = rtwdev->hal.cv;
  1652. module->bt_solo = 0;
  1653. module->switch_type = BTC_SWITCH_INTERNAL;
  1654. module->ant.isolation = 10;
  1655. module->kt_ver_adie = rtwdev->hal.acv;
  1656. if (module->rfe_type == 0)
  1657. return;
  1658. /* rfe_type 3*n+1: 1-Ant(shared),
  1659. * 3*n+2: 2-Ant+Div(non-shared),
  1660. * 3*n+3: 2-Ant+no-Div(non-shared)
  1661. */
  1662. module->ant.num = (module->rfe_type % 3 == 1) ? 1 : 2;
1663. /* WL-1ss at S0, BTG at S0 (on the single WL RF path) */
  1664. module->ant.single_pos = RF_PATH_A;
  1665. module->ant.btg_pos = RF_PATH_A;
  1666. module->ant.stream_cnt = 1;
  1667. if (module->ant.num == 1) {
  1668. module->ant.type = BTC_ANT_SHARED;
  1669. module->bt_pos = BTC_BT_BTG;
  1670. module->wa_type = 1;
  1671. module->ant.diversity = 0;
  1672. } else { /* ant.num == 2 */
  1673. module->ant.type = BTC_ANT_DEDICATED;
  1674. module->bt_pos = BTC_BT_ALONE;
  1675. module->switch_type = BTC_SWITCH_EXTERNAL;
  1676. module->wa_type = 0;
  1677. if (module->rfe_type % 3 == 2)
  1678. module->ant.diversity = 1;
  1679. }
  1680. }
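/* BTC trx-mask LUT write: groups above BTC_BT_SS_GROUP are shifted down by
 * one (Tx-group = 1, Rx-group = 2 after the remap) and shared-antenna
 * modules use a second bank at +3, e.g. a shared-antenna Rx-group write
 * lands on LUT entry 2 + 3 = 5.  The remapped group goes to RR_LUTWA and
 * the mask value to RR_LUTWD0.
 */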
  1681. static
  1682. void rtw8851b_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val)
  1683. {
  1684. if (group > BTC_BT_SS_GROUP)
  1685. group--; /* Tx-group=1, Rx-group=2 */
  1686. if (rtwdev->btc.mdinfo.ant.type == BTC_ANT_SHARED) /* 1-Ant */
  1687. group += 3;
  1688. rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group);
  1689. rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val);
  1690. }
  1691. static void rtw8851b_btc_init_cfg(struct rtw89_dev *rtwdev)
  1692. {
  1693. static const struct rtw89_mac_ax_coex coex_params = {
  1694. .pta_mode = RTW89_MAC_AX_COEX_RTK_MODE,
  1695. .direction = RTW89_MAC_AX_COEX_INNER,
  1696. };
  1697. const struct rtw89_chip_info *chip = rtwdev->chip;
  1698. struct rtw89_btc *btc = &rtwdev->btc;
  1699. struct rtw89_btc_module *module = &btc->mdinfo;
  1700. struct rtw89_btc_ant_info *ant = &module->ant;
  1701. u8 path, path_min, path_max;
  1702. /* PTA init */
  1703. rtw89_mac_coex_init(rtwdev, &coex_params);
  1704. /* set WL Tx response = Hi-Pri */
  1705. chip->ops->btc_set_wl_pri(rtwdev, BTC_PRI_MASK_TX_RESP, true);
  1706. chip->ops->btc_set_wl_pri(rtwdev, BTC_PRI_MASK_BEACON, true);
  1707. /* for 1-Ant && 1-ss case: only 1-path */
  1708. if (ant->stream_cnt == 1) {
  1709. path_min = ant->single_pos;
  1710. path_max = path_min;
  1711. } else {
  1712. path_min = RF_PATH_A;
  1713. path_max = RF_PATH_B;
  1714. }
  1715. for (path = path_min; path <= path_max; path++) {
  1716. /* set rf gnt-debug off */
  1717. rtw89_write_rf(rtwdev, path, RR_WLSEL, RFREG_MASK, 0x0);
  1718. /* set DEBUG_LUT_RFMODE_MASK = 1 to start trx-mask-setup */
  1719. rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, BIT(17));
  1720. /* if GNT_WL=0 && BT=SS_group --> WL Tx/Rx = THRU */
  1721. rtw8851b_set_trx_mask(rtwdev, path, BTC_BT_SS_GROUP, 0x5ff);
  1722. /* if GNT_WL=0 && BT=Rx_group --> WL-Rx = THRU + WL-Tx = MASK */
  1723. rtw8851b_set_trx_mask(rtwdev, path, BTC_BT_RX_GROUP, 0x5df);
  1724. /* if GNT_WL = 0 && BT = Tx_group -->
  1725. * Shared-Ant && BTG-path:WL mask(0x55f), others:WL THRU(0x5ff)
  1726. */
  1727. if (ant->type == BTC_ANT_SHARED && ant->btg_pos == path)
  1728. rtw8851b_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x55f);
  1729. else
  1730. rtw8851b_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x5ff);
  1731. /* set DEBUG_LUT_RFMODE_MASK = 0 to stop trx-mask-setup */
  1732. rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0);
  1733. }
  1734. /* set PTA break table */
  1735. rtw89_write32(rtwdev, R_BTC_BREAK_TABLE, BTC_BREAK_PARAM);
1736. /* enable BT counter: set 0xda40 bits [16] and [2] (2'b11) */
  1737. rtw89_write32_set(rtwdev, R_AX_CSR_MODE, B_AX_BT_CNT_RST | B_AX_STATIS_BT_EN);
  1738. btc->cx.wl.status.map.init_ok = true;
  1739. }
  1740. static
  1741. void rtw8851b_btc_set_wl_pri(struct rtw89_dev *rtwdev, u8 map, bool state)
  1742. {
  1743. u32 bitmap;
  1744. u32 reg;
  1745. switch (map) {
  1746. case BTC_PRI_MASK_TX_RESP:
  1747. reg = R_BTC_BT_COEX_MSK_TABLE;
  1748. bitmap = B_BTC_PRI_MASK_TX_RESP_V1;
  1749. break;
  1750. case BTC_PRI_MASK_BEACON:
  1751. reg = R_AX_WL_PRI_MSK;
  1752. bitmap = B_AX_PTA_WL_PRI_MASK_BCNQ;
  1753. break;
  1754. case BTC_PRI_MASK_RX_CCK:
  1755. reg = R_BTC_BT_COEX_MSK_TABLE;
  1756. bitmap = B_BTC_PRI_MASK_RXCCK_V1;
  1757. break;
  1758. default:
  1759. return;
  1760. }
  1761. if (state)
  1762. rtw89_write32_set(rtwdev, reg, bitmap);
  1763. else
  1764. rtw89_write32_clr(rtwdev, reg, bitmap);
  1765. }
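/* WL Tx power control word pushed by the BTC core: one 16-bit half carries
 * the "all time" override (s9 data plus a flag bit), the other the GNT_BT
 * override (s9 data).  A half of 0xffff means "no override", which
 * rtw8851b_btc_set_wl_txpwr_ctrl() below turns into clearing the matching
 * force-enable bit.
 */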
  1766. union rtw8851b_btc_wl_txpwr_ctrl {
  1767. u32 txpwr_val;
  1768. struct {
  1769. union {
  1770. u16 ctrl_all_time;
  1771. struct {
  1772. s16 data:9;
  1773. u16 rsvd:6;
  1774. u16 flag:1;
  1775. } all_time;
  1776. };
  1777. union {
  1778. u16 ctrl_gnt_bt;
  1779. struct {
  1780. s16 data:9;
  1781. u16 rsvd:7;
  1782. } gnt_bt;
  1783. };
  1784. };
  1785. } __packed;
  1786. static void
  1787. rtw8851b_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val)
  1788. {
  1789. union rtw8851b_btc_wl_txpwr_ctrl arg = { .txpwr_val = txpwr_val };
  1790. s32 val;
  1791. #define __write_ctrl(_reg, _msk, _val, _en, _cond) \
  1792. do { \
  1793. u32 _wrt = FIELD_PREP(_msk, _val); \
  1794. BUILD_BUG_ON(!!(_msk & _en)); \
  1795. if (_cond) \
  1796. _wrt |= _en; \
  1797. else \
  1798. _wrt &= ~_en; \
  1799. rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, _reg, \
  1800. _msk | _en, _wrt); \
  1801. } while (0)
  1802. switch (arg.ctrl_all_time) {
  1803. case 0xffff:
  1804. val = 0;
  1805. break;
  1806. default:
  1807. val = arg.all_time.data;
  1808. break;
  1809. }
  1810. __write_ctrl(R_AX_PWR_RATE_CTRL, B_AX_FORCE_PWR_BY_RATE_VALUE_MASK,
  1811. val, B_AX_FORCE_PWR_BY_RATE_EN,
  1812. arg.ctrl_all_time != 0xffff);
  1813. switch (arg.ctrl_gnt_bt) {
  1814. case 0xffff:
  1815. val = 0;
  1816. break;
  1817. default:
  1818. val = arg.gnt_bt.data;
  1819. break;
  1820. }
  1821. __write_ctrl(R_AX_PWR_COEXT_CTRL, B_AX_TXAGC_BT_MASK, val,
  1822. B_AX_TXAGC_BT_EN, arg.ctrl_gnt_bt != 0xffff);
  1823. #undef __write_ctrl
  1824. }
  1825. static
  1826. s8 rtw8851b_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val)
  1827. {
  1828. val = clamp_t(s8, val, -100, 0) + 100;
  1829. val = min(val + 6, 100); /* compensate offset */
  1830. return val;
  1831. }
  1832. static
  1833. void rtw8851b_btc_update_bt_cnt(struct rtw89_dev *rtwdev)
  1834. {
1835. /* Feature moved to firmware */
  1836. }
  1837. static void rtw8851b_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state)
  1838. {
  1839. struct rtw89_btc *btc = &rtwdev->btc;
  1840. struct rtw89_btc_ant_info *ant = &btc->mdinfo.ant;
  1841. rtw89_write_rf(rtwdev, ant->btg_pos, RR_LUTWE, RFREG_MASK, 0x80000);
  1842. rtw89_write_rf(rtwdev, ant->btg_pos, RR_LUTWA, RFREG_MASK, 0x1);
  1843. rtw89_write_rf(rtwdev, ant->btg_pos, RR_LUTWD1, RFREG_MASK, 0x110);
  1844. /* set WL standby = Rx for GNT_BT_Tx = 1->0 settle issue */
  1845. if (state)
  1846. rtw89_write_rf(rtwdev, ant->btg_pos, RR_LUTWD0, RFREG_MASK, 0x179c);
  1847. else
  1848. rtw89_write_rf(rtwdev, ant->btg_pos, RR_LUTWD0, RFREG_MASK, 0x208);
  1849. rtw89_write_rf(rtwdev, ant->btg_pos, RR_LUTWE, RFREG_MASK, 0x0);
  1850. }
  1851. #define LNA2_51B_MA 0x700
  1852. static const struct rtw89_reg2_def btc_8851b_rf_0[] = {{0x2, 0x0}};
  1853. static const struct rtw89_reg2_def btc_8851b_rf_1[] = {{0x2, 0x1}};
  1854. static void rtw8851b_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level)
  1855. {
  1856. /* To improve BT ACI in co-rx
  1857. * level=0 Default: TIA 1/0= (LNA2,TIAN6) = (7,1)/(5,1) = 21dB/12dB
  1858. * level=1 Fix LNA2=5: TIA 1/0= (LNA2,TIAN6) = (5,0)/(5,1) = 18dB/12dB
  1859. */
  1860. struct rtw89_btc *btc = &rtwdev->btc;
  1861. struct rtw89_btc_ant_info *ant = &btc->mdinfo.ant;
  1862. const struct rtw89_reg2_def *rf;
  1863. u32 n, i, val;
  1864. switch (level) {
  1865. case 0: /* original */
  1866. default:
  1867. btc->dm.wl_lna2 = 0;
  1868. break;
  1869. case 1: /* for FDD free-run */
  1870. btc->dm.wl_lna2 = 0;
  1871. break;
1872. case 2: /* for BTG Co-Rx */
  1873. btc->dm.wl_lna2 = 1;
  1874. break;
  1875. }
  1876. if (btc->dm.wl_lna2 == 0) {
  1877. rf = btc_8851b_rf_0;
  1878. n = ARRAY_SIZE(btc_8851b_rf_0);
  1879. } else {
  1880. rf = btc_8851b_rf_1;
  1881. n = ARRAY_SIZE(btc_8851b_rf_1);
  1882. }
  1883. for (i = 0; i < n; i++, rf++) {
  1884. val = rf->data;
  1885. /* bit[10] = 1 if non-shared-ant for 8851b */
  1886. if (btc->mdinfo.ant.type == BTC_ANT_DEDICATED)
  1887. val |= 0x4;
  1888. rtw89_write_rf(rtwdev, ant->btg_pos, rf->addr, LNA2_51B_MA, val);
  1889. }
  1890. }
  1891. static void rtw8851b_fill_freq_with_ppdu(struct rtw89_dev *rtwdev,
  1892. struct rtw89_rx_phy_ppdu *phy_ppdu,
  1893. struct ieee80211_rx_status *status)
  1894. {
  1895. u16 chan = phy_ppdu->chan_idx;
  1896. enum nl80211_band band;
  1897. u8 ch;
  1898. if (chan == 0)
  1899. return;
  1900. rtw89_decode_chan_idx(rtwdev, chan, &ch, &band);
  1901. status->freq = ieee80211_channel_to_frequency(ch, band);
  1902. status->band = band;
  1903. }
  1904. static void rtw8851b_query_ppdu(struct rtw89_dev *rtwdev,
  1905. struct rtw89_rx_phy_ppdu *phy_ppdu,
  1906. struct ieee80211_rx_status *status)
  1907. {
  1908. u8 path;
  1909. u8 *rx_power = phy_ppdu->rssi;
  1910. status->signal = RTW89_RSSI_RAW_TO_DBM(rx_power[RF_PATH_A]);
  1911. for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
  1912. status->chains |= BIT(path);
  1913. status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]);
  1914. }
  1915. if (phy_ppdu->valid)
  1916. rtw8851b_fill_freq_with_ppdu(rtwdev, phy_ppdu, status);
  1917. }
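/* Enable the BB/RF blocks: set the BB function-enable bits, toggle
 * B_AX_AFC_AFEDIG (set, clear, set), write 0xC7 to XTAL_SI_WL_RFC_S0/S1
 * over the XTAL SI interface and finally kick the PHY register set cycle
 * (PHYREG_SET_XYN_CYCLE).
 */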
  1918. static int rtw8851b_mac_enable_bb_rf(struct rtw89_dev *rtwdev)
  1919. {
  1920. int ret;
  1921. rtw89_write8_set(rtwdev, R_AX_SYS_FUNC_EN,
  1922. B_AX_FEN_BBRSTB | B_AX_FEN_BB_GLB_RSTN);
  1923. rtw89_write32_set(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
  1924. rtw89_write32_clr(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
  1925. rtw89_write32_set(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
  1926. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xC7,
  1927. FULL_BIT_MASK);
  1928. if (ret)
  1929. return ret;
  1930. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xC7,
  1931. FULL_BIT_MASK);
  1932. if (ret)
  1933. return ret;
  1934. rtw89_write8(rtwdev, R_AX_PHYREG_SET, PHYREG_SET_XYN_CYCLE);
  1935. return 0;
  1936. }
  1937. static int rtw8851b_mac_disable_bb_rf(struct rtw89_dev *rtwdev)
  1938. {
  1939. u8 wl_rfc_s0;
  1940. u8 wl_rfc_s1;
  1941. int ret;
  1942. rtw89_write8_clr(rtwdev, R_AX_SYS_FUNC_EN,
  1943. B_AX_FEN_BBRSTB | B_AX_FEN_BB_GLB_RSTN);
  1944. ret = rtw89_mac_read_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, &wl_rfc_s0);
  1945. if (ret)
  1946. return ret;
  1947. wl_rfc_s0 &= ~XTAL_SI_RF00S_EN;
  1948. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, wl_rfc_s0,
  1949. FULL_BIT_MASK);
  1950. if (ret)
  1951. return ret;
  1952. ret = rtw89_mac_read_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, &wl_rfc_s1);
  1953. if (ret)
  1954. return ret;
  1955. wl_rfc_s1 &= ~XTAL_SI_RF10S_EN;
  1956. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, wl_rfc_s1,
  1957. FULL_BIT_MASK);
  1958. return ret;
  1959. }
  1960. static const struct rtw89_chip_ops rtw8851b_chip_ops = {
  1961. .enable_bb_rf = rtw8851b_mac_enable_bb_rf,
  1962. .disable_bb_rf = rtw8851b_mac_disable_bb_rf,
  1963. .bb_preinit = NULL,
  1964. .bb_reset = rtw8851b_bb_reset,
  1965. .bb_sethw = rtw8851b_bb_sethw,
  1966. .read_rf = rtw89_phy_read_rf_v1,
  1967. .write_rf = rtw89_phy_write_rf_v1,
  1968. .set_channel = rtw8851b_set_channel,
  1969. .set_channel_help = rtw8851b_set_channel_help,
  1970. .read_efuse = rtw8851b_read_efuse,
  1971. .read_phycap = rtw8851b_read_phycap,
  1972. .fem_setup = NULL,
  1973. .rfe_gpio = rtw8851b_rfe_gpio,
  1974. .rfk_init = rtw8851b_rfk_init,
  1975. .rfk_channel = rtw8851b_rfk_channel,
  1976. .rfk_band_changed = rtw8851b_rfk_band_changed,
  1977. .rfk_scan = rtw8851b_rfk_scan,
  1978. .rfk_track = rtw8851b_rfk_track,
  1979. .power_trim = rtw8851b_power_trim,
  1980. .set_txpwr = rtw8851b_set_txpwr,
  1981. .set_txpwr_ctrl = rtw8851b_set_txpwr_ctrl,
  1982. .init_txpwr_unit = rtw8851b_init_txpwr_unit,
  1983. .get_thermal = rtw8851b_get_thermal,
  1984. .ctrl_btg = rtw8851b_ctrl_btg,
  1985. .query_ppdu = rtw8851b_query_ppdu,
  1986. .bb_ctrl_btc_preagc = rtw8851b_bb_ctrl_btc_preagc,
  1987. .cfg_txrx_path = rtw8851b_bb_cfg_txrx_path,
  1988. .set_txpwr_ul_tb_offset = rtw8851b_set_txpwr_ul_tb_offset,
  1989. .pwr_on_func = rtw8851b_pwr_on_func,
  1990. .pwr_off_func = rtw8851b_pwr_off_func,
  1991. .query_rxdesc = rtw89_core_query_rxdesc,
  1992. .fill_txdesc = rtw89_core_fill_txdesc,
  1993. .fill_txdesc_fwcmd = rtw89_core_fill_txdesc,
  1994. .cfg_ctrl_path = rtw89_mac_cfg_ctrl_path,
  1995. .mac_cfg_gnt = rtw89_mac_cfg_gnt,
  1996. .stop_sch_tx = rtw89_mac_stop_sch_tx,
  1997. .resume_sch_tx = rtw89_mac_resume_sch_tx,
  1998. .h2c_dctl_sec_cam = NULL,
  1999. .btc_set_rfe = rtw8851b_btc_set_rfe,
  2000. .btc_init_cfg = rtw8851b_btc_init_cfg,
  2001. .btc_set_wl_pri = rtw8851b_btc_set_wl_pri,
  2002. .btc_set_wl_txpwr_ctrl = rtw8851b_btc_set_wl_txpwr_ctrl,
  2003. .btc_get_bt_rssi = rtw8851b_btc_get_bt_rssi,
  2004. .btc_update_bt_cnt = rtw8851b_btc_update_bt_cnt,
  2005. .btc_wl_s1_standby = rtw8851b_btc_wl_s1_standby,
  2006. .btc_set_wl_rx_gain = rtw8851b_btc_set_wl_rx_gain,
  2007. .btc_set_policy = rtw89_btc_set_policy_v1,
  2008. };
  2009. #ifdef CONFIG_PM
  2010. static const struct wiphy_wowlan_support rtw_wowlan_stub_8851b = {
  2011. .flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_DISCONNECT,
  2012. .n_patterns = RTW89_MAX_PATTERN_NUM,
  2013. .pattern_max_len = RTW89_MAX_PATTERN_SIZE,
  2014. .pattern_min_len = 1,
  2015. };
  2016. #endif
  2017. const struct rtw89_chip_info rtw8851b_chip_info = {
  2018. .chip_id = RTL8851B,
  2019. .chip_gen = RTW89_CHIP_AX,
  2020. .ops = &rtw8851b_chip_ops,
  2021. .mac_def = &rtw89_mac_gen_ax,
  2022. .phy_def = &rtw89_phy_gen_ax,
  2023. .fw_basename = RTW8851B_FW_BASENAME,
  2024. .fw_format_max = RTW8851B_FW_FORMAT_MAX,
  2025. .try_ce_fw = true,
  2026. .bbmcu_nr = 0,
  2027. .needed_fw_elms = 0,
  2028. .fifo_size = 196608,
  2029. .small_fifo_size = true,
  2030. .dle_scc_rsvd_size = 98304,
  2031. .max_amsdu_limit = 3500,
  2032. .dis_2g_40m_ul_ofdma = true,
  2033. .rsvd_ple_ofst = 0x2f800,
  2034. .hfc_param_ini = rtw8851b_hfc_param_ini_pcie,
  2035. .dle_mem = rtw8851b_dle_mem_pcie,
  2036. .wde_qempty_acq_num = 4,
  2037. .wde_qempty_mgq_sel = 4,
  2038. .rf_base_addr = {0xe000},
  2039. .pwr_on_seq = NULL,
  2040. .pwr_off_seq = NULL,
  2041. .bb_table = &rtw89_8851b_phy_bb_table,
  2042. .bb_gain_table = &rtw89_8851b_phy_bb_gain_table,
  2043. .rf_table = {&rtw89_8851b_phy_radioa_table,},
  2044. .nctl_table = &rtw89_8851b_phy_nctl_table,
  2045. .nctl_post_table = &rtw8851b_nctl_post_defs_tbl,
  2046. .dflt_parms = &rtw89_8851b_dflt_parms,
  2047. .rfe_parms_conf = rtw89_8851b_rfe_parms_conf,
  2048. .txpwr_factor_rf = 2,
  2049. .txpwr_factor_mac = 1,
  2050. .dig_table = NULL,
  2051. .dig_regs = &rtw8851b_dig_regs,
  2052. .tssi_dbw_table = NULL,
  2053. .support_chanctx_num = 0,
  2054. .support_bands = BIT(NL80211_BAND_2GHZ) |
  2055. BIT(NL80211_BAND_5GHZ),
  2056. .support_bw160 = false,
  2057. .support_unii4 = true,
  2058. .ul_tb_waveform_ctrl = true,
  2059. .ul_tb_pwr_diff = false,
  2060. .hw_sec_hdr = false,
  2061. .rf_path_num = 1,
  2062. .tx_nss = 1,
  2063. .rx_nss = 1,
  2064. .acam_num = 32,
  2065. .bcam_num = 20,
  2066. .scam_num = 128,
  2067. .bacam_num = 2,
  2068. .bacam_dynamic_num = 4,
  2069. .bacam_ver = RTW89_BACAM_V0,
  2070. .sec_ctrl_efuse_size = 4,
  2071. .physical_efuse_size = 1216,
  2072. .logical_efuse_size = 2048,
  2073. .limit_efuse_size = 1280,
  2074. .dav_phy_efuse_size = 0,
  2075. .dav_log_efuse_size = 0,
  2076. .phycap_addr = 0x580,
  2077. .phycap_size = 128,
  2078. .para_ver = 0,
  2079. .wlcx_desired = 0x06000000,
  2080. .btcx_desired = 0x7,
  2081. .scbd = 0x1,
  2082. .mailbox = 0x1,
  2083. .afh_guard_ch = 6,
  2084. .wl_rssi_thres = rtw89_btc_8851b_wl_rssi_thres,
  2085. .bt_rssi_thres = rtw89_btc_8851b_bt_rssi_thres,
  2086. .rssi_tol = 2,
  2087. .mon_reg_num = ARRAY_SIZE(rtw89_btc_8851b_mon_reg),
  2088. .mon_reg = rtw89_btc_8851b_mon_reg,
  2089. .rf_para_ulink_num = ARRAY_SIZE(rtw89_btc_8851b_rf_ul),
  2090. .rf_para_ulink = rtw89_btc_8851b_rf_ul,
  2091. .rf_para_dlink_num = ARRAY_SIZE(rtw89_btc_8851b_rf_dl),
  2092. .rf_para_dlink = rtw89_btc_8851b_rf_dl,
  2093. .ps_mode_supported = BIT(RTW89_PS_MODE_RFOFF) |
  2094. BIT(RTW89_PS_MODE_CLK_GATED),
  2095. .low_power_hci_modes = 0,
  2096. .h2c_cctl_func_id = H2C_FUNC_MAC_CCTLINFO_UD,
  2097. .hci_func_en_addr = R_AX_HCI_FUNC_EN,
  2098. .h2c_desc_size = sizeof(struct rtw89_txwd_body),
  2099. .txwd_body_size = sizeof(struct rtw89_txwd_body),
  2100. .txwd_info_size = sizeof(struct rtw89_txwd_info),
  2101. .h2c_ctrl_reg = R_AX_H2CREG_CTRL,
  2102. .h2c_counter_reg = {R_AX_UDM1 + 1, B_AX_UDM1_HALMAC_H2C_DEQ_CNT_MASK >> 8},
  2103. .h2c_regs = rtw8851b_h2c_regs,
  2104. .c2h_ctrl_reg = R_AX_C2HREG_CTRL,
  2105. .c2h_counter_reg = {R_AX_UDM1 + 1, B_AX_UDM1_HALMAC_C2H_ENQ_CNT_MASK >> 8},
  2106. .c2h_regs = rtw8851b_c2h_regs,
  2107. .page_regs = &rtw8851b_page_regs,
  2108. .cfo_src_fd = true,
  2109. .cfo_hw_comp = true,
  2110. .dcfo_comp = &rtw8851b_dcfo_comp,
  2111. .dcfo_comp_sft = 12,
  2112. .imr_info = &rtw8851b_imr_info,
  2113. .rrsr_cfgs = &rtw8851b_rrsr_cfgs,
  2114. .bss_clr_map_reg = R_BSS_CLR_MAP_V1,
  2115. .dma_ch_mask = BIT(RTW89_DMA_ACH4) | BIT(RTW89_DMA_ACH5) |
  2116. BIT(RTW89_DMA_ACH6) | BIT(RTW89_DMA_ACH7) |
  2117. BIT(RTW89_DMA_B1MG) | BIT(RTW89_DMA_B1HI),
  2118. .edcca_lvl_reg = R_SEG0R_EDCCA_LVL_V1,
  2119. #ifdef CONFIG_PM
  2120. .wowlan_stub = &rtw_wowlan_stub_8851b,
  2121. #endif
  2122. .xtal_info = &rtw8851b_xtal_info,
  2123. };
  2124. EXPORT_SYMBOL(rtw8851b_chip_info);
  2125. MODULE_FIRMWARE(RTW8851B_MODULE_FIRMWARE);
  2126. MODULE_AUTHOR("Realtek Corporation");
  2127. MODULE_DESCRIPTION("Realtek 802.11ax wireless 8851B driver");
  2128. MODULE_LICENSE("Dual BSD/GPL");