rtw8852c.c 96 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542
0552056205720582059206020612062206320642065206620672068206920702071207220732074207520762077207820792080208120822083208420852086208720882089209020912092209320942095209620972098209921002101210221032104210521062107210821092110211121122113211421152116211721182119212021212122212321242125212621272128212921302131213221332134213521362137213821392140214121422143214421452146214721482149215021512152215321542155215621572158215921602161216221632164216521662167216821692170217121722173217421752176217721782179218021812182218321842185218621872188218921902191219221932194219521962197219821992200220122022203220422052206220722082209221022112212221322142215221622172218221922202221222222232224222522262227222822292230223122322233223422352236223722382239224022412242224322442245224622472248224922502251225222532254225522562257225822592260226122622263226422652266226722682269227022712272227322742275227622772278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941
  1. // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
  2. /* Copyright(c) 2019-2022 Realtek Corporation
  3. */
  4. #include "chan.h"
  5. #include "coex.h"
  6. #include "debug.h"
  7. #include "fw.h"
  8. #include "mac.h"
  9. #include "phy.h"
  10. #include "reg.h"
  11. #include "rtw8852c.h"
  12. #include "rtw8852c_rfk.h"
  13. #include "rtw8852c_table.h"
  14. #include "util.h"
  15. #define RTW8852C_FW_FORMAT_MAX 0
  16. #define RTW8852C_FW_BASENAME "rtw89/rtw8852c_fw"
  17. #define RTW8852C_MODULE_FIRMWARE \
  18. RTW8852C_FW_BASENAME ".bin"
  19. static const struct rtw89_hfc_ch_cfg rtw8852c_hfc_chcfg_pcie[] = {
  20. {13, 1614, grp_0}, /* ACH 0 */
  21. {13, 1614, grp_0}, /* ACH 1 */
  22. {13, 1614, grp_0}, /* ACH 2 */
  23. {13, 1614, grp_0}, /* ACH 3 */
  24. {13, 1614, grp_1}, /* ACH 4 */
  25. {13, 1614, grp_1}, /* ACH 5 */
  26. {13, 1614, grp_1}, /* ACH 6 */
  27. {13, 1614, grp_1}, /* ACH 7 */
  28. {13, 1614, grp_0}, /* B0MGQ */
  29. {13, 1614, grp_0}, /* B0HIQ */
  30. {13, 1614, grp_1}, /* B1MGQ */
  31. {13, 1614, grp_1}, /* B1HIQ */
  32. {40, 0, 0} /* FWCMDQ */
  33. };
  34. static const struct rtw89_hfc_pub_cfg rtw8852c_hfc_pubcfg_pcie = {
  35. 1614, /* Group 0 */
  36. 1614, /* Group 1 */
  37. 3228, /* Public Max */
  38. 0 /* WP threshold */
  39. };
  40. static const struct rtw89_hfc_param_ini rtw8852c_hfc_param_ini_pcie[] = {
  41. [RTW89_QTA_SCC] = {rtw8852c_hfc_chcfg_pcie, &rtw8852c_hfc_pubcfg_pcie,
  42. &rtw89_mac_size.hfc_preccfg_pcie, RTW89_HCIFC_POH},
  43. [RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_preccfg_pcie,
  44. RTW89_HCIFC_POH},
  45. [RTW89_QTA_INVALID] = {NULL},
  46. };
  47. static const struct rtw89_dle_mem rtw8852c_dle_mem_pcie[] = {
  48. [RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size19,
  49. &rtw89_mac_size.ple_size19, &rtw89_mac_size.wde_qt18,
  50. &rtw89_mac_size.wde_qt18, &rtw89_mac_size.ple_qt46,
  51. &rtw89_mac_size.ple_qt47},
  52. [RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size18,
  53. &rtw89_mac_size.ple_size18, &rtw89_mac_size.wde_qt17,
  54. &rtw89_mac_size.wde_qt17, &rtw89_mac_size.ple_qt44,
  55. &rtw89_mac_size.ple_qt45},
  56. [RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
  57. NULL},
  58. };
  59. static const u32 rtw8852c_h2c_regs[RTW89_H2CREG_MAX] = {
  60. R_AX_H2CREG_DATA0_V1, R_AX_H2CREG_DATA1_V1, R_AX_H2CREG_DATA2_V1,
  61. R_AX_H2CREG_DATA3_V1
  62. };
  63. static const u32 rtw8852c_c2h_regs[RTW89_H2CREG_MAX] = {
  64. R_AX_C2HREG_DATA0_V1, R_AX_C2HREG_DATA1_V1, R_AX_C2HREG_DATA2_V1,
  65. R_AX_C2HREG_DATA3_V1
  66. };
  67. static const struct rtw89_page_regs rtw8852c_page_regs = {
  68. .hci_fc_ctrl = R_AX_HCI_FC_CTRL_V1,
  69. .ch_page_ctrl = R_AX_CH_PAGE_CTRL_V1,
  70. .ach_page_ctrl = R_AX_ACH0_PAGE_CTRL_V1,
  71. .ach_page_info = R_AX_ACH0_PAGE_INFO_V1,
  72. .pub_page_info3 = R_AX_PUB_PAGE_INFO3_V1,
  73. .pub_page_ctrl1 = R_AX_PUB_PAGE_CTRL1_V1,
  74. .pub_page_ctrl2 = R_AX_PUB_PAGE_CTRL2_V1,
  75. .pub_page_info1 = R_AX_PUB_PAGE_INFO1_V1,
  76. .pub_page_info2 = R_AX_PUB_PAGE_INFO2_V1,
  77. .wp_page_ctrl1 = R_AX_WP_PAGE_CTRL1_V1,
  78. .wp_page_ctrl2 = R_AX_WP_PAGE_CTRL2_V1,
  79. .wp_page_info1 = R_AX_WP_PAGE_INFO1_V1,
  80. };
  81. static const struct rtw89_reg_def rtw8852c_dcfo_comp = {
  82. R_DCFO_COMP_S0_V1, B_DCFO_COMP_S0_V1_MSK
  83. };
  84. static const struct rtw89_imr_info rtw8852c_imr_info = {
  85. .wdrls_imr_set = B_AX_WDRLS_IMR_SET_V1,
  86. .wsec_imr_reg = R_AX_SEC_ERROR_FLAG_IMR,
  87. .wsec_imr_set = B_AX_TX_HANG_IMR | B_AX_RX_HANG_IMR,
  88. .mpdu_tx_imr_set = B_AX_MPDU_TX_IMR_SET_V1,
  89. .mpdu_rx_imr_set = B_AX_MPDU_RX_IMR_SET_V1,
  90. .sta_sch_imr_set = B_AX_STA_SCHEDULER_IMR_SET,
  91. .txpktctl_imr_b0_reg = R_AX_TXPKTCTL_B0_ERRFLAG_IMR,
  92. .txpktctl_imr_b0_clr = B_AX_TXPKTCTL_IMR_B0_CLR_V1,
  93. .txpktctl_imr_b0_set = B_AX_TXPKTCTL_IMR_B0_SET_V1,
  94. .txpktctl_imr_b1_reg = R_AX_TXPKTCTL_B1_ERRFLAG_IMR,
  95. .txpktctl_imr_b1_clr = B_AX_TXPKTCTL_IMR_B1_CLR_V1,
  96. .txpktctl_imr_b1_set = B_AX_TXPKTCTL_IMR_B1_SET_V1,
  97. .wde_imr_clr = B_AX_WDE_IMR_CLR_V1,
  98. .wde_imr_set = B_AX_WDE_IMR_SET_V1,
  99. .ple_imr_clr = B_AX_PLE_IMR_CLR_V1,
  100. .ple_imr_set = B_AX_PLE_IMR_SET_V1,
  101. .host_disp_imr_clr = B_AX_HOST_DISP_IMR_CLR_V1,
  102. .host_disp_imr_set = B_AX_HOST_DISP_IMR_SET_V1,
  103. .cpu_disp_imr_clr = B_AX_CPU_DISP_IMR_CLR_V1,
  104. .cpu_disp_imr_set = B_AX_CPU_DISP_IMR_SET_V1,
  105. .other_disp_imr_clr = B_AX_OTHER_DISP_IMR_CLR_V1,
  106. .other_disp_imr_set = B_AX_OTHER_DISP_IMR_SET_V1,
  107. .bbrpt_com_err_imr_reg = R_AX_BBRPT_COM_ERR_IMR,
  108. .bbrpt_chinfo_err_imr_reg = R_AX_BBRPT_CHINFO_ERR_IMR,
  109. .bbrpt_err_imr_set = R_AX_BBRPT_CHINFO_IMR_SET_V1,
  110. .bbrpt_dfs_err_imr_reg = R_AX_BBRPT_DFS_ERR_IMR,
  111. .ptcl_imr_clr = B_AX_PTCL_IMR_CLR_V1,
  112. .ptcl_imr_set = B_AX_PTCL_IMR_SET_V1,
  113. .cdma_imr_0_reg = R_AX_RX_ERR_FLAG_IMR,
  114. .cdma_imr_0_clr = B_AX_RX_ERR_IMR_CLR_V1,
  115. .cdma_imr_0_set = B_AX_RX_ERR_IMR_SET_V1,
  116. .cdma_imr_1_reg = R_AX_TX_ERR_FLAG_IMR,
  117. .cdma_imr_1_clr = B_AX_TX_ERR_IMR_CLR_V1,
  118. .cdma_imr_1_set = B_AX_TX_ERR_IMR_SET_V1,
  119. .phy_intf_imr_reg = R_AX_PHYINFO_ERR_IMR_V1,
  120. .phy_intf_imr_clr = B_AX_PHYINFO_IMR_CLR_V1,
  121. .phy_intf_imr_set = B_AX_PHYINFO_IMR_SET_V1,
  122. .rmac_imr_reg = R_AX_RX_ERR_IMR,
  123. .rmac_imr_clr = B_AX_RMAC_IMR_CLR_V1,
  124. .rmac_imr_set = B_AX_RMAC_IMR_SET_V1,
  125. .tmac_imr_reg = R_AX_TRXPTCL_ERROR_INDICA_MASK,
  126. .tmac_imr_clr = B_AX_TMAC_IMR_CLR_V1,
  127. .tmac_imr_set = B_AX_TMAC_IMR_SET_V1,
  128. };
  129. static const struct rtw89_rrsr_cfgs rtw8852c_rrsr_cfgs = {
  130. .ref_rate = {R_AX_TRXPTCL_RRSR_CTL_0, B_AX_WMAC_RESP_REF_RATE_SEL, 0},
  131. .rsc = {R_AX_PTCL_RRSR1, B_AX_RSC_MASK, 2},
  132. };
  133. static const struct rtw89_dig_regs rtw8852c_dig_regs = {
  134. .seg0_pd_reg = R_SEG0R_PD,
  135. .pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
  136. .pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK,
  137. .bmode_pd_reg = R_BMODE_PDTH_EN_V1,
  138. .bmode_cca_rssi_limit_en = B_BMODE_PDTH_LIMIT_EN_MSK_V1,
  139. .bmode_pd_lower_bound_reg = R_BMODE_PDTH_V1,
  140. .bmode_rssi_nocca_low_th_mask = B_BMODE_PDTH_LOWER_BOUND_MSK_V1,
  141. .p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
  142. .p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
  143. .p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
  144. .p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
  145. .p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
  146. .p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
  147. .p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V1,
  148. B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
  149. .p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V1,
  150. B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
  151. .p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V1,
  152. B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
  153. .p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V1,
  154. B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
  155. };
  156. static void rtw8852c_ctrl_btg(struct rtw89_dev *rtwdev, bool btg);
  157. static void rtw8852c_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev, u8 tx_path,
  158. enum rtw89_mac_idx mac_idx);
  159. static int rtw8852c_pwr_on_func(struct rtw89_dev *rtwdev)
  160. {
  161. u32 val32;
  162. u32 ret;
  163. val32 = rtw89_read32_mask(rtwdev, R_AX_SYS_STATUS1, B_AX_PAD_HCI_SEL_V2_MASK);
  164. if (val32 == MAC_AX_HCI_SEL_PCIE_USB)
  165. rtw89_write32_set(rtwdev, R_AX_LDO_AON_CTRL0, B_AX_PD_REGU_L);
  166. rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_AFSM_WLSUS_EN |
  167. B_AX_AFSM_PCIE_SUS_EN);
  168. rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_DIS_WLBT_PDNSUSEN_SOPC);
  169. rtw89_write32_set(rtwdev, R_AX_WLLPS_CTRL, B_AX_DIS_WLBT_LPSEN_LOPC);
  170. rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APDM_HPDN);
  171. rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_SWLPS);
  172. ret = read_poll_timeout(rtw89_read32, val32, val32 & B_AX_RDY_SYSPWR,
  173. 1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
  174. if (ret)
  175. return ret;
  176. rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_EN_WLON);
  177. rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFN_ONMAC);
  178. ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_AX_APFN_ONMAC),
  179. 1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
  180. if (ret)
  181. return ret;
  182. rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
  183. rtw89_write8_clr(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
  184. rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
  185. rtw89_write8_clr(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
  186. rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
  187. rtw89_write32_clr(rtwdev, R_AX_SYS_SDIO_CTRL, B_AX_PCIE_CALIB_EN_V1);
  188. rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND, B_AX_CMAC1_FEN);
  189. rtw89_write32_set(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND, B_AX_R_SYM_ISO_CMAC12PP);
  190. rtw89_write32_clr(rtwdev, R_AX_AFE_CTRL1, B_AX_R_SYM_WLCMAC1_P4_PC_EN |
  191. B_AX_R_SYM_WLCMAC1_P3_PC_EN |
  192. B_AX_R_SYM_WLCMAC1_P2_PC_EN |
  193. B_AX_R_SYM_WLCMAC1_P1_PC_EN |
  194. B_AX_R_SYM_WLCMAC1_PC_EN);
  195. rtw89_write32_set(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_PTA_1P3);
  196. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL,
  197. XTAL_SI_GND_SHDN_WL, XTAL_SI_GND_SHDN_WL);
  198. if (ret)
  199. return ret;
  200. rtw89_write32_set(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_RFC_1P3);
  201. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL,
  202. XTAL_SI_SHDN_WL, XTAL_SI_SHDN_WL);
  203. if (ret)
  204. return ret;
  205. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_OFF_WEI,
  206. XTAL_SI_OFF_WEI);
  207. if (ret)
  208. return ret;
  209. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_OFF_EI,
  210. XTAL_SI_OFF_EI);
  211. if (ret)
  212. return ret;
  213. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_RFC2RF);
  214. if (ret)
  215. return ret;
  216. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_PON_WEI,
  217. XTAL_SI_PON_WEI);
  218. if (ret)
  219. return ret;
  220. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_PON_EI,
  221. XTAL_SI_PON_EI);
  222. if (ret)
  223. return ret;
  224. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_SRAM2RFC);
  225. if (ret)
  226. return ret;
  227. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_XMD_2, 0, XTAL_SI_LDO_LPS);
  228. if (ret)
  229. return ret;
  230. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_XMD_4, 0, XTAL_SI_LPS_CAP);
  231. if (ret)
  232. return ret;
  233. rtw89_write32_set(rtwdev, R_AX_PMC_DBG_CTRL2, B_AX_SYSON_DIS_PMCR_AX_WRMSK);
  234. rtw89_write32_set(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_ISO_EB2CORE);
  235. rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_PWC_EV2EF_B15);
  236. fsleep(1000);
  237. rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_PWC_EV2EF_B14);
  238. rtw89_write32_clr(rtwdev, R_AX_PMC_DBG_CTRL2, B_AX_SYSON_DIS_PMCR_AX_WRMSK);
  239. rtw89_write32_set(rtwdev, R_AX_GPIO0_15_EECS_EESK_LED1_PULL_LOW_EN,
  240. B_AX_EECS_PULL_LOW_EN | B_AX_EESK_PULL_LOW_EN |
  241. B_AX_LED1_PULL_LOW_EN);
  242. rtw89_write32_set(rtwdev, R_AX_DMAC_FUNC_EN,
  243. B_AX_MAC_FUNC_EN | B_AX_DMAC_FUNC_EN | B_AX_MPDU_PROC_EN |
  244. B_AX_WD_RLS_EN | B_AX_DLE_WDE_EN | B_AX_TXPKT_CTRL_EN |
  245. B_AX_STA_SCH_EN | B_AX_DLE_PLE_EN | B_AX_PKT_BUF_EN |
  246. B_AX_DMAC_TBL_EN | B_AX_PKT_IN_EN | B_AX_DLE_CPUIO_EN |
  247. B_AX_DISPATCHER_EN | B_AX_BBRPT_EN | B_AX_MAC_SEC_EN |
  248. B_AX_MAC_UN_EN | B_AX_H_AXIDMA_EN);
  249. rtw89_write32_set(rtwdev, R_AX_CMAC_FUNC_EN,
  250. B_AX_CMAC_EN | B_AX_CMAC_TXEN | B_AX_CMAC_RXEN |
  251. B_AX_FORCE_CMACREG_GCKEN | B_AX_PHYINTF_EN |
  252. B_AX_CMAC_DMA_EN | B_AX_PTCLTOP_EN | B_AX_SCHEDULER_EN |
  253. B_AX_TMAC_EN | B_AX_RMAC_EN);
  254. rtw89_write32_mask(rtwdev, R_AX_LED1_FUNC_SEL, B_AX_PINMUX_EESK_FUNC_SEL_V1_MASK,
  255. PINMUX_EESK_FUNC_SEL_BT_LOG);
  256. return 0;
  257. }
  258. static int rtw8852c_pwr_off_func(struct rtw89_dev *rtwdev)
  259. {
  260. u32 val32;
  261. u32 ret;
  262. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_RFC2RF,
  263. XTAL_SI_RFC2RF);
  264. if (ret)
  265. return ret;
  266. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_OFF_EI);
  267. if (ret)
  268. return ret;
  269. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_OFF_WEI);
  270. if (ret)
  271. return ret;
  272. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0, XTAL_SI_RF00);
  273. if (ret)
  274. return ret;
  275. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0, XTAL_SI_RF10);
  276. if (ret)
  277. return ret;
  278. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_SRAM2RFC,
  279. XTAL_SI_SRAM2RFC);
  280. if (ret)
  281. return ret;
  282. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_PON_EI);
  283. if (ret)
  284. return ret;
  285. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_PON_WEI);
  286. if (ret)
  287. return ret;
  288. rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_EN_WLON);
  289. rtw89_write8_clr(rtwdev, R_AX_SYS_FUNC_EN, B_AX_FEN_BB_GLB_RSTN | B_AX_FEN_BBRSTB);
  290. rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND,
  291. B_AX_R_SYM_FEN_WLBBGLB_1 | B_AX_R_SYM_FEN_WLBBFUN_1);
  292. rtw89_write32_clr(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_RFC_1P3);
  293. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_SHDN_WL);
  294. if (ret)
  295. return ret;
  296. rtw89_write32_clr(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_PTA_1P3);
  297. ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_GND_SHDN_WL);
  298. if (ret)
  299. return ret;
  300. rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_OFFMAC);
  301. ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_AX_APFM_OFFMAC),
  302. 1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
  303. if (ret)
  304. return ret;
  305. rtw89_write32(rtwdev, R_AX_WLLPS_CTRL, 0x0001A0B0);
  306. rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_XTAL_OFF_A_DIE);
  307. rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_SWLPS);
  308. return 0;
  309. }
  310. static void rtw8852c_e_efuse_parsing(struct rtw89_efuse *efuse,
  311. struct rtw8852c_efuse *map)
  312. {
  313. ether_addr_copy(efuse->addr, map->e.mac_addr);
  314. efuse->rfe_type = map->rfe_type;
  315. efuse->xtal_cap = map->xtal_k;
  316. }
  317. static void rtw8852c_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
  318. struct rtw8852c_efuse *map)
  319. {
  320. struct rtw89_tssi_info *tssi = &rtwdev->tssi;
  321. struct rtw8852c_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi};
  322. u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b};
  323. u8 i, j;
  324. tssi->thermal[RF_PATH_A] = map->path_a_therm;
  325. tssi->thermal[RF_PATH_B] = map->path_b_therm;
  326. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  327. memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
  328. sizeof(ofst[i]->cck_tssi));
  329. for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
  330. rtw89_debug(rtwdev, RTW89_DBG_TSSI,
  331. "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
  332. i, j, tssi->tssi_cck[i][j]);
  333. memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
  334. sizeof(ofst[i]->bw40_tssi));
  335. memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
  336. ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));
  337. memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i],
  338. sizeof(tssi->tssi_6g_mcs[i]));
  339. for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
  340. rtw89_debug(rtwdev, RTW89_DBG_TSSI,
  341. "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
  342. i, j, tssi->tssi_mcs[i][j]);
  343. }
  344. }
  345. static bool _decode_efuse_gain(u8 data, s8 *high, s8 *low)
  346. {
  347. if (high)
  348. *high = sign_extend32(FIELD_GET(GENMASK(7, 4), data), 3);
  349. if (low)
  350. *low = sign_extend32(FIELD_GET(GENMASK(3, 0), data), 3);
  351. return data != 0xff;
  352. }
  353. static void rtw8852c_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev,
  354. struct rtw8852c_efuse *map)
  355. {
  356. struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
  357. bool valid = false;
  358. valid |= _decode_efuse_gain(map->rx_gain_2g_cck,
  359. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK],
  360. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK]);
  361. valid |= _decode_efuse_gain(map->rx_gain_2g_ofdm,
  362. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM],
  363. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM]);
  364. valid |= _decode_efuse_gain(map->rx_gain_5g_low,
  365. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW],
  366. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW]);
  367. valid |= _decode_efuse_gain(map->rx_gain_5g_mid,
  368. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID],
  369. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID]);
  370. valid |= _decode_efuse_gain(map->rx_gain_5g_high,
  371. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH],
  372. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH]);
  373. gain->offset_valid = valid;
  374. }
  375. static int rtw8852c_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map)
  376. {
  377. struct rtw89_efuse *efuse = &rtwdev->efuse;
  378. struct rtw8852c_efuse *map;
  379. map = (struct rtw8852c_efuse *)log_map;
  380. efuse->country_code[0] = map->country_code[0];
  381. efuse->country_code[1] = map->country_code[1];
  382. rtw8852c_efuse_parsing_tssi(rtwdev, map);
  383. rtw8852c_efuse_parsing_gain_offset(rtwdev, map);
  384. switch (rtwdev->hci.type) {
  385. case RTW89_HCI_TYPE_PCIE:
  386. rtw8852c_e_efuse_parsing(efuse, map);
  387. break;
  388. default:
  389. return -ENOTSUPP;
  390. }
  391. rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);
  392. return 0;
  393. }
  394. static void rtw8852c_phycap_parsing_tssi(struct rtw89_dev *rtwdev, u8 *phycap_map)
  395. {
  396. struct rtw89_tssi_info *tssi = &rtwdev->tssi;
  397. static const u32 tssi_trim_addr[RF_PATH_NUM_8852C] = {0x5D6, 0x5AB};
  398. static const u32 tssi_trim_addr_6g[RF_PATH_NUM_8852C] = {0x5CE, 0x5A3};
  399. u32 addr = rtwdev->chip->phycap_addr;
  400. bool pg = false;
  401. u32 ofst;
  402. u8 i, j;
  403. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  404. for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM; j++) {
  405. /* addrs are in decreasing order */
  406. ofst = tssi_trim_addr[i] - addr - j;
  407. tssi->tssi_trim[i][j] = phycap_map[ofst];
  408. if (phycap_map[ofst] != 0xff)
  409. pg = true;
  410. }
  411. for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM_6G; j++) {
  412. /* addrs are in decreasing order */
  413. ofst = tssi_trim_addr_6g[i] - addr - j;
  414. tssi->tssi_trim_6g[i][j] = phycap_map[ofst];
  415. if (phycap_map[ofst] != 0xff)
  416. pg = true;
  417. }
  418. }
  419. if (!pg) {
  420. memset(tssi->tssi_trim, 0, sizeof(tssi->tssi_trim));
  421. memset(tssi->tssi_trim_6g, 0, sizeof(tssi->tssi_trim_6g));
  422. rtw89_debug(rtwdev, RTW89_DBG_TSSI,
  423. "[TSSI][TRIM] no PG, set all trim info to 0\n");
  424. }
  425. for (i = 0; i < RF_PATH_NUM_8852C; i++)
  426. for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM; j++)
  427. rtw89_debug(rtwdev, RTW89_DBG_TSSI,
  428. "[TSSI] path=%d idx=%d trim=0x%x addr=0x%x\n",
  429. i, j, tssi->tssi_trim[i][j],
  430. tssi_trim_addr[i] - j);
  431. }
  432. static void rtw8852c_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
  433. u8 *phycap_map)
  434. {
  435. struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
  436. static const u32 thm_trim_addr[RF_PATH_NUM_8852C] = {0x5DF, 0x5DC};
  437. u32 addr = rtwdev->chip->phycap_addr;
  438. u8 i;
  439. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  440. info->thermal_trim[i] = phycap_map[thm_trim_addr[i] - addr];
  441. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  442. "[THERMAL][TRIM] path=%d thermal_trim=0x%x\n",
  443. i, info->thermal_trim[i]);
  444. if (info->thermal_trim[i] != 0xff)
  445. info->pg_thermal_trim = true;
  446. }
  447. }
  448. static void rtw8852c_thermal_trim(struct rtw89_dev *rtwdev)
  449. {
  450. #define __thm_setting(raw) \
  451. ({ \
  452. u8 __v = (raw); \
  453. ((__v & 0x1) << 3) | ((__v & 0x1f) >> 1); \
  454. })
  455. struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
  456. u8 i, val;
  457. if (!info->pg_thermal_trim) {
  458. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  459. "[THERMAL][TRIM] no PG, do nothing\n");
  460. return;
  461. }
  462. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  463. val = __thm_setting(info->thermal_trim[i]);
  464. rtw89_write_rf(rtwdev, i, RR_TM2, RR_TM2_OFF, val);
  465. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  466. "[THERMAL][TRIM] path=%d thermal_setting=0x%x\n",
  467. i, val);
  468. }
  469. #undef __thm_setting
  470. }
  471. static void rtw8852c_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
  472. u8 *phycap_map)
  473. {
  474. struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
  475. static const u32 pabias_trim_addr[RF_PATH_NUM_8852C] = {0x5DE, 0x5DB};
  476. u32 addr = rtwdev->chip->phycap_addr;
  477. u8 i;
  478. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  479. info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];
  480. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  481. "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
  482. i, info->pa_bias_trim[i]);
  483. if (info->pa_bias_trim[i] != 0xff)
  484. info->pg_pa_bias_trim = true;
  485. }
  486. }
  487. static void rtw8852c_pa_bias_trim(struct rtw89_dev *rtwdev)
  488. {
  489. struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
  490. u8 pabias_2g, pabias_5g;
  491. u8 i;
  492. if (!info->pg_pa_bias_trim) {
  493. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  494. "[PA_BIAS][TRIM] no PG, do nothing\n");
  495. return;
  496. }
  497. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  498. pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]);
  499. pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]);
  500. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  501. "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
  502. i, pabias_2g, pabias_5g);
  503. rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG, pabias_2g);
  504. rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA, pabias_5g);
  505. }
  506. }
  507. static int rtw8852c_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
  508. {
  509. rtw8852c_phycap_parsing_tssi(rtwdev, phycap_map);
  510. rtw8852c_phycap_parsing_thermal_trim(rtwdev, phycap_map);
  511. rtw8852c_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
  512. return 0;
  513. }
  514. static void rtw8852c_power_trim(struct rtw89_dev *rtwdev)
  515. {
  516. rtw8852c_thermal_trim(rtwdev);
  517. rtw8852c_pa_bias_trim(rtwdev);
  518. }
  519. static void rtw8852c_set_channel_mac(struct rtw89_dev *rtwdev,
  520. const struct rtw89_chan *chan,
  521. u8 mac_idx)
  522. {
  523. u32 rf_mod = rtw89_mac_reg_by_idx(rtwdev, R_AX_WMAC_RFMOD, mac_idx);
  524. u32 sub_carr = rtw89_mac_reg_by_idx(rtwdev, R_AX_TX_SUB_CARRIER_VALUE, mac_idx);
  525. u32 chk_rate = rtw89_mac_reg_by_idx(rtwdev, R_AX_TXRATE_CHK, mac_idx);
  526. u8 txsc20 = 0, txsc40 = 0, txsc80 = 0;
  527. u8 rf_mod_val = 0, chk_rate_mask = 0;
  528. u32 txsc;
  529. switch (chan->band_width) {
  530. case RTW89_CHANNEL_WIDTH_160:
  531. txsc80 = rtw89_phy_get_txsc(rtwdev, chan,
  532. RTW89_CHANNEL_WIDTH_80);
  533. fallthrough;
  534. case RTW89_CHANNEL_WIDTH_80:
  535. txsc40 = rtw89_phy_get_txsc(rtwdev, chan,
  536. RTW89_CHANNEL_WIDTH_40);
  537. fallthrough;
  538. case RTW89_CHANNEL_WIDTH_40:
  539. txsc20 = rtw89_phy_get_txsc(rtwdev, chan,
  540. RTW89_CHANNEL_WIDTH_20);
  541. break;
  542. default:
  543. break;
  544. }
  545. switch (chan->band_width) {
  546. case RTW89_CHANNEL_WIDTH_160:
  547. rf_mod_val = AX_WMAC_RFMOD_160M;
  548. txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20) |
  549. FIELD_PREP(B_AX_TXSC_40M_MASK, txsc40) |
  550. FIELD_PREP(B_AX_TXSC_80M_MASK, txsc80);
  551. break;
  552. case RTW89_CHANNEL_WIDTH_80:
  553. rf_mod_val = AX_WMAC_RFMOD_80M;
  554. txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20) |
  555. FIELD_PREP(B_AX_TXSC_40M_MASK, txsc40);
  556. break;
  557. case RTW89_CHANNEL_WIDTH_40:
  558. rf_mod_val = AX_WMAC_RFMOD_40M;
  559. txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20);
  560. break;
  561. case RTW89_CHANNEL_WIDTH_20:
  562. default:
  563. rf_mod_val = AX_WMAC_RFMOD_20M;
  564. txsc = 0;
  565. break;
  566. }
  567. rtw89_write8_mask(rtwdev, rf_mod, B_AX_WMAC_RFMOD_MASK, rf_mod_val);
  568. rtw89_write32(rtwdev, sub_carr, txsc);
  569. switch (chan->band_type) {
  570. case RTW89_BAND_2G:
  571. chk_rate_mask = B_AX_BAND_MODE;
  572. break;
  573. case RTW89_BAND_5G:
  574. case RTW89_BAND_6G:
  575. chk_rate_mask = B_AX_CHECK_CCK_EN | B_AX_RTS_LIMIT_IN_OFDM6;
  576. break;
  577. default:
  578. rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type);
  579. return;
  580. }
  581. rtw89_write8_clr(rtwdev, chk_rate, B_AX_BAND_MODE | B_AX_CHECK_CCK_EN |
  582. B_AX_RTS_LIMIT_IN_OFDM6);
  583. rtw89_write8_set(rtwdev, chk_rate, chk_rate_mask);
  584. }
  585. static const u32 rtw8852c_sco_barker_threshold[14] = {
  586. 0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6,
  587. 0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a
  588. };
  589. static const u32 rtw8852c_sco_cck_threshold[14] = {
  590. 0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db,
  591. 0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e
  592. };
  593. static int rtw8852c_ctrl_sco_cck(struct rtw89_dev *rtwdev, u8 central_ch,
  594. u8 primary_ch, enum rtw89_bandwidth bw)
  595. {
  596. u8 ch_element;
  597. if (bw == RTW89_CHANNEL_WIDTH_20) {
  598. ch_element = central_ch - 1;
  599. } else if (bw == RTW89_CHANNEL_WIDTH_40) {
  600. if (primary_ch == 1)
  601. ch_element = central_ch - 1 + 2;
  602. else
  603. ch_element = central_ch - 1 - 2;
  604. } else {
  605. rtw89_warn(rtwdev, "Invalid BW:%d for CCK\n", bw);
  606. return -EINVAL;
  607. }
  608. rtw89_phy_write32_mask(rtwdev, R_BK_FC0_INV_V1, B_BK_FC0_INV_MSK_V1,
  609. rtw8852c_sco_barker_threshold[ch_element]);
  610. rtw89_phy_write32_mask(rtwdev, R_CCK_FC0_INV_V1, B_CCK_FC0_INV_MSK_V1,
  611. rtw8852c_sco_cck_threshold[ch_element]);
  612. return 0;
  613. }
  614. struct rtw8852c_bb_gain {
  615. u32 gain_g[BB_PATH_NUM_8852C];
  616. u32 gain_a[BB_PATH_NUM_8852C];
  617. u32 gain_mask;
  618. };
  619. static const struct rtw8852c_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
  620. { .gain_g = {0x4678, 0x475C}, .gain_a = {0x45DC, 0x4740},
  621. .gain_mask = 0x00ff0000 },
  622. { .gain_g = {0x4678, 0x475C}, .gain_a = {0x45DC, 0x4740},
  623. .gain_mask = 0xff000000 },
  624. { .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
  625. .gain_mask = 0x000000ff },
  626. { .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
  627. .gain_mask = 0x0000ff00 },
  628. { .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
  629. .gain_mask = 0x00ff0000 },
  630. { .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
  631. .gain_mask = 0xff000000 },
  632. { .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
  633. .gain_mask = 0x000000ff },
  634. };
  635. static const struct rtw8852c_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
  636. { .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
  637. .gain_mask = 0x00ff0000 },
  638. { .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
  639. .gain_mask = 0xff000000 },
  640. };
  641. struct rtw8852c_bb_gain_bypass {
  642. u32 gain_g[BB_PATH_NUM_8852C];
  643. u32 gain_a[BB_PATH_NUM_8852C];
  644. u32 gain_mask_g;
  645. u32 gain_mask_a;
  646. };
  647. static
  648. const struct rtw8852c_bb_gain_bypass bb_gain_bypass_lna[LNA_GAIN_NUM] = {
  649. { .gain_g = {0x4BB8, 0x4C7C}, .gain_a = {0x4BB4, 0x4C78},
  650. .gain_mask_g = 0xff000000, .gain_mask_a = 0xff},
  651. { .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
  652. .gain_mask_g = 0xff, .gain_mask_a = 0xff00},
  653. { .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
  654. .gain_mask_g = 0xff00, .gain_mask_a = 0xff0000},
  655. { .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
  656. .gain_mask_g = 0xff0000, .gain_mask_a = 0xff000000},
  657. { .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB8, 0x4C7C},
  658. .gain_mask_g = 0xff000000, .gain_mask_a = 0xff},
  659. { .gain_g = {0x4BC0, 0x4C84}, .gain_a = {0x4BB8, 0x4C7C},
  660. .gain_mask_g = 0xff, .gain_mask_a = 0xff00},
  661. { .gain_g = {0x4BC0, 0x4C84}, .gain_a = {0x4BB8, 0x4C7C},
  662. .gain_mask_g = 0xff00, .gain_mask_a = 0xff0000},
  663. };
  664. struct rtw8852c_bb_gain_op1db {
  665. struct {
  666. u32 lna[BB_PATH_NUM_8852C];
  667. u32 tia_lna[BB_PATH_NUM_8852C];
  668. u32 mask;
  669. } reg[LNA_GAIN_NUM];
  670. u32 reg_tia0_lna6[BB_PATH_NUM_8852C];
  671. u32 mask_tia0_lna6;
  672. };
  673. static const struct rtw8852c_bb_gain_op1db bb_gain_op1db_a = {
  674. .reg = {
  675. { .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
  676. .mask = 0xff},
  677. { .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
  678. .mask = 0xff00},
  679. { .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
  680. .mask = 0xff0000},
  681. { .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
  682. .mask = 0xff000000},
  683. { .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
  684. .mask = 0xff},
  685. { .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
  686. .mask = 0xff00},
  687. { .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
  688. .mask = 0xff0000},
  689. },
  690. .reg_tia0_lna6 = {0x4674, 0x4758},
  691. .mask_tia0_lna6 = 0xff000000,
  692. };
  693. static void rtw8852c_set_gain_error(struct rtw89_dev *rtwdev,
  694. enum rtw89_subband subband,
  695. enum rtw89_rf_path path)
  696. {
  697. const struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain;
  698. u8 gain_band = rtw89_subband_to_bb_gain_band(subband);
  699. s32 val;
  700. u32 reg;
  701. u32 mask;
  702. int i;
  703. for (i = 0; i < LNA_GAIN_NUM; i++) {
  704. if (subband == RTW89_CH_2G)
  705. reg = bb_gain_lna[i].gain_g[path];
  706. else
  707. reg = bb_gain_lna[i].gain_a[path];
  708. mask = bb_gain_lna[i].gain_mask;
  709. val = gain->lna_gain[gain_band][path][i];
  710. rtw89_phy_write32_mask(rtwdev, reg, mask, val);
  711. if (subband == RTW89_CH_2G) {
  712. reg = bb_gain_bypass_lna[i].gain_g[path];
  713. mask = bb_gain_bypass_lna[i].gain_mask_g;
  714. } else {
  715. reg = bb_gain_bypass_lna[i].gain_a[path];
  716. mask = bb_gain_bypass_lna[i].gain_mask_a;
  717. }
  718. val = gain->lna_gain_bypass[gain_band][path][i];
  719. rtw89_phy_write32_mask(rtwdev, reg, mask, val);
  720. if (subband != RTW89_CH_2G) {
  721. reg = bb_gain_op1db_a.reg[i].lna[path];
  722. mask = bb_gain_op1db_a.reg[i].mask;
  723. val = gain->lna_op1db[gain_band][path][i];
  724. rtw89_phy_write32_mask(rtwdev, reg, mask, val);
  725. reg = bb_gain_op1db_a.reg[i].tia_lna[path];
  726. mask = bb_gain_op1db_a.reg[i].mask;
  727. val = gain->tia_lna_op1db[gain_band][path][i];
  728. rtw89_phy_write32_mask(rtwdev, reg, mask, val);
  729. }
  730. }
  731. if (subband != RTW89_CH_2G) {
  732. reg = bb_gain_op1db_a.reg_tia0_lna6[path];
  733. mask = bb_gain_op1db_a.mask_tia0_lna6;
  734. val = gain->tia_lna_op1db[gain_band][path][7];
  735. rtw89_phy_write32_mask(rtwdev, reg, mask, val);
  736. }
  737. for (i = 0; i < TIA_GAIN_NUM; i++) {
  738. if (subband == RTW89_CH_2G)
  739. reg = bb_gain_tia[i].gain_g[path];
  740. else
  741. reg = bb_gain_tia[i].gain_a[path];
  742. mask = bb_gain_tia[i].gain_mask;
  743. val = gain->tia_gain[gain_band][path][i];
  744. rtw89_phy_write32_mask(rtwdev, reg, mask, val);
  745. }
  746. }
  747. static void rtw8852c_set_gain_offset(struct rtw89_dev *rtwdev,
  748. const struct rtw89_chan *chan,
  749. enum rtw89_phy_idx phy_idx,
  750. enum rtw89_rf_path path)
  751. {
  752. static const u32 rssi_ofst_addr[2] = {R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  753. R_PATH1_G_TIA0_LNA6_OP1DB_V1};
  754. static const u32 rpl_mask[2] = {B_RPL_PATHA_MASK, B_RPL_PATHB_MASK};
  755. static const u32 rpl_tb_mask[2] = {B_RSSI_M_PATHA_MASK, B_RSSI_M_PATHB_MASK};
  756. struct rtw89_phy_efuse_gain *efuse_gain = &rtwdev->efuse_gain;
  757. enum rtw89_gain_offset gain_band;
  758. s32 offset_q0, offset_base_q4;
  759. s32 tmp = 0;
  760. if (!efuse_gain->offset_valid)
  761. return;
  762. if (rtwdev->dbcc_en && path == RF_PATH_B)
  763. phy_idx = RTW89_PHY_1;
  764. if (chan->band_type == RTW89_BAND_2G) {
  765. offset_q0 = efuse_gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK];
  766. offset_base_q4 = efuse_gain->offset_base[phy_idx];
  767. tmp = clamp_t(s32, (-offset_q0 << 3) + (offset_base_q4 >> 1),
  768. S8_MIN >> 1, S8_MAX >> 1);
  769. rtw89_phy_write32_mask(rtwdev, R_RPL_OFST, B_RPL_OFST_MASK, tmp & 0x7f);
  770. }
  771. gain_band = rtw89_subband_to_gain_offset_band_of_ofdm(chan->subband_type);
  772. offset_q0 = -efuse_gain->offset[path][gain_band];
  773. offset_base_q4 = efuse_gain->offset_base[phy_idx];
  774. tmp = (offset_q0 << 2) + (offset_base_q4 >> 2);
  775. tmp = clamp_t(s32, -tmp, S8_MIN, S8_MAX);
  776. rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[path], B_PATH0_R_G_OFST_MASK, tmp & 0xff);
  777. tmp = clamp_t(s32, offset_q0 << 4, S8_MIN, S8_MAX);
  778. rtw89_phy_write32_idx(rtwdev, R_RPL_PATHAB, rpl_mask[path], tmp & 0xff, phy_idx);
  779. rtw89_phy_write32_idx(rtwdev, R_RSSI_M_PATHAB, rpl_tb_mask[path], tmp & 0xff, phy_idx);
  780. }
  781. static void rtw8852c_ctrl_ch(struct rtw89_dev *rtwdev,
  782. const struct rtw89_chan *chan,
  783. enum rtw89_phy_idx phy_idx)
  784. {
  785. u8 sco;
  786. u16 central_freq = chan->freq;
  787. u8 central_ch = chan->channel;
  788. u8 band = chan->band_type;
  789. u8 subband = chan->subband_type;
  790. bool is_2g = band == RTW89_BAND_2G;
  791. u8 chan_idx;
  792. if (!central_freq) {
  793. rtw89_warn(rtwdev, "Invalid central_freq\n");
  794. return;
  795. }
  796. if (phy_idx == RTW89_PHY_0) {
  797. /* Path A */
  798. rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_A);
  799. rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_A);
  800. if (is_2g)
  801. rtw89_phy_write32_idx(rtwdev, R_PATH0_BAND_SEL_V1,
  802. B_PATH0_BAND_SEL_MSK_V1, 1,
  803. phy_idx);
  804. else
  805. rtw89_phy_write32_idx(rtwdev, R_PATH0_BAND_SEL_V1,
  806. B_PATH0_BAND_SEL_MSK_V1, 0,
  807. phy_idx);
  808. /* Path B */
  809. if (!rtwdev->dbcc_en) {
  810. rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_B);
  811. rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_B);
  812. if (is_2g)
  813. rtw89_phy_write32_idx(rtwdev,
  814. R_PATH1_BAND_SEL_V1,
  815. B_PATH1_BAND_SEL_MSK_V1,
  816. 1, phy_idx);
  817. else
  818. rtw89_phy_write32_idx(rtwdev,
  819. R_PATH1_BAND_SEL_V1,
  820. B_PATH1_BAND_SEL_MSK_V1,
  821. 0, phy_idx);
  822. rtw89_phy_write32_clr(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
  823. } else {
  824. if (is_2g)
  825. rtw89_phy_write32_clr(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
  826. else
  827. rtw89_phy_write32_set(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
  828. }
  829. /* SCO compensate FC setting */
  830. rtw89_phy_write32_idx(rtwdev, R_FC0_V1, B_FC0_MSK_V1,
  831. central_freq, phy_idx);
  832. /* round_up((1/fc0)*pow(2,18)) */
  833. sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
  834. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_INV, sco,
  835. phy_idx);
  836. } else {
  837. /* Path B */
  838. rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_B);
  839. rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_B);
  840. if (is_2g)
  841. rtw89_phy_write32_idx(rtwdev, R_PATH1_BAND_SEL_V1,
  842. B_PATH1_BAND_SEL_MSK_V1,
  843. 1, phy_idx);
  844. else
  845. rtw89_phy_write32_idx(rtwdev, R_PATH1_BAND_SEL_V1,
  846. B_PATH1_BAND_SEL_MSK_V1,
  847. 0, phy_idx);
  848. /* SCO compensate FC setting */
  849. rtw89_phy_write32_idx(rtwdev, R_FC0_V1, B_FC0_MSK_V1,
  850. central_freq, phy_idx);
  851. /* round_up((1/fc0)*pow(2,18)) */
  852. sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
  853. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_INV, sco,
  854. phy_idx);
  855. }
  856. /* CCK parameters */
  857. if (band == RTW89_BAND_2G) {
  858. if (central_ch == 14) {
  859. rtw89_phy_write32_mask(rtwdev, R_PCOEFF0_V1,
  860. B_PCOEFF01_MSK_V1, 0x3b13ff);
  861. rtw89_phy_write32_mask(rtwdev, R_PCOEFF2_V1,
  862. B_PCOEFF23_MSK_V1, 0x1c42de);
  863. rtw89_phy_write32_mask(rtwdev, R_PCOEFF4_V1,
  864. B_PCOEFF45_MSK_V1, 0xfdb0ad);
  865. rtw89_phy_write32_mask(rtwdev, R_PCOEFF6_V1,
  866. B_PCOEFF67_MSK_V1, 0xf60f6e);
  867. rtw89_phy_write32_mask(rtwdev, R_PCOEFF8_V1,
  868. B_PCOEFF89_MSK_V1, 0xfd8f92);
  869. rtw89_phy_write32_mask(rtwdev, R_PCOEFFA_V1,
  870. B_PCOEFFAB_MSK_V1, 0x2d011);
  871. rtw89_phy_write32_mask(rtwdev, R_PCOEFFC_V1,
  872. B_PCOEFFCD_MSK_V1, 0x1c02c);
  873. rtw89_phy_write32_mask(rtwdev, R_PCOEFFE_V1,
  874. B_PCOEFFEF_MSK_V1, 0xfff00a);
  875. } else {
  876. rtw89_phy_write32_mask(rtwdev, R_PCOEFF0_V1,
  877. B_PCOEFF01_MSK_V1, 0x3d23ff);
  878. rtw89_phy_write32_mask(rtwdev, R_PCOEFF2_V1,
  879. B_PCOEFF23_MSK_V1, 0x29b354);
  880. rtw89_phy_write32_mask(rtwdev, R_PCOEFF4_V1,
  881. B_PCOEFF45_MSK_V1, 0xfc1c8);
  882. rtw89_phy_write32_mask(rtwdev, R_PCOEFF6_V1,
  883. B_PCOEFF67_MSK_V1, 0xfdb053);
  884. rtw89_phy_write32_mask(rtwdev, R_PCOEFF8_V1,
  885. B_PCOEFF89_MSK_V1, 0xf86f9a);
  886. rtw89_phy_write32_mask(rtwdev, R_PCOEFFA_V1,
  887. B_PCOEFFAB_MSK_V1, 0xfaef92);
  888. rtw89_phy_write32_mask(rtwdev, R_PCOEFFC_V1,
  889. B_PCOEFFCD_MSK_V1, 0xfe5fcc);
  890. rtw89_phy_write32_mask(rtwdev, R_PCOEFFE_V1,
  891. B_PCOEFFEF_MSK_V1, 0xffdff5);
  892. }
  893. }
  894. chan_idx = rtw89_encode_chan_idx(rtwdev, chan->primary_channel, band);
  895. rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx, phy_idx);
  896. }
  897. static void rtw8852c_bw_setting(struct rtw89_dev *rtwdev, u8 bw, u8 path)
  898. {
  899. static const u32 adc_sel[2] = {0xC0EC, 0xC1EC};
  900. static const u32 wbadc_sel[2] = {0xC0E4, 0xC1E4};
  901. switch (bw) {
  902. case RTW89_CHANNEL_WIDTH_5:
  903. rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x1);
  904. rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x0);
  905. break;
  906. case RTW89_CHANNEL_WIDTH_10:
  907. rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x2);
  908. rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x1);
  909. break;
  910. case RTW89_CHANNEL_WIDTH_20:
  911. case RTW89_CHANNEL_WIDTH_40:
  912. case RTW89_CHANNEL_WIDTH_80:
  913. case RTW89_CHANNEL_WIDTH_160:
  914. rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x0);
  915. rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x2);
  916. break;
  917. default:
  918. rtw89_warn(rtwdev, "Fail to set ADC\n");
  919. }
  920. }
  921. static void rtw8852c_edcca_per20_bitmap_sifs(struct rtw89_dev *rtwdev, u8 bw,
  922. enum rtw89_phy_idx phy_idx)
  923. {
  924. if (bw == RTW89_CHANNEL_WIDTH_20) {
  925. rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A1, B_SNDCCA_A1_EN, 0xff, phy_idx);
  926. rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A2, B_SNDCCA_A2_VAL, 0, phy_idx);
  927. } else {
  928. rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A1, B_SNDCCA_A1_EN, 0, phy_idx);
  929. rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A2, B_SNDCCA_A2_VAL, 0, phy_idx);
  930. }
  931. }
  932. static void
  933. rtw8852c_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_ch, u8 bw,
  934. enum rtw89_phy_idx phy_idx)
  935. {
  936. u8 mod_sbw = 0;
  937. switch (bw) {
  938. case RTW89_CHANNEL_WIDTH_5:
  939. case RTW89_CHANNEL_WIDTH_10:
  940. case RTW89_CHANNEL_WIDTH_20:
  941. if (bw == RTW89_CHANNEL_WIDTH_5)
  942. mod_sbw = 0x1;
  943. else if (bw == RTW89_CHANNEL_WIDTH_10)
  944. mod_sbw = 0x2;
  945. else if (bw == RTW89_CHANNEL_WIDTH_20)
  946. mod_sbw = 0x0;
  947. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x0,
  948. phy_idx);
  949. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW,
  950. mod_sbw, phy_idx);
  951. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH, 0x0,
  952. phy_idx);
  953. rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
  954. B_PATH0_SAMPL_DLY_T_MSK_V1, 0x3);
  955. rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
  956. B_PATH1_SAMPL_DLY_T_MSK_V1, 0x3);
  957. rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
  958. B_PATH0_BW_SEL_MSK_V1, 0xf);
  959. rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
  960. B_PATH1_BW_SEL_MSK_V1, 0xf);
  961. break;
  962. case RTW89_CHANNEL_WIDTH_40:
  963. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x1,
  964. phy_idx);
  965. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
  966. phy_idx);
  967. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
  968. pri_ch,
  969. phy_idx);
  970. rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
  971. B_PATH0_SAMPL_DLY_T_MSK_V1, 0x3);
  972. rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
  973. B_PATH1_SAMPL_DLY_T_MSK_V1, 0x3);
  974. rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
  975. B_PATH0_BW_SEL_MSK_V1, 0xf);
  976. rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
  977. B_PATH1_BW_SEL_MSK_V1, 0xf);
  978. break;
  979. case RTW89_CHANNEL_WIDTH_80:
  980. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x2,
  981. phy_idx);
  982. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
  983. phy_idx);
  984. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
  985. pri_ch,
  986. phy_idx);
  987. rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
  988. B_PATH0_SAMPL_DLY_T_MSK_V1, 0x2);
  989. rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
  990. B_PATH1_SAMPL_DLY_T_MSK_V1, 0x2);
  991. rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
  992. B_PATH0_BW_SEL_MSK_V1, 0xd);
  993. rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
  994. B_PATH1_BW_SEL_MSK_V1, 0xd);
  995. break;
  996. case RTW89_CHANNEL_WIDTH_160:
  997. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x3,
  998. phy_idx);
  999. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
  1000. phy_idx);
  1001. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
  1002. pri_ch,
  1003. phy_idx);
  1004. rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
  1005. B_PATH0_SAMPL_DLY_T_MSK_V1, 0x1);
  1006. rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
  1007. B_PATH1_SAMPL_DLY_T_MSK_V1, 0x1);
  1008. rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
  1009. B_PATH0_BW_SEL_MSK_V1, 0xb);
  1010. rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
  1011. B_PATH1_BW_SEL_MSK_V1, 0xb);
  1012. break;
  1013. default:
  1014. rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri ch:%d)\n", bw,
  1015. pri_ch);
  1016. }
  1017. if (bw == RTW89_CHANNEL_WIDTH_40) {
  1018. rtw89_phy_write32_idx(rtwdev, R_RX_BW40_2XFFT_EN_V1,
  1019. B_RX_BW40_2XFFT_EN_MSK_V1, 0x1, phy_idx);
  1020. rtw89_phy_write32_idx(rtwdev, R_T2F_GI_COMB, B_T2F_GI_COMB_EN, 1, phy_idx);
  1021. } else {
  1022. rtw89_phy_write32_idx(rtwdev, R_RX_BW40_2XFFT_EN_V1,
  1023. B_RX_BW40_2XFFT_EN_MSK_V1, 0x0, phy_idx);
  1024. rtw89_phy_write32_idx(rtwdev, R_T2F_GI_COMB, B_T2F_GI_COMB_EN, 0, phy_idx);
  1025. }
  1026. if (phy_idx == RTW89_PHY_0) {
  1027. rtw8852c_bw_setting(rtwdev, bw, RF_PATH_A);
  1028. if (!rtwdev->dbcc_en)
  1029. rtw8852c_bw_setting(rtwdev, bw, RF_PATH_B);
  1030. } else {
  1031. rtw8852c_bw_setting(rtwdev, bw, RF_PATH_B);
  1032. }
  1033. rtw8852c_edcca_per20_bitmap_sifs(rtwdev, bw, phy_idx);
  1034. }
  1035. static u32 rtw8852c_spur_freq(struct rtw89_dev *rtwdev,
  1036. const struct rtw89_chan *chan)
  1037. {
  1038. u8 center_chan = chan->channel;
  1039. u8 bw = chan->band_width;
  1040. switch (chan->band_type) {
  1041. case RTW89_BAND_2G:
  1042. if (bw == RTW89_CHANNEL_WIDTH_20) {
  1043. if (center_chan >= 5 && center_chan <= 8)
  1044. return 2440;
  1045. if (center_chan == 13)
  1046. return 2480;
  1047. } else if (bw == RTW89_CHANNEL_WIDTH_40) {
  1048. if (center_chan >= 3 && center_chan <= 10)
  1049. return 2440;
  1050. }
  1051. break;
  1052. case RTW89_BAND_5G:
  1053. if (center_chan == 151 || center_chan == 153 ||
  1054. center_chan == 155 || center_chan == 163)
  1055. return 5760;
  1056. break;
  1057. case RTW89_BAND_6G:
  1058. if (center_chan == 195 || center_chan == 197 ||
  1059. center_chan == 199 || center_chan == 207)
  1060. return 6920;
  1061. break;
  1062. default:
  1063. break;
  1064. }
  1065. return 0;
  1066. }
  1067. #define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
  1068. #define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
  1069. #define MAX_TONE_NUM 2048
  1070. static void rtw8852c_set_csi_tone_idx(struct rtw89_dev *rtwdev,
  1071. const struct rtw89_chan *chan,
  1072. enum rtw89_phy_idx phy_idx)
  1073. {
  1074. u32 spur_freq;
  1075. s32 freq_diff, csi_idx, csi_tone_idx;
  1076. spur_freq = rtw8852c_spur_freq(rtwdev, chan);
  1077. if (spur_freq == 0) {
  1078. rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_EN, B_SEG0CSI_EN, 0, phy_idx);
  1079. return;
  1080. }
  1081. freq_diff = (spur_freq - chan->freq) * 1000000;
  1082. csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125);
  1083. s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx);
  1084. rtw89_phy_write32_idx(rtwdev, R_SEG0CSI, B_SEG0CSI_IDX, csi_tone_idx, phy_idx);
  1085. rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_EN, B_SEG0CSI_EN, 1, phy_idx);
  1086. }
  1087. static const struct rtw89_nbi_reg_def rtw8852c_nbi_reg_def[] = {
  1088. [RF_PATH_A] = {
  1089. .notch1_idx = {0x4C14, 0xFF},
  1090. .notch1_frac_idx = {0x4C14, 0xC00},
  1091. .notch1_en = {0x4C14, 0x1000},
  1092. .notch2_idx = {0x4C20, 0xFF},
  1093. .notch2_frac_idx = {0x4C20, 0xC00},
  1094. .notch2_en = {0x4C20, 0x1000},
  1095. },
  1096. [RF_PATH_B] = {
  1097. .notch1_idx = {0x4CD8, 0xFF},
  1098. .notch1_frac_idx = {0x4CD8, 0xC00},
  1099. .notch1_en = {0x4CD8, 0x1000},
  1100. .notch2_idx = {0x4CE4, 0xFF},
  1101. .notch2_frac_idx = {0x4CE4, 0xC00},
  1102. .notch2_en = {0x4CE4, 0x1000},
  1103. },
  1104. };
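/* Per-path narrow-band interference (NBI) notch programming: the spur
 * offset is split into a 312.5 kHz tone index plus a 78.125 kHz fractional
 * index; notch2 is used only for certain 160 MHz primary-channel layouts,
 * otherwise notch1, and the unused notch is always disabled.
 */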
static void rtw8852c_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
				      const struct rtw89_chan *chan,
				      enum rtw89_rf_path path)
{
	const struct rtw89_nbi_reg_def *nbi = &rtw8852c_nbi_reg_def[path];
	u32 spur_freq, fc;
	s32 freq_diff;
	s32 nbi_idx, nbi_tone_idx;
	s32 nbi_frac_idx, nbi_frac_tone_idx;
	bool notch2_chk = false;

	spur_freq = rtw8852c_spur_freq(rtwdev, chan);
	if (spur_freq == 0) {
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 0);
		return;
	}

	fc = chan->freq;
	if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
		fc = (spur_freq > fc) ? fc + 40 : fc - 40;
		if ((fc > spur_freq &&
		     chan->channel < chan->primary_channel) ||
		    (fc < spur_freq &&
		     chan->channel > chan->primary_channel))
			notch2_chk = true;
	}

	freq_diff = (spur_freq - fc) * 1000000;
	nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5, &nbi_frac_idx);

	if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
		s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx);
	} else {
		u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
				128 : 256;

		s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx);
	}
	nbi_frac_tone_idx = s32_div_u32_round_closest(nbi_frac_idx, CARRIER_SPACING_78_125);

	if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) {
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_idx.addr,
				       nbi->notch2_idx.mask, nbi_tone_idx);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_frac_idx.addr,
				       nbi->notch2_frac_idx.mask, nbi_frac_tone_idx);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 0);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 1);
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
	} else {
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_idx.addr,
				       nbi->notch1_idx.mask, nbi_tone_idx);
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_frac_idx.addr,
				       nbi->notch1_frac_idx.mask, nbi_frac_tone_idx);
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
		rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 1);
		rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 0);
	}
}
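/* Force a fixed value into both notch filters of the RX path that belongs
 * to the given PHY and then set their enable bits.
 */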
static void rtw8852c_spur_notch(struct rtw89_dev *rtwdev, u32 val,
				enum rtw89_phy_idx phy_idx)
{
	u32 notch;
	u32 notch2;

	if (phy_idx == RTW89_PHY_0) {
		notch = R_PATH0_NOTCH;
		notch2 = R_PATH0_NOTCH2;
	} else {
		notch = R_PATH1_NOTCH;
		notch2 = R_PATH1_NOTCH2;
	}

	rtw89_phy_write32_mask(rtwdev, notch,
			       B_PATH0_NOTCH_VAL | B_PATH0_NOTCH_EN, val);
	rtw89_phy_write32_set(rtwdev, notch, B_PATH0_NOTCH_EN);
	rtw89_phy_write32_mask(rtwdev, notch2,
			       B_PATH0_NOTCH2_VAL | B_PATH0_NOTCH2_EN, val);
	rtw89_phy_write32_set(rtwdev, notch2, B_PATH0_NOTCH2_EN);
}
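/* Spur elimination for the current channel: CSI tone suppression first,
 * then either fixed notch values for the special 160 MHz primary-channel
 * layouts or per-path NBI tone programming; PD boost is kept off only for
 * the UP3X/LOW3X primary positions.
 */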
  1177. static void rtw8852c_spur_elimination(struct rtw89_dev *rtwdev,
  1178. const struct rtw89_chan *chan,
  1179. u8 pri_ch_idx,
  1180. enum rtw89_phy_idx phy_idx)
  1181. {
  1182. rtw8852c_set_csi_tone_idx(rtwdev, chan, phy_idx);
  1183. if (phy_idx == RTW89_PHY_0) {
  1184. if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
  1185. (pri_ch_idx == RTW89_SC_20_LOWER ||
  1186. pri_ch_idx == RTW89_SC_20_UP3X)) {
  1187. rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_0);
  1188. if (!rtwdev->dbcc_en)
  1189. rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_1);
  1190. } else if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
  1191. (pri_ch_idx == RTW89_SC_20_UPPER ||
  1192. pri_ch_idx == RTW89_SC_20_LOW3X)) {
  1193. rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_0);
  1194. if (!rtwdev->dbcc_en)
  1195. rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_1);
  1196. } else {
  1197. rtw8852c_set_nbi_tone_idx(rtwdev, chan, RF_PATH_A);
  1198. if (!rtwdev->dbcc_en)
  1199. rtw8852c_set_nbi_tone_idx(rtwdev, chan,
  1200. RF_PATH_B);
  1201. }
  1202. } else {
  1203. if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
  1204. (pri_ch_idx == RTW89_SC_20_LOWER ||
  1205. pri_ch_idx == RTW89_SC_20_UP3X)) {
  1206. rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_1);
  1207. } else if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
  1208. (pri_ch_idx == RTW89_SC_20_UPPER ||
  1209. pri_ch_idx == RTW89_SC_20_LOW3X)) {
  1210. rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_1);
  1211. } else {
  1212. rtw8852c_set_nbi_tone_idx(rtwdev, chan, RF_PATH_B);
  1213. }
  1214. }
  1215. if (pri_ch_idx == RTW89_SC_20_UP3X || pri_ch_idx == RTW89_SC_20_LOW3X)
  1216. rtw89_phy_write32_idx(rtwdev, R_PD_BOOST_EN, B_PD_BOOST_EN, 0, phy_idx);
  1217. else
  1218. rtw89_phy_write32_idx(rtwdev, R_PD_BOOST_EN, B_PD_BOOST_EN, 1, phy_idx);
  1219. }
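/* 5 MHz mask: enabled for 40 MHz always and for 80 MHz only when the
 * primary 20 MHz channel sits at the upmost/lowest position; mask_5m_low
 * selects which edge of the band gets masked.
 */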
  1220. static void rtw8852c_5m_mask(struct rtw89_dev *rtwdev,
  1221. const struct rtw89_chan *chan,
  1222. enum rtw89_phy_idx phy_idx)
  1223. {
  1224. u8 pri_ch = chan->pri_ch_idx;
  1225. bool mask_5m_low;
  1226. bool mask_5m_en;
  1227. switch (chan->band_width) {
  1228. case RTW89_CHANNEL_WIDTH_40:
  1229. mask_5m_en = true;
  1230. mask_5m_low = pri_ch == RTW89_SC_20_LOWER;
  1231. break;
  1232. case RTW89_CHANNEL_WIDTH_80:
  1233. mask_5m_en = pri_ch == RTW89_SC_20_UPMOST ||
  1234. pri_ch == RTW89_SC_20_LOWEST;
  1235. mask_5m_low = pri_ch == RTW89_SC_20_LOWEST;
  1236. break;
  1237. default:
  1238. mask_5m_en = false;
  1239. mask_5m_low = false;
  1240. break;
  1241. }
  1242. if (!mask_5m_en) {
  1243. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x0);
  1244. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x0);
  1245. rtw89_phy_write32_idx(rtwdev, R_ASSIGN_SBD_OPT,
  1246. B_ASSIGN_SBD_OPT_EN, 0x0, phy_idx);
  1247. } else {
  1248. if (mask_5m_low) {
  1249. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_TH, 0x4);
  1250. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x1);
  1251. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB2, 0x0);
  1252. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB0, 0x1);
  1253. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_TH, 0x4);
  1254. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x1);
  1255. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB2, 0x0);
  1256. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB0, 0x1);
  1257. } else {
  1258. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_TH, 0x4);
  1259. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x1);
  1260. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB2, 0x1);
  1261. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB0, 0x0);
  1262. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_TH, 0x4);
  1263. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x1);
  1264. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB2, 0x1);
  1265. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB0, 0x0);
  1266. }
  1267. rtw89_phy_write32_idx(rtwdev, R_ASSIGN_SBD_OPT, B_ASSIGN_SBD_OPT_EN, 0x1, phy_idx);
  1268. }
  1269. }
  1270. static void rtw8852c_bb_reset_all(struct rtw89_dev *rtwdev,
  1271. enum rtw89_phy_idx phy_idx)
  1272. {
  1273. /*HW SI reset*/
  1274. rtw89_phy_write32_mask(rtwdev, R_S0_HW_SI_DIS, B_S0_HW_SI_DIS_W_R_TRIG,
  1275. 0x7);
  1276. rtw89_phy_write32_mask(rtwdev, R_S1_HW_SI_DIS, B_S1_HW_SI_DIS_W_R_TRIG,
  1277. 0x7);
  1278. udelay(1);
  1279. rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
  1280. phy_idx);
  1281. rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0,
  1282. phy_idx);
  1283. /*HW SI reset*/
  1284. rtw89_phy_write32_mask(rtwdev, R_S0_HW_SI_DIS, B_S0_HW_SI_DIS_W_R_TRIG,
  1285. 0x0);
  1286. rtw89_phy_write32_mask(rtwdev, R_S1_HW_SI_DIS, B_S1_HW_SI_DIS_W_R_TRIG,
  1287. 0x0);
  1288. rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
  1289. phy_idx);
  1290. }
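/* Gate or release the BB around a channel switch: the disable path blocks
 * CCA and PD hits and asserts the HW SI trigger before dropping the async
 * reset; the enable path releases the reset and restores PD (and, on
 * 2 GHz only, CCA).
 */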
static void rtw8852c_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
				 enum rtw89_phy_idx phy_idx, bool en)
{
	if (en) {
		rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS,
				      B_S0_HW_SI_DIS_W_R_TRIG, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_S1_HW_SI_DIS,
				      B_S1_HW_SI_DIS_W_R_TRIG, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
				      phy_idx);
		if (band == RTW89_BAND_2G)
			rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0);
	} else {
		rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1);
		rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS,
				      B_S0_HW_SI_DIS_W_R_TRIG, 0x7, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_S1_HW_SI_DIS,
				      B_S1_HW_SI_DIS_W_R_TRIG, 0x7, phy_idx);
		fsleep(1);
		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0,
				      phy_idx);
	}
}
  1316. static void rtw8852c_bb_reset(struct rtw89_dev *rtwdev,
  1317. enum rtw89_phy_idx phy_idx)
  1318. {
  1319. rtw8852c_bb_reset_all(rtwdev, phy_idx);
  1320. }
  1321. static
  1322. void rtw8852c_bb_gpio_trsw(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
  1323. u8 tx_path_en, u8 trsw_tx,
  1324. u8 trsw_rx, u8 trsw, u8 trsw_b)
  1325. {
  1326. static const u32 path_cr_bases[] = {0x5868, 0x7868};
  1327. u32 mask_ofst = 16;
  1328. u32 cr;
  1329. u32 val;
  1330. if (path >= ARRAY_SIZE(path_cr_bases))
  1331. return;
  1332. cr = path_cr_bases[path];
  1333. mask_ofst += (tx_path_en * 4 + trsw_tx * 2 + trsw_rx) * 2;
  1334. val = FIELD_PREP(B_P0_TRSW_A, trsw) | FIELD_PREP(B_P0_TRSW_B, trsw_b);
  1335. rtw89_phy_write32_mask(rtwdev, cr, (B_P0_TRSW_A | B_P0_TRSW_B) << mask_ofst, val);
  1336. }
  1337. enum rtw8852c_rfe_src {
  1338. PAPE_RFM,
  1339. TRSW_RFM,
  1340. LNAON_RFM,
  1341. };
  1342. static
  1343. void rtw8852c_bb_gpio_rfm(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
  1344. enum rtw8852c_rfe_src src, u8 dis_tx_gnt_wl,
  1345. u8 active_tx_opt, u8 act_bt_en, u8 rfm_output_val)
  1346. {
  1347. static const u32 path_cr_bases[] = {0x5894, 0x7894};
  1348. static const u32 masks[] = {0, 8, 16};
  1349. u32 mask, mask_ofst;
  1350. u32 cr;
  1351. u32 val;
  1352. if (src >= ARRAY_SIZE(masks) || path >= ARRAY_SIZE(path_cr_bases))
  1353. return;
  1354. mask_ofst = masks[src];
  1355. cr = path_cr_bases[path];
  1356. val = FIELD_PREP(B_P0_RFM_DIS_WL, dis_tx_gnt_wl) |
  1357. FIELD_PREP(B_P0_RFM_TX_OPT, active_tx_opt) |
  1358. FIELD_PREP(B_P0_RFM_BT_EN, act_bt_en) |
  1359. FIELD_PREP(B_P0_RFM_OUT, rfm_output_val);
  1360. mask = 0xff << mask_ofst;
  1361. rtw89_phy_write32_mask(rtwdev, cr, mask, val);
  1362. }
  1363. static void rtw8852c_bb_gpio_init(struct rtw89_dev *rtwdev)
  1364. {
  1365. static const u32 cr_bases[] = {0x5800, 0x7800};
  1366. u32 addr;
  1367. u8 i;
  1368. for (i = 0; i < ARRAY_SIZE(cr_bases); i++) {
  1369. addr = cr_bases[i];
  1370. rtw89_phy_write32_set(rtwdev, (addr | 0x68), B_P0_TRSW_A);
  1371. rtw89_phy_write32_clr(rtwdev, (addr | 0x68), B_P0_TRSW_X);
  1372. rtw89_phy_write32_clr(rtwdev, (addr | 0x68), B_P0_TRSW_SO_A2);
  1373. rtw89_phy_write32(rtwdev, (addr | 0x80), 0x77777777);
  1374. rtw89_phy_write32(rtwdev, (addr | 0x84), 0x77777777);
  1375. }
  1376. rtw89_phy_write32(rtwdev, R_RFE_E_A2, 0xffffffff);
  1377. rtw89_phy_write32(rtwdev, R_RFE_O_SEL_A2, 0);
  1378. rtw89_phy_write32(rtwdev, R_RFE_SEL0_A2, 0);
  1379. rtw89_phy_write32(rtwdev, R_RFE_SEL32_A2, 0);
  1380. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 0, 0, 0, 1);
  1381. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 0, 1, 1, 0);
  1382. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 1, 0, 1, 0);
  1383. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 1, 1, 1, 0);
  1384. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 0, 0, 0, 1);
  1385. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 0, 1, 1, 0);
  1386. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 1, 0, 1, 0);
  1387. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 1, 1, 1, 0);
  1388. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 0, 0, 0, 1);
  1389. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 0, 1, 1, 0);
  1390. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 1, 0, 1, 0);
  1391. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 1, 1, 1, 0);
  1392. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 0, 0, 0, 1);
  1393. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 0, 1, 1, 0);
  1394. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 1, 0, 1, 0);
  1395. rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 1, 1, 1, 0);
  1396. rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, PAPE_RFM, 0, 0, 0, 0x0);
  1397. rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, TRSW_RFM, 0, 0, 0, 0x4);
  1398. rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, LNAON_RFM, 0, 0, 0, 0x8);
  1399. rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, PAPE_RFM, 0, 0, 0, 0x0);
  1400. rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, TRSW_RFM, 0, 0, 0, 0x4);
  1401. rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, LNAON_RFM, 0, 0, 0, 0x8);
  1402. }
  1403. static void rtw8852c_bb_macid_ctrl_init(struct rtw89_dev *rtwdev,
  1404. enum rtw89_phy_idx phy_idx)
  1405. {
  1406. u32 addr;
  1407. for (addr = R_AX_PWR_MACID_LMT_TABLE0;
  1408. addr <= R_AX_PWR_MACID_LMT_TABLE127; addr += 4)
  1409. rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, 0);
  1410. }
  1411. static void rtw8852c_bb_sethw(struct rtw89_dev *rtwdev)
  1412. {
  1413. struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
  1414. rtw89_phy_write32_set(rtwdev, R_DBCC_80P80_SEL_EVM_RPT,
  1415. B_DBCC_80P80_SEL_EVM_RPT_EN);
  1416. rtw89_phy_write32_set(rtwdev, R_DBCC_80P80_SEL_EVM_RPT2,
  1417. B_DBCC_80P80_SEL_EVM_RPT2_EN);
  1418. rtw8852c_bb_macid_ctrl_init(rtwdev, RTW89_PHY_0);
  1419. rtw8852c_bb_gpio_init(rtwdev);
  1420. /* read these registers after loading BB parameters */
  1421. gain->offset_base[RTW89_PHY_0] =
  1422. rtw89_phy_read32_mask(rtwdev, R_RPL_BIAS_COMP, B_RPL_BIAS_COMP_MASK);
  1423. gain->offset_base[RTW89_PHY_1] =
  1424. rtw89_phy_read32_mask(rtwdev, R_RPL_BIAS_COMP1, B_RPL_BIAS_COMP1_MASK);
  1425. }
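/* BB part of a channel switch: program SCO/CCK (2 GHz only), channel and
 * bandwidth, toggle CCK and the PD arbiter, run spur elimination, BTG and
 * 5 MHz mask setup, handle the P80-at-high-frequency case for 160 MHz on
 * non-CAV silicon, toggle the per-path TX power reset bits, set R_MUIC for
 * 6 GHz, select the TMAC TX path and finish with a full BB reset.
 */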
  1426. static void rtw8852c_set_channel_bb(struct rtw89_dev *rtwdev,
  1427. const struct rtw89_chan *chan,
  1428. enum rtw89_phy_idx phy_idx)
  1429. {
  1430. static const u32 ru_alloc_msk[2] = {B_P80_AT_HIGH_FREQ_RU_ALLOC_PHY0,
  1431. B_P80_AT_HIGH_FREQ_RU_ALLOC_PHY1};
  1432. struct rtw89_hal *hal = &rtwdev->hal;
  1433. bool cck_en = chan->band_type == RTW89_BAND_2G;
  1434. u8 pri_ch_idx = chan->pri_ch_idx;
  1435. u32 mask, reg;
  1436. u8 ntx_path;
  1437. if (chan->band_type == RTW89_BAND_2G)
  1438. rtw8852c_ctrl_sco_cck(rtwdev, chan->channel,
  1439. chan->primary_channel,
  1440. chan->band_width);
  1441. rtw8852c_ctrl_ch(rtwdev, chan, phy_idx);
  1442. rtw8852c_ctrl_bw(rtwdev, pri_ch_idx, chan->band_width, phy_idx);
  1443. if (cck_en) {
  1444. rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1);
  1445. rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0);
  1446. rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF,
  1447. B_PD_ARBITER_OFF, 0x0, phy_idx);
  1448. } else {
  1449. rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0);
  1450. rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 1);
  1451. rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF,
  1452. B_PD_ARBITER_OFF, 0x1, phy_idx);
  1453. }
  1454. rtw8852c_spur_elimination(rtwdev, chan, pri_ch_idx, phy_idx);
  1455. rtw8852c_ctrl_btg(rtwdev, chan->band_type == RTW89_BAND_2G);
  1456. rtw8852c_5m_mask(rtwdev, chan, phy_idx);
  1457. if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
  1458. rtwdev->hal.cv != CHIP_CAV) {
  1459. rtw89_phy_write32_idx(rtwdev, R_P80_AT_HIGH_FREQ,
  1460. B_P80_AT_HIGH_FREQ, 0x0, phy_idx);
  1461. reg = rtw89_mac_reg_by_idx(rtwdev, R_P80_AT_HIGH_FREQ_BB_WRP, phy_idx);
  1462. if (chan->primary_channel > chan->channel) {
  1463. rtw89_phy_write32_mask(rtwdev,
  1464. R_P80_AT_HIGH_FREQ_RU_ALLOC,
  1465. ru_alloc_msk[phy_idx], 1);
  1466. rtw89_write32_mask(rtwdev, reg,
  1467. B_P80_AT_HIGH_FREQ_BB_WRP, 1);
  1468. } else {
  1469. rtw89_phy_write32_mask(rtwdev,
  1470. R_P80_AT_HIGH_FREQ_RU_ALLOC,
  1471. ru_alloc_msk[phy_idx], 0);
  1472. rtw89_write32_mask(rtwdev, reg,
  1473. B_P80_AT_HIGH_FREQ_BB_WRP, 0);
  1474. }
  1475. }
  1476. if (chan->band_type == RTW89_BAND_6G &&
  1477. chan->band_width == RTW89_CHANNEL_WIDTH_160)
  1478. rtw89_phy_write32_idx(rtwdev, R_CDD_EVM_CHK_EN,
  1479. B_CDD_EVM_CHK_EN, 0, phy_idx);
  1480. else
  1481. rtw89_phy_write32_idx(rtwdev, R_CDD_EVM_CHK_EN,
  1482. B_CDD_EVM_CHK_EN, 1, phy_idx);
  1483. if (!rtwdev->dbcc_en) {
  1484. mask = B_P0_TXPW_RSTB_TSSI | B_P0_TXPW_RSTB_MANON;
  1485. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x1);
  1486. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x3);
  1487. mask = B_P1_TXPW_RSTB_TSSI | B_P1_TXPW_RSTB_MANON;
  1488. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x1);
  1489. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x3);
  1490. } else {
  1491. if (phy_idx == RTW89_PHY_0) {
  1492. mask = B_P0_TXPW_RSTB_TSSI | B_P0_TXPW_RSTB_MANON;
  1493. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x1);
  1494. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x3);
  1495. } else {
  1496. mask = B_P1_TXPW_RSTB_TSSI | B_P1_TXPW_RSTB_MANON;
  1497. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x1);
  1498. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x3);
  1499. }
  1500. }
  1501. if (chan->band_type == RTW89_BAND_6G)
  1502. rtw89_phy_write32_set(rtwdev, R_MUIC, B_MUIC_EN);
  1503. else
  1504. rtw89_phy_write32_clr(rtwdev, R_MUIC, B_MUIC_EN);
  1505. if (hal->antenna_tx)
  1506. ntx_path = hal->antenna_tx;
  1507. else
  1508. ntx_path = chan->band_type == RTW89_BAND_6G ? RF_B : RF_AB;
  1509. rtw8852c_ctrl_tx_path_tmac(rtwdev, ntx_path, (enum rtw89_mac_idx)phy_idx);
  1510. rtw8852c_bb_reset_all(rtwdev, phy_idx);
  1511. }
  1512. static void rtw8852c_set_channel(struct rtw89_dev *rtwdev,
  1513. const struct rtw89_chan *chan,
  1514. enum rtw89_mac_idx mac_idx,
  1515. enum rtw89_phy_idx phy_idx)
  1516. {
  1517. rtw8852c_set_channel_mac(rtwdev, chan, mac_idx);
  1518. rtw8852c_set_channel_bb(rtwdev, chan, phy_idx);
  1519. rtw8852c_set_channel_rf(rtwdev, chan, phy_idx);
  1520. }
  1521. static void rtw8852c_dfs_en(struct rtw89_dev *rtwdev, bool en)
  1522. {
  1523. if (en)
  1524. rtw89_phy_write32_mask(rtwdev, R_UPD_P0, B_UPD_P0_EN, 1);
  1525. else
  1526. rtw89_phy_write32_mask(rtwdev, R_UPD_P0, B_UPD_P0_EN, 0);
  1527. }
  1528. static void rtw8852c_adc_en(struct rtw89_dev *rtwdev, bool en)
  1529. {
  1530. if (en)
  1531. rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RST,
  1532. 0x0);
  1533. else
  1534. rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RST,
  1535. 0xf);
  1536. }
  1537. static void rtw8852c_set_channel_help(struct rtw89_dev *rtwdev, bool enter,
  1538. struct rtw89_channel_help_params *p,
  1539. const struct rtw89_chan *chan,
  1540. enum rtw89_mac_idx mac_idx,
  1541. enum rtw89_phy_idx phy_idx)
  1542. {
  1543. if (enter) {
  1544. rtw89_chip_stop_sch_tx(rtwdev, mac_idx, &p->tx_en,
  1545. RTW89_SCH_TX_SEL_ALL);
  1546. rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false);
  1547. rtw8852c_dfs_en(rtwdev, false);
  1548. rtw8852c_tssi_cont_en_phyidx(rtwdev, false, phy_idx);
  1549. rtw8852c_adc_en(rtwdev, false);
  1550. fsleep(40);
  1551. rtw8852c_bb_reset_en(rtwdev, chan->band_type, phy_idx, false);
  1552. } else {
  1553. rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true);
  1554. rtw8852c_adc_en(rtwdev, true);
  1555. rtw8852c_dfs_en(rtwdev, true);
  1556. rtw8852c_tssi_cont_en_phyidx(rtwdev, true, phy_idx);
  1557. rtw8852c_bb_reset_en(rtwdev, chan->band_type, phy_idx, true);
  1558. rtw89_chip_resume_sch_tx(rtwdev, mac_idx, p->tx_en);
  1559. }
  1560. }
  1561. static void rtw8852c_rfk_init(struct rtw89_dev *rtwdev)
  1562. {
  1563. struct rtw89_rfk_mcc_info *rfk_mcc = &rtwdev->rfk_mcc;
  1564. rtwdev->is_tssi_mode[RF_PATH_A] = false;
  1565. rtwdev->is_tssi_mode[RF_PATH_B] = false;
  1566. memset(rfk_mcc, 0, sizeof(*rfk_mcc));
  1567. rtw8852c_lck_init(rtwdev);
  1568. rtw8852c_dpk_init(rtwdev);
  1569. rtw8852c_rck(rtwdev);
  1570. rtw8852c_dack(rtwdev);
  1571. rtw8852c_rx_dck(rtwdev, RTW89_PHY_0, false);
  1572. }
  1573. static void rtw8852c_rfk_channel(struct rtw89_dev *rtwdev)
  1574. {
  1575. enum rtw89_phy_idx phy_idx = RTW89_PHY_0;
  1576. rtw8852c_mcc_get_ch_info(rtwdev, phy_idx);
  1577. rtw8852c_rx_dck(rtwdev, phy_idx, false);
  1578. rtw8852c_iqk(rtwdev, phy_idx);
  1579. rtw8852c_tssi(rtwdev, phy_idx);
  1580. rtw8852c_dpk(rtwdev, phy_idx);
  1581. rtw89_fw_h2c_rf_ntfy_mcc(rtwdev);
  1582. }
  1583. static void rtw8852c_rfk_band_changed(struct rtw89_dev *rtwdev,
  1584. enum rtw89_phy_idx phy_idx)
  1585. {
  1586. rtw8852c_tssi_scan(rtwdev, phy_idx);
  1587. }
  1588. static void rtw8852c_rfk_scan(struct rtw89_dev *rtwdev, bool start)
  1589. {
  1590. rtw8852c_wifi_scan_notify(rtwdev, start, RTW89_PHY_0);
  1591. }
  1592. static void rtw8852c_rfk_track(struct rtw89_dev *rtwdev)
  1593. {
  1594. rtw8852c_dpk_track(rtwdev);
  1595. rtw8852c_lck_track(rtwdev);
  1596. rtw8852c_rx_dck_track(rtwdev);
  1597. }
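/* Build the TX power reference word for one reference value: the reference
 * is converted to an s(10,3) power code, split into an RF coarse code word
 * (bits [8:3], clamped to 15..63) and a BB fine code word (bits [2:0]), and
 * packed as tssi_ofst_cw[26:18] | pwr_cw[17:9] | ref[8:0].  With ref = 0 the
 * RF code is simply base_cw_0db (0x27) and the BB code is 0.
 */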
static u32 rtw8852c_bb_cal_txpwr_ref(struct rtw89_dev *rtwdev,
				     enum rtw89_phy_idx phy_idx, s16 ref)
{
	s8 ofst_int = 0;
	u8 base_cw_0db = 0x27;
	u16 tssi_16dbm_cw = 0x12c;
	s16 pwr_s10_3 = 0;
	s16 rf_pwr_cw = 0;
	u16 bb_pwr_cw = 0;
	u32 pwr_cw = 0;
	u32 tssi_ofst_cw = 0;

	pwr_s10_3 = (ref << 1) + (s16)(ofst_int) + (s16)(base_cw_0db << 3);
	bb_pwr_cw = FIELD_GET(GENMASK(2, 0), pwr_s10_3);
	rf_pwr_cw = FIELD_GET(GENMASK(8, 3), pwr_s10_3);
	rf_pwr_cw = clamp_t(s16, rf_pwr_cw, 15, 63);
	pwr_cw = (rf_pwr_cw << 3) | bb_pwr_cw;

	tssi_ofst_cw = (u32)((s16)tssi_16dbm_cw + (ref << 1) - (16 << 3));
	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] tssi_ofst_cw=%d rf_cw=0x%x bb_cw=0x%x\n",
		    tssi_ofst_cw, rf_pwr_cw, bb_pwr_cw);

	return (tssi_ofst_cw << 18) | (pwr_cw << 9) | (ref & GENMASK(8, 0));
}
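/* UL trigger-based (UL TB) TX power offset: offsets outside -32..31 are
 * rejected, the 2TX value backs the 1TX offset off by 3 (floored at -32),
 * and both values are scaled by four (<< 2) and written into each of the
 * four byte lanes of the 1T and 2T tables.
 */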
static
void rtw8852c_set_txpwr_ul_tb_offset(struct rtw89_dev *rtwdev,
				     s8 pw_ofst, enum rtw89_mac_idx mac_idx)
{
	s8 pw_ofst_2tx;
	s8 val_1t;
	s8 val_2t;
	u32 reg;
	u8 i;

	if (pw_ofst < -32 || pw_ofst > 31) {
		rtw89_warn(rtwdev, "[ULTB] Err pwr_offset=%d\n", pw_ofst);
		return;
	}
	val_1t = pw_ofst << 2;
	pw_ofst_2tx = max(pw_ofst - 3, -32);
	val_2t = pw_ofst_2tx << 2;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[ULTB] val_1tx=0x%x\n", val_1t);
	rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[ULTB] val_2tx=0x%x\n", val_2t);

	for (i = 0; i < 4; i++) {
		/* 1TX */
		reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PWR_UL_TB_1T, mac_idx);
		rtw89_write32_mask(rtwdev, reg,
				   B_AX_PWR_UL_TB_1T_V1_MASK << (8 * i),
				   val_1t);
		/* 2TX */
		reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PWR_UL_TB_2T, mac_idx);
		rtw89_write32_mask(rtwdev, reg,
				   B_AX_PWR_UL_TB_2T_V1_MASK << (8 * i),
				   val_2t);
	}
}
  1651. static void rtw8852c_set_txpwr_ref(struct rtw89_dev *rtwdev,
  1652. enum rtw89_phy_idx phy_idx)
  1653. {
  1654. static const u32 addr[RF_PATH_NUM_8852C] = {0x5800, 0x7800};
  1655. const u32 mask = 0x7FFFFFF;
  1656. const u8 ofst_ofdm = 0x4;
  1657. const u8 ofst_cck = 0x8;
  1658. s16 ref_ofdm = 0;
  1659. s16 ref_cck = 0;
  1660. u32 val;
  1661. u8 i;
  1662. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n");
  1663. rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_AX_PWR_RATE_CTRL,
  1664. GENMASK(27, 10), 0x0);
  1665. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set bb ofdm txpwr ref\n");
  1666. val = rtw8852c_bb_cal_txpwr_ref(rtwdev, phy_idx, ref_ofdm);
  1667. for (i = 0; i < RF_PATH_NUM_8852C; i++)
  1668. rtw89_phy_write32_idx(rtwdev, addr[i] + ofst_ofdm, mask, val,
  1669. phy_idx);
  1670. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set bb cck txpwr ref\n");
  1671. val = rtw8852c_bb_cal_txpwr_ref(rtwdev, phy_idx, ref_cck);
  1672. for (i = 0; i < RF_PATH_NUM_8852C; i++)
  1673. rtw89_phy_write32_idx(rtwdev, addr[i] + ofst_cck, mask, val,
  1674. phy_idx);
  1675. }
  1676. static void rtw8852c_bb_set_tx_shape_dfir(struct rtw89_dev *rtwdev,
  1677. const struct rtw89_chan *chan,
  1678. u8 tx_shape_idx,
  1679. enum rtw89_phy_idx phy_idx)
  1680. {
  1681. #define __DFIR_CFG_MASK 0xffffff
  1682. #define __DFIR_CFG_NR 8
  1683. #define __DECL_DFIR_VAR(_prefix, _name, _val...) \
  1684. static const u32 _prefix ## _ ## _name[] = {_val}; \
  1685. static_assert(ARRAY_SIZE(_prefix ## _ ## _name) == __DFIR_CFG_NR)
  1686. #define __DECL_DFIR_PARAM(_name, _val...) __DECL_DFIR_VAR(param, _name, _val)
  1687. #define __DECL_DFIR_ADDR(_name, _val...) __DECL_DFIR_VAR(addr, _name, _val)
  1688. __DECL_DFIR_PARAM(flat,
  1689. 0x003D23FF, 0x0029B354, 0x000FC1C8, 0x00FDB053,
  1690. 0x00F86F9A, 0x00FAEF92, 0x00FE5FCC, 0x00FFDFF5);
  1691. __DECL_DFIR_PARAM(sharp,
  1692. 0x003D83FF, 0x002C636A, 0x0013F204, 0x00008090,
  1693. 0x00F87FB0, 0x00F99F83, 0x00FDBFBA, 0x00003FF5);
  1694. __DECL_DFIR_PARAM(sharp_14,
  1695. 0x003B13FF, 0x001C42DE, 0x00FDB0AD, 0x00F60F6E,
  1696. 0x00FD8F92, 0x0002D011, 0x0001C02C, 0x00FFF00A);
  1697. __DECL_DFIR_ADDR(filter,
  1698. 0x45BC, 0x45CC, 0x45D0, 0x45D4, 0x45D8, 0x45C0,
  1699. 0x45C4, 0x45C8);
  1700. u8 ch = chan->channel;
  1701. const u32 *param;
  1702. int i;
  1703. if (ch > 14) {
  1704. rtw89_warn(rtwdev,
  1705. "set tx shape dfir by unknown ch: %d on 2G\n", ch);
  1706. return;
  1707. }
  1708. if (ch == 14)
  1709. param = param_sharp_14;
  1710. else
  1711. param = tx_shape_idx == 0 ? param_flat : param_sharp;
  1712. for (i = 0; i < __DFIR_CFG_NR; i++) {
  1713. rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
  1714. "set tx shape dfir: 0x%x: 0x%x\n", addr_filter[i],
  1715. param[i]);
  1716. rtw89_phy_write32_idx(rtwdev, addr_filter[i], __DFIR_CFG_MASK,
  1717. param[i], phy_idx);
  1718. }
  1719. #undef __DECL_DFIR_ADDR
  1720. #undef __DECL_DFIR_PARAM
  1721. #undef __DECL_DFIR_VAR
  1722. #undef __DFIR_CFG_NR
  1723. #undef __DFIR_CFG_MASK
  1724. }
  1725. static void rtw8852c_set_tx_shape(struct rtw89_dev *rtwdev,
  1726. const struct rtw89_chan *chan,
  1727. enum rtw89_phy_idx phy_idx)
  1728. {
  1729. const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms;
  1730. u8 band = chan->band_type;
  1731. u8 regd = rtw89_regd_get(rtwdev, band);
  1732. u8 tx_shape_cck = (*rfe_parms->tx_shape.lmt)[band][RTW89_RS_CCK][regd];
  1733. u8 tx_shape_ofdm = (*rfe_parms->tx_shape.lmt)[band][RTW89_RS_OFDM][regd];
  1734. if (band == RTW89_BAND_2G)
  1735. rtw8852c_bb_set_tx_shape_dfir(rtwdev, chan, tx_shape_cck, phy_idx);
  1736. rtw89_phy_tssi_ctrl_set_bandedge_cfg(rtwdev,
  1737. (enum rtw89_mac_idx)phy_idx,
  1738. tx_shape_ofdm);
  1739. rtw89_phy_write32_set(rtwdev, R_P0_DAC_COMP_POST_DPD_EN,
  1740. B_P0_DAC_COMP_POST_DPD_EN);
  1741. rtw89_phy_write32_set(rtwdev, R_P1_DAC_COMP_POST_DPD_EN,
  1742. B_P1_DAC_COMP_POST_DPD_EN);
  1743. }
  1744. static void rtw8852c_set_txpwr(struct rtw89_dev *rtwdev,
  1745. const struct rtw89_chan *chan,
  1746. enum rtw89_phy_idx phy_idx)
  1747. {
  1748. rtw89_phy_set_txpwr_byrate(rtwdev, chan, phy_idx);
  1749. rtw89_phy_set_txpwr_offset(rtwdev, chan, phy_idx);
  1750. rtw8852c_set_tx_shape(rtwdev, chan, phy_idx);
  1751. rtw89_phy_set_txpwr_limit(rtwdev, chan, phy_idx);
  1752. rtw89_phy_set_txpwr_limit_ru(rtwdev, chan, phy_idx);
  1753. }
  1754. static void rtw8852c_set_txpwr_ctrl(struct rtw89_dev *rtwdev,
  1755. enum rtw89_phy_idx phy_idx)
  1756. {
  1757. rtw8852c_set_txpwr_ref(rtwdev, phy_idx);
  1758. }
  1759. static void
  1760. rtw8852c_init_tssi_ctrl(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
  1761. {
  1762. static const struct rtw89_reg2_def ctrl_ini[] = {
  1763. {0xD938, 0x00010100},
  1764. {0xD93C, 0x0500D500},
  1765. {0xD940, 0x00000500},
  1766. {0xD944, 0x00000005},
  1767. {0xD94C, 0x00220000},
  1768. {0xD950, 0x00030000},
  1769. };
  1770. u32 addr;
  1771. int i;
  1772. for (addr = R_AX_TSSI_CTRL_HEAD; addr <= R_AX_TSSI_CTRL_TAIL; addr += 4)
  1773. rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, 0);
  1774. for (i = 0; i < ARRAY_SIZE(ctrl_ini); i++)
  1775. rtw89_mac_txpwr_write32(rtwdev, phy_idx, ctrl_ini[i].addr,
  1776. ctrl_ini[i].data);
  1777. rtw89_phy_tssi_ctrl_set_bandedge_cfg(rtwdev,
  1778. (enum rtw89_mac_idx)phy_idx,
  1779. RTW89_TSSI_BANDEDGE_FLAT);
  1780. }
  1781. static int
  1782. rtw8852c_init_txpwr_unit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
  1783. {
  1784. int ret;
  1785. ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_UL_CTRL2, 0x07763333);
  1786. if (ret)
  1787. return ret;
  1788. ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_COEXT_CTRL, 0x01ebf000);
  1789. if (ret)
  1790. return ret;
  1791. ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_UL_CTRL0, 0x0002f8ff);
  1792. if (ret)
  1793. return ret;
  1794. rtw8852c_set_txpwr_ul_tb_offset(rtwdev, 0, phy_idx == RTW89_PHY_1 ?
  1795. RTW89_MAC_1 :
  1796. RTW89_MAC_0);
  1797. rtw8852c_init_tssi_ctrl(rtwdev, phy_idx);
  1798. return 0;
  1799. }
  1800. static void rtw8852c_bb_cfg_rx_path(struct rtw89_dev *rtwdev, u8 rx_path)
  1801. {
  1802. const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
  1803. u8 band = chan->band_type;
  1804. u32 rst_mask0 = B_P0_TXPW_RSTB_MANON | B_P0_TXPW_RSTB_TSSI;
  1805. u32 rst_mask1 = B_P1_TXPW_RSTB_MANON | B_P1_TXPW_RSTB_TSSI;
  1806. if (rtwdev->dbcc_en) {
  1807. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_ANT_RX_SEG0, 1);
  1808. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_ANT_RX_SEG0, 2,
  1809. RTW89_PHY_1);
  1810. rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG0,
  1811. 1);
  1812. rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG1,
  1813. 1);
  1814. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG0, 2,
  1815. RTW89_PHY_1);
  1816. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG1, 2,
  1817. RTW89_PHY_1);
  1818. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
  1819. B_RXHT_MCS_LIMIT, 0);
  1820. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
  1821. B_RXVHT_MCS_LIMIT, 0);
  1822. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_USER_MAX, 8);
  1823. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0);
  1824. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0);
  1825. rtw89_phy_write32_idx(rtwdev, R_RXHT_MCS_LIMIT,
  1826. B_RXHT_MCS_LIMIT, 0, RTW89_PHY_1);
  1827. rtw89_phy_write32_idx(rtwdev, R_RXVHT_MCS_LIMIT,
  1828. B_RXVHT_MCS_LIMIT, 0, RTW89_PHY_1);
  1829. rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHE_USER_MAX, 1,
  1830. RTW89_PHY_1);
  1831. rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0,
  1832. RTW89_PHY_1);
  1833. rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0,
  1834. RTW89_PHY_1);
  1835. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, rst_mask0, 1);
  1836. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, rst_mask0, 3);
  1837. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, rst_mask1, 1);
  1838. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, rst_mask1, 3);
  1839. } else {
  1840. if (rx_path == RF_PATH_A) {
  1841. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
  1842. B_ANT_RX_SEG0, 1);
  1843. rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
  1844. B_ANT_RX_1RCCA_SEG0, 1);
  1845. rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
  1846. B_ANT_RX_1RCCA_SEG1, 1);
  1847. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
  1848. B_RXHT_MCS_LIMIT, 0);
  1849. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
  1850. B_RXVHT_MCS_LIMIT, 0);
  1851. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
  1852. 0);
  1853. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
  1854. 0);
  1855. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
  1856. rst_mask0, 1);
  1857. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
  1858. rst_mask0, 3);
  1859. } else if (rx_path == RF_PATH_B) {
  1860. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
  1861. B_ANT_RX_SEG0, 2);
  1862. rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
  1863. B_ANT_RX_1RCCA_SEG0, 2);
  1864. rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
  1865. B_ANT_RX_1RCCA_SEG1, 2);
  1866. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
  1867. B_RXHT_MCS_LIMIT, 0);
  1868. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
  1869. B_RXVHT_MCS_LIMIT, 0);
  1870. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
  1871. 0);
  1872. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
  1873. 0);
  1874. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
  1875. rst_mask1, 1);
  1876. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
  1877. rst_mask1, 3);
  1878. } else {
  1879. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
  1880. B_ANT_RX_SEG0, 3);
  1881. rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
  1882. B_ANT_RX_1RCCA_SEG0, 3);
  1883. rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
  1884. B_ANT_RX_1RCCA_SEG1, 3);
  1885. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
  1886. B_RXHT_MCS_LIMIT, 1);
  1887. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
  1888. B_RXVHT_MCS_LIMIT, 1);
  1889. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
  1890. 1);
  1891. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
  1892. 1);
  1893. rtw8852c_ctrl_btg(rtwdev, band == RTW89_BAND_2G);
  1894. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
  1895. rst_mask0, 1);
  1896. rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
  1897. rst_mask0, 3);
  1898. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
  1899. rst_mask1, 1);
  1900. rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
  1901. rst_mask1, 3);
  1902. }
  1903. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_USER_MAX, 8);
  1904. }
  1905. }
  1906. static void rtw8852c_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev, u8 tx_path,
  1907. enum rtw89_mac_idx mac_idx)
  1908. {
  1909. struct rtw89_reg2_def path_com[] = {
  1910. {R_AX_PATH_COM0, AX_PATH_COM0_DFVAL},
  1911. {R_AX_PATH_COM1, AX_PATH_COM1_DFVAL},
  1912. {R_AX_PATH_COM2, AX_PATH_COM2_DFVAL},
  1913. {R_AX_PATH_COM3, AX_PATH_COM3_DFVAL},
  1914. {R_AX_PATH_COM4, AX_PATH_COM4_DFVAL},
  1915. {R_AX_PATH_COM5, AX_PATH_COM5_DFVAL},
  1916. {R_AX_PATH_COM6, AX_PATH_COM6_DFVAL},
  1917. {R_AX_PATH_COM7, AX_PATH_COM7_DFVAL},
  1918. {R_AX_PATH_COM8, AX_PATH_COM8_DFVAL},
  1919. {R_AX_PATH_COM9, AX_PATH_COM9_DFVAL},
  1920. {R_AX_PATH_COM10, AX_PATH_COM10_DFVAL},
  1921. {R_AX_PATH_COM11, AX_PATH_COM11_DFVAL},
  1922. };
  1923. u32 addr;
  1924. u32 reg;
  1925. u8 cr_size = ARRAY_SIZE(path_com);
  1926. u8 i = 0;
  1927. rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL_MOD, 0, RTW89_PHY_0);
  1928. rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL_MOD, 0, RTW89_PHY_1);
  1929. for (addr = R_AX_MACID_ANT_TABLE;
  1930. addr <= R_AX_MACID_ANT_TABLE_LAST; addr += 4) {
  1931. reg = rtw89_mac_reg_by_idx(rtwdev, addr, mac_idx);
  1932. rtw89_write32(rtwdev, reg, 0);
  1933. }
  1934. if (tx_path == RF_A) {
  1935. path_com[0].data = AX_PATH_COM0_PATHA;
  1936. path_com[1].data = AX_PATH_COM1_PATHA;
  1937. path_com[2].data = AX_PATH_COM2_PATHA;
  1938. path_com[7].data = AX_PATH_COM7_PATHA;
  1939. path_com[8].data = AX_PATH_COM8_PATHA;
  1940. } else if (tx_path == RF_B) {
  1941. path_com[0].data = AX_PATH_COM0_PATHB;
  1942. path_com[1].data = AX_PATH_COM1_PATHB;
  1943. path_com[2].data = AX_PATH_COM2_PATHB;
  1944. path_com[7].data = AX_PATH_COM7_PATHB;
  1945. path_com[8].data = AX_PATH_COM8_PATHB;
  1946. } else if (tx_path == RF_AB) {
  1947. path_com[0].data = AX_PATH_COM0_PATHAB;
  1948. path_com[1].data = AX_PATH_COM1_PATHAB;
  1949. path_com[2].data = AX_PATH_COM2_PATHAB;
  1950. path_com[7].data = AX_PATH_COM7_PATHAB;
  1951. path_com[8].data = AX_PATH_COM8_PATHAB;
  1952. } else {
  1953. rtw89_warn(rtwdev, "[Invalid Tx Path]Tx Path: %d\n", tx_path);
  1954. return;
  1955. }
  1956. for (i = 0; i < cr_size; i++) {
  1957. rtw89_debug(rtwdev, RTW89_DBG_TSSI, "0x%x = 0x%x\n",
  1958. path_com[i].addr, path_com[i].data);
  1959. reg = rtw89_mac_reg_by_idx(rtwdev, path_com[i].addr, mac_idx);
  1960. rtw89_write32(rtwdev, reg, path_com[i].data);
  1961. }
  1962. }
  1963. static void rtw8852c_bb_ctrl_btc_preagc(struct rtw89_dev *rtwdev, bool bt_en)
  1964. {
  1965. if (bt_en) {
  1966. rtw89_phy_write32_mask(rtwdev, R_PATH0_FRC_FIR_TYPE_V1,
  1967. B_PATH0_FRC_FIR_TYPE_MSK_V1, 0x3);
  1968. rtw89_phy_write32_mask(rtwdev, R_PATH1_FRC_FIR_TYPE_V1,
  1969. B_PATH1_FRC_FIR_TYPE_MSK_V1, 0x3);
  1970. rtw89_phy_write32_mask(rtwdev, R_PATH0_RXBB_V1,
  1971. B_PATH0_RXBB_MSK_V1, 0xf);
  1972. rtw89_phy_write32_mask(rtwdev, R_PATH1_RXBB_V1,
  1973. B_PATH1_RXBB_MSK_V1, 0xf);
  1974. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  1975. B_PATH0_G_LNA6_OP1DB_V1, 0x80);
  1976. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
  1977. B_PATH1_G_LNA6_OP1DB_V1, 0x80);
  1978. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  1979. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x80);
  1980. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA1_LNA6_OP1DB_V1,
  1981. B_PATH0_G_TIA1_LNA6_OP1DB_V1, 0x80);
  1982. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
  1983. B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x80);
  1984. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA1_LNA6_OP1DB_V1,
  1985. B_PATH1_G_TIA1_LNA6_OP1DB_V1, 0x80);
  1986. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_BACKOFF_V1,
  1987. B_PATH0_BT_BACKOFF_V1, 0x780D1E);
  1988. rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_BACKOFF_V1,
  1989. B_PATH1_BT_BACKOFF_V1, 0x780D1E);
  1990. rtw89_phy_write32_mask(rtwdev, R_P0_BACKOFF_IBADC_V1,
  1991. B_P0_BACKOFF_IBADC_V1, 0x34);
  1992. rtw89_phy_write32_mask(rtwdev, R_P1_BACKOFF_IBADC_V1,
  1993. B_P1_BACKOFF_IBADC_V1, 0x34);
  1994. } else {
  1995. rtw89_phy_write32_mask(rtwdev, R_PATH0_FRC_FIR_TYPE_V1,
  1996. B_PATH0_FRC_FIR_TYPE_MSK_V1, 0x0);
  1997. rtw89_phy_write32_mask(rtwdev, R_PATH1_FRC_FIR_TYPE_V1,
  1998. B_PATH1_FRC_FIR_TYPE_MSK_V1, 0x0);
  1999. rtw89_phy_write32_mask(rtwdev, R_PATH0_RXBB_V1,
  2000. B_PATH0_RXBB_MSK_V1, 0x60);
  2001. rtw89_phy_write32_mask(rtwdev, R_PATH1_RXBB_V1,
  2002. B_PATH1_RXBB_MSK_V1, 0x60);
  2003. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  2004. B_PATH0_G_LNA6_OP1DB_V1, 0x1a);
  2005. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
  2006. B_PATH1_G_LNA6_OP1DB_V1, 0x1a);
  2007. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  2008. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x2a);
  2009. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA1_LNA6_OP1DB_V1,
  2010. B_PATH0_G_TIA1_LNA6_OP1DB_V1, 0x2a);
  2011. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
  2012. B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x2a);
  2013. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA1_LNA6_OP1DB_V1,
  2014. B_PATH1_G_TIA1_LNA6_OP1DB_V1, 0x2a);
  2015. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_BACKOFF_V1,
  2016. B_PATH0_BT_BACKOFF_V1, 0x79E99E);
  2017. rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_BACKOFF_V1,
  2018. B_PATH1_BT_BACKOFF_V1, 0x79E99E);
  2019. rtw89_phy_write32_mask(rtwdev, R_P0_BACKOFF_IBADC_V1,
  2020. B_P0_BACKOFF_IBADC_V1, 0x26);
  2021. rtw89_phy_write32_mask(rtwdev, R_P1_BACKOFF_IBADC_V1,
  2022. B_P1_BACKOFF_IBADC_V1, 0x26);
  2023. }
  2024. }
  2025. static void rtw8852c_bb_cfg_txrx_path(struct rtw89_dev *rtwdev)
  2026. {
  2027. struct rtw89_hal *hal = &rtwdev->hal;
  2028. rtw8852c_bb_cfg_rx_path(rtwdev, RF_PATH_AB);
  2029. if (hal->rx_nss == 1) {
  2030. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT, B_RXHT_MCS_LIMIT, 0);
  2031. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT, B_RXVHT_MCS_LIMIT, 0);
  2032. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0);
  2033. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0);
  2034. } else {
  2035. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT, B_RXHT_MCS_LIMIT, 1);
  2036. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT, B_RXVHT_MCS_LIMIT, 1);
  2037. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 1);
  2038. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 1);
  2039. }
  2040. }
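/* Read the RF thermal meter: pulse the trigger bit (1-0-1), give the
 * measurement 200 us to settle, then return the raw thermal value.
 */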
static u8 rtw8852c_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path)
{
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0);
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);

	fsleep(200);

	return rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL);
}
  2049. static void rtw8852c_btc_set_rfe(struct rtw89_dev *rtwdev)
  2050. {
  2051. struct rtw89_btc *btc = &rtwdev->btc;
  2052. struct rtw89_btc_module *module = &btc->mdinfo;
  2053. module->rfe_type = rtwdev->efuse.rfe_type;
  2054. module->cv = rtwdev->hal.cv;
  2055. module->bt_solo = 0;
  2056. module->switch_type = BTC_SWITCH_INTERNAL;
  2057. if (module->rfe_type > 0)
  2058. module->ant.num = (module->rfe_type % 2 ? 2 : 3);
  2059. else
  2060. module->ant.num = 2;
  2061. module->ant.diversity = 0;
  2062. module->ant.isolation = 10;
  2063. if (module->ant.num == 3) {
  2064. module->ant.type = BTC_ANT_DEDICATED;
  2065. module->bt_pos = BTC_BT_ALONE;
  2066. } else {
  2067. module->ant.type = BTC_ANT_SHARED;
  2068. module->bt_pos = BTC_BT_BTG;
  2069. }
  2070. }
  2071. static void rtw8852c_ctrl_btg(struct rtw89_dev *rtwdev, bool btg)
  2072. {
  2073. if (btg) {
  2074. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
  2075. B_PATH0_BT_SHARE_V1, 0x1);
  2076. rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
  2077. B_PATH0_BTG_PATH_V1, 0x0);
  2078. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
  2079. B_PATH1_G_LNA6_OP1DB_V1, 0x20);
  2080. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
  2081. B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x30);
  2082. rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_SHARE_V1,
  2083. B_PATH1_BT_SHARE_V1, 0x1);
  2084. rtw89_phy_write32_mask(rtwdev, R_PATH1_BTG_PATH_V1,
  2085. B_PATH1_BTG_PATH_V1, 0x1);
  2086. rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0);
  2087. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_BT_SHARE, 0x1);
  2088. rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_BT_SEG0, 0x2);
  2089. rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN,
  2090. B_BT_DYN_DC_EST_EN_MSK, 0x1);
  2091. rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
  2092. 0x1);
  2093. } else {
  2094. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
  2095. B_PATH0_BT_SHARE_V1, 0x0);
  2096. rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
  2097. B_PATH0_BTG_PATH_V1, 0x0);
  2098. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
  2099. B_PATH1_G_LNA6_OP1DB_V1, 0x1a);
  2100. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
  2101. B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x2a);
  2102. rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_SHARE_V1,
  2103. B_PATH1_BT_SHARE_V1, 0x0);
  2104. rtw89_phy_write32_mask(rtwdev, R_PATH1_BTG_PATH_V1,
  2105. B_PATH1_BTG_PATH_V1, 0x0);
  2106. rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xf);
  2107. rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P2, 0x4);
  2108. rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_BT_SHARE, 0x0);
  2109. rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_BT_SEG0, 0x0);
  2110. rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN,
  2111. B_BT_DYN_DC_EST_EN_MSK, 0x0);
  2112. rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
  2113. 0x0);
  2114. }
  2115. }
  2116. static
  2117. void rtw8852c_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val)
  2118. {
  2119. rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x20000);
  2120. rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group);
  2121. rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val);
  2122. rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x0);
  2123. }
  2124. static void rtw8852c_btc_init_cfg(struct rtw89_dev *rtwdev)
  2125. {
  2126. struct rtw89_btc *btc = &rtwdev->btc;
  2127. struct rtw89_btc_module *module = &btc->mdinfo;
  2128. const struct rtw89_chip_info *chip = rtwdev->chip;
  2129. const struct rtw89_mac_ax_coex coex_params = {
  2130. .pta_mode = RTW89_MAC_AX_COEX_RTK_MODE,
  2131. .direction = RTW89_MAC_AX_COEX_INNER,
  2132. };
  2133. /* PTA init */
  2134. rtw89_mac_coex_init_v1(rtwdev, &coex_params);
  2135. /* set WL Tx response = Hi-Pri */
  2136. chip->ops->btc_set_wl_pri(rtwdev, BTC_PRI_MASK_TX_RESP, true);
  2137. chip->ops->btc_set_wl_pri(rtwdev, BTC_PRI_MASK_BEACON, true);
  2138. /* set rf gnt debug off */
  2139. rtw89_write_rf(rtwdev, RF_PATH_A, RR_WLSEL, RFREG_MASK, 0x0);
  2140. rtw89_write_rf(rtwdev, RF_PATH_B, RR_WLSEL, RFREG_MASK, 0x0);
  2141. /* set WL Tx thru in TRX mask table if GNT_WL = 0 && BT_S1 = ss group */
  2142. if (module->ant.type == BTC_ANT_SHARED) {
  2143. rtw8852c_set_trx_mask(rtwdev,
  2144. RF_PATH_A, BTC_BT_SS_GROUP, 0x5ff);
  2145. rtw8852c_set_trx_mask(rtwdev,
  2146. RF_PATH_B, BTC_BT_SS_GROUP, 0x5ff);
  2147. /* set path-A(S0) Tx/Rx no-mask if GNT_WL=0 && BT_S1=tx group */
  2148. rtw8852c_set_trx_mask(rtwdev,
  2149. RF_PATH_A, BTC_BT_TX_GROUP, 0x5ff);
  2150. } else { /* set WL Tx stb if GNT_WL = 0 && BT_S1 = ss group for 3-ant */
  2151. rtw8852c_set_trx_mask(rtwdev,
  2152. RF_PATH_A, BTC_BT_SS_GROUP, 0x5df);
  2153. rtw8852c_set_trx_mask(rtwdev,
  2154. RF_PATH_B, BTC_BT_SS_GROUP, 0x5df);
  2155. }
  2156. /* set PTA break table */
  2157. rtw89_write32(rtwdev, R_AX_BT_BREAK_TABLE, BTC_BREAK_PARAM);
  2158. /* enable BT counter 0xda10[1:0] = 2b'11 */
  2159. rtw89_write32_set(rtwdev,
  2160. R_AX_BT_CNT_CFG, B_AX_BT_CNT_EN |
  2161. B_AX_BT_CNT_RST_V1);
  2162. btc->cx.wl.status.map.init_ok = true;
  2163. }
  2164. static
  2165. void rtw8852c_btc_set_wl_pri(struct rtw89_dev *rtwdev, u8 map, bool state)
  2166. {
  2167. u32 bitmap = 0;
  2168. u32 reg = 0;
  2169. switch (map) {
  2170. case BTC_PRI_MASK_TX_RESP:
  2171. reg = R_BTC_COEX_WL_REQ;
  2172. bitmap = B_BTC_RSP_ACK_HI;
  2173. break;
  2174. case BTC_PRI_MASK_BEACON:
  2175. reg = R_BTC_COEX_WL_REQ;
  2176. bitmap = B_BTC_TX_BCN_HI;
  2177. break;
  2178. default:
  2179. return;
  2180. }
  2181. if (state)
  2182. rtw89_write32_set(rtwdev, reg, bitmap);
  2183. else
  2184. rtw89_write32_clr(rtwdev, reg, bitmap);
  2185. }
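/* TX power request passed in from the coex core: one 16-bit control for
 * "all time" TX power and one for TX power under GNT_BT, each carrying a
 * 9-bit signed value; a half-word of 0xffff means the corresponding force
 * is left disabled (see rtw8852c_btc_set_wl_txpwr_ctrl()).
 */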
union rtw8852c_btc_wl_txpwr_ctrl {
	u32 txpwr_val;
	struct {
		union {
			u16 ctrl_all_time;
			struct {
				s16 data:9;
				u16 rsvd:6;
				u16 flag:1;
			} all_time;
		};
		union {
			u16 ctrl_gnt_bt;
			struct {
				s16 data:9;
				u16 rsvd:7;
			} gnt_bt;
		};
	};
} __packed;
  2206. static void
  2207. rtw8852c_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val)
  2208. {
  2209. union rtw8852c_btc_wl_txpwr_ctrl arg = { .txpwr_val = txpwr_val };
  2210. s32 val;
  2211. #define __write_ctrl(_reg, _msk, _val, _en, _cond) \
  2212. do { \
  2213. u32 _wrt = FIELD_PREP(_msk, _val); \
  2214. BUILD_BUG_ON((_msk & _en) != 0); \
  2215. if (_cond) \
  2216. _wrt |= _en; \
  2217. else \
  2218. _wrt &= ~_en; \
  2219. rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, _reg, \
  2220. _msk | _en, _wrt); \
  2221. } while (0)
  2222. switch (arg.ctrl_all_time) {
  2223. case 0xffff:
  2224. val = 0;
  2225. break;
  2226. default:
  2227. val = arg.all_time.data;
  2228. break;
  2229. }
  2230. __write_ctrl(R_AX_PWR_RATE_CTRL, B_AX_FORCE_PWR_BY_RATE_VALUE_MASK,
  2231. val, B_AX_FORCE_PWR_BY_RATE_EN,
  2232. arg.ctrl_all_time != 0xffff);
  2233. switch (arg.ctrl_gnt_bt) {
  2234. case 0xffff:
  2235. val = 0;
  2236. break;
  2237. default:
  2238. val = arg.gnt_bt.data;
  2239. break;
  2240. }
  2241. __write_ctrl(R_AX_PWR_COEXT_CTRL, B_AX_TXAGC_BT_MASK, val,
  2242. B_AX_TXAGC_BT_EN, arg.ctrl_gnt_bt != 0xffff);
  2243. #undef __write_ctrl
  2244. }
static
s8 rtw8852c_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val)
{
	/* +6 to compensate for the reporting offset */
	return clamp_t(s8, val + 6, -100, 0) + 100;
}

static const struct rtw89_btc_rf_trx_para rtw89_btc_8852c_rf_ul[] = {
	{255, 0, 0, 7}, /* 0 -> original */
	{255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */
	{255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
	{255, 1, 0, 7}, /* the ids below are for non-shared-antenna free-run */
	{6, 1, 0, 7},
	{13, 1, 0, 7},
	{13, 1, 0, 7}
};

static const struct rtw89_btc_rf_trx_para rtw89_btc_8852c_rf_dl[] = {
	{255, 0, 0, 7}, /* 0 -> original */
	{255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
	{255, 1, 0, 7}, /* the ids below are for non-shared-antenna free-run */
	{255, 1, 0, 7},
	{255, 1, 0, 7},
	{255, 1, 0, 7}
};

static const u8 rtw89_btc_8852c_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {60, 50, 40, 30};
static const u8 rtw89_btc_8852c_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {40, 36, 31, 28};
  2275. static const struct rtw89_btc_fbtc_mreg rtw89_btc_8852c_mon_reg[] = {
  2276. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda00),
  2277. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda04),
  2278. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda24),
  2279. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda30),
  2280. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda34),
  2281. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda38),
  2282. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda44),
  2283. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda48),
  2284. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda4c),
  2285. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xd200),
  2286. RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xd220),
  2287. RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980),
  2288. RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x4aa4),
  2289. RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x4778),
  2290. RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x476c),
  2291. };
  2292. static
  2293. void rtw8852c_btc_update_bt_cnt(struct rtw89_dev *rtwdev)
  2294. {
  2295. /* Feature move to firmware */
  2296. }
  2297. static
  2298. void rtw8852c_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state)
  2299. {
  2300. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
  2301. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
  2302. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x620);
  2303. /* set WL standby = Rx for GNT_BT_Tx = 1->0 settle issue */
  2304. if (state)
  2305. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0,
  2306. RFREG_MASK, 0x179c);
  2307. else
  2308. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0,
  2309. RFREG_MASK, 0x208);
  2310. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
  2311. }
  2312. static void rtw8852c_set_wl_lna2(struct rtw89_dev *rtwdev, u8 level)
  2313. {
  2314. /* level=0 Default: TIA 1/0= (LNA2,TIAN6) = (7,1)/(5,1) = 21dB/12dB
  2315. * level=1 Fix LNA2=5: TIA 1/0= (LNA2,TIAN6) = (5,0)/(5,1) = 18dB/12dB
  2316. * To improve BT ACI in co-rx
  2317. */
  2318. switch (level) {
  2319. case 0: /* default */
  2320. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x1000);
  2321. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x0);
  2322. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
  2323. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
  2324. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x17);
  2325. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x2);
  2326. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
  2327. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x3);
  2328. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x17);
  2329. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
  2330. break;
  2331. case 1: /* Fix LNA2=5 */
  2332. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x1000);
  2333. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x0);
  2334. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
  2335. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
  2336. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x5);
  2337. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x2);
  2338. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
  2339. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x3);
  2340. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x5);
  2341. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
  2342. break;
  2343. }
  2344. }
  2345. static void rtw8852c_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level)
  2346. {
  2347. struct rtw89_btc *btc = &rtwdev->btc;
  2348. switch (level) {
  2349. case 0: /* original */
  2350. default:
  2351. rtw8852c_bb_ctrl_btc_preagc(rtwdev, false);
  2352. btc->dm.wl_lna2 = 0;
  2353. break;
  2354. case 1: /* for FDD free-run */
  2355. rtw8852c_bb_ctrl_btc_preagc(rtwdev, true);
  2356. btc->dm.wl_lna2 = 0;
  2357. break;
  2358. case 2: /* for BTG Co-Rx*/
  2359. rtw8852c_bb_ctrl_btc_preagc(rtwdev, false);
  2360. btc->dm.wl_lna2 = 1;
  2361. break;
  2362. }
  2363. rtw8852c_set_wl_lna2(rtwdev, btc->dm.wl_lna2);
  2364. }
  2365. static void rtw8852c_fill_freq_with_ppdu(struct rtw89_dev *rtwdev,
  2366. struct rtw89_rx_phy_ppdu *phy_ppdu,
  2367. struct ieee80211_rx_status *status)
  2368. {
  2369. u8 chan_idx = phy_ppdu->chan_idx;
  2370. enum nl80211_band band;
  2371. u8 ch;
  2372. if (chan_idx == 0)
  2373. return;
  2374. rtw89_decode_chan_idx(rtwdev, chan_idx, &ch, &band);
  2375. status->freq = ieee80211_channel_to_frequency(ch, band);
  2376. status->band = band;
  2377. }
  2378. static void rtw8852c_query_ppdu(struct rtw89_dev *rtwdev,
  2379. struct rtw89_rx_phy_ppdu *phy_ppdu,
  2380. struct ieee80211_rx_status *status)
  2381. {
  2382. u8 path;
  2383. u8 *rx_power = phy_ppdu->rssi;
  2384. status->signal = RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A], rx_power[RF_PATH_B]));
  2385. for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
  2386. status->chains |= BIT(path);
  2387. status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]);
  2388. }
  2389. if (phy_ppdu->valid)
  2390. rtw8852c_fill_freq_with_ppdu(rtwdev, phy_ppdu, status);
  2391. }
static int rtw8852c_mac_enable_bb_rf(struct rtw89_dev *rtwdev)
{
	int ret;

	rtw89_write8_set(rtwdev, R_AX_SYS_FUNC_EN,
			 B_AX_FEN_BBRSTB | B_AX_FEN_BB_GLB_RSTN);
	rtw89_write32_set(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
	rtw89_write32_clr(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
	rtw89_write32_set(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);

	rtw89_write32_mask(rtwdev, R_AX_AFE_OFF_CTRL1, B_AX_S0_LDO_VSEL_F_MASK, 0x1);
	rtw89_write32_mask(rtwdev, R_AX_AFE_OFF_CTRL1, B_AX_S1_LDO_VSEL_F_MASK, 0x1);

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL0, 0x7, FULL_BIT_MASK);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x6c, FULL_BIT_MASK);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xc7, FULL_BIT_MASK);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xc7, FULL_BIT_MASK);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL3, 0xd, FULL_BIT_MASK);
	if (ret)
		return ret;

	return 0;
}

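/* Counterpart of rtw8852c_mac_enable_bb_rf(): clear the BB function-enable
 * bits to put the baseband back into reset.
 */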
static int rtw8852c_mac_disable_bb_rf(struct rtw89_dev *rtwdev)
{
	rtw89_write8_clr(rtwdev, R_AX_SYS_FUNC_EN,
			 B_AX_FEN_BBRSTB | B_AX_FEN_BB_GLB_RSTN);

	return 0;
}

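/* Channel-context listener: routes RFK callback events to
 * rtw8852c_rfk_chanctx_cb().
 */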
static const struct rtw89_chanctx_listener rtw8852c_chanctx_listener = {
	.callbacks[RTW89_CHANCTX_CALLBACK_RFK] = rtw8852c_rfk_chanctx_cb,
};

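/* WoWLAN triggers advertised to mac80211 when power management is enabled. */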
#ifdef CONFIG_PM
static const struct wiphy_wowlan_support rtw_wowlan_stub_8852c = {
	.flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_DISCONNECT,
	.n_patterns = RTW89_MAX_PATTERN_NUM,
	.pattern_max_len = RTW89_MAX_PATTERN_SIZE,
	.pattern_min_len = 1,
};
#endif

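/* 8852C chip operations; shared rtw89 v1 helpers are plugged in for the
 * generic RF register access, TX descriptor, MAC control and BTC policy
 * paths.
 */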
static const struct rtw89_chip_ops rtw8852c_chip_ops = {
	.enable_bb_rf = rtw8852c_mac_enable_bb_rf,
	.disable_bb_rf = rtw8852c_mac_disable_bb_rf,
	.bb_preinit = NULL,
	.bb_reset = rtw8852c_bb_reset,
	.bb_sethw = rtw8852c_bb_sethw,
	.read_rf = rtw89_phy_read_rf_v1,
	.write_rf = rtw89_phy_write_rf_v1,
	.set_channel = rtw8852c_set_channel,
	.set_channel_help = rtw8852c_set_channel_help,
	.read_efuse = rtw8852c_read_efuse,
	.read_phycap = rtw8852c_read_phycap,
	.fem_setup = NULL,
	.rfe_gpio = NULL,
	.rfk_init = rtw8852c_rfk_init,
	.rfk_channel = rtw8852c_rfk_channel,
	.rfk_band_changed = rtw8852c_rfk_band_changed,
	.rfk_scan = rtw8852c_rfk_scan,
	.rfk_track = rtw8852c_rfk_track,
	.power_trim = rtw8852c_power_trim,
	.set_txpwr = rtw8852c_set_txpwr,
	.set_txpwr_ctrl = rtw8852c_set_txpwr_ctrl,
	.init_txpwr_unit = rtw8852c_init_txpwr_unit,
	.get_thermal = rtw8852c_get_thermal,
	.ctrl_btg = rtw8852c_ctrl_btg,
	.query_ppdu = rtw8852c_query_ppdu,
	.bb_ctrl_btc_preagc = rtw8852c_bb_ctrl_btc_preagc,
	.cfg_txrx_path = rtw8852c_bb_cfg_txrx_path,
	.set_txpwr_ul_tb_offset = rtw8852c_set_txpwr_ul_tb_offset,
	.pwr_on_func = rtw8852c_pwr_on_func,
	.pwr_off_func = rtw8852c_pwr_off_func,
	.query_rxdesc = rtw89_core_query_rxdesc,
	.fill_txdesc = rtw89_core_fill_txdesc_v1,
	.fill_txdesc_fwcmd = rtw89_core_fill_txdesc_fwcmd_v1,
	.cfg_ctrl_path = rtw89_mac_cfg_ctrl_path_v1,
	.mac_cfg_gnt = rtw89_mac_cfg_gnt_v1,
	.stop_sch_tx = rtw89_mac_stop_sch_tx_v1,
	.resume_sch_tx = rtw89_mac_resume_sch_tx_v1,
	.h2c_dctl_sec_cam = rtw89_fw_h2c_dctl_sec_cam_v1,
	.btc_set_rfe = rtw8852c_btc_set_rfe,
	.btc_init_cfg = rtw8852c_btc_init_cfg,
	.btc_set_wl_pri = rtw8852c_btc_set_wl_pri,
	.btc_set_wl_txpwr_ctrl = rtw8852c_btc_set_wl_txpwr_ctrl,
	.btc_get_bt_rssi = rtw8852c_btc_get_bt_rssi,
	.btc_update_bt_cnt = rtw8852c_btc_update_bt_cnt,
	.btc_wl_s1_standby = rtw8852c_btc_wl_s1_standby,
	.btc_set_wl_rx_gain = rtw8852c_btc_set_wl_rx_gain,
	.btc_set_policy = rtw89_btc_set_policy_v1,
};

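/* Chip capability and parameter table for the RTL8852C. */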
const struct rtw89_chip_info rtw8852c_chip_info = {
	.chip_id = RTL8852C,
	.chip_gen = RTW89_CHIP_AX,
	.ops = &rtw8852c_chip_ops,
	.mac_def = &rtw89_mac_gen_ax,
	.phy_def = &rtw89_phy_gen_ax,
	.fw_basename = RTW8852C_FW_BASENAME,
	.fw_format_max = RTW8852C_FW_FORMAT_MAX,
	.try_ce_fw = false,
	.bbmcu_nr = 0,
	.needed_fw_elms = 0,
	.fifo_size = 458752,
	.small_fifo_size = false,
	.dle_scc_rsvd_size = 0,
	.max_amsdu_limit = 8000,
	.dis_2g_40m_ul_ofdma = false,
	.rsvd_ple_ofst = 0x6f800,
	.hfc_param_ini = rtw8852c_hfc_param_ini_pcie,
	.dle_mem = rtw8852c_dle_mem_pcie,
	.wde_qempty_acq_num = 16,
	.wde_qempty_mgq_sel = 16,
	.rf_base_addr = {0xe000, 0xf000},
	.pwr_on_seq = NULL,
	.pwr_off_seq = NULL,
	.bb_table = &rtw89_8852c_phy_bb_table,
	.bb_gain_table = &rtw89_8852c_phy_bb_gain_table,
	.rf_table = {&rtw89_8852c_phy_radiob_table,
		     &rtw89_8852c_phy_radioa_table,},
	.nctl_table = &rtw89_8852c_phy_nctl_table,
	.nctl_post_table = NULL,
	.dflt_parms = &rtw89_8852c_dflt_parms,
	.rfe_parms_conf = NULL,
	.chanctx_listener = &rtw8852c_chanctx_listener,
	.txpwr_factor_rf = 2,
	.txpwr_factor_mac = 1,
	.dig_table = NULL,
	.dig_regs = &rtw8852c_dig_regs,
	.tssi_dbw_table = &rtw89_8852c_tssi_dbw_table,
#if LINUX_VERSION_CODE >= KERNEL_VERSION(5, 4, 0)
	.support_chanctx_num = 2,
	.support_bands = BIT(NL80211_BAND_2GHZ) |
			 BIT(NL80211_BAND_5GHZ) |
			 BIT(NL80211_BAND_6GHZ),
#else
	.support_chanctx_num = 0,
	.support_bands = BIT(NL80211_BAND_2GHZ) |
			 BIT(NL80211_BAND_5GHZ),
#endif
	.support_bw160 = true,
	.support_unii4 = true,
	.ul_tb_waveform_ctrl = false,
	.ul_tb_pwr_diff = true,
	.hw_sec_hdr = true,
	.rf_path_num = 2,
	.tx_nss = 2,
	.rx_nss = 2,
	.acam_num = 128,
	.bcam_num = 20,
	.scam_num = 128,
	.bacam_num = 8,
	.bacam_dynamic_num = 8,
	.bacam_ver = RTW89_BACAM_V0_EXT,
	.sec_ctrl_efuse_size = 4,
	.physical_efuse_size = 1216,
	.logical_efuse_size = 2048,
	.limit_efuse_size = 1280,
	.dav_phy_efuse_size = 96,
	.dav_log_efuse_size = 16,
	.phycap_addr = 0x590,
	.phycap_size = 0x60,
	.para_ver = 0x1,
	.wlcx_desired = 0x06000000,
	.btcx_desired = 0x7,
	.scbd = 0x1,
	.mailbox = 0x1,
	.afh_guard_ch = 6,
	.wl_rssi_thres = rtw89_btc_8852c_wl_rssi_thres,
	.bt_rssi_thres = rtw89_btc_8852c_bt_rssi_thres,
	.rssi_tol = 2,
	.mon_reg_num = ARRAY_SIZE(rtw89_btc_8852c_mon_reg),
	.mon_reg = rtw89_btc_8852c_mon_reg,
	.rf_para_ulink_num = ARRAY_SIZE(rtw89_btc_8852c_rf_ul),
	.rf_para_ulink = rtw89_btc_8852c_rf_ul,
	.rf_para_dlink_num = ARRAY_SIZE(rtw89_btc_8852c_rf_dl),
	.rf_para_dlink = rtw89_btc_8852c_rf_dl,
	.ps_mode_supported = BIT(RTW89_PS_MODE_RFOFF) |
			     BIT(RTW89_PS_MODE_CLK_GATED) |
			     BIT(RTW89_PS_MODE_PWR_GATED),
	.low_power_hci_modes = BIT(RTW89_PS_MODE_CLK_GATED) |
			       BIT(RTW89_PS_MODE_PWR_GATED),
	.h2c_cctl_func_id = H2C_FUNC_MAC_CCTLINFO_UD_V1,
	.hci_func_en_addr = R_AX_HCI_FUNC_EN_V1,
	.h2c_desc_size = sizeof(struct rtw89_rxdesc_short),
	.txwd_body_size = sizeof(struct rtw89_txwd_body_v1),
	.txwd_info_size = sizeof(struct rtw89_txwd_info),
	.h2c_ctrl_reg = R_AX_H2CREG_CTRL_V1,
	.h2c_counter_reg = {R_AX_UDM1 + 1, B_AX_UDM1_HALMAC_H2C_DEQ_CNT_MASK >> 8},
	.h2c_regs = rtw8852c_h2c_regs,
	.c2h_ctrl_reg = R_AX_C2HREG_CTRL_V1,
	.c2h_counter_reg = {R_AX_UDM1 + 1, B_AX_UDM1_HALMAC_C2H_ENQ_CNT_MASK >> 8},
	.c2h_regs = rtw8852c_c2h_regs,
	.page_regs = &rtw8852c_page_regs,
	.cfo_src_fd = false,
	.cfo_hw_comp = false,
	.dcfo_comp = &rtw8852c_dcfo_comp,
	.dcfo_comp_sft = 12,
	.imr_info = &rtw8852c_imr_info,
	.rrsr_cfgs = &rtw8852c_rrsr_cfgs,
	.bss_clr_map_reg = R_BSS_CLR_MAP,
	.dma_ch_mask = 0,
	.edcca_lvl_reg = R_SEG0R_EDCCA_LVL,
#ifdef CONFIG_PM
	.wowlan_stub = &rtw_wowlan_stub_8852c,
#endif
	.xtal_info = NULL,
};
EXPORT_SYMBOL(rtw8852c_chip_info);

MODULE_FIRMWARE(RTW8852C_MODULE_FIRMWARE);
MODULE_AUTHOR("Realtek Corporation");
MODULE_DESCRIPTION("Realtek 802.11ax wireless 8852C driver");
MODULE_LICENSE("Dual BSD/GPL");