mt7621_nand.c

// SPDX-License-Identifier: GPL-2.0
/*
 * MediaTek MT7621 NAND Flash Controller driver
 *
 * Copyright (C) 2020 MediaTek Inc. All Rights Reserved.
 *
 * Author: Weijie Gao <[email protected]>
 */

#include <linux/io.h>
#include <linux/clk.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/sizes.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/mtd/mtd.h>
#include <linux/mtd/rawnand.h>
#include <linux/mtd/partitions.h>
#include <linux/mtd/mtk_bmt.h>
#include <linux/platform_device.h>
#include <asm/addrspace.h>

/* NFI core registers */
#define NFI_CNFG		0x000
#define   CNFG_OP_MODE_S	12
#define   CNFG_OP_MODE_M	GENMASK(14, 12)
#define   CNFG_OP_CUSTOM	6
#define   CNFG_AUTO_FMT_EN	BIT(9)
#define   CNFG_HW_ECC_EN	BIT(8)
#define   CNFG_BYTE_RW		BIT(6)
#define   CNFG_READ_MODE	BIT(1)

#define NFI_PAGEFMT		0x004
#define   PAGEFMT_FDM_ECC_S	12
#define   PAGEFMT_FDM_ECC_M	GENMASK(15, 12)
#define   PAGEFMT_FDM_S		8
#define   PAGEFMT_FDM_M		GENMASK(11, 8)
#define   PAGEFMT_SPARE_S	4
#define   PAGEFMT_SPARE_M	GENMASK(5, 4)
#define   PAGEFMT_PAGE_S	0
#define   PAGEFMT_PAGE_M	GENMASK(1, 0)

#define NFI_CON			0x008
#define   CON_NFI_SEC_S		12
#define   CON_NFI_SEC_M		GENMASK(15, 12)
#define   CON_NFI_BWR		BIT(9)
#define   CON_NFI_BRD		BIT(8)
#define   CON_NFI_RST		BIT(1)
#define   CON_FIFO_FLUSH	BIT(0)

#define NFI_ACCCON		0x00c
#define   ACCCON_POECS_S	28
#define   ACCCON_POECS_MAX	0x0f
#define   ACCCON_POECS_DEF	3
#define   ACCCON_PRECS_S	22
#define   ACCCON_PRECS_MAX	0x3f
#define   ACCCON_PRECS_DEF	3
#define   ACCCON_C2R_S		16
#define   ACCCON_C2R_MAX	0x3f
#define   ACCCON_C2R_DEF	7
#define   ACCCON_W2R_S		12
#define   ACCCON_W2R_MAX	0x0f
#define   ACCCON_W2R_DEF	7
#define   ACCCON_WH_S		8
#define   ACCCON_WH_MAX		0x0f
#define   ACCCON_WH_DEF		15
#define   ACCCON_WST_S		4
#define   ACCCON_WST_MAX	0x0f
#define   ACCCON_WST_DEF	15
#define   ACCCON_WST_MIN	3
#define   ACCCON_RLT_S		0
#define   ACCCON_RLT_MAX	0x0f
#define   ACCCON_RLT_DEF	15
#define   ACCCON_RLT_MIN	3

#define NFI_CMD			0x020

#define NFI_ADDRNOB		0x030
#define   ADDR_ROW_NOB_S	4
#define   ADDR_ROW_NOB_M	GENMASK(6, 4)
#define   ADDR_COL_NOB_S	0
#define   ADDR_COL_NOB_M	GENMASK(2, 0)

#define NFI_COLADDR		0x034
#define NFI_ROWADDR		0x038

#define NFI_STRDATA		0x040
#define   STR_DATA		BIT(0)

#define NFI_CNRNB		0x044
#define   CB2R_TIME_S		4
#define   CB2R_TIME_M		GENMASK(7, 4)
#define   STR_CNRNB		BIT(0)

#define NFI_DATAW		0x050
#define NFI_DATAR		0x054

#define NFI_PIO_DIRDY		0x058
#define   PIO_DIRDY		BIT(0)

#define NFI_STA			0x060
#define   STA_NFI_FSM_S		16
#define   STA_NFI_FSM_M		GENMASK(19, 16)
#define   STA_FSM_CUSTOM_DATA	14
#define   STA_BUSY		BIT(8)
#define   STA_ADDR		BIT(1)
#define   STA_CMD		BIT(0)

#define NFI_ADDRCNTR		0x070
#define   SEC_CNTR_S		12
#define   SEC_CNTR_M		GENMASK(15, 12)
#define   SEC_ADDR_S		0
#define   SEC_ADDR_M		GENMASK(9, 0)

#define NFI_CSEL		0x090
#define   CSEL_S		0
#define   CSEL_M		GENMASK(1, 0)

#define NFI_FDM0L		0x0a0
#define NFI_FDML(n)		(0x0a0 + ((n) << 3))

#define NFI_FDM0M		0x0a4
#define NFI_FDMM(n)		(0x0a4 + ((n) << 3))

#define NFI_MASTER_STA		0x210
#define   MAS_ADDR		GENMASK(11, 9)
#define   MAS_RD		GENMASK(8, 6)
#define   MAS_WR		GENMASK(5, 3)
#define   MAS_RDDLY		GENMASK(2, 0)

/* ECC engine registers */
#define ECC_ENCCON		0x000
#define   ENC_EN		BIT(0)

#define ECC_ENCCNFG		0x004
#define   ENC_CNFG_MSG_S	16
#define   ENC_CNFG_MSG_M	GENMASK(28, 16)
#define   ENC_MODE_S		4
#define   ENC_MODE_M		GENMASK(5, 4)
#define   ENC_MODE_NFI		1
#define   ENC_TNUM_S		0
#define   ENC_TNUM_M		GENMASK(2, 0)

#define ECC_ENCIDLE		0x00c
#define   ENC_IDLE		BIT(0)

#define ECC_DECCON		0x100
#define   DEC_EN		BIT(0)

#define ECC_DECCNFG		0x104
#define   DEC_EMPTY_EN		BIT(31)
#define   DEC_CS_S		16
#define   DEC_CS_M		GENMASK(28, 16)
#define   DEC_CON_S		12
#define   DEC_CON_M		GENMASK(13, 12)
#define   DEC_CON_EL		2
#define   DEC_MODE_S		4
#define   DEC_MODE_M		GENMASK(5, 4)
#define   DEC_MODE_NFI		1
#define   DEC_TNUM_S		0
#define   DEC_TNUM_M		GENMASK(2, 0)

#define ECC_DECIDLE		0x10c
#define   DEC_IDLE		BIT(1)

#define ECC_DECENUM		0x114
#define   ERRNUM_S		2
#define   ERRNUM_M		GENMASK(3, 0)

#define ECC_DECDONE		0x118
#define   DEC_DONE7		BIT(7)
#define   DEC_DONE6		BIT(6)
#define   DEC_DONE5		BIT(5)
#define   DEC_DONE4		BIT(4)
#define   DEC_DONE3		BIT(3)
#define   DEC_DONE2		BIT(2)
#define   DEC_DONE1		BIT(1)
#define   DEC_DONE0		BIT(0)

#define ECC_DECEL(n)		(0x11c + (n) * 4)
#define   DEC_EL_ODD_S		16
#define   DEC_EL_EVEN_S		0
#define   DEC_EL_M		0x1fff
#define   DEC_EL_BYTE_POS_S	3
#define   DEC_EL_BIT_POS_M	GENMASK(2, 0)

#define ECC_FDMADDR		0x13c

/* ENCIDLE and DECIDLE */
#define   ECC_IDLE		BIT(0)

#define ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt) \
	((tpoecs) << ACCCON_POECS_S | (tprecs) << ACCCON_PRECS_S | \
	 (tc2r) << ACCCON_C2R_S | (tw2r) << ACCCON_W2R_S | \
	 (twh) << ACCCON_WH_S | (twst) << ACCCON_WST_S | (trlt))

#define MASTER_STA_MASK		(MAS_ADDR | MAS_RD | MAS_WR | \
				 MAS_RDDLY)

#define NFI_RESET_TIMEOUT	1000000
#define NFI_CORE_TIMEOUT	500000
#define ECC_ENGINE_TIMEOUT	500000

#define ECC_SECTOR_SIZE		512
#define ECC_PARITY_BITS		13

#define NFI_FDM_SIZE		8

#define MT7621_NFC_NAME		"mt7621-nand"
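
/*
 * Per-controller driver state. nfi_base holds the physical base address of
 * the NFI block so the ECC engine can be pointed at the NFI FDM registers
 * through ECC_FDMADDR; spare_per_sector is the spare-area size per 512-byte
 * sector selected for NFI_PAGEFMT.
 */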
struct mt7621_nfc {
	struct nand_controller controller;
	struct nand_chip nand;
	struct clk *nfi_clk;
	struct device *dev;

	u32 nfi_base;
	void __iomem *nfi_regs;
	void __iomem *ecc_regs;

	u32 spare_per_sector;
};

static const u16 mt7621_nfi_page_size[] = { SZ_512, SZ_2K, SZ_4K };
static const u8 mt7621_nfi_spare_size[] = { 16, 26, 27, 28 };
static const u8 mt7621_ecc_strength[] = { 4, 6, 8, 10, 12 };

static inline u32 nfi_read32(struct mt7621_nfc *nfc, u32 reg)
{
	return readl(nfc->nfi_regs + reg);
}

static inline void nfi_write32(struct mt7621_nfc *nfc, u32 reg, u32 val)
{
	writel(val, nfc->nfi_regs + reg);
}

static inline u16 nfi_read16(struct mt7621_nfc *nfc, u32 reg)
{
	return readw(nfc->nfi_regs + reg);
}

static inline void nfi_write16(struct mt7621_nfc *nfc, u32 reg, u16 val)
{
	writew(val, nfc->nfi_regs + reg);
}

static inline void ecc_write16(struct mt7621_nfc *nfc, u32 reg, u16 val)
{
	writew(val, nfc->ecc_regs + reg);
}

static inline u32 ecc_read32(struct mt7621_nfc *nfc, u32 reg)
{
	return readl(nfc->ecc_regs + reg);
}
static inline void ecc_write32(struct mt7621_nfc *nfc, u32 reg, u32 val)
{
	writel(val, nfc->ecc_regs + reg);
}
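
/*
 * Layout of the in-memory OOB buffer (nand->oob_poi) used by this driver:
 * the 8 FDM bytes of every sector are packed at the start, followed by the
 * ECC parity bytes of every sector. The helpers below return pointers into
 * that layout, or into the data buffer, for a given sector index.
 */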
static inline u8 *oob_fdm_ptr(struct nand_chip *nand, int sect)
{
	return nand->oob_poi + sect * NFI_FDM_SIZE;
}

static inline u8 *oob_ecc_ptr(struct mt7621_nfc *nfc, int sect)
{
	struct nand_chip *nand = &nfc->nand;

	return nand->oob_poi + nand->ecc.steps * NFI_FDM_SIZE +
	       sect * (nfc->spare_per_sector - NFI_FDM_SIZE);
}

static inline u8 *page_data_ptr(struct nand_chip *nand, const u8 *buf,
				int sect)
{
	return (u8 *)buf + sect * nand->ecc.size;
}

static int mt7621_ecc_wait_idle(struct mt7621_nfc *nfc, u32 reg)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	ret = readw_poll_timeout_atomic(nfc->ecc_regs + reg, val,
					val & ECC_IDLE, 10,
					ECC_ENGINE_TIMEOUT);
	if (ret) {
		dev_warn(dev, "ECC engine timed out entering idle mode\n");
		return -EIO;
	}

	return 0;
}

static int mt7621_ecc_decoder_wait_done(struct mt7621_nfc *nfc, u32 sect)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	ret = readw_poll_timeout_atomic(nfc->ecc_regs + ECC_DECDONE, val,
					val & (1 << sect), 10,
					ECC_ENGINE_TIMEOUT);
	if (ret) {
		dev_warn(dev, "ECC decoder for sector %d timed out\n", sect);
		return -ETIMEDOUT;
	}

	return 0;
}

static void mt7621_ecc_encoder_op(struct mt7621_nfc *nfc, bool enable)
{
	mt7621_ecc_wait_idle(nfc, ECC_ENCIDLE);
	ecc_write16(nfc, ECC_ENCCON, enable ? ENC_EN : 0);
}

static void mt7621_ecc_decoder_op(struct mt7621_nfc *nfc, bool enable)
{
	mt7621_ecc_wait_idle(nfc, ECC_DECIDLE);
	ecc_write16(nfc, ECC_DECCON, enable ? DEC_EN : 0);
}
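
/*
 * Read the per-sector error count from ECC_DECENUM and, for each reported
 * error, decode its bit location from the ECC_DECEL registers (two 13-bit
 * locations per 32-bit register). Locations inside the sector data are
 * corrected in sector_buf, locations inside the 8 FDM bytes in fdm_buf.
 * A saturated error count (ERRNUM_M) means the sector is uncorrectable and
 * -1 is returned; otherwise the number of corrected bitflips is returned.
 */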
static int mt7621_ecc_correct_check(struct mt7621_nfc *nfc, u8 *sector_buf,
				    u8 *fdm_buf, u32 sect)
{
	struct nand_chip *nand = &nfc->nand;
	u32 decnum, num_error_bits, fdm_end_bits;
	u32 error_locations, error_bit_loc;
	u32 error_byte_pos, error_bit_pos;
	int bitflips = 0;
	u32 i;

	decnum = ecc_read32(nfc, ECC_DECENUM);
	num_error_bits = (decnum >> (sect << ERRNUM_S)) & ERRNUM_M;
	fdm_end_bits = (nand->ecc.size + NFI_FDM_SIZE) << 3;

	if (!num_error_bits)
		return 0;

	if (num_error_bits == ERRNUM_M)
		return -1;

	for (i = 0; i < num_error_bits; i++) {
		error_locations = ecc_read32(nfc, ECC_DECEL(i / 2));
		error_bit_loc = (error_locations >> ((i % 2) * DEC_EL_ODD_S)) &
				DEC_EL_M;
		error_byte_pos = error_bit_loc >> DEC_EL_BYTE_POS_S;
		error_bit_pos = error_bit_loc & DEC_EL_BIT_POS_M;

		if (error_bit_loc < (nand->ecc.size << 3)) {
			if (sector_buf) {
				sector_buf[error_byte_pos] ^=
					(1 << error_bit_pos);
			}
		} else if (error_bit_loc < fdm_end_bits) {
			if (fdm_buf) {
				fdm_buf[error_byte_pos - nand->ecc.size] ^=
					(1 << error_bit_pos);
			}
		}

		bitflips++;
	}

	return bitflips;
}

static int mt7621_nfc_wait_write_completion(struct mt7621_nfc *nfc,
					    struct nand_chip *nand)
{
	struct device *dev = nfc->dev;
	u16 val;
	int ret;

	ret = readw_poll_timeout_atomic(nfc->nfi_regs + NFI_ADDRCNTR, val,
					((val & SEC_CNTR_M) >> SEC_CNTR_S) >= nand->ecc.steps,
					10, NFI_CORE_TIMEOUT);
	if (ret) {
		dev_warn(dev, "NFI core write operation timed out\n");
		return -ETIMEDOUT;
	}

	return ret;
}
static void mt7621_nfc_hw_reset(struct mt7621_nfc *nfc)
{
	u32 val;
	int ret;

	/* reset all registers and force the NFI master to terminate */
	nfi_write16(nfc, NFI_CON, CON_FIFO_FLUSH | CON_NFI_RST);

	/* wait for the master to finish the last transaction */
	ret = readw_poll_timeout(nfc->nfi_regs + NFI_MASTER_STA, val,
				 !(val & MASTER_STA_MASK), 50,
				 NFI_RESET_TIMEOUT);
	if (ret) {
		dev_warn(nfc->dev, "Failed to reset NFI master in %dus\n",
			 NFI_RESET_TIMEOUT);
	}

	/* ensure any status register affected by the NFI master is reset */
	nfi_write16(nfc, NFI_CON, CON_FIFO_FLUSH | CON_NFI_RST);
	nfi_write16(nfc, NFI_STRDATA, 0);
}
static inline void mt7621_nfc_hw_init(struct mt7621_nfc *nfc)
{
	u32 acccon;

	/*
	 * CNRNB: nand ready/busy register
	 * -------------------------------
	 * 7:4: timeout register for polling the NAND busy/ready signal
	 * 0  : poll the status of the busy/ready signal after [7:4]*16 cycles.
	 */
	nfi_write16(nfc, NFI_CNRNB, CB2R_TIME_M | STR_CNRNB);

	mt7621_nfc_hw_reset(nfc);

	/* Apply default access timing */
	acccon = ACCTIMING(ACCCON_POECS_DEF, ACCCON_PRECS_DEF, ACCCON_C2R_DEF,
			   ACCCON_W2R_DEF, ACCCON_WH_DEF, ACCCON_WST_DEF,
			   ACCCON_RLT_DEF);

	nfi_write32(nfc, NFI_ACCCON, acccon);
}

static int mt7621_nfc_send_command(struct mt7621_nfc *nfc, u8 command)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	nfi_write32(nfc, NFI_CMD, command);

	ret = readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
					!(val & STA_CMD), 10,
					NFI_CORE_TIMEOUT);
	if (ret) {
		dev_warn(dev, "NFI core timed out entering command mode\n");
		return -EIO;
	}

	return 0;
}

static int mt7621_nfc_send_address_byte(struct mt7621_nfc *nfc, int addr)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	nfi_write32(nfc, NFI_COLADDR, addr);
	nfi_write32(nfc, NFI_ROWADDR, 0);
	nfi_write16(nfc, NFI_ADDRNOB, 1);

	ret = readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
					!(val & STA_ADDR), 10,
					NFI_CORE_TIMEOUT);
	if (ret) {
		dev_warn(dev, "NFI core timed out entering address mode\n");
		return -EIO;
	}

	return 0;
}

static int mt7621_nfc_send_address(struct mt7621_nfc *nfc, const u8 *addr,
				   unsigned int naddrs)
{
	int ret;

	while (naddrs) {
		ret = mt7621_nfc_send_address_byte(nfc, *addr);
		if (ret)
			return ret;

		addr++;
		naddrs--;
	}

	return 0;
}

static void mt7621_nfc_wait_pio_ready(struct mt7621_nfc *nfc)
{
	struct device *dev = nfc->dev;
	int ret;
	u16 val;

	ret = readw_poll_timeout_atomic(nfc->nfi_regs + NFI_PIO_DIRDY, val,
					val & PIO_DIRDY, 10,
					NFI_CORE_TIMEOUT);
	if (ret < 0)
		dev_err(dev, "NFI core PIO mode not ready\n");
}
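
/*
 * PIO data path: if the NFI state machine is not already in the custom-data
 * state, (re)program NFI_CNFG for byte or 32-bit access, start a burst
 * covering the maximum sector count, and trigger it via NFI_STRDATA. Each
 * word is then fetched from NFI_DATAR once PIO_DIRDY signals that data is
 * available; mt7621_nfc_pio_write() mirrors this for NFI_DATAW.
 */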
static u32 mt7621_nfc_pio_read(struct mt7621_nfc *nfc, bool br)
{
	u32 reg;

	/* after each byte read, the NFI_STA reg is reset by the hardware */
	reg = (nfi_read32(nfc, NFI_STA) & STA_NFI_FSM_M) >> STA_NFI_FSM_S;
	if (reg != STA_FSM_CUSTOM_DATA) {
		reg = nfi_read16(nfc, NFI_CNFG);
		reg |= CNFG_READ_MODE | CNFG_BYTE_RW;
		if (!br)
			reg &= ~CNFG_BYTE_RW;

		nfi_write16(nfc, NFI_CNFG, reg);

		/*
		 * set to max sector to allow the HW to continue reading over
		 * unaligned accesses
		 */
		nfi_write16(nfc, NFI_CON, CON_NFI_SEC_M | CON_NFI_BRD);

		/* trigger to fetch data */
		nfi_write16(nfc, NFI_STRDATA, STR_DATA);
	}

	mt7621_nfc_wait_pio_ready(nfc);

	return nfi_read32(nfc, NFI_DATAR);
}

static void mt7621_nfc_read_data(struct mt7621_nfc *nfc, u8 *buf, u32 len)
{
	while (((uintptr_t)buf & 3) && len) {
		*buf = mt7621_nfc_pio_read(nfc, true);
		buf++;
		len--;
	}

	while (len >= 4) {
		*(u32 *)buf = mt7621_nfc_pio_read(nfc, false);
		buf += 4;
		len -= 4;
	}

	while (len) {
		*buf = mt7621_nfc_pio_read(nfc, true);
		buf++;
		len--;
	}
}

static void mt7621_nfc_read_data_discard(struct mt7621_nfc *nfc, u32 len)
{
	while (len >= 4) {
		mt7621_nfc_pio_read(nfc, false);
		len -= 4;
	}

	while (len) {
		mt7621_nfc_pio_read(nfc, true);
		len--;
	}
}

static void mt7621_nfc_pio_write(struct mt7621_nfc *nfc, u32 val, bool bw)
{
	u32 reg;

	reg = (nfi_read32(nfc, NFI_STA) & STA_NFI_FSM_M) >> STA_NFI_FSM_S;
	if (reg != STA_FSM_CUSTOM_DATA) {
		reg = nfi_read16(nfc, NFI_CNFG);
		reg &= ~(CNFG_READ_MODE | CNFG_BYTE_RW);
		if (bw)
			reg |= CNFG_BYTE_RW;

		nfi_write16(nfc, NFI_CNFG, reg);

		nfi_write16(nfc, NFI_CON, CON_NFI_SEC_M | CON_NFI_BWR);
		nfi_write16(nfc, NFI_STRDATA, STR_DATA);
	}

	mt7621_nfc_wait_pio_ready(nfc);
	nfi_write32(nfc, NFI_DATAW, val);
}

static void mt7621_nfc_write_data(struct mt7621_nfc *nfc, const u8 *buf,
				  u32 len)
{
	while (((uintptr_t)buf & 3) && len) {
		mt7621_nfc_pio_write(nfc, *buf, true);
		buf++;
		len--;
	}

	while (len >= 4) {
		mt7621_nfc_pio_write(nfc, *(const u32 *)buf, false);
		buf += 4;
		len -= 4;
	}

	while (len) {
		mt7621_nfc_pio_write(nfc, *buf, true);
		buf++;
		len--;
	}
}

static void mt7621_nfc_write_data_empty(struct mt7621_nfc *nfc, u32 len)
{
	while (len >= 4) {
		mt7621_nfc_pio_write(nfc, 0xffffffff, false);
		len -= 4;
	}

	while (len) {
		mt7621_nfc_pio_write(nfc, 0xff, true);
		len--;
	}
}

static int mt7621_nfc_dev_ready(struct mt7621_nfc *nfc,
				unsigned int timeout_ms)
{
	u32 val;

	return readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
					 !(val & STA_BUSY), 10,
					 timeout_ms * 1000);
}
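
/*
 * Translate a single nand_op_instr into NFI register accesses: command and
 * address cycles go through the custom op mode, data in/out instructions
 * use the PIO helpers above, and WAITRDY polls STA_BUSY in NFI_STA.
 */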
static int mt7621_nfc_exec_instr(struct nand_chip *nand,
				 const struct nand_op_instr *instr)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);

	switch (instr->type) {
	case NAND_OP_CMD_INSTR:
		mt7621_nfc_hw_reset(nfc);
		nfi_write16(nfc, NFI_CNFG, CNFG_OP_CUSTOM << CNFG_OP_MODE_S);
		return mt7621_nfc_send_command(nfc, instr->ctx.cmd.opcode);
	case NAND_OP_ADDR_INSTR:
		return mt7621_nfc_send_address(nfc, instr->ctx.addr.addrs,
					       instr->ctx.addr.naddrs);
	case NAND_OP_DATA_IN_INSTR:
		mt7621_nfc_read_data(nfc, instr->ctx.data.buf.in,
				     instr->ctx.data.len);
		return 0;
	case NAND_OP_DATA_OUT_INSTR:
		mt7621_nfc_write_data(nfc, instr->ctx.data.buf.out,
				      instr->ctx.data.len);
		return 0;
	case NAND_OP_WAITRDY_INSTR:
		return mt7621_nfc_dev_ready(nfc,
					    instr->ctx.waitrdy.timeout_ms);
	default:
		WARN_ONCE(1, "unsupported NAND instruction type: %d\n",
			  instr->type);
		return -EINVAL;
	}
}

static int mt7621_nfc_exec_op(struct nand_chip *nand,
			      const struct nand_operation *op, bool check_only)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	int i, ret;

	if (check_only)
		return 0;

	/* Only CS0 available */
	nfi_write16(nfc, NFI_CSEL, 0);

	for (i = 0; i < op->ninstrs; i++) {
		ret = mt7621_nfc_exec_instr(nand, &op->instrs[i]);
		if (ret)
			return ret;
	}

	return 0;
}
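
/*
 * Convert the negotiated SDR timings (given in picoseconds) into NFI clock
 * cycles and pack them into the NFI_ACCCON access-timing register. Each
 * field is clamped to its maximum; the WE#/RE# low times are widened as
 * needed so that the resulting write/read cycle times also satisfy
 * tWC/tRC.
 */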
static int mt7621_nfc_setup_interface(struct nand_chip *nand, int csline,
				      const struct nand_interface_config *conf)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	const struct nand_sdr_timings *timings;
	u32 acccon, temp, rate, tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt;

	if (!nfc->nfi_clk)
		return -ENOTSUPP;

	timings = nand_get_sdr_timings(conf);
	if (IS_ERR(timings))
		return -ENOTSUPP;

	rate = clk_get_rate(nfc->nfi_clk);

	/* turn clock rate into kHz */
	rate /= 1000;

	tpoecs = max(timings->tALH_min, timings->tCLH_min) / 1000;
	tpoecs = DIV_ROUND_UP(tpoecs * rate, 1000000);
	tpoecs = min_t(u32, tpoecs, ACCCON_POECS_MAX);

	tprecs = max(timings->tCLS_min, timings->tALS_min) / 1000;
	tprecs = DIV_ROUND_UP(tprecs * rate, 1000000);
	tprecs = min_t(u32, tprecs, ACCCON_PRECS_MAX);

	/* sdr interface has no tCR which means CE# low to RE# low */
	tc2r = 0;

	tw2r = timings->tWHR_min / 1000;
	tw2r = DIV_ROUND_UP(tw2r * rate, 1000000);
	tw2r = DIV_ROUND_UP(tw2r - 1, 2);
	tw2r = min_t(u32, tw2r, ACCCON_W2R_MAX);

	twh = max(timings->tREH_min, timings->tWH_min) / 1000;
	twh = DIV_ROUND_UP(twh * rate, 1000000) - 1;
	twh = min_t(u32, twh, ACCCON_WH_MAX);

	/* Calculate real WE#/RE# hold time in nanoseconds */
	temp = (twh + 1) * 1000000 / rate;
	/* nanoseconds to picoseconds */
	temp *= 1000;

	/*
	 * WE# low level time should be expanded to meet WE# pulse time
	 * and WE# cycle time at the same time.
	 */
	if (temp < timings->tWC_min)
		twst = timings->tWC_min - temp;
	else
		twst = 0;
	twst = max(timings->tWP_min, twst) / 1000;
	twst = DIV_ROUND_UP(twst * rate, 1000000) - 1;
	twst = min_t(u32, twst, ACCCON_WST_MAX);

	/*
	 * RE# low level time should be expanded to meet RE# pulse time
	 * and RE# cycle time at the same time.
	 */
	if (temp < timings->tRC_min)
		trlt = timings->tRC_min - temp;
	else
		trlt = 0;
	trlt = max(trlt, timings->tRP_min) / 1000;
	trlt = DIV_ROUND_UP(trlt * rate, 1000000) - 1;
	trlt = min_t(u32, trlt, ACCCON_RLT_MAX);

	if (csline == NAND_DATA_IFACE_CHECK_ONLY) {
		if (twst < ACCCON_WST_MIN || trlt < ACCCON_RLT_MIN)
			return -ENOTSUPP;
	}

	acccon = ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt);

	dev_dbg(nfc->dev, "Using programmed access timing: %08x\n", acccon);

	nfi_write32(nfc, NFI_ACCCON, acccon);

	return 0;
}
static int mt7621_nfc_calc_ecc_strength(struct mt7621_nfc *nfc,
					u32 avail_ecc_bytes)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);
	u32 strength;
	int i;

	strength = avail_ecc_bytes * 8 / ECC_PARITY_BITS;

	/* Find the closest supported ecc strength */
	for (i = ARRAY_SIZE(mt7621_ecc_strength) - 1; i >= 0; i--) {
		if (mt7621_ecc_strength[i] <= strength)
			break;
	}

	if (unlikely(i < 0)) {
		dev_err(nfc->dev, "OOB size (%u) is not supported\n",
			mtd->oobsize);
		return -EINVAL;
	}

	nand->ecc.strength = mt7621_ecc_strength[i];
	nand->ecc.bytes =
		DIV_ROUND_UP(nand->ecc.strength * ECC_PARITY_BITS, 8);

	dev_info(nfc->dev, "ECC strength adjusted to %u bits\n",
		 nand->ecc.strength);

	return i;
}

static int mt7621_nfc_set_spare_per_sector(struct mt7621_nfc *nfc)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);
	u32 size;
	int i;

	size = nand->ecc.bytes + NFI_FDM_SIZE;

	/* Find the closest supported spare size */
	for (i = 0; i < ARRAY_SIZE(mt7621_nfi_spare_size); i++) {
		if (mt7621_nfi_spare_size[i] >= size)
			break;
	}

	if (unlikely(i >= ARRAY_SIZE(mt7621_nfi_spare_size))) {
		dev_err(nfc->dev, "OOB size (%u) is not supported\n",
			mtd->oobsize);
		return -EINVAL;
	}

	nfc->spare_per_sector = mt7621_nfi_spare_size[i];

	return i;
}
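
/*
 * Derive the per-sector ECC configuration from the chip geometry: with
 * 512-byte sectors, the spare bytes left per sector after the 8 FDM bytes
 * determine the strongest supported ECC strength, which is then programmed
 * into both the encoder (sector + FDM as the message) and the decoder
 * (message plus parity bits as the code word). ECC_FDMADDR points the ECC
 * engine at the NFI FDM registers.
 */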
static int mt7621_nfc_ecc_init(struct mt7621_nfc *nfc)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);
	u32 spare_per_sector, encode_block_size, decode_block_size;
	u32 ecc_enccfg, ecc_deccfg;
	int ecc_cap;

	/* Only hardware ECC mode is supported */
	if (nand->ecc.engine_type != NAND_ECC_ENGINE_TYPE_ON_HOST) {
		dev_err(nfc->dev, "Only hardware ECC mode is supported\n");
		return -EINVAL;
	}

	nand->ecc.size = ECC_SECTOR_SIZE;
	nand->ecc.steps = mtd->writesize / nand->ecc.size;

	spare_per_sector = mtd->oobsize / nand->ecc.steps;

	ecc_cap = mt7621_nfc_calc_ecc_strength(nfc,
					       spare_per_sector - NFI_FDM_SIZE);
	if (ecc_cap < 0)
		return ecc_cap;

	/* Sector + FDM */
	encode_block_size = (nand->ecc.size + NFI_FDM_SIZE) * 8;
	ecc_enccfg = ecc_cap | (ENC_MODE_NFI << ENC_MODE_S) |
		     (encode_block_size << ENC_CNFG_MSG_S);

	/* Sector + FDM + ECC parity bits */
	decode_block_size = ((nand->ecc.size + NFI_FDM_SIZE) * 8) +
			    nand->ecc.strength * ECC_PARITY_BITS;
	ecc_deccfg = ecc_cap | (DEC_MODE_NFI << DEC_MODE_S) |
		     (decode_block_size << DEC_CS_S) |
		     (DEC_CON_EL << DEC_CON_S) | DEC_EMPTY_EN;

	ecc_write32(nfc, ECC_FDMADDR, nfc->nfi_base + NFI_FDML(0));

	mt7621_ecc_encoder_op(nfc, false);
	ecc_write32(nfc, ECC_ENCCNFG, ecc_enccfg);

	mt7621_ecc_decoder_op(nfc, false);
	ecc_write32(nfc, ECC_DECCNFG, ecc_deccfg);

	return 0;
}

static int mt7621_nfc_set_page_format(struct mt7621_nfc *nfc)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);
	int i, spare_size;
	u32 pagefmt;

	spare_size = mt7621_nfc_set_spare_per_sector(nfc);
	if (spare_size < 0)
		return spare_size;

	for (i = 0; i < ARRAY_SIZE(mt7621_nfi_page_size); i++) {
		if (mt7621_nfi_page_size[i] == mtd->writesize)
			break;
	}

	if (unlikely(i >= ARRAY_SIZE(mt7621_nfi_page_size))) {
		dev_err(nfc->dev, "Page size (%u) is not supported\n",
			mtd->writesize);
		return -EINVAL;
	}

	pagefmt = i | (spare_size << PAGEFMT_SPARE_S) |
		  (NFI_FDM_SIZE << PAGEFMT_FDM_S) |
		  (NFI_FDM_SIZE << PAGEFMT_FDM_ECC_S);

	nfi_write16(nfc, NFI_PAGEFMT, pagefmt);

	return 0;
}

static int mt7621_nfc_attach_chip(struct nand_chip *nand)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	int ret;

	if (nand->options & NAND_BUSWIDTH_16) {
		dev_err(nfc->dev, "16-bit buswidth is not supported");
		return -EINVAL;
	}

	ret = mt7621_nfc_ecc_init(nfc);
	if (ret)
		return ret;

	return mt7621_nfc_set_page_format(nfc);
}

static const struct nand_controller_ops mt7621_nfc_controller_ops = {
	.attach_chip = mt7621_nfc_attach_chip,
	.exec_op = mt7621_nfc_exec_op,
	.setup_interface = mt7621_nfc_setup_interface,
};
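
/*
 * OOB layout reported to MTD: the free region of each ECC step is its FDM
 * area minus the first byte (byte 0 of each FDM chunk is skipped, which is
 * commonly where the bad block marker lives), and everything after the
 * packed FDM areas is reported as a single ECC region.
 */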
static int mt7621_nfc_ooblayout_free(struct mtd_info *mtd, int section,
				     struct mtd_oob_region *oob_region)
{
	struct nand_chip *nand = mtd_to_nand(mtd);

	if (section >= nand->ecc.steps)
		return -ERANGE;

	oob_region->length = NFI_FDM_SIZE - 1;
	oob_region->offset = section * NFI_FDM_SIZE + 1;

	return 0;
}

static int mt7621_nfc_ooblayout_ecc(struct mtd_info *mtd, int section,
				    struct mtd_oob_region *oob_region)
{
	struct nand_chip *nand = mtd_to_nand(mtd);

	if (section)
		return -ERANGE;

	oob_region->offset = NFI_FDM_SIZE * nand->ecc.steps;
	oob_region->length = mtd->oobsize - oob_region->offset;

	return 0;
}

static const struct mtd_ooblayout_ops mt7621_nfc_ooblayout_ops = {
	.free = mt7621_nfc_ooblayout_free,
	.ecc = mt7621_nfc_ooblayout_ecc,
};

static void mt7621_nfc_write_fdm(struct mt7621_nfc *nfc)
{
	struct nand_chip *nand = &nfc->nand;
	u32 vall, valm;
	u8 *oobptr;
	int i, j;

	for (i = 0; i < nand->ecc.steps; i++) {
		vall = 0;
		valm = 0;
		oobptr = oob_fdm_ptr(nand, i);

		for (j = 0; j < 4; j++)
			vall |= (u32)oobptr[j] << (j * 8);

		for (j = 0; j < 4; j++)
			valm |= (u32)oobptr[j + 4] << (j * 8);

		nfi_write32(nfc, NFI_FDML(i), vall);
		nfi_write32(nfc, NFI_FDMM(i), valm);
	}
}

static void mt7621_nfc_read_sector_fdm(struct mt7621_nfc *nfc, u32 sect)
{
	struct nand_chip *nand = &nfc->nand;
	u32 vall, valm;
	u8 *oobptr;
	int i;

	vall = nfi_read32(nfc, NFI_FDML(sect));
	valm = nfi_read32(nfc, NFI_FDMM(sect));
	oobptr = oob_fdm_ptr(nand, sect);

	for (i = 0; i < 4; i++)
		oobptr[i] = (vall >> (i * 8)) & 0xff;

	for (i = 0; i < 4; i++)
		oobptr[i + 4] = (valm >> (i * 8)) & 0xff;
}
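
/*
 * HW-ECC page read: with AUTO_FMT and HW_ECC enabled, the NFI strips the
 * spare area and feeds each sector through the ECC decoder while the data
 * is read out via PIO. After each sector, the decoder result is collected,
 * the FDM bytes are copied from the NFI FDM registers into oob_poi, and
 * correctable bitflips are fixed up in place. The return value is the
 * number of bitflips accumulated across sectors, or a negative error if
 * any sector timed out or was uncorrectable.
 */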
static int mt7621_nfc_read_page_hwecc(struct nand_chip *nand, uint8_t *buf,
				      int oob_required, int page)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	struct mtd_info *mtd = nand_to_mtd(nand);
	int bitflips = 0;
	int rc, i;

	nand_read_page_op(nand, page, 0, NULL, 0);

	nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
		    CNFG_READ_MODE | CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);

	mt7621_ecc_decoder_op(nfc, true);

	nfi_write16(nfc, NFI_CON,
		    CON_NFI_BRD | (nand->ecc.steps << CON_NFI_SEC_S));

	for (i = 0; i < nand->ecc.steps; i++) {
		if (buf)
			mt7621_nfc_read_data(nfc, page_data_ptr(nand, buf, i),
					     nand->ecc.size);
		else
			mt7621_nfc_read_data_discard(nfc, nand->ecc.size);

		rc = mt7621_ecc_decoder_wait_done(nfc, i);

		mt7621_nfc_read_sector_fdm(nfc, i);

		if (rc < 0) {
			bitflips = -EIO;
			continue;
		}

		rc = mt7621_ecc_correct_check(nfc,
			buf ? page_data_ptr(nand, buf, i) : NULL,
			oob_fdm_ptr(nand, i), i);

		if (rc < 0) {
			dev_dbg(nfc->dev,
				"Uncorrectable ECC error at page %d.%d\n",
				page, i);
			bitflips = -EBADMSG;
			mtd->ecc_stats.failed++;
		} else if (bitflips >= 0) {
			bitflips += rc;
			mtd->ecc_stats.corrected += rc;
		}
	}

	mt7621_ecc_decoder_op(nfc, false);

	nfi_write16(nfc, NFI_CON, 0);

	return bitflips;
}

static int mt7621_nfc_read_page_raw(struct nand_chip *nand, uint8_t *buf,
				    int oob_required, int page)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	int i;

	nand_read_page_op(nand, page, 0, NULL, 0);

	nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
		    CNFG_READ_MODE);

	nfi_write16(nfc, NFI_CON,
		    CON_NFI_BRD | (nand->ecc.steps << CON_NFI_SEC_S));

	for (i = 0; i < nand->ecc.steps; i++) {
		/* Read data */
		if (buf)
			mt7621_nfc_read_data(nfc, page_data_ptr(nand, buf, i),
					     nand->ecc.size);
		else
			mt7621_nfc_read_data_discard(nfc, nand->ecc.size);

		/* Read FDM */
		mt7621_nfc_read_data(nfc, oob_fdm_ptr(nand, i), NFI_FDM_SIZE);

		/* Read ECC parity data */
		mt7621_nfc_read_data(nfc, oob_ecc_ptr(nfc, i),
				     nfc->spare_per_sector - NFI_FDM_SIZE);
	}

	nfi_write16(nfc, NFI_CON, 0);

	return 0;
}

static int mt7621_nfc_read_oob_hwecc(struct nand_chip *nand, int page)
{
	return mt7621_nfc_read_page_hwecc(nand, NULL, 1, page);
}

static int mt7621_nfc_read_oob_raw(struct nand_chip *nand, int page)
{
	return mt7621_nfc_read_page_raw(nand, NULL, 1, page);
}
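
/*
 * Return 1 if both the data buffer (when provided) and all FDM bytes in
 * oob_poi contain only 0xff, i.e. the page to be programmed is empty.
 * Used to skip programming such pages so the ECC engine does not write
 * non-0xff parity bytes into an otherwise erased page.
 */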
static int mt7621_nfc_check_empty_page(struct nand_chip *nand, const u8 *buf)
{
	struct mtd_info *mtd = nand_to_mtd(nand);
	uint32_t i, j;
	u8 *oobptr;

	if (buf) {
		for (i = 0; i < mtd->writesize; i++)
			if (buf[i] != 0xff)
				return 0;
	}

	for (i = 0; i < nand->ecc.steps; i++) {
		oobptr = oob_fdm_ptr(nand, i);
		for (j = 0; j < NFI_FDM_SIZE; j++)
			if (oobptr[j] != 0xff)
				return 0;
	}

	return 1;
}

static int mt7621_nfc_write_page_hwecc(struct nand_chip *nand,
				       const uint8_t *buf, int oob_required,
				       int page)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	struct mtd_info *mtd = nand_to_mtd(nand);

	if (mt7621_nfc_check_empty_page(nand, buf)) {
		/*
		 * The MT7621 ECC engine always generates parity code for
		 * input pages, even for empty pages. Doing so would write
		 * ECC parity code back to the oob region, which means such
		 * pages would no longer be empty pages.
		 *
		 * To avoid this, stop the write operation if the current
		 * page is an empty page.
		 */
		return 0;
	}

	nand_prog_page_begin_op(nand, page, 0, NULL, 0);

	nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
		    CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);

	mt7621_ecc_encoder_op(nfc, true);

	mt7621_nfc_write_fdm(nfc);

	nfi_write16(nfc, NFI_CON,
		    CON_NFI_BWR | (nand->ecc.steps << CON_NFI_SEC_S));

	if (buf)
		mt7621_nfc_write_data(nfc, buf, mtd->writesize);
	else
		mt7621_nfc_write_data_empty(nfc, mtd->writesize);

	mt7621_nfc_wait_write_completion(nfc, nand);

	mt7621_ecc_encoder_op(nfc, false);

	nfi_write16(nfc, NFI_CON, 0);

	return nand_prog_page_end_op(nand);
}

static int mt7621_nfc_write_page_raw(struct nand_chip *nand,
				     const uint8_t *buf, int oob_required,
				     int page)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	int i;

	nand_prog_page_begin_op(nand, page, 0, NULL, 0);

	nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S));

	nfi_write16(nfc, NFI_CON,
		    CON_NFI_BWR | (nand->ecc.steps << CON_NFI_SEC_S));

	for (i = 0; i < nand->ecc.steps; i++) {
		/* Write data */
		if (buf)
			mt7621_nfc_write_data(nfc, page_data_ptr(nand, buf, i),
					      nand->ecc.size);
		else
			mt7621_nfc_write_data_empty(nfc, nand->ecc.size);

		/* Write FDM */
		mt7621_nfc_write_data(nfc, oob_fdm_ptr(nand, i),
				      NFI_FDM_SIZE);

		/* Write dummy ECC parity data */
		mt7621_nfc_write_data_empty(nfc, nfc->spare_per_sector -
					    NFI_FDM_SIZE);
	}

	mt7621_nfc_wait_write_completion(nfc, nand);

	nfi_write16(nfc, NFI_CON, 0);

	return nand_prog_page_end_op(nand);
}

static int mt7621_nfc_write_oob_hwecc(struct nand_chip *nand, int page)
{
	return mt7621_nfc_write_page_hwecc(nand, NULL, 1, page);
}

static int mt7621_nfc_write_oob_raw(struct nand_chip *nand, int page)
{
	return mt7621_nfc_write_page_raw(nand, NULL, 1, page);
}

static int mt7621_nfc_init_chip(struct mt7621_nfc *nfc)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd;
	int ret;

	nand->controller = &nfc->controller;
	nand_set_controller_data(nand, (void *)nfc);
	nand_set_flash_node(nand, nfc->dev->of_node);
	nand->options |= NAND_USES_DMA | NAND_NO_SUBPAGE_WRITE |
			 NAND_SKIP_BBTSCAN;
	if (!nfc->nfi_clk)
		nand->options |= NAND_KEEP_TIMINGS;

	nand->ecc.engine_type = NAND_ECC_ENGINE_TYPE_ON_HOST;
	nand->ecc.read_page = mt7621_nfc_read_page_hwecc;
	nand->ecc.read_page_raw = mt7621_nfc_read_page_raw;
	nand->ecc.write_page = mt7621_nfc_write_page_hwecc;
	nand->ecc.write_page_raw = mt7621_nfc_write_page_raw;
	nand->ecc.read_oob = mt7621_nfc_read_oob_hwecc;
	nand->ecc.read_oob_raw = mt7621_nfc_read_oob_raw;
	nand->ecc.write_oob = mt7621_nfc_write_oob_hwecc;
	nand->ecc.write_oob_raw = mt7621_nfc_write_oob_raw;

	mtd = nand_to_mtd(nand);
	mtd->owner = THIS_MODULE;
	mtd->dev.parent = nfc->dev;
	mtd->name = MT7621_NFC_NAME;
	mtd_set_ooblayout(mtd, &mt7621_nfc_ooblayout_ops);

	mt7621_nfc_hw_init(nfc);

	ret = nand_scan(nand, 1);
	if (ret)
		return ret;

	mtk_bmt_attach(mtd);

	ret = mtd_device_register(mtd, NULL, 0);
	if (ret) {
		dev_err(nfc->dev, "Failed to register MTD: %d\n", ret);
		mtk_bmt_detach(mtd);
		nand_cleanup(nand);
		return ret;
	}

	return 0;
}
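
/*
 * Probe: map the "nfi" and "ecc" register ranges, optionally grab and
 * enable the "nfi_clk" clock (interface timing setup is skipped and
 * NAND_KEEP_TIMINGS is set when it is absent), then initialise the
 * hardware, scan the chip, attach the MTK bad-block-management table and
 * register the MTD device.
 */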
static int mt7621_nfc_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct mt7621_nfc *nfc;
	struct resource *res;
	int ret;

	nfc = devm_kzalloc(dev, sizeof(*nfc), GFP_KERNEL);
	if (!nfc)
		return -ENOMEM;

	nand_controller_init(&nfc->controller);
	nfc->controller.ops = &mt7621_nfc_controller_ops;
	nfc->dev = dev;

	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "nfi");
	nfc->nfi_base = res->start;
	nfc->nfi_regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(nfc->nfi_regs)) {
		ret = PTR_ERR(nfc->nfi_regs);
		return ret;
	}

	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "ecc");
	nfc->ecc_regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(nfc->ecc_regs)) {
		ret = PTR_ERR(nfc->ecc_regs);
		return ret;
	}

	nfc->nfi_clk = devm_clk_get(dev, "nfi_clk");
	if (IS_ERR(nfc->nfi_clk)) {
		dev_warn(dev, "nfi clk not provided\n");
		nfc->nfi_clk = NULL;
	} else {
		ret = clk_prepare_enable(nfc->nfi_clk);
		if (ret) {
			dev_err(dev, "Failed to enable nfi core clock\n");
			return ret;
		}
	}

	platform_set_drvdata(pdev, nfc);

	ret = mt7621_nfc_init_chip(nfc);
	if (ret) {
		dev_err(dev, "Failed to initialize nand chip\n");
		goto clk_disable;
	}

	return 0;

clk_disable:
	clk_disable_unprepare(nfc->nfi_clk);

	return ret;
}

static int mt7621_nfc_remove(struct platform_device *pdev)
{
	struct mt7621_nfc *nfc = platform_get_drvdata(pdev);
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);

	mtk_bmt_detach(mtd);
	mtd_device_unregister(mtd);
	nand_cleanup(nand);
	clk_disable_unprepare(nfc->nfi_clk);

	return 0;
}
static const struct of_device_id mt7621_nfc_id_table[] = {
	{ .compatible = "mediatek,mt7621-nfc" },
	{ },
};
MODULE_DEVICE_TABLE(of, mt7621_nfc_id_table);

static struct platform_driver mt7621_nfc_driver = {
	.probe = mt7621_nfc_probe,
	.remove = mt7621_nfc_remove,
	.driver = {
		.name = MT7621_NFC_NAME,
		.owner = THIS_MODULE,
		.of_match_table = mt7621_nfc_id_table,
	},
};
module_platform_driver(mt7621_nfc_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Weijie Gao <[email protected]>");
MODULE_DESCRIPTION("MediaTek MT7621 NAND Flash Controller driver");