/*
 *
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * License terms: GPL V2.0.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
 * details.
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 *
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"

/* MDMA Generic getter/setter */
#define STM32_MDMA_SHIFT(n)		(ffs(n) - 1)
#define STM32_MDMA_SET(n, mask)		(((n) << STM32_MDMA_SHIFT(mask)) & \
					 (mask))
#define STM32_MDMA_GET(n, mask)		(((n) & (mask)) >> \
					 STM32_MDMA_SHIFT(mask))
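/*
 * Example (illustrative): with mask = GENMASK(7, 6), STM32_MDMA_SHIFT()
 * yields 6, so STM32_MDMA_SET(2, mask) == 0x80 and
 * STM32_MDMA_GET(0x80, mask) == 2.
 */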

#define STM32_MDMA_GISR0		0x0000 /* MDMA Int Status Reg 0 */
#define STM32_MDMA_GISR1		0x0004 /* MDMA Int Status Reg 1 */

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					 | STM32_MDMA_CIFCR_CBTIF \
					 | STM32_MDMA_CIFCR_CBRTIF \
					 | STM32_MDMA_CIFCR_CCTCIF \
					 | STM32_MDMA_CIFCR_CTEIF)

/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)

/* MDMA Channel x control register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		STM32_MDMA_SET(n, \
						STM32_MDMA_CCR_PL_MASK)
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					 | STM32_MDMA_CCR_BTIE \
					 | STM32_MDMA_CCR_BRTIE \
					 | STM32_MDMA_CCR_CTCIE \
					 | STM32_MDMA_CCR_TEIE)

/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		STM32_MDMA_SET((n), \
						STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_TRGM_GET(n)	STM32_MDMA_GET((n), \
						STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		STM32_MDMA_SET(n, \
						STM32_MDMA_CTCR_PAM_MASK)
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		STM32_MDMA_SET((n), \
						STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_TLEN_GET(n)	STM32_MDMA_GET((n), \
						STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		STM32_MDMA_SET((n), \
						STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_LEN2_GET(n)	STM32_MDMA_GET((n), \
						STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	STM32_MDMA_SET(n, \
						STM32_MDMA_CTCR_DBURST_MASK)
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	STM32_MDMA_SET(n, \
						STM32_MDMA_CTCR_SBURST_MASK)
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	STM32_MDMA_SET((n), \
						STM32_MDMA_CTCR_DINCOS_MASK)
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	STM32_MDMA_SET((n), \
						STM32_MDMA_CTCR_SINCOS_MASK)
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	STM32_MDMA_SET(n, \
						STM32_MDMA_CTCR_DSIZE_MASK)
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	STM32_MDMA_SET(n, \
						STM32_MDMA_CTCR_SSIZE_MASK)
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		STM32_MDMA_SET((n), \
						STM32_MDMA_CTCR_DINC_MASK)
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		STM32_MDMA_SET((n), \
						STM32_MDMA_CTCR_SINC_MASK)
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					 | STM32_MDMA_CTCR_DINC_MASK \
					 | STM32_MDMA_CTCR_SINCOS_MASK \
					 | STM32_MDMA_CTCR_DINCOS_MASK \
					 | STM32_MDMA_CTCR_LEN2_MSK \
					 | STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	STM32_MDMA_SET(n, \
						STM32_MDMA_CBNDTR_BRC_MK)
#define STM32_MDMA_CBNDTR_BRC_GET(n)	STM32_MDMA_GET((n), \
						STM32_MDMA_CBNDTR_BRC_MK)

#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	STM32_MDMA_SET(n, \
						STM32_MDMA_CBNDTR_BNDT_MASK)

/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		STM32_MDMA_SET(n, \
						STM32_MDMA_CBRUR_DUV_MASK)
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		STM32_MDMA_SET(n, \
						STM32_MDMA_CBRUR_SUV_MASK)

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(7, 0)
#define STM32_MDMA_CTBR_TSEL(n)		STM32_MDMA_SET(n, \
						STM32_MDMA_CTBR_TSEL_MASK)

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))

#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		63
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x11

enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};

enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
	u32 request;
	u32 priority_level;
	u32 transfer_config;
	u32 mask_addr;
	u32 mask_data;
};

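/*
 * Hardware descriptor reloaded by the controller through the link address
 * register (CLAR): the fields mirror the channel registers from CTCR
 * (0x50) to CMDR (0x74), including the reserved word between CTBR and
 * CMAR ("dummy").
 */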
struct stm32_mdma_hwdesc {
	u32 ctcr;
	u32 cbndtr;
	u32 csar;
	u32 cdar;
	u32 cbrur;
	u32 clar;
	u32 ctbr;
	u32 dummy;
	u32 cmar;
	u32 cmdr;
} __aligned(64);

struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;
	dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;
	u32 ccr;
	bool cyclic;
	u32 count;
	struct stm32_mdma_desc_node node[];
};

struct stm32_mdma_chan {
	struct virt_dma_chan vchan;
	struct dma_pool *desc_pool;
	u32 id;
	struct stm32_mdma_desc *desc;
	u32 curr_hwdesc;
	struct dma_slave_config dma_config;
	struct stm32_mdma_chan_config chan_config;
	bool busy;
	u32 mem_burst;
	u32 mem_width;
};

struct stm32_mdma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	int irq;
	struct reset_control *rst;
	u32 nr_channels;
	u32 nr_requests;
	u32 nr_ahb_addr_masks;
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[];
};

static struct stm32_mdma_device *stm32_mdma_get_dev(
	struct stm32_mdma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
			    ddev);
}

static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_mdma_chan, vchan.chan);
}

static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
	return mdma_dev->ddev.dev;
}

static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}

static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
	struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < count; i++) {
		desc->node[i].hwdesc =
			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
				       &desc->node[i].hwdesc_phys);
		if (!desc->node[i].hwdesc)
			goto err;
	}

	desc->count = count;

	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
	int i;

	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
}

static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
				enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(width) - 1;
	default:
		dev_err(chan2dev(chan), "DMA bus width %i not supported\n",
			width);
		return -EINVAL;
	}
}

static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}
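
/*
 * Example (illustrative): addr = 0x10000002 and buf_len = 6 are both 2-byte
 * aligned but not 4-byte aligned, so with tlen >= 2 the loop above settles
 * on DMA_SLAVE_BUSWIDTH_2_BYTES.
 */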

static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 best_burst;

	best_burst = min((u32)1 << __ffs(tlen | buf_len),
			 max_burst * width) / width;

	return (best_burst > 0) ? best_burst : 1;
}
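
/*
 * Example (illustrative): buf_len = 96, tlen = 128, max_burst = 16 and a
 * 4-byte width give min(1 << __ffs(224), 64) / 4 = 32 / 4 = 8 beats per
 * burst.
 */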

static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}

static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 status;
	int ret;

	/* Disable DMA */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
	}

	chan->busy = false;
}

static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* Check if memory device is on AHB or AXI */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xF0000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}
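
/*
 * Illustrative note: if the device tree lists 0x20000000 among the
 * controller's AHB address masks (an assumption about the platform, not
 * something fixed by this driver), a buffer at 0x20001000 matches above and
 * the AHB bus bit is set in CTBR; any other address leaves the channel on
 * the AXI bus.
 */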

static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;

	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * For buffer transfer length (TLEN) we have to set
	 * the number of bytes - 1 in CTCR register
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request as in this case this is a HW one
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

		/* Set device burst value */
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst value */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

		/* Set device burst value */
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			  STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst value */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "DMA direction not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}

static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
				   struct stm32_mdma_desc_node *node)
{
	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
}

static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 next = count + 1;

	hwdesc = desc->node[count].hwdesc;
	hwdesc->ctcr = ctcr;
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			    STM32_MDMA_CBNDTR_BRDUM |
			    STM32_MDMA_CBNDTR_BRSUM |
			    STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

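	/*
	 * Chain the nodes through CLAR; for cyclic transfers the last node
	 * links back to the first one, closing the ring.
	 */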
	if (is_last) {
		if (is_cyclic)
			hwdesc->clar = desc->node[0].hwdesc_phys;
		else
			hwdesc->clar = 0;
	} else {
		hwdesc->clar = desc->node[next].hwdesc_phys;
	}

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}

static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr;
	int i, ret = 0;

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	if (sg_len > 1)
		ccr |= STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	return 0;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
			 u32 sg_len, enum dma_transfer_direction direction,
			 unsigned long flags, void *context)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_desc *desc;
	int i, ret;

	/*
	 * Once the channel has been set up in cyclic mode, it cannot accept
	 * any further request. The DMA channel needs to be aborted or
	 * terminated before another request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	desc = stm32_mdma_alloc_desc(chan, sg_len);
	if (!desc)
		return NULL;

	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
	if (ret < 0)
		goto xfer_setup_err;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the channel has been set up in cyclic mode, it cannot accept
	 * any further request. The DMA channel needs to be aborted or
	 * terminated before another request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = buf_addr + i * period_len;
		}

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the channel has been set up in cyclic mode, it cannot accept
	 * any further request. The DMA channel needs to be aborted or
	 * terminated before another request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

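	/*
	 * Split the copy into blocks of at most STM32_MDMA_MAX_BLOCK_LEN
	 * bytes; e.g. (illustrative) a 200000-byte copy needs
	 * DIV_ROUND_UP(200000, 65536) = 4 hardware descriptors.
	 */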
	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable transfer error interrupt and clear other control bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
		    STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/* Set source best burst size */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}

static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		chan->desc = NULL;
		return;
	}

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}

static void stm32_mdma_issue_pending(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (!vchan_issue_pending(&chan->vchan))
		goto end;

	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);

	if (!chan->desc && !chan->busy)
		stm32_mdma_start_transfer(chan);

end:
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_mdma_pause(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	ret = stm32_mdma_disable_chan(chan);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	if (!ret)
		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

	return ret;
}

static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}

static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->busy) {
		stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_mdma_synchronize(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	vchan_synchronize(&chan->vchan);
}

static int stm32_mdma_slave_config(struct dma_chan *c,
				   struct dma_slave_config *config)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	memcpy(&chan->dma_config, config, sizeof(*config));

	return 0;
}

static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
				      struct stm32_mdma_desc *desc,
				      u32 curr_hwdesc)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
	u32 cbndtr, residue, modulo, burst_size;
	int i;

	residue = 0;
	for (i = curr_hwdesc + 1; i < desc->count; i++) {
		hwdesc = desc->node[i].hwdesc;
		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
	}
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;

	if (!chan->mem_burst)
		return residue;

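	/*
	 * Round the residue up to a burst boundary; e.g. (illustrative) with
	 * an 8-beat burst of 4-byte words (burst_size = 32), a raw residue
	 * of 70 bytes is reported as 96 bytes.
	 */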
	burst_size = chan->mem_burst * chan->mem_width;
	modulo = residue % burst_size;
	if (modulo)
		residue = residue - modulo + burst_size;

	return residue;
}

static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
					    dma_cookie_t cookie,
					    struct dma_tx_state *state)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if ((status == DMA_COMPLETE) || (!state))
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_mdma_desc_residue(chan, chan->desc,
						  chan->curr_hwdesc);
	else if (vdesc)
		residue = stm32_mdma_desc_residue(chan,
						  to_stm32_mdma_desc(vdesc), 0);
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
{
	list_del(&chan->desc->vdesc.node);
	vchan_cookie_complete(&chan->desc->vdesc);
	chan->desc = NULL;
	chan->busy = false;

	/* Start the next transfer if this driver has a next desc */
	stm32_mdma_start_transfer(chan);
}
|  | 1352 |  | 
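/*
 * All MDMA channels share a single interrupt line. GISR0/GISR1 identify the
 * interrupting channel; the channel's CISR flags, masked by the interrupt
 * enables programmed in CCR, then select the event to handle.
 */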
static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
{
	struct stm32_mdma_device *dmadev = devid;
	struct stm32_mdma_chan *chan;
	u32 reg, id, ien, status, flag;

	/* Find out which channel generated the interrupt */
	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
	if (status) {
		id = __ffs(status);
	} else {
		status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1);
		if (!status) {
			dev_dbg(mdma2dev(dmadev), "spurious interrupt\n");
			return IRQ_NONE;
		}
		id = __ffs(status);
		/*
		 * GISR0 reports status for channels 0 to 31,
		 * GISR1 for channels 32 to 62.
		 */
		id += 32;
	}

	chan = &dmadev->chan[id];

	/* Handle interrupt for the channel */
	spin_lock(&chan->vchan.lock);
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	ien = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	/* CCR interrupt enable bits are offset by one from the CISR flags */
	ien &= STM32_MDMA_CCR_IRQ_MASK;
	ien >>= 1;

	if (!(status & ien)) {
		spin_unlock(&chan->vchan.lock);
		dev_dbg(chan2dev(chan),
			"spurious interrupt (status=0x%04x, ien=0x%04x)\n",
			status, ien);
		return IRQ_NONE;
	}

	flag = __ffs(status & ien);
	reg = STM32_MDMA_CIFCR(chan->id);

	switch (1 << flag) {
	case STM32_MDMA_CISR_TEIF:
		id = chan->id;
		status = readl_relaxed(dmadev->base + STM32_MDMA_CESR(id));
		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n", status);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
		break;

	case STM32_MDMA_CISR_CTCIF:
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
		stm32_mdma_xfer_end(chan);
		break;

	case STM32_MDMA_CISR_BRTIF:
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
		break;

	case STM32_MDMA_CISR_BTIF:
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
		chan->curr_hwdesc++;
		if (chan->desc && chan->desc->cyclic) {
			if (chan->curr_hwdesc == chan->desc->count)
				chan->curr_hwdesc = 0;
			vchan_cyclic_callback(&chan->desc->vdesc);
		}
		break;

	case STM32_MDMA_CISR_TCIF:
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
		break;

	default:
		dev_err(chan2dev(chan),
			"interrupt %d unhandled (status=0x%04x)\n",
			1 << flag, status);
	}

	spin_unlock(&chan->vchan.lock);

	return IRQ_HANDLED;
}

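/*
 * Allocate a per-channel DMA pool for hardware descriptors, enable the
 * controller clock and make sure the channel is disabled before first use;
 * the pool and clock are released in stm32_mdma_free_chan_resources().
 */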
static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	int ret;

	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
					   c->device->dev,
					   sizeof(struct stm32_mdma_hwdesc),
					   __alignof__(struct stm32_mdma_hwdesc),
					   0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
		return -ENOMEM;
	}

	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(chan2dev(chan), "clk_prepare_enable failed: %d\n", ret);
		return ret;
	}

	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		clk_disable_unprepare(dmadev->clk);

	return ret;
}

static void stm32_mdma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_mdma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	clk_disable_unprepare(dmadev->clk);
	vchan_free_chan_resources(to_virt_chan(c));
	dmam_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}

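/*
 * Translate a DT phandle into a channel. A client node is expected to
 * provide five cells: request line, priority level, a value for the channel
 * transfer configuration register, and the mask address/data pair used for
 * hardware request generation, e.g. (values purely illustrative):
 *
 *	dmas = <&mdma1 22 0x2 0x10000000 0x00000000 0x00000000>;
 */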
static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{
	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
	struct stm32_mdma_chan *chan;
	struct dma_chan *c;
	struct stm32_mdma_chan_config config;

	if (dma_spec->args_count < 5) {
		dev_err(mdma2dev(dmadev), "Bad number of args\n");
		return NULL;
	}

	config.request = dma_spec->args[0];
	config.priority_level = dma_spec->args[1];
	config.transfer_config = dma_spec->args[2];
	config.mask_addr = dma_spec->args[3];
	config.mask_data = dma_spec->args[4];

	if (config.request >= dmadev->nr_requests) {
		dev_err(mdma2dev(dmadev), "Bad request line\n");
		return NULL;
	}

	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
		return NULL;
	}

	c = dma_get_any_slave_channel(&dmadev->ddev);
	if (!c) {
		dev_err(mdma2dev(dmadev), "No more channels available\n");
		return NULL;
	}

	chan = to_stm32_mdma_chan(c);
	chan->chan_config = config;

	return c;
}

static const struct of_device_id stm32_mdma_of_match[] = {
	{ .compatible = "st,stm32h7-mdma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);

static int stm32_mdma_probe(struct platform_device *pdev)
{
	struct stm32_mdma_chan *chan;
	struct stm32_mdma_device *dmadev;
	struct dma_device *dd;
	struct device_node *of_node;
	struct resource *res;
	u32 nr_channels, nr_requests;
	int i, count, ret;

	of_node = pdev->dev.of_node;
	if (!of_node)
		return -ENODEV;

	ret = device_property_read_u32(&pdev->dev, "dma-channels",
				       &nr_channels);
	if (ret) {
		nr_channels = STM32_MDMA_MAX_CHANNELS;
		dev_warn(&pdev->dev, "MDMA defaulting to %i channels\n",
			 nr_channels);
	}

	ret = device_property_read_u32(&pdev->dev, "dma-requests",
				       &nr_requests);
	if (ret) {
		nr_requests = STM32_MDMA_MAX_REQUESTS;
		dev_warn(&pdev->dev, "MDMA defaulting to %i request lines\n",
			 nr_requests);
	}

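	/*
	 * Probe the number of "st,ahb-addr-masks" entries first, so the
	 * flexible array appended to the device structure can be sized
	 * before the values are read into it.
	 */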
	count = device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
					       NULL, 0);
	if (count < 0)
		count = 0;

	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
			      GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dmadev->nr_channels = nr_channels;
	dmadev->nr_requests = nr_requests;
	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
				       dmadev->ahb_addr_masks,
				       count);
	dmadev->nr_ahb_addr_masks = count;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk)) {
		ret = PTR_ERR(dmadev->clk);
		if (ret == -EPROBE_DEFER)
			dev_info(&pdev->dev, "Missing controller clock\n");
		return ret;
	}

	/* Reset the controller if a reset line is provided */
	dmadev->rst = devm_reset_control_get(&pdev->dev, NULL);
	if (!IS_ERR(dmadev->rst)) {
		reset_control_assert(dmadev->rst);
		udelay(2);
		reset_control_deassert(dmadev->rst);
	}

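	/* Describe the controller's capabilities to the DMA engine core */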
	dd = &dmadev->ddev;
	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
	dd->device_tx_status = stm32_mdma_tx_status;
	dd->device_issue_pending = stm32_mdma_issue_pending;
	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
	dd->device_config = stm32_mdma_slave_config;
	dd->device_pause = stm32_mdma_pause;
	dd->device_resume = stm32_mdma_resume;
	dd->device_terminate_all = stm32_mdma_terminate_all;
	dd->device_synchronize = stm32_mdma_synchronize;
	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
		BIT(DMA_MEM_TO_MEM);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->max_burst = STM32_MDMA_MAX_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

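	/* Initialise a virtual channel for each hardware channel */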
	for (i = 0; i < dmadev->nr_channels; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;
		chan->vchan.desc_free = stm32_mdma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

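	/* A single interrupt line is shared by all channels */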
	dmadev->irq = platform_get_irq(pdev, 0);
	if (dmadev->irq < 0) {
		dev_err(&pdev->dev, "failed to get IRQ\n");
		return dmadev->irq;
	}

	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
			       0, dev_name(&pdev->dev), dmadev);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		return ret;
	}

	ret = dma_async_device_register(dd);
	if (ret)
		return ret;

	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 MDMA DMA OF registration failed %d\n", ret);
		goto err_unregister;
	}

	platform_set_drvdata(pdev, dmadev);

	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");

	return 0;

err_unregister:
	dma_async_device_unregister(dd);

	return ret;
}

static struct platform_driver stm32_mdma_driver = {
	.probe = stm32_mdma_probe,
	.driver = {
		.name = "stm32-mdma",
		.of_match_table = stm32_mdma_of_match,
	},
};

static int __init stm32_mdma_init(void)
{
	return platform_driver_register(&stm32_mdma_driver);
}

subsys_initcall(stm32_mdma_init);

MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
MODULE_LICENSE("GPL v2");