Linux/sound/soc/sof/intel/hda-loader-skl.c


// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license.  When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018-2022 Intel Corporation
//

#include <linux/delay.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/firmware.h>
#include <linux/fs.h>
#include <linux/interrupt.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/pci.h>
#include <linux/slab.h>
#include <sound/hdaudio_ext.h>
#include <sound/sof.h>
#include <sound/pcm_params.h>

#include "../sof-priv.h"
#include "../ops.h"
#include "hda.h"

#define HDA_SKL_WAIT_TIMEOUT            500     /* 500 msec */
#define HDA_SKL_CLDMA_MAX_BUFFER_SIZE   (32 * PAGE_SIZE)

/* Stream Reset */
#define HDA_CL_SD_CTL_SRST_SHIFT        0
#define HDA_CL_SD_CTL_SRST(x)           (((x) & 0x1) << \
                                        HDA_CL_SD_CTL_SRST_SHIFT)

/* Stream Run */
#define HDA_CL_SD_CTL_RUN_SHIFT         1
#define HDA_CL_SD_CTL_RUN(x)            (((x) & 0x1) << \
                                        HDA_CL_SD_CTL_RUN_SHIFT)

/* Interrupt On Completion Enable */
#define HDA_CL_SD_CTL_IOCE_SHIFT        2
#define HDA_CL_SD_CTL_IOCE(x)           (((x) & 0x1) << \
                                        HDA_CL_SD_CTL_IOCE_SHIFT)

/* FIFO Error Interrupt Enable */
#define HDA_CL_SD_CTL_FEIE_SHIFT        3
#define HDA_CL_SD_CTL_FEIE(x)           (((x) & 0x1) << \
                                        HDA_CL_SD_CTL_FEIE_SHIFT)

/* Descriptor Error Interrupt Enable */
#define HDA_CL_SD_CTL_DEIE_SHIFT        4
#define HDA_CL_SD_CTL_DEIE(x)           (((x) & 0x1) << \
                                        HDA_CL_SD_CTL_DEIE_SHIFT)

/* FIFO Limit Change */
#define HDA_CL_SD_CTL_FIFOLC_SHIFT      5
#define HDA_CL_SD_CTL_FIFOLC(x)         (((x) & 0x1) << \
                                        HDA_CL_SD_CTL_FIFOLC_SHIFT)

/* Stripe Control */
#define HDA_CL_SD_CTL_STRIPE_SHIFT      16
#define HDA_CL_SD_CTL_STRIPE(x)         (((x) & 0x3) << \
                                        HDA_CL_SD_CTL_STRIPE_SHIFT)

/* Traffic Priority */
#define HDA_CL_SD_CTL_TP_SHIFT          18
#define HDA_CL_SD_CTL_TP(x)             (((x) & 0x1) << \
                                        HDA_CL_SD_CTL_TP_SHIFT)

/* Bidirectional Direction Control */
#define HDA_CL_SD_CTL_DIR_SHIFT         19
#define HDA_CL_SD_CTL_DIR(x)            (((x) & 0x1) << \
                                        HDA_CL_SD_CTL_DIR_SHIFT)

/* Stream Number */
#define HDA_CL_SD_CTL_STRM_SHIFT        20
#define HDA_CL_SD_CTL_STRM(x)           (((x) & 0xf) << \
                                        HDA_CL_SD_CTL_STRM_SHIFT)

#define HDA_CL_SD_CTL_INT(x)    \
                (HDA_CL_SD_CTL_IOCE(x) | \
                HDA_CL_SD_CTL_FEIE(x) | \
                HDA_CL_SD_CTL_DEIE(x))

#define HDA_CL_SD_CTL_INT_MASK  \
                (HDA_CL_SD_CTL_IOCE(1) | \
                HDA_CL_SD_CTL_FEIE(1) | \
                HDA_CL_SD_CTL_DEIE(1))

#define DMA_ADDRESS_128_BITS_ALIGNMENT  7
#define BDL_ALIGN(x)                    ((x) >> DMA_ADDRESS_128_BITS_ALIGNMENT)

/* Buffer Descriptor List Lower Base Address */
#define HDA_CL_SD_BDLPLBA_SHIFT         7
#define HDA_CL_SD_BDLPLBA_MASK          GENMASK(31, 7)
#define HDA_CL_SD_BDLPLBA(x)            \
        ((BDL_ALIGN(lower_32_bits(x)) << HDA_CL_SD_BDLPLBA_SHIFT) & \
         HDA_CL_SD_BDLPLBA_MASK)

/* Buffer Descriptor List Upper Base Address */
#define HDA_CL_SD_BDLPUBA(x)            \
                        (upper_32_bits(x))

/* Software Position in Buffer Enable */
#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT     0
#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK      \
                        (1 << HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT)

#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(x)        \
                        (((x) << HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT) & \
                         HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK)

#define HDA_CL_DMA_SD_INT_COMPLETE              0x4

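/*
 * Program a single buffer descriptor list entry for the code loader DMA.
 * The entry layout is { lower 32 bits of address, upper 32 bits, size,
 * IOC flag }. Returns the number of fragments used (always one here).
 */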
static int cl_skl_cldma_setup_bdle(struct snd_sof_dev *sdev,
                                   struct snd_dma_buffer *dmab_data,
                                   __le32 **bdlp, int size, int with_ioc)
{
        phys_addr_t addr = virt_to_phys(dmab_data->area);
        __le32 *bdl = *bdlp;

        /*
         * This code is simplified by using one fragment of physical memory and assuming
         * all the code fits. This could be improved with scatter-gather but the firmware
         * size is limited by DSP memory anyway.
         */
        bdl[0] = cpu_to_le32(lower_32_bits(addr));
        bdl[1] = cpu_to_le32(upper_32_bits(addr));
        bdl[2] = cpu_to_le32(size);
        bdl[3] = (!with_ioc) ? 0 : cpu_to_le32(0x01);

        return 1; /* one fragment */
}

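/*
 * Start or stop the code loader DMA stream by toggling the Run bit in
 * SD_CTL, then poll (up to 300 times with a 3 us delay) until the
 * hardware reflects the requested state.
 */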
static void cl_skl_cldma_stream_run(struct snd_sof_dev *sdev, bool enable)
{
        int sd_offset = SOF_HDA_ADSP_LOADER_BASE;
        unsigned char val;
        int retries;
        u32 run = enable ? 0x1 : 0;

        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
                                sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
                                HDA_CL_SD_CTL_RUN(1), HDA_CL_SD_CTL_RUN(run));

        retries = 300;
        do {
                udelay(3);

                /* wait for the hardware to report the requested Run bit state */
                val = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
                                       sd_offset + SOF_HDA_ADSP_REG_SD_CTL);
                val &= HDA_CL_SD_CTL_RUN(1);
                if (enable && val)
                        break;
                else if (!enable && !val)
                        break;
        } while (--retries);

        if (retries == 0)
                dev_err(sdev->dev, "%s: failed to set Run bit=%d enable=%d\n",
                        __func__, val, enable);
}

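/*
 * Stop the stream and restore the code loader DMA registers (interrupt
 * enables, stream number, BDL base addresses, cyclic buffer length and
 * last valid index) to their reset values.
 */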
static void cl_skl_cldma_stream_clear(struct snd_sof_dev *sdev)
{
        int sd_offset = SOF_HDA_ADSP_LOADER_BASE;

        /* make sure Run bit is cleared before setting stream register */
        cl_skl_cldma_stream_run(sdev, 0);

        /* Disable the Interrupt On Completion, FIFO Error Interrupt,
         * Descriptor Error Interrupt and set the cldma stream number to 0.
         */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
                                sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
                                HDA_CL_SD_CTL_INT_MASK, HDA_CL_SD_CTL_INT(0));
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
                                sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
                                HDA_CL_SD_CTL_STRM(0xf), HDA_CL_SD_CTL_STRM(0));

        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, HDA_CL_SD_BDLPLBA(0));
        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 0);

        /* Set the Cyclic Buffer Length to 0. */
        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_SD_CBL, 0);
        /* Set the Last Valid Index. */
        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_SD_LVI, 0);
}

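/*
 * Enable the Software Position In Buffer (SPIB) mechanism if requested
 * and program the SPIB register with the number of valid bytes.
 */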
static void cl_skl_cldma_setup_spb(struct snd_sof_dev *sdev,
                                   unsigned int size, bool enable)
{
        int sd_offset = SOF_DSP_REG_CL_SPBFIFO;

        if (enable)
                snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
                                        sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
                                        HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
                                        HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(1));

        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPIB, size);
}

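/* Enable or disable the code loader DMA interrupt in ADSPIC. */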
static void cl_skl_cldma_set_intr(struct snd_sof_dev *sdev, bool enable)
{
        u32 val = enable ? HDA_DSP_ADSPIC_CL_DMA : 0;

        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC,
                                HDA_DSP_ADSPIC_CL_DMA, val);
}

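/* Disable SPIB and clear the software position in buffer. */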
static void cl_skl_cldma_cleanup_spb(struct snd_sof_dev *sdev)
{
        int sd_offset = SOF_DSP_REG_CL_SPBFIFO;

        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
                                sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
                                HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
                                HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(0));

        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPIB, 0);
}

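/*
 * Reset the code loader DMA stream, then program the BDL base address,
 * cyclic buffer length, last valid index, interrupt enables and stream
 * number for the upcoming transfer.
 */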
static void cl_skl_cldma_setup_controller(struct snd_sof_dev *sdev,
                                          struct snd_dma_buffer *dmab_bdl,
                                          unsigned int max_size, u32 count)
{
        int sd_offset = SOF_HDA_ADSP_LOADER_BASE;

        /* Clear the stream first and then set it. */
        cl_skl_cldma_stream_clear(sdev);

        /* setting the stream register */
        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
                          HDA_CL_SD_BDLPLBA(dmab_bdl->addr));
        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
                          HDA_CL_SD_BDLPUBA(dmab_bdl->addr));

        /* Set the Cyclic Buffer Length. */
        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_SD_CBL, max_size);
        /* Set the Last Valid Index. */
        snd_sof_dsp_write(sdev, HDA_DSP_BAR,
                          sd_offset + SOF_HDA_ADSP_REG_SD_LVI, count - 1);

        /* Set the Interrupt On Completion, FIFO Error Interrupt,
         * Descriptor Error Interrupt and the cldma stream number.
         */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
                                sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
                                HDA_CL_SD_CTL_INT_MASK, HDA_CL_SD_CTL_INT(1));
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
                                sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
                                HDA_CL_SD_CTL_STRM(0xf),
                                HDA_CL_SD_CTL_STRM(1));
}

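/*
 * Allocate the firmware DMA buffer and the BDL buffer, fill in the single
 * BDL entry and configure the code loader DMA controller.
 */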
static int cl_stream_prepare_skl(struct snd_sof_dev *sdev,
                                 struct snd_dma_buffer *dmab,
                                 struct snd_dma_buffer *dmab_bdl)
{
        unsigned int bufsize = HDA_SKL_CLDMA_MAX_BUFFER_SIZE;
        __le32 *bdl;
        int frags;
        int ret;

        ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, sdev->dev, bufsize, dmab);
        if (ret < 0) {
                dev_err(sdev->dev, "%s: failed to alloc fw buffer: %x\n", __func__, ret);
                return ret;
        }

        ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, sdev->dev, bufsize, dmab_bdl);
        if (ret < 0) {
                dev_err(sdev->dev, "%s: failed to alloc bdl: %x\n", __func__, ret);
                snd_dma_free_pages(dmab);
                return ret;
        }

        bdl = (__le32 *)dmab_bdl->area;
        frags = cl_skl_cldma_setup_bdle(sdev, dmab, &bdl, bufsize, 1);
        cl_skl_cldma_setup_controller(sdev, dmab_bdl, bufsize, frags);

        return ret;
}

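/*
 * Undo cl_stream_prepare_skl(): disable SPIB, reset the stream and free
 * both DMA buffers.
 */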
static void cl_cleanup_skl(struct snd_sof_dev *sdev,
                           struct snd_dma_buffer *dmab,
                           struct snd_dma_buffer *dmab_bdl)
{
        cl_skl_cldma_cleanup_spb(sdev);
        cl_skl_cldma_stream_clear(sdev);
        snd_dma_free_pages(dmab);
        snd_dma_free_pages(dmab_bdl);
}

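/*
 * Bring up the init core, prepare the code loader DMA stream, enable the
 * IPC interrupts and wait for the ROM to report the INIT_DONE state.
 * On failure, dump debug info, release the stream and buffers and power
 * the init core down again.
 */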
static int cl_dsp_init_skl(struct snd_sof_dev *sdev,
                           struct snd_dma_buffer *dmab,
                           struct snd_dma_buffer *dmab_bdl)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        const struct sof_intel_dsp_desc *chip = hda->desc;
        unsigned int status;
        u32 flags;
        int ret;

        /* Check if the init_core is already enabled: if so, reset it and make
         * it run; if not, power it down and then enable it again.
         */
        if (hda_dsp_core_is_enabled(sdev, chip->init_core_mask)) {
                /* if enabled, reset it, and run the init_core. */
                ret = hda_dsp_core_stall_reset(sdev, chip->init_core_mask);
                if (ret < 0)
                        goto err;

                ret = hda_dsp_core_run(sdev, chip->init_core_mask);
                if (ret < 0) {
                        dev_err(sdev->dev, "%s: dsp core start failed %d\n", __func__, ret);
                        goto err;
                }
        } else {
                /* if not enabled, power it down first and then power it up
                 * and run the init_core.
                 */
                ret = hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
                if (ret < 0) {
                        dev_err(sdev->dev, "%s: dsp core0 disable fail: %d\n", __func__, ret);
                        goto err;
                }
                ret = hda_dsp_enable_core(sdev, chip->init_core_mask);
                if (ret < 0) {
                        dev_err(sdev->dev, "%s: dsp core0 enable fail: %d\n", __func__, ret);
                        goto err;
                }
        }

        /* prepare DMA for code loader stream */
        ret = cl_stream_prepare_skl(sdev, dmab, dmab_bdl);
        if (ret < 0) {
                dev_err(sdev->dev, "%s: dma prepare fw loading err: %x\n", __func__, ret);
                return ret;
        }

        /* enable the interrupt */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC,
                                HDA_DSP_ADSPIC_IPC, HDA_DSP_ADSPIC_IPC);

        /* enable IPC DONE interrupt */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl,
                                HDA_DSP_REG_HIPCCTL_DONE,
                                HDA_DSP_REG_HIPCCTL_DONE);

        /* enable IPC BUSY interrupt */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl,
                                HDA_DSP_REG_HIPCCTL_BUSY,
                                HDA_DSP_REG_HIPCCTL_BUSY);

        /* poll the ROM init status information */
        ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
                                            chip->rom_status_reg, status,
                                            (FSR_TO_STATE_CODE(status)
                                             == FSR_STATE_INIT_DONE),
                                            HDA_DSP_REG_POLL_INTERVAL_US,
                                            chip->rom_init_timeout *
                                            USEC_PER_MSEC);
        if (ret < 0)
                goto err;

        return ret;

err:
        flags = SOF_DBG_DUMP_PCI | SOF_DBG_DUMP_MBOX;

        snd_sof_dsp_dbg_dump(sdev, "Boot failed\n", flags);
        cl_cleanup_skl(sdev, dmab, dmab_bdl);
        hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
        return ret;
}

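/*
 * Copy one chunk of the firmware image into the DMA buffer, arm the
 * completion flag, optionally enable the CLDMA interrupt, program the
 * SPIB with the chunk size and start the stream.
 */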
static void cl_skl_cldma_fill_buffer(struct snd_sof_dev *sdev,
                                     struct snd_dma_buffer *dmab,
                                     unsigned int bufsize,
                                     unsigned int copysize,
                                     const void *curr_pos,
                                     bool intr_enable)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;

        /* copy the image into the buffer with the maximum buffer size. */
        unsigned int size = (bufsize == copysize) ? bufsize : copysize;

        memcpy(dmab->area, curr_pos, size);

        /* Set the wait condition for every load. */
        hda->code_loading = 1;

        /* Set the interrupt. */
        if (intr_enable)
                cl_skl_cldma_set_intr(sdev, true);

        /* Set the SPB. */
        cl_skl_cldma_setup_spb(sdev, size, true);

        /* Trigger the code loading stream. */
        cl_skl_cldma_stream_run(sdev, true);
}

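/*
 * Wait (with timeout) for the CLDMA interrupt handler to signal that the
 * current chunk has been transferred, then verify the completion bit in
 * the stream status register.
 */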
static int cl_skl_cldma_wait_interruptible(struct snd_sof_dev *sdev,
                                           bool intr_wait)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        const struct sof_intel_dsp_desc *chip = hda->desc;
        int sd_offset = SOF_HDA_ADSP_LOADER_BASE;
        u8 cl_dma_intr_status;

        /*
         * Wait for the CLDMA interrupt to inform that the binary segment
         * transfer is complete.
         */
        if (!wait_event_timeout(hda->waitq, !hda->code_loading,
                                msecs_to_jiffies(HDA_SKL_WAIT_TIMEOUT))) {
                dev_err(sdev->dev, "cldma copy timeout\n");
                dev_err(sdev->dev, "ROM code=%#x: FW status=%#x\n",
                        snd_sof_dsp_read(sdev, HDA_DSP_BAR, HDA_DSP_SRAM_REG_ROM_ERROR),
                        snd_sof_dsp_read(sdev, HDA_DSP_BAR, chip->rom_status_reg));
                return -EIO;
        }

        /* now check DMA interrupt status */
        cl_dma_intr_status = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
                                              sd_offset + SOF_HDA_ADSP_REG_SD_STS);

        if (!(cl_dma_intr_status & HDA_CL_DMA_SD_INT_COMPLETE)) {
                dev_err(sdev->dev, "cldma copy failed\n");
                return -EIO;
        }

        dev_dbg(sdev->dev, "cldma buffer copy complete\n");
        return 0;
}

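/*
 * Stream the firmware image to the DSP in bufsize chunks. Each full chunk
 * is transferred with the CLDMA interrupt enabled and waited on; the final
 * chunk is started with the interrupt disabled and the function returns
 * without waiting for it.
 */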
static int
cl_skl_cldma_copy_to_buf(struct snd_sof_dev *sdev,
                         struct snd_dma_buffer *dmab,
                         const void *bin,
                         u32 total_size, u32 bufsize)
{
        unsigned int bytes_left = total_size;
        const void *curr_pos = bin;
        int ret;

        if (total_size <= 0)
                return -EINVAL;

        while (bytes_left > 0) {
                if (bytes_left > bufsize) {
                        dev_dbg(sdev->dev, "cldma copy %#x bytes\n", bufsize);

                        cl_skl_cldma_fill_buffer(sdev, dmab, bufsize, bufsize, curr_pos, true);

                        ret = cl_skl_cldma_wait_interruptible(sdev, false);
                        if (ret < 0) {
                                dev_err(sdev->dev, "%s: fw failed to load. %#x bytes remaining\n",
                                        __func__, bytes_left);
                                return ret;
                        }

                        bytes_left -= bufsize;
                        curr_pos += bufsize;
                } else {
                        dev_dbg(sdev->dev, "cldma copy %#x bytes\n", bytes_left);

                        cl_skl_cldma_set_intr(sdev, false);
                        cl_skl_cldma_fill_buffer(sdev, dmab, bufsize, bytes_left, curr_pos, false);
                        return 0;
                }
        }

        return bytes_left;
}

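/*
 * Copy the firmware payload (past sdev->basefw.payload_offset) to the DSP
 * through the code loader DMA.
 */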
static int cl_copy_fw_skl(struct snd_sof_dev *sdev,
                          struct snd_dma_buffer *dmab)
{
        const struct firmware *fw = sdev->basefw.fw;
        struct firmware stripped_firmware;
        unsigned int bufsize = HDA_SKL_CLDMA_MAX_BUFFER_SIZE;
        int ret;

        stripped_firmware.data = fw->data + sdev->basefw.payload_offset;
        stripped_firmware.size = fw->size - sdev->basefw.payload_offset;

        dev_dbg(sdev->dev, "firmware size: %#zx buffer size %#x\n", fw->size, bufsize);

        ret = cl_skl_cldma_copy_to_buf(sdev, dmab, stripped_firmware.data,
                                       stripped_firmware.size, bufsize);
        if (ret < 0)
                dev_err(sdev->dev, "%s: fw copy failed %d\n", __func__, ret);

        return ret;
}

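/*
 * Boot the firmware on platforms using the SKL code loader: initialize
 * the ROM (with one retry), stream the firmware through the code loader
 * DMA, wait for the ROM to report the BASEFW_ENTERED state, then stop the
 * stream and release the DMA buffers. Returns the started core mask on
 * success or a negative error code.
 */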
int hda_dsp_cl_boot_firmware_skl(struct snd_sof_dev *sdev)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        const struct sof_intel_dsp_desc *chip = hda->desc;
        struct snd_dma_buffer dmab_bdl;
        struct snd_dma_buffer dmab;
        unsigned int reg;
        u32 flags;
        int ret;

        ret = cl_dsp_init_skl(sdev, &dmab, &dmab_bdl);

        /* retry enabling the core and loading the ROM; this has been seen to help */
        if (ret < 0) {
                ret = cl_dsp_init_skl(sdev, &dmab, &dmab_bdl);
                if (ret < 0) {
                        dev_err(sdev->dev, "Error code=%#x: FW status=%#x\n",
                                snd_sof_dsp_read(sdev, HDA_DSP_BAR, HDA_DSP_SRAM_REG_ROM_ERROR),
                                snd_sof_dsp_read(sdev, HDA_DSP_BAR, chip->rom_status_reg));
                        dev_err(sdev->dev, "Core En/ROM load fail:%d\n", ret);
                        return ret;
                }
        }

        dev_dbg(sdev->dev, "ROM init successful\n");

        /* at this point the DSP ROM has been initialized and should be ready
         * for code loading and firmware boot
         */
        ret = cl_copy_fw_skl(sdev, &dmab);
        if (ret < 0) {
                dev_err(sdev->dev, "%s: load firmware failed: %d\n", __func__, ret);
                goto err;
        }

        ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
                                            chip->rom_status_reg, reg,
                                            (FSR_TO_STATE_CODE(reg)
                                             == FSR_STATE_ROM_BASEFW_ENTERED),
                                            HDA_DSP_REG_POLL_INTERVAL_US,
                                            HDA_DSP_BASEFW_TIMEOUT_US);

        dev_dbg(sdev->dev, "Firmware download successful, booting...\n");

        cl_skl_cldma_stream_run(sdev, false);
        cl_cleanup_skl(sdev, &dmab, &dmab_bdl);

        if (!ret)
                return chip->init_core_mask;

        return ret;

err:
        flags = SOF_DBG_DUMP_PCI | SOF_DBG_DUMP_MBOX;

        snd_sof_dsp_dbg_dump(sdev, "Boot failed\n", flags);

        /* power down DSP */
        hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
        cl_skl_cldma_stream_run(sdev, false);
        cl_cleanup_skl(sdev, &dmab, &dmab_bdl);

        dev_err(sdev->dev, "%s: load fw failed err: %d\n", __func__, ret);
        return ret;
}
