/*
 * Greybus audio Pulse Code Modulation (PCM) driver
 *
 * Copyright 2015 Google Inc.
 * Copyright 2015 Linaro Ltd.
 *
 * Released under the GPLv2 only.
 */
10 #include <linux/kernel.h>
11 #include <linux/device.h>
12 #include <linux/interrupt.h>
13 #include <linux/module.h>
14 #include <linux/platform_device.h>
15 #include <linux/workqueue.h>
16 #include <linux/i2c.h>
17 #include <sound/core.h>
18 #include <sound/pcm.h>
19 #include <sound/pcm_params.h>
20 #include <sound/soc.h>
21 #include <sound/dmaengine_pcm.h>
22 #include <sound/simple_card.h>
/*
 * timer/workqueue logic for pushing pcm data.
 *
 * Since when we are playing audio, we don't get any
 * status or feedback from the codec, we have to use a
 * hrtimer to trigger sending data to the remote codec.
 * However since the hrtimer runs in irq context, so we
 * have to schedule a workqueue to actually send the
 * greybus data.
 */
38 static void gb_pcm_work(struct work_struct *work)
40 struct gb_snd *snd_dev = container_of(work, struct gb_snd, work);
41 struct snd_pcm_substream *substream = snd_dev->substream;
42 struct snd_pcm_runtime *runtime = substream->runtime;
43 unsigned int stride, frames, oldptr;
44 int period_elapsed, ret;
51 if (!atomic_read(&snd_dev->running)) {
52 if (snd_dev->cport_active) {
53 ret = gb_i2s_mgmt_deactivate_cport(
54 snd_dev->mgmt_connection,
55 snd_dev->i2s_tx_connection->intf_cport_id);
56 if (ret) /* XXX Do what else with failure? */
57 pr_err("deactivate_cport failed: %d\n", ret);
59 snd_dev->cport_active = false;
60 snd_dev->send_data_sample_count = 0;
64 } else if (!snd_dev->cport_active) {
65 ret = gb_i2s_mgmt_activate_cport(snd_dev->mgmt_connection,
66 snd_dev->i2s_tx_connection->intf_cport_id);
68 pr_err("activate_cport failed: %d\n", ret);
70 snd_dev->cport_active = true;
73 address = runtime->dma_area + snd_dev->hwptr_done;
75 len = frames_to_bytes(runtime,
76 runtime->buffer_size) - snd_dev->hwptr_done;
77 len = min(len, MAX_SEND_DATA_LEN);
78 gb_i2s_send_data(snd_dev->i2s_tx_connection, snd_dev->send_data_req_buf,
79 address, len, snd_dev->send_data_sample_count);
81 snd_dev->send_data_sample_count += CONFIG_SAMPLES_PER_MSG;
83 stride = runtime->frame_bits >> 3;
86 snd_pcm_stream_lock(substream);
87 oldptr = snd_dev->hwptr_done;
88 snd_dev->hwptr_done += len;
89 if (snd_dev->hwptr_done >= runtime->buffer_size * stride)
90 snd_dev->hwptr_done -= runtime->buffer_size * stride;
92 frames = (len + (oldptr % stride)) / stride;
95 snd_dev->transfer_done += frames;
96 if (snd_dev->transfer_done >= runtime->period_size) {
97 snd_dev->transfer_done -= runtime->period_size;
101 snd_pcm_stream_unlock(substream);
103 snd_pcm_period_elapsed(snd_dev->substream);
106 static enum hrtimer_restart gb_pcm_timer_function(struct hrtimer *hrtimer)
108 struct gb_snd *snd_dev = container_of(hrtimer, struct gb_snd, timer);
110 if (!atomic_read(&snd_dev->running))
111 return HRTIMER_NORESTART;
112 queue_work(snd_dev->workqueue, &snd_dev->work);
113 hrtimer_forward_now(hrtimer, ns_to_ktime(CONFIG_PERIOD_NS));
114 return HRTIMER_RESTART;
117 void gb_pcm_hrtimer_start(struct gb_snd *snd_dev)
119 atomic_set(&snd_dev->running, 1);
120 queue_work(snd_dev->workqueue, &snd_dev->work); /* Activates CPort */
121 hrtimer_start(&snd_dev->timer, ns_to_ktime(CONFIG_PERIOD_NS),
125 void gb_pcm_hrtimer_stop(struct gb_snd *snd_dev)
127 atomic_set(&snd_dev->running, 0);
128 hrtimer_cancel(&snd_dev->timer);
129 queue_work(snd_dev->workqueue, &snd_dev->work); /* Deactivates CPort */
132 static int gb_pcm_hrtimer_init(struct gb_snd *snd_dev)
134 hrtimer_init(&snd_dev->timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
135 snd_dev->timer.function = gb_pcm_timer_function;
136 atomic_set(&snd_dev->running, 0);
137 snd_dev->workqueue = alloc_workqueue("gb-audio", WQ_HIGHPRI, 0);
138 if (!snd_dev->workqueue)
140 INIT_WORK(&snd_dev->work, gb_pcm_work);
/*
 * Core gb pcm structure
 */
148 static struct snd_pcm_hardware gb_plat_pcm_hardware = {
149 .info = SNDRV_PCM_INFO_INTERLEAVED |
150 SNDRV_PCM_INFO_MMAP |
151 SNDRV_PCM_INFO_MMAP_VALID,
155 .rate_max = GB_SAMPLE_RATE,
158 /* XXX - All the values below are junk */
159 .buffer_bytes_max = 64 * 1024,
160 .period_bytes_min = 32,
161 .period_bytes_max = 8192,
166 static snd_pcm_uframes_t gb_pcm_pointer(struct snd_pcm_substream *substream)
168 struct snd_soc_pcm_runtime *rtd = substream->private_data;
169 struct gb_snd *snd_dev;
171 snd_dev = snd_soc_dai_get_drvdata(rtd->cpu_dai);
173 return snd_dev->hwptr_done / (substream->runtime->frame_bits >> 3);
176 static int gb_pcm_prepare(struct snd_pcm_substream *substream)
178 struct snd_soc_pcm_runtime *rtd = substream->private_data;
179 struct gb_snd *snd_dev;
181 snd_dev = snd_soc_dai_get_drvdata(rtd->cpu_dai);
182 snd_dev->hwptr_done = 0;
183 snd_dev->transfer_done = 0;
187 static unsigned int rates[] = {GB_SAMPLE_RATE};
188 static struct snd_pcm_hw_constraint_list constraints_rates = {
189 .count = ARRAY_SIZE(rates),
194 static int gb_pcm_open(struct snd_pcm_substream *substream)
196 struct snd_pcm_runtime *runtime = substream->runtime;
197 struct snd_soc_pcm_runtime *rtd = substream->private_data;
198 struct gb_snd *snd_dev;
202 snd_dev = snd_soc_dai_get_drvdata(rtd->cpu_dai);
204 spin_lock_irqsave(&snd_dev->lock, flags);
205 runtime->private_data = snd_dev;
206 snd_dev->substream = substream;
207 ret = gb_pcm_hrtimer_init(snd_dev);
208 spin_unlock_irqrestore(&snd_dev->lock, flags);
213 snd_soc_set_runtime_hwparams(substream, &gb_plat_pcm_hardware);
215 ret = snd_pcm_hw_constraint_list(substream->runtime, 0,
216 SNDRV_PCM_HW_PARAM_RATE,
221 return snd_pcm_hw_constraint_integer(runtime,
222 SNDRV_PCM_HW_PARAM_PERIODS);
225 static int gb_pcm_close(struct snd_pcm_substream *substream)
227 substream->runtime->private_data = NULL;
231 static int gb_pcm_hw_params(struct snd_pcm_substream *substream,
232 struct snd_pcm_hw_params *hw_params)
234 struct snd_soc_pcm_runtime *rtd = substream->private_data;
235 struct gb_snd *snd_dev;
236 int rate, chans, bytes_per_chan, is_le, ret;
238 snd_dev = snd_soc_dai_get_drvdata(rtd->cpu_dai);
240 rate = params_rate(hw_params);
241 chans = params_channels(hw_params);
242 bytes_per_chan = snd_pcm_format_width(params_format(hw_params)) / 8;
243 is_le = snd_pcm_format_little_endian(params_format(hw_params));
245 ret = gb_i2s_mgmt_set_cfg(snd_dev, rate, chans, bytes_per_chan, is_le);
249 return snd_pcm_lib_malloc_pages(substream,
250 params_buffer_bytes(hw_params));
/* ALSA .hw_free callback: release the buffer from hw_params. */
static int gb_pcm_hw_free(struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}
258 static struct snd_pcm_ops gb_pcm_ops = {
260 .close = gb_pcm_close,
261 .ioctl = snd_pcm_lib_ioctl,
262 .hw_params = gb_pcm_hw_params,
263 .hw_free = gb_pcm_hw_free,
264 .prepare = gb_pcm_prepare,
265 .pointer = gb_pcm_pointer,
/* ASoC .pcm_free callback: release the preallocated PCM buffers. */
static void gb_pcm_free(struct snd_pcm *pcm)
{
	snd_pcm_lib_preallocate_free_for_all(pcm);
}
273 static int gb_pcm_new(struct snd_soc_pcm_runtime *rtd)
275 struct snd_pcm *pcm = rtd->pcm;
277 return snd_pcm_lib_preallocate_pages_for_all(
279 SNDRV_DMA_TYPE_CONTINUOUS,
280 snd_dma_continuous_data(GFP_KERNEL),
281 PREALLOC_BUFFER, PREALLOC_BUFFER_MAX);
284 static struct snd_soc_platform_driver gb_soc_platform = {
286 .pcm_new = gb_pcm_new,
287 .pcm_free = gb_pcm_free,
290 static int gb_soc_platform_probe(struct platform_device *pdev)
292 return snd_soc_register_platform(&pdev->dev, &gb_soc_platform);
295 static int gb_soc_platform_remove(struct platform_device *pdev)
297 snd_soc_unregister_platform(&pdev->dev);
301 struct platform_driver gb_audio_pcm_driver = {
303 .name = "gb-pcm-audio",
304 .owner = THIS_MODULE,
306 .probe = gb_soc_platform_probe,
307 .remove = gb_soc_platform_remove,