cx88-vbi.c (6477B)
// SPDX-License-Identifier: GPL-2.0
/*
 */

#include "cx88.h"

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/init.h>

static unsigned int vbi_debug;
module_param(vbi_debug, int, 0644);
MODULE_PARM_DESC(vbi_debug, "enable debug messages [vbi]");

#define dprintk(level, fmt, arg...) do {				\
	if (vbi_debug >= level)						\
		printk(KERN_DEBUG pr_fmt("%s: vbi:" fmt),		\
		       __func__, ##arg);				\
} while (0)

/* ------------------------------------------------------------------ */

int cx8800_vbi_fmt(struct file *file, void *priv,
		   struct v4l2_format *f)
{
	struct cx8800_dev *dev = video_drvdata(file);

	f->fmt.vbi.samples_per_line = VBI_LINE_LENGTH;
	f->fmt.vbi.sample_format = V4L2_PIX_FMT_GREY;
	f->fmt.vbi.offset = 244;

	if (dev->core->tvnorm & V4L2_STD_525_60) {
		/* ntsc */
		f->fmt.vbi.sampling_rate = 28636363;
		f->fmt.vbi.start[0] = 10;
		f->fmt.vbi.start[1] = 273;
		f->fmt.vbi.count[0] = VBI_LINE_NTSC_COUNT;
		f->fmt.vbi.count[1] = VBI_LINE_NTSC_COUNT;

	} else if (dev->core->tvnorm & V4L2_STD_625_50) {
		/* pal */
		f->fmt.vbi.sampling_rate = 35468950;
		f->fmt.vbi.start[0] = V4L2_VBI_ITU_625_F1_START + 5;
		f->fmt.vbi.start[1] = V4L2_VBI_ITU_625_F2_START + 5;
		f->fmt.vbi.count[0] = VBI_LINE_PAL_COUNT;
		f->fmt.vbi.count[1] = VBI_LINE_PAL_COUNT;
	}
	return 0;
}

static int cx8800_start_vbi_dma(struct cx8800_dev *dev,
				struct cx88_dmaqueue *q,
				struct cx88_buffer *buf)
{
	struct cx88_core *core = dev->core;

	/* setup fifo + format */
	cx88_sram_channel_setup(dev->core, &cx88_sram_channels[SRAM_CH24],
				VBI_LINE_LENGTH, buf->risc.dma);

	cx_write(MO_VBOS_CONTROL, (1 << 18) |	/* comb filter delay fixup */
				  (1 << 15) |	/* enable vbi capture */
				  (1 << 11));

	/* reset counter */
	cx_write(MO_VBI_GPCNTRL, GP_COUNT_CONTROL_RESET);
	q->count = 0;

	/* enable irqs */
	cx_set(MO_PCI_INTMSK, core->pci_irqmask | PCI_INT_VIDINT);
	cx_set(MO_VID_INTMSK, 0x0f0088);

	/* enable capture */
	cx_set(VID_CAPTURE_CONTROL, 0x18);

	/* start dma */
	cx_set(MO_DEV_CNTRL2, (1 << 5));
	cx_set(MO_VID_DMACNTRL, 0x88);

	return 0;
}

void cx8800_stop_vbi_dma(struct cx8800_dev *dev)
{
	struct cx88_core *core = dev->core;

	/* stop dma */
	cx_clear(MO_VID_DMACNTRL, 0x88);

	/* disable capture */
	cx_clear(VID_CAPTURE_CONTROL, 0x18);

	/* disable irqs */
	cx_clear(MO_PCI_INTMSK, PCI_INT_VIDINT);
	cx_clear(MO_VID_INTMSK, 0x0f0088);
}

int cx8800_restart_vbi_queue(struct cx8800_dev *dev,
			     struct cx88_dmaqueue *q)
{
	struct cx88_buffer *buf;

	if (list_empty(&q->active))
		return 0;

	buf = list_entry(q->active.next, struct cx88_buffer, list);
	dprintk(2, "restart_queue [%p/%d]: restart dma\n",
		buf, buf->vb.vb2_buf.index);
	cx8800_start_vbi_dma(dev, q, buf);
	return 0;
}

/* ------------------------------------------------------------------ */

static int queue_setup(struct vb2_queue *q,
		       unsigned int *num_buffers, unsigned int *num_planes,
		       unsigned int sizes[], struct device *alloc_devs[])
{
	struct cx8800_dev *dev = q->drv_priv;

	*num_planes = 1;
	if (dev->core->tvnorm & V4L2_STD_525_60)
		sizes[0] = VBI_LINE_NTSC_COUNT * VBI_LINE_LENGTH * 2;
	else
		sizes[0] = VBI_LINE_PAL_COUNT * VBI_LINE_LENGTH * 2;
	return 0;
}

static int buffer_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
	struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb);
	struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0);
	unsigned int lines;
	unsigned int size;

	if (dev->core->tvnorm & V4L2_STD_525_60)
		lines = VBI_LINE_NTSC_COUNT;
	else
		lines = VBI_LINE_PAL_COUNT;
	size = lines * VBI_LINE_LENGTH * 2;
	if (vb2_plane_size(vb, 0) < size)
		return -EINVAL;
	vb2_set_plane_payload(vb, 0, size);

	cx88_risc_buffer(dev->pci, &buf->risc, sgt->sgl,
			 0, VBI_LINE_LENGTH * lines,
			 VBI_LINE_LENGTH, 0,
			 lines);
	return 0;
}

static void buffer_finish(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
	struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb);
	struct cx88_riscmem *risc = &buf->risc;

	if (risc->cpu)
		dma_free_coherent(&dev->pci->dev, risc->size, risc->cpu,
				  risc->dma);
	memset(risc, 0, sizeof(*risc));
}

static void buffer_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
	struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb);
	struct cx88_buffer *prev;
	struct cx88_dmaqueue *q = &dev->vbiq;

	/* add jump to start */
	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 8);
	buf->risc.jmp[0] = cpu_to_le32(RISC_JUMP | RISC_CNT_INC);
	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 8);

	if (list_empty(&q->active)) {
		list_add_tail(&buf->list, &q->active);
		dprintk(2, "[%p/%d] vbi_queue - first active\n",
			buf, buf->vb.vb2_buf.index);

	} else {
		buf->risc.cpu[0] |= cpu_to_le32(RISC_IRQ1);
		prev = list_entry(q->active.prev, struct cx88_buffer, list);
		list_add_tail(&buf->list, &q->active);
		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
		dprintk(2, "[%p/%d] buffer_queue - append to active\n",
			buf, buf->vb.vb2_buf.index);
	}
}

static int start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct cx8800_dev *dev = q->drv_priv;
	struct cx88_dmaqueue *dmaq = &dev->vbiq;
	struct cx88_buffer *buf = list_entry(dmaq->active.next,
					     struct cx88_buffer, list);

	cx8800_start_vbi_dma(dev, dmaq, buf);
	return 0;
}

static void stop_streaming(struct vb2_queue *q)
{
	struct cx8800_dev *dev = q->drv_priv;
	struct cx88_core *core = dev->core;
	struct cx88_dmaqueue *dmaq = &dev->vbiq;
	unsigned long flags;

	cx_clear(MO_VID_DMACNTRL, 0x11);
	cx_clear(VID_CAPTURE_CONTROL, 0x06);
	cx8800_stop_vbi_dma(dev);
	spin_lock_irqsave(&dev->slock, flags);
	while (!list_empty(&dmaq->active)) {
		struct cx88_buffer *buf = list_entry(dmaq->active.next,
						     struct cx88_buffer, list);

		list_del(&buf->list);
		vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
	}
	spin_unlock_irqrestore(&dev->slock, flags);
}

const struct vb2_ops cx8800_vbi_qops = {
	.queue_setup = queue_setup,
	.buf_prepare = buffer_prepare,
	.buf_finish = buffer_finish,
	.buf_queue = buffer_queue,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
	.start_streaming = start_streaming,
	.stop_streaming = stop_streaming,
};
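
/*
 * Illustrative sketch only, not part of upstream cx88-vbi.c: how the
 * exported cx8800_vbi_qops table is typically hooked into a vb2_queue.
 * In the real driver this wiring happens elsewhere during device probe.
 * The field name dev->vb2_vbiq and the exact flag choices below are
 * assumptions for illustration; vb2_dma_sg_memops comes from
 * <media/videobuf2-dma-sg.h>.
 */
static int __maybe_unused cx8800_vbi_queue_init_sketch(struct cx8800_dev *dev)
{
	struct vb2_queue *q = &dev->vb2_vbiq;	/* assumed field name */

	q->type = V4L2_BUF_TYPE_VBI_CAPTURE;
	q->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
	q->gfp_flags = GFP_DMA32;		/* keep RISC/DMA buffers below 4 GiB */
	q->drv_priv = dev;
	q->buf_struct_size = sizeof(struct cx88_buffer);
	q->ops = &cx8800_vbi_qops;		/* the ops table defined above */
	q->mem_ops = &vb2_dma_sg_memops;	/* matches vb2_dma_sg_plane_desc() use */
	q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	q->lock = &dev->core->lock;
	q->dev = &dev->pci->dev;

	return vb2_queue_init(q);
}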