blob: 5566d679d6812ffdc66bf104378cd9bacbd53396 [file] [log] [blame]
// SPDX-License-Identifier: GPL-2.0-or-later
/*
* V4L2 Driver for ASR camera controller
*
* Copyright (C) 2023 ASR Microelectronics Co., Ltd.
*/
#include <linux/init.h>
#include <linux/module.h>
#include <linux/io.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fs.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/moduleparam.h>
#include <linux/of.h>
#include <linux/of_graph.h>
#include <linux/of_irq.h>
#include <linux/time.h>
#include <linux/platform_device.h>
#include <linux/clk.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/syscalls.h>
#include <linux/pm_qos.h>
#include <media/v4l2-async.h>
#include <media/v4l2-clk.h>
#include <media/v4l2-common.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-device.h>
#include <media/v4l2-event.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-fwnode.h>
#include <media/videobuf2-dma-contig.h>
#include <linux/videodev2.h>
#include "camera_reg.h"
#include "../../video/fbdev/asrfb/asrfb.h"
#ifdef CONFIG_DEBUG_FS
#include <linux/debugfs.h>
#include <linux/seq_file.h>
#endif /* CONFIG_DEBUG_FS */
/* Verbosity level for v4l2_dbg(); writable at runtime via module sysfs. */
static int debug;
/* NOTE(review): set nowhere in this chunk -- confirm it has users elsewhere. */
static int snapshot_flag = 0;
module_param(debug, int, 0644);
#define ASR_CAM_DRV_NAME "asr-camera"
/* Default capture geometry used until userspace sets a format. */
#define DEFAULT_WIDTH 640
#define DEFAULT_HEIGHT 480
/*
 * Physical sensor-to-controller interface. The numeric value doubles as the
 * SPI lane count selector in spi_mtk_mode_set() (0/1/2 -> 1/2/4 lanes).
 */
enum cam_interface {
INF_SPI1LAN = 0x0,
INF_SPI2LAN,
INF_SPI4LAN,
INF_MIPI1LAN,
INF_MIPI2LAN,
INF_MIPI4LAN,
INF_DVP,
INF_MAX,
};
/* SPI sensor-interface configuration (consumed by spi_mtk_mode_set()). */
struct spi_param {
u8 spi_sdr; //0x0:no sdr 0x1:sdr
u8 spi_crc; //0x0:no crc 0x1:crc
u8 spi_manual_enable; //0x0:not enable 0x1:enable
u8 spi_manual_mode;
u8 spi_manual_height_enable; //0x0:not enable 0x1:enable
u8 spi_manual_width_enable; //0x0:not enable 0x1:enable
u16 spi_manual_height;
u16 spi_manual_width;
u8 spi_ignore_line_id;
};
/* Crop window in pixels; end coordinates are exclusive (width = end - start). */
struct crop_param {
u16 start_x;
u16 start_y;
u16 end_x;
u16 end_y;
};
/* Data path: isp output size --> crop --> subsample --> scaler --> dma size */
struct pipeline_param {
u8 pipeline_num; //0x0:preview 0x1:video 0x2:capture
u8 pipeline_enable; //0x0:not enable 0x1:enable
u8 shadow_mode; //0x0:direct mode 0x1:shadow mode
struct crop_param pipeline_crop;
u8 subsample; //0x0:1-1 0x1 1-2 0x2:1-4 0x3:1-8 0x4:1-16 0x5 1-32
u8 scaler;
u8 output_format;
u16 dma_stride_y; /* luma line stride in bytes, 8-byte aligned */
u16 pipeline_outw;/*dma_w*/
u16 pipeline_outh;/*dma_h*/
};
/*
 * struct asr_sensor_info - Attached sensor subdevice information
 * @sd:   async-bound sensor subdevice
 * @mbus: media bus configuration
 * @fmt:  last negotiated media bus format
 * @cfg:  pad configuration scratch space
 */
struct asr_sensor_info {
struct v4l2_subdev *sd;
struct v4l2_mbus_config mbus;
struct v4l2_subdev_format fmt;
struct v4l2_subdev_pad_config cfg;
};
#define ISP_STAT_MAX_PLANE 3
struct isp_buffer {
/* common v4l buffer stuff -- must be first */
struct vb2_v4l2_buffer vbuf;
/* link on asr_camera_dev.capture while waiting for DMA */
struct list_head queue;
/* 32-byte aligned DMA address of plane 0 (checked in buf_prepare) */
dma_addr_t dma;
};
/* Per-controller device state; one instance per platform device. */
struct asr_camera_dev {
struct v4l2_device v4l2_dev;
struct video_device vdev;
struct v4l2_async_notifier notifier;
struct v4l2_async_subdev asd;
struct vb2_queue vb2_vq;
struct asr_sensor_info sensor;
/* format currently exposed to userspace */
struct v4l2_pix_format current_pix;
u32 mclk_rate;
unsigned int irq;
/* camera controller (CCIC/IPE) register window */
void __iomem *ipe_base;
/* front-end ISP register window */
void __iomem *isp_base;
struct resource *res;
enum cam_interface camera_interface;
struct spi_param spi_config;
/* pipe2 (capture) configuration */
struct pipeline_param pipe2_config;
/* protects capture list and curr_buf/next_buf (IRQ context too) */
spinlock_t lock;
/* serializes vb2 queue operations */
struct mutex mlock;
/* queued-but-idle buffers awaiting DMA */
struct list_head capture;
unsigned int buf_sequence;
/* true while a DMA transfer is in flight; polled in stop_streaming */
bool dma_state;
struct isp_buffer *curr_buf;
struct isp_buffer *next_buf;
struct tasklet_struct task_eof;
struct dentry *dentry;
void *dma_addr;
dma_addr_t dma_addr_phys;
struct pm_qos_request pm_qos_req;
s32 pm_qos;
};
/* Global IPE register base for asr_camera_mclk_ctrl(); presumably assigned at
 * probe time (not visible in this chunk) -- TODO confirm. */
void __iomem* ipe_base_reg;
/* Round addr up to a multiple of size (power of two). NOTE(review): both
 * arguments are evaluated more than once -- do not pass expressions with
 * side effects. */
#define ALIGN_TO(addr,size) (((addr)+(size)-1)&(~((size)-1)))
/* Invoke an op on the bound sensor subdevice. */
#define sensor_call(cam, o, f, args...) \
v4l2_subdev_call(cam->sensor.sd, o, f, ##args)
/* Map a generic vb2 buffer to its enclosing driver buffer. */
static struct isp_buffer *vb2_to_isp_buffer(struct vb2_buffer *vb)
{
    return container_of(to_vb2_v4l2_buffer(vb), struct isp_buffer, vbuf);
}
/* Map the embedded v4l2_device back to the driver's device structure. */
static struct asr_camera_dev *v4l2_dev_to_pcdev(struct v4l2_device *v4l2_dev)
{
return container_of(v4l2_dev, struct asr_camera_dev, v4l2_dev);
}
/*
 * asr_camera_mclk_ctrl() - gate or ungate the sensor master clock
 * @on: nonzero to enable MCLK, zero to gate it off
 *
 * REG_CTRL_1 bit 28 is the gate: cleared means the clock runs.
 * Uses the global ipe_base_reg mapping.
 */
void asr_camera_mclk_ctrl(int on)
{
    u32 ctrl = __raw_readl(ipe_base_reg + REG_CTRL_1);

    if (on)
        ctrl &= ~(1 << 28);
    else
        ctrl |= (1 << 28);
    __raw_writel(ctrl, ipe_base_reg + REG_CTRL_1);
}
EXPORT_SYMBOL(asr_camera_mclk_ctrl);
/*
 * Power-up sequence: bring up the media power domain, reset controller
 * registers, raise the PM QoS floor, mask all interrupts, then start MCLK.
 * Order is hardware-mandated; do not reorder.
 */
static void asr_camera_activate(struct asr_camera_dev *pcdev)
{
u32 mask = 0;
init_media_pmu();
clear_camera_setting(pcdev->ipe_base);
/* block deep idle states while the camera is active */
pm_qos_update_request(&pcdev->pm_qos_req, pcdev->pm_qos);
/* disable all interrupts */
writel(0xFFFFFFFF, pcdev->ipe_base + REG_CAMERA_IRQ_RAW_MASK);
mask = readl(pcdev->ipe_base + REG_CCIC_IRQ_MASK);
mask |= IRQ_CCIC_MASK_ALL;
writel(mask, pcdev->ipe_base + REG_CCIC_IRQ_MASK);
config_mclk(pcdev->ipe_base, pcdev->mclk_rate);
}
/* Power-down counterpart of asr_camera_activate(): drop the media power
 * domain and release the PM QoS constraint. */
static void asr_camera_deactivate(struct asr_camera_dev *pcdev)
{
deinit_media_pmu();
pm_qos_update_request(&pcdev->pm_qos_req, PM_QOS_CPUIDLE_BLOCK_DEFAULT_VALUE);
}
/*
 * asr_mbus_image_size() - bytes needed for one frame of the pipe output
 * @pipe_config: pipeline whose stride/height/format describe the frame
 *
 * Returns the per-frame buffer size in bytes, or 0 for an unknown format.
 */
static u32 asr_mbus_image_size(struct pipeline_param *pipe_config)
{
    u32 line = pipe_config->dma_stride_y;
    u32 rows = pipe_config->pipeline_outh;

    switch (pipe_config->output_format) {
    case RAW8: /* only p2 support, refer to YV12 */
        return line * rows;
    case YUV422_YUYV:
    case YUV422_YVYU:
    case YUV422_UYVY:
    case YUV422_VYUY:
        /* packed 4:2:2 -- two bytes per pixel */
        return line * rows * 2;
    case YUV420_YV12:
    case YUV420_I420:
    case YUV420_NV12:
    case YUV420_NV21:
        /* all 4:2:0 layouts -- 1.5 bytes per pixel */
        return line * rows * 3 / 2;
    default:
        return 0;
    }
}
/*
 * pipeline_shadow_mode_set() - select shadow or direct register update mode
 * @shadow: nonzero for shadow (frame-synchronized) updates, zero for direct
 *
 * Read-modify-writes the mode field of REG_SUBSAMPLE_SCALER_CTRL2.
 * Always returns 0.
 */
static int pipeline_shadow_mode_set(struct asr_camera_dev *dev, u32 shadow)
{
    void __iomem *addr = dev->ipe_base + REG_SUBSAMPLE_SCALER_CTRL2;
    u32 mode = shadow ? PIPE2_CTRL_SHADOW : PIPE2_CTRL_DIRECT;
    u32 ctrl = readl(addr);

    ctrl &= ~PIPE2_CTRL_SHADOW_MODE_MASK;
    ctrl |= mode & PIPE2_CTRL_SHADOW_MODE_MASK;
    writel(ctrl, addr);
    v4l2_info(&dev->v4l2_dev, "set to %s mode", shadow ? "shadow" : "direct");
    return 0;
}
static void set_pipeline_enable(struct asr_camera_dev *dev)
{
u32 reg = REG_SUBSAMPLE_SCALER_CTRL2;
if (dev->pipe2_config.pipeline_enable)
writel(readl(dev->ipe_base + reg) | (1 << 0), dev->ipe_base + reg);
else
writel(readl(dev->ipe_base + reg) & ~(1 << 0), dev->ipe_base + reg);
}
static void set_pipeline_shadow_ready(struct asr_camera_dev *dev)
{
u32 reg = REG_SUBSAMPLE_SCALER_CTRL2;
writel(readl(dev->ipe_base + reg) | (1 << 6), dev->ipe_base + reg);
}
/* Start pipe2 using shadow-register updates: mode, enable, then latch. */
static void pipeline_shadow_streamon(struct asr_camera_dev *dev)
{
    pipeline_shadow_mode_set(dev, 1);   /* frame-synchronized updates */
    set_pipeline_enable(dev);           /* apply the enable bit */
    set_pipeline_shadow_ready(dev);     /* tell HW the shadow set is ready */
}
/* Interface resemble MINIGUI */
static int isp_io_format_set(struct asr_camera_dev *dev)
{
int ret = 0;
u8 input_yuv = 0;
u32 mbus_fmt, out_fmt = YUV444;
u32 val;
mbus_fmt = dev->sensor.fmt.format.code;
switch (mbus_fmt) {
case MEDIA_BUS_FMT_SBGGR8_1X8:
case MEDIA_BUS_FMT_SBGGR10_1X10:
isp_reg_write(dev->isp_base, 0x5c, 0x20); // [5:4] CFA) = 00:GRBG) = 01:RGGB) = 10:BGGR) = 11:GBRG
break;
case MEDIA_BUS_FMT_SGBRG8_1X8:
case MEDIA_BUS_FMT_SGBRG10_1X10:
isp_reg_write(dev->isp_base, 0x5c, 0x30); // [5:4] CFA) = 00:GRBG) = 01:RGGB) = 10:BGGR) = 11:GBRG
break;
case MEDIA_BUS_FMT_SGRBG8_1X8:
case MEDIA_BUS_FMT_SGRBG10_1X10:
isp_reg_write(dev->isp_base, 0x5c, 0x00); // [5:4] CFA) = 00:GRBG) = 01:RGGB) = 10:BGGR) = 11:GBRG
break;
case MEDIA_BUS_FMT_SRGGB8_1X8:
case MEDIA_BUS_FMT_SRGGB10_1X10:
isp_reg_write(dev->isp_base, 0x5c, 0x10); // [5:4] CFA) = 00:GRBG) = 01:RGGB) = 10:BGGR) = 11:GBRG
break;
/*
This field defines the Endianness of the external CMOS sensor's output (CCIC's input).
This field has no effect on the MIPI CSI2 CMOS sensor.
0x0 = Y1CbY0Cr: YUYV sensor
0x1 = Y1CrY0Cb: YVYU sensor
0x2 = CrY1CbY0: VYUY sensor
0x3 = CbY1CrY0: UYVY sensor
*/
case MEDIA_BUS_FMT_YUYV8_2X8:
val = readl(dev->ipe_base + REG_DATA_FORMAT_CTRL);
val = val & ~(0x3 << 24);
writel(val,dev->ipe_base + REG_DATA_FORMAT_CTRL);
input_yuv = 1;
break;
case MEDIA_BUS_FMT_YVYU8_2X8:
val = readl(dev->ipe_base + REG_DATA_FORMAT_CTRL);
val = (val & ~(0x3 << 24)) | (0x1 << 24);
writel(val,dev->ipe_base + REG_DATA_FORMAT_CTRL);
input_yuv = 1;
break;
case MEDIA_BUS_FMT_VYUY8_2X8:
val = readl(dev->ipe_base + REG_DATA_FORMAT_CTRL);
val = (val & ~(0x3 << 24)) | (0x2 << 24);
writel(val,dev->ipe_base + REG_DATA_FORMAT_CTRL);
input_yuv = 1;
break;
case MEDIA_BUS_FMT_UYVY8_2X8:
val = readl(dev->ipe_base + REG_DATA_FORMAT_CTRL);
val = (val & ~(0x3 << 24)) | (0x3 << 24);
writel(val,dev->ipe_base + REG_DATA_FORMAT_CTRL);
input_yuv = 1;
break;
default:
v4l2_err(&dev->v4l2_dev, "invalid media bus format 0x%x\n", mbus_fmt);
return -1;
}
if (input_yuv) {
if (out_fmt == YUV444) {
isp_reg_write(dev->isp_base, REG_ISP_OUT_FMT, INPUT_FMT_YUV);
} else {
v4l2_err(&dev->v4l2_dev, "%s: mbus_fmt(0x%x) mismatch out_fmt(0x%x)\n",
__func__, mbus_fmt, out_fmt);
return -1;
}
} else {
if (out_fmt == YUV444) {
isp_reg_write(dev->isp_base, REG_ISP_OUT_FMT, INPUT_FMT_RAW);
} else if (out_fmt == RAW8) {
isp_reg_write(dev->isp_base, REG_ISP_OUT_FMT, INPUT_FMT_RAW_BYPASS);
} else {
v4l2_err(&dev->v4l2_dev, "%s: mbus_fmt(0x%x) mismatch out_fmt(0x%x)\n",
__func__, mbus_fmt, out_fmt);
return -1;
}
}
return ret;
}
/* Program the CISCTL capture window. Each dimension is split across two
 * byte-wide ISP registers: high 3 bits then low 8 bits (11-bit max). */
static void isp_cisctl_win_set(void __iomem *base, u32 sensor_w, u32 sensor_h)
{
isp_reg_write(base, 0x34, (sensor_h >> 8) & 0x7); // CISCTL_win_height
isp_reg_write(base, 0x38, sensor_h & 0xff); // CISCTL_win_height
isp_reg_write(base, 0x3c, (sensor_w >> 8) & 0x7); // CISCTL_win_width
isp_reg_write(base, 0x40, sensor_w & 0xff); // CISCTL_win_width
}
/* Tell the controller the image size arriving from the ISP:
 * width in the low halfword, height in the high halfword. */
static void pipeline_isp_output_size_set(struct asr_camera_dev *dev, u32 isp_out_w, u32 isp_out_h)
{
    u32 packed = isp_out_w + (isp_out_h << 16);

    v4l2_info(&dev->v4l2_dev,"%s: img_width = %d , img_height = %d",
              __func__, isp_out_w, isp_out_h);
    writel(packed, dev->ipe_base + REG_ISP_IMG_SIZE);
}
/*
 * Apply the pipe2 crop rectangle to the ISP output window and propagate the
 * resulting size to the controller. Each coordinate is written as a
 * high-3-bits/low-8-bits register pair (11-bit values).
 */
static void isp_out_win_crop(struct asr_camera_dev *dev)
{
u32 isp_out_width, isp_out_height;
u32 start_x, end_x, start_y, end_y;
start_x = dev->pipe2_config.pipeline_crop.start_x;
end_x = dev->pipe2_config.pipeline_crop.end_x;
start_y = dev->pipe2_config.pipeline_crop.start_y;
end_y = dev->pipe2_config.pipeline_crop.end_y;
/* end coordinates are exclusive, so this is the cropped size */
isp_out_width = end_x - start_x;
isp_out_height = end_y - start_y;
isp_reg_write(dev->isp_base, 0x244, (start_y >> 8) & 0x7); // crop start y high
isp_reg_write(dev->isp_base, 0x248, start_y & 0xff); // crop start y low
isp_reg_write(dev->isp_base, 0x24c, (start_x >> 8) & 0x7); //crop start x high
isp_reg_write(dev->isp_base, 0x250, start_x & 0xff); // crop start x low
isp_reg_write(dev->isp_base, 0x254, (isp_out_height >> 8) & 0x7); // out window height
isp_reg_write(dev->isp_base, 0x258, isp_out_height & 0xff); // out window height
isp_reg_write(dev->isp_base, 0x25c, (isp_out_width >> 8) & 0x7); // out window width
isp_reg_write(dev->isp_base, 0x260, isp_out_width & 0xff); // out window width
pipeline_isp_output_size_set(dev, isp_out_width, isp_out_height);
}
/*
 * fe_isp_process_config() - front-end ISP setup for pipe2
 *
 * Programs the CISCTL window with the pipe2 output size, the input/output
 * formats, and the output crop. Returns 0 on success or the negative error
 * from isp_io_format_set().
 */
int fe_isp_process_config(struct asr_camera_dev *dev)
{
    int err;

    isp_cisctl_win_set(dev->isp_base, dev->pipe2_config.pipeline_outw,
                       dev->pipe2_config.pipeline_outh);
    err = isp_io_format_set(dev);
    if (err)
        return err;
    isp_out_win_crop(dev);
    return 0;
}
/*
 * spi_mtk_mode_set() - configure the ISP SPI receiver in MTK mode
 * @fifo_trig_num: FIFO trigger threshold (roughly line bytes / 4)
 * @manual_w:      manual frame width in bytes (unused in MTK mode)
 * @manual_h:      manual frame height (unused in MTK mode)
 *
 * Lane count, SDR/DDR sampling and CRC come from dev->camera_interface and
 * dev->spi_config. The #if 0 branch keeps the alternative BT565/Spreadtrum
 * "manual" configuration for reference. Returns 0 or -EINVAL for an
 * unsupported lane selection.
 */
static int spi_mtk_mode_set(struct asr_camera_dev *dev, u16 fifo_trig_num, u16 manual_w, u16 manual_h)
{
u16 ln_val;
u16 lane_num = 0, spi_sdr_en = 0, spi_crc_en = 0;
/* camera_interface enum value doubles as lane selector: 0/1/2 -> 1/2/4 */
lane_num = dev->camera_interface;
spi_sdr_en = dev->spi_config.spi_sdr;
spi_crc_en = dev->spi_config.spi_crc;
v4l2_info(&dev->v4l2_dev, "fifo triger = 0x%x, manual_w = %d, manual_h = %d, lane_num = %d\n",
fifo_trig_num, manual_w, manual_h, lane_num);
switch (lane_num) {
case 0:
ln_val = LN_SPI1_VAL;
break;
case 1:
ln_val = LN_SPI2_VAL;
break;
case 2:
ln_val = LN_SPI4_VAL;
break;
default:
v4l2_err(&dev->v4l2_dev, "spi lane%d not support\n", lane_num);
return -EINVAL;
}
/* FIFO trigger threshold, high byte then low byte */
isp_reg_write(dev->isp_base, 0x044, (fifo_trig_num >> 8) & 0xFF); // SPI trig num
isp_reg_write(dev->isp_base, 0x048, (fifo_trig_num & 0xFF));
#if 0
/* working mode: BT565/Spreadtrum (packaged) */
isp_write_mask(dev->isp_base, REG_ISP_SPI_MODE1, 0x0, SPI_MANUAL_MODE_MASK);
isp_set_bit(dev->isp_base, REG_ISP_SPI_MODE1, SPI_MANUAL_MODE_EN | SPI_MANUAL_HEIGHT_EN | SPI_MANUAL_WIDTH_EN);
isp_reg_write(dev->isp_base, 0x084, (manual_w >> 8) & 0x7); // SPI_manual_width=1280 (in bytes)) = = image_width*2 when YUV422 mode
isp_reg_write(dev->isp_base, 0x088, manual_w & 0xff);
isp_reg_write(dev->isp_base, 0x08c, (manual_h >> 8) & 0x7); // SPI_manual_height=80
isp_reg_write(dev->isp_base, 0x090, manual_h & 0xff);
#else
/* working mode: MTK mode */
isp_clr_bit(dev->isp_base, REG_ISP_SPI_MODE1, SPI_MANUAL_MODE_EN | SPI_MANUAL_HEIGHT_EN | SPI_MANUAL_WIDTH_EN);
#endif
isp_set_bit(dev->isp_base, REG_ISP_SPI_MODE1, SPI_IGNORE_LINE_ID_EN);
isp_write_mask(dev->isp_base, REG_ISP_SPI_MODE2, ln_val, SPI_LANE_NUM_MASK);
isp_write_mask(dev->isp_base, REG_ISP_SPI_MODE2, (ln_val << 4), SPI_DATA_SWITCH_MASK);
/* SDR uses DDR-mode sampling on the positive edge; plain mode samples on
 * the negative edge -- the two bits are mutually exclusive */
if (spi_sdr_en) {
isp_clr_bit(dev->isp_base, REG_ISP_SPI_MODE2, SPI_NEG_SAMPLE_EN);
isp_set_bit(dev->isp_base, REG_ISP_SPI_MODE2, SPI_DDR_MODE_EN);
} else {
isp_set_bit(dev->isp_base, REG_ISP_SPI_MODE2, SPI_NEG_SAMPLE_EN);
isp_clr_bit(dev->isp_base, REG_ISP_SPI_MODE2, SPI_DDR_MODE_EN);
}
/* select SPI as the ISP input and enable it */
isp_reg_write(dev->isp_base, REG_ISP_TOP_MODE, SPI_EN | INPUT_SEL_SPI);
if (spi_crc_en) {
isp_set_bit(dev->isp_base, REG_ISP_TOP_MODE, SPI_CRC_EN);
} else {
isp_clr_bit(dev->isp_base, REG_ISP_TOP_MODE, SPI_CRC_EN);
}
return 0;
}
/*
 * spi_config() - derive SPI receiver parameters from the sensor format
 *
 * Computes the manual width in bytes, the FIFO trigger threshold and the
 * DVP interface word from the negotiated media bus code, then programs the
 * receiver via spi_mtk_mode_set(). Returns 0 or a negative error.
 */
static int spi_config(struct asr_camera_dev *dev)
{
int ret = 0;
u16 mbus_code, spi_fifo = 0;
u16 img_w, img_h, manual_w, manual_h;
u32 dvp_if = 0, val = 0;
struct v4l2_subdev_format fmt;
fmt = dev->sensor.fmt;
mbus_code = fmt.format.code;
img_w = fmt.format.width;
img_h = fmt.format.height;
v4l2_info(&dev->v4l2_dev, "mbus_code = 0x%x %dx%d\n", mbus_code, img_w, img_h);
switch (mbus_code) {
case MEDIA_BUS_FMT_SBGGR8_1X8:
case MEDIA_BUS_FMT_SGBRG8_1X8:
case MEDIA_BUS_FMT_SGRBG8_1X8:
case MEDIA_BUS_FMT_SRGGB8_1X8:
case MEDIA_BUS_FMT_SBGGR10_1X10:
case MEDIA_BUS_FMT_SGBRG10_1X10:
case MEDIA_BUS_FMT_SGRBG10_1X10:
case MEDIA_BUS_FMT_SRGGB10_1X10:
/* Bayer: one byte per pixel on the wire */
manual_w = img_w;
manual_h = img_h;
spi_fifo = manual_w / 4; // spi_fifo_trig = 1 line bytes / 4
/* [26:16] = image width, low bits: interface enable/control magic --
 * TODO confirm 0xC080 against the DVP_IF register manual */
dvp_if = ((img_w & 0x7FF) << 16) | 0xC080;
break;
case MEDIA_BUS_FMT_UYVY8_2X8:
case MEDIA_BUS_FMT_VYUY8_2X8:
case MEDIA_BUS_FMT_YUYV8_2X8:
case MEDIA_BUS_FMT_YVYU8_2X8:
/* YUV422: two bytes per pixel on the wire */
manual_w = img_w * 2;
manual_h = img_h;
if(img_w >= 624)
spi_fifo = 0x138; // spi_fifo_trig = (1 line bytes / 4 -8)
else
spi_fifo = manual_w / 4; // spi_fifo_trig = 1 line bytes / 4
// [27]:ISP_OUT_422 '1' valid, <p>=1: use yuv422 from isp
// george: when isp reg 0x210[4] is_yuv422
dvp_if = BIT(27) | ((img_w & 0x7FF) << 16) | 0xC080;
break;
default:
v4l2_err(&dev->v4l2_dev, "invalid mbus fmt 0x%x\n", mbus_code);
return -EINVAL;
}
ret = spi_mtk_mode_set(dev, spi_fifo, manual_w, manual_h);
if (ret)
v4l2_err(&dev->v4l2_dev, "spi config failed!\n");
//writel(dvp_if, dev->ipe_base + REG_ISP_DVP_IF_CTRL);
/* merge dvp_if into the register, preserving bits 30:29 */
val = readl(dev->ipe_base + REG_ISP_DVP_IF_CTRL);
val = (val & ~0x9FFFFFFF) | (dvp_if & 0x9FFFFFFF);
writel(val,dev->ipe_base + REG_ISP_DVP_IF_CTRL);
return ret;
}
/*
 * pipeline_update_mac_addr() - program per-plane DMA base addresses (pipe2)
 * @addr:  plane base addresses in Y, U, V order
 * @plane: number of valid entries in @addr (at most 3)
 *
 * Clamps @plane to the three hardware plane registers so an oversized count
 * cannot read past @addr or the register table. Returns 0.
 */
int pipeline_update_mac_addr(struct asr_camera_dev *dev, u32 *addr, u32 plane)
{
    static const u32 mac_reg[3] = { REG_Y2_BASE, REG_U2_BASE, REG_V2_BASE };
    u32 i;

    if (plane > 3)
        plane = 3;
    for (i = 0; i < plane; i++)
        writel(addr[i], dev->ipe_base + mac_reg[i]);
    return 0;
}
/*
 * cam_set_addr() - point the pipe2 DMA engine at a capture buffer
 * @buf: buffer whose ->dma holds the plane-0 base address; ignored if NULL
 *       or unmapped
 *
 * Derives the per-plane (Y/U/V) addresses from the output format, rounding
 * each plane start up to the 8-byte alignment the DMA engine requires, and
 * writes them via pipeline_update_mac_addr().
 *
 * Removes the original dead stores to y_size/u_size (computed, never used).
 */
void cam_set_addr(struct asr_camera_dev *dev, struct isp_buffer *buf)
{
    u32 tmp_u_addr = 0;
    u32 tmp_v_addr = 0;
    u32 start_addr;
    u32 y_bytes;
    u32 dmad[3];
    u32 planes = 0;
    struct pipeline_param *pipe_config = &dev->pipe2_config;

    if (!buf || !buf->dma)
        return;
    start_addr = buf->dma;
    /* round up to the DMA engine's 8-byte alignment */
    if (0 != start_addr % 8)
        start_addr = (start_addr + 8) / 8 * 8;
    /* luma plane size; chroma planes are derived from it below */
    y_bytes = pipe_config->dma_stride_y * pipe_config->pipeline_outh;
    switch (pipe_config->output_format) {
    case RAW8: // only p2 support, refer to YV12
        planes = 1; /* FIXME: asic workaround, if uv channel unset, axi wr error */
        break;
    case YUV422_YUYV: // 1 planar
    case YUV422_YVYU: // 1 planar
    case YUV422_UYVY: // 1 planar
    case YUV422_VYUY: // 1 planar
        planes = 1;
        break;
    case YUV420_YV12: // 3 planar
    case YUV420_I420: // 3 planar
        planes = 3;
        /* I420 stores U before V; YV12 stores V before U */
        if (YUV420_I420 == pipe_config->output_format) {
            tmp_u_addr = start_addr + y_bytes;
            if (0 != tmp_u_addr % 8)
                tmp_u_addr = (tmp_u_addr + 8) / 8 * 8;
            tmp_v_addr = tmp_u_addr + y_bytes / 4;
            if (0 != tmp_v_addr % 8)
                tmp_v_addr = (tmp_v_addr + 8) / 8 * 8;
        } else {
            tmp_v_addr = start_addr + y_bytes;
            if (0 != tmp_v_addr % 8)
                tmp_v_addr = (tmp_v_addr + 8) / 8 * 8;
            tmp_u_addr = tmp_v_addr + y_bytes / 4;
            if (0 != tmp_u_addr % 8)
                tmp_u_addr = (tmp_u_addr + 8) / 8 * 8;
        }
        break;
    case YUV420_NV12: // 2 planar
    case YUV420_NV21: // 2 planar
        planes = 2;
        tmp_u_addr = start_addr + y_bytes;
        break;
    default:
        break;
    }
    dmad[0] = start_addr;
    dmad[1] = tmp_u_addr;
    dmad[2] = tmp_v_addr;
    v4l2_dbg(1, debug, &dev->v4l2_dev, "update planes%d y:0x%08x u:0x%08x v:0x%08x",
             planes, dmad[0], dmad[1], dmad[2]);
    pipeline_update_mac_addr(dev, dmad, planes);
}
/*
 * set_pipeline_scaler() - compute pipe output size from crop/subsample/scaler
 *
 * Output size = crop size / subsample ratio * (4 / scaler_ratio), where
 * scaler_ratio encodes 1/4x..4x in quarters (16 = 1/4x, 4 = 1x, 1 = 4x).
 * The phase values (h/v_init_ph, h/v_delta_ph) are computed per mode but
 * NOTE(review): never written to hardware in this function -- confirm they
 * are consumed elsewhere or are dead.
 * Results are stored in pipe_config->pipeline_outw/outh.
 */
static void set_pipeline_scaler(struct pipeline_param* pipe_config)
{
u16 width = 0;
u16 height = 0;
u8 subsample_ratio = 0;
u8 scaler_ratio = 4;
u8 scaler_cofe = 0;
u32 h_init_ph = 0x8;
u32 h_delta_ph = 0x10000;
u32 v_init_ph = 0x8;
u32 v_delta_ph = 0x10000;
/* subsample field is the log2 of the decimation factor */
subsample_ratio = 1 << pipe_config->subsample;
width = pipe_config->pipeline_crop.end_x - pipe_config->pipeline_crop.start_x;
height = pipe_config->pipeline_crop.end_y - pipe_config->pipeline_crop.start_y;
width /= subsample_ratio;
height /= subsample_ratio;
switch(pipe_config->scaler) {
case SCALER_QUARTER: /* 1/4x */
scaler_ratio = 16;
scaler_cofe = 0x0;
h_init_ph = 0x20;
h_delta_ph = 0x40000;
v_init_ph = 0x20;
v_delta_ph = 0x40000;
break;
case SCALER_HALF: /* 1/2x */
scaler_ratio = 8;
scaler_cofe = 0x5;
h_init_ph = 0x10;
h_delta_ph = 0x20000;
v_init_ph = 0x10;
v_delta_ph = 0x20000;
break;
case NO_SCALER: /* 1x */
scaler_ratio = 4;
scaler_cofe = 0xf;
h_init_ph = 0x8;
h_delta_ph = 0x10000;
v_init_ph = 0x8;
v_delta_ph = 0x10000;
break;
case SCALER_2: /* 2x upscale */
scaler_ratio = 2;
scaler_cofe = 0xf;
h_init_ph = 0x4;
h_delta_ph = 0x8000;
v_init_ph = 0x4;
v_delta_ph = 0x8000;
break;
case SCALER_4: /* 4x upscale */
scaler_ratio = 1;
scaler_cofe = 0xf;
h_init_ph = 0x2;
h_delta_ph = 0x4000;
v_init_ph = 0x2;
v_delta_ph = 0x4000;
break;
default:
break;
}
pipe_config->pipeline_outw= width * 4 / scaler_ratio;
pipe_config->pipeline_outh= height * 4 / scaler_ratio;
pr_info("set_pipeline_scaler: pipeline%d subsample = %d subsample_ratio = %d, dma_width = %d, dma_height = %d, scaler_ratio = %d\n",
pipe_config->pipeline_num, pipe_config->subsample, subsample_ratio, pipe_config->pipeline_outw, pipe_config->pipeline_outh, scaler_ratio);
}
/* Clear REG_DATA_FORMAT_CTRL bit 23 for pipe2 (the capture pipeline);
 * other pipelines are left untouched. */
static void set_pipeline_jpeg_mode(struct asr_camera_dev *dev)
{
    u32 ctrl;

    if (dev->pipe2_config.pipeline_num != 2)
        return;
    ctrl = readl(dev->ipe_base + REG_DATA_FORMAT_CTRL);
    ctrl &= ~(1 << 23);
    writel(ctrl, dev->ipe_base + REG_DATA_FORMAT_CTRL);
}
/*
 * Enable raw-dump mode on pipe2; other pipelines do not support it.
 * NOTE(review): returns 1 (not 0) on success and -1 on failure, and the
 * caller set_pipeline_output_format() propagates this value -- confirm that
 * its callers treat any nonzero return consistently before changing.
 */
static int set_pipeline_dump_raw(struct asr_camera_dev *dev, u8 pipe_num)
{
if (pipe_num == 2) {
writel(readl(dev->ipe_base + REG_SUBSAMPLE_SCALER_CTRL2) | PIPE2_CTRL_RAW_DUMP,
dev->ipe_base + REG_SUBSAMPLE_SCALER_CTRL2);
return 1;
} else {
v4l2_err(&dev->v4l2_dev, "pipeline%d not support dump raw!\n", pipe_num);
return -1;
}
}
/*
 * set_pipeline_output_format() - program input endianness and output layout
 *
 * REG_DATA_FORMAT_CTRL bit layout (per pipeline N = pipeline_num):
 *   [0 + N*8]       ISIM_420SP      semi-planar (NV12/NV21) enable
 *   [2 + N*8]       SEMI_UV_ENDFMT  U/V order for planar/semi-planar
 *   [7:4 + N*8]     YUVOUTFMT       0x0 422 planar, 0x4 422 packed, 0x5 420
 *   [27:26 + N*2]   YUVENDFMT       packed 4:2:2 byte order
 *   [25:24]         YUVINFMT        sensor (CCIC input) byte order
 * Returns 0, or the value from set_pipeline_dump_raw() for RAW8.
 */
static int set_pipeline_output_format(struct asr_camera_dev *dev)
{
int ret = 0;
u32 value = 0;
u32 sensor_out_fmt = 0;
int is_yuv = 0;
struct pipeline_param *pipe_config;
u32 reg_value = readl(dev->ipe_base + REG_DATA_FORMAT_CTRL);
pipe_config = &dev->pipe2_config;
/*
0x0 422 8 bpp planar
0x4 422 8 bpp packed
0x5 420 8 bpp planar
others Reserved
*/
switch(dev->sensor.fmt.format.code) {
/*
This field defines the Endianness of the external CMOS sensor's output (CCIC's input).
This field has no effect on the MIPI CSI2 CMOS sensor.
0x0 = Y1CbY0Cr: YUYV sensor
0x1 = Y1CrY0Cb: YVYU sensor
0x2 = CrY1CbY0: VYUY sensor
0x3 = CbY1CrY0: UYVY sensor
*/
case MEDIA_BUS_FMT_YUYV8_2X8:
sensor_out_fmt= (0x0<<24); //YUVINFMT
is_yuv =1;
break;
case MEDIA_BUS_FMT_YVYU8_2X8:
sensor_out_fmt = (0x1<<24); //YUVINFMT
is_yuv =1;
break;
case MEDIA_BUS_FMT_VYUY8_2X8:
sensor_out_fmt= (0x2<<24); //YUVINFMT
is_yuv =1;
break;
case MEDIA_BUS_FMT_UYVY8_2X8:
sensor_out_fmt= (0x3<<24); //YUVINFMT
is_yuv =1;
break;
default:
break;
}
/* tell the ISP whether its input is YUV or raw Bayer */
if(is_yuv)
isp_reg_write(dev->isp_base, ISP_OUT_FMT_REG, SENSOR_INPUT_FMT_YUV);
else
isp_reg_write(dev->isp_base, ISP_OUT_FMT_REG, SENSOR_INPUT_FMT_RAW);
switch(pipe_config->output_format) {
/*
<p>0x0 = Y1CbY0Cr: image format vyuy
<p>0x1 = Y1CrY0Cb: image format uyvy
<p>0x2 = CrY1CbY0: image format yuyv
<p>0x3 = CbY1CrY0: image format yvyu
*/
case RAW8: /* only p2 support, refer to NV12 */
value += (0x1 << (0 + pipe_config->pipeline_num * 8)); //ISIM_420SP
value += (0x0 << (2 + pipe_config->pipeline_num * 8)); //SEMI_UV_ENDFMT
value += (0x5 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
ret = set_pipeline_dump_raw(dev, pipe_config->pipeline_num);
break;
case YUV422_YUYV:
value += (0x02 << (26 + pipe_config->pipeline_num * 2)); //YUVENDFMT
value += (0x4 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
break;
case YUV422_YVYU:
value += (0x03 << (26 + pipe_config->pipeline_num * 2)); //YUVENDFMT
value += (0x4 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
break;
case YUV422_UYVY:
value += (0x01 << (26 + pipe_config->pipeline_num * 2)); //YUVENDFMT
value += (0x4 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
break;
case YUV422_VYUY:
value += (0x00 << (26 + pipe_config->pipeline_num * 2)); //YUVENDFMT
value += (0x4 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
break;
case YUV420_YV12:
value += (0x5 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
value += (0x1 << (2 + pipe_config->pipeline_num * 8)); //SEMI_UV_ENDFMT
break;
case YUV420_I420:
value += (0x5 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
value += (0x0 << (2 + pipe_config->pipeline_num * 8)); //SEMI_UV_ENDFMT
break;
case YUV420_NV12:
value += (0x1 << (0 + pipe_config->pipeline_num * 8)); //ISIM_420SP
value += (0x0 << (2 + pipe_config->pipeline_num * 8)); //SEMI_UV_ENDFMT
value += (0x5 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
break;
case YUV420_NV21:
value += (0x1 << (0 + pipe_config->pipeline_num * 8)); //ISIM_420SP
value += (0x1 << (2 + pipe_config->pipeline_num * 8)); //SEMI_UV_ENDFMT
value += (0x5 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
break;
case YUV422_3PLANAR:
value += (0x0 << (4 + pipe_config->pipeline_num * 8)); //YUVOUTFMT
break;
default:
break;
}
/* clear pipe2's format fields before merging in the new value */
if (2 == pipe_config->pipeline_num) {
reg_value &= 0X3F80FFFF;
}
reg_value += value;
/* replace YUVINFMT [25:24] with the sensor byte order */
reg_value = (reg_value & 0xfcffffff) + sensor_out_fmt;
writel(reg_value, dev->ipe_base + REG_DATA_FORMAT_CTRL);
v4l2_info(&dev->v4l2_dev, "set_pipeline_output_format: ------pipeline%d format value = 0x%x----------- \n",
pipe_config->pipeline_num, reg_value);
return ret;
}
/* Program pipe2 DMA pitches: luma in the low halfword, chroma in the high. */
static void pipeline_set_img_pitch(struct asr_camera_dev *dev, u16 pitch_y, u16 pitch_uv)
{
    u32 packed = pitch_y + (pitch_uv << 16);

    writel(packed, dev->ipe_base + REG_PIP2_CCIC_IMG_PITCH);
}
/* Program pipe2 DMA image size: width in the low halfword, height high. */
static void pipeline_set_img_size(struct asr_camera_dev *dev, u16 width, u16 height)
{
    u32 packed = width + (height << 16);

    writel(packed, dev->ipe_base + REG_IMG_SIZE_PIP2);
}
/*
 * cam_set_pitch_dma_size() - derive and program pipe2 DMA pitch and size
 *
 * Aligns the luma stride to 8 bytes, then computes per-format Y/UV pitches
 * and the DMA image dimensions (packed 4:2:2 doubles both pitch and width).
 * Returns 0 on success, -1 on an unsupported pipeline/format combination.
 */
static int cam_set_pitch_dma_size(struct asr_camera_dev *dev)
{
u16 img_pitch_y, img_pitch_uv, img_width, img_height;
struct pipeline_param *pipe_config;
pipe_config = &dev->pipe2_config;
/* DMA requires an 8-byte aligned luma stride */
pipe_config->dma_stride_y = ALIGN_TO(pipe_config->pipeline_outw, 8);
switch (pipe_config->output_format) {
case YUV422_YUYV:
case YUV422_YVYU:
case YUV422_UYVY:
case YUV422_VYUY:
/* packed 4:2:2 -- two bytes per pixel */
img_pitch_y = pipe_config->dma_stride_y * 2;
img_pitch_uv = pipe_config->dma_stride_y * 2;
img_width = pipe_config->pipeline_outw * 2;
img_height = pipe_config->pipeline_outh;
break;
case YUV420_NV12:
case YUV420_NV21:
/* semi-planar: interleaved UV plane shares the luma pitch */
img_pitch_y = pipe_config->dma_stride_y;
img_pitch_uv = pipe_config->dma_stride_y;
img_width = pipe_config->pipeline_outw;
img_height = pipe_config->pipeline_outh;
break;
case YUV420_I420:
case YUV420_YV12:
/* fully planar: half-width chroma planes */
img_pitch_y = pipe_config->dma_stride_y;
img_pitch_uv = pipe_config->dma_stride_y / 2;
img_width = pipe_config->pipeline_outw;
img_height = pipe_config->pipeline_outh;
break;
case RAW8: // only p2 support, refer to YV12
if (pipe_config->pipeline_num != 2) {
v4l2_err(&dev->v4l2_dev, "pipeline%d not support format raw8!",
pipe_config->pipeline_num);
return -1;
}
img_pitch_y = pipe_config->dma_stride_y;
img_pitch_uv = pipe_config->dma_stride_y / 2;
img_width = pipe_config->pipeline_outw;
img_height = pipe_config->pipeline_outh;
break;
default:
v4l2_err(&dev->v4l2_dev, "invalid pipeline%d format%d", pipe_config->pipeline_num, pipe_config->output_format);
return -1;
}
v4l2_info(&dev->v4l2_dev, "set_pitch_dma_size: pipeline%d output_format = %d y_pitch = %d, pipe_config->dma_width = %d, pipe_config->dma_height = %d\n",
pipe_config->pipeline_num, pipe_config->output_format, pipe_config->dma_stride_y, pipe_config->pipeline_outw, pipe_config->pipeline_outh);
pipeline_set_img_pitch(dev, img_pitch_y, img_pitch_uv);
pipeline_set_img_size(dev, img_width, img_height);
return 0;
}
/* Reset pipe2 to its power-on defaults: 640x480 NV12, shadow updates,
 * full crop, no subsampling or scaling. */
static void set_pipeline_default_fmt(struct asr_camera_dev *dev)
{
    struct pipeline_param *cfg = &dev->pipe2_config;

    cfg->pipeline_num = 2;      /* pipe2 is the capture pipeline */
    cfg->pipeline_enable = 1;
    cfg->shadow_mode = 1;
    cfg->pipeline_crop.start_x = 0;
    cfg->pipeline_crop.start_y = 0;
    cfg->pipeline_crop.end_x = DEFAULT_WIDTH;
    cfg->pipeline_crop.end_y = DEFAULT_HEIGHT;
    cfg->scaler = NO_SCALER;
    cfg->subsample = 0;
    cfg->output_format = YUV420_NV12;
}
/*
 * asrc_sensor_set_stream() - start/stop streaming on the sensor subdevice
 * @on: nonzero to start, zero to stop
 *
 * A sensor without s_stream support (-ENOIOCTLCMD) is not an error.
 * Fixes the copy-pasted error message from asrc_sensor_set_power(), which
 * produced the nonsensical "put subdevice in set stream mode" when stopping.
 */
static int asrc_sensor_set_stream(struct asr_camera_dev *pcdev, int on)
{
    int ret;

    ret = sensor_call(pcdev, video, s_stream, on);
    if (ret == -ENOIOCTLCMD)
        ret = 0;
    if (ret) {
        v4l2_err(&pcdev->v4l2_dev,
                 "Failed to %s subdevice streaming: %d\n",
                 on ? "start" : "stop", ret);
    }
    return ret;
}
/*
 * asrc_sensor_set_power() - switch the sensor between run and power-save
 * @on: nonzero for normal operation, zero for power saving
 *
 * Sensors without s_power support (-ENOIOCTLCMD) are treated as success.
 */
static int asrc_sensor_set_power(struct asr_camera_dev *pcdev, int on)
{
    int ret = sensor_call(pcdev, core, s_power, on);

    if (ret == -ENOIOCTLCMD)
        ret = 0;
    if (ret)
        v4l2_err(&pcdev->v4l2_dev,
                 "Failed to put subdevice in %s mode: %d\n",
                 on ? "normal operation" : "power saving", ret);
    return ret;
}
/*
* Videobuf2 section
*/
/* vb2 buf_init: prepare the driver-side list linkage for a new buffer. */
static int asrc_vb2_init(struct vb2_buffer *vb)
{
    struct asr_camera_dev *pcdev = vb2_get_drv_priv(vb->vb2_queue);
    struct isp_buffer *buf = vb2_to_isp_buffer(vb);

    INIT_LIST_HEAD(&buf->queue);
    v4l2_dbg(1, debug, &pcdev->v4l2_dev,
             "%s(vb=%p) size=%lu\n",
             __func__, vb, vb2_get_plane_payload(vb, 0));
    return 0;
}
/*
 * vb2 buf_queue: hand a prepared buffer to the driver.
 *
 * The capture list is also manipulated from start/stop_streaming under
 * pcdev->lock (IRQ-safe); the original appended without the lock, racing
 * those paths -- take it here too. Also prints the dma_addr_t with %pad
 * instead of %x (wrong size on 64-bit).
 */
static void asrc_vb2_queue(struct vb2_buffer *vb)
{
    struct isp_buffer *buf = vb2_to_isp_buffer(vb);
    struct asr_camera_dev *pcdev = vb2_get_drv_priv(vb->vb2_queue);
    unsigned long flags;

    v4l2_dbg(1, debug, &pcdev->v4l2_dev,
             "%s(vb=%p), addr=%pad, size=%lu\n",
             __func__, vb, &buf->dma, vb2_get_plane_payload(vb, 0));
    spin_lock_irqsave(&pcdev->lock, flags);
    list_add_tail(&buf->queue, &pcdev->capture);
    spin_unlock_irqrestore(&pcdev->lock, flags);
}
/*
 * vb2 buf_prepare: validate DMA alignment and record the plane address.
 *
 * The DMA engine requires 32-byte aligned buffers. Uses %pad for the
 * dma_addr_t in the debug print (the original %x truncates on 64-bit).
 * Returns 0 or -EINVAL for a misaligned buffer.
 */
static int asrc_vb2_prepare(struct vb2_buffer *vb)
{
    struct asr_camera_dev *pcdev = vb2_get_drv_priv(vb->vb2_queue);
    struct isp_buffer *buf = vb2_to_isp_buffer(vb);
    dma_addr_t addr;

    addr = vb2_dma_contig_plane_dma_addr(vb, 0);
    if (!IS_ALIGNED(addr, 32)) {
        v4l2_err(&pcdev->v4l2_dev,
                 "Buffer address must be aligned to 32 bytes boundary.\n");
        return -EINVAL;
    }
    vb2_set_plane_payload(vb, 0, pcdev->current_pix.sizeimage);
    buf->dma = addr;
    v4l2_dbg(1, debug, &pcdev->v4l2_dev,
             "%s (vb=%p), addr=%pad, size=%lu\n",
             __func__, vb, &addr, vb2_get_plane_payload(vb, 0));
    return 0;
}
/*
 * vb2 queue_setup: single plane sized from the current pixel format.
 * If userspace already fixed the plane count, only validate the size.
 */
static int asrc_vb2_queue_setup(struct vb2_queue *q,
                                unsigned int *num_buffers,
                                unsigned int *num_planes, unsigned int sizes[],
                                struct device *alloc_devs[])
{
    struct asr_camera_dev *pcdev = vb2_get_drv_priv(q);
    int size = pcdev->current_pix.sizeimage;

    v4l2_dbg(1, debug, &pcdev->v4l2_dev,
             "%s(vq=%p nbufs=%d num_planes=%d size=%d)\n",
             __func__, q, *num_buffers, *num_planes, size);
    if (*num_planes != 0)
        return sizes[0] < size ? -EINVAL : 0;
    *num_planes = 1;
    sizes[0] = size;
    return 0;
}
/*
 * asr_camera_eof_done() - complete the current buffer at end of frame
 * @state: VB2_BUF_STATE_DONE or _ERROR, passed through to vb2
 *
 * Timestamps and sequences the buffer, hands it back to vb2, and promotes
 * next_buf to curr_buf. Mutates curr_buf/next_buf, so the caller must hold
 * pcdev->lock -- presumably called from the EOF tasklet/IRQ path (not
 * visible in this chunk); confirm locking at the call sites.
 * NOTE(review): buf->dma is dma_addr_t printed with %x -- truncates on
 * 64-bit; %pad would be correct.
 */
static void asr_camera_eof_done(struct asr_camera_dev *pcdev,
struct isp_buffer *buf,
enum vb2_buffer_state state)
{
struct vb2_buffer *vb = &buf->vbuf.vb2_buf;
struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
v4l2_dbg(1, debug, &pcdev->v4l2_dev, "%s dequeued buffer (buf=0x%x)\n",
__func__, buf->dma);
vb->timestamp = ktime_get_ns();
vbuf->sequence = pcdev->buf_sequence++;
vbuf->field = V4L2_FIELD_NONE;
vb2_buffer_done(vb, state);
/* advance the two-buffer pipeline */
if (pcdev->next_buf) {
pcdev->curr_buf = pcdev->next_buf;
pcdev->next_buf = NULL;
} else
pcdev->curr_buf = NULL;
}
/*
 * vb2 start_streaming: arm the DMA with the first queued buffer, enable
 * pipe2 and start the sensor.
 *
 * The original used list_first_entry() without checking for an empty
 * capture list, which yields a bogus non-NULL pointer and a wild list_del;
 * use list_first_entry_or_null() and start only when a buffer exists.
 */
static int asrc_vb2_start_streaming(struct vb2_queue *vq, unsigned int count)
{
    int ret = 0;
    struct asr_camera_dev *pcdev = vb2_get_drv_priv(vq);
    unsigned long lock_flags = 0;

    spin_lock_irqsave(&pcdev->lock, lock_flags);
    pcdev->buf_sequence = 0;
    pcdev->curr_buf = list_first_entry_or_null(&pcdev->capture,
                                               struct isp_buffer, queue);
    if (pcdev->curr_buf)
        list_del(&pcdev->curr_buf->queue);
    spin_unlock_irqrestore(&pcdev->lock, lock_flags);
    if (pcdev->curr_buf) {
        cam_set_addr(pcdev, pcdev->curr_buf);
        pcdev->pipe2_config.pipeline_enable = 1;
        set_pipeline_enable(pcdev);
        set_pipeline_shadow_ready(pcdev);
        ret = asrc_sensor_set_stream(pcdev, 1);
    }
    return ret;
}
/*
 * vb2 stop_streaming: wait for in-flight DMA, disable pipe2 and the sensor,
 * then return every driver-held buffer to vb2 with VB2_BUF_STATE_ERROR (as
 * the vb2 contract requires).
 */
static void asrc_vb2_stop_streaming(struct vb2_queue *vq)
{
struct asr_camera_dev *pcdev = vb2_get_drv_priv(vq);
struct isp_buffer *buf;//, *tmp;
unsigned long lock_flags = 0;
unsigned int timeout = 0x100000;
/* busy-wait (up to ~1s in 1us steps) for the current DMA to finish */
while (pcdev->dma_state && (timeout-- > 0))
udelay(1);
pcdev->pipe2_config.pipeline_enable = 0;
set_pipeline_enable(pcdev);
set_pipeline_shadow_ready(pcdev);
asrc_sensor_set_stream(pcdev, 0);
spin_lock_irqsave(&pcdev->lock, lock_flags);
/* fold curr_buf/next_buf back into the capture list exactly once */
if (pcdev->curr_buf) {
list_add_tail(&pcdev->curr_buf->queue, &pcdev->capture);
if (pcdev->curr_buf == pcdev->next_buf)
pcdev->next_buf = NULL;
pcdev->curr_buf = NULL;
}
if (pcdev->next_buf) {
list_add_tail(&pcdev->next_buf->queue, &pcdev->capture);
pcdev->next_buf = NULL;
}
while (!list_empty(&pcdev->capture)) {
buf = list_first_entry(&pcdev->capture,
struct isp_buffer, queue);
list_del(&buf->queue);
vb2_buffer_done(&buf->vbuf.vb2_buf, VB2_BUF_STATE_ERROR);
}
spin_unlock_irqrestore(&pcdev->lock, lock_flags);
return;
}
/* videobuf2 callbacks; queue serialization uses pcdev->mlock (vq->lock). */
static const struct vb2_ops asrc_vb2_ops = {
.queue_setup = asrc_vb2_queue_setup,
.buf_prepare = asrc_vb2_prepare,
.buf_init = asrc_vb2_init,
.buf_queue = asrc_vb2_queue,
.start_streaming = asrc_vb2_start_streaming,
.stop_streaming = asrc_vb2_stop_streaming,
.wait_prepare = vb2_ops_wait_prepare,
.wait_finish = vb2_ops_wait_finish,
};
/*
 * asr_camera_init_videobuf2() - set up the capture vb2 queue
 *
 * Single-planar video capture, DMA-contig memory, MMAP/USERPTR/DMABUF
 * access, serialized by pcdev->mlock. Returns vb2_queue_init()'s result.
 */
static int asr_camera_init_videobuf2(struct asr_camera_dev *pcdev)
{
    struct vb2_queue *vq = &pcdev->vb2_vq;
    int ret;

    memset(vq, 0, sizeof(*vq));
    vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
    vq->drv_priv = pcdev;
    vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
    vq->buf_struct_size = sizeof(struct isp_buffer);
    vq->dev = pcdev->v4l2_dev.dev;
    vq->ops = &asrc_vb2_ops;
    vq->mem_ops = &vb2_dma_contig_memops;
    vq->lock = &pcdev->mlock;
    ret = vb2_queue_init(vq);
    v4l2_dbg(1, debug, &pcdev->v4l2_dev,
             "vb2_queue_init(vq=%p): %d\n", vq, ret);
    return ret;
}
/* VIDIOC_QUERYCAP: static identity strings, no device state consulted. */
static int asrc_vidioc_querycap(struct file *file, void *priv,
                                struct v4l2_capability *cap)
{
    strscpy(cap->driver, ASR_CAM_DRV_NAME, sizeof(cap->driver));
    strscpy(cap->card, "ASR_Camera", sizeof(cap->card));
    strscpy(cap->bus_info, "platform:asr-camera", sizeof(cap->bus_info));
    return 0;
}
/* VIDIOC_ENUMINPUT: a single camera input at index 0. */
static int asrc_vidioc_enum_input(struct file *file, void *priv,
                                  struct v4l2_input *i)
{
    if (i->index != 0)
        return -EINVAL;
    strscpy(i->name, "Camera", sizeof(i->name));
    i->type = V4L2_INPUT_TYPE_CAMERA;
    return 0;
}
/* VIDIOC_G_INPUT: only input 0 exists. */
static int asrc_vidioc_g_input(struct file *file, void *priv, unsigned int *i)
{
*i = 0;
return 0;
}
/* VIDIOC_S_INPUT: accept only the single fixed input, index 0. */
static int asrc_vidioc_s_input(struct file *file, void *priv, unsigned int i)
{
if (i > 0)
return -EINVAL;
return 0;
}
/* VIDIOC_ENUM_FMT: only the currently configured pixelformat is listed. */
static int asrc_vidioc_enum_fmt_vid_cap(struct file *filp, void *priv,
					struct v4l2_fmtdesc *f)
{
	struct asr_camera_dev *pcdev = video_drvdata(filp);

	if (f->index)
		return -EINVAL;

	f->pixelformat = pcdev->current_pix.pixelformat;
	return 0;
}
/*
 * VIDIOC_G_FMT: return the currently configured pixel format.
 *
 * Copy the whole v4l2_pix_format instead of a hand-picked subset of
 * fields; the previous version left members such as flags, ycbcr_enc,
 * quantization and xfer_func untouched, i.e. filled with whatever
 * userspace passed in.
 */
static int asrc_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
				     struct v4l2_format *f)
{
	struct asr_camera_dev *pcdev = video_drvdata(filp);

	f->fmt.pix = pcdev->current_pix;
	v4l2_info(&pcdev->v4l2_dev, "current_fmt->fourcc: 0x%08x\n",
		  pcdev->current_pix.pixelformat);
	return 0;
}
static int asrc_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
struct v4l2_format *f)
{
struct asr_camera_dev *pcdev = video_drvdata(filp);
struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_subdev_pad_config pad_cfg;
struct v4l2_subdev_format format = {
.which = V4L2_SUBDEV_FORMAT_TRY,
};
struct v4l2_mbus_framefmt *mf = &format.format;
int ret;
if (pix->pixelformat != V4L2_PIX_FMT_YVYU) {
v4l2_err(&pcdev->v4l2_dev, "Format %x not found\n", pix->pixelformat);
return -EINVAL;
}
v4l2_fill_mbus_format(mf, pix, MEDIA_BUS_FMT_YVYU8_2X8);
ret = sensor_call(pcdev, pad, set_fmt, &pad_cfg, &format);
if (ret < 0)
return ret;
v4l2_fill_pix_format(pix, mf);
ret = asr_mbus_image_size(&pcdev->pipe2_config);
if (ret < 0)
return ret;
pix->sizeimage = ret;
return 0;
}
/*
 * VIDIOC_S_FMT: refuse while capture is in flight, otherwise delegate the
 * negotiation to try_fmt (which also applies the format to the sensor).
 */
static int asrc_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
				     struct v4l2_format *f)
{
	struct asr_camera_dev *pcdev = video_drvdata(filp);
	struct v4l2_pix_format *pix = &f->fmt.pix;
	unsigned long flags;
	int busy;

	v4l2_info(&pcdev->v4l2_dev,
		  "s_fmt_vid_cap(pix=%dx%d:%x)\n",
		  pix->width, pix->height, pix->pixelformat);

	/* A format change is not allowed while buffers are active. */
	spin_lock_irqsave(&pcdev->lock, flags);
	busy = pcdev->curr_buf || vb2_is_busy(&pcdev->vb2_vq);
	spin_unlock_irqrestore(&pcdev->lock, flags);
	if (busy)
		return -EBUSY;

	return asrc_vidioc_try_fmt_vid_cap(filp, priv, f);
}
/*
 * open(): the first file handle powers the sensor on; later openers share
 * the already-powered device.  Serialized by pcdev->mlock.
 */
static int asrc_fops_camera_open(struct file *filp)
{
struct asr_camera_dev *pcdev = video_drvdata(filp);
int ret;
mutex_lock(&pcdev->mlock);
ret = v4l2_fh_open(filp);
if (ret < 0)
goto out;
/* Not the first handle: sensor is already powered. */
if (!v4l2_fh_is_singular_file(filp))
goto out;
ret = asrc_sensor_set_power(pcdev, 1);
/* On power failure, undo v4l2_fh_open() so no half-open handle remains. */
if (ret)
v4l2_fh_release(filp);
out:
mutex_unlock(&pcdev->mlock);
return ret;
}
/*
 * release(): the last closer powers the sensor off.
 * NOTE(review): when this is the last handle, the _vb2_fop_release()
 * status is overwritten by the set_power() status — confirm intentional.
 */
static int asrc_fops_camera_release(struct file *filp)
{
struct asr_camera_dev *pcdev = video_drvdata(filp);
int ret;
bool fh_singular;
mutex_lock(&pcdev->mlock);
/* Must be sampled before _vb2_fop_release() drops the handle. */
fh_singular = v4l2_fh_is_singular_file(filp);
ret = _vb2_fop_release(filp, NULL);
if (fh_singular)
ret = asrc_sensor_set_power(pcdev, 0);
mutex_unlock(&pcdev->mlock);
return ret;
}
/* V4L2 file operations; read/poll/mmap are the stock vb2 helpers. */
static const struct v4l2_file_operations asr_camera_fops = {
.owner = THIS_MODULE,
.open = asrc_fops_camera_open,
.release = asrc_fops_camera_release,
.read = vb2_fop_read,
.poll = vb2_fop_poll,
.mmap = vb2_fop_mmap,
.unlocked_ioctl = video_ioctl2,
};
/*
 * V4L2 ioctl table: format/input handling is driver-specific, the buffer
 * ioctls are the stock videobuf2 helpers.
 */
static const struct v4l2_ioctl_ops asr_camera_ioctl_ops = {
.vidioc_querycap = asrc_vidioc_querycap,
.vidioc_enum_input = asrc_vidioc_enum_input,
.vidioc_g_input = asrc_vidioc_g_input,
.vidioc_s_input = asrc_vidioc_s_input,
.vidioc_enum_fmt_vid_cap = asrc_vidioc_enum_fmt_vid_cap,
.vidioc_g_fmt_vid_cap = asrc_vidioc_g_fmt_vid_cap,
.vidioc_s_fmt_vid_cap = asrc_vidioc_s_fmt_vid_cap,
.vidioc_try_fmt_vid_cap = asrc_vidioc_try_fmt_vid_cap,
.vidioc_reqbufs = vb2_ioctl_reqbufs,
.vidioc_create_bufs = vb2_ioctl_create_bufs,
.vidioc_querybuf = vb2_ioctl_querybuf,
.vidioc_qbuf = vb2_ioctl_qbuf,
.vidioc_dqbuf = vb2_ioctl_dqbuf,
.vidioc_expbuf = vb2_ioctl_expbuf,
.vidioc_streamon = vb2_ioctl_streamon,
.vidioc_streamoff = vb2_ioctl_streamoff,
.vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
};
/* Template copied into pcdev->vdev when the sensor subdev is bound. */
static const struct video_device asr_camera_videodev_template = {
.name = "asr-camera",
.minor = -1,
.fops = &asr_camera_fops,
.ioctl_ops = &asr_camera_ioctl_ops,
.release = video_device_release_empty,
.device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING,
};
/*
 * Query the bound sensor's bus configuration and active format, cache the
 * ASR-private interface/SPI parameters the sensor driver passes back, then
 * push a default 640x480 YVYU format and mirror the result into
 * dev->current_pix.
 *
 * Returns 0 on success or a negative errno from a subdev call.
 */
int asrc_update_sensor_info(struct asr_camera_dev *dev)
{
struct asr_sensor_info *sensor;
struct v4l2_pix_format *pix = &dev->current_pix;
struct v4l2_mbus_framefmt *mf;
int ret = 0;
struct v4l2_subdev_mbus_code_enum code = {
.which = V4L2_SUBDEV_FORMAT_ACTIVE,
};
unsigned int raw_fmts = 0;
sensor = &dev->sensor;
/* -ENOIOCTLCMD only means the sensor does not implement the op. */
ret = v4l2_subdev_call(sensor->sd, video, g_mbus_config,
&sensor->mbus);
if (ret && ret != -ENOIOCTLCMD)
return ret;
sensor->fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
ret = v4l2_subdev_call(sensor->sd, pad, get_fmt,
&sensor->cfg, &sensor->fmt);
if (ret && ret != -ENOIOCTLCMD)
return ret;
/*
 * Private ABI with the ASR sensor drivers: the interface type and the
 * SPI options are carried in the reserved[] words of the mbus format.
 */
dev->camera_interface = sensor->fmt.format.reserved[0];
dev->spi_config.spi_sdr = sensor->fmt.format.reserved[1];
dev->spi_config.spi_crc = sensor->fmt.format.reserved[2];
dev->spi_config.spi_manual_enable = sensor->fmt.format.reserved[3];
dev->spi_config.spi_manual_mode = sensor->fmt.format.reserved[4];
dev->spi_config.spi_manual_height_enable = sensor->fmt.format.reserved[5];
dev->spi_config.spi_manual_width_enable = sensor->fmt.format.reserved[6];
dev->spi_config.spi_manual_height = sensor->fmt.format.reserved[7];
dev->spi_config.spi_manual_width = sensor->fmt.format.reserved[8];
dev->spi_config.spi_ignore_line_id = sensor->fmt.format.reserved[9];
v4l2_dbg(1, debug, &dev->v4l2_dev, "lane%d sdr%d crc%d line_id%d\n",
dev->camera_interface, dev->spi_config.spi_sdr, dev->spi_config.spi_crc, dev->spi_config.spi_ignore_line_id);
/* Count supported mbus codes; the loop also leaves code.code valid. */
while (!v4l2_subdev_call(sensor->sd, pad, enum_mbus_code, NULL, &code)) {
raw_fmts++;
code.index++;
}
mf = &sensor->fmt.format;
pix->field = V4L2_FIELD_NONE;
pix->width = DEFAULT_WIDTH;
pix->height = DEFAULT_HEIGHT;
pix->bytesperline = 0;
pix->pixelformat = V4L2_PIX_FMT_YVYU;
/* NOTE(review): uses the last enumerated mbus code, not YVYU8_2X8. */
v4l2_fill_mbus_format(mf, pix, code.code);//MEDIA_BUS_FMT_YVYU8_2X8);
ret = sensor_call(dev, pad, set_fmt, NULL, &sensor->fmt);
if (ret)
return ret;
v4l2_fill_pix_format(pix, mf);
v4l2_info(&dev->v4l2_dev, "%s(): colorspace=0x%x pixfmt=0x%x\n",
__func__, pix->colorspace, pix->pixelformat);
return ret;
}
/*
 * Program the capture pipeline from pipe2_config: clear bit 31 of
 * REG_CTRL_1, then set format, scaler, JPEG mode, output format, DMA
 * pitch/size, the front-end ISP processing and the SPI interface.
 * Returns 0 or a negative errno from fe_isp_process_config().
 */
static int asr_camera_pipe_set(struct asr_camera_dev *pcdev)
{
int ret = 0;
/* Clear bit 31 of CTRL_1 before reprogramming the pipeline. */
writel(readl(pcdev->ipe_base + REG_CTRL_1) & ~(1<<31), pcdev->ipe_base + REG_CTRL_1);
set_pipeline_default_fmt(pcdev);
//set scaler
set_pipeline_scaler(&pcdev->pipe2_config);
//set jpeg mode
set_pipeline_jpeg_mode(pcdev);
//set output format
set_pipeline_output_format(pcdev);
//set pitch and dma size
cam_set_pitch_dma_size(pcdev);
pipeline_shadow_streamon(pcdev);
ret = fe_isp_process_config(pcdev);
if (ret < 0) {
v4l2_err(&pcdev->v4l2_dev, "fe_isp_process_config err %d!\n",ret);
return ret;
}
spi_config(pcdev);
return ret;
}
/*
 * Async notifier .complete callback: all subdevs are bound.  Program the
 * pipeline, compute sizeimage for the current configuration, then drop the
 * sensor power reference taken in asr_camera_sensor_bound().
 */
static int asr_camera_sensor_complete(struct v4l2_async_notifier *notifier)
{
int ret = 0;
struct asr_camera_dev *pcdev = v4l2_dev_to_pcdev(notifier->v4l2_dev);
struct v4l2_pix_format *pix = &pcdev->current_pix;
asr_camera_pipe_set(pcdev);
pix->sizeimage = asr_mbus_image_size(&pcdev->pipe2_config);
v4l2_info(&pcdev->v4l2_dev, "Async subdev notifier completed\n");
ret = asrc_sensor_set_power(pcdev, 0);
return ret;
}
static int asr_camera_sensor_bound(struct v4l2_async_notifier *notifier,
struct v4l2_subdev *subdev,
struct v4l2_async_subdev *asd)
{
int err;
struct v4l2_device *v4l2_dev = notifier->v4l2_dev;
struct asr_camera_dev *pcdev = v4l2_dev_to_pcdev(v4l2_dev);
struct video_device *vdev = &pcdev->vdev;
v4l2_info(&pcdev->v4l2_dev, "%s(): trying to bind a device\n",
__func__);
mutex_lock(&pcdev->mlock);
*vdev = asr_camera_videodev_template;
vdev->v4l2_dev = v4l2_dev;
vdev->lock = &pcdev->mlock;
pcdev->sensor.sd = subdev;
pcdev->vdev.queue = &pcdev->vb2_vq;
pcdev->vdev.v4l2_dev = &pcdev->v4l2_dev;
pcdev->vdev.ctrl_handler = subdev->ctrl_handler;
video_set_drvdata(&pcdev->vdev, pcdev);
err = asrc_sensor_set_power(pcdev, 1);
if (err)
goto out;
asrc_sensor_set_stream(pcdev, 0);
err = asrc_update_sensor_info(pcdev);
if (err < 0) {
v4l2_err(&pcdev->v4l2_dev, "update sensor failed\n");
goto out_sensor_poweroff;
}
err = asr_camera_init_videobuf2(pcdev);
if (err)
goto out_sensor_poweroff;
err = video_register_device(&pcdev->vdev, VFL_TYPE_GRABBER, -1);
if (err) {
v4l2_err(v4l2_dev, "register video device failed: %d\n", err);
pcdev->sensor.sd = NULL;
goto out_sensor_poweroff;
} else {
v4l2_info(&pcdev->v4l2_dev,
"ASR Camera driver attached to camera %s\n",
subdev->name);
}
mutex_unlock(&pcdev->mlock);
return err;
out_sensor_poweroff:
err = asrc_sensor_set_power(pcdev, 0);
out:
mutex_unlock(&pcdev->mlock);
return err;
}
/*
 * Async notifier .unbind callback: the sensor is going away — unregister
 * the video node and drop the cached subdev pointer, under mlock.
 */
static void asr_camera_sensor_unbind(struct v4l2_async_notifier *notifier,
struct v4l2_subdev *subdev,
struct v4l2_async_subdev *asd)
{
struct asr_camera_dev *pcdev = v4l2_dev_to_pcdev(notifier->v4l2_dev);
mutex_lock(&pcdev->mlock);
v4l2_info(&pcdev->v4l2_dev,
"ASR Camera driver detached from camera %s\n",
subdev->name);
video_unregister_device(&pcdev->vdev);
pcdev->sensor.sd = NULL;
mutex_unlock(&pcdev->mlock);
}
/* Sensor subdev lifecycle callbacks for the async notifier. */
static const struct v4l2_async_notifier_operations asr_camera_sensor_ops = {
.bound = asr_camera_sensor_bound,
.complete = asr_camera_sensor_complete,
.unbind = asr_camera_sensor_unbind,
};
/*
 * Program the per-frame DMA plane addresses for the debugfs test-capture
 * path.  The frame alternates (ping-pong) between the two halves of the
 * coherent buffer allocated in capture_test_write(); plane base addresses
 * are 8-byte aligned as the DMA engine requires.
 *
 * Changes vs. original: the open-coded "(a + 8) / 8 * 8 when misaligned"
 * round-up is replaced by the equivalent ALIGN() macro, and the dead
 * y_size/u_size locals (assigned but never read) are removed.
 */
static void set_addr(struct asr_camera_dev *dev, int index)
{
	struct pipeline_param *pipe_config = &dev->pipe2_config;
	u32 init_addr = dev->dma_addr_phys;
	u32 luma_bytes = pipe_config->dma_stride_y * pipe_config->pipeline_outh;
	u32 tmp_u_addr = 0;
	u32 tmp_v_addr = 0;
	u32 start_addr;
	u32 dmad[3];
	u32 planes = 0;

	/* Even frames land in the first half, odd frames in the second. */
	if (index % 2 == 0)
		start_addr = init_addr;
	else
		start_addr = init_addr + dev->current_pix.sizeimage;
	start_addr = ALIGN(start_addr, 8);

	switch (pipe_config->output_format) {
	case RAW8: /* only p2 support, refer to YV12 */
		planes = 1; /* FIXME: asic workaround, if uv channel unset, axi wr error */
		break;
	case YUV422_YUYV: /* packed 4:2:2 -> single plane */
	case YUV422_YVYU:
	case YUV422_UYVY:
	case YUV422_VYUY:
		planes = 1;
		break;
	case YUV420_YV12: /* 3 planar: I420 orders Y/U/V, YV12 orders Y/V/U */
	case YUV420_I420:
		planes = 3;
		if (YUV420_I420 == pipe_config->output_format) {
			tmp_u_addr = ALIGN(start_addr + luma_bytes, 8);
			tmp_v_addr = ALIGN(tmp_u_addr + luma_bytes / 4, 8);
		} else {
			tmp_v_addr = ALIGN(start_addr + luma_bytes, 8);
			tmp_u_addr = ALIGN(tmp_v_addr + luma_bytes / 4, 8);
		}
		break;
	case YUV420_NV12: /* 2 planar: Y plane + interleaved chroma */
	case YUV420_NV21:
		planes = 2;
		tmp_u_addr = start_addr + luma_bytes;
		break;
	default:
		break;
	}

	dmad[0] = start_addr;
	dmad[1] = tmp_u_addr;
	dmad[2] = tmp_v_addr;
	v4l2_dbg(1, debug, &dev->v4l2_dev, "update planes%d y:0x%08x u:0x%08x v:0x%08x",
		 planes, dmad[0], dmad[1], dmad[2]);
	pipeline_update_mac_addr(dev, dmad, planes);
}
#define IMAGE_FILE "/tmp/camera/image"
/*
 * Debug helper: write the most recent captured frame to
 * /tmp/camera/image.jpg.  Triggered by the debugfs "snapshot" file and
 * run from the EOF tasklet.  Uses the legacy set_fs()/vfs_write()
 * kernel-space file I/O pattern (pre-5.10 kernels); debug facility only.
 */
static void cam_image_dump(struct asr_camera_dev *pcdev, int index)
{
u32 imagesize = asr_mbus_image_size(&pcdev->pipe2_config);
char filename[32];
struct file *file = NULL;
mm_segment_t old_fs;
char *buf;
sprintf(filename,"%s.jpg", IMAGE_FILE);
v4l2_info(&pcdev->v4l2_dev, "file: %s\n", filename);
if(file == NULL)
file = filp_open(filename, O_RDWR | O_CREAT, 0644);
if (IS_ERR(file)) {
v4l2_err(&pcdev->v4l2_dev, "error occured while opening file %s, exiting...\n", filename);
return;
}
/* Pick the ping-pong half that the given frame index was written to. */
if (index%2 == 0)
buf = (char *)pcdev->dma_addr;
else
buf = (char *)pcdev->dma_addr + imagesize;
old_fs = get_fs();
set_fs(KERNEL_DS);
vfs_write(file, (char *)buf, imagesize, &file->f_pos);
set_fs(old_fs);
filp_close(file, NULL);
}
/*
 * Tasklet scheduled from the ISR on ISP EOF.  In debugfs test mode
 * (dma_addr set) it optionally dumps a one-shot snapshot and feeds the
 * frame to the framebuffer preview; in normal streaming mode with no
 * current buffer it disables the pipeline.
 */
static void asr_camera_eof_task(unsigned long arg)
{
struct asr_camera_dev *pcdev = (struct asr_camera_dev *)arg;
/* Frame counter; selects the ping-pong buffer half for the dump. */
static u32 index = 0;
if (pcdev->dma_addr) {
#ifdef CCIC_IRQ_DEBUG
print_hex_dump(KERN_INFO, "image contents: ",
DUMP_PREFIX_OFFSET, 16, 1,
pcdev->dma_addr, 32, true);
#endif
/* One-shot: snapshot_flag is armed by the debugfs "snapshot" file. */
if (snapshot_flag) {
cam_image_dump(pcdev, index);
snapshot_flag = 0;
}
index++;
#ifdef CONFIG_FB_ASR
asrfb_camera_preview_start(pcdev->dma_addr_phys, 0, 0, DEFAULT_WIDTH, DEFAULT_HEIGHT);
#endif
} else if (!pcdev->curr_buf) {
/* Streaming with no buffer queued: quiesce the pipeline. */
pcdev->pipe2_config.pipeline_enable = 0;
set_pipeline_enable(pcdev);
set_pipeline_shadow_ready(pcdev);
}
v4l2_dbg(1, debug, &pcdev->v4l2_dev, "Camera interrupt status!\n");
}
#ifdef CCIC_IRQ_DEBUG
/*
 * Decode and print every asserted bit of the CCIC/CSI-2 IRQ status word.
 * Debug-only; err_msg[] maps bit position to its hardware meaning.
 */
static void ccic_irq_handler(u32 irqs)
{
u32 i;
static const char *const err_msg[] = {
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"AXI Write Error IRQ",
"MIPI DPHY RX CLK ULPS Active IRQ",
"MIPI DPHY RX CLK ULPS IRQ",
"MIPI DPHY Lane ULPS Active IRQ",
"MIPI DPHY Lane Error Control IRQ",
"MIPI DPHY Lane Start of Transmission Synchronization Error IRQ",
"MIPI DPHY Lane Start of Transmission Error IRQ",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"Reserved",
"CSI2 Packet Error IRQ",
"CSI2 CRC Error IRQ",
"CSI2 ECC 2-bit (or more) Error IRQ",
"CSI2 Parity Error IRQ",
"CSI2 ECC Correctable Error IRQ",
"CSI2 Lane FIFO Overrun Error IRQ",
"CSI2 Parse Error IRQ",
"CSI2 Generic Short Packet Valid IRQ",
"CSI2 Generic Short Packet Error IRQ",
};
pr_debug("ccic irq status = 0x%08x\n", irqs);
for (i = 0; i < 32; i++) {
if (irqs & (1 << i)){
pr_info("### %s\n", err_msg[i]);
}
}
}
#endif
/*
 * Top-half interrupt handler.  Acknowledges (write-1-to-clear) both the
 * CCIC/CSI status and the camera IRQ status, programs the next DMA target
 * on ISP start-of-frame, completes the current buffer on ISP end-of-frame
 * and defers remaining work to the task_eof tasklet.
 */
static irqreturn_t camera_isr_handler(int irq, void *data)
{
struct asr_camera_dev *pcdev = data;
u32 status, csi_irqs;
static u32 sof_index = 0;
unsigned long lock_flags = 0;
csi_irqs = readl(pcdev->ipe_base + REG_CCIC_IRQ_STATUS);
writel(csi_irqs, pcdev->ipe_base + REG_CCIC_IRQ_STATUS);
status = readl(pcdev->ipe_base + REG_CAMERA_IRQ_STATUS);
v4l2_dbg(1, debug, &pcdev->v4l2_dev,
"Camera interrupt status 0x%x\n", status);
if (!status) {
return IRQ_NONE;
}
writel(status, pcdev->ipe_base + REG_CAMERA_IRQ_STATUS);
/* Handle ISP SOF first: the frame index it maintains is used by the
 * other IRQ paths and ISP EOF arrives very close to the DMA SOF. */
if (status & ISP_SOF_IRQ) {
sof_index++;
v4l2_dbg(1, debug, &pcdev->v4l2_dev, ";ISP_SOF_IRQ -----index:%d", sof_index);
if (pcdev->dma_addr) {
/* debugfs test mode: ping-pong within the coherent buffer */
set_addr(pcdev, sof_index);
set_pipeline_shadow_ready(pcdev);
} else {
/* streaming mode: pull the next queued vb2 buffer */
spin_lock_irqsave(&pcdev->lock, lock_flags);
if (!list_empty(&pcdev->capture)) {
pcdev->next_buf =
list_first_entry(&pcdev->capture,
struct isp_buffer,
queue);
list_del(&pcdev->next_buf->queue);
} else {
v4l2_err(&pcdev->v4l2_dev, "%s buffer is empty\n", __func__);
spin_unlock_irqrestore(&pcdev->lock, lock_flags);
return IRQ_HANDLED;
}
spin_unlock_irqrestore(&pcdev->lock, lock_flags);
cam_set_addr(pcdev, pcdev->next_buf);
set_pipeline_shadow_ready(pcdev);
}
/* DMA is now in flight; suspend/stop paths poll this flag. */
pcdev->dma_state = true;
}
if (status & ISP_EOF_IRQ) {
v4l2_dbg(1, debug, &pcdev->v4l2_dev, ";ISP_EOF_IRQ ----- ");
pcdev->dma_state = false;
if (!pcdev->dma_addr && pcdev->curr_buf) {
asr_camera_eof_done(pcdev, pcdev->curr_buf, VB2_BUF_STATE_DONE);
}
tasklet_schedule(&pcdev->task_eof);
}
#ifdef CCIC_IRQ_DEBUG
if (status & DVP_FIFO_OVERUN_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";DVP_FIFO_OVERUN_IRQxxxxx");
}
if (status & DVP_FIFO_UNDERUN_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";DVP_FIFO_UNDERUN_IRQxxxxx");
}
if (status & PIP2_DMA_FIFO_UNDERUN_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";PIP2_DMA_FIFO_UNDERUN_IRQxxxxx");
}
if (status & SPI_LINE_ERR_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";SPI_LINE_ERR_IRQxxxxx");
}
if (status & SPI_CRC_ERR_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";SPI_CRC_ERR_IRQxxxxx");
}
if (status & PIP2_DMA_SOF_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";PIP2_DMA_SOF_IRQ------");
}
if (status & PIP2_DMA_FIFO_OVERRUN_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";PIP2_DMA_FIFO_OVERRUN_IRQxxxxx");
}
if (status & PIP2_DMA_EOF_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";PIP2_DMA_EOF_IRQ------");
}
if (status & ISIM_IRQ) {
v4l2_info(&pcdev->v4l2_dev, ";ISIM_IRQ------");
ccic_irq_handler(csi_irqs);
}
#endif
return IRQ_HANDLED;
}
/*
 * Parse platform data from the device tree: optional "clock-frequency"
 * (MCLK, defaults to 24), mandatory "lpm-qos", and the graph endpoint
 * used to set up the async subdev fwnode match.
 */
static int asr_camera_pdata_from_dt(struct device *dev,
				    struct asr_camera_dev *pcdev,
				    struct v4l2_async_subdev *asd)
{
	struct device_node *remote, *np = dev->of_node;
	struct v4l2_fwnode_endpoint ep = { .bus_type = 0 };
	u32 mclk_rate;
	int pm_qos;
	int err;

	/* MCLK rate defaults to 24 when the property is absent. */
	if (!of_property_read_u32(np, "clock-frequency", &mclk_rate))
		pcdev->mclk_rate = mclk_rate;
	else
		pcdev->mclk_rate = 24;

	if (of_property_read_u32(np, "lpm-qos", &pm_qos)) {
		dev_err(dev, "no pm qos defined\n");
		/*
		 * Return directly: "np" is still dev->of_node here, whose
		 * reference we never took.  The old "goto out" called
		 * of_node_put() on it and underflowed its refcount.
		 */
		return -EINVAL;
	}
	pcdev->pm_qos = pm_qos;

	/* From here on "np" is the endpoint node and must be put. */
	np = of_graph_get_next_endpoint(np, NULL);
	if (!np) {
		dev_err(dev, "could not find endpoint\n");
		return -EINVAL;
	}
	err = v4l2_fwnode_endpoint_parse(of_fwnode_handle(np), &ep);
	if (err) {
		dev_err(dev, "could not parse endpoint\n");
		goto out;
	}

	asd->match_type = V4L2_ASYNC_MATCH_FWNODE;
	remote = of_graph_get_remote_port_parent(np);
	if (remote)
		asd->match.fwnode = of_fwnode_handle(remote);
	else
		dev_notice(dev, "no remote for %pOF\n", np);
	dev_err(dev, "of_fwnode_handle, name: %s\n", np->name);
out:
	of_node_put(np);
	return err;
}
/*
 * seq_file show callback for the debugfs "capture" file: dump the current
 * pipeline and pixel-format configuration.
 *
 * Return type changed from ssize_t to int to match the
 * int (*show)(struct seq_file *, void *) signature single_open() expects.
 */
static int capture_cfg_show(struct seq_file *s, void *data)
{
	struct asr_camera_dev *pcdev = s->private;
	struct pipeline_param *pipe_config = &pcdev->pipe2_config;
	struct v4l2_pix_format *pix = &pcdev->current_pix;

	seq_printf(s, "camera pipeline dump\n");
	seq_printf(s, "stridey: %d, width: %d, height: %d, outfmt: 0x%x\n",
		   pipe_config->dma_stride_y, pipe_config->pipeline_outw, pipe_config->pipeline_outh, pipe_config->output_format);
	seq_printf(s, "camera pix_format dump\n");
	seq_printf(s, "colorspace: 0x%x, width: %d, height: %d, outfmt: 0x%x, sizeimage: 0x%x\n",
		   pix->colorspace, pix->width, pix->height, pix->pixelformat, pix->sizeimage);
	return 0;
}
/*
 * debugfs "capture" write handler: "1" starts a test capture into a
 * double-buffered coherent DMA allocation, "0" stops it and frees the
 * buffer.  Invalid input is reported but still consumes the write.
 */
static ssize_t capture_test_write(struct file *filp, const char __user *user_buf,
				  size_t count, loff_t *ppos)
{
	struct asr_camera_dev *pcdev =
		((struct seq_file *)(filp->private_data))->private;
	unsigned int timeout = 0x100000;
	u32 imagesize;
	char buf[8];
	int buf_size;
	int err;
	int on;

	if (count > 8) {
		pr_err("count must be less than 8.\n");
		return count;
	}
	buf_size = min(count, sizeof(buf) - 1);
	if (copy_from_user(buf, user_buf, buf_size))
		return -EFAULT;
	/* copy_from_user() does not NUL-terminate; sscanf() needs it. */
	buf[buf_size] = '\0';
	err = sscanf(buf, "%d", &on);
	if (err != 1) {
		pr_err("debugfs para count error\n");
		return count;
	}
	if (!pcdev->sensor.sd) {
		pr_err("No sensor!\n");
		return count;
	}
	imagesize = pcdev->current_pix.sizeimage;
	if (on) {
		/* Start: allocate a ping-pong buffer and kick the pipeline. */
		pr_info("alloc imagesize : 0x%x\n", imagesize);
		if (NULL == pcdev->dma_addr) {
			pcdev->dma_addr = dma_alloc_coherent(pcdev->v4l2_dev.dev,
					imagesize*2, &pcdev->dma_addr_phys, GFP_KERNEL);
			if (!pcdev->dma_addr) {
				dev_WARN_ONCE(pcdev->v4l2_dev.dev, 1,
					      "failed to allocate tx dma memory\n");
				return count;
			}
		}
		asrc_sensor_set_power(pcdev, 1);
		set_addr(pcdev, 0);
		pcdev->pipe2_config.pipeline_enable = 1;
		set_pipeline_enable(pcdev);
		set_pipeline_shadow_ready(pcdev);
		asrc_sensor_set_stream(pcdev, 1);
	} else {
		/* Stop: wait (bounded) for in-flight DMA, then tear down. */
		while (pcdev->dma_state && (timeout-- > 0))
			udelay(1);
		pcdev->pipe2_config.pipeline_enable = 0;
		set_pipeline_enable(pcdev);
		set_pipeline_shadow_ready(pcdev);
		asrc_sensor_set_stream(pcdev, 0);
		asrc_sensor_set_power(pcdev, 0);
#ifdef CONFIG_FB_ASR
		asrfb_camera_preview_stop();
#endif
		if (pcdev->dma_addr != NULL) {
			dma_free_coherent(pcdev->v4l2_dev.dev, imagesize*2, pcdev->dma_addr,
					  pcdev->dma_addr_phys);
			pcdev->dma_addr = NULL;
		}
	}
	return count;
}
/* Route reads of the debugfs "capture" file through capture_cfg_show(). */
static int capture_test_open(struct inode *inode, struct file *file)
{
return single_open(file, capture_cfg_show, inode->i_private);
}
const struct file_operations capture_test_fops = {
.owner = THIS_MODULE,
.open = capture_test_open,
.read = seq_read,
.write = capture_test_write,
};
/*
 * seq_file show callback for the debugfs "snapshot" file: report the
 * one-shot dump flag.  Return type changed from ssize_t to int to match
 * the show-callback signature single_open() expects.
 */
static int snapshot_cfg_show(struct seq_file *s, void *data)
{
	seq_printf(s, "snapshot: 0x%d\n", snapshot_flag);
	return 0;
}
/* Route reads of the debugfs "snapshot" file through snapshot_cfg_show(). */
static int snapshot_open(struct inode *inode, struct file *file)
{
return single_open(file, snapshot_cfg_show, inode->i_private);
}
/*
 * debugfs "snapshot" write handler: any valid integer arms a one-shot
 * frame dump (consumed by asr_camera_eof_task()).
 */
static ssize_t snapshot_write(struct file *filp, const char __user *user_buf,
			      size_t count, loff_t *ppos)
{
	struct asr_camera_dev *pcdev =
		((struct seq_file *)(filp->private_data))->private;
	char buf[8];
	int buf_size;
	int cmd;
	int err;

	if (count > 8) {
		pr_err("count must be less than 8.\n");
		return count;
	}
	buf_size = min(count, sizeof(buf) - 1);
	if (copy_from_user(buf, user_buf, buf_size))
		return -EFAULT;
	/* copy_from_user() does not NUL-terminate; sscanf() needs it. */
	buf[buf_size] = '\0';
	err = sscanf(buf, "%d", &cmd);
	if (err != 1) {
		pr_err("debugfs para count error\n");
		return count;
	}
	pr_err("input cmd: %d\n", cmd);
	if (!pcdev->sensor.sd) {
		pr_err("No sensor!\n");
		return count;
	}
	snapshot_flag = 1;
	return count;
}
const struct file_operations snapshot_test_fops = {
.owner = THIS_MODULE,
.open = snapshot_open,
.read = seq_read,
.write = snapshot_write,
};
/*
 * Platform probe: map the "ipe" and "isp" register ranges, parse DT,
 * request the IRQ, bring up the controller, register the v4l2 device and
 * async sensor notifier, and create the debugfs test files.
 *
 * Fix: the debugfs failure paths previously jumped into the error unwind
 * with ret still 0, so a failed probe tore everything down yet returned
 * success; they now set an explicit error code first.
 */
static int asr_camera_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct asr_camera_dev *pcdev;
	struct resource *res, *regs;
	struct dentry *capture_debug;
	struct dentry *snapshot;
	int ret;

	pcdev = devm_kzalloc(dev, sizeof(*pcdev), GFP_KERNEL);
	if (!pcdev)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);

	regs = platform_get_resource_byname(pdev, IORESOURCE_MEM, "ipe");
	pcdev->ipe_base = devm_ioremap_resource(dev, regs);
	if (IS_ERR(pcdev->ipe_base)) {
		dev_err(dev, "failed to map ipe base\n");
		return -ENODEV;
	}
	ipe_base_reg = pcdev->ipe_base;

	regs = platform_get_resource_byname(pdev, IORESOURCE_MEM, "isp");
	pcdev->isp_base = devm_ioremap_resource(dev, regs);
	if (IS_ERR(pcdev->isp_base)) {
		dev_err(dev, "failed to map isp base\n");
		return -ENODEV;
	}

	pcdev->res = res;
	pcdev->pm_qos_req.name = pdev->name;

	ret = asr_camera_pdata_from_dt(&pdev->dev, pcdev, &pcdev->asd);
	if (ret < 0) {
		dev_err(&pdev->dev, "Camera parse dt failed\n");
		return ret;
	}

	pcdev->irq = platform_get_irq(pdev, 0);
	if (!res || pcdev->irq < 0)
		return -ENODEV;
	dev_err(&pdev->dev, "Camera irq: %d\n", pcdev->irq);

	/* request irq */
	ret = devm_request_irq(&pdev->dev, pcdev->irq, camera_isr_handler, 0,
			       ASR_CAM_DRV_NAME, pcdev);
	if (ret) {
		dev_err(&pdev->dev, "Camera interrupt register failed\n");
		return ret;
	}
	tasklet_init(&pcdev->task_eof, asr_camera_eof_task, (unsigned long)pcdev);
	dev_err(&pdev->dev, "Camera creat tasklet\n");

	INIT_LIST_HEAD(&pcdev->capture);
	spin_lock_init(&pcdev->lock);
	mutex_init(&pcdev->mlock);
	pm_qos_add_request(&pcdev->pm_qos_req, PM_QOS_CPUIDLE_BLOCK,
			   PM_QOS_CPUIDLE_BLOCK_DEFAULT_VALUE);
	asr_camera_activate(pcdev);
	dev_set_drvdata(&pdev->dev, pcdev);

	ret = v4l2_device_register(&pdev->dev, &pcdev->v4l2_dev);
	if (ret) {
		dev_err(&pdev->dev, "v4l2_device_register failed\n");
		goto exit_deactivate;
	}

	v4l2_async_notifier_init(&pcdev->notifier);
	ret = v4l2_async_notifier_add_subdev(&pcdev->notifier, &pcdev->asd);
	if (ret) {
		fwnode_handle_put(pcdev->asd.match.fwnode);
		dev_err(&pdev->dev, "v4l2_async_notifier_add_subdev failed\n");
		goto exit_free_v4l2dev;
	}
	pcdev->notifier.ops = &asr_camera_sensor_ops;

	ret = asr_camera_init_videobuf2(pcdev);
	if (ret)
		goto exit_notifier_cleanup;

	ret = v4l2_async_notifier_register(&pcdev->v4l2_dev, &pcdev->notifier);
	if (ret) {
		dev_err(&pdev->dev, "v4l2_async_notifier_register failed\n");
		goto exit_notifier_cleanup;
	}

	if (!pcdev->dentry) {
		pcdev->dentry = debugfs_create_dir("camera", NULL);
		if (!pcdev->dentry || IS_ERR(pcdev->dentry)) {
			pr_err("camera debugfs create directory failed\n");
			ret = -ENOMEM;
			goto exit_notifier_unregister;
		}
		capture_debug = debugfs_create_file("capture", 0664,
						    pcdev->dentry, pcdev,
						    &capture_test_fops);
		if (!capture_debug) {
			pr_err("capture debugfs create file failed\n");
			ret = -ENOMEM;
			goto clean_up;
		}
		snapshot = debugfs_create_file("snapshot", 0664,
					       pcdev->dentry, pcdev,
					       &snapshot_test_fops);
		if (!snapshot) {
			pr_err("snapshot debugfs create file failed\n");
			ret = -ENOMEM;
			goto clean_up;
		}
	}
	return 0;

clean_up:
	debugfs_remove_recursive(pcdev->dentry);
exit_notifier_unregister:
	v4l2_async_notifier_unregister(&pcdev->notifier);
exit_notifier_cleanup:
	v4l2_async_notifier_cleanup(&pcdev->notifier);
exit_free_v4l2dev:
	v4l2_device_unregister(&pcdev->v4l2_dev);
exit_deactivate:
	asr_camera_deactivate(pcdev);
	return ret;
}
/*
 * Platform remove: undo everything probe set up.  Adds the previously
 * missing teardown of the debugfs tree, the async notifier state
 * (v4l2_async_notifier_cleanup), the EOF tasklet and the pm_qos request —
 * all of which probe creates but the old remove never released.
 */
static int asr_camera_remove(struct platform_device *pdev)
{
	struct asr_camera_dev *pcdev = dev_get_drvdata(&pdev->dev);

	debugfs_remove_recursive(pcdev->dentry);
	asr_camera_deactivate(pcdev);
	v4l2_async_notifier_unregister(&pcdev->notifier);
	v4l2_async_notifier_cleanup(&pcdev->notifier);
	v4l2_device_unregister(&pcdev->v4l2_dev);
	tasklet_kill(&pcdev->task_eof);
	pm_qos_remove_request(&pcdev->pm_qos_req);
	dev_info(&pdev->dev, "ASR Camera driver unloaded\n");
	return 0;
}
/*
 * System power-management (suspend/resume) operations
 */
/*
 * System suspend: busy-wait (bounded, ~1s worst case) for any in-flight
 * DMA to complete, disable the pipeline, stop/power-off the sensor and
 * deactivate the controller.
 */
static int asr_camera_suspend(struct device *dev)
{
struct asr_camera_dev *pcdev = dev_get_drvdata(dev);
unsigned int timeout = 0x100000;
int ret = 0;
/* dma_state is cleared by the ISR on ISP EOF. */
while (pcdev->dma_state && (timeout-- > 0))
udelay(1);
pcdev->pipe2_config.pipeline_enable = 0;
set_pipeline_enable(pcdev);
set_pipeline_shadow_ready(pcdev);
if (pcdev->sensor.sd) {
ret = asrc_sensor_set_stream(pcdev, 0);
ret |= asrc_sensor_set_power(pcdev, 0);
}
asr_camera_deactivate(pcdev);
return ret;
}
/*
 * System resume: re-initialize the media PMU, MCLK and QoS state, then
 * reactivate the controller and reprogram the capture pipeline.
 */
static int asr_camera_resume(struct device *dev)
{
	struct asr_camera_dev *pcdev = dev_get_drvdata(dev);

	init_media_pmu();
	config_mclk(pcdev->ipe_base, pcdev->mclk_rate);
	pm_qos_update_request(&pcdev->pm_qos_req, pcdev->pm_qos);
	asr_camera_activate(pcdev);
	asr_camera_pipe_set(pcdev);
	return 0;
}
/* System sleep PM callbacks. */
static const struct dev_pm_ops asr_camera_pm = {
.suspend = asr_camera_suspend,
.resume = asr_camera_resume,
};
static const struct of_device_id asr_camera_of_match[] = {
{ .compatible = "asr,camera", },
{},
};
MODULE_DEVICE_TABLE(of, asr_camera_of_match);
/* Platform driver glue and module registration. */
static struct platform_driver asr_camera_driver = {
.driver = {
.name = ASR_CAM_DRV_NAME,
.pm = &asr_camera_pm,
.of_match_table = of_match_ptr(asr_camera_of_match),
},
.probe = asr_camera_probe,
.remove = asr_camera_remove,
};
module_platform_driver(asr_camera_driver);
MODULE_AUTHOR("ASR Inc.");
MODULE_DESCRIPTION("ASR GCISP platform driver");
MODULE_LICENSE("GPL v2");