// SPDX-License-Identifier: GPL-2.0
/*
* camss-vfe-4-1.c
*
* Qualcomm MSM Camera Subsystem - VFE (Video Front End) Module v4.1
*
* Copyright (c) 2013-2015, The Linux Foundation. All rights reserved.
* Copyright (C) 2015-2018 Linaro Ltd.
*/
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include "camss.h"
#include "camss-vfe.h"
#include "camss-vfe-gen1.h"
#define VFE_0_HW_VERSION 0x000
#define VFE_0_GLOBAL_RESET_CMD 0x00c
#define VFE_0_GLOBAL_RESET_CMD_CORE BIT(0)
#define VFE_0_GLOBAL_RESET_CMD_CAMIF BIT(1)
#define VFE_0_GLOBAL_RESET_CMD_BUS BIT(2)
#define VFE_0_GLOBAL_RESET_CMD_BUS_BDG BIT(3)
#define VFE_0_GLOBAL_RESET_CMD_REGISTER BIT(4)
#define VFE_0_GLOBAL_RESET_CMD_TIMER BIT(5)
#define VFE_0_GLOBAL_RESET_CMD_PM BIT(6)
#define VFE_0_GLOBAL_RESET_CMD_BUS_MISR BIT(7)
#define VFE_0_GLOBAL_RESET_CMD_TESTGEN BIT(8)
#define VFE_0_MODULE_CFG 0x018
#define VFE_0_MODULE_CFG_DEMUX BIT(2)
#define VFE_0_MODULE_CFG_CHROMA_UPSAMPLE BIT(3)
#define VFE_0_MODULE_CFG_SCALE_ENC BIT(23)
#define VFE_0_MODULE_CFG_CROP_ENC BIT(27)
#define VFE_0_CORE_CFG 0x01c
#define VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR 0x4
#define VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB 0x5
#define VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY 0x6
#define VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY 0x7
#define VFE_0_IRQ_CMD 0x024
#define VFE_0_IRQ_CMD_GLOBAL_CLEAR BIT(0)
#define VFE_0_IRQ_MASK_0 0x028
#define VFE_0_IRQ_MASK_0_CAMIF_SOF BIT(0)
#define VFE_0_IRQ_MASK_0_CAMIF_EOF BIT(1)
#define VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n) BIT((n) + 5)
#define VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(n) \
((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n))
#define VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(n) BIT((n) + 8)
#define VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(n) BIT((n) + 25)
#define VFE_0_IRQ_MASK_0_RESET_ACK BIT(31)
#define VFE_0_IRQ_MASK_1 0x02c
#define VFE_0_IRQ_MASK_1_CAMIF_ERROR BIT(0)
#define VFE_0_IRQ_MASK_1_VIOLATION BIT(7)
#define VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK BIT(8)
#define VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(n) BIT((n) + 9)
#define VFE_0_IRQ_MASK_1_RDIn_SOF(n) BIT((n) + 29)
#define VFE_0_IRQ_CLEAR_0 0x030
#define VFE_0_IRQ_CLEAR_1 0x034
#define VFE_0_IRQ_STATUS_0 0x038
#define VFE_0_IRQ_STATUS_0_CAMIF_SOF BIT(0)
#define VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n) BIT((n) + 5)
#define VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(n) \
((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n))
#define VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(n) BIT((n) + 8)
#define VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(n) BIT((n) + 25)
#define VFE_0_IRQ_STATUS_0_RESET_ACK BIT(31)
#define VFE_0_IRQ_STATUS_1 0x03c
#define VFE_0_IRQ_STATUS_1_VIOLATION BIT(7)
#define VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK BIT(8)
#define VFE_0_IRQ_STATUS_1_RDIn_SOF(n) BIT((n) + 29)
#define VFE_0_IRQ_COMPOSITE_MASK_0 0x40
#define VFE_0_VIOLATION_STATUS 0x48
#define VFE_0_BUS_CMD 0x4c
#define VFE_0_BUS_CMD_Mx_RLD_CMD(x) BIT(x)
#define VFE_0_BUS_CFG 0x050
#define VFE_0_BUS_XBAR_CFG_x(x) (0x58 + 0x4 * ((x) / 2))
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN BIT(1)
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA (0x3 << 4)
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT 8
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA 0
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 5
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 6
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 7
#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(n) (0x06c + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT 0
#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT 1
#define VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(n) (0x070 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(n) (0x074 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(n) (0x078 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT 2
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK (0x1f << 2)
#define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(n) (0x07c + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT 16
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(n) (0x080 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(n) (0x084 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(n) \
(0x088 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(n) \
(0x08c + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF 0xffffffff
#define VFE_0_BUS_PING_PONG_STATUS 0x268
#define VFE_0_BUS_BDG_CMD 0x2c0
#define VFE_0_BUS_BDG_CMD_HALT_REQ 1
#define VFE_0_BUS_BDG_QOS_CFG_0 0x2c4
#define VFE_0_BUS_BDG_QOS_CFG_0_CFG 0xaaa5aaa5
#define VFE_0_BUS_BDG_QOS_CFG_1 0x2c8
#define VFE_0_BUS_BDG_QOS_CFG_2 0x2cc
#define VFE_0_BUS_BDG_QOS_CFG_3 0x2d0
#define VFE_0_BUS_BDG_QOS_CFG_4 0x2d4
#define VFE_0_BUS_BDG_QOS_CFG_5 0x2d8
#define VFE_0_BUS_BDG_QOS_CFG_6 0x2dc
#define VFE_0_BUS_BDG_QOS_CFG_7 0x2e0
#define VFE_0_BUS_BDG_QOS_CFG_7_CFG 0x0001aaa5
#define VFE_0_RDI_CFG_x(x) (0x2e8 + (0x4 * (x)))
#define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT 28
#define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK (0xf << 28)
#define VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT 4
#define VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK (0xf << 4)
#define VFE_0_RDI_CFG_x_RDI_EN_BIT BIT(2)
#define VFE_0_RDI_CFG_x_MIPI_EN_BITS 0x3
#define VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(r) BIT(16 + (r))
#define VFE_0_CAMIF_CMD 0x2f4
#define VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY 0
#define VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY 1
#define VFE_0_CAMIF_CMD_NO_CHANGE 3
#define VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS BIT(2)
#define VFE_0_CAMIF_CFG 0x2f8
#define VFE_0_CAMIF_CFG_VFE_OUTPUT_EN BIT(6)
#define VFE_0_CAMIF_FRAME_CFG 0x300
#define VFE_0_CAMIF_WINDOW_WIDTH_CFG 0x304
#define VFE_0_CAMIF_WINDOW_HEIGHT_CFG 0x308
#define VFE_0_CAMIF_SUBSAMPLE_CFG_0 0x30c
#define VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN 0x314
#define VFE_0_CAMIF_STATUS 0x31c
#define VFE_0_CAMIF_STATUS_HALT BIT(31)
#define VFE_0_REG_UPDATE 0x378
#define VFE_0_REG_UPDATE_RDIn(n) BIT(1 + (n))
#define VFE_0_REG_UPDATE_line_n(n) \
((n) == VFE_LINE_PIX ? 1 : VFE_0_REG_UPDATE_RDIn(n))
#define VFE_0_DEMUX_CFG 0x424
#define VFE_0_DEMUX_CFG_PERIOD 0x3
#define VFE_0_DEMUX_GAIN_0 0x428
#define VFE_0_DEMUX_GAIN_0_CH0_EVEN (0x80 << 0)
#define VFE_0_DEMUX_GAIN_0_CH0_ODD (0x80 << 16)
#define VFE_0_DEMUX_GAIN_1 0x42c
#define VFE_0_DEMUX_GAIN_1_CH1 (0x80 << 0)
#define VFE_0_DEMUX_GAIN_1_CH2 (0x80 << 16)
#define VFE_0_DEMUX_EVEN_CFG 0x438
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV 0x9cac
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU 0xac9c
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY 0xc9ca
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY 0xcac9
#define VFE_0_DEMUX_ODD_CFG 0x43c
#define VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV 0x9cac
#define VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU 0xac9c
#define VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY 0xc9ca
#define VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY 0xcac9
#define VFE_0_SCALE_ENC_Y_CFG 0x75c
#define VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE 0x760
#define VFE_0_SCALE_ENC_Y_H_PHASE 0x764
#define VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE 0x76c
#define VFE_0_SCALE_ENC_Y_V_PHASE 0x770
#define VFE_0_SCALE_ENC_CBCR_CFG 0x778
#define VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE 0x77c
#define VFE_0_SCALE_ENC_CBCR_H_PHASE 0x780
#define VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE 0x790
#define VFE_0_SCALE_ENC_CBCR_V_PHASE 0x794
#define VFE_0_CROP_ENC_Y_WIDTH 0x854
#define VFE_0_CROP_ENC_Y_HEIGHT 0x858
#define VFE_0_CROP_ENC_CBCR_WIDTH 0x85c
#define VFE_0_CROP_ENC_CBCR_HEIGHT 0x860
#define VFE_0_CLAMP_ENC_MAX_CFG 0x874
#define VFE_0_CLAMP_ENC_MAX_CFG_CH0 (0xff << 0)
#define VFE_0_CLAMP_ENC_MAX_CFG_CH1 (0xff << 8)
#define VFE_0_CLAMP_ENC_MAX_CFG_CH2 (0xff << 16)
#define VFE_0_CLAMP_ENC_MIN_CFG 0x878
#define VFE_0_CLAMP_ENC_MIN_CFG_CH0 (0x0 << 0)
#define VFE_0_CLAMP_ENC_MIN_CFG_CH1 (0x0 << 8)
#define VFE_0_CLAMP_ENC_MIN_CFG_CH2 (0x0 << 16)
#define VFE_0_CGC_OVERRIDE_1 0x974
#define VFE_0_CGC_OVERRIDE_1_IMAGE_Mx_CGC_OVERRIDE(x) BIT(x)
#define CAMIF_TIMEOUT_SLEEP_US 1000
#define CAMIF_TIMEOUT_ALL_US 1000000
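/* Total write master buffer (UB) size on VFE0, shared equally by the three RDI outputs */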
#define MSM_VFE_VFE0_UB_SIZE 1023
#define MSM_VFE_VFE0_UB_SIZE_RDI (MSM_VFE_VFE0_UB_SIZE / 3)
static u32 vfe_hw_version(struct vfe_device *vfe)
{
u32 hw_version = readl_relaxed(vfe->base + VFE_0_HW_VERSION);
dev_dbg(vfe->camss->dev, "VFE HW Version = 0x%08x\n", hw_version);
return hw_version;
}
static u16 vfe_get_ub_size(u8 vfe_id)
{
if (vfe_id == 0)
return MSM_VFE_VFE0_UB_SIZE_RDI;
return 0;
}
static inline void vfe_reg_clr(struct vfe_device *vfe, u32 reg, u32 clr_bits)
{
u32 bits = readl_relaxed(vfe->base + reg);
writel_relaxed(bits & ~clr_bits, vfe->base + reg);
}
static inline void vfe_reg_set(struct vfe_device *vfe, u32 reg, u32 set_bits)
{
u32 bits = readl_relaxed(vfe->base + reg);
writel_relaxed(bits | set_bits, vfe->base + reg);
}
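/* Reset all VFE blocks; completion is signalled via the RESET_ACK interrupt */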
static void vfe_global_reset(struct vfe_device *vfe)
{
u32 reset_bits = VFE_0_GLOBAL_RESET_CMD_TESTGEN |
VFE_0_GLOBAL_RESET_CMD_BUS_MISR |
VFE_0_GLOBAL_RESET_CMD_PM |
VFE_0_GLOBAL_RESET_CMD_TIMER |
VFE_0_GLOBAL_RESET_CMD_REGISTER |
VFE_0_GLOBAL_RESET_CMD_BUS_BDG |
VFE_0_GLOBAL_RESET_CMD_BUS |
VFE_0_GLOBAL_RESET_CMD_CAMIF |
VFE_0_GLOBAL_RESET_CMD_CORE;
writel_relaxed(reset_bits, vfe->base + VFE_0_GLOBAL_RESET_CMD);
}
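/* Request a bus bridge halt; completion is signalled via the BUS_BDG_HALT_ACK interrupt */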
static void vfe_halt_request(struct vfe_device *vfe)
{
writel_relaxed(VFE_0_BUS_BDG_CMD_HALT_REQ,
vfe->base + VFE_0_BUS_BDG_CMD);
}
static void vfe_halt_clear(struct vfe_device *vfe)
{
writel_relaxed(0x0, vfe->base + VFE_0_BUS_BDG_CMD);
}
static void vfe_wm_enable(struct vfe_device *vfe, u8 wm, u8 enable)
{
if (enable)
vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
else
vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
}
static void vfe_wm_frame_based(struct vfe_device *vfe, u8 wm, u8 enable)
{
if (enable)
vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT);
else
vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT);
}
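/* Per-plane write master geometry; for NV12/NV21 the chroma plane is half the luma height */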
static void vfe_get_wm_sizes(struct v4l2_pix_format_mplane *pix, u8 plane,
u16 *width, u16 *height, u16 *bytesperline)
{
*width = pix->width;
*height = pix->height;
*bytesperline = pix->plane_fmt[0].bytesperline;
if (pix->pixelformat == V4L2_PIX_FMT_NV12 ||
pix->pixelformat == V4L2_PIX_FMT_NV21)
if (plane == 1)
*height /= 2;
}
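/*
 * Program image size and buffer configuration for a line-based write master.
 * Widths are converted to bus words with vfe_word_per_line().
 */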
static void vfe_wm_line_based(struct vfe_device *vfe, u32 wm,
struct v4l2_pix_format_mplane *pix,
u8 plane, u32 enable)
{
u32 reg;
if (enable) {
u16 width = 0, height = 0, bytesperline = 0, wpl;
vfe_get_wm_sizes(pix, plane, &width, &height, &bytesperline);
wpl = vfe_word_per_line(pix->pixelformat, width);
reg = height - 1;
reg |= ((wpl + 1) / 2 - 1) << 16;
writel_relaxed(reg, vfe->base +
VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));
wpl = vfe_word_per_line(pix->pixelformat, bytesperline);
reg = 0x3;
reg |= (height - 1) << 4;
reg |= wpl << 16;
writel_relaxed(reg, vfe->base +
VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
} else {
writel_relaxed(0, vfe->base +
VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));
writel_relaxed(0, vfe->base +
VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
}
}
static void vfe_wm_set_framedrop_period(struct vfe_device *vfe, u8 wm, u8 per)
{
u32 reg;
reg = readl_relaxed(vfe->base +
VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
reg &= ~(VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK);
reg |= (per << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT)
& VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK;
writel_relaxed(reg,
vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
}
static void vfe_wm_set_framedrop_pattern(struct vfe_device *vfe, u8 wm,
u32 pattern)
{
writel_relaxed(pattern,
vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(wm));
}
static void vfe_wm_set_ub_cfg(struct vfe_device *vfe, u8 wm,
u16 offset, u16 depth)
{
u32 reg;
reg = (offset << VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT) |
depth;
writel_relaxed(reg, vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(wm));
}
static void vfe_bus_reload_wm(struct vfe_device *vfe, u8 wm)
{
	/* Enforce ordering between previous register writes and the reload command */
	wmb();
	writel_relaxed(VFE_0_BUS_CMD_Mx_RLD_CMD(wm), vfe->base + VFE_0_BUS_CMD);
	/* Enforce ordering between the reload command and subsequent register writes */
	wmb();
}
static void vfe_wm_set_ping_addr(struct vfe_device *vfe, u8 wm, u32 addr)
{
writel_relaxed(addr,
vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(wm));
}
static void vfe_wm_set_pong_addr(struct vfe_device *vfe, u8 wm, u32 addr)
{
writel_relaxed(addr,
vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(wm));
}
static int vfe_wm_get_ping_pong_status(struct vfe_device *vfe, u8 wm)
{
u32 reg;
reg = readl_relaxed(vfe->base + VFE_0_BUS_PING_PONG_STATUS);
return (reg >> wm) & 0x1;
}
static void vfe_bus_enable_wr_if(struct vfe_device *vfe, u8 enable)
{
if (enable)
writel_relaxed(0x10000009, vfe->base + VFE_0_BUS_CFG);
else
writel_relaxed(0, vfe->base + VFE_0_BUS_CFG);
}
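/*
 * Route an RDI interface to a write master through the bus crossbar (XBAR).
 * Adjacent write masters share an XBAR_CFG register; odd-numbered masters
 * use its upper 16 bits.
 */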
static void vfe_bus_connect_wm_to_rdi(struct vfe_device *vfe, u8 wm,
enum vfe_line_id id)
{
u32 reg;
reg = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
reg |= VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(id);
vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), reg);
reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
reg |= ((3 * id) << VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT) &
VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK;
vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id), reg);
switch (id) {
case VFE_LINE_RDI0:
default:
reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
break;
case VFE_LINE_RDI1:
reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
break;
case VFE_LINE_RDI2:
reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
break;
}
if (wm % 2 == 1)
reg <<= 16;
vfe_reg_set(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}
static void vfe_wm_set_subsample(struct vfe_device *vfe, u8 wm)
{
writel_relaxed(VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF,
vfe->base +
VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(wm));
}
static void vfe_bus_disconnect_wm_from_rdi(struct vfe_device *vfe, u8 wm,
enum vfe_line_id id)
{
u32 reg;
reg = VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(id);
vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(0), reg);
reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id), reg);
switch (id) {
case VFE_LINE_RDI0:
default:
reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
break;
case VFE_LINE_RDI1:
reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
break;
case VFE_LINE_RDI2:
reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
break;
}
if (wm % 2 == 1)
reg <<= 16;
vfe_reg_clr(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}
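/*
 * Configure the crossbar for the PIX line encoder outputs: the first write
 * master carries luma, the second is paired for chroma, with inter/intra
 * swap enabled for NV12/NV16.
 */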
static void vfe_set_xbar_cfg(struct vfe_device *vfe, struct vfe_output *output,
u8 enable)
{
struct vfe_line *line = container_of(output, struct vfe_line, output);
u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
u32 reg;
unsigned int i;
for (i = 0; i < output->wm_num; i++) {
if (i == 0) {
reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA <<
VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
} else if (i == 1) {
reg = VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN;
if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV16)
reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA;
} else {
/* On current devices output->wm_num is always <= 2 */
break;
}
if (output->wm_idx[i] % 2 == 1)
reg <<= 16;
if (enable)
vfe_reg_set(vfe,
VFE_0_BUS_XBAR_CFG_x(output->wm_idx[i]),
reg);
else
vfe_reg_clr(vfe,
VFE_0_BUS_XBAR_CFG_x(output->wm_idx[i]),
reg);
}
}
static void vfe_set_realign_cfg(struct vfe_device *vfe, struct vfe_line *line,
u8 enable)
{
/* empty */
}
static void vfe_set_rdi_cid(struct vfe_device *vfe, enum vfe_line_id id, u8 cid)
{
vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id),
VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK);
vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id),
cid << VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT);
}
static void vfe_reg_update(struct vfe_device *vfe, enum vfe_line_id line_id)
{
vfe->reg_update |= VFE_0_REG_UPDATE_line_n(line_id);
	/* Enforce ordering between previous register writes and the reg update */
	wmb();
	writel_relaxed(vfe->reg_update, vfe->base + VFE_0_REG_UPDATE);
	/* Enforce ordering between the reg update and subsequent register writes */
	wmb();
}
static inline void vfe_reg_update_clear(struct vfe_device *vfe,
enum vfe_line_id line_id)
{
vfe->reg_update &= ~VFE_0_REG_UPDATE_line_n(line_id);
}
static void vfe_enable_irq_wm_line(struct vfe_device *vfe, u8 wm,
enum vfe_line_id line_id, u8 enable)
{
u32 irq_en0 = VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(wm) |
VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
u32 irq_en1 = VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(wm) |
VFE_0_IRQ_MASK_1_RDIn_SOF(line_id);
if (enable) {
vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
} else {
vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
}
}
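/*
 * Enable or disable the interrupts used by a PIX line: CAMIF SOF/EOF and
 * reg update on IRQ_MASK_0, CAMIF error and per-write-master bus overflow
 * on IRQ_MASK_1, plus the composite done group for this output's write
 * masters.
 */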
static void vfe_enable_irq_pix_line(struct vfe_device *vfe, u8 comp,
enum vfe_line_id line_id, u8 enable)
{
struct vfe_output *output = &vfe->line[line_id].output;
unsigned int i;
u32 irq_en0;
u32 irq_en1;
u32 comp_mask = 0;
irq_en0 = VFE_0_IRQ_MASK_0_CAMIF_SOF;
irq_en0 |= VFE_0_IRQ_MASK_0_CAMIF_EOF;
irq_en0 |= VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(comp);
irq_en0 |= VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
irq_en1 = VFE_0_IRQ_MASK_1_CAMIF_ERROR;
for (i = 0; i < output->wm_num; i++) {
irq_en1 |= VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(
output->wm_idx[i]);
comp_mask |= (1 << output->wm_idx[i]) << comp * 8;
}
if (enable) {
vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
} else {
vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
}
}
static void vfe_enable_irq_common(struct vfe_device *vfe)
{
u32 irq_en0 = VFE_0_IRQ_MASK_0_RESET_ACK;
u32 irq_en1 = VFE_0_IRQ_MASK_1_VIOLATION |
VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK;
vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
}
static void vfe_set_demux_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
u32 val, even_cfg, odd_cfg;
writel_relaxed(VFE_0_DEMUX_CFG_PERIOD, vfe->base + VFE_0_DEMUX_CFG);
val = VFE_0_DEMUX_GAIN_0_CH0_EVEN | VFE_0_DEMUX_GAIN_0_CH0_ODD;
writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_0);
val = VFE_0_DEMUX_GAIN_1_CH1 | VFE_0_DEMUX_GAIN_1_CH2;
writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_1);
switch (line->fmt[MSM_VFE_PAD_SINK].code) {
case MEDIA_BUS_FMT_YUYV8_1X16:
even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV;
odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV;
break;
case MEDIA_BUS_FMT_YVYU8_1X16:
even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU;
odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU;
break;
case MEDIA_BUS_FMT_UYVY8_1X16:
default:
even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY;
odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY;
break;
case MEDIA_BUS_FMT_VYUY8_1X16:
even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY;
odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY;
break;
}
writel_relaxed(even_cfg, vfe->base + VFE_0_DEMUX_EVEN_CFG);
writel_relaxed(odd_cfg, vfe->base + VFE_0_DEMUX_ODD_CFG);
}
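/*
 * Program the encoder scaler. For each direction the phase multiplier is
 * input * 2^(13 + interp_reso) / output. Chroma is scaled to half the
 * composed width, and additionally to half the composed height for
 * NV12/NV21.
 */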
static void vfe_set_scale_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
u32 reg;
u16 input, output;
u8 interp_reso;
u32 phase_mult;
writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_Y_CFG);
input = line->fmt[MSM_VFE_PAD_SINK].width;
output = line->compose.width;
reg = (output << 16) | input;
writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE);
interp_reso = vfe_calc_interp_reso(input, output);
phase_mult = input * (1 << (13 + interp_reso)) / output;
reg = (interp_reso << 20) | phase_mult;
writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_PHASE);
input = line->fmt[MSM_VFE_PAD_SINK].height;
output = line->compose.height;
reg = (output << 16) | input;
writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE);
interp_reso = vfe_calc_interp_reso(input, output);
phase_mult = input * (1 << (13 + interp_reso)) / output;
reg = (interp_reso << 20) | phase_mult;
writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_PHASE);
writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_CBCR_CFG);
input = line->fmt[MSM_VFE_PAD_SINK].width;
output = line->compose.width / 2;
reg = (output << 16) | input;
writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE);
interp_reso = vfe_calc_interp_reso(input, output);
phase_mult = input * (1 << (13 + interp_reso)) / output;
reg = (interp_reso << 20) | phase_mult;
writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_PHASE);
input = line->fmt[MSM_VFE_PAD_SINK].height;
output = line->compose.height;
if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21)
output = line->compose.height / 2;
reg = (output << 16) | input;
writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE);
interp_reso = vfe_calc_interp_reso(input, output);
phase_mult = input * (1 << (13 + interp_reso)) / output;
reg = (interp_reso << 20) | phase_mult;
writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_PHASE);
}
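/*
 * Program the encoder crop window as (first << 16) | last coordinates.
 * Chroma uses half the luma width, and half the luma height for NV12/NV21.
 */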
static void vfe_set_crop_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
u32 reg;
u16 first, last;
first = line->crop.left;
last = line->crop.left + line->crop.width - 1;
reg = (first << 16) | last;
writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_WIDTH);
first = line->crop.top;
last = line->crop.top + line->crop.height - 1;
reg = (first << 16) | last;
writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_HEIGHT);
first = line->crop.left / 2;
last = line->crop.left / 2 + line->crop.width / 2 - 1;
reg = (first << 16) | last;
writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_WIDTH);
first = line->crop.top;
last = line->crop.top + line->crop.height - 1;
if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21) {
first = line->crop.top / 2;
last = line->crop.top / 2 + line->crop.height / 2 - 1;
}
reg = (first << 16) | last;
writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_HEIGHT);
}
static void vfe_set_clamp_cfg(struct vfe_device *vfe)
{
u32 val = VFE_0_CLAMP_ENC_MAX_CFG_CH0 |
VFE_0_CLAMP_ENC_MAX_CFG_CH1 |
VFE_0_CLAMP_ENC_MAX_CFG_CH2;
writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MAX_CFG);
val = VFE_0_CLAMP_ENC_MIN_CFG_CH0 |
VFE_0_CLAMP_ENC_MIN_CFG_CH1 |
VFE_0_CLAMP_ENC_MIN_CFG_CH2;
writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MIN_CFG);
}
static void vfe_set_qos(struct vfe_device *vfe)
{
u32 val = VFE_0_BUS_BDG_QOS_CFG_0_CFG;
u32 val7 = VFE_0_BUS_BDG_QOS_CFG_7_CFG;
writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_0);
writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_1);
writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_2);
writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_3);
writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_4);
writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_5);
writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_6);
writel_relaxed(val7, vfe->base + VFE_0_BUS_BDG_QOS_CFG_7);
}
static void vfe_set_ds(struct vfe_device *vfe)
{
/* empty */
}
static void vfe_set_cgc_override(struct vfe_device *vfe, u8 wm, u8 enable)
{
u32 val = VFE_0_CGC_OVERRIDE_1_IMAGE_Mx_CGC_OVERRIDE(wm);
if (enable)
vfe_reg_set(vfe, VFE_0_CGC_OVERRIDE_1, val);
else
vfe_reg_clr(vfe, VFE_0_CGC_OVERRIDE_1, val);
	/* Enforce ordering of the CGC override write before subsequent register writes */
	wmb();
}
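/*
 * Configure CAMIF for the PIX interface: the pixel pattern comes from the
 * sink media bus code and the frame/window geometry from the sink format.
 */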
static void vfe_set_camif_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
u32 val;
switch (line->fmt[MSM_VFE_PAD_SINK].code) {
case MEDIA_BUS_FMT_YUYV8_1X16:
val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR;
break;
case MEDIA_BUS_FMT_YVYU8_1X16:
val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB;
break;
case MEDIA_BUS_FMT_UYVY8_1X16:
default:
val = VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY;
break;
case MEDIA_BUS_FMT_VYUY8_1X16:
val = VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY;
break;
}
writel_relaxed(val, vfe->base + VFE_0_CORE_CFG);
val = line->fmt[MSM_VFE_PAD_SINK].width * 2;
val |= line->fmt[MSM_VFE_PAD_SINK].height << 16;
writel_relaxed(val, vfe->base + VFE_0_CAMIF_FRAME_CFG);
val = line->fmt[MSM_VFE_PAD_SINK].width * 2 - 1;
writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_WIDTH_CFG);
val = line->fmt[MSM_VFE_PAD_SINK].height - 1;
writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_HEIGHT_CFG);
val = 0xffffffff;
writel_relaxed(val, vfe->base + VFE_0_CAMIF_SUBSAMPLE_CFG_0);
val = 0xffffffff;
writel_relaxed(val, vfe->base + VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN);
val = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), val);
val = VFE_0_CAMIF_CFG_VFE_OUTPUT_EN;
writel_relaxed(val, vfe->base + VFE_0_CAMIF_CFG);
}
static void vfe_set_camif_cmd(struct vfe_device *vfe, u8 enable)
{
u32 cmd;
cmd = VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS | VFE_0_CAMIF_CMD_NO_CHANGE;
writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
	/* Make sure the CAMIF status clear is issued before the new command */
	wmb();
if (enable)
cmd = VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY;
else
cmd = VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY;
writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
}
static void vfe_set_module_cfg(struct vfe_device *vfe, u8 enable)
{
u32 val = VFE_0_MODULE_CFG_DEMUX |
VFE_0_MODULE_CFG_CHROMA_UPSAMPLE |
VFE_0_MODULE_CFG_SCALE_ENC |
VFE_0_MODULE_CFG_CROP_ENC;
if (enable)
writel_relaxed(val, vfe->base + VFE_0_MODULE_CFG);
else
writel_relaxed(0x0, vfe->base + VFE_0_MODULE_CFG);
}
static int vfe_camif_wait_for_stop(struct vfe_device *vfe, struct device *dev)
{
u32 val;
int ret;
ret = readl_poll_timeout(vfe->base + VFE_0_CAMIF_STATUS,
val,
(val & VFE_0_CAMIF_STATUS_HALT),
CAMIF_TIMEOUT_SLEEP_US,
CAMIF_TIMEOUT_ALL_US);
if (ret < 0)
dev_err(dev, "%s: camif stop timeout\n", __func__);
return ret;
}
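/* Read and clear both IRQ status registers, then issue the global IRQ clear command */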
static void vfe_isr_read(struct vfe_device *vfe, u32 *value0, u32 *value1)
{
*value0 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_0);
*value1 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_1);
writel_relaxed(*value0, vfe->base + VFE_0_IRQ_CLEAR_0);
writel_relaxed(*value1, vfe->base + VFE_0_IRQ_CLEAR_1);
	/* Enforce ordering between the clear writes and the global clear command */
	wmb();
writel_relaxed(VFE_0_IRQ_CMD_GLOBAL_CLEAR, vfe->base + VFE_0_IRQ_CMD);
}
static void vfe_violation_read(struct vfe_device *vfe)
{
u32 violation = readl_relaxed(vfe->base + VFE_0_VIOLATION_STATUS);
pr_err_ratelimited("VFE: violation = 0x%08x\n", violation);
}
/*
* vfe_isr - VFE module interrupt handler
* @irq: Interrupt line
* @dev: VFE device
*
* Return IRQ_HANDLED on success
*/
static irqreturn_t vfe_isr(int irq, void *dev)
{
struct vfe_device *vfe = dev;
u32 value0, value1;
int i, j;
vfe->res->hw_ops->isr_read(vfe, &value0, &value1);
dev_dbg(vfe->camss->dev, "VFE: status0 = 0x%08x, status1 = 0x%08x\n",
value0, value1);
if (value0 & VFE_0_IRQ_STATUS_0_RESET_ACK)
vfe->isr_ops.reset_ack(vfe);
if (value1 & VFE_0_IRQ_STATUS_1_VIOLATION)
vfe->res->hw_ops->violation_read(vfe);
if (value1 & VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK)
vfe->isr_ops.halt_ack(vfe);
for (i = VFE_LINE_RDI0; i <= VFE_LINE_PIX; i++)
if (value0 & VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(i))
vfe->isr_ops.reg_update(vfe, i);
if (value0 & VFE_0_IRQ_STATUS_0_CAMIF_SOF)
vfe->isr_ops.sof(vfe, VFE_LINE_PIX);
for (i = VFE_LINE_RDI0; i <= VFE_LINE_RDI2; i++)
if (value1 & VFE_0_IRQ_STATUS_1_RDIn_SOF(i))
vfe->isr_ops.sof(vfe, i);
for (i = 0; i < MSM_VFE_COMPOSITE_IRQ_NUM; i++)
if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(i)) {
vfe->isr_ops.comp_done(vfe, i);
for (j = 0; j < ARRAY_SIZE(vfe->wm_output_map); j++)
if (vfe->wm_output_map[j] == VFE_LINE_PIX)
value0 &= ~VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(j);
}
for (i = 0; i < MSM_VFE_IMAGE_MASTERS_NUM; i++)
if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(i))
vfe->isr_ops.wm_done(vfe, i);
return IRQ_HANDLED;
}
/*
 * vfe_4_1_pm_domain_off - Disable power domains specific to this VFE.
* @vfe: VFE Device
*/
static void vfe_4_1_pm_domain_off(struct vfe_device *vfe)
{
/* nop */
}
/*
 * vfe_4_1_pm_domain_on - Enable power domains specific to this VFE.
* @vfe: VFE Device
*/
static int vfe_4_1_pm_domain_on(struct vfe_device *vfe)
{
return 0;
}
static const struct vfe_hw_ops_gen1 vfe_ops_gen1_4_1 = {
.bus_connect_wm_to_rdi = vfe_bus_connect_wm_to_rdi,
.bus_disconnect_wm_from_rdi = vfe_bus_disconnect_wm_from_rdi,
.bus_enable_wr_if = vfe_bus_enable_wr_if,
.bus_reload_wm = vfe_bus_reload_wm,
.camif_wait_for_stop = vfe_camif_wait_for_stop,
.enable_irq_common = vfe_enable_irq_common,
.enable_irq_pix_line = vfe_enable_irq_pix_line,
.enable_irq_wm_line = vfe_enable_irq_wm_line,
.get_ub_size = vfe_get_ub_size,
.halt_clear = vfe_halt_clear,
.halt_request = vfe_halt_request,
.set_camif_cfg = vfe_set_camif_cfg,
.set_camif_cmd = vfe_set_camif_cmd,
.set_cgc_override = vfe_set_cgc_override,
.set_clamp_cfg = vfe_set_clamp_cfg,
.set_crop_cfg = vfe_set_crop_cfg,
.set_demux_cfg = vfe_set_demux_cfg,
.set_ds = vfe_set_ds,
.set_module_cfg = vfe_set_module_cfg,
.set_qos = vfe_set_qos,
.set_rdi_cid = vfe_set_rdi_cid,
.set_realign_cfg = vfe_set_realign_cfg,
.set_scale_cfg = vfe_set_scale_cfg,
.set_xbar_cfg = vfe_set_xbar_cfg,
.wm_enable = vfe_wm_enable,
.wm_frame_based = vfe_wm_frame_based,
.wm_get_ping_pong_status = vfe_wm_get_ping_pong_status,
.wm_line_based = vfe_wm_line_based,
.wm_set_framedrop_pattern = vfe_wm_set_framedrop_pattern,
.wm_set_framedrop_period = vfe_wm_set_framedrop_period,
.wm_set_ping_addr = vfe_wm_set_ping_addr,
.wm_set_pong_addr = vfe_wm_set_pong_addr,
.wm_set_subsample = vfe_wm_set_subsample,
.wm_set_ub_cfg = vfe_wm_set_ub_cfg,
};
static void vfe_subdev_init(struct device *dev, struct vfe_device *vfe)
{
vfe->isr_ops = vfe_isr_ops_gen1;
vfe->ops_gen1 = &vfe_ops_gen1_4_1;
vfe->video_ops = vfe_video_ops_gen1;
}
const struct vfe_hw_ops vfe_ops_4_1 = {
.global_reset = vfe_global_reset,
.hw_version = vfe_hw_version,
.isr_read = vfe_isr_read,
.isr = vfe_isr,
.pm_domain_off = vfe_4_1_pm_domain_off,
.pm_domain_on = vfe_4_1_pm_domain_on,
.reg_update_clear = vfe_reg_update_clear,
.reg_update = vfe_reg_update,
.subdev_init = vfe_subdev_init,
.vfe_disable = vfe_gen1_disable,
.vfe_enable = vfe_gen1_enable,
.vfe_halt = vfe_gen1_halt,
.violation_read = vfe_violation_read,
};