# SPDX-License-Identifier: GPL-2.0
#
# Makefile for ppc-specific library files.
#
ccflags-$(CONFIG_PPC64) := $(NO_MINIMAL_TOC)
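
# code-patching.o and feature-fixups.o patch kernel text at runtime (feature
# fixups are applied very early in boot), so compiler instrumentation is kept
# out of them: no stack protector, no ftrace hooks (CC_FLAGS_FTRACE is
# stripped), and no KASAN or KCSAN instrumentation.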
CFLAGS_code-patching.o += -fno-stack-protector
CFLAGS_feature-fixups.o += -fno-stack-protector
CFLAGS_REMOVE_code-patching.o = $(CC_FLAGS_FTRACE)
CFLAGS_REMOVE_feature-fixups.o = $(CC_FLAGS_FTRACE)
KASAN_SANITIZE_code-patching.o := n
KASAN_SANITIZE_feature-fixups.o := n
# restart_table.o contains functions called in the NMI interrupt path
# which can be in real mode. Disable KASAN.
KASAN_SANITIZE_restart_table.o := n
KCSAN_SANITIZE_code-patching.o := n
KCSAN_SANITIZE_feature-fixups.o := n
ifdef CONFIG_KASAN
CFLAGS_code-patching.o += -DDISABLE_BRANCH_PROFILING
CFLAGS_feature-fixups.o += -DDISABLE_BRANCH_PROFILING
endif
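
# Keep the latent-entropy gcc plugin from instrumenting these files as well.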
CFLAGS_code-patching.o += $(DISABLE_LATENT_ENTROPY_PLUGIN)
CFLAGS_feature-fixups.o += $(DISABLE_LATENT_ENTROPY_PLUGIN)
obj-y += alloc.o code-patching.o feature-fixups.o pmem.o
obj-$(CONFIG_CODE_PATCHING_SELFTEST) += test-code-patching.o
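
# The optimised assembly string and memcmp routines are not instrumented for
# KASAN, so KASAN builds fall back to the generic C implementations.
# $(BITS) expands to 32 or 64 (set in arch/powerpc/Makefile).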
ifndef CONFIG_KASAN
obj-y += string.o memcmp_$(BITS).o
obj-$(CONFIG_PPC32) += strlen_32.o
endif
obj-$(CONFIG_PPC32) += div64.o copy_32.o crtsavres.o
obj-$(CONFIG_FUNCTION_ERROR_INJECTION) += error-inject.o
# See corresponding test in arch/powerpc/Makefile
# 64-bit linker creates .sfpr on demand for final link (vmlinux),
# so it is only needed for modules, and only for older linkers which
# do not support --save-restore-funcs
ifeq ($(call ld-ifversion, -lt, 22500, y),y)
extra-$(CONFIG_PPC64) += crtsavres.o
endif
obj-$(CONFIG_PPC_BOOK3S_64) += copyuser_power7.o copypage_power7.o \
			       memcpy_power7.o restart_table.o
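
# obj64-y collects objects needed only on 64-bit kernels; it is added to the
# build at the end of this Makefile via obj-$(CONFIG_PPC64).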
obj64-y	+= copypage_64.o copyuser_64.o mem_64.o hweight_64.o \
	   memcpy_64.o copy_mc_64.o
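
# Queued-spinlock kernels need qspinlock.o on SMP; simple-spinlock kernels
# use the 64-bit lock helpers in locks.o instead.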
ifdef CONFIG_PPC_QUEUED_SPINLOCKS
obj-$(CONFIG_SMP) += qspinlock.o
else
obj64-$(CONFIG_SMP) += locks.o
endif
obj64-$(CONFIG_ALTIVEC) += vmx-helper.o
obj64-$(CONFIG_KPROBES_SANITY_TEST) += test_emulate_step.o \
				       test_emulate_step_exec_instr.o
obj-y += checksum_$(BITS).o checksum_wrappers.o \
	 string_$(BITS).o
obj-y += sstep.o
obj-$(CONFIG_PPC_FPU) += ldstfp.o
obj64-y += quad.o
obj-$(CONFIG_PPC_LIB_RHEAP) += rheap.o
obj-$(CONFIG_FTR_FIXUP_SELFTEST) += feature-fixups-test.o
obj-$(CONFIG_ALTIVEC) += xor_vmx.o xor_vmx_glue.o
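# xor_vmx.o uses AltiVec vector code, so build it with -maltivec; cc-option
# adds -mabi=altivec only when the compiler accepts it.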
CFLAGS_xor_vmx.o += -maltivec $(call cc-option,-mabi=altivec)
# Enable <altivec.h>
CFLAGS_xor_vmx.o += -isystem $(shell $(CC) -print-file-name=include)
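
# Pull the 64-bit-only objects collected above into the build.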
obj-$(CONFIG_PPC64) += $(obj64-y)