drivers/gpu/drm/msm/adreno/a6xx_gmu.c
    23: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
    24: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
    96: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
    97: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   125: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   126: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
   219: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   220: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
   227: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   228: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   258: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   259: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   360: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   361: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   400: if (completion_done(&a6xx_gpu->base.fault_coredump_done))
   407: wait_for_completion(&a6xx_gpu->base.fault_coredump_done);
   425: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   426: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   513: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   514: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   531: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   532: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   604: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   605: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   635: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   636: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   768: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   769: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   830: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   831: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   895: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   896: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   915: a6xx_llc_write(a6xx_gpu, REG_A7XX_CX_MISC_TCM_RET_CNTL, 1);
  1050: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  1051: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1088: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  1089: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1097: gmu_write(&a6xx_gpu->gmu, REG_A8XX_GMU_GMU_PWR_COL_KEEPALIVE, 0);
  1099: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 0);
  1160: int a6xx_gmu_resume(struct a6xx_gpu *a6xx_gpu)
  1162: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1164: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  1207: status = a6xx_llc_read(a6xx_gpu, REG_A7XX_CX_MISC_TCM_RET_CNTL) == 1 ?
  1274: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  1275: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1285: if (a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET))
  1288: a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
  1300: adreno_gpu->funcs->bus_halt(adreno_gpu, a6xx_gpu->hung);
  1341: int a6xx_gmu_stop(struct a6xx_gpu *a6xx_gpu)
  1343: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  1344: struct msm_gpu *gpu = &a6xx_gpu->base.base;
  1394: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  1395: struct drm_device *dev = a6xx_gpu->base.base.dev;
  1700: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  1701: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1799: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  1800: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1842: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  1844: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1948: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1949: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  1974: void a6xx_gmu_remove(struct a6xx_gpu *a6xx_gpu)
  1976: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1977: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  2054: int a6xx_gmu_wrapper_init(struct a6xx_gpu *a6xx_gpu, struct device_node *node)
  2057: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  2059: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  2140: int a6xx_gmu_init(struct a6xx_gpu *a6xx_gpu, struct device_node *node)
  2143: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  2145: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;

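Nearly every a6xx_gmu.c hit above recovers the wrapping struct a6xx_gpu from the embedded struct a6xx_gmu with container_of(), then reaches the adreno base through a6xx_gpu->base. A minimal self-contained sketch of that idiom follows; the struct layouts and field names here are simplified mocks, not the real driver definitions.

    #include <stddef.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct adreno_gpu { int chip_id; };     /* mock */
    struct a6xx_gmu { int initialized; };   /* mock */
    struct a6xx_gpu {
        struct adreno_gpu base;             /* to_a6xx_gpu() walks back from this member */
        struct a6xx_gmu gmu;                /* GMU helpers walk back from this member */
    };

    /* Same shape as the a6xx_gmu.c lines listed above: the helper only
     * receives the gmu pointer and derives the GPU objects from it. */
    static void gmu_helper(struct a6xx_gmu *gmu)
    {
        struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
        struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;

        printf("chip_id=%d initialized=%d\n", adreno_gpu->chip_id, gmu->initialized);
    }

    int main(void)
    {
        struct a6xx_gpu gpu = { .base = { .chip_id = 630 }, .gmu = { .initialized = 1 } };

        gmu_helper(&gpu.gmu);
        return 0;
    }
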
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
    19: static u64 read_gmu_ao_counter(struct a6xx_gpu *a6xx_gpu)
    24: count_hi = gmu_read(&a6xx_gpu->gmu, REG_A6XX_GMU_ALWAYS_ON_COUNTER_H);
    25: count_lo = gmu_read(&a6xx_gpu->gmu, REG_A6XX_GMU_ALWAYS_ON_COUNTER_L);
    26: temp = gmu_read(&a6xx_gpu->gmu, REG_A6XX_GMU_ALWAYS_ON_COUNTER_H);
    50: static int fenced_write(struct a6xx_gpu *a6xx_gpu, u32 offset, u32 value, u32 mask)
    52: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
    54: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
    94: int a6xx_fenced_write(struct a6xx_gpu *a6xx_gpu, u32 offset, u64 value, u32 mask, bool is_64b)
    98: ret = fenced_write(a6xx_gpu, offset, lower_32_bits(value), mask);
   105: ret = fenced_write(a6xx_gpu, offset + 1, upper_32_bits(value), mask);
   113: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   116: if (!adreno_has_gmu_wrapper(adreno_gpu) && !a6xx_gmu_isidle(&a6xx_gpu->gmu))
   150: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   153: if (a6xx_gpu->has_whereami && !adreno_gpu->base.hw_apriv) {
   155: OUT_RING(ring, lower_32_bits(shadowptr(a6xx_gpu, ring)));
   156: OUT_RING(ring, upper_32_bits(shadowptr(a6xx_gpu, ring)));
   163: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   178: if (!a6xx_in_preempt(a6xx_gpu)) {
   179: if (a6xx_gpu->cur_ring == ring)
   180: a6xx_fenced_write(a6xx_gpu, REG_A6XX_CP_RB_WPTR, wptr, BIT(0), false);
   201: static void a6xx_set_pagetable(struct a6xx_gpu *a6xx_gpu,
   204: bool sysprof = refcount_read(&a6xx_gpu->base.base.sysprof_active) > 1;
   207: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   291: if (adreno_is_a7xx(&a6xx_gpu->base) || adreno_is_a8xx(&a6xx_gpu->base)) {
   332: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   338: a6xx_set_pagetable(a6xx_gpu, ring, submit);
   407: trace_msm_gpu_submit_flush(submit, read_gmu_ao_counter(a6xx_gpu));
   413: struct a6xx_gpu *a6xx_gpu, struct msm_gpu_submitqueue *queue)
   427: a6xx_gpu->preempt_iova[ring->id]));
   429: a6xx_gpu->preempt_iova[ring->id]));
   442: preempt_postamble = a6xx_gpu->preempt_postamble_iova;
   448: a6xx_gpu->preempt_postamble_len) |
   456: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   470: a6xx_set_pagetable(a6xx_gpu, ring, submit);
   477: a6xx_emit_set_pseudo_reg(ring, a6xx_gpu, submit->queue);
   583: a6xx_gpu->last_seqno[ring->id] = submit->seqno;
   617: trace_msm_gpu_submit_flush(submit, read_gmu_ao_counter(a6xx_gpu));
   628: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   629: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
   661: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GPU_GMU_AO_GMU_CGC_MODE_CNTL,
   663: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GPU_GMU_AO_GMU_CGC_DELAY_CNTL,
   665: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GPU_GMU_AO_GMU_CGC_HYST_CNTL,
   873: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   876: void *ptr = a6xx_gpu->pwrup_reglist_ptr;
   944: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   954: a6xx_emit_set_pseudo_reg(ring, a6xx_gpu, NULL);
  1001: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1039: OUT_RING(ring, lower_32_bits(a6xx_gpu->pwrup_reglist_iova));
  1041: OUT_RING(ring, upper_32_bits(a6xx_gpu->pwrup_reglist_iova));
  1053: static bool a6xx_ucode_check_version(struct a6xx_gpu *a6xx_gpu,
  1056: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1092: a6xx_gpu->has_whereami = true;
  1123: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1125: if (!a6xx_gpu->sqe_bo) {
  1126: a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu,
  1127: adreno_gpu->fw[ADRENO_FW_SQE], &a6xx_gpu->sqe_iova);
  1129: if (IS_ERR(a6xx_gpu->sqe_bo)) {
  1130: int ret = PTR_ERR(a6xx_gpu->sqe_bo);
  1132: a6xx_gpu->sqe_bo = NULL;
  1139: msm_gem_object_set_name(a6xx_gpu->sqe_bo, "sqefw");
  1140: if (!a6xx_ucode_check_version(a6xx_gpu, a6xx_gpu->sqe_bo)) {
  1141: msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->vm);
  1142: drm_gem_object_put(a6xx_gpu->sqe_bo);
  1144: a6xx_gpu->sqe_bo = NULL;
  1149: if (!a6xx_gpu->aqe_bo && adreno_gpu->fw[ADRENO_FW_AQE]) {
  1150: a6xx_gpu->aqe_bo = adreno_fw_create_bo(gpu,
  1151: adreno_gpu->fw[ADRENO_FW_AQE], &a6xx_gpu->aqe_iova);
  1153: if (IS_ERR(a6xx_gpu->aqe_bo)) {
  1154: int ret = PTR_ERR(a6xx_gpu->aqe_bo);
  1156: a6xx_gpu->aqe_bo = NULL;
  1163: msm_gem_object_set_name(a6xx_gpu->aqe_bo, "aqefw");
  1170: if ((adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami) &&
  1171: !a6xx_gpu->shadow_bo) {
  1172: a6xx_gpu->shadow = msm_gem_kernel_new(gpu->dev,
  1175: gpu->vm, &a6xx_gpu->shadow_bo,
  1176: &a6xx_gpu->shadow_iova);
  1178: if (IS_ERR(a6xx_gpu->shadow))
  1179: return PTR_ERR(a6xx_gpu->shadow);
  1181: msm_gem_object_set_name(a6xx_gpu->shadow_bo, "shadow");
  1184: a6xx_gpu->pwrup_reglist_ptr = msm_gem_kernel_new(gpu->dev, PAGE_SIZE,
  1186: gpu->vm, &a6xx_gpu->pwrup_reglist_bo,
  1187: &a6xx_gpu->pwrup_reglist_iova);
  1189: if (IS_ERR(a6xx_gpu->pwrup_reglist_ptr))
  1190: return PTR_ERR(a6xx_gpu->pwrup_reglist_ptr);
  1192: msm_gem_object_set_name(a6xx_gpu->pwrup_reglist_bo, "pwrup_reglist");
  1250: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1251: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  1258: ret = a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
  1404: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_SELECT_1,
  1509: gpu_write64(gpu, REG_A6XX_CP_SQE_INSTR_BASE, a6xx_gpu->sqe_iova);
  1525: if (a6xx_gpu->shadow_bo) {
  1527: shadowptr(a6xx_gpu, gpu->rb[0]));
  1529: a6xx_gpu->shadow[i] = 0;
  1541: a6xx_gpu->cur_ring = gpu->rb[0];
  1549: if (adreno_is_a7xx(adreno_gpu) && !a6xx_gpu->pwrup_reglist_emitted) {
  1551: a6xx_gpu->pwrup_reglist_emitted = true;
  1599: a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
  1601: if (a6xx_gpu->gmu.legacy) {
  1603: a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_BOOT_SLUMBER);
  1612: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1615: mutex_lock(&a6xx_gpu->gmu.lock);
  1617: mutex_unlock(&a6xx_gpu->gmu.lock);
  1632: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1633: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  1638: if (a6xx_gmu_gx_is_on(&a6xx_gpu->gmu)) {
  1652: a6xx_gpu->hung = true;
  1702: a6xx_gpu->hung = false;
  1938: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1943: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, on);
  1949: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1950: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  2020: static void a6xx_llc_deactivate(struct a6xx_gpu *a6xx_gpu)
  2022: llcc_slice_deactivate(a6xx_gpu->llc_slice);
  2023: llcc_slice_deactivate(a6xx_gpu->htw_llc_slice);
  2026: static void a6xx_llc_activate(struct a6xx_gpu *a6xx_gpu)
  2028: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  2032: if (IS_ERR(a6xx_gpu->llc_mmio))
  2035: if (!llcc_slice_activate(a6xx_gpu->llc_slice)) {
  2036: u32 gpu_scid = llcc_get_slice_id(a6xx_gpu->llc_slice);
  2054: if (!llcc_slice_activate(a6xx_gpu->htw_llc_slice)) {
  2055: if (!a6xx_gpu->have_mmu500) {
  2056: u32 gpuhtw_scid = llcc_get_slice_id(a6xx_gpu->htw_llc_slice);
  2070: if (!a6xx_gpu->have_mmu500) {
  2071: a6xx_llc_write(a6xx_gpu,
  2078: a6xx_llc_rmw(a6xx_gpu,
  2086: static void a7xx_llc_activate(struct a6xx_gpu *a6xx_gpu)
  2088: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  2091: if (IS_ERR(a6xx_gpu->llc_mmio))
  2094: if (!llcc_slice_activate(a6xx_gpu->llc_slice)) {
  2095: u32 gpu_scid = llcc_get_slice_id(a6xx_gpu->llc_slice);
  2112: llcc_slice_activate(a6xx_gpu->htw_llc_slice);
  2115: static void a6xx_llc_slices_destroy(struct a6xx_gpu *a6xx_gpu)
  2118: if (adreno_has_gmu_wrapper(&a6xx_gpu->base))
  2121: llcc_slice_putd(a6xx_gpu->llc_slice);
  2122: llcc_slice_putd(a6xx_gpu->htw_llc_slice);
  2126: struct a6xx_gpu *a6xx_gpu, bool is_a7xx)
  2131: if (adreno_has_gmu_wrapper(&a6xx_gpu->base))
  2139: a6xx_gpu->have_mmu500 = (phandle &&
  2143: if (is_a7xx || !a6xx_gpu->have_mmu500)
  2144: a6xx_gpu->llc_mmio = msm_ioremap(pdev, "cx_mem");
  2146: a6xx_gpu->llc_mmio = NULL;
  2148: a6xx_gpu->llc_slice = llcc_slice_getd(LLCC_GPU);
  2149: a6xx_gpu->htw_llc_slice = llcc_slice_getd(LLCC_GPUHTW);
  2151: if (IS_ERR_OR_NULL(a6xx_gpu->llc_slice) && IS_ERR_OR_NULL(a6xx_gpu->htw_llc_slice))
  2152: a6xx_gpu->llc_mmio = ERR_PTR(-EINVAL);
  2155: static int a7xx_cx_mem_init(struct a6xx_gpu *a6xx_gpu)
  2157: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  2174: a6xx_llc_write(a6xx_gpu, REG_A7XX_CX_MISC_SW_FUSE_VALUE,
  2192: fuse_val = a6xx_llc_read(a6xx_gpu,
  2267: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2274: mutex_lock(&a6xx_gpu->gmu.lock);
  2275: ret = a6xx_gmu_resume(a6xx_gpu);
  2276: mutex_unlock(&a6xx_gpu->gmu.lock);
  2283: a8xx_llc_activate(a6xx_gpu);
  2285: a7xx_llc_activate(a6xx_gpu);
  2287: a6xx_llc_activate(a6xx_gpu);
  2295: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2296: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  2305: mutex_lock(&a6xx_gpu->gmu.lock);
  2341: mutex_unlock(&a6xx_gpu->gmu.lock);
  2345: a6xx_llc_activate(a6xx_gpu);
  2354: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2359: a6xx_llc_deactivate(a6xx_gpu);
  2363: mutex_lock(&a6xx_gpu->gmu.lock);
  2364: ret = a6xx_gmu_stop(a6xx_gpu);
  2365: mutex_unlock(&a6xx_gpu->gmu.lock);
  2369: if (a6xx_gpu->shadow_bo)
  2371: a6xx_gpu->shadow[i] = 0;
  2381: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2382: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  2387: a6xx_llc_deactivate(a6xx_gpu);
  2391: mutex_lock(&a6xx_gpu->gmu.lock);
  2406: mutex_unlock(&a6xx_gpu->gmu.lock);
  2408: if (a6xx_gpu->shadow_bo)
  2410: a6xx_gpu->shadow[i] = 0;
  2420: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2422: *value = read_gmu_ao_counter(a6xx_gpu);
  2436: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2438: return a6xx_gpu->cur_ring;
  2444: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2446: if (a6xx_gpu->sqe_bo) {
  2447: msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->vm);
  2448: drm_gem_object_put(a6xx_gpu->sqe_bo);
  2451: if (a6xx_gpu->aqe_bo) {
  2452: msm_gem_unpin_iova(a6xx_gpu->aqe_bo, gpu->vm);
  2453: drm_gem_object_put(a6xx_gpu->aqe_bo);
  2456: if (a6xx_gpu->shadow_bo) {
  2457: msm_gem_unpin_iova(a6xx_gpu->shadow_bo, gpu->vm);
  2458: drm_gem_object_put(a6xx_gpu->shadow_bo);
  2461: a6xx_llc_slices_destroy(a6xx_gpu);
  2463: a6xx_gmu_remove(a6xx_gpu);
  2467: kfree(a6xx_gpu);
  2473: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2479: busy_cycles = gmu_read64(&a6xx_gpu->gmu,
  2490: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2492: mutex_lock(&a6xx_gpu->gmu.lock);
  2494: mutex_unlock(&a6xx_gpu->gmu.lock);
  2501: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2508: if (!IS_ERR_OR_NULL(a6xx_gpu->htw_llc_slice) &&
  2532: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  2534: if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami)
  2535: return a6xx_gpu->shadow[ring->id];
  2644: struct a6xx_gpu *a6xx_gpu;
  2651: a6xx_gpu = kzalloc_obj(*a6xx_gpu);
  2652: if (!a6xx_gpu)
  2655: adreno_gpu = &a6xx_gpu->base;
  2658: mutex_init(&a6xx_gpu->gmu.lock);
  2675: a6xx_llc_slices_init(pdev, a6xx_gpu, is_a7xx);
  2679: a6xx_llc_slices_destroy(a6xx_gpu);
  2680: kfree(a6xx_gpu);
  2690: a6xx_destroy(&(a6xx_gpu->base.base));
  2702: ret = a6xx_gmu_wrapper_init(a6xx_gpu, node);
  2704: ret = a6xx_gmu_init(a6xx_gpu, node);
  2707: a6xx_destroy(&(a6xx_gpu->base.base));
  2712: ret = a7xx_cx_mem_init(a6xx_gpu);
  2714: a6xx_destroy(&(a6xx_gpu->base.base));
  2726: a6xx_destroy(&(a6xx_gpu->base.base));

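The read_gmu_ao_counter() hits above (a6xx_gpu.c lines 19 and 24-26) read a 64-bit always-on counter that the GMU exposes as two 32-bit registers, sampling the high word on both sides of the low-word read. A self-contained sketch of that retry loop, with the register reads mocked out and assuming the usual high/low/high scheme:

    #include <stdint.h>
    #include <stdio.h>

    /* Mock register pair standing in for gmu_read() of the _H and _L registers. */
    static uint32_t fake_counter_hi, fake_counter_lo;

    static uint32_t read_counter_hi(void) { return fake_counter_hi; }
    static uint32_t read_counter_lo(void) { return fake_counter_lo; }

    static uint64_t read_ao_counter(void)
    {
        uint32_t hi, lo, tmp;

        /* Re-read the high word until it is stable across the low-word read,
         * so a carry between the two halves cannot produce a torn value. */
        do {
            hi = read_counter_hi();
            lo = read_counter_lo();
            tmp = read_counter_hi();
        } while (hi != tmp);

        return ((uint64_t)hi << 32) | lo;
    }

    int main(void)
    {
        fake_counter_hi = 1;
        fake_counter_lo = 0x80000000;
        printf("counter = 0x%llx\n", (unsigned long long)read_ao_counter());
        return 0;
    }
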
drivers/gpu/drm/msm/adreno/a6xx_gpu.h
   117: #define to_a6xx_gpu(x) container_of(x, struct a6xx_gpu, base)
   243: static inline void a6xx_llc_rmw(struct a6xx_gpu *a6xx_gpu, u32 reg, u32 mask, u32 or)
   245: return msm_rmw(a6xx_gpu->llc_mmio + (reg << 2), mask, or);
   248: static inline u32 a6xx_llc_read(struct a6xx_gpu *a6xx_gpu, u32 reg)
   250: return readl(a6xx_gpu->llc_mmio + (reg << 2));
   253: static inline void a6xx_llc_write(struct a6xx_gpu *a6xx_gpu, u32 reg, u32 value)
   255: writel(value, a6xx_gpu->llc_mmio + (reg << 2));
   261: int a6xx_gmu_resume(struct a6xx_gpu *gpu);
   262: int a6xx_gmu_stop(struct a6xx_gpu *gpu);
   271: int a6xx_gmu_init(struct a6xx_gpu *a6xx_gpu, struct device_node *node);
   272: int a6xx_gmu_wrapper_init(struct a6xx_gpu *a6xx_gpu, struct device_node *node);
   273: void a6xx_gmu_remove(struct a6xx_gpu *a6xx_gpu);
   287: static inline bool a6xx_in_preempt(struct a6xx_gpu *a6xx_gpu)
   295: int preempt_state = atomic_read(&a6xx_gpu->preempt_state);
   316: int a6xx_fenced_write(struct a6xx_gpu *gpu, u32 offset, u64 value, u32 mask, bool is_64b);
   329: void a8xx_llc_activate(struct a6xx_gpu *a6xx_gpu);

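The a6xx_gpu.h helpers above show the accessor convention used for the LLC/CX_MISC window: llc_mmio is a byte-addressed mapping, callers pass dword register indices, and each helper scales the index by four (reg << 2). A self-contained illustration with the MMIO window mocked as an array; the read-modify-write semantics are assumed to be clear-then-set, matching the shape of a6xx_llc_rmw():

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t fake_window[64];                  /* mock for the ioremap()ed window */
    static uint8_t *llc_mmio = (uint8_t *)fake_window;

    static uint32_t llc_read(uint32_t reg)
    {
        /* dword register index -> byte offset */
        return *(volatile uint32_t *)(llc_mmio + (reg << 2));
    }

    static void llc_write(uint32_t reg, uint32_t value)
    {
        *(volatile uint32_t *)(llc_mmio + (reg << 2)) = value;
    }

    static void llc_rmw(uint32_t reg, uint32_t mask, uint32_t bits)
    {
        /* read-modify-write: clear the masked field, then set the new bits */
        llc_write(reg, (llc_read(reg) & ~mask) | bits);
    }

    int main(void)
    {
        llc_write(3, 0xffff0000);
        llc_rmw(3, 0x0000ff00, 0x00001200);
        printf("reg 3 = 0x%08x\n", llc_read(3));
        return 0;
    }
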
drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c
   148: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   155: if (!a6xx_gmu_sptprac_is_on(&a6xx_gpu->gmu))
  1197: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1198: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  1231: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1254: if (!a6xx_gmu_gx_is_on(&a6xx_gpu->gmu))
  1258: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_AO_AHB_FENCE_CTRL, 0);
  1291: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1292: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  1586: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1602: a6xx_state->gmu_log = a6xx_snapshot_gmu_bo(a6xx_state, &a6xx_gpu->gmu.log);
  1603: a6xx_state->gmu_hfi = a6xx_snapshot_gmu_bo(a6xx_state, &a6xx_gpu->gmu.hfi);
  1604: a6xx_state->gmu_debug = a6xx_snapshot_gmu_bo(a6xx_state, &a6xx_gpu->gmu.debug);
  1610: if (!a6xx_gmu_gx_is_on(&a6xx_gpu->gmu))

drivers/gpu/drm/msm/adreno/a6xx_hfi.c
   109: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   119: if (completion_done(&a6xx_gpu->base.fault_coredump_done))
   126: wait_for_completion(&a6xx_gpu->base.fault_coredump_done);
   323: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   324: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   794: struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   795: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;

drivers/gpu/drm/msm/adreno/a6xx_preempt.c
    16: static inline bool try_preempt_state(struct a6xx_gpu *a6xx_gpu,
    19: enum a6xx_preempt_state cur = atomic_cmpxchg(&a6xx_gpu->preempt_state,
    29: static inline void set_preempt_state(struct a6xx_gpu *gpu,
    44: static inline void update_wptr(struct a6xx_gpu *a6xx_gpu, struct msm_ringbuffer *ring)
    54: a6xx_fenced_write(a6xx_gpu, REG_A6XX_CP_RB_WPTR, wptr, BIT(0), false);
    66: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
    77: if (!empty && ring == a6xx_gpu->cur_ring)
    78: empty = ring->memptrs->fence == a6xx_gpu->last_seqno[i];
    90: struct a6xx_gpu *a6xx_gpu = timer_container_of(a6xx_gpu, t,
    92: struct msm_gpu *gpu = &a6xx_gpu->base.base;
    95: if (!try_preempt_state(a6xx_gpu, PREEMPT_TRIGGERED, PREEMPT_FAULTED))
   102: static void preempt_prepare_postamble(struct a6xx_gpu *a6xx_gpu)
   104: u32 *postamble = a6xx_gpu->preempt_postamble_ptr;
   121: a6xx_gpu->preempt_postamble_len = count;
   123: a6xx_gpu->postamble_enabled = true;
   126: static void preempt_disable_postamble(struct a6xx_gpu *a6xx_gpu)
   128: u32 *postamble = a6xx_gpu->preempt_postamble_ptr;
   134: *postamble = PKT7(CP_NOP, (a6xx_gpu->preempt_postamble_len - 1));
   136: a6xx_gpu->postamble_enabled = false;
   146: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   151: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_PWR_COL_PREEMPT_KEEPALIVE, on);
   158: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   161: if (!try_preempt_state(a6xx_gpu, PREEMPT_TRIGGERED, PREEMPT_PENDING))
   165: timer_delete(&a6xx_gpu->preempt_timer);
   178: set_preempt_state(a6xx_gpu, PREEMPT_FAULTED);
   185: a6xx_gpu->cur_ring = a6xx_gpu->next_ring;
   186: a6xx_gpu->next_ring = NULL;
   188: set_preempt_state(a6xx_gpu, PREEMPT_FINISH);
   190: update_wptr(a6xx_gpu, a6xx_gpu->cur_ring);
   192: set_preempt_state(a6xx_gpu, PREEMPT_NONE);
   196: trace_msm_gpu_preemption_irq(a6xx_gpu->cur_ring->id);
   208: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   216: struct a6xx_preempt_record *record_ptr = a6xx_gpu->preempt[i];
   220: record_ptr->rptr_addr = shadowptr(a6xx_gpu, gpu->rb[i]);
   233: set_preempt_state(a6xx_gpu, PREEMPT_NONE);
   235: spin_lock_init(&a6xx_gpu->eval_lock);
   238: a6xx_gpu->cur_ring = gpu->rb[0];
   244: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   258: spin_lock_irqsave(&a6xx_gpu->eval_lock, flags);
   264: if (!try_preempt_state(a6xx_gpu, PREEMPT_NONE, PREEMPT_START)) {
   265: spin_unlock_irqrestore(&a6xx_gpu->eval_lock, flags);
   269: cntl = A6XX_CP_CONTEXT_SWITCH_CNTL_LEVEL(a6xx_gpu->preempt_level);
   271: if (a6xx_gpu->skip_save_restore)
   274: if (a6xx_gpu->uses_gmem)
   286: if (!ring || (a6xx_gpu->cur_ring == ring)) {
   287: set_preempt_state(a6xx_gpu, PREEMPT_FINISH);
   288: update_wptr(a6xx_gpu, a6xx_gpu->cur_ring);
   289: set_preempt_state(a6xx_gpu, PREEMPT_NONE);
   290: spin_unlock_irqrestore(&a6xx_gpu->eval_lock, flags);
   294: spin_unlock_irqrestore(&a6xx_gpu->eval_lock, flags);
   299: a6xx_gpu->preempt_smmu[ring->id];
   300: struct a6xx_preempt_record *record_ptr = a6xx_gpu->preempt[ring->id];
   318: trace_msm_gpu_preemption_trigger(a6xx_gpu->cur_ring->id, ring->id);
   325: a6xx_fenced_write(a6xx_gpu,
   326: REG_A6XX_CP_CONTEXT_SWITCH_SMMU_INFO, a6xx_gpu->preempt_smmu_iova[ring->id],
   329: a6xx_fenced_write(a6xx_gpu,
   331: a6xx_gpu->preempt_iova[ring->id], BIT(1), true);
   333: a6xx_gpu->next_ring = ring;
   336: mod_timer(&a6xx_gpu->preempt_timer, jiffies + msecs_to_jiffies(10000));
   339: sysprof = refcount_read(&a6xx_gpu->base.base.sysprof_active) > 1;
   341: if (!sysprof && !a6xx_gpu->postamble_enabled)
   342: preempt_prepare_postamble(a6xx_gpu);
   344: if (sysprof && a6xx_gpu->postamble_enabled)
   345: preempt_disable_postamble(a6xx_gpu);
   348: set_preempt_state(a6xx_gpu, PREEMPT_TRIGGERED);
   351: a6xx_fenced_write(a6xx_gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL, cntl, BIT(1), false);
   354: static int preempt_init_ring(struct a6xx_gpu *a6xx_gpu,
   357: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
   376: a6xx_gpu->preempt_bo[ring->id] = bo;
   377: a6xx_gpu->preempt_iova[ring->id] = iova;
   378: a6xx_gpu->preempt[ring->id] = ptr;
   394: a6xx_gpu->preempt_smmu_bo[ring->id] = bo;
   395: a6xx_gpu->preempt_smmu_iova[ring->id] = iova;
   396: a6xx_gpu->preempt_smmu[ring->id] = ptr;
   424: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   428: msm_gem_kernel_put(a6xx_gpu->preempt_bo[i], gpu->vm);
   434: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   442: if (preempt_init_ring(a6xx_gpu, gpu->rb[i]))
   447: a6xx_gpu->preempt_level = 1;
   448: a6xx_gpu->uses_gmem = 1;
   449: a6xx_gpu->skip_save_restore = 1;
   451: a6xx_gpu->preempt_postamble_ptr = msm_gem_kernel_new(gpu->dev,
   454: gpu->vm, &a6xx_gpu->preempt_postamble_bo,
   455: &a6xx_gpu->preempt_postamble_iova);
   457: if (IS_ERR(a6xx_gpu->preempt_postamble_ptr))
   460: preempt_prepare_postamble(a6xx_gpu);
   462: timer_setup(&a6xx_gpu->preempt_timer, a6xx_preempt_timer, 0);

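The try_preempt_state()/set_preempt_state() hits above (a6xx_preempt.c lines 16, 19, 29) drive preemption as a compare-and-swap state machine: a transition succeeds only if the state is still the one the caller expected. A sketch of the same pattern using C11 atomics in place of the kernel's atomic_cmpxchg(); the state names mirror the ones visible in the hits, the rest is illustrative:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    enum preempt_state {
        PREEMPT_NONE,        /* no preemption in flight */
        PREEMPT_START,
        PREEMPT_TRIGGERED,
        PREEMPT_PENDING,
        PREEMPT_FAULTED,
        PREEMPT_FINISH,
    };

    static _Atomic int preempt_state = PREEMPT_NONE;

    /* Succeeds only if the state is still 'cur'; a concurrent IRQ handler or
     * timeout that already moved the state on makes this return false. */
    static bool try_preempt_state(int cur, int next)
    {
        return atomic_compare_exchange_strong(&preempt_state, &cur, next);
    }

    static void set_preempt_state(int next)
    {
        atomic_store(&preempt_state, next);
    }

    int main(void)
    {
        printf("trigger: %d\n", try_preempt_state(PREEMPT_NONE, PREEMPT_START));      /* 1 */
        printf("stale:   %d\n", try_preempt_state(PREEMPT_NONE, PREEMPT_TRIGGERED));  /* 0 */
        set_preempt_state(PREEMPT_NONE);
        return 0;
    }
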
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
    22: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
    27: if (a6xx_gpu->cached_aperture == val)
    32: a6xx_gpu->cached_aperture = val;
    38: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
    40: spin_lock_irqsave(&a6xx_gpu->aperture_lock, *flags);
    48: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
    50: spin_unlock_irqrestore(&a6xx_gpu->aperture_lock, flags);
    73: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
    77: spin_lock_irqsave(&a6xx_gpu->aperture_lock, flags);
    80: spin_unlock_irqrestore(&a6xx_gpu->aperture_lock, flags);
    88: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
    95: if (a6xx_gpu->slice_mask)
   102: a6xx_gpu->slice_mask = slice_mask;
   106: slice_mask &= a6xx_llc_read(a6xx_gpu,
   109: a6xx_gpu->slice_mask = slice_mask;
   115: static u32 a8xx_get_first_slice(struct a6xx_gpu *a6xx_gpu)
   117: return ffs(a6xx_gpu->slice_mask) - 1;
   123: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   126: if (!a6xx_gmu_isidle(&a6xx_gpu->gmu))
   161: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   174: if (!a6xx_in_preempt(a6xx_gpu)) {
   175: if (a6xx_gpu->cur_ring == ring)
   189: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   190: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
   507: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   508: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
   514: ret = a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
   519: a6xx_gpu->cached_aperture = UINT_MAX;
   559: gmu_write(&a6xx_gpu->gmu, REG_A8XX_GMU_CX_GMU_POWER_COUNTER_SELECT_XOCLK_1,
   639: gpu_write64(gpu, REG_A8XX_CP_SQE_INSTR_BASE, a6xx_gpu->sqe_iova);
   640: if (a6xx_gpu->aqe_iova)
   641: gpu_write64(gpu, REG_A8XX_CP_AQE_INSTR_BASE_0, a6xx_gpu->aqe_iova);
   648: gpu_write64(gpu, REG_A6XX_CP_RB_RPTR_ADDR, shadowptr(a6xx_gpu, gpu->rb[0]));
   652: a6xx_gpu->shadow[i] = 0;
   655: a6xx_gpu->cur_ring = gpu->rb[0];
   712: a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
   720: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   723: mutex_lock(&a6xx_gpu->gmu.lock);
   725: mutex_unlock(&a6xx_gpu->gmu.lock);
   739: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   740: struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
   752: a6xx_gpu->hung = true;
   796: a6xx_gpu->hung = false;
   888: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
   889: u32 slice = a8xx_get_first_slice(a6xx_gpu);
   985: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1003: gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 1);
  1120: void a8xx_llc_activate(struct a6xx_gpu *a6xx_gpu)
  1122: struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
  1125: if (!llcc_slice_activate(a6xx_gpu->llc_slice)) {
  1126: u32 gpu_scid = llcc_get_slice_id(a6xx_gpu->llc_slice);
  1144: llcc_slice_activate(a6xx_gpu->htw_llc_slice);
  1180: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1182: mutex_lock(&a6xx_gpu->gmu.lock);
  1185: a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET);
  1189: a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET);
  1191: mutex_unlock(&a6xx_gpu->gmu.lock);
  1199: struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
  1205: busy_cycles = gmu_read64(&a6xx_gpu->gmu,

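Two small idioms recur in the a8xx_gpu.c hits above: a8xx_get_first_slice() picks the lowest populated slice with ffs(slice_mask) - 1, and the aperture helpers skip the register write when the cached value already matches. A self-contained sketch; the names and the write_aperture_reg() stub are illustrative, not the driver's API:

    #include <stdint.h>
    #include <stdio.h>
    #include <strings.h>    /* ffs() */

    static uint32_t slice_mask = 0x6;               /* slices 1 and 2 populated */
    static uint32_t cached_aperture = UINT32_MAX;   /* "nothing written yet" */

    static void write_aperture_reg(uint32_t val)
    {
        /* stands in for the MMIO write done by the real driver */
        printf("aperture <- 0x%x\n", val);
    }

    static uint32_t get_first_slice(void)
    {
        /* ffs() is 1-based, so subtract one for a 0-based slice index */
        return ffs(slice_mask) - 1;
    }

    static void set_aperture(uint32_t val)
    {
        if (cached_aperture == val)     /* skip redundant (slow) register writes */
            return;

        write_aperture_reg(val);
        cached_aperture = val;
    }

    int main(void)
    {
        printf("first slice = %u\n", get_first_slice());    /* prints 1 */
        set_aperture(0x10);     /* performs the write */
        set_aperture(0x10);     /* cached, skipped */
        return 0;
    }
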