2019-06-19 20:13:43 -04:00
|
|
|
/* SPDX-License-Identifier: MIT */
|
2014-06-12 22:15:21 +10:00
|
|
|
#ifndef __NVKM_MC_PRIV_H__
|
|
|
|
|
#define __NVKM_MC_PRIV_H__
|
2015-08-20 14:54:21 +10:00
|
|
|
#define nvkm_mc(p) container_of((p), struct nvkm_mc, subdev)
|
2014-06-12 22:15:21 +10:00
|
|
|
#include <subdev/mc.h>
|
|
|
|
|
|
2020-12-04 12:28:41 +10:00
|
|
|
/* Initialise the nvkm_mc base object embedded in a chipset-specific MC;
 * caller supplies the storage via the final argument. */
void nvkm_mc_ctor(const struct nvkm_mc_func *, struct nvkm_device *, enum nvkm_subdev_type, int,
		  struct nvkm_mc *);
/* Allocate and construct an MC subdev, returned through the final argument.
 * NOTE(review): presumably returns 0 on success / negative errno on failure,
 * per kernel convention — confirm against the definition. */
int nvkm_mc_new_(const struct nvkm_mc_func *, struct nvkm_device *, enum nvkm_subdev_type, int,
		 struct nvkm_mc **);
|
2014-06-12 22:15:21 +10:00
|
|
|
|
2016-04-08 17:24:40 +10:00
|
|
|
/* One entry mapping bit(s) in an MC register to the subdev that owns them;
 * used for both the interrupt and reset tables in nvkm_mc_func. */
struct nvkm_mc_map {
	u32 stat;                   /* bitmask within the MC register */
	enum nvkm_subdev_type type; /* owning subdev type */
	int inst;                   /* owning subdev instance */
	bool noauto;                /* NOTE(review): presumably excludes this entry
	                             * from automatic handling — confirm at users */
};
|
|
|
|
|
|
2015-08-20 14:54:21 +10:00
|
|
|
/* Per-chipset hook table for the MC (master control) subdev. */
struct nvkm_mc_func {
	/* one-time hardware initialisation */
	void (*init)(struct nvkm_mc *);
	/* table mapping interrupt status bits to subdevs
	 * (NOTE(review): presumably sentinel-terminated — confirm) */
	const struct nvkm_mc_map *intr;
	/* disable reporting of interrupts to host */
	void (*intr_unarm)(struct nvkm_mc *);
	/* enable reporting of interrupts to host */
	void (*intr_rearm)(struct nvkm_mc *);
	/* (un)mask delivery of specific interrupts */
	void (*intr_mask)(struct nvkm_mc *, u32 mask, u32 stat);
	/* retrieve pending interrupt mask (NV_PMC_INTR) */
	u32 (*intr_stat)(struct nvkm_mc *);
	/* table mapping reset-register bits to subdevs */
	const struct nvkm_mc_map *reset;
	/* undocumented register 0x260 access; exact semantics unknown */
	void (*unk260)(struct nvkm_mc *, u32);
};
|
|
|
|
|
|
2015-08-20 14:54:21 +10:00
|
|
|
/* NV04-generation implementations, shared by later chipsets that reuse them. */
void nv04_mc_init(struct nvkm_mc *);
void nv04_mc_intr_unarm(struct nvkm_mc *);
void nv04_mc_intr_rearm(struct nvkm_mc *);
u32 nv04_mc_intr_stat(struct nvkm_mc *);
extern const struct nvkm_mc_map nv04_mc_reset[];
|
2015-08-20 14:54:21 +10:00
|
|
|
|
2016-04-08 17:24:40 +10:00
|
|
|
/* NV17-generation interrupt/reset tables. */
extern const struct nvkm_mc_map nv17_mc_intr[];
extern const struct nvkm_mc_map nv17_mc_reset[];
|
|
|
|
|
|
2015-08-20 14:54:21 +10:00
|
|
|
/* Chipset-specific init hooks reused across families. */
void nv44_mc_init(struct nvkm_mc *);
void nv50_mc_init(struct nvkm_mc *);
void gk104_mc_init(struct nvkm_mc *);
|
2015-08-20 14:54:21 +10:00
|
|
|
|
2015-08-20 14:54:22 +10:00
|
|
|
/* GF100 (Fermi) implementations of the nvkm_mc_func hooks. */
void gf100_mc_intr_unarm(struct nvkm_mc *);
void gf100_mc_intr_rearm(struct nvkm_mc *);
void gf100_mc_intr_mask(struct nvkm_mc *, u32, u32);
u32 gf100_mc_intr_stat(struct nvkm_mc *);
void gf100_mc_unk260(struct nvkm_mc *, u32);
|
2017-03-29 18:31:18 +09:00
|
|
|
/* GP100 (Pascal) implementations, plus its dedicated constructor. */
void gp100_mc_intr_unarm(struct nvkm_mc *);
void gp100_mc_intr_rearm(struct nvkm_mc *);
void gp100_mc_intr_mask(struct nvkm_mc *, u32, u32);
int gp100_mc_new_(const struct nvkm_mc_func *, struct nvkm_device *, enum nvkm_subdev_type, int,
		  struct nvkm_mc **);
|
2016-04-08 17:24:40 +10:00
|
|
|
|
|
|
|
|
/* GK104/GP100 interrupt and reset map tables. */
extern const struct nvkm_mc_map gk104_mc_intr[];
extern const struct nvkm_mc_map gk104_mc_reset[];
extern const struct nvkm_mc_map gp100_mc_intr[];
|
2014-06-12 22:15:21 +10:00
|
|
|
#endif
|