/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

void atombios_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 atombios_get_backlight_level(struct radeon_encoder *radeon_encoder);
void radeon_legacy_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 radeon_legacy_get_backlight_level(struct radeon_encoder *radeon_encoder);

u32 radeon_ring_generic_get_rptr(struct radeon_device *rdev,
                                 struct radeon_ring *ring);
u32 radeon_ring_generic_get_wptr(struct radeon_device *rdev,
                                 struct radeon_ring *ring);
void radeon_ring_generic_set_wptr(struct radeon_device *rdev,
                                  struct radeon_ring *ring);
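
/*
 * Editorial sketch, not part of the original header: the per-ASIC entry
 * points declared below are not called directly by the driver core.
 * radeon_asic.c collects them into per-family function-pointer tables
 * (struct radeon_asic, defined in radeon.h), roughly along these lines;
 * the field names here are abbreviated/illustrative:
 *
 *     static struct radeon_asic r100_asic = {
 *             .init = &r100_init,
 *             .fini = &r100_fini,
 *             .suspend = &r100_suspend,
 *             .resume = &r100_resume,
 *             .vga_set_state = &r100_vga_set_state,
 *             .asic_reset = &r100_asic_reset,
 *             ...
 *     };
 *
 * radeon_asic_init() then selects the table matching rdev->family.
 */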

/*
 * r100,rv100,rs100,rv200,rs200
 */
struct r100_mc_save {
        u32 GENMO_WT;
        u32 CRTC_EXT_CNTL;
        u32 CRTC_GEN_CNTL;
        u32 CRTC2_GEN_CNTL;
        u32 CUR_OFFSET;
        u32 CUR2_OFFSET;
};
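
/*
 * Usage note (editorial, not from the original file): the *_mc_save
 * structures hold display-controller state captured while the memory
 * controller is being reprogrammed; the expected pattern is roughly:
 *
 *     struct r100_mc_save save;
 *
 *     r100_mc_stop(rdev, &save);
 *     ... reprogram the MC / VRAM location while the CRTCs are quiesced ...
 *     r100_mc_resume(rdev, &save);
 *
 * r100_mc_stop()/r100_mc_resume() are declared later in this section.
 */
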
int r100_init(struct radeon_device *rdev);
void r100_fini(struct radeon_device *rdev);
int r100_suspend(struct radeon_device *rdev);
int r100_resume(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r100_asic_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
void r100_semaphore_ring_emit(struct radeon_device *rdev,
                              struct radeon_ring *cp,
                              struct radeon_semaphore *semaphore,
                              bool emit_wait);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset,
                   uint64_t dst_offset,
                   unsigned num_gpu_pages,
                   struct radeon_fence **fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
                           enum radeon_hpd_id hpd);
int r100_debugfs_rbbm_init(struct radeon_device *rdev);
int r100_debugfs_cp_init(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
int r100_debugfs_mc_info_init(struct radeon_device *rdev);
int r100_gui_wait_for_idle(struct radeon_device *rdev);
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r100_irq_disable(struct radeon_device *rdev);
void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_vram_init_sizes(struct radeon_device *rdev);
int r100_cp_reset(struct radeon_device *rdev);
void r100_vga_render_disable(struct radeon_device *rdev);
void r100_restore_sanity(struct radeon_device *rdev);
int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
                                         struct radeon_cs_packet *pkt,
                                         struct radeon_bo *robj);
int r100_cs_parse_packet0(struct radeon_cs_parser *p,
                          struct radeon_cs_packet *pkt,
                          const unsigned *auth, unsigned n,
                          radeon_packet0_check_t check);
int r100_cs_packet_parse(struct radeon_cs_parser *p,
                         struct radeon_cs_packet *pkt,
                         unsigned idx);
void r100_enable_bm(struct radeon_device *rdev);
void r100_set_common_regs(struct radeon_device *rdev);
void r100_bm_disable(struct radeon_device *rdev);
extern bool r100_gui_idle(struct radeon_device *rdev);
extern void r100_pm_misc(struct radeon_device *rdev);
extern void r100_pm_prepare(struct radeon_device *rdev);
extern void r100_pm_finish(struct radeon_device *rdev);
extern void r100_pm_init_profile(struct radeon_device *rdev);
extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r100_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 r100_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void r100_post_page_flip(struct radeon_device *rdev, int crtc);
extern void r100_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int r100_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r200,rv250,rs300,rv280
 */
extern int r200_copy_dma(struct radeon_device *rdev,
                         uint64_t src_offset,
                         uint64_t dst_offset,
                         unsigned num_gpu_pages,
                         struct radeon_fence **fence);
void r200_set_safe_registers(struct radeon_device *rdev);

/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_asic_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
                                 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
extern void r300_set_reg_safe(struct radeon_device *rdev);
extern void r300_mc_program(struct radeon_device *rdev);
extern void r300_mc_init(struct radeon_device *rdev);
extern void r300_clock_startup(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
extern int rv370_pcie_gart_init(struct radeon_device *rdev);
extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
extern void rv370_pcie_gart_disable(struct radeon_device *rdev);

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
extern void r420_pm_init_profile(struct radeon_device *rdev);
extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
extern void r420_pipes_init(struct radeon_device *rdev);

/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int rs400_gart_init(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_adjust_size(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
extern int rs400_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs600.
 */
extern int rs600_asic_reset(struct radeon_device *rdev);
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
void rs600_irq_disable(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
                            enum radeon_hpd_id hpd);
extern void rs600_pm_misc(struct radeon_device *rdev);
extern void rs600_pm_prepare(struct radeon_device *rdev);
extern void rs600_pm_finish(struct radeon_device *rdev);
extern void rs600_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 rs600_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void rs600_post_page_flip(struct radeon_device *rdev, int crtc);
void rs600_set_safe_registers(struct radeon_device *rdev);
extern void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int rs600_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
void rs690_line_buffer_adjust(struct radeon_device *rdev,
                              struct drm_display_mode *mode1,
                              struct drm_display_mode *mode2);
extern int rs690_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rv515
 */
struct rv515_mc_save {
        u32 vga_render_control;
        u32 vga_hdp_control;
        bool crtc_enabled[2];
};

int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
void rv515_vga_render_disable(struct radeon_device *rdev);
void rv515_set_safe_registers(struct radeon_device *rdev);
void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_clock_startup(struct radeon_device *rdev);
void rv515_debugfs(struct radeon_device *rdev);
int rv515_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
int r520_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
int r600_dma_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
void r600_semaphore_ring_emit(struct radeon_device *rdev,
                              struct radeon_ring *cp,
                              struct radeon_semaphore *semaphore,
                              bool emit_wait);
void r600_dma_fence_ring_emit(struct radeon_device *rdev,
                              struct radeon_fence *fence);
void r600_dma_semaphore_ring_emit(struct radeon_device *rdev,
                                  struct radeon_ring *ring,
                                  struct radeon_semaphore *semaphore,
                                  bool emit_wait);
void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_asic_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_dma_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_uvd_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
int r600_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset, uint64_t dst_offset,
                   unsigned num_gpu_pages, struct radeon_fence **fence);
int r600_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset, uint64_t dst_offset,
                  unsigned num_gpu_pages, struct radeon_fence **fence);
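/*
 * Note (editorial, not from the original file): for the copy_blit/copy_dma
 * style helpers above, num_gpu_pages is counted in GPU pages of
 * RADEON_GPU_PAGE_SIZE bytes, and on success a fence covering the copy is
 * returned through the **fence argument so callers can wait for completion.
 */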
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
                           enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
extern bool r600_gui_idle(struct radeon_device *rdev);
extern void r600_pm_misc(struct radeon_device *rdev);
extern void r600_pm_init_profile(struct radeon_device *rdev);
extern void rs780_pm_init_profile(struct radeon_device *rdev);
extern uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void r600_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r600_get_pcie_lanes(struct radeon_device *rdev);
bool r600_card_posted(struct radeon_device *rdev);
void r600_cp_stop(struct radeon_device *rdev);
int r600_cp_start(struct radeon_device *rdev);
void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size);
int r600_cp_resume(struct radeon_device *rdev);
void r600_cp_fini(struct radeon_device *rdev);
int r600_count_pipe_bits(uint32_t val);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
int r600_pcie_gart_init(struct radeon_device *rdev);
void r600_scratch_init(struct radeon_device *rdev);
int r600_blit_init(struct radeon_device *rdev);
void r600_blit_fini(struct radeon_device *rdev);
int r600_init_microcode(struct radeon_device *rdev);
/* r600 irq */
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_init(struct radeon_device *rdev);
void r600_irq_fini(struct radeon_device *rdev);
void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
int r600_irq_set(struct radeon_device *rdev);
void r600_irq_suspend(struct radeon_device *rdev);
void r600_disable_interrupts(struct radeon_device *rdev);
void r600_rlc_stop(struct radeon_device *rdev);
/* r600 audio */
int r600_audio_init(struct radeon_device *rdev);
struct r600_audio r600_audio_status(struct radeon_device *rdev);
void r600_audio_fini(struct radeon_device *rdev);
int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
void r600_hdmi_update_audio_settings(struct drm_encoder *encoder);
void r600_hdmi_enable(struct drm_encoder *encoder, bool enable);
void r600_hdmi_setmode(struct drm_encoder *encoder, struct drm_display_mode *mode);
/* r600 blit */
int r600_blit_prepare_copy(struct radeon_device *rdev, unsigned num_gpu_pages,
                           struct radeon_fence **fence, struct radeon_sa_bo **vb,
                           struct radeon_semaphore **sem);
void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence **fence,
                         struct radeon_sa_bo *vb, struct radeon_semaphore *sem);
void r600_kms_blit_copy(struct radeon_device *rdev,
                        u64 src_gpu_addr, u64 dst_gpu_addr,
                        unsigned num_gpu_pages,
                        struct radeon_sa_bo *vb);
u32 r600_get_xclk(struct radeon_device *rdev);
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev);
int rv6xx_get_temp(struct radeon_device *rdev);

/* uvd */
int r600_uvd_init(struct radeon_device *rdev);
int r600_uvd_rbc_start(struct radeon_device *rdev);
void r600_uvd_rbc_stop(struct radeon_device *rdev);
int r600_uvd_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_uvd_fence_emit(struct radeon_device *rdev,
                         struct radeon_fence *fence);
void r600_uvd_semaphore_emit(struct radeon_device *rdev,
                             struct radeon_ring *ring,
                             struct radeon_semaphore *semaphore,
                             bool emit_wait);
void r600_uvd_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
void rv770_pm_misc(struct radeon_device *rdev);
u32 rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
void r700_cp_stop(struct radeon_device *rdev);
void r700_cp_fini(struct radeon_device *rdev);
int rv770_copy_dma(struct radeon_device *rdev,
                   uint64_t src_offset, uint64_t dst_offset,
                   unsigned num_gpu_pages,
                   struct radeon_fence **fence);
u32 rv770_get_xclk(struct radeon_device *rdev);
int rv770_uvd_resume(struct radeon_device *rdev);
int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int rv770_get_temp(struct radeon_device *rdev);

/*
 * evergreen
 */
struct evergreen_mc_save {
        u32 vga_render_control;
        u32 vga_hdp_control;
        bool crtc_enabled[RADEON_MAX_CRTCS];
};

void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int evergreen_asic_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
                                enum radeon_hpd_id hpd);
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
int evergreen_irq_set(struct radeon_device *rdev);
int evergreen_irq_process(struct radeon_device *rdev);
extern int evergreen_cs_parse(struct radeon_cs_parser *p);
extern int evergreen_dma_cs_parse(struct radeon_cs_parser *p);
extern void evergreen_pm_misc(struct radeon_device *rdev);
extern void evergreen_pm_prepare(struct radeon_device *rdev);
extern void evergreen_pm_finish(struct radeon_device *rdev);
extern void sumo_pm_init_profile(struct radeon_device *rdev);
extern void btc_pm_init_profile(struct radeon_device *rdev);
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
extern void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 evergreen_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void evergreen_post_page_flip(struct radeon_device *rdev, int crtc);
extern void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc);
void evergreen_disable_interrupt_state(struct radeon_device *rdev);
int evergreen_blit_init(struct radeon_device *rdev);
int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
                                   struct radeon_fence *fence);
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
                                   struct radeon_ib *ib);
int evergreen_copy_dma(struct radeon_device *rdev,
                       uint64_t src_offset, uint64_t dst_offset,
                       unsigned num_gpu_pages,
                       struct radeon_fence **fence);
void evergreen_hdmi_enable(struct drm_encoder *encoder, bool enable);
void evergreen_hdmi_setmode(struct drm_encoder *encoder, struct drm_display_mode *mode);
int evergreen_get_temp(struct radeon_device *rdev);
int sumo_get_temp(struct radeon_device *rdev);
int tn_get_temp(struct radeon_device *rdev);

/*
 * cayman
 */
void cayman_fence_ring_emit(struct radeon_device *rdev,
                            struct radeon_fence *fence);
void cayman_uvd_semaphore_emit(struct radeon_device *rdev,
                               struct radeon_ring *ring,
                               struct radeon_semaphore *semaphore,
                               bool emit_wait);
void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cayman_init(struct radeon_device *rdev);
void cayman_fini(struct radeon_device *rdev);
int cayman_suspend(struct radeon_device *rdev);
int cayman_resume(struct radeon_device *rdev);
int cayman_asic_reset(struct radeon_device *rdev);
void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cayman_vm_init(struct radeon_device *rdev);
void cayman_vm_fini(struct radeon_device *rdev);
void cayman_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
uint32_t cayman_vm_page_flags(struct radeon_device *rdev, uint32_t flags);
void cayman_vm_set_page(struct radeon_device *rdev,
                        struct radeon_ib *ib,
                        uint64_t pe,
                        uint64_t addr, unsigned count,
                        uint32_t incr, uint32_t flags);
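/*
 * Note (editorial, not from the original file): the *_vm_set_page() hooks
 * (cayman, si and cik variants) write 'count' page-table entries into the
 * GPU VM table starting at address 'pe', mapping pages beginning at 'addr'
 * and advancing 'addr' by 'incr' per entry; 'flags' carries the
 * valid/system/readable/writeable attributes for the entries.
 */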
int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
                                struct radeon_ib *ib);
bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
void cayman_dma_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);

/* DCE6 - SI */
void dce6_bandwidth_update(struct radeon_device *rdev);

/*
 * si
 */
void si_fence_ring_emit(struct radeon_device *rdev,
                        struct radeon_fence *fence);
void si_pcie_gart_tlb_flush(struct radeon_device *rdev);
int si_init(struct radeon_device *rdev);
void si_fini(struct radeon_device *rdev);
int si_suspend(struct radeon_device *rdev);
int si_resume(struct radeon_device *rdev);
bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int si_asic_reset(struct radeon_device *rdev);
void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int si_irq_set(struct radeon_device *rdev);
int si_irq_process(struct radeon_device *rdev);
int si_vm_init(struct radeon_device *rdev);
void si_vm_fini(struct radeon_device *rdev);
void si_vm_set_page(struct radeon_device *rdev,
                    struct radeon_ib *ib,
                    uint64_t pe,
                    uint64_t addr, unsigned count,
                    uint32_t incr, uint32_t flags);
void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
int si_copy_dma(struct radeon_device *rdev,
                uint64_t src_offset, uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct radeon_fence **fence);
void si_dma_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
u32 si_get_xclk(struct radeon_device *rdev);
uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev);
int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int si_get_temp(struct radeon_device *rdev);

/* DCE8 - CIK */
void dce8_bandwidth_update(struct radeon_device *rdev);

/*
 * cik
 */
uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev);
u32 cik_get_xclk(struct radeon_device *rdev);
uint32_t cik_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void cik_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int cik_uvd_resume(struct radeon_device *rdev);
void cik_sdma_fence_ring_emit(struct radeon_device *rdev,
                              struct radeon_fence *fence);
void cik_sdma_semaphore_ring_emit(struct radeon_device *rdev,
                                  struct radeon_ring *ring,
                                  struct radeon_semaphore *semaphore,
                                  bool emit_wait);
void cik_sdma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cik_copy_dma(struct radeon_device *rdev,
                 uint64_t src_offset, uint64_t dst_offset,
                 unsigned num_gpu_pages,
                 struct radeon_fence **fence);
int cik_sdma_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
void cik_fence_gfx_ring_emit(struct radeon_device *rdev,
                             struct radeon_fence *fence);
void cik_fence_compute_ring_emit(struct radeon_device *rdev,
                                 struct radeon_fence *fence);
void cik_semaphore_ring_emit(struct radeon_device *rdev,
                             struct radeon_ring *cp,
                             struct radeon_semaphore *semaphore,
                             bool emit_wait);
void cik_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cik_init(struct radeon_device *rdev);
void cik_fini(struct radeon_device *rdev);
int cik_suspend(struct radeon_device *rdev);
int cik_resume(struct radeon_device *rdev);
bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int cik_asic_reset(struct radeon_device *rdev);
void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_irq_set(struct radeon_device *rdev);
int cik_irq_process(struct radeon_device *rdev);
int cik_vm_init(struct radeon_device *rdev);
void cik_vm_fini(struct radeon_device *rdev);
void cik_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
void cik_vm_set_page(struct radeon_device *rdev,
                     struct radeon_ib *ib,
                     uint64_t pe,
                     uint64_t addr, unsigned count,
                     uint32_t incr, uint32_t flags);
void cik_dma_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
u32 cik_compute_ring_get_rptr(struct radeon_device *rdev,
                              struct radeon_ring *ring);
u32 cik_compute_ring_get_wptr(struct radeon_device *rdev,
                              struct radeon_ring *ring);
void cik_compute_ring_set_wptr(struct radeon_device *rdev,
                               struct radeon_ring *ring);

#endif