/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

void atombios_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 atombios_get_backlight_level(struct radeon_encoder *radeon_encoder);
void radeon_legacy_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 radeon_legacy_get_backlight_level(struct radeon_encoder *radeon_encoder);

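/*
 * Most of these per-ASIC entry points are not called directly by the core
 * driver; radeon_asic.c collects them into per-family struct radeon_asic
 * function-pointer tables that the rest of the driver dispatches through.
 * A minimal, abridged sketch of how the r100 entries get wired up (see
 * radeon_asic.c / radeon.h for the exact table layout in this kernel):
 *
 *	static struct radeon_asic r100_asic_sketch = {
 *		.init		= &r100_init,
 *		.fini		= &r100_fini,
 *		.suspend	= &r100_suspend,
 *		.resume		= &r100_resume,
 *		.vga_set_state	= &r100_vga_set_state,
 *		.asic_reset	= &r100_asic_reset,
 *	};
 */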

/*
 * r100,rv100,rs100,rv200,rs200
 */
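/* register state saved by r100_mc_stop() and restored by r100_mc_resume()
 * while the memory controller is reprogrammed */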
struct r100_mc_save {
	u32 GENMO_WT;
	u32 CRTC_EXT_CNTL;
	u32 CRTC_GEN_CNTL;
	u32 CRTC2_GEN_CNTL;
	u32 CUR_OFFSET;
	u32 CUR2_OFFSET;
};
int r100_init(struct radeon_device *rdev);
void r100_fini(struct radeon_device *rdev);
int r100_suspend(struct radeon_device *rdev);
int r100_resume(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r100_asic_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
void r100_semaphore_ring_emit(struct radeon_device *rdev,
			      struct radeon_ring *cp,
			      struct radeon_semaphore *semaphore,
			      bool emit_wait);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
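/* asynchronous GPU blit between two GPU addresses (used e.g. for TTM buffer
 * moves); completion is signalled through the returned fence */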
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_gpu_pages,
		   struct radeon_fence **fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
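/* HPD: hot plug detect handling for display connectors */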
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
int r100_debugfs_rbbm_init(struct radeon_device *rdev);
int r100_debugfs_cp_init(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
int r100_debugfs_mc_info_init(struct radeon_device *rdev);
int r100_gui_wait_for_idle(struct radeon_device *rdev);
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r100_irq_disable(struct radeon_device *rdev);
void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_vram_init_sizes(struct radeon_device *rdev);
int r100_cp_reset(struct radeon_device *rdev);
void r100_vga_render_disable(struct radeon_device *rdev);
void r100_restore_sanity(struct radeon_device *rdev);
int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
					 struct radeon_cs_packet *pkt,
					 struct radeon_bo *robj);
int r100_cs_parse_packet0(struct radeon_cs_parser *p,
			  struct radeon_cs_packet *pkt,
			  const unsigned *auth, unsigned n,
			  radeon_packet0_check_t check);
int r100_cs_packet_parse(struct radeon_cs_parser *p,
			 struct radeon_cs_packet *pkt,
			 unsigned idx);
void r100_enable_bm(struct radeon_device *rdev);
void r100_set_common_regs(struct radeon_device *rdev);
void r100_bm_disable(struct radeon_device *rdev);
extern bool r100_gui_idle(struct radeon_device *rdev);
extern void r100_pm_misc(struct radeon_device *rdev);
extern void r100_pm_prepare(struct radeon_device *rdev);
extern void r100_pm_finish(struct radeon_device *rdev);
extern void r100_pm_init_profile(struct radeon_device *rdev);
extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r100_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 r100_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void r100_post_page_flip(struct radeon_device *rdev, int crtc);
extern void r100_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int r100_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r200,rv250,rs300,rv280
 */
extern int r200_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_gpu_pages,
			 struct radeon_fence **fence);
void r200_set_safe_registers(struct radeon_device *rdev);

/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_asic_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
extern void r300_set_reg_safe(struct radeon_device *rdev);
extern void r300_mc_program(struct radeon_device *rdev);
extern void r300_mc_init(struct radeon_device *rdev);
extern void r300_clock_startup(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
extern int rv370_pcie_gart_init(struct radeon_device *rdev);
extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
extern void rv370_pcie_gart_disable(struct radeon_device *rdev);

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
extern void r420_pm_init_profile(struct radeon_device *rdev);
extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
extern void r420_pipes_init(struct radeon_device *rdev);

/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int rs400_gart_init(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_adjust_size(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
extern int rs400_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs600.
 */
extern int rs600_asic_reset(struct radeon_device *rdev);
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
void rs600_irq_disable(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);
extern void rs600_pm_misc(struct radeon_device *rdev);
extern void rs600_pm_prepare(struct radeon_device *rdev);
extern void rs600_pm_finish(struct radeon_device *rdev);
extern void rs600_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 rs600_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void rs600_post_page_flip(struct radeon_device *rdev, int crtc);
void rs600_set_safe_registers(struct radeon_device *rdev);
extern void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int rs600_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
void rs690_line_buffer_adjust(struct radeon_device *rdev,
			      struct drm_display_mode *mode1,
			      struct drm_display_mode *mode2);
extern int rs690_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rv515
 */
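/* display/VGA register state saved by rv515_mc_stop() and restored by
 * rv515_mc_resume() while the memory controller is reprogrammed */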
struct rv515_mc_save {
	u32 vga_render_control;
	u32 vga_hdp_control;
	bool crtc_enabled[2];
};

int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
void rv515_vga_render_disable(struct radeon_device *rdev);
void rv515_set_safe_registers(struct radeon_device *rdev);
void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_clock_startup(struct radeon_device *rdev);
void rv515_debugfs(struct radeon_device *rdev);
int rv515_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
int r520_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
int r600_dma_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
void r600_semaphore_ring_emit(struct radeon_device *rdev,
			      struct radeon_ring *cp,
			      struct radeon_semaphore *semaphore,
			      bool emit_wait);
void r600_dma_fence_ring_emit(struct radeon_device *rdev,
			      struct radeon_fence *fence);
void r600_dma_semaphore_ring_emit(struct radeon_device *rdev,
				  struct radeon_ring *ring,
				  struct radeon_semaphore *semaphore,
				  bool emit_wait);
void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_asic_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_dma_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_gpu_pages, struct radeon_fence **fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset, uint64_t dst_offset,
		  unsigned num_gpu_pages, struct radeon_fence **fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
extern bool r600_gui_idle(struct radeon_device *rdev);
extern void r600_pm_misc(struct radeon_device *rdev);
extern void r600_pm_init_profile(struct radeon_device *rdev);
extern void rs780_pm_init_profile(struct radeon_device *rdev);
extern void r600_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r600_get_pcie_lanes(struct radeon_device *rdev);
bool r600_card_posted(struct radeon_device *rdev);
void r600_cp_stop(struct radeon_device *rdev);
int r600_cp_start(struct radeon_device *rdev);
void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size);
int r600_cp_resume(struct radeon_device *rdev);
void r600_cp_fini(struct radeon_device *rdev);
int r600_count_pipe_bits(uint32_t val);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
int r600_pcie_gart_init(struct radeon_device *rdev);
void r600_scratch_init(struct radeon_device *rdev);
int r600_blit_init(struct radeon_device *rdev);
void r600_blit_fini(struct radeon_device *rdev);
int r600_init_microcode(struct radeon_device *rdev);
/* r600 irq */
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_init(struct radeon_device *rdev);
void r600_irq_fini(struct radeon_device *rdev);
void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
int r600_irq_set(struct radeon_device *rdev);
void r600_irq_suspend(struct radeon_device *rdev);
void r600_disable_interrupts(struct radeon_device *rdev);
void r600_rlc_stop(struct radeon_device *rdev);
/* r600 audio */
int r600_audio_init(struct radeon_device *rdev);
void r600_audio_set_clock(struct drm_encoder *encoder, int clock);
struct r600_audio r600_audio_status(struct radeon_device *rdev);
void r600_audio_fini(struct radeon_device *rdev);
int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
void r600_hdmi_update_audio_settings(struct drm_encoder *encoder);
/* r600 blit */
int r600_blit_prepare_copy(struct radeon_device *rdev, unsigned num_gpu_pages,
			   struct radeon_fence **fence, struct radeon_sa_bo **vb,
			   struct radeon_semaphore **sem);
void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence **fence,
			 struct radeon_sa_bo *vb, struct radeon_semaphore *sem);
void r600_kms_blit_copy(struct radeon_device *rdev,
			u64 src_gpu_addr, u64 dst_gpu_addr,
			unsigned num_gpu_pages,
			struct radeon_sa_bo *vb);
uint64_t r600_get_gpu_clock(struct radeon_device *rdev);

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
void rv770_pm_misc(struct radeon_device *rdev);
u32 rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
void r700_cp_stop(struct radeon_device *rdev);
void r700_cp_fini(struct radeon_device *rdev);
int rv770_copy_dma(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_gpu_pages,
		   struct radeon_fence **fence);

/*
 * evergreen
 */
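/* display/VGA register state preserved across memory controller reprogramming
 * on evergreen and newer (one crtc_enabled flag per possible CRTC) */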
struct evergreen_mc_save {
	u32 vga_render_control;
	u32 vga_hdp_control;
	bool crtc_enabled[RADEON_MAX_CRTCS];
};

void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int evergreen_asic_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd);
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
int evergreen_irq_set(struct radeon_device *rdev);
int evergreen_irq_process(struct radeon_device *rdev);
extern int evergreen_cs_parse(struct radeon_cs_parser *p);
extern int evergreen_dma_cs_parse(struct radeon_cs_parser *p);
extern void evergreen_pm_misc(struct radeon_device *rdev);
extern void evergreen_pm_prepare(struct radeon_device *rdev);
extern void evergreen_pm_finish(struct radeon_device *rdev);
extern void sumo_pm_init_profile(struct radeon_device *rdev);
extern void btc_pm_init_profile(struct radeon_device *rdev);
extern void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 evergreen_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void evergreen_post_page_flip(struct radeon_device *rdev, int crtc);
extern void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc);
void evergreen_disable_interrupt_state(struct radeon_device *rdev);
int evergreen_blit_init(struct radeon_device *rdev);
int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
				   struct radeon_fence *fence);
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
				   struct radeon_ib *ib);
int evergreen_copy_dma(struct radeon_device *rdev,
		       uint64_t src_offset, uint64_t dst_offset,
		       unsigned num_gpu_pages,
		       struct radeon_fence **fence);

/*
 * cayman
 */
void cayman_fence_ring_emit(struct radeon_device *rdev,
			    struct radeon_fence *fence);
void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cayman_init(struct radeon_device *rdev);
void cayman_fini(struct radeon_device *rdev);
int cayman_suspend(struct radeon_device *rdev);
int cayman_resume(struct radeon_device *rdev);
int cayman_asic_reset(struct radeon_device *rdev);
void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cayman_vm_init(struct radeon_device *rdev);
void cayman_vm_fini(struct radeon_device *rdev);
void cayman_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
uint32_t cayman_vm_page_flags(struct radeon_device *rdev, uint32_t flags);
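/* write 'count' page table entries starting at GPU address 'pe'; each entry
 * maps 'addr' (advanced by 'incr' per entry) with the given access 'flags' */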
void cayman_vm_set_page(struct radeon_device *rdev,
			struct radeon_ib *ib,
			uint64_t pe,
			uint64_t addr, unsigned count,
			uint32_t incr, uint32_t flags);
int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
				struct radeon_ib *ib);
bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
void cayman_dma_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);

/* DCE6 - SI */
void dce6_bandwidth_update(struct radeon_device *rdev);

/*
 * si
 */
void si_fence_ring_emit(struct radeon_device *rdev,
			struct radeon_fence *fence);
void si_pcie_gart_tlb_flush(struct radeon_device *rdev);
int si_init(struct radeon_device *rdev);
void si_fini(struct radeon_device *rdev);
int si_suspend(struct radeon_device *rdev);
int si_resume(struct radeon_device *rdev);
bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int si_asic_reset(struct radeon_device *rdev);
void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int si_irq_set(struct radeon_device *rdev);
int si_irq_process(struct radeon_device *rdev);
int si_vm_init(struct radeon_device *rdev);
void si_vm_fini(struct radeon_device *rdev);
void si_vm_set_page(struct radeon_device *rdev,
		    struct radeon_ib *ib,
		    uint64_t pe,
		    uint64_t addr, unsigned count,
		    uint32_t incr, uint32_t flags);
void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
uint64_t si_get_gpu_clock(struct radeon_device *rdev);
int si_copy_dma(struct radeon_device *rdev,
		uint64_t src_offset, uint64_t dst_offset,
		unsigned num_gpu_pages,
		struct radeon_fence **fence);
void si_dma_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);

#endif