Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 2 | /* |
Mika Westerberg | 93f36ad | 2017-02-19 13:48:29 +0200 | [diff] [blame] | 3 | * Thunderbolt driver - Tunneling support |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 4 | * |
| 5 | * Copyright (c) 2014 Andreas Noever <andreas.noever@gmail.com> |
Mika Westerberg | 93f36ad | 2017-02-19 13:48:29 +0200 | [diff] [blame] | 6 | * Copyright (C) 2019, Intel Corporation |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 7 | */ |
| 8 | |
Mika Westerberg | 1752b9f | 2017-02-19 10:58:35 +0200 | [diff] [blame] | 9 | #ifndef TB_TUNNEL_H_ |
| 10 | #define TB_TUNNEL_H_ |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 11 | |
| 12 | #include "tb.h" |
| 13 | |
/**
 * enum tb_tunnel_type - Type of a Thunderbolt tunnel
 * @TB_TUNNEL_PCI: PCIe tunnel
 * @TB_TUNNEL_DP: DisplayPort tunnel
 * @TB_TUNNEL_DMA: DMA tunnel (host-to-host, e.g. networking over the
 *		   NHI — presumed from the nhi parameter of
 *		   tb_tunnel_alloc_dma(); confirm in tunnel.c)
 */
enum tb_tunnel_type {
	TB_TUNNEL_PCI,
	TB_TUNNEL_DP,
	TB_TUNNEL_DMA,
};
| 19 | |
/**
 * struct tb_tunnel - Tunnel between two ports
 * @tb: Pointer to the domain
 * @src_port: Source port of the tunnel
 * @dst_port: Destination port of the tunnel. For discovered incomplete
 *	      tunnels this may be %NULL or a null adapter port instead.
 * @paths: All paths required by the tunnel
 * @npaths: Number of paths in @paths
 * @init: Optional tunnel specific initialization. Returns an int —
 *	  presumably 0 on success / negative errno on failure; confirm
 *	  against the callers in tunnel.c.
 * @activate: Optional tunnel specific activation/deactivation; the bool
 *	      argument selects activate (true) vs. deactivate (false)
 * @list: Tunnels are linked using this field
 * @type: Type of the tunnel (see &enum tb_tunnel_type)
 */
struct tb_tunnel {
	struct tb *tb;
	struct tb_port *src_port;
	struct tb_port *dst_port;
	struct tb_path **paths;
	size_t npaths;
	int (*init)(struct tb_tunnel *tunnel);
	int (*activate)(struct tb_tunnel *tunnel, bool activate);
	struct list_head list;
	enum tb_tunnel_type type;
};
| 44 | |
/*
 * Tunnel discovery and allocation (implemented in tunnel.c).
 * NOTE(review): the discover_*() variants presumably reconstruct a
 * tunnel object from paths already programmed into the hardware (e.g.
 * by boot firmware), while alloc_*() build a new, not-yet-activated
 * tunnel; all appear to return the tunnel or %NULL on failure — confirm
 * against the definitions.
 */
struct tb_tunnel *tb_tunnel_discover_pci(struct tb *tb, struct tb_port *down);
struct tb_tunnel *tb_tunnel_alloc_pci(struct tb *tb, struct tb_port *up,
				      struct tb_port *down);
struct tb_tunnel *tb_tunnel_discover_dp(struct tb *tb, struct tb_port *in);
struct tb_tunnel *tb_tunnel_alloc_dp(struct tb *tb, struct tb_port *in,
				     struct tb_port *out);
struct tb_tunnel *tb_tunnel_alloc_dma(struct tb *tb, struct tb_port *nhi,
				      struct tb_port *dst, int transmit_ring,
				      int transmit_path, int receive_ring,
				      int receive_path);
Mika Westerberg | 4f807e4 | 2018-09-17 16:30:49 +0300 | [diff] [blame] | 55 | |
/*
 * Tunnel lifecycle (implemented in tunnel.c). The int-returning
 * activate/restart presumably follow the kernel convention of
 * 0 / negative errno — confirm against the definitions.
 */
void tb_tunnel_free(struct tb_tunnel *tunnel);
int tb_tunnel_activate(struct tb_tunnel *tunnel);
int tb_tunnel_restart(struct tb_tunnel *tunnel);
void tb_tunnel_deactivate(struct tb_tunnel *tunnel);
bool tb_tunnel_is_invalid(struct tb_tunnel *tunnel);
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 61 | |
Mika Westerberg | 4f807e4 | 2018-09-17 16:30:49 +0300 | [diff] [blame] | 62 | static inline bool tb_tunnel_is_pci(const struct tb_tunnel *tunnel) |
| 63 | { |
| 64 | return tunnel->type == TB_TUNNEL_PCI; |
| 65 | } |
| 66 | |
| 67 | static inline bool tb_tunnel_is_dp(const struct tb_tunnel *tunnel) |
| 68 | { |
| 69 | return tunnel->type == TB_TUNNEL_DP; |
| 70 | } |
| 71 | |
Mika Westerberg | 44242d6 | 2018-09-28 16:35:32 +0300 | [diff] [blame] | 72 | static inline bool tb_tunnel_is_dma(const struct tb_tunnel *tunnel) |
| 73 | { |
| 74 | return tunnel->type == TB_TUNNEL_DMA; |
| 75 | } |
| 76 | |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 77 | #endif |
| 78 | |