Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 2 | /* |
Mika Westerberg | 93f36ad | 2017-02-19 13:48:29 +0200 | [diff] [blame] | 3 | * Thunderbolt driver - Tunneling support |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 4 | * |
| 5 | * Copyright (c) 2014 Andreas Noever <andreas.noever@gmail.com> |
Mika Westerberg | 93f36ad | 2017-02-19 13:48:29 +0200 | [diff] [blame] | 6 | * Copyright (C) 2019, Intel Corporation |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 7 | */ |
| 8 | |
Mika Westerberg | 1752b9f | 2017-02-19 10:58:35 +0200 | [diff] [blame] | 9 | #ifndef TB_TUNNEL_H_ |
| 10 | #define TB_TUNNEL_H_ |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 11 | |
| 12 | #include "tb.h" |
| 13 | |
/**
 * enum tb_tunnel_type - Type of a tunnel
 * @TB_TUNNEL_PCI: PCI Express tunnel
 * @TB_TUNNEL_DP: DisplayPort tunnel
 * @TB_TUNNEL_DMA: DMA tunnel between the host NHI and a remote port
 * @TB_TUNNEL_USB3: USB3 tunnel
 */
enum tb_tunnel_type {
	TB_TUNNEL_PCI,
	TB_TUNNEL_DP,
	TB_TUNNEL_DMA,
	TB_TUNNEL_USB3,
};
| 20 | |
/**
 * struct tb_tunnel - Tunnel between two ports
 * @tb: Pointer to the domain
 * @src_port: Source port of the tunnel
 * @dst_port: Destination port of the tunnel. For discovered incomplete
 *	      tunnels may be %NULL or null adapter port instead.
 * @paths: All paths required by the tunnel
 * @npaths: Number of paths in @paths
 * @init: Optional tunnel specific initialization
 * @activate: Optional tunnel specific activation/deactivation
 * @consumed_bandwidth: Return how much bandwidth the tunnel consumes
 *			(upstream and downstream, through @consumed_up and
 *			@consumed_down)
 * @list: Tunnels are linked using this field
 * @type: Type of the tunnel
 * @max_bw: Maximum bandwidth (Mb/s) available for the tunnel (only for DP).
 *	    Only set if the bandwidth needs to be limited.
 */
struct tb_tunnel {
	struct tb *tb;
	struct tb_port *src_port;
	struct tb_port *dst_port;
	struct tb_path **paths;
	size_t npaths;
	int (*init)(struct tb_tunnel *tunnel);
	int (*activate)(struct tb_tunnel *tunnel, bool activate);
	int (*consumed_bandwidth)(struct tb_tunnel *tunnel, int *consumed_up,
				  int *consumed_down);
	struct list_head list;
	enum tb_tunnel_type type;
	unsigned int max_bw;
};
| 51 | |
/* PCIe tunnels */
struct tb_tunnel *tb_tunnel_discover_pci(struct tb *tb, struct tb_port *down);
struct tb_tunnel *tb_tunnel_alloc_pci(struct tb *tb, struct tb_port *up,
				      struct tb_port *down);
/* DisplayPort tunnels */
struct tb_tunnel *tb_tunnel_discover_dp(struct tb *tb, struct tb_port *in);
struct tb_tunnel *tb_tunnel_alloc_dp(struct tb *tb, struct tb_port *in,
				     struct tb_port *out, int max_bw);
/* DMA tunnel between the host NHI and a remote port */
struct tb_tunnel *tb_tunnel_alloc_dma(struct tb *tb, struct tb_port *nhi,
				      struct tb_port *dst, int transmit_ring,
				      int transmit_path, int receive_ring,
				      int receive_path);
/* USB3 tunnels */
struct tb_tunnel *tb_tunnel_discover_usb3(struct tb *tb, struct tb_port *down);
struct tb_tunnel *tb_tunnel_alloc_usb3(struct tb *tb, struct tb_port *up,
				       struct tb_port *down);

/* Operations common to all tunnel types */
void tb_tunnel_free(struct tb_tunnel *tunnel);
int tb_tunnel_activate(struct tb_tunnel *tunnel);
int tb_tunnel_restart(struct tb_tunnel *tunnel);
void tb_tunnel_deactivate(struct tb_tunnel *tunnel);
bool tb_tunnel_is_invalid(struct tb_tunnel *tunnel);
bool tb_tunnel_switch_on_path(const struct tb_tunnel *tunnel,
			      const struct tb_switch *sw);
int tb_tunnel_consumed_bandwidth(struct tb_tunnel *tunnel, int *consumed_up,
				 int *consumed_down);
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 75 | |
Mika Westerberg | 4f807e4 | 2018-09-17 16:30:49 +0300 | [diff] [blame] | 76 | static inline bool tb_tunnel_is_pci(const struct tb_tunnel *tunnel) |
| 77 | { |
| 78 | return tunnel->type == TB_TUNNEL_PCI; |
| 79 | } |
| 80 | |
| 81 | static inline bool tb_tunnel_is_dp(const struct tb_tunnel *tunnel) |
| 82 | { |
| 83 | return tunnel->type == TB_TUNNEL_DP; |
| 84 | } |
| 85 | |
Mika Westerberg | 44242d6 | 2018-09-28 16:35:32 +0300 | [diff] [blame] | 86 | static inline bool tb_tunnel_is_dma(const struct tb_tunnel *tunnel) |
| 87 | { |
| 88 | return tunnel->type == TB_TUNNEL_DMA; |
| 89 | } |
| 90 | |
Rajmohan Mani | e6f8185 | 2019-12-17 15:33:44 +0300 | [diff] [blame] | 91 | static inline bool tb_tunnel_is_usb3(const struct tb_tunnel *tunnel) |
| 92 | { |
| 93 | return tunnel->type == TB_TUNNEL_USB3; |
| 94 | } |
| 95 | |
Andreas Noever | 3364f0c | 2014-06-03 22:04:08 +0200 | [diff] [blame] | 96 | #endif |
| 97 | |