/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Thunderbolt driver - Tunneling support
 *
 * Copyright (c) 2014 Andreas Noever <andreas.noever@gmail.com>
 * Copyright (C) 2019, Intel Corporation
 */

#ifndef TB_TUNNEL_H_
#define TB_TUNNEL_H_

#include "tb.h"

enum tb_tunnel_type {
	TB_TUNNEL_PCI,
	TB_TUNNEL_DP,
	TB_TUNNEL_DMA,
	TB_TUNNEL_USB3,
};

/**
 * struct tb_tunnel - Tunnel between two ports
 * @tb: Pointer to the domain
 * @src_port: Source port of the tunnel
 * @dst_port: Destination port of the tunnel. For discovered, incomplete
 *	      tunnels this may be %NULL or a null adapter port.
 * @paths: All paths required by the tunnel
 * @npaths: Number of paths in @paths
 * @init: Optional tunnel specific initialization
 * @activate: Optional tunnel specific activation/deactivation
 * @consumed_bandwidth: Return how much bandwidth the tunnel consumes
 * @release_unused_bandwidth: Release all unused bandwidth
 * @reclaim_available_bandwidth: Reclaim available bandwidth back for the
 *				 tunnel
 * @list: Tunnels are linked using this field
 * @type: Type of the tunnel
 * @max_up: Maximum upstream bandwidth (Mb/s) available for the tunnel.
 *	    Only set if the bandwidth needs to be limited.
 * @max_down: Maximum downstream bandwidth (Mb/s) available for the tunnel.
 *	      Only set if the bandwidth needs to be limited.
 * @allocated_up: Allocated upstream bandwidth (only for USB3)
 * @allocated_down: Allocated downstream bandwidth (only for USB3)
 */
struct tb_tunnel {
	struct tb *tb;
	struct tb_port *src_port;
	struct tb_port *dst_port;
	struct tb_path **paths;
	size_t npaths;
	int (*init)(struct tb_tunnel *tunnel);
	int (*activate)(struct tb_tunnel *tunnel, bool activate);
	int (*consumed_bandwidth)(struct tb_tunnel *tunnel, int *consumed_up,
				  int *consumed_down);
	int (*release_unused_bandwidth)(struct tb_tunnel *tunnel);
	void (*reclaim_available_bandwidth)(struct tb_tunnel *tunnel,
					    int *available_up,
					    int *available_down);
	struct list_head list;
	enum tb_tunnel_type type;
	int max_up;
	int max_down;
	int allocated_up;
	int allocated_down;
};
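
/*
 * Illustrative sketch only: code implementing a tunnel type is expected
 * to fill in the optional callbacks above when it allocates the tunnel
 * (the canonical implementations live next to this header in tunnel.c).
 * The callback names used here are hypothetical:
 *
 *	tunnel->init = my_usb3_init;
 *	tunnel->activate = my_usb3_activate;
 *	tunnel->consumed_bandwidth = my_usb3_consumed_bandwidth;
 */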

struct tb_tunnel *tb_tunnel_discover_pci(struct tb *tb, struct tb_port *down);
struct tb_tunnel *tb_tunnel_alloc_pci(struct tb *tb, struct tb_port *up,
				      struct tb_port *down);
struct tb_tunnel *tb_tunnel_discover_dp(struct tb *tb, struct tb_port *in);
struct tb_tunnel *tb_tunnel_alloc_dp(struct tb *tb, struct tb_port *in,
				     struct tb_port *out, int max_up,
				     int max_down);
struct tb_tunnel *tb_tunnel_alloc_dma(struct tb *tb, struct tb_port *nhi,
				      struct tb_port *dst, int transmit_ring,
				      int transmit_path, int receive_ring,
				      int receive_path);
struct tb_tunnel *tb_tunnel_discover_usb3(struct tb *tb, struct tb_port *down);
struct tb_tunnel *tb_tunnel_alloc_usb3(struct tb *tb, struct tb_port *up,
				       struct tb_port *down, int max_up,
				       int max_down);

void tb_tunnel_free(struct tb_tunnel *tunnel);
int tb_tunnel_activate(struct tb_tunnel *tunnel);
int tb_tunnel_restart(struct tb_tunnel *tunnel);
void tb_tunnel_deactivate(struct tb_tunnel *tunnel);
bool tb_tunnel_is_invalid(struct tb_tunnel *tunnel);
bool tb_tunnel_port_on_path(const struct tb_tunnel *tunnel,
			    const struct tb_port *port);
int tb_tunnel_consumed_bandwidth(struct tb_tunnel *tunnel, int *consumed_up,
				 int *consumed_down);
int tb_tunnel_release_unused_bandwidth(struct tb_tunnel *tunnel);
void tb_tunnel_reclaim_available_bandwidth(struct tb_tunnel *tunnel,
					   int *available_up,
					   int *available_down);
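
/*
 * Typical call flow, as an illustrative sketch only (the canonical user
 * is the software connection manager). Here "tb" is assumed to be the
 * owning domain and "up"/"down" valid PCIe adapter ports:
 *
 *	struct tb_tunnel *tunnel;
 *
 *	tunnel = tb_tunnel_alloc_pci(tb, up, down);
 *	if (!tunnel)
 *		return -ENOMEM;
 *
 *	if (tb_tunnel_activate(tunnel)) {
 *		tb_tunnel_free(tunnel);
 *		return -EIO;
 *	}
 *
 *	... tunnel carries traffic ...
 *
 *	tb_tunnel_deactivate(tunnel);
 *	tb_tunnel_free(tunnel);
 */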

static inline bool tb_tunnel_is_pci(const struct tb_tunnel *tunnel)
{
	return tunnel->type == TB_TUNNEL_PCI;
}

static inline bool tb_tunnel_is_dp(const struct tb_tunnel *tunnel)
{
	return tunnel->type == TB_TUNNEL_DP;
}

static inline bool tb_tunnel_is_dma(const struct tb_tunnel *tunnel)
{
	return tunnel->type == TB_TUNNEL_DMA;
}

static inline bool tb_tunnel_is_usb3(const struct tb_tunnel *tunnel)
{
	return tunnel->type == TB_TUNNEL_USB3;
}
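
/*
 * These helpers let callers dispatch on the tunnel type without reading
 * @type directly, e.g. (with a hypothetical helper function):
 *
 *	if (tb_tunnel_is_dp(tunnel))
 *		recompute_dp_bandwidth(tunnel);
 */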

#endif