/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Thunderbolt driver - Tunneling support
 *
 * Copyright (c) 2014 Andreas Noever <andreas.noever@gmail.com>
 * Copyright (C) 2019, Intel Corporation
 */

#ifndef TB_TUNNEL_H_
#define TB_TUNNEL_H_

#include "tb.h"

enum tb_tunnel_type {
	TB_TUNNEL_PCI,
	TB_TUNNEL_DP,
	TB_TUNNEL_DMA,
	TB_TUNNEL_USB3,
};
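
/*
 * Illustrative sketch (not part of the original header): since the
 * enumeration is dense from zero, it can index a designated-initializer
 * table, e.g. when naming tunnels for debug output. The helper name is
 * hypothetical.
 */
static inline const char *example_tunnel_type_name(enum tb_tunnel_type type)
{
	static const char * const names[] = {
		[TB_TUNNEL_PCI] = "PCI",
		[TB_TUNNEL_DP] = "DP",
		[TB_TUNNEL_DMA] = "DMA",
		[TB_TUNNEL_USB3] = "USB3",
	};

	return names[type];
}
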
/**
 * struct tb_tunnel - Tunnel between two ports
 * @tb: Pointer to the domain
 * @src_port: Source port of the tunnel
 * @dst_port: Destination port of the tunnel. For discovered, incomplete
 *	      tunnels this may be %NULL or a null adapter port instead.
 * @paths: All paths required by the tunnel
 * @npaths: Number of paths in @paths
 * @init: Optional tunnel specific initialization
 * @deinit: Optional tunnel specific de-initialization
 * @activate: Optional tunnel specific activation/deactivation
 * @consumed_bandwidth: Return how much bandwidth the tunnel consumes
 * @release_unused_bandwidth: Release all unused bandwidth
 * @reclaim_available_bandwidth: Reclaim available bandwidth back for the tunnel
 * @list: Tunnels are linked using this field
 * @type: Type of the tunnel
 * @max_up: Maximum upstream bandwidth (Mb/s) available for the tunnel.
 *	    Only set if the bandwidth needs to be limited.
 * @max_down: Maximum downstream bandwidth (Mb/s) available for the tunnel.
 *	      Only set if the bandwidth needs to be limited.
 * @allocated_up: Allocated upstream bandwidth (only for USB3)
 * @allocated_down: Allocated downstream bandwidth (only for USB3)
 */
struct tb_tunnel {
	struct tb *tb;
	struct tb_port *src_port;
	struct tb_port *dst_port;
	struct tb_path **paths;
	size_t npaths;
	int (*init)(struct tb_tunnel *tunnel);
	void (*deinit)(struct tb_tunnel *tunnel);
	int (*activate)(struct tb_tunnel *tunnel, bool activate);
	int (*consumed_bandwidth)(struct tb_tunnel *tunnel, int *consumed_up,
				  int *consumed_down);
	int (*release_unused_bandwidth)(struct tb_tunnel *tunnel);
	void (*reclaim_available_bandwidth)(struct tb_tunnel *tunnel,
					    int *available_up,
					    int *available_down);
	struct list_head list;
	enum tb_tunnel_type type;
	int max_up;
	int max_down;
	int allocated_up;
	int allocated_down;
};
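
/*
 * Illustrative sketch (not part of the original header): the optional
 * callbacks above are typically dispatched with a fallback when a
 * tunnel type does not implement them. The function name is
 * hypothetical; the real dispatch lives in the tunneling code itself.
 */
static inline int example_consumed_bandwidth(struct tb_tunnel *tunnel,
					     int *consumed_up,
					     int *consumed_down)
{
	/* Default: the tunnel consumes no bandwidth */
	*consumed_up = 0;
	*consumed_down = 0;

	if (tunnel->consumed_bandwidth)
		return tunnel->consumed_bandwidth(tunnel, consumed_up,
						  consumed_down);
	return 0;
}
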
struct tb_tunnel *tb_tunnel_discover_pci(struct tb *tb, struct tb_port *down,
					 bool alloc_hopid);
struct tb_tunnel *tb_tunnel_alloc_pci(struct tb *tb, struct tb_port *up,
				      struct tb_port *down);
struct tb_tunnel *tb_tunnel_discover_dp(struct tb *tb, struct tb_port *in,
					bool alloc_hopid);
struct tb_tunnel *tb_tunnel_alloc_dp(struct tb *tb, struct tb_port *in,
				     struct tb_port *out, int max_up,
				     int max_down);
struct tb_tunnel *tb_tunnel_alloc_dma(struct tb *tb, struct tb_port *nhi,
				      struct tb_port *dst, int transmit_path,
				      int transmit_ring, int receive_path,
				      int receive_ring);
bool tb_tunnel_match_dma(const struct tb_tunnel *tunnel, int transmit_path,
			 int transmit_ring, int receive_path, int receive_ring);
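
/*
 * Illustrative sketch (not part of the original header), assuming that
 * negative values passed to tb_tunnel_match_dma() act as "don't care"
 * wildcards: find an existing DMA tunnel that uses a given transmit
 * ring. The function name and the tunnel list are hypothetical.
 */
static inline struct tb_tunnel *
example_find_dma_tunnel(struct list_head *tunnel_list, int transmit_ring)
{
	struct tb_tunnel *tunnel;

	list_for_each_entry(tunnel, tunnel_list, list) {
		if (tunnel->type != TB_TUNNEL_DMA)
			continue;
		if (tb_tunnel_match_dma(tunnel, -1, transmit_ring, -1, -1))
			return tunnel;
	}
	return NULL;
}
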
struct tb_tunnel *tb_tunnel_discover_usb3(struct tb *tb, struct tb_port *down,
					  bool alloc_hopid);
struct tb_tunnel *tb_tunnel_alloc_usb3(struct tb *tb, struct tb_port *up,
				       struct tb_port *down, int max_up,
				       int max_down);

void tb_tunnel_free(struct tb_tunnel *tunnel);
int tb_tunnel_activate(struct tb_tunnel *tunnel);
int tb_tunnel_restart(struct tb_tunnel *tunnel);
void tb_tunnel_deactivate(struct tb_tunnel *tunnel);
bool tb_tunnel_is_invalid(struct tb_tunnel *tunnel);
bool tb_tunnel_port_on_path(const struct tb_tunnel *tunnel,
			    const struct tb_port *port);
int tb_tunnel_consumed_bandwidth(struct tb_tunnel *tunnel, int *consumed_up,
				 int *consumed_down);
int tb_tunnel_release_unused_bandwidth(struct tb_tunnel *tunnel);
void tb_tunnel_reclaim_available_bandwidth(struct tb_tunnel *tunnel,
					   int *available_up,
					   int *available_down);
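
/*
 * Illustrative sketch (not part of the original header): the usual
 * allocate/activate/track sequence for a PCIe tunnel, assuming the
 * caller has already resolved the upstream and downstream PCIe adapter
 * ports. The function name and the tunnel list are hypothetical.
 */
static inline int example_tunnel_pcie(struct tb *tb, struct tb_port *up,
				      struct tb_port *down,
				      struct list_head *tunnel_list)
{
	struct tb_tunnel *tunnel;

	tunnel = tb_tunnel_alloc_pci(tb, up, down);
	if (!tunnel)
		return -ENOMEM;

	if (tb_tunnel_activate(tunnel)) {
		/* Activation failed, release the allocated paths */
		tb_tunnel_free(tunnel);
		return -EIO;
	}

	list_add_tail(&tunnel->list, tunnel_list);
	return 0;
}
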
static inline bool tb_tunnel_is_pci(const struct tb_tunnel *tunnel)
{
	return tunnel->type == TB_TUNNEL_PCI;
}

static inline bool tb_tunnel_is_dp(const struct tb_tunnel *tunnel)
{
	return tunnel->type == TB_TUNNEL_DP;
}

static inline bool tb_tunnel_is_dma(const struct tb_tunnel *tunnel)
{
	return tunnel->type == TB_TUNNEL_DMA;
}

static inline bool tb_tunnel_is_usb3(const struct tb_tunnel *tunnel)
{
	return tunnel->type == TB_TUNNEL_USB3;
}
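
/*
 * Illustrative sketch (not part of the original header): when a new
 * tunnel needs more bandwidth, a connection manager can release the
 * unused bandwidth of the existing USB3 tunnels, recompute what is now
 * available, and then hand the leftover back to them. The function
 * name, the tunnel list, and the recomputation step are hypothetical.
 */
static inline void example_rebalance(struct list_head *tunnel_list,
				     int *available_up, int *available_down)
{
	struct tb_tunnel *tunnel;

	list_for_each_entry(tunnel, tunnel_list, list) {
		if (tb_tunnel_is_usb3(tunnel))
			tb_tunnel_release_unused_bandwidth(tunnel);
	}

	/* Recompute *available_up / *available_down along the path here */

	list_for_each_entry(tunnel, tunnel_list, list)
		tb_tunnel_reclaim_available_bandwidth(tunnel, available_up,
						      available_down);
}
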
#endif /* TB_TUNNEL_H_ */