/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _INET_ECN_H_
#define _INET_ECN_H_

#include <linux/ip.h>
#include <linux/skbuff.h>
#include <linux/if_vlan.h>

#include <net/inet_sock.h>
#include <net/dsfield.h>
#include <net/checksum.h>

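/* Codepoints of the two ECN bits in the IPv4 TOS / IPv6 traffic class
 * byte (RFC 3168): 00 = Not-ECT, 01 = ECT(1), 10 = ECT(0), 11 = CE.
 */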
enum {
	INET_ECN_NOT_ECT = 0,
	INET_ECN_ECT_1 = 1,
	INET_ECN_ECT_0 = 2,
	INET_ECN_CE = 3,
	INET_ECN_MASK = 3,
};

extern int sysctl_tunnel_ecn_log;

static inline int INET_ECN_is_ce(__u8 dsfield)
{
	return (dsfield & INET_ECN_MASK) == INET_ECN_CE;
}

static inline int INET_ECN_is_not_ect(__u8 dsfield)
{
	return (dsfield & INET_ECN_MASK) == INET_ECN_NOT_ECT;
}

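/* Note: only the ECT(0) bit is tested, so ECT(0) and CE count as
 * ECN-capable while ECT(1) does not.
 */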
static inline int INET_ECN_is_capable(__u8 dsfield)
{
	return dsfield & INET_ECN_ECT_0;
}

/*
 * RFC 3168 9.1.1
 *  The full-functionality option for ECN encapsulation is to copy the
 *  ECN codepoint of the inside header to the outside header on
 *  encapsulation if the inside header is not-ECT or ECT, and to set the
 *  ECN codepoint of the outside header to ECT(0) if the ECN codepoint of
 *  the inside header is CE.
 */
static inline __u8 INET_ECN_encapsulate(__u8 outer, __u8 inner)
{
	outer &= ~INET_ECN_MASK;
	outer |= !INET_ECN_is_ce(inner) ? (inner & INET_ECN_MASK) :
					  INET_ECN_ECT_0;
	return outer;
}

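/* Mark a socket as ECN-capable: set ECT(0) in the IPv4 TOS and, for
 * dual-stack sockets, in the IPv6 traffic class. INET_ECN_dontxmit()
 * clears the ECN bits again.
 */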
static inline void INET_ECN_xmit(struct sock *sk)
{
	inet_sk(sk)->tos |= INET_ECN_ECT_0;
	if (inet6_sk(sk) != NULL)
		inet6_sk(sk)->tclass |= INET_ECN_ECT_0;
}

static inline void INET_ECN_dontxmit(struct sock *sk)
{
	inet_sk(sk)->tos &= ~INET_ECN_MASK;
	if (inet6_sk(sk) != NULL)
		inet6_sk(sk)->tclass &= ~INET_ECN_MASK;
}

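/* (label) is the first 32-bit word of the IPv6 header (version, traffic
 * class and flow label) in network byte order; the two ECN bits of the
 * traffic class sit at bits 21:20 of the host-order value, hence the
 * << 20 shifts below.
 */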
#define IP6_ECN_flow_init(label) do {				\
		(label) &= ~htonl(INET_ECN_MASK << 20);		\
	} while (0)

#define IP6_ECN_flow_xmit(sk, label) do {			\
		if (INET_ECN_is_capable(inet6_sk(sk)->tclass))	\
			(label) |= htonl(INET_ECN_ECT_0 << 20);	\
	} while (0)

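/* Set CE on an IPv4 header and patch the header checksum incrementally.
 * Returns 1 if the header is (or already was) CE, 0 if the packet is
 * Not-ECT and therefore must not be marked.
 */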
static inline int IP_ECN_set_ce(struct iphdr *iph)
{
	u32 ecn = (iph->tos + 1) & INET_ECN_MASK;
	__be16 check_add;

	/*
	 * After the last operation we have (in binary):
	 * INET_ECN_NOT_ECT => 01
	 * INET_ECN_ECT_1   => 10
	 * INET_ECN_ECT_0   => 11
	 * INET_ECN_CE      => 00
	 */
	if (!(ecn & 2))
		return !ecn;

	/*
	 * The following gives us:
	 * INET_ECN_ECT_1 => check += htons(0xFFFD)
	 * INET_ECN_ECT_0 => check += htons(0xFFFE)
	 */
	check_add = (__force __be16)((__force u16)htons(0xFFFB) +
				     (__force u16)htons(ecn));

	iph->check = csum16_add(iph->check, check_add);
	iph->tos |= INET_ECN_CE;
	return 1;
}

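/* Rewrite an ECT(0) marking to ECT(1). XOR-ing both ECN bits turns 10
 * into 01, which lowers the tos byte (the low byte of the first 16-bit
 * header word) by one, so the stored checksum complement is raised by
 * one to compensate.
 */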
static inline int IP_ECN_set_ect1(struct iphdr *iph)
{
	if ((iph->tos & INET_ECN_MASK) != INET_ECN_ECT_0)
		return 0;

	iph->check = csum16_add(iph->check, htons(0x1));
	iph->tos ^= INET_ECN_MASK;
	return 1;
}

static inline void IP_ECN_clear(struct iphdr *iph)
{
	iph->tos &= ~INET_ECN_MASK;
}

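/* Copy the DSCP part of @dscp into the inner IPv4 header while leaving
 * the inner ECN bits untouched.
 */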
static inline void ipv4_copy_dscp(unsigned int dscp, struct iphdr *inner)
{
	dscp &= ~INET_ECN_MASK;
	ipv4_change_dsfield(inner, INET_ECN_MASK, dscp);
}

struct ipv6hdr;

/* Note:
 * IP_ECN_set_ce() has to tweak the IPv4 header checksum when setting CE,
 * so the two changes cancel out and skb->csum is unaffected if/when
 * CHECKSUM_COMPLETE is in use.
 * In the IPv6 case no header checksum compensates for the change,
 * so we have to update skb->csum ourselves.
 */
static inline int IP6_ECN_set_ce(struct sk_buff *skb, struct ipv6hdr *iph)
{
	__be32 from, to;

	if (INET_ECN_is_not_ect(ipv6_get_dsfield(iph)))
		return 0;

	from = *(__be32 *)iph;
	to = from | htonl(INET_ECN_CE << 20);
	*(__be32 *)iph = to;
	if (skb->ip_summed == CHECKSUM_COMPLETE)
		skb->csum = csum_add(csum_sub(skb->csum, (__force __wsum)from),
				     (__force __wsum)to);
	return 1;
}

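/* IPv6 counterpart of IP_ECN_set_ect1(): flip an ECT(0) traffic class to
 * ECT(1) and, for CHECKSUM_COMPLETE, fold the changed first word into
 * skb->csum.
 */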
static inline int IP6_ECN_set_ect1(struct sk_buff *skb, struct ipv6hdr *iph)
{
	__be32 from, to;

	if ((ipv6_get_dsfield(iph) & INET_ECN_MASK) != INET_ECN_ECT_0)
		return 0;

	from = *(__be32 *)iph;
	to = from ^ htonl(INET_ECN_MASK << 20);
	*(__be32 *)iph = to;
	if (skb->ip_summed == CHECKSUM_COMPLETE)
		skb->csum = csum_add(csum_sub(skb->csum, (__force __wsum)from),
				     (__force __wsum)to);
	return 1;
}

static inline void ipv6_copy_dscp(unsigned int dscp, struct ipv6hdr *inner)
{
	dscp &= ~INET_ECN_MASK;
	ipv6_change_dsfield(inner, INET_ECN_MASK, dscp);
}

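/* Set CE on the packet's network header, IPv4 or IPv6. The full header
 * must lie within the linear skb data. Returns 1 if CE was set (or was
 * already set), 0 otherwise.
 */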
static inline int INET_ECN_set_ce(struct sk_buff *skb)
{
	switch (skb_protocol(skb, true)) {
	case cpu_to_be16(ETH_P_IP):
		if (skb_network_header(skb) + sizeof(struct iphdr) <=
		    skb_tail_pointer(skb))
			return IP_ECN_set_ce(ip_hdr(skb));
		break;

	case cpu_to_be16(ETH_P_IPV6):
		if (skb_network_header(skb) + sizeof(struct ipv6hdr) <=
		    skb_tail_pointer(skb))
			return IP6_ECN_set_ce(skb, ipv6_hdr(skb));
		break;
	}

	return 0;
}

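/* Return the TOS / traffic-class byte of an IPv4 or IPv6 packet, or -1
 * if the network header cannot be pulled.
 */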
static inline int skb_get_dsfield(struct sk_buff *skb)
{
	switch (skb_protocol(skb, true)) {
	case cpu_to_be16(ETH_P_IP):
		if (!pskb_network_may_pull(skb, sizeof(struct iphdr)))
			break;
		return ipv4_get_dsfield(ip_hdr(skb));

	case cpu_to_be16(ETH_P_IPV6):
		if (!pskb_network_may_pull(skb, sizeof(struct ipv6hdr)))
			break;
		return ipv6_get_dsfield(ipv6_hdr(skb));
	}

	return -1;
}

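/* Like INET_ECN_set_ce(), but rewrites ECT(0) to ECT(1) in the IPv4 or
 * IPv6 header.
 */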
static inline int INET_ECN_set_ect1(struct sk_buff *skb)
{
	switch (skb_protocol(skb, true)) {
	case cpu_to_be16(ETH_P_IP):
		if (skb_network_header(skb) + sizeof(struct iphdr) <=
		    skb_tail_pointer(skb))
			return IP_ECN_set_ect1(ip_hdr(skb));
		break;

	case cpu_to_be16(ETH_P_IPV6):
		if (skb_network_header(skb) + sizeof(struct ipv6hdr) <=
		    skb_tail_pointer(skb))
			return IP6_ECN_set_ect1(skb, ipv6_hdr(skb));
		break;
	}

	return 0;
}

/*
 * RFC 6040 4.2
 *  To decapsulate the inner header at the tunnel egress, a compliant
 *  tunnel egress MUST set the outgoing ECN field to the codepoint at the
 *  intersection of the appropriate arriving inner header (row) and outer
 *  header (column) in Figure 4
 *
 *      +---------+------------------------------------------------+
 *      |Arriving |            Arriving Outer Header               |
 *      |   Inner +---------+------------+------------+------------+
 *      |  Header | Not-ECT | ECT(0)     | ECT(1)     |     CE     |
 *      +---------+---------+------------+------------+------------+
 *      | Not-ECT | Not-ECT |Not-ECT(!!!)|Not-ECT(!!!)| <drop>(!!!)|
 *      |  ECT(0) |  ECT(0) | ECT(0)     | ECT(1)     |     CE     |
 *      |  ECT(1) |  ECT(1) | ECT(1) (!) |   ECT(1)   |     CE     |
 *      |    CE   |      CE |     CE     |     CE(!!!)|     CE     |
 *      +---------+---------+------------+------------+------------+
 *
 *             Figure 4: New IP in IP Decapsulation Behaviour
 *
 * returns 0 on success
 *         1 if something is broken and should be logged (!!! above)
 *         2 if packet should be dropped
 */
static inline int __INET_ECN_decapsulate(__u8 outer, __u8 inner, bool *set_ce)
{
	if (INET_ECN_is_not_ect(inner)) {
		switch (outer & INET_ECN_MASK) {
		case INET_ECN_NOT_ECT:
			return 0;
		case INET_ECN_ECT_0:
		case INET_ECN_ECT_1:
			return 1;
		case INET_ECN_CE:
			return 2;
		}
	}

	*set_ce = INET_ECN_is_ce(outer);
	return 0;
}

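/* Apply Figure 4 to the inner packet: propagate CE from the outer header
 * when __INET_ECN_decapsulate() asks for it, and rewrite an inner ECT(0)
 * to ECT(1) when the outer header arrived as ECT(1).
 */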
static inline int INET_ECN_decapsulate(struct sk_buff *skb,
				       __u8 outer, __u8 inner)
{
	bool set_ce = false;
	int rc;

	rc = __INET_ECN_decapsulate(outer, inner, &set_ce);
	if (!rc) {
		if (set_ce)
			INET_ECN_set_ce(skb);
		else if ((outer & INET_ECN_MASK) == INET_ECN_ECT_1)
			INET_ECN_set_ect1(skb);
	}

	return rc;
}

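/* Decapsulation helpers: @oiph / @oipv6h is the outer header, while skb
 * already points at the inner (IPv4 or IPv6) packet.
 */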
static inline int IP_ECN_decapsulate(const struct iphdr *oiph,
				     struct sk_buff *skb)
{
	__u8 inner;

	switch (skb_protocol(skb, true)) {
	case htons(ETH_P_IP):
		inner = ip_hdr(skb)->tos;
		break;
	case htons(ETH_P_IPV6):
		inner = ipv6_get_dsfield(ipv6_hdr(skb));
		break;
	default:
		return 0;
	}

	return INET_ECN_decapsulate(skb, oiph->tos, inner);
}

static inline int IP6_ECN_decapsulate(const struct ipv6hdr *oipv6h,
				      struct sk_buff *skb)
{
	__u8 inner;

	switch (skb_protocol(skb, true)) {
	case htons(ETH_P_IP):
		inner = ip_hdr(skb)->tos;
		break;
	case htons(ETH_P_IPV6):
		inner = ipv6_get_dsfield(ipv6_hdr(skb));
		break;
	default:
		return 0;
	}

	return INET_ECN_decapsulate(skb, ipv6_get_dsfield(oipv6h), inner);
}
#endif