blob: 6f51c050d84cc2bf55bf53488b388e86dc768cce [file] [log] [blame]
Greg Kroah-Hartmanb2441312017-11-01 15:07:57 +01001// SPDX-License-Identifier: GPL-2.0
Ursula Brauna046d572017-01-09 16:55:16 +01002/*
3 * Shared Memory Communications over RDMA (SMC-R) and RoCE
4 *
5 * CLC (connection layer control) handshake over initial TCP socket to
6 * prepare for RDMA traffic
7 *
Karsten Graul1a26d022018-03-16 15:06:40 +01008 * Copyright IBM Corp. 2016, 2018
Ursula Brauna046d572017-01-09 16:55:16 +01009 *
10 * Author(s): Ursula Braun <ubraun@linux.vnet.ibm.com>
11 */
12
13#include <linux/in.h>
Karsten Graul696cd302018-03-01 13:51:27 +010014#include <linux/inetdevice.h>
Ursula Braun143c0172017-01-12 14:57:15 +010015#include <linux/if_ether.h>
Ingo Molnarc3edc402017-02-02 08:35:14 +010016#include <linux/sched/signal.h>
17
Karsten Graul1a26d022018-03-16 15:06:40 +010018#include <net/addrconf.h>
Ursula Brauna046d572017-01-09 16:55:16 +010019#include <net/sock.h>
20#include <net/tcp.h>
21
22#include "smc.h"
Ursula Braun0cfdd8f2017-01-09 16:55:17 +010023#include "smc_core.h"
Ursula Brauna046d572017-01-09 16:55:16 +010024#include "smc_clc.h"
25#include "smc_ib.h"
Hans Wippelc758dfd2018-06-28 19:05:09 +020026#include "smc_ism.h"
27
/* fixed total wire length of an SMC-R CLC accept/confirm message */
#define SMCR_CLC_ACCEPT_CONFIRM_LEN 68
/* fixed total wire length of an SMC-D (version 1) CLC accept/confirm message */
#define SMCD_CLC_ACCEPT_CONFIRM_LEN 48
/* fixed total wire length of an SMC-D version 2 CLC accept/confirm message */
#define SMCD_CLC_ACCEPT_CONFIRM_LEN_V2 78
/* size of the stack chunk buffer used to drain CLC data that exceeds the
 * caller-provided receive buffer (see smc_clc_wait_msg())
 */
#define SMC_CLC_RECV_BUF_LEN	100

/* eye catcher "SMCR" EBCDIC for CLC messages */
static const char SMC_EYECATCHER[4] = {'\xe2', '\xd4', '\xc3', '\xd9'};
/* eye catcher "SMCD" EBCDIC for CLC messages */
static const char SMCD_EYECATCHER[4] = {'\xe2', '\xd4', '\xc3', '\xc4'};
Stefan Raspl0f627122018-03-01 13:51:26 +010037
Ursula Braun8c3dca32020-09-26 12:44:28 +020038/* check arriving CLC proposal */
39static bool smc_clc_msg_prop_valid(struct smc_clc_msg_proposal *pclc)
40{
41 struct smc_clc_msg_proposal_prefix *pclc_prfx;
42 struct smc_clc_smcd_v2_extension *smcd_v2_ext;
43 struct smc_clc_msg_hdr *hdr = &pclc->hdr;
44 struct smc_clc_v2_extension *v2_ext;
45
46 v2_ext = smc_get_clc_v2_ext(pclc);
47 pclc_prfx = smc_clc_proposal_get_prefix(pclc);
48 if (hdr->version == SMC_V1) {
49 if (hdr->typev1 == SMC_TYPE_N)
50 return false;
51 if (ntohs(hdr->length) !=
52 sizeof(*pclc) + ntohs(pclc->iparea_offset) +
53 sizeof(*pclc_prfx) +
54 pclc_prfx->ipv6_prefixes_cnt *
55 sizeof(struct smc_clc_ipv6_prefix) +
56 sizeof(struct smc_clc_msg_trail))
57 return false;
58 } else {
59 if (ntohs(hdr->length) !=
60 sizeof(*pclc) +
61 sizeof(struct smc_clc_msg_smcd) +
62 (hdr->typev1 != SMC_TYPE_N ?
63 sizeof(*pclc_prfx) +
64 pclc_prfx->ipv6_prefixes_cnt *
65 sizeof(struct smc_clc_ipv6_prefix) : 0) +
66 (hdr->typev2 != SMC_TYPE_N ?
67 sizeof(*v2_ext) +
68 v2_ext->hdr.eid_cnt * SMC_MAX_EID_LEN : 0) +
69 (smcd_indicated(hdr->typev2) ?
70 sizeof(*smcd_v2_ext) + v2_ext->hdr.ism_gid_cnt *
71 sizeof(struct smc_clc_smcd_gid_chid) :
72 0) +
73 sizeof(struct smc_clc_msg_trail))
74 return false;
75 }
76 return true;
77}
78
Ursula Brauna7c9c5f2020-09-26 12:44:30 +020079/* check arriving CLC accept or confirm */
80static bool
81smc_clc_msg_acc_conf_valid(struct smc_clc_msg_accept_confirm_v2 *clc_v2)
82{
83 struct smc_clc_msg_hdr *hdr = &clc_v2->hdr;
84
85 if (hdr->typev1 != SMC_TYPE_R && hdr->typev1 != SMC_TYPE_D)
86 return false;
87 if (hdr->version == SMC_V1) {
88 if ((hdr->typev1 == SMC_TYPE_R &&
89 ntohs(hdr->length) != SMCR_CLC_ACCEPT_CONFIRM_LEN) ||
90 (hdr->typev1 == SMC_TYPE_D &&
91 ntohs(hdr->length) != SMCD_CLC_ACCEPT_CONFIRM_LEN))
92 return false;
93 } else {
94 if (hdr->typev1 == SMC_TYPE_D &&
95 ntohs(hdr->length) != SMCD_CLC_ACCEPT_CONFIRM_LEN_V2)
96 return false;
97 }
98 return true;
99}
100
/* check if received message has a correct header length and contains valid
 * heading and trailing eyecatchers
 */
static bool smc_clc_msg_hdr_valid(struct smc_clc_msg_hdr *clcm, bool check_trl)
{
	struct smc_clc_msg_accept_confirm_v2 *clc_v2;
	struct smc_clc_msg_proposal *pclc;
	struct smc_clc_msg_decline *dclc;
	struct smc_clc_msg_trail *trl;

	/* heading eyecatcher must be either "SMCR" or "SMCD" (EBCDIC) */
	if (memcmp(clcm->eyecatcher, SMC_EYECATCHER, sizeof(SMC_EYECATCHER)) &&
	    memcmp(clcm->eyecatcher, SMCD_EYECATCHER, sizeof(SMCD_EYECATCHER)))
		return false;
	switch (clcm->type) {
	case SMC_CLC_PROPOSAL:
		pclc = (struct smc_clc_msg_proposal *)clcm;
		if (!smc_clc_msg_prop_valid(pclc))
			return false;
		/* trailer sits in the last sizeof(*trl) bytes of the msg */
		trl = (struct smc_clc_msg_trail *)
			((u8 *)pclc + ntohs(pclc->hdr.length) - sizeof(*trl));
		break;
	case SMC_CLC_ACCEPT:
	case SMC_CLC_CONFIRM:
		clc_v2 = (struct smc_clc_msg_accept_confirm_v2 *)clcm;
		if (!smc_clc_msg_acc_conf_valid(clc_v2))
			return false;
		/* trailer location depends on the (already checked) length */
		trl = (struct smc_clc_msg_trail *)
			((u8 *)clc_v2 + ntohs(clc_v2->hdr.length) -
							sizeof(*trl));
		break;
	case SMC_CLC_DECLINE:
		dclc = (struct smc_clc_msg_decline *)clcm;
		/* decline messages have a fixed size */
		if (ntohs(dclc->hdr.length) != sizeof(*dclc))
			return false;
		trl = &dclc->trl;
		break;
	default:
		return false;
	}
	/* check_trl is false when the message was too big for the receive
	 * buffer and the trailer was drained away (see smc_clc_wait_msg())
	 */
	if (check_trl &&
	    memcmp(trl->eyecatcher, SMC_EYECATCHER, sizeof(SMC_EYECATCHER)) &&
	    memcmp(trl->eyecatcher, SMCD_EYECATCHER, sizeof(SMCD_EYECATCHER)))
		return false;
	return true;
}
146
Karsten Graulc246d942018-03-16 15:06:39 +0100147/* find ipv4 addr on device and get the prefix len, fill CLC proposal msg */
148static int smc_clc_prfx_set4_rcu(struct dst_entry *dst, __be32 ipv4,
149 struct smc_clc_msg_proposal_prefix *prop)
150{
151 struct in_device *in_dev = __in_dev_get_rcu(dst->dev);
Florian Westphalcd5a4112019-05-31 18:27:07 +0200152 const struct in_ifaddr *ifa;
Karsten Graulc246d942018-03-16 15:06:39 +0100153
154 if (!in_dev)
155 return -ENODEV;
Florian Westphalcd5a4112019-05-31 18:27:07 +0200156
157 in_dev_for_each_ifa_rcu(ifa, in_dev) {
Karsten Graulc246d942018-03-16 15:06:39 +0100158 if (!inet_ifa_match(ipv4, ifa))
159 continue;
160 prop->prefix_len = inet_mask_len(ifa->ifa_mask);
161 prop->outgoing_subnet = ifa->ifa_address & ifa->ifa_mask;
162 /* prop->ipv6_prefixes_cnt = 0; already done by memset before */
163 return 0;
Florian Westphalcd5a4112019-05-31 18:27:07 +0200164 }
Karsten Graulc246d942018-03-16 15:06:39 +0100165 return -ENOENT;
166}
167
Karsten Graul1a26d022018-03-16 15:06:40 +0100168/* fill CLC proposal msg with ipv6 prefixes from device */
169static int smc_clc_prfx_set6_rcu(struct dst_entry *dst,
170 struct smc_clc_msg_proposal_prefix *prop,
171 struct smc_clc_ipv6_prefix *ipv6_prfx)
172{
173#if IS_ENABLED(CONFIG_IPV6)
174 struct inet6_dev *in6_dev = __in6_dev_get(dst->dev);
175 struct inet6_ifaddr *ifa;
176 int cnt = 0;
177
178 if (!in6_dev)
179 return -ENODEV;
180 /* use a maximum of 8 IPv6 prefixes from device */
181 list_for_each_entry(ifa, &in6_dev->addr_list, if_list) {
182 if (ipv6_addr_type(&ifa->addr) & IPV6_ADDR_LINKLOCAL)
183 continue;
184 ipv6_addr_prefix(&ipv6_prfx[cnt].prefix,
185 &ifa->addr, ifa->prefix_len);
186 ipv6_prfx[cnt].prefix_len = ifa->prefix_len;
187 cnt++;
188 if (cnt == SMC_CLC_MAX_V6_PREFIX)
189 break;
190 }
191 prop->ipv6_prefixes_cnt = cnt;
192 if (cnt)
193 return 0;
194#endif
195 return -ENOENT;
196}
197
Karsten Graulc246d942018-03-16 15:06:39 +0100198/* retrieve and set prefixes in CLC proposal msg */
199static int smc_clc_prfx_set(struct socket *clcsock,
Karsten Graul1a26d022018-03-16 15:06:40 +0100200 struct smc_clc_msg_proposal_prefix *prop,
201 struct smc_clc_ipv6_prefix *ipv6_prfx)
Karsten Graul696cd302018-03-01 13:51:27 +0100202{
203 struct dst_entry *dst = sk_dst_get(clcsock->sk);
Karsten Graulc246d942018-03-16 15:06:39 +0100204 struct sockaddr_storage addrs;
Karsten Graul1a26d022018-03-16 15:06:40 +0100205 struct sockaddr_in6 *addr6;
Karsten Graulc246d942018-03-16 15:06:39 +0100206 struct sockaddr_in *addr;
207 int rc = -ENOENT;
208
Karsten Graulc246d942018-03-16 15:06:39 +0100209 if (!dst) {
210 rc = -ENOTCONN;
211 goto out;
212 }
213 if (!dst->dev) {
214 rc = -ENODEV;
215 goto out_rel;
216 }
217 /* get address to which the internal TCP socket is bound */
218 kernel_getsockname(clcsock, (struct sockaddr *)&addrs);
219 /* analyze IP specific data of net_device belonging to TCP socket */
Karsten Graul1a26d022018-03-16 15:06:40 +0100220 addr6 = (struct sockaddr_in6 *)&addrs;
Karsten Graulc246d942018-03-16 15:06:39 +0100221 rcu_read_lock();
222 if (addrs.ss_family == PF_INET) {
223 /* IPv4 */
224 addr = (struct sockaddr_in *)&addrs;
225 rc = smc_clc_prfx_set4_rcu(dst, addr->sin_addr.s_addr, prop);
Karsten Graul1a26d022018-03-16 15:06:40 +0100226 } else if (ipv6_addr_v4mapped(&addr6->sin6_addr)) {
227 /* mapped IPv4 address - peer is IPv4 only */
228 rc = smc_clc_prfx_set4_rcu(dst, addr6->sin6_addr.s6_addr32[3],
229 prop);
230 } else {
231 /* IPv6 */
232 rc = smc_clc_prfx_set6_rcu(dst, prop, ipv6_prfx);
Karsten Graulc246d942018-03-16 15:06:39 +0100233 }
234 rcu_read_unlock();
235out_rel:
236 dst_release(dst);
237out:
238 return rc;
239}
240
241/* match ipv4 addrs of dev against addr in CLC proposal */
242static int smc_clc_prfx_match4_rcu(struct net_device *dev,
243 struct smc_clc_msg_proposal_prefix *prop)
244{
245 struct in_device *in_dev = __in_dev_get_rcu(dev);
Florian Westphalcd5a4112019-05-31 18:27:07 +0200246 const struct in_ifaddr *ifa;
Karsten Graulc246d942018-03-16 15:06:39 +0100247
248 if (!in_dev)
249 return -ENODEV;
Florian Westphalcd5a4112019-05-31 18:27:07 +0200250 in_dev_for_each_ifa_rcu(ifa, in_dev) {
Karsten Graulc246d942018-03-16 15:06:39 +0100251 if (prop->prefix_len == inet_mask_len(ifa->ifa_mask) &&
252 inet_ifa_match(prop->outgoing_subnet, ifa))
253 return 0;
Florian Westphalcd5a4112019-05-31 18:27:07 +0200254 }
Karsten Graulc246d942018-03-16 15:06:39 +0100255
256 return -ENOENT;
257}
258
Karsten Graul1a26d022018-03-16 15:06:40 +0100259/* match ipv6 addrs of dev against addrs in CLC proposal */
260static int smc_clc_prfx_match6_rcu(struct net_device *dev,
261 struct smc_clc_msg_proposal_prefix *prop)
262{
263#if IS_ENABLED(CONFIG_IPV6)
264 struct inet6_dev *in6_dev = __in6_dev_get(dev);
265 struct smc_clc_ipv6_prefix *ipv6_prfx;
266 struct inet6_ifaddr *ifa;
267 int i, max;
268
269 if (!in6_dev)
270 return -ENODEV;
271 /* ipv6 prefix list starts behind smc_clc_msg_proposal_prefix */
272 ipv6_prfx = (struct smc_clc_ipv6_prefix *)((u8 *)prop + sizeof(*prop));
273 max = min_t(u8, prop->ipv6_prefixes_cnt, SMC_CLC_MAX_V6_PREFIX);
274 list_for_each_entry(ifa, &in6_dev->addr_list, if_list) {
275 if (ipv6_addr_type(&ifa->addr) & IPV6_ADDR_LINKLOCAL)
276 continue;
277 for (i = 0; i < max; i++) {
278 if (ifa->prefix_len == ipv6_prfx[i].prefix_len &&
279 ipv6_prefix_equal(&ifa->addr, &ipv6_prfx[i].prefix,
280 ifa->prefix_len))
281 return 0;
282 }
283 }
284#endif
285 return -ENOENT;
286}
287
Karsten Graulc246d942018-03-16 15:06:39 +0100288/* check if proposed prefixes match one of our device prefixes */
289int smc_clc_prfx_match(struct socket *clcsock,
290 struct smc_clc_msg_proposal_prefix *prop)
291{
292 struct dst_entry *dst = sk_dst_get(clcsock->sk);
Karsten Graul1a26d022018-03-16 15:06:40 +0100293 int rc;
Karsten Graul696cd302018-03-01 13:51:27 +0100294
295 if (!dst) {
296 rc = -ENOTCONN;
297 goto out;
298 }
299 if (!dst->dev) {
300 rc = -ENODEV;
301 goto out_rel;
302 }
Karsten Graul696cd302018-03-01 13:51:27 +0100303 rcu_read_lock();
Karsten Graulc246d942018-03-16 15:06:39 +0100304 if (!prop->ipv6_prefixes_cnt)
305 rc = smc_clc_prfx_match4_rcu(dst->dev, prop);
Karsten Graul1a26d022018-03-16 15:06:40 +0100306 else
307 rc = smc_clc_prfx_match6_rcu(dst->dev, prop);
Karsten Graul696cd302018-03-01 13:51:27 +0100308 rcu_read_unlock();
Karsten Graul696cd302018-03-01 13:51:27 +0100309out_rel:
310 dst_release(dst);
311out:
312 return rc;
313}
314
Ursula Brauna046d572017-01-09 16:55:16 +0100315/* Wait for data on the tcp-socket, analyze received data
316 * Returns:
317 * 0 if success and it was not a decline that we received.
318 * SMC_CLC_DECL_REPLY if decline received for fallback w/o another decl send.
319 * clcsock error, -EINTR, -ECONNRESET, -EPROTO otherwise.
320 */
321int smc_clc_wait_msg(struct smc_sock *smc, void *buf, int buflen,
Ursula Braun2b59f582018-11-22 10:26:39 +0100322 u8 expected_type, unsigned long timeout)
Ursula Brauna046d572017-01-09 16:55:16 +0100323{
Karsten Graulf6bdc422018-07-18 15:22:51 +0200324 long rcvtimeo = smc->clcsock->sk->sk_rcvtimeo;
Ursula Brauna046d572017-01-09 16:55:16 +0100325 struct sock *clc_sk = smc->clcsock->sk;
326 struct smc_clc_msg_hdr *clcm = buf;
327 struct msghdr msg = {NULL, 0};
328 int reason_code = 0;
Al Virod63d2712017-09-20 20:21:22 -0400329 struct kvec vec = {buf, buflen};
Ursula Braunfb4f7922020-07-08 17:05:15 +0200330 int len, datlen, recvlen;
331 bool check_trl = true;
Ursula Brauna046d572017-01-09 16:55:16 +0100332 int krflags;
333
334 /* peek the first few bytes to determine length of data to receive
335 * so we don't consume any subsequent CLC message or payload data
336 * in the TCP byte stream
337 */
Al Virod63d2712017-09-20 20:21:22 -0400338 /*
339 * Caller must make sure that buflen is no less than
340 * sizeof(struct smc_clc_msg_hdr)
341 */
Ursula Brauna046d572017-01-09 16:55:16 +0100342 krflags = MSG_PEEK | MSG_WAITALL;
Ursula Braun2b59f582018-11-22 10:26:39 +0100343 clc_sk->sk_rcvtimeo = timeout;
David Howellsaa563d72018-10-20 00:57:56 +0100344 iov_iter_kvec(&msg.msg_iter, READ, &vec, 1,
Al Virod63d2712017-09-20 20:21:22 -0400345 sizeof(struct smc_clc_msg_hdr));
346 len = sock_recvmsg(smc->clcsock, &msg, krflags);
Ursula Brauna046d572017-01-09 16:55:16 +0100347 if (signal_pending(current)) {
348 reason_code = -EINTR;
349 clc_sk->sk_err = EINTR;
350 smc->sk.sk_err = EINTR;
351 goto out;
352 }
353 if (clc_sk->sk_err) {
354 reason_code = -clc_sk->sk_err;
Ursula Braun9ed28552018-11-22 10:26:37 +0100355 if (clc_sk->sk_err == EAGAIN &&
356 expected_type == SMC_CLC_DECLINE)
357 clc_sk->sk_err = 0; /* reset for fallback usage */
358 else
359 smc->sk.sk_err = clc_sk->sk_err;
Ursula Brauna046d572017-01-09 16:55:16 +0100360 goto out;
361 }
362 if (!len) { /* peer has performed orderly shutdown */
363 smc->sk.sk_err = ECONNRESET;
364 reason_code = -ECONNRESET;
365 goto out;
366 }
367 if (len < 0) {
Ursula Braun9ed28552018-11-22 10:26:37 +0100368 if (len != -EAGAIN || expected_type != SMC_CLC_DECLINE)
369 smc->sk.sk_err = -len;
Ursula Brauna046d572017-01-09 16:55:16 +0100370 reason_code = len;
371 goto out;
372 }
373 datlen = ntohs(clcm->length);
374 if ((len < sizeof(struct smc_clc_msg_hdr)) ||
Ursula Braun5ac54d82020-09-10 18:48:21 +0200375 (clcm->version < SMC_V1) ||
Ursula Brauna046d572017-01-09 16:55:16 +0100376 ((clcm->type != SMC_CLC_DECLINE) &&
377 (clcm->type != expected_type))) {
378 smc->sk.sk_err = EPROTO;
379 reason_code = -EPROTO;
380 goto out;
381 }
382
383 /* receive the complete CLC message */
Ursula Brauna046d572017-01-09 16:55:16 +0100384 memset(&msg, 0, sizeof(struct msghdr));
Ursula Braunfb4f7922020-07-08 17:05:15 +0200385 if (datlen > buflen) {
386 check_trl = false;
387 recvlen = buflen;
388 } else {
389 recvlen = datlen;
390 }
391 iov_iter_kvec(&msg.msg_iter, READ, &vec, 1, recvlen);
Ursula Brauna046d572017-01-09 16:55:16 +0100392 krflags = MSG_WAITALL;
Al Virod63d2712017-09-20 20:21:22 -0400393 len = sock_recvmsg(smc->clcsock, &msg, krflags);
Ursula Braunfb4f7922020-07-08 17:05:15 +0200394 if (len < recvlen || !smc_clc_msg_hdr_valid(clcm, check_trl)) {
Ursula Brauna046d572017-01-09 16:55:16 +0100395 smc->sk.sk_err = EPROTO;
396 reason_code = -EPROTO;
397 goto out;
398 }
Ursula Braunfb4f7922020-07-08 17:05:15 +0200399 datlen -= len;
400 while (datlen) {
401 u8 tmp[SMC_CLC_RECV_BUF_LEN];
402
403 vec.iov_base = &tmp;
404 vec.iov_len = SMC_CLC_RECV_BUF_LEN;
405 /* receive remaining proposal message */
406 recvlen = datlen > SMC_CLC_RECV_BUF_LEN ?
407 SMC_CLC_RECV_BUF_LEN : datlen;
408 iov_iter_kvec(&msg.msg_iter, READ, &vec, 1, recvlen);
409 len = sock_recvmsg(smc->clcsock, &msg, krflags);
410 datlen -= len;
411 }
Ursula Braun0cfdd8f2017-01-09 16:55:17 +0100412 if (clcm->type == SMC_CLC_DECLINE) {
Karsten Graul603cc142018-07-25 16:35:32 +0200413 struct smc_clc_msg_decline *dclc;
414
415 dclc = (struct smc_clc_msg_decline *)clcm;
416 reason_code = SMC_CLC_DECL_PEERDECL;
417 smc->peer_diagnosis = ntohl(dclc->peer_diagnosis);
Ursula Braunf1eb02f2020-09-26 12:44:20 +0200418 if (((struct smc_clc_msg_decline *)buf)->hdr.typev2 &
419 SMC_FIRST_CONTACT_MASK) {
Karsten Graul517c3002018-05-15 17:05:03 +0200420 smc->conn.lgr->sync_err = 1;
Karsten Graul5f78fe92020-02-17 16:24:54 +0100421 smc_lgr_terminate_sched(smc->conn.lgr);
Ursula Braunbfbedfd2017-09-21 09:16:32 +0200422 }
Ursula Braun0cfdd8f2017-01-09 16:55:17 +0100423 }
424
Ursula Brauna046d572017-01-09 16:55:16 +0100425out:
Ursula Braun2b59f582018-11-22 10:26:39 +0100426 clc_sk->sk_rcvtimeo = rcvtimeo;
Ursula Brauna046d572017-01-09 16:55:16 +0100427 return reason_code;
428}
429
430/* send CLC DECLINE message across internal TCP socket */
Ursula Braunbfbedfd2017-09-21 09:16:32 +0200431int smc_clc_send_decline(struct smc_sock *smc, u32 peer_diag_info)
Ursula Brauna046d572017-01-09 16:55:16 +0100432{
433 struct smc_clc_msg_decline dclc;
434 struct msghdr msg;
435 struct kvec vec;
436 int len;
437
438 memset(&dclc, 0, sizeof(dclc));
439 memcpy(dclc.hdr.eyecatcher, SMC_EYECATCHER, sizeof(SMC_EYECATCHER));
440 dclc.hdr.type = SMC_CLC_DECLINE;
441 dclc.hdr.length = htons(sizeof(struct smc_clc_msg_decline));
Ursula Braun5ac54d82020-09-10 18:48:21 +0200442 dclc.hdr.version = SMC_V1;
Ursula Braunf1eb02f2020-09-26 12:44:20 +0200443 dclc.hdr.typev2 = (peer_diag_info == SMC_CLC_DECL_SYNCERR) ?
444 SMC_FIRST_CONTACT_MASK : 0;
Hans Wippela082ec82020-02-25 22:41:22 +0100445 if ((!smc->conn.lgr || !smc->conn.lgr->is_smcd) &&
446 smc_ib_is_valid_local_systemid())
Ursula Braun369537c2020-02-14 08:59:00 +0100447 memcpy(dclc.id_for_peer, local_systemid,
448 sizeof(local_systemid));
Ursula Brauna046d572017-01-09 16:55:16 +0100449 dclc.peer_diagnosis = htonl(peer_diag_info);
450 memcpy(dclc.trl.eyecatcher, SMC_EYECATCHER, sizeof(SMC_EYECATCHER));
451
452 memset(&msg, 0, sizeof(msg));
453 vec.iov_base = &dclc;
454 vec.iov_len = sizeof(struct smc_clc_msg_decline);
455 len = kernel_sendmsg(smc->clcsock, &msg, &vec, 1,
456 sizeof(struct smc_clc_msg_decline));
Ursula Braun14d22d42019-01-30 18:51:00 +0100457 if (len < 0 || len < sizeof(struct smc_clc_msg_decline))
Ursula Braun6ae36bf2018-11-22 10:26:36 +0100458 len = -EPROTO;
459 return len > 0 ? 0 : len;
Ursula Brauna046d572017-01-09 16:55:16 +0100460}
461
/* send CLC PROPOSAL message across internal TCP socket
 * Builds a combined V1/V2 proposal from @ini (offered SMC types, RoCE
 * device data, ISM devices) and sends it as a gather list of the parts
 * that are actually present.
 * Returns 0 on success, SMC_CLC_DECL_CNFERR if no prefixes could be
 * determined for a V1-only proposal, -ENOMEM, or a negative send error.
 */
int smc_clc_send_proposal(struct smc_sock *smc, struct smc_init_info *ini)
{
	struct smc_clc_smcd_v2_extension *smcd_v2_ext;
	struct smc_clc_msg_proposal_prefix *pclc_prfx;
	struct smc_clc_msg_proposal *pclc_base;
	struct smc_clc_smcd_gid_chid *gidchids;
	struct smc_clc_msg_proposal_area *pclc;
	struct smc_clc_ipv6_prefix *ipv6_prfx;
	struct smc_clc_v2_extension *v2_ext;
	struct smc_clc_msg_smcd *pclc_smcd;
	struct smc_clc_msg_trail *trl;
	int len, i, plen, rc;
	int reason_code = 0;
	struct kvec vec[8];	/* max: base,smcd,prfx,v6,v2ext,smcdv2,gids,trl */
	struct msghdr msg;

	/* one zeroed area holds all potential message parts */
	pclc = kzalloc(sizeof(*pclc), GFP_KERNEL);
	if (!pclc)
		return -ENOMEM;

	pclc_base = &pclc->pclc_base;
	pclc_smcd = &pclc->pclc_smcd;
	pclc_prfx = &pclc->pclc_prfx;
	ipv6_prfx = pclc->pclc_prfx_ipv6;
	v2_ext = &pclc->pclc_v2_ext;
	smcd_v2_ext = &pclc->pclc_smcd_v2_ext;
	gidchids = pclc->pclc_gidchids;
	trl = &pclc->pclc_trl;

	pclc_base->hdr.version = SMC_V2;
	pclc_base->hdr.typev1 = ini->smc_type_v1;
	pclc_base->hdr.typev2 = ini->smc_type_v2;
	/* plen accumulates the on-wire length of all parts sent */
	plen = sizeof(*pclc_base) + sizeof(*pclc_smcd) + sizeof(*trl);

	/* retrieve ip prefixes for CLC proposal msg */
	if (ini->smc_type_v1 != SMC_TYPE_N) {
		rc = smc_clc_prfx_set(smc->clcsock, pclc_prfx, ipv6_prfx);
		if (rc) {
			/* no usable prefixes: give up if V1 was the only
			 * offer, otherwise fall back to a V2-only proposal
			 */
			if (ini->smc_type_v2 == SMC_TYPE_N) {
				kfree(pclc);
				return SMC_CLC_DECL_CNFERR;
			}
			pclc_base->hdr.typev1 = SMC_TYPE_N;
		} else {
			pclc_base->iparea_offset = htons(sizeof(*pclc_smcd));
			plen += sizeof(*pclc_prfx) +
				pclc_prfx->ipv6_prefixes_cnt *
				sizeof(ipv6_prfx[0]);
		}
	}

	/* build SMC Proposal CLC message */
	memcpy(pclc_base->hdr.eyecatcher, SMC_EYECATCHER,
	       sizeof(SMC_EYECATCHER));
	pclc_base->hdr.type = SMC_CLC_PROPOSAL;
	if (smcr_indicated(ini->smc_type_v1)) {
		/* add SMC-R specifics */
		memcpy(pclc_base->lcl.id_for_peer, local_systemid,
		       sizeof(local_systemid));
		memcpy(pclc_base->lcl.gid, ini->ib_gid, SMC_GID_SIZE);
		memcpy(pclc_base->lcl.mac, &ini->ib_dev->mac[ini->ib_port - 1],
		       ETH_ALEN);
	}
	if (smcd_indicated(ini->smc_type_v1)) {
		/* add SMC-D specifics */
		if (ini->ism_dev[0]) {
			/* ism_dev[0] is the V1 ISM device */
			pclc_smcd->ism.gid = htonll(ini->ism_dev[0]->local_gid);
			pclc_smcd->ism.chid =
				htons(smc_ism_get_chid(ini->ism_dev[0]));
		}
	}
	if (ini->smc_type_v2 == SMC_TYPE_N) {
		pclc_smcd->v2_ext_offset = 0;
	} else {
		u16 v2_ext_offset;
		u8 *eid = NULL;

		/* offset of the v2 extension, counted from the end of the
		 * v2_ext_offset field, skipping the V1 prefix part if sent
		 */
		v2_ext_offset = sizeof(*pclc_smcd) -
			offsetofend(struct smc_clc_msg_smcd, v2_ext_offset);
		if (ini->smc_type_v1 != SMC_TYPE_N)
			v2_ext_offset += sizeof(*pclc_prfx) +
					 pclc_prfx->ipv6_prefixes_cnt *
					 sizeof(ipv6_prfx[0]);
		pclc_smcd->v2_ext_offset = htons(v2_ext_offset);
		v2_ext->hdr.eid_cnt = 0;
		v2_ext->hdr.ism_gid_cnt = ini->ism_offered_cnt;
		v2_ext->hdr.flag.release = SMC_RELEASE;
		v2_ext->hdr.flag.seid = 1;
		v2_ext->hdr.smcd_v2_ext_offset = htons(sizeof(*v2_ext) -
				offsetofend(struct smc_clnt_opts_area_hdr,
					    smcd_v2_ext_offset) +
				v2_ext->hdr.eid_cnt * SMC_MAX_EID_LEN);
		/* take the system EID from any available ISM device */
		if (ini->ism_dev[0])
			smc_ism_get_system_eid(ini->ism_dev[0], &eid);
		else
			smc_ism_get_system_eid(ini->ism_dev[1], &eid);
		if (eid)
			memcpy(smcd_v2_ext->system_eid, eid, SMC_MAX_EID_LEN);
		plen += sizeof(*v2_ext) + sizeof(*smcd_v2_ext);
		if (ini->ism_offered_cnt) {
			/* V2 ISM devices start at index 1 of ism_dev[] */
			for (i = 1; i <= ini->ism_offered_cnt; i++) {
				gidchids[i - 1].gid =
					htonll(ini->ism_dev[i]->local_gid);
				gidchids[i - 1].chid =
					htons(smc_ism_get_chid(ini->ism_dev[i]));
			}
			plen += ini->ism_offered_cnt *
				sizeof(struct smc_clc_smcd_gid_chid);
		}
	}
	pclc_base->hdr.length = htons(plen);
	memcpy(trl->eyecatcher, SMC_EYECATCHER, sizeof(SMC_EYECATCHER));

	/* send SMC Proposal CLC message: gather only the parts present */
	memset(&msg, 0, sizeof(msg));
	i = 0;
	vec[i].iov_base = pclc_base;
	vec[i++].iov_len = sizeof(*pclc_base);
	vec[i].iov_base = pclc_smcd;
	vec[i++].iov_len = sizeof(*pclc_smcd);
	if (ini->smc_type_v1 != SMC_TYPE_N) {
		vec[i].iov_base = pclc_prfx;
		vec[i++].iov_len = sizeof(*pclc_prfx);
		if (pclc_prfx->ipv6_prefixes_cnt > 0) {
			vec[i].iov_base = ipv6_prfx;
			vec[i++].iov_len = pclc_prfx->ipv6_prefixes_cnt *
					   sizeof(ipv6_prfx[0]);
		}
	}
	if (ini->smc_type_v2 != SMC_TYPE_N) {
		vec[i].iov_base = v2_ext;
		vec[i++].iov_len = sizeof(*v2_ext);
		vec[i].iov_base = smcd_v2_ext;
		vec[i++].iov_len = sizeof(*smcd_v2_ext);
		if (ini->ism_offered_cnt) {
			vec[i].iov_base = gidchids;
			vec[i++].iov_len = ini->ism_offered_cnt *
					sizeof(struct smc_clc_smcd_gid_chid);
		}
	}
	vec[i].iov_base = trl;
	vec[i++].iov_len = sizeof(*trl);
	/* due to the few bytes needed for clc-handshake this cannot block */
	len = kernel_sendmsg(smc->clcsock, &msg, vec, i, plen);
	if (len < 0) {
		smc->sk.sk_err = smc->clcsock->sk->sk_err;
		reason_code = -smc->sk.sk_err;
	} else if (len < ntohs(pclc_base->hdr.length)) {
		/* short send */
		reason_code = -ENETUNREACH;
		smc->sk.sk_err = -reason_code;
	}

	kfree(pclc);
	return reason_code;
}
618
Ursula Braun3d9725a2020-09-10 18:48:23 +0200619/* build and send CLC CONFIRM / ACCEPT message */
620static int smc_clc_send_confirm_accept(struct smc_sock *smc,
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200621 struct smc_clc_msg_accept_confirm_v2 *clc_v2,
622 int first_contact, u8 version)
Ursula Braun3d9725a2020-09-10 18:48:23 +0200623{
624 struct smc_connection *conn = &smc->conn;
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200625 struct smc_clc_msg_accept_confirm *clc;
Ursula Braune15c6c42020-09-26 12:44:22 +0200626 struct smc_clc_msg_trail trl;
627 struct kvec vec[2];
Ursula Braun3d9725a2020-09-10 18:48:23 +0200628 struct msghdr msg;
Ursula Braune15c6c42020-09-26 12:44:22 +0200629 int i;
Ursula Braun3d9725a2020-09-10 18:48:23 +0200630
631 /* send SMC Confirm CLC msg */
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200632 clc = (struct smc_clc_msg_accept_confirm *)clc_v2;
633 clc->hdr.version = version; /* SMC version */
Ursula Braun3d9725a2020-09-10 18:48:23 +0200634 if (first_contact)
Ursula Braunf1eb02f2020-09-26 12:44:20 +0200635 clc->hdr.typev2 |= SMC_FIRST_CONTACT_MASK;
Ursula Braun3d9725a2020-09-10 18:48:23 +0200636 if (conn->lgr->is_smcd) {
637 /* SMC-D specific settings */
638 memcpy(clc->hdr.eyecatcher, SMCD_EYECATCHER,
639 sizeof(SMCD_EYECATCHER));
Ursula Braunf1eb02f2020-09-26 12:44:20 +0200640 clc->hdr.typev1 = SMC_TYPE_D;
Ursula Braun3d9725a2020-09-10 18:48:23 +0200641 clc->d0.gid = conn->lgr->smcd->local_gid;
642 clc->d0.token = conn->rmb_desc->token;
643 clc->d0.dmbe_size = conn->rmbe_size_short;
644 clc->d0.dmbe_idx = 0;
645 memcpy(&clc->d0.linkid, conn->lgr->id, SMC_LGR_ID_SIZE);
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200646 if (version == SMC_V1) {
647 clc->hdr.length = htons(SMCD_CLC_ACCEPT_CONFIRM_LEN);
648 } else {
649 u8 *eid = NULL;
650
651 clc_v2->chid = htons(smc_ism_get_chid(conn->lgr->smcd));
652 smc_ism_get_system_eid(conn->lgr->smcd, &eid);
653 if (eid)
654 memcpy(clc_v2->eid, eid, SMC_MAX_EID_LEN);
655 clc_v2->hdr.length =
656 htons(SMCD_CLC_ACCEPT_CONFIRM_LEN_V2);
657 }
Ursula Braune15c6c42020-09-26 12:44:22 +0200658 memcpy(trl.eyecatcher, SMCD_EYECATCHER,
Ursula Braun3d9725a2020-09-10 18:48:23 +0200659 sizeof(SMCD_EYECATCHER));
660 } else {
661 struct smc_link *link = conn->lnk;
662
663 /* SMC-R specific settings */
664 link = conn->lnk;
665 memcpy(clc->hdr.eyecatcher, SMC_EYECATCHER,
666 sizeof(SMC_EYECATCHER));
Ursula Braunf1eb02f2020-09-26 12:44:20 +0200667 clc->hdr.typev1 = SMC_TYPE_R;
Ursula Braun3d9725a2020-09-10 18:48:23 +0200668 clc->hdr.length = htons(SMCR_CLC_ACCEPT_CONFIRM_LEN);
669 memcpy(clc->r0.lcl.id_for_peer, local_systemid,
670 sizeof(local_systemid));
671 memcpy(&clc->r0.lcl.gid, link->gid, SMC_GID_SIZE);
672 memcpy(&clc->r0.lcl.mac, &link->smcibdev->mac[link->ibport - 1],
673 ETH_ALEN);
674 hton24(clc->r0.qpn, link->roce_qp->qp_num);
675 clc->r0.rmb_rkey =
676 htonl(conn->rmb_desc->mr_rx[link->link_idx]->rkey);
677 clc->r0.rmbe_idx = 1; /* for now: 1 RMB = 1 RMBE */
678 clc->r0.rmbe_alert_token = htonl(conn->alert_token_local);
679 switch (clc->hdr.type) {
680 case SMC_CLC_ACCEPT:
681 clc->r0.qp_mtu = link->path_mtu;
682 break;
683 case SMC_CLC_CONFIRM:
684 clc->r0.qp_mtu = min(link->path_mtu, link->peer_mtu);
685 break;
686 }
687 clc->r0.rmbe_size = conn->rmbe_size_short;
688 clc->r0.rmb_dma_addr = cpu_to_be64((u64)sg_dma_address
689 (conn->rmb_desc->sgt[link->link_idx].sgl));
690 hton24(clc->r0.psn, link->psn_initial);
Ursula Braune15c6c42020-09-26 12:44:22 +0200691 memcpy(trl.eyecatcher, SMC_EYECATCHER, sizeof(SMC_EYECATCHER));
Ursula Braun3d9725a2020-09-10 18:48:23 +0200692 }
693
694 memset(&msg, 0, sizeof(msg));
Ursula Braune15c6c42020-09-26 12:44:22 +0200695 i = 0;
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200696 vec[i].iov_base = clc_v2;
697 if (version > SMC_V1)
698 vec[i++].iov_len = SMCD_CLC_ACCEPT_CONFIRM_LEN_V2 - sizeof(trl);
699 else
700 vec[i++].iov_len = (clc->hdr.typev1 == SMC_TYPE_D ?
701 SMCD_CLC_ACCEPT_CONFIRM_LEN :
702 SMCR_CLC_ACCEPT_CONFIRM_LEN) -
703 sizeof(trl);
Ursula Braune15c6c42020-09-26 12:44:22 +0200704 vec[i].iov_base = &trl;
705 vec[i++].iov_len = sizeof(trl);
706 return kernel_sendmsg(smc->clcsock, &msg, vec, 1,
Ursula Braun3d9725a2020-09-10 18:48:23 +0200707 ntohs(clc->hdr.length));
708}
709
Ursula Brauna046d572017-01-09 16:55:16 +0100710/* send CLC CONFIRM message across internal TCP socket */
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200711int smc_clc_send_confirm(struct smc_sock *smc, bool clnt_first_contact,
712 u8 version)
Ursula Brauna046d572017-01-09 16:55:16 +0100713{
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200714 struct smc_clc_msg_accept_confirm_v2 cclc_v2;
Ursula Brauna046d572017-01-09 16:55:16 +0100715 int reason_code = 0;
Ursula Brauna046d572017-01-09 16:55:16 +0100716 int len;
717
718 /* send SMC Confirm CLC msg */
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200719 memset(&cclc_v2, 0, sizeof(cclc_v2));
720 cclc_v2.hdr.type = SMC_CLC_CONFIRM;
721 len = smc_clc_send_confirm_accept(smc, &cclc_v2, clnt_first_contact,
722 version);
723 if (len < ntohs(cclc_v2.hdr.length)) {
Ursula Brauna046d572017-01-09 16:55:16 +0100724 if (len >= 0) {
725 reason_code = -ENETUNREACH;
726 smc->sk.sk_err = -reason_code;
727 } else {
728 smc->sk.sk_err = smc->clcsock->sk->sk_err;
729 reason_code = -smc->sk.sk_err;
730 }
731 }
732 return reason_code;
733}
734
735/* send CLC ACCEPT message across internal TCP socket */
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200736int smc_clc_send_accept(struct smc_sock *new_smc, bool srv_first_contact,
737 u8 version)
Ursula Brauna046d572017-01-09 16:55:16 +0100738{
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200739 struct smc_clc_msg_accept_confirm_v2 aclc_v2;
Ursula Brauna046d572017-01-09 16:55:16 +0100740 int len;
741
Ursula Brauna7c9c5f2020-09-26 12:44:30 +0200742 memset(&aclc_v2, 0, sizeof(aclc_v2));
743 aclc_v2.hdr.type = SMC_CLC_ACCEPT;
744 len = smc_clc_send_confirm_accept(new_smc, &aclc_v2, srv_first_contact,
745 version);
746 if (len < ntohs(aclc_v2.hdr.length))
Ursula Braun6ae36bf2018-11-22 10:26:36 +0100747 len = len >= 0 ? -EPROTO : -new_smc->clcsock->sk->sk_err;
Ursula Brauna046d572017-01-09 16:55:16 +0100748
Ursula Braun6ae36bf2018-11-22 10:26:36 +0100749 return len > 0 ? 0 : len;
Ursula Brauna046d572017-01-09 16:55:16 +0100750}