/*
   BlueZ - Bluetooth protocol stack for Linux
   Copyright (C) 2014 Intel Corporation

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License version 2 as
   published by the Free Software Foundation;

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
   OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
   FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
   IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) AND AUTHOR(S) BE LIABLE FOR ANY
   CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

   ALL LIABILITY, INCLUDING LIABILITY FOR INFRINGEMENT OF ANY PATENTS,
   COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS, RELATING TO USE OF THIS
   SOFTWARE IS DISCLAIMED.
*/
| 22 | |
Michał Narajowski | 1b42206 | 2016-10-05 12:28:27 +0200 | [diff] [blame] | 23 | #include <asm/unaligned.h> |
| 24 | |
Johan Hedberg | b504430 | 2015-11-10 09:44:55 +0200 | [diff] [blame] | 25 | #define hci_req_sync_lock(hdev) mutex_lock(&hdev->req_lock) |
| 26 | #define hci_req_sync_unlock(hdev) mutex_unlock(&hdev->req_lock) |
Johan Hedberg | be91cd0 | 2015-11-10 09:44:54 +0200 | [diff] [blame] | 27 | |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 28 | struct hci_request { |
| 29 | struct hci_dev *hdev; |
| 30 | struct sk_buff_head cmd_q; |
| 31 | |
| 32 | /* If something goes wrong when building the HCI request, the error |
| 33 | * value is stored in this field. |
| 34 | */ |
| 35 | int err; |
| 36 | }; |
| 37 | |
| 38 | void hci_req_init(struct hci_request *req, struct hci_dev *hdev); |
Jaganath Kanakkassery | f17d858 | 2017-10-25 10:58:48 +0530 | [diff] [blame] | 39 | void hci_req_purge(struct hci_request *req); |
João Paulo Rechi Vita | f80c5da | 2019-05-02 10:01:52 +0800 | [diff] [blame] | 40 | bool hci_req_status_pend(struct hci_dev *hdev); |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 41 | int hci_req_run(struct hci_request *req, hci_req_complete_t complete); |
Johan Hedberg | e6214487 | 2015-04-02 13:41:08 +0300 | [diff] [blame] | 42 | int hci_req_run_skb(struct hci_request *req, hci_req_complete_skb_t complete); |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 43 | void hci_req_add(struct hci_request *req, u16 opcode, u32 plen, |
| 44 | const void *param); |
| 45 | void hci_req_add_ev(struct hci_request *req, u16 opcode, u32 plen, |
| 46 | const void *param, u8 event); |
Johan Hedberg | e6214487 | 2015-04-02 13:41:08 +0300 | [diff] [blame] | 47 | void hci_req_cmd_complete(struct hci_dev *hdev, u16 opcode, u8 status, |
| 48 | hci_req_complete_t *req_complete, |
| 49 | hci_req_complete_skb_t *req_complete_skb); |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 50 | |
Johan Hedberg | a1d01db | 2015-11-11 08:11:25 +0200 | [diff] [blame] | 51 | int hci_req_sync(struct hci_dev *hdev, int (*req)(struct hci_request *req, |
| 52 | unsigned long opt), |
Johan Hedberg | 4ebeee2 | 2015-11-11 08:11:19 +0200 | [diff] [blame] | 53 | unsigned long opt, u32 timeout, u8 *hci_status); |
Johan Hedberg | a1d01db | 2015-11-11 08:11:25 +0200 | [diff] [blame] | 54 | int __hci_req_sync(struct hci_dev *hdev, int (*func)(struct hci_request *req, |
| 55 | unsigned long opt), |
Johan Hedberg | 4ebeee2 | 2015-11-11 08:11:19 +0200 | [diff] [blame] | 56 | unsigned long opt, u32 timeout, u8 *hci_status); |
Johan Hedberg | b504430 | 2015-11-10 09:44:55 +0200 | [diff] [blame] | 57 | void hci_req_sync_cancel(struct hci_dev *hdev, int err); |
Johan Hedberg | be91cd0 | 2015-11-10 09:44:54 +0200 | [diff] [blame] | 58 | |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 59 | struct sk_buff *hci_prepare_cmd(struct hci_dev *hdev, u16 opcode, u32 plen, |
| 60 | const void *param); |
| 61 | |
Johan Hedberg | 2ff1389 | 2015-11-25 16:15:44 +0200 | [diff] [blame] | 62 | int __hci_req_hci_power_on(struct hci_dev *hdev); |
| 63 | |
Johan Hedberg | bf943cb | 2015-11-25 16:15:43 +0200 | [diff] [blame] | 64 | void __hci_req_write_fast_connectable(struct hci_request *req, bool enable); |
Johan Hedberg | 00cf504 | 2015-11-25 16:15:41 +0200 | [diff] [blame] | 65 | void __hci_req_update_name(struct hci_request *req); |
Johan Hedberg | b1a8917 | 2015-11-25 16:15:42 +0200 | [diff] [blame] | 66 | void __hci_req_update_eir(struct hci_request *req); |
Johan Hedberg | 00cf504 | 2015-11-25 16:15:41 +0200 | [diff] [blame] | 67 | |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 68 | void hci_req_add_le_scan_disable(struct hci_request *req); |
| 69 | void hci_req_add_le_passive_scan(struct hci_request *req); |
| 70 | |
Johan Hedberg | f225257 | 2015-11-18 12:49:20 +0200 | [diff] [blame] | 71 | void hci_req_reenable_advertising(struct hci_dev *hdev); |
| 72 | void __hci_req_enable_advertising(struct hci_request *req); |
| 73 | void __hci_req_disable_advertising(struct hci_request *req); |
Johan Hedberg | cab054a | 2015-11-30 11:21:45 +0200 | [diff] [blame] | 74 | void __hci_req_update_adv_data(struct hci_request *req, u8 instance); |
| 75 | int hci_req_update_adv_data(struct hci_dev *hdev, u8 instance); |
| 76 | void __hci_req_update_scan_rsp_data(struct hci_request *req, u8 instance); |
Johan Hedberg | f225257 | 2015-11-18 12:49:20 +0200 | [diff] [blame] | 77 | |
| 78 | int __hci_req_schedule_adv_instance(struct hci_request *req, u8 instance, |
| 79 | bool force); |
Johan Hedberg | 37d3a1f | 2016-08-28 20:53:34 +0300 | [diff] [blame] | 80 | void hci_req_clear_adv_instance(struct hci_dev *hdev, struct sock *sk, |
| 81 | struct hci_request *req, u8 instance, |
| 82 | bool force); |
Johan Hedberg | f225257 | 2015-11-18 12:49:20 +0200 | [diff] [blame] | 83 | |
Jaganath Kanakkassery | a0fb372 | 2018-07-19 17:09:42 +0530 | [diff] [blame] | 84 | int __hci_req_setup_ext_adv_instance(struct hci_request *req, u8 instance); |
Jaganath Kanakkassery | de181e8 | 2018-07-19 17:09:41 +0530 | [diff] [blame] | 85 | int __hci_req_start_ext_adv(struct hci_request *req, u8 instance); |
Luiz Augusto von Dentz | 1d0fac2 | 2019-06-03 13:48:42 +0300 | [diff] [blame] | 86 | int __hci_req_enable_ext_advertising(struct hci_request *req, u8 instance); |
Jaganath Kanakkassery | 45b7749 | 2018-07-19 17:09:43 +0530 | [diff] [blame] | 87 | void __hci_req_clear_ext_adv_sets(struct hci_request *req); |
Jaganath Kanakkassery | a73c046 | 2018-07-19 17:09:45 +0530 | [diff] [blame] | 88 | int hci_get_random_address(struct hci_dev *hdev, bool require_privacy, |
| 89 | bool use_rpa, struct adv_info *adv_instance, |
| 90 | u8 *own_addr_type, bdaddr_t *rand_addr); |
Jaganath Kanakkassery | de181e8 | 2018-07-19 17:09:41 +0530 | [diff] [blame] | 91 | |
Johan Hedberg | 14bf5ea | 2015-11-22 19:00:22 +0200 | [diff] [blame] | 92 | void __hci_req_update_class(struct hci_request *req); |
| 93 | |
Johan Hedberg | 2154d3f | 2015-11-11 08:30:45 +0200 | [diff] [blame] | 94 | /* Returns true if HCI commands were queued */ |
| 95 | bool hci_req_stop_discovery(struct hci_request *req); |
| 96 | |
Johan Hedberg | 01b1cb8 | 2015-11-16 12:52:21 +0200 | [diff] [blame] | 97 | static inline void hci_req_update_scan(struct hci_dev *hdev) |
| 98 | { |
| 99 | queue_work(hdev->req_workqueue, &hdev->scan_update); |
| 100 | } |
| 101 | |
| 102 | void __hci_req_update_scan(struct hci_request *req); |
Johan Hedberg | 0857dd3 | 2014-12-19 13:40:20 +0200 | [diff] [blame] | 103 | |
| 104 | int hci_update_random_address(struct hci_request *req, bool require_privacy, |
Johan Hedberg | 82a37ad | 2016-03-09 17:30:34 +0200 | [diff] [blame] | 105 | bool use_rpa, u8 *own_addr_type); |
Johan Hedberg | 2cf2221 | 2014-12-19 22:26:00 +0200 | [diff] [blame] | 106 | |
Johan Hedberg | dcc0f0d9 | 2015-10-22 10:49:37 +0300 | [diff] [blame] | 107 | int hci_abort_conn(struct hci_conn *conn, u8 reason); |
| 108 | void __hci_abort_conn(struct hci_request *req, struct hci_conn *conn, |
| 109 | u8 reason); |
Johan Hedberg | 5fc16cc | 2015-11-11 08:11:16 +0200 | [diff] [blame] | 110 | |
Johan Hedberg | 2e93e53 | 2015-11-11 08:11:17 +0200 | [diff] [blame] | 111 | static inline void hci_update_background_scan(struct hci_dev *hdev) |
| 112 | { |
| 113 | queue_work(hdev->req_workqueue, &hdev->bg_scan_update); |
| 114 | } |
| 115 | |
Johan Hedberg | 5fc16cc | 2015-11-11 08:11:16 +0200 | [diff] [blame] | 116 | void hci_request_setup(struct hci_dev *hdev); |
| 117 | void hci_request_cancel_all(struct hci_dev *hdev); |
Michał Narajowski | 1b42206 | 2016-10-05 12:28:27 +0200 | [diff] [blame] | 118 | |
Michał Narajowski | f61851f | 2016-10-19 10:20:27 +0200 | [diff] [blame] | 119 | u8 append_local_name(struct hci_dev *hdev, u8 *ptr, u8 ad_len); |
| 120 | |
Michał Narajowski | 1b42206 | 2016-10-05 12:28:27 +0200 | [diff] [blame] | 121 | static inline u16 eir_append_data(u8 *eir, u16 eir_len, u8 type, |
| 122 | u8 *data, u8 data_len) |
| 123 | { |
| 124 | eir[eir_len++] = sizeof(type) + data_len; |
| 125 | eir[eir_len++] = type; |
| 126 | memcpy(&eir[eir_len], data, data_len); |
| 127 | eir_len += data_len; |
| 128 | |
| 129 | return eir_len; |
| 130 | } |
| 131 | |
| 132 | static inline u16 eir_append_le16(u8 *eir, u16 eir_len, u8 type, u16 data) |
| 133 | { |
| 134 | eir[eir_len++] = sizeof(type) + sizeof(data); |
| 135 | eir[eir_len++] = type; |
| 136 | put_unaligned_le16(data, &eir[eir_len]); |
| 137 | eir_len += sizeof(data); |
| 138 | |
| 139 | return eir_len; |
| 140 | } |