Jens Axboe | 771b53d0 | 2019-10-22 10:25:58 -0600 | [diff] [blame] | 1 | #ifndef INTERNAL_IO_WQ_H |
| 2 | #define INTERNAL_IO_WQ_H |
| 3 | |
| 4 | struct io_wq; |
| 5 | |
/*
 * Flags carried in io_wq_work.flags.  The low bits are behavioral flags;
 * bits at IO_WQ_HASH_SHIFT and above hold the serialization hash key.
 */
enum {
	IO_WQ_WORK_CANCEL	= 1 << 0,
	IO_WQ_WORK_HASHED	= 1 << 2,
	IO_WQ_WORK_UNBOUND	= 1 << 5,
	IO_WQ_WORK_NO_CANCEL	= 1 << 8,
	IO_WQ_WORK_CONCURRENT	= 1 << 9,

	IO_WQ_HASH_SHIFT	= 24,	/* upper 8 bits are used for hash key */
};
| 15 | |
/* Result of a cancellation attempt (io_wq_cancel_work() and friends). */
enum io_wq_cancel {
	IO_WQ_CANCEL_OK,	/* cancelled before started */
	IO_WQ_CANCEL_RUNNING,	/* found, running, and attempted cancelled */
	IO_WQ_CANCEL_NOTFOUND,	/* work not found */
};
| 21 | |
Jens Axboe | 6206f0e | 2019-11-26 11:59:32 -0700 | [diff] [blame] | 22 | struct io_wq_work_node { |
| 23 | struct io_wq_work_node *next; |
| 24 | }; |
| 25 | |
/*
 * Singly-linked FIFO of work items.  first is the head (read locklessly via
 * READ_ONCE in wq_list_empty, hence the WRITE_ONCE stores); last is the tail
 * for O(1) append.
 */
struct io_wq_work_list {
	struct io_wq_work_node *first;
	struct io_wq_work_node *last;
};
| 30 | |
Pavel Begunkov | 86f3cd1 | 2020-03-23 22:57:22 +0300 | [diff] [blame] | 31 | static inline void wq_list_add_after(struct io_wq_work_node *node, |
| 32 | struct io_wq_work_node *pos, |
| 33 | struct io_wq_work_list *list) |
| 34 | { |
| 35 | struct io_wq_work_node *next = pos->next; |
| 36 | |
| 37 | pos->next = node; |
| 38 | node->next = next; |
| 39 | if (!next) |
| 40 | list->last = node; |
| 41 | } |
| 42 | |
Jens Axboe | 6206f0e | 2019-11-26 11:59:32 -0700 | [diff] [blame] | 43 | static inline void wq_list_add_tail(struct io_wq_work_node *node, |
| 44 | struct io_wq_work_list *list) |
| 45 | { |
| 46 | if (!list->first) { |
Jens Axboe | e995d51 | 2019-12-07 21:06:46 -0700 | [diff] [blame] | 47 | list->last = node; |
| 48 | WRITE_ONCE(list->first, node); |
Jens Axboe | 6206f0e | 2019-11-26 11:59:32 -0700 | [diff] [blame] | 49 | } else { |
| 50 | list->last->next = node; |
| 51 | list->last = node; |
| 52 | } |
| 53 | } |
| 54 | |
Pavel Begunkov | 86f3cd1 | 2020-03-23 22:57:22 +0300 | [diff] [blame] | 55 | static inline void wq_list_cut(struct io_wq_work_list *list, |
| 56 | struct io_wq_work_node *last, |
| 57 | struct io_wq_work_node *prev) |
| 58 | { |
| 59 | /* first in the list, if prev==NULL */ |
| 60 | if (!prev) |
| 61 | WRITE_ONCE(list->first, last->next); |
| 62 | else |
| 63 | prev->next = last->next; |
| 64 | |
| 65 | if (last == list->last) |
| 66 | list->last = prev; |
| 67 | last->next = NULL; |
| 68 | } |
| 69 | |
/*
 * Remove the single node @node from @list; @prev is its predecessor or
 * NULL when @node is the head.  A one-node cut.
 */
static inline void wq_list_del(struct io_wq_work_list *list,
			       struct io_wq_work_node *node,
			       struct io_wq_work_node *prev)
{
	wq_list_cut(list, node, prev);
}
| 76 | |
/* Walk @head: @pos is the current node, @prv trails one behind (NULL first). */
#define wq_list_for_each(pos, prv, head) \
	for (pos = (head)->first, prv = NULL; pos; prv = pos, pos = (pos)->next)

/* Lockless emptiness check; READ_ONCE pairs with the WRITE_ONCE publishers. */
#define wq_list_empty(list)	(READ_ONCE((list)->first) == NULL)
/* Reset @list to the empty state. */
#define INIT_WQ_LIST(list)	do {				\
	(list)->first = NULL;					\
	(list)->last = NULL;					\
} while (0)
| 85 | |
Jens Axboe | 771b53d0 | 2019-10-22 10:25:58 -0600 | [diff] [blame] | 86 | struct io_wq_work { |
Pavel Begunkov | 18a542f | 2020-03-23 00:23:29 +0300 | [diff] [blame] | 87 | struct io_wq_work_node list; |
Jens Axboe | fcb323c | 2019-10-24 12:39:47 -0600 | [diff] [blame] | 88 | struct files_struct *files; |
Jens Axboe | cccf0ee | 2020-01-27 16:34:48 -0700 | [diff] [blame] | 89 | struct mm_struct *mm; |
| 90 | const struct cred *creds; |
Jens Axboe | 9392a27 | 2020-02-06 21:42:51 -0700 | [diff] [blame] | 91 | struct fs_struct *fs; |
Jens Axboe | 6206f0e | 2019-11-26 11:59:32 -0700 | [diff] [blame] | 92 | unsigned flags; |
Jens Axboe | 771b53d0 | 2019-10-22 10:25:58 -0600 | [diff] [blame] | 93 | }; |
| 94 | |
Pavel Begunkov | 86f3cd1 | 2020-03-23 22:57:22 +0300 | [diff] [blame] | 95 | static inline struct io_wq_work *wq_next_work(struct io_wq_work *work) |
| 96 | { |
| 97 | if (!work->list.next) |
| 98 | return NULL; |
| 99 | |
| 100 | return container_of(work->list.next, struct io_wq_work, list); |
| 101 | } |
| 102 | |
Pavel Begunkov | e9fd939 | 2020-03-04 16:14:12 +0300 | [diff] [blame] | 103 | typedef void (free_work_fn)(struct io_wq_work *); |
Pavel Begunkov | f5fa38c | 2020-06-08 21:08:20 +0300 | [diff] [blame] | 104 | typedef void (io_wq_work_fn)(struct io_wq_work **); |
Jens Axboe | 7d72306 | 2019-11-12 22:31:31 -0700 | [diff] [blame] | 105 | |
Jens Axboe | 576a347 | 2019-11-25 08:49:20 -0700 | [diff] [blame] | 106 | struct io_wq_data { |
Jens Axboe | 576a347 | 2019-11-25 08:49:20 -0700 | [diff] [blame] | 107 | struct user_struct *user; |
| 108 | |
Pavel Begunkov | f5fa38c | 2020-06-08 21:08:20 +0300 | [diff] [blame] | 109 | io_wq_work_fn *do_work; |
Pavel Begunkov | e9fd939 | 2020-03-04 16:14:12 +0300 | [diff] [blame] | 110 | free_work_fn *free_work; |
Jens Axboe | 576a347 | 2019-11-25 08:49:20 -0700 | [diff] [blame] | 111 | }; |
| 112 | |
/* Create an io-wq with @bounded bounded workers; see struct io_wq_data. */
struct io_wq *io_wq_create(unsigned bounded, struct io_wq_data *data);
/* Take a reference on an existing @wq if it matches @data; false otherwise. */
bool io_wq_get(struct io_wq *wq, struct io_wq_data *data);
/* Tear down @wq and its workers. */
void io_wq_destroy(struct io_wq *wq);

/* Queue @work for asynchronous execution on @wq. */
void io_wq_enqueue(struct io_wq *wq, struct io_wq_work *work);
/* Mark @work hashed on @val so items with the same key run serialized. */
void io_wq_hash_work(struct io_wq_work *work, void *val);
| 119 | |
| 120 | static inline bool io_wq_is_hashed(struct io_wq_work *work) |
| 121 | { |
| 122 | return work->flags & IO_WQ_WORK_HASHED; |
| 123 | } |
Jens Axboe | 771b53d0 | 2019-10-22 10:25:58 -0600 | [diff] [blame] | 124 | |
/* Cancel every pending and running work item on @wq. */
void io_wq_cancel_all(struct io_wq *wq);
/* Attempt to cancel @cwork; see enum io_wq_cancel for the outcome. */
enum io_wq_cancel io_wq_cancel_work(struct io_wq *wq, struct io_wq_work *cwork);

/* Predicate: return true for work items that should be cancelled. */
typedef bool (work_cancel_fn)(struct io_wq_work *, void *);

/*
 * Cancel work matching @cancel(@work, @data).  @cancel_all selects between
 * stopping at the first match and sweeping every match.
 */
enum io_wq_cancel io_wq_cancel_cb(struct io_wq *wq, work_cancel_fn *cancel,
					void *data, bool cancel_all);

/* Return the manager task backing @wq. */
struct task_struct *io_wq_get_task(struct io_wq *wq);
| 134 | |
Jens Axboe | 771b53d0 | 2019-10-22 10:25:58 -0600 | [diff] [blame] | 135 | #if defined(CONFIG_IO_WQ) |
| 136 | extern void io_wq_worker_sleeping(struct task_struct *); |
| 137 | extern void io_wq_worker_running(struct task_struct *); |
| 138 | #else |
| 139 | static inline void io_wq_worker_sleeping(struct task_struct *tsk) |
| 140 | { |
| 141 | } |
| 142 | static inline void io_wq_worker_running(struct task_struct *tsk) |
| 143 | { |
| 144 | } |
Jens Axboe | 525b305 | 2019-12-17 14:13:37 -0700 | [diff] [blame] | 145 | #endif |
Jens Axboe | 771b53d0 | 2019-10-22 10:25:58 -0600 | [diff] [blame] | 146 | |
Jens Axboe | 525b305 | 2019-12-17 14:13:37 -0700 | [diff] [blame] | 147 | static inline bool io_wq_current_is_worker(void) |
| 148 | { |
| 149 | return in_task() && (current->flags & PF_IO_WORKER); |
| 150 | } |
| 151 | #endif |