/*
 * Copyright 2015 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <grpc/support/port_platform.h>

#include <string.h>

#include <grpc/slice_buffer.h>
#include <grpc/support/alloc.h>
#include <grpc/support/log.h>

#include "src/core/lib/gpr/useful.h"
#include "src/core/lib/iomgr/exec_ctx.h"
#include "src/core/lib/slice/slice_internal.h"
32 /* grow a buffer; requires GRPC_SLICE_BUFFER_INLINE_ELEMENTS > 1 */
33 #define GROW(x) (3 * (x) / 2)
35 /* Typically, we do not actually need to embiggen (by calling
36 * memmove/malloc/realloc) - only if we were up against the full capacity of the
37 * slice buffer. If do_embiggen is inlined, the compiler clobbers multiple
38 * registers pointlessly in the common case. */
39 static void GPR_ATTRIBUTE_NOINLINE do_embiggen(grpc_slice_buffer* sb,
40 const size_t slice_count,
41 const size_t slice_offset) {
42 if (slice_offset != 0) {
43 /* Make room by moving elements if there's still space unused */
44 memmove(sb->base_slices, sb->slices, sb->count * sizeof(grpc_slice));
45 sb->slices = sb->base_slices;
47 /* Allocate more memory if no more space is available */
48 const size_t new_capacity = GROW(sb->capacity);
49 sb->capacity = new_capacity;
50 if (sb->base_slices == sb->inlined) {
51 sb->base_slices = static_cast<grpc_slice*>(
52 gpr_malloc(new_capacity * sizeof(grpc_slice)));
53 memcpy(sb->base_slices, sb->inlined, slice_count * sizeof(grpc_slice));
55 sb->base_slices = static_cast<grpc_slice*>(
56 gpr_realloc(sb->base_slices, new_capacity * sizeof(grpc_slice)));
59 sb->slices = sb->base_slices + slice_offset;
63 static void maybe_embiggen(grpc_slice_buffer* sb) {
65 sb->slices = sb->base_slices;
69 /* How far away from sb->base_slices is sb->slices pointer */
70 size_t slice_offset = static_cast<size_t>(sb->slices - sb->base_slices);
71 size_t slice_count = sb->count + slice_offset;
72 if (GPR_UNLIKELY(slice_count == sb->capacity)) {
73 do_embiggen(sb, slice_count, slice_offset);
77 void grpc_slice_buffer_init(grpc_slice_buffer* sb) {
80 sb->capacity = GRPC_SLICE_BUFFER_INLINE_ELEMENTS;
81 sb->base_slices = sb->slices = sb->inlined;
84 void grpc_slice_buffer_destroy_internal(grpc_slice_buffer* sb) {
85 grpc_slice_buffer_reset_and_unref_internal(sb);
86 if (sb->base_slices != sb->inlined) {
87 gpr_free(sb->base_slices);
91 void grpc_slice_buffer_destroy(grpc_slice_buffer* sb) {
92 if (grpc_core::ExecCtx::Get() == nullptr) {
93 grpc_core::ExecCtx exec_ctx;
94 grpc_slice_buffer_destroy_internal(sb);
96 grpc_slice_buffer_destroy_internal(sb);
100 uint8_t* grpc_slice_buffer_tiny_add(grpc_slice_buffer* sb, size_t n) {
106 if (sb->count == 0) goto add_first;
107 back = &sb->slices[sb->count - 1];
108 if (back->refcount) goto add_new;
109 if ((back->data.inlined.length + n) > sizeof(back->data.inlined.bytes))
111 out = back->data.inlined.bytes + back->data.inlined.length;
112 back->data.inlined.length =
113 static_cast<uint8_t>(back->data.inlined.length + n);
119 back = &sb->slices[sb->count];
121 back->refcount = nullptr;
122 back->data.inlined.length = static_cast<uint8_t>(n);
123 return back->data.inlined.bytes;
126 size_t grpc_slice_buffer_add_indexed(grpc_slice_buffer* sb, grpc_slice s) {
127 size_t out = sb->count;
130 sb->length += GRPC_SLICE_LENGTH(s);
135 void grpc_slice_buffer_add(grpc_slice_buffer* sb, grpc_slice s) {
136 size_t n = sb->count;
137 /* if both the last slice in the slice buffer and the slice being added
138 are inlined (that is, that they carry their data inside the slice data
139 structure), and the back slice is not full, then concatenate directly
140 into the back slice, preventing many small slices being passed into
142 if (!s.refcount && n) {
143 grpc_slice* back = &sb->slices[n - 1];
144 if (!back->refcount &&
145 back->data.inlined.length < GRPC_SLICE_INLINED_SIZE) {
146 if (s.data.inlined.length + back->data.inlined.length <=
147 GRPC_SLICE_INLINED_SIZE) {
148 memcpy(back->data.inlined.bytes + back->data.inlined.length,
149 s.data.inlined.bytes, s.data.inlined.length);
150 back->data.inlined.length = static_cast<uint8_t>(
151 back->data.inlined.length + s.data.inlined.length);
153 size_t cp1 = GRPC_SLICE_INLINED_SIZE - back->data.inlined.length;
154 memcpy(back->data.inlined.bytes + back->data.inlined.length,
155 s.data.inlined.bytes, cp1);
156 back->data.inlined.length = GRPC_SLICE_INLINED_SIZE;
158 back = &sb->slices[n];
160 back->refcount = nullptr;
161 back->data.inlined.length =
162 static_cast<uint8_t>(s.data.inlined.length - cp1);
163 memcpy(back->data.inlined.bytes, s.data.inlined.bytes + cp1,
164 s.data.inlined.length - cp1);
166 sb->length += s.data.inlined.length;
167 return; /* early out */
170 grpc_slice_buffer_add_indexed(sb, s);
173 void grpc_slice_buffer_addn(grpc_slice_buffer* sb, grpc_slice* s, size_t n) {
175 for (i = 0; i < n; i++) {
176 grpc_slice_buffer_add(sb, s[i]);
180 void grpc_slice_buffer_pop(grpc_slice_buffer* sb) {
181 if (sb->count != 0) {
182 size_t count = --sb->count;
183 sb->length -= GRPC_SLICE_LENGTH(sb->slices[count]);
187 void grpc_slice_buffer_reset_and_unref_internal(grpc_slice_buffer* sb) {
189 for (i = 0; i < sb->count; i++) {
190 grpc_slice_unref_internal(sb->slices[i]);
195 sb->slices = sb->base_slices;
198 void grpc_slice_buffer_reset_and_unref(grpc_slice_buffer* sb) {
199 if (grpc_core::ExecCtx::Get() == nullptr) {
200 grpc_core::ExecCtx exec_ctx;
201 grpc_slice_buffer_reset_and_unref_internal(sb);
203 grpc_slice_buffer_reset_and_unref_internal(sb);
207 void grpc_slice_buffer_swap(grpc_slice_buffer* a, grpc_slice_buffer* b) {
208 size_t a_offset = static_cast<size_t>(a->slices - a->base_slices);
209 size_t b_offset = static_cast<size_t>(b->slices - b->base_slices);
211 size_t a_count = a->count + a_offset;
212 size_t b_count = b->count + b_offset;
214 if (a->base_slices == a->inlined) {
215 if (b->base_slices == b->inlined) {
216 /* swap contents of inlined buffer */
217 grpc_slice temp[GRPC_SLICE_BUFFER_INLINE_ELEMENTS];
218 memcpy(temp, a->base_slices, a_count * sizeof(grpc_slice));
219 memcpy(a->base_slices, b->base_slices, b_count * sizeof(grpc_slice));
220 memcpy(b->base_slices, temp, a_count * sizeof(grpc_slice));
222 /* a is inlined, b is not - copy a inlined into b, fix pointers */
223 a->base_slices = b->base_slices;
224 b->base_slices = b->inlined;
225 memcpy(b->base_slices, a->inlined, a_count * sizeof(grpc_slice));
227 } else if (b->base_slices == b->inlined) {
228 /* b is inlined, a is not - copy b inlined int a, fix pointers */
229 b->base_slices = a->base_slices;
230 a->base_slices = a->inlined;
231 memcpy(a->base_slices, b->inlined, b_count * sizeof(grpc_slice));
233 /* no inlining: easy swap */
234 GPR_SWAP(grpc_slice*, a->base_slices, b->base_slices);
237 /* Update the slices pointers (cannot do a GPR_SWAP on slices fields here).
238 * Also note that since the base_slices pointers are already swapped we need
239 * use 'b_offset' for 'a->base_slices' and vice versa */
240 a->slices = a->base_slices + b_offset;
241 b->slices = b->base_slices + a_offset;
243 /* base_slices and slices fields are correctly set. Swap all other fields */
244 GPR_SWAP(size_t, a->count, b->count);
245 GPR_SWAP(size_t, a->capacity, b->capacity);
246 GPR_SWAP(size_t, a->length, b->length);
249 void grpc_slice_buffer_move_into(grpc_slice_buffer* src,
250 grpc_slice_buffer* dst) {
251 /* anything to move? */
252 if (src->count == 0) {
255 /* anything in dst? */
256 if (dst->count == 0) {
257 grpc_slice_buffer_swap(src, dst);
260 /* both buffers have data - copy, and reset src */
261 grpc_slice_buffer_addn(dst, src->slices, src->count);
266 template <bool incref>
267 static void slice_buffer_move_first_maybe_ref(grpc_slice_buffer* src, size_t n,
268 grpc_slice_buffer* dst) {
269 GPR_ASSERT(src->length >= n);
270 if (src->length == n) {
271 grpc_slice_buffer_move_into(src, dst);
275 size_t output_len = dst->length + n;
276 size_t new_input_len = src->length - n;
278 while (src->count > 0) {
279 grpc_slice slice = grpc_slice_buffer_take_first(src);
280 size_t slice_len = GRPC_SLICE_LENGTH(slice);
282 grpc_slice_buffer_add(dst, slice);
284 } else if (n == slice_len) {
285 grpc_slice_buffer_add(dst, slice);
287 } else if (incref) { /* n < slice_len */
288 grpc_slice_buffer_undo_take_first(
289 src, grpc_slice_split_tail_maybe_ref(&slice, n, GRPC_SLICE_REF_BOTH));
290 GPR_ASSERT(GRPC_SLICE_LENGTH(slice) == n);
291 grpc_slice_buffer_add(dst, slice);
293 } else { /* n < slice_len */
294 grpc_slice_buffer_undo_take_first(
295 src, grpc_slice_split_tail_maybe_ref(&slice, n, GRPC_SLICE_REF_TAIL));
296 GPR_ASSERT(GRPC_SLICE_LENGTH(slice) == n);
297 grpc_slice_buffer_add_indexed(dst, slice);
301 GPR_ASSERT(dst->length == output_len);
302 GPR_ASSERT(src->length == new_input_len);
303 GPR_ASSERT(src->count > 0);
306 void grpc_slice_buffer_move_first(grpc_slice_buffer* src, size_t n,
307 grpc_slice_buffer* dst) {
308 slice_buffer_move_first_maybe_ref<true>(src, n, dst);
311 void grpc_slice_buffer_move_first_no_ref(grpc_slice_buffer* src, size_t n,
312 grpc_slice_buffer* dst) {
313 slice_buffer_move_first_maybe_ref<false>(src, n, dst);
316 void grpc_slice_buffer_move_first_into_buffer(grpc_slice_buffer* src, size_t n,
318 char* dstp = static_cast<char*>(dst);
319 GPR_ASSERT(src->length >= n);
322 grpc_slice slice = grpc_slice_buffer_take_first(src);
323 size_t slice_len = GRPC_SLICE_LENGTH(slice);
325 memcpy(dstp, GRPC_SLICE_START_PTR(slice), n);
326 grpc_slice_buffer_undo_take_first(
327 src, grpc_slice_sub_no_ref(slice, n, slice_len));
329 } else if (slice_len == n) {
330 memcpy(dstp, GRPC_SLICE_START_PTR(slice), n);
331 grpc_slice_unref_internal(slice);
334 memcpy(dstp, GRPC_SLICE_START_PTR(slice), slice_len);
337 grpc_slice_unref_internal(slice);
342 void grpc_slice_buffer_trim_end(grpc_slice_buffer* sb, size_t n,
343 grpc_slice_buffer* garbage) {
344 GPR_ASSERT(n <= sb->length);
347 size_t idx = sb->count - 1;
348 grpc_slice slice = sb->slices[idx];
349 size_t slice_len = GRPC_SLICE_LENGTH(slice);
351 sb->slices[idx] = grpc_slice_split_head(&slice, slice_len - n);
353 grpc_slice_buffer_add_indexed(garbage, slice);
355 grpc_slice_unref_internal(slice);
358 } else if (slice_len == n) {
360 grpc_slice_buffer_add_indexed(garbage, slice);
362 grpc_slice_unref_internal(slice);
368 grpc_slice_buffer_add_indexed(garbage, slice);
370 grpc_slice_unref_internal(slice);
378 grpc_slice grpc_slice_buffer_take_first(grpc_slice_buffer* sb) {
380 GPR_ASSERT(sb->count > 0);
381 slice = sb->slices[0];
384 sb->length -= GRPC_SLICE_LENGTH(slice);
389 void grpc_slice_buffer_remove_first(grpc_slice_buffer* sb) {
390 GPR_DEBUG_ASSERT(sb->count > 0);
391 sb->length -= GRPC_SLICE_LENGTH(sb->slices[0]);
392 grpc_slice_unref_internal(sb->slices[0]);
394 if (--sb->count == 0) {
395 sb->slices = sb->base_slices;
399 void grpc_slice_buffer_sub_first(grpc_slice_buffer* sb, size_t begin,
401 // TODO(soheil): Introduce a ptr version for sub.
402 sb->length -= GRPC_SLICE_LENGTH(sb->slices[0]);
403 sb->slices[0] = grpc_slice_sub_no_ref(sb->slices[0], begin, end);
404 sb->length += end - begin;
407 void grpc_slice_buffer_undo_take_first(grpc_slice_buffer* sb,
410 sb->slices[0] = slice;
412 sb->length += GRPC_SLICE_LENGTH(slice);