3 * Copyright 2016 gRPC authors.
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
9 * http://www.apache.org/licenses/LICENSE-2.0
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
18 #include <grpc/support/port_platform.h>
20 #include "src/core/lib/iomgr/error.h"
25 #include <grpc/status.h>
26 #include <grpc/support/alloc.h>
27 #include <grpc/support/log.h>
28 #include <grpc/support/string_util.h>
31 #include <grpc/support/log_windows.h>
34 #include "src/core/lib/debug/trace.h"
35 #include "src/core/lib/gpr/useful.h"
36 #include "src/core/lib/iomgr/error_internal.h"
37 #include "src/core/lib/profiling/timers.h"
38 #include "src/core/lib/slice/slice_internal.h"
// Debug-only trace flags. "error_refcount" gates the per-ref/unref gpr_log
// calls in grpc_error_do_ref / grpc_error_do_unref below.
// NOTE(review): the continuation line naming the first flag appears to be
// missing from this copy of the file.
40 grpc_core::DebugOnlyTraceFlag grpc_trace_error_refcount(false,
42 grpc_core::DebugOnlyTraceFlag grpc_trace_closure(false, "closure");
// Maps a grpc_error_ints key to the JSON key name used when rendering the
// error as a string (see grpc_error_string below).
// NOTE(review): the `switch (key)` header and most per-case `return` lines
// appear missing from this copy; code preserved byte-for-byte.
44 static const char* error_int_name(grpc_error_ints key) {
46 case GRPC_ERROR_INT_ERRNO:
48 case GRPC_ERROR_INT_FILE_LINE:
50 case GRPC_ERROR_INT_STREAM_ID:
52 case GRPC_ERROR_INT_GRPC_STATUS:
54 case GRPC_ERROR_INT_OFFSET:
56 case GRPC_ERROR_INT_INDEX:
58 case GRPC_ERROR_INT_SIZE:
60 case GRPC_ERROR_INT_HTTP2_ERROR:
62 case GRPC_ERROR_INT_TSI_CODE:
64 case GRPC_ERROR_INT_SECURITY_STATUS:
65 return "security_status";
66 case GRPC_ERROR_INT_FD:
68 case GRPC_ERROR_INT_WSA_ERROR:
70 case GRPC_ERROR_INT_HTTP_STATUS:
72 case GRPC_ERROR_INT_LIMIT:
74 case GRPC_ERROR_INT_OCCURRED_DURING_WRITE:
75 return "occurred_during_write";
76 case GRPC_ERROR_INT_CHANNEL_CONNECTIVITY_STATE:
77 return "channel_connectivity_state";
78 case GRPC_ERROR_INT_MAX:
// GRPC_ERROR_INT_MAX is a sentinel, never a real key.
79 GPR_UNREACHABLE_CODE(return "unknown");
// Fallthrough after the switch: all enum values are handled above.
81 GPR_UNREACHABLE_CODE(return "unknown");
// Maps a grpc_error_strs key to the JSON key name used when rendering the
// error as a string.
// NOTE(review): the `switch (key)` header and several per-case `return`
// lines appear missing from this copy; code preserved byte-for-byte.
84 static const char* error_str_name(grpc_error_strs key) {
86 case GRPC_ERROR_STR_KEY:
88 case GRPC_ERROR_STR_VALUE:
90 case GRPC_ERROR_STR_DESCRIPTION:
92 case GRPC_ERROR_STR_OS_ERROR:
94 case GRPC_ERROR_STR_TARGET_ADDRESS:
95 return "target_address";
96 case GRPC_ERROR_STR_SYSCALL:
98 case GRPC_ERROR_STR_FILE:
100 case GRPC_ERROR_STR_GRPC_MESSAGE:
101 return "grpc_message";
102 case GRPC_ERROR_STR_RAW_BYTES:
104 case GRPC_ERROR_STR_TSI_ERROR:
106 case GRPC_ERROR_STR_FILENAME:
108 case GRPC_ERROR_STR_QUEUED_BUFFERS:
109 return "queued_buffers";
110 case GRPC_ERROR_STR_MAX:
// GRPC_ERROR_STR_MAX is a sentinel, never a real key.
111 GPR_UNREACHABLE_CODE(return "unknown");
113 GPR_UNREACHABLE_CODE(return "unknown");
// Maps a grpc_error_times key to the JSON key name used when rendering the
// error as a string. NOTE(review): switch header / `return` lines appear
// missing from this copy; code preserved byte-for-byte.
116 static const char* error_time_name(grpc_error_times key) {
118 case GRPC_ERROR_TIME_CREATED:
120 case GRPC_ERROR_TIME_MAX:
121 GPR_UNREACHABLE_CODE(return "unknown");
123 GPR_UNREACHABLE_CODE(return "unknown");
// Debug build: take a ref on `err`, logging old->new refcount with the
// call site when the error_refcount trace flag is enabled. The refcount
// reads use no_barrier loads, so the logged values are best-effort only.
127 grpc_error* grpc_error_do_ref(grpc_error* err, const char* file, int line) {
128 if (grpc_trace_error_refcount.enabled()) {
129 gpr_log(GPR_DEBUG, "%p: %" PRIdPTR " -> %" PRIdPTR " [%s:%d]", err,
130 gpr_atm_no_barrier_load(&err->atomics.refs.count),
131 gpr_atm_no_barrier_load(&err->atomics.refs.count) + 1, file, line);
133 gpr_ref(&err->atomics.refs);
// Non-debug build: take a ref on `err` and return it (the `return err;`
// line appears missing from this copy).
137 grpc_error* grpc_error_do_ref(grpc_error* err) {
138 gpr_ref(&err->atomics.refs);
// Unrefs every child error linked into err's arena. Children form a
// singly linked list of grpc_linked_error records threaded through arena
// slots, starting at first_err and terminated by UINT8_MAX. The assert
// checks list integrity: only the last_err slot may have no successor.
// NOTE(review): the loop-advance line (`slot = lerr->next;`) appears
// missing from this copy.
143 static void unref_errs(grpc_error* err) {
144 uint8_t slot = err->first_err;
145 while (slot != UINT8_MAX) {
146 grpc_linked_error* lerr =
147 reinterpret_cast<grpc_linked_error*>(err->arena + slot);
148 GRPC_ERROR_UNREF(lerr->err);
149 GPR_ASSERT(err->last_err == slot ? lerr->next == UINT8_MAX
150 : lerr->next != UINT8_MAX);
// Unrefs every slice stored in err's arena. strs[which] holds either the
// arena slot index of the slice or UINT8_MAX when the key is unset.
155 static void unref_strs(grpc_error* err) {
156 for (size_t which = 0; which < GRPC_ERROR_STR_MAX; ++which) {
157 uint8_t slot = err->strs[which];
158 if (slot != UINT8_MAX) {
159 grpc_slice_unref_internal(
160 *reinterpret_cast<grpc_slice*>(err->arena + slot));
// Frees a heap-allocated error: releases the cached error_string (may be
// 0/null if grpc_error_string was never called). Special errors
// (NONE/OOM/CANCELLED) are tagged pointers and must never reach here.
// NOTE(review): the unref_errs/unref_strs calls and the final gpr_free of
// the error itself appear missing from this copy.
165 static void error_destroy(grpc_error* err) {
166 GPR_ASSERT(!grpc_error_is_special(err));
169 gpr_free((void*)gpr_atm_acq_load(&err->atomics.error_string));
// Debug build: drop a ref on `err`, logging old->new refcount with the
// call site when tracing is enabled; destroys the error when the count
// hits zero (gpr_unref returns true on the final unref).
174 void grpc_error_do_unref(grpc_error* err, const char* file, int line) {
175 if (grpc_trace_error_refcount.enabled()) {
176 gpr_log(GPR_DEBUG, "%p: %" PRIdPTR " -> %" PRIdPTR " [%s:%d]", err,
177 gpr_atm_no_barrier_load(&err->atomics.refs.count),
178 gpr_atm_no_barrier_load(&err->atomics.refs.count) - 1, file, line);
180 if (gpr_unref(&err->atomics.refs)) {
// Non-debug build: drop a ref; destroys the error on the final unref
// (the error_destroy call appears missing from this copy).
185 void grpc_error_do_unref(grpc_error* err) {
186 if (gpr_unref(&err->atomics.refs)) {
// Reserves `size` bytes (rounded to intptr_t-sized slots) in the error's
// arena and returns the starting slot index. Grows capacity by 1.5x (capped
// below UINT8_MAX) and reallocs the error in place — which is why callers
// pass grpc_error** and must not hold stale pointers across this call.
// NOTE(review): the "return UINT8_MAX on failure" path implied by callers
// appears to be in lines missing from this copy.
192 static uint8_t get_placement(grpc_error** err, size_t size) {
194 uint8_t slots = static_cast<uint8_t>(size / sizeof(intptr_t));
195 if ((*err)->arena_size + slots > (*err)->arena_capacity) {
196 (*err)->arena_capacity = static_cast<uint8_t> GPR_MIN(
197 UINT8_MAX - 1, (3 * (*err)->arena_capacity / 2));
198 if ((*err)->arena_size + slots > (*err)->arena_capacity) {
202 grpc_error* orig = *err;
204 *err = static_cast<grpc_error*>(gpr_realloc(
205 *err, sizeof(grpc_error) + (*err)->arena_capacity * sizeof(intptr_t)));
207 if (grpc_trace_error_refcount.enabled()) {
209 gpr_log(GPR_DEBUG, "realloc %p -> %p", orig, *err);
// Hand out the slots at the current end of the arena.
214 uint8_t placement = (*err)->arena_size;
215 (*err)->arena_size = static_cast<uint8_t>((*err)->arena_size + slots);
// Stores an integer property on the error. Reuses the existing arena slot
// if `which` was already set; otherwise allocates one. If the arena is
// full (slot stays UINT8_MAX) the value is logged and dropped rather than
// crashing — errors are best-effort diagnostic containers.
219 static void internal_set_int(grpc_error** err, grpc_error_ints which,
221 uint8_t slot = (*err)->ints[which];
222 if (slot == UINT8_MAX) {
223 slot = get_placement(err, sizeof(value));
224 if (slot == UINT8_MAX) {
225 gpr_log(GPR_ERROR, "Error %p is full, dropping int {\"%s\":%" PRIiPTR "}",
226 *err, error_int_name(which), value);
230 (*err)->ints[which] = slot;
231 (*err)->arena[slot] = value;
// Stores a slice property on the error. On overwrite, the previously
// stored slice is unreffed first; the new slice is memcpy'd into the arena
// (ownership of `value`'s ref transfers to the error). If the arena is
// full the string is logged and dropped.
234 static void internal_set_str(grpc_error** err, grpc_error_strs which,
235 const grpc_slice& value) {
236 uint8_t slot = (*err)->strs[which];
237 if (slot == UINT8_MAX) {
238 slot = get_placement(err, sizeof(value));
239 if (slot == UINT8_MAX) {
240 const char* str = grpc_slice_to_c_string(value);
241 gpr_log(GPR_ERROR, "Error %p is full, dropping string {\"%s\":\"%s\"}",
242 *err, error_str_name(which), str);
243 gpr_free((void*)str);
// Overwrite path: release the slice previously stored in this slot.
247 grpc_slice_unref_internal(
248 *reinterpret_cast<grpc_slice*>((*err)->arena + slot));
250 (*err)->strs[which] = slot;
251 memcpy((*err)->arena + slot, &value, sizeof(value));
// Forward declaration: fmt_time (defined further below) is needed by the
// arena-full logging path here.
254 static char* fmt_time(gpr_timespec tm);
// Stores a timestamp property on the error, mirroring internal_set_int:
// allocate a slot on first set, log-and-drop if the arena is full, then
// memcpy the timespec into the arena.
255 static void internal_set_time(grpc_error** err, grpc_error_times which,
256 gpr_timespec value) {
257 uint8_t slot = (*err)->times[which];
258 if (slot == UINT8_MAX) {
259 slot = get_placement(err, sizeof(value));
260 if (slot == UINT8_MAX) {
261 const char* time_str = fmt_time(value);
262 gpr_log(GPR_ERROR, "Error %p is full, dropping \"%s\":\"%s\"}", *err,
263 error_time_name(which), time_str);
264 gpr_free((void*)time_str);
268 (*err)->times[which] = slot;
269 memcpy((*err)->arena + slot, &value, sizeof(value));
// Appends `new_err` as a child of *err, taking ownership of the caller's
// ref. Children are kept as a linked list of grpc_linked_error records in
// the arena: first_err/last_err are slot indices, UINT8_MAX means empty.
// If the arena is full the child is logged and unreffed (dropped).
272 static void internal_add_error(grpc_error** err, grpc_error* new_err) {
273 grpc_linked_error new_last = {new_err, UINT8_MAX};
274 uint8_t slot = get_placement(err, sizeof(grpc_linked_error));
275 if (slot == UINT8_MAX) {
276 gpr_log(GPR_ERROR, "Error %p is full, dropping error %p = %s", *err,
277 new_err, grpc_error_string(new_err));
278 GRPC_ERROR_UNREF(new_err);
// Empty list: this child becomes both head and tail.
281 if ((*err)->first_err == UINT8_MAX) {
282 GPR_ASSERT((*err)->last_err == UINT8_MAX);
283 (*err)->last_err = slot;
284 (*err)->first_err = slot;
// Non-empty list: link the old tail to the new slot.
286 GPR_ASSERT((*err)->last_err != UINT8_MAX);
287 grpc_linked_error* old_last =
288 reinterpret_cast<grpc_linked_error*>((*err)->arena + (*err)->last_err);
289 old_last->next = slot;
290 (*err)->last_err = slot;
292 memcpy((*err)->arena + slot, &new_last, sizeof(grpc_linked_error));
// Arena sizing: the arena is an array of intptr_t "slots"; these macros
// give the slot cost of each stored value kind. SLOTS_PER_INT is
// trivially 1 (kept in macro form for symmetry).
295 #define SLOTS_PER_INT (sizeof(intptr_t) / sizeof(intptr_t))
296 #define SLOTS_PER_STR (sizeof(grpc_slice) / sizeof(intptr_t))
297 #define SLOTS_PER_TIME (sizeof(gpr_timespec) / sizeof(intptr_t))
298 #define SLOTS_PER_LINKED_ERROR (sizeof(grpc_linked_error) / sizeof(intptr_t))
300 // size of storing one int and two slices and a timespec. For line, desc, file,
302 #define DEFAULT_ERROR_CAPACITY \
303 (SLOTS_PER_INT + (SLOTS_PER_STR * 2) + SLOTS_PER_TIME)
305 // It is very common to include and extra int and string in an error
306 #define SURPLUS_CAPACITY (2 * SLOTS_PER_INT + SLOTS_PER_TIME)
// Global kill-switch used around fork/shutdown: when false,
// grpc_error_create logs a diagnostic (see below). Stored as a gpr_atm so
// it can be toggled without locks.
308 static gpr_atm g_error_creation_allowed = true;
310 void grpc_disable_error_creation() {
311 gpr_atm_no_barrier_store(&g_error_creation_allowed, false);
314 void grpc_enable_error_creation() {
315 gpr_atm_no_barrier_store(&g_error_creation_allowed, true);
// Creates a new heap-allocated error with refcount 1. Pre-sizes the arena
// for the standard properties (file/line/description/timestamp) plus one
// linked-error slot per referenced child plus SURPLUS_CAPACITY headroom,
// then records file:line, the description slice (ownership taken), each
// non-NONE child in `referencing` (ref'd — see ownership TODO), and the
// creation timestamp. NOTE(review): several interior lines (the
// gpr_log of the disabled-creation warning, the internal_add_error call in
// the loop, the final `return err;`) appear missing from this copy.
318 grpc_error* grpc_error_create(const char* file, int line,
319 const grpc_slice& desc, grpc_error** referencing,
320 size_t num_referencing) {
321 GPR_TIMER_SCOPE("grpc_error_create", 0);
322 uint8_t initial_arena_capacity = static_cast<uint8_t>(
323 DEFAULT_ERROR_CAPACITY +
324 static_cast<uint8_t>(num_referencing * SLOTS_PER_LINKED_ERROR) +
326 grpc_error* err = static_cast<grpc_error*>(
327 gpr_malloc(sizeof(*err) + initial_arena_capacity * sizeof(intptr_t)));
328 if (err == nullptr) { // TODO(ctiller): make gpr_malloc return NULL
329 return GRPC_ERROR_OOM;
// Diagnostic only: creating errors while disabled is a bug in the caller.
332 if (!gpr_atm_no_barrier_load(&g_error_creation_allowed)) {
334 "Error creation occurred when error creation was disabled [%s:%d]",
338 if (grpc_trace_error_refcount.enabled()) {
339 gpr_log(GPR_DEBUG, "%p create [%s:%d]", err, file, line);
344 err->arena_capacity = initial_arena_capacity;
345 err->first_err = UINT8_MAX;
346 err->last_err = UINT8_MAX;
// UINT8_MAX in every ints/strs/times slot marks "property unset".
348 memset(err->ints, UINT8_MAX, GRPC_ERROR_INT_MAX);
349 memset(err->strs, UINT8_MAX, GRPC_ERROR_STR_MAX);
350 memset(err->times, UINT8_MAX, GRPC_ERROR_TIME_MAX);
352 internal_set_int(&err, GRPC_ERROR_INT_FILE_LINE, line);
353 internal_set_str(&err, GRPC_ERROR_STR_FILE,
354 grpc_slice_from_static_string(file));
355 internal_set_str(&err, GRPC_ERROR_STR_DESCRIPTION, desc);
357 for (size_t i = 0; i < num_referencing; ++i) {
358 if (referencing[i] == GRPC_ERROR_NONE) continue;
362 referencing[i])); // TODO(ncteisen), change ownership semantics
365 internal_set_time(&err, GRPC_ERROR_TIME_CREATED, gpr_now(GPR_CLOCK_REALTIME));
// error_string cache starts empty; refcount starts at 1 for the caller.
367 gpr_atm_no_barrier_store(&err->atomics.error_string, 0);
368 gpr_ref_init(&err->atomics.refs, 1);
// Takes an extra ref on every slice stored in err's arena; used when
// copying an error so the copy co-owns the slices (see
// copy_error_and_unref).
372 static void ref_strs(grpc_error* err) {
373 for (size_t i = 0; i < GRPC_ERROR_STR_MAX; ++i) {
374 uint8_t slot = err->strs[i];
375 if (slot != UINT8_MAX) {
376 grpc_slice_ref_internal(
377 *reinterpret_cast<grpc_slice*>(err->arena + slot));
// Takes an extra ref on every child error linked into err's arena;
// counterpart of unref_errs, used when copying an error.
// NOTE(review): the loop-advance line (`slot = lerr->next;`) appears
// missing from this copy.
382 static void ref_errs(grpc_error* err) {
383 uint8_t slot = err->first_err;
384 while (slot != UINT8_MAX) {
385 grpc_linked_error* lerr =
386 reinterpret_cast<grpc_linked_error*>(err->arena + slot);
387 GRPC_ERROR_REF(lerr->err);
// Returns a mutable error equivalent to `in`, consuming the caller's ref.
// Three cases:
//  - special errors (NONE/OOM/CANCELLED) can't be mutated, so a fresh
//    error is created and populated with the equivalent description/status;
//  - uniquely-owned errors are returned as-is (no copy needed);
//  - shared errors are deep-copied: a new allocation (grown if there is
//    not at least SLOTS_PER_STR of headroom, since callers will add to
//    it), bulk memcpy of everything after the atomics field, then fresh
//    atomics. NOTE(review): the ref_strs/ref_errs calls that the bulk
//    memcpy requires appear to be in lines missing from this copy.
392 static grpc_error* copy_error_and_unref(grpc_error* in) {
393 GPR_TIMER_SCOPE("copy_error_and_unref", 0);
395 if (grpc_error_is_special(in)) {
396 out = GRPC_ERROR_CREATE_FROM_STATIC_STRING("unknown");
397 if (in == GRPC_ERROR_NONE) {
398 internal_set_str(&out, GRPC_ERROR_STR_DESCRIPTION,
399 grpc_slice_from_static_string("no error"));
400 internal_set_int(&out, GRPC_ERROR_INT_GRPC_STATUS, GRPC_STATUS_OK);
401 } else if (in == GRPC_ERROR_OOM) {
402 internal_set_str(&out, GRPC_ERROR_STR_DESCRIPTION,
403 grpc_slice_from_static_string("oom"));
404 } else if (in == GRPC_ERROR_CANCELLED) {
405 internal_set_str(&out, GRPC_ERROR_STR_DESCRIPTION,
406 grpc_slice_from_static_string("cancelled"));
407 internal_set_int(&out, GRPC_ERROR_INT_GRPC_STATUS, GRPC_STATUS_CANCELLED);
// Sole owner: safe to mutate in place, skip the copy entirely.
409 } else if (gpr_ref_is_unique(&in->atomics.refs)) {
412 uint8_t new_arena_capacity = in->arena_capacity;
413 // the returned err will be added to, so we ensure this is room to avoid
414 // unneeded allocations.
415 if (in->arena_capacity - in->arena_size <
416 static_cast<uint8_t> SLOTS_PER_STR) {
417 new_arena_capacity = static_cast<uint8_t>(3 * new_arena_capacity / 2);
419 out = static_cast<grpc_error*>(
420 gpr_malloc(sizeof(*in) + new_arena_capacity * sizeof(intptr_t)));
422 if (grpc_trace_error_refcount.enabled()) {
423 gpr_log(GPR_DEBUG, "%p create copying %p", out, in);
426 // bulk memcpy of the rest of the struct.
// skip = size of the atomics member, which must not be byte-copied.
427 size_t skip = sizeof(&out->atomics);
428 memcpy((void*)((uintptr_t)out + skip), (void*)((uintptr_t)in + skip),
429 sizeof(*in) + (in->arena_size * sizeof(intptr_t)) - skip);
430 // manually set the atomics and the new capacity
431 gpr_atm_no_barrier_store(&out->atomics.error_string, 0);
432 gpr_ref_init(&out->atomics.refs, 1);
433 out->arena_capacity = new_arena_capacity;
436 GRPC_ERROR_UNREF(in);
// Public API: returns an error equal to `src` plus the given int property.
// Consumes the caller's ref on `src` (copy-on-write via
// copy_error_and_unref); the `return new_err;` line appears missing from
// this copy.
441 grpc_error* grpc_error_set_int(grpc_error* src, grpc_error_ints which,
443 GPR_TIMER_SCOPE("grpc_error_set_int", 0);
444 grpc_error* new_err = copy_error_and_unref(src);
445 internal_set_int(&new_err, which, value);
// Table mapping the special (tagged-pointer) errors to a canned status
// code and message; indexed by the special error's pointer value, so entry
// order must match the GRPC_ERROR_NONE/RESERVED_1/OOM/RESERVED_2/CANCELLED
// tag values. Used by grpc_error_get_int / grpc_error_get_str below.
450 grpc_status_code code;
453 } special_error_status_map;
455 const special_error_status_map error_status_map[] = {
456 {GRPC_STATUS_OK, "", 0}, // GRPC_ERROR_NONE
457 {GRPC_STATUS_INVALID_ARGUMENT, "", 0}, // GRPC_ERROR_RESERVED_1
458 {GRPC_STATUS_RESOURCE_EXHAUSTED, "Out of memory",
459 strlen("Out of memory")}, // GRPC_ERROR_OOM
460 {GRPC_STATUS_INVALID_ARGUMENT, "", 0}, // GRPC_ERROR_RESERVED_2
461 {GRPC_STATUS_CANCELLED, "Cancelled",
462 strlen("Cancelled")}, // GRPC_ERROR_CANCELLED
// Public API: fetches an int property into *p, returning whether it was
// set. Special errors only answer GRPC_ERROR_INT_GRPC_STATUS, via the
// canned table above (the pointer value doubles as the table index).
// Note the asymmetry: `p` may be nullptr on the normal path but is
// dereferenced unconditionally on the special path.
465 bool grpc_error_get_int(grpc_error* err, grpc_error_ints which, intptr_t* p) {
466 GPR_TIMER_SCOPE("grpc_error_get_int", 0);
467 if (grpc_error_is_special(err)) {
468 if (which != GRPC_ERROR_INT_GRPC_STATUS) return false;
469 *p = error_status_map[reinterpret_cast<size_t>(err)].code;
472 uint8_t slot = err->ints[which];
473 if (slot != UINT8_MAX) {
474 if (p != nullptr) *p = err->arena[slot];
// Public API: returns an error equal to `src` plus the given slice
// property (ownership of `str`'s ref is taken). Consumes the caller's ref
// on `src`; the `return new_err;` line appears missing from this copy.
480 grpc_error* grpc_error_set_str(grpc_error* src, grpc_error_strs which,
481 const grpc_slice& str) {
482 GPR_TIMER_SCOPE("grpc_error_set_str", 0);
483 grpc_error* new_err = copy_error_and_unref(src);
484 internal_set_str(&new_err, which, str);
// Public API: fetches a slice property into *str without taking a ref —
// the returned slice borrows the error's storage. Special errors only
// answer GRPC_ERROR_STR_GRPC_MESSAGE, synthesized as a no-op-refcounted
// slice over the static message in error_status_map.
488 bool grpc_error_get_str(grpc_error* err, grpc_error_strs which,
490 if (grpc_error_is_special(err)) {
491 if (which != GRPC_ERROR_STR_GRPC_MESSAGE) return false;
492 const special_error_status_map& msg =
493 error_status_map[reinterpret_cast<size_t>(err)];
// kNoopRefcount: ref/unref on this slice are no-ops (static storage).
494 str->refcount = &grpc_core::kNoopRefcount;
495 str->data.refcounted.bytes =
496 reinterpret_cast<uint8_t*>(const_cast<char*>(msg.msg));
497 str->data.refcounted.length = msg.len;
500 uint8_t slot = err->strs[which];
501 if (slot != UINT8_MAX) {
502 *str = *reinterpret_cast<grpc_slice*>(err->arena + slot);
// Public API: returns an error with `child` attached to `src`, consuming
// one ref on each input. Fast paths avoid copies: NONE child -> return
// src; NONE src -> return child; src == child -> drop the duplicate ref.
// NOTE(review): the `return` lines for each branch appear to be in lines
// missing from this copy.
509 grpc_error* grpc_error_add_child(grpc_error* src, grpc_error* child) {
510 GPR_TIMER_SCOPE("grpc_error_add_child", 0);
511 if (src != GRPC_ERROR_NONE) {
512 if (child == GRPC_ERROR_NONE) {
513 /* \a child is empty. Simply return the ref to \a src */
515 } else if (child != src) {
516 grpc_error* new_err = copy_error_and_unref(src);
517 internal_add_error(&new_err, child);
520 /* \a src and \a child are the same. Drop one of the references and return
522 GRPC_ERROR_UNREF(child);
526 /* \a src is empty. Simply return the ref to \a child */
// Canned renderings returned by grpc_error_string for the special
// (tagged-pointer) errors, which carry no arena to render.
531 static const char* no_error_string = "\"No Error\"";
532 static const char* oom_error_string = "\"Out of memory\"";
533 static const char* cancelled_error_string = "\"Cancelled\"";
// Appends one char to the growable buffer (*s, *sz, *cap), growing by 1.5x
// (min 8) when full. NOTE(review): the size check guarding the realloc and
// the actual store appear to be in lines missing from this copy.
546 static void append_chr(char c, char** s, size_t* sz, size_t* cap) {
548 *cap = GPR_MAX(8, 3 * *cap / 2);
549 *s = static_cast<char*>(gpr_realloc(*s, *cap));
// Appends a NUL-terminated string to the growable buffer, one char at a
// time via append_chr (the terminating NUL is not appended).
554 static void append_str(const char* str, char** s, size_t* sz, size_t* cap) {
555 for (const char* c = str; *c; c++) {
556 append_chr(*c, s, sz, cap);
// Appends `len` bytes as a double-quoted, JSON-escaped string: printable
// ASCII passes through; control/non-ASCII bytes become either a short
// escape (\b \f \n \r \t) or \u00XX. NOTE(review): the inner switch on
// *str and its case labels appear to be in lines missing from this copy.
560 static void append_esc_str(const uint8_t* str, size_t len, char** s, size_t* sz,
562 static const char* hex = "0123456789abcdef";
563 append_chr('"', s, sz, cap);
564 for (size_t i = 0; i < len; i++, str++) {
565 if (*str < 32 || *str >= 127) {
566 append_chr('\\', s, sz, cap);
569 append_chr('b', s, sz, cap);
572 append_chr('f', s, sz, cap);
575 append_chr('n', s, sz, cap);
578 append_chr('r', s, sz, cap);
581 append_chr('t', s, sz, cap);
// Default: \u00XX with the byte's two hex nibbles.
584 append_chr('u', s, sz, cap);
585 append_chr('0', s, sz, cap);
586 append_chr('0', s, sz, cap);
587 append_chr(hex[*str >> 4], s, sz, cap);
588 append_chr(hex[*str & 0x0f], s, sz, cap);
592 append_chr(static_cast<char>(*str), s, sz, cap);
595 append_chr('"', s, sz, cap);
// Appends a key/value pair (both heap-allocated; ownership transfers to
// kvs) to the dynamic kv array, growing by 1.5x (min 4) when full.
// NOTE(review): the num_kvs increment appears missing from this copy.
598 static void append_kv(kv_pairs* kvs, char* key, char* value) {
599 if (kvs->num_kvs == kvs->cap_kvs) {
600 kvs->cap_kvs = GPR_MAX(3 * kvs->cap_kvs / 2, 4);
601 kvs->kvs = static_cast<kv_pair*>(
602 gpr_realloc(kvs->kvs, sizeof(*kvs->kvs) * kvs->cap_kvs));
604 kvs->kvs[kvs->num_kvs].key = key;
605 kvs->kvs[kvs->num_kvs].value = value;
// Heap-allocated copy of an int property's name, for ownership transfer
// into append_kv.
609 static char* key_int(grpc_error_ints which) {
610 return gpr_strdup(error_int_name(which));
// Formats an intptr_t as a heap-allocated decimal string (the `return s;`
// line appears missing from this copy).
613 static char* fmt_int(intptr_t p) {
615 gpr_asprintf(&s, "%" PRIdPTR, p);
// Collects every set int property of err as a name/value string pair for
// JSON rendering.
619 static void collect_ints_kvs(grpc_error* err, kv_pairs* kvs) {
620 for (size_t which = 0; which < GRPC_ERROR_INT_MAX; ++which) {
621 uint8_t slot = err->ints[which];
622 if (slot != UINT8_MAX) {
623 append_kv(kvs, key_int(static_cast<grpc_error_ints>(which)),
624 fmt_int(err->arena[slot]));
// Heap-allocated copy of a string property's name, for append_kv.
629 static char* key_str(grpc_error_strs which) {
630 return gpr_strdup(error_str_name(which));
// Renders a slice as a heap-allocated, quoted+escaped JSON string
// (NUL-terminated via the final append_chr(0)). NOTE(review): the local
// buffer declarations and `return s;` appear missing from this copy.
633 static char* fmt_str(const grpc_slice& slice) {
637 append_esc_str((const uint8_t*)GRPC_SLICE_START_PTR(slice),
638 GRPC_SLICE_LENGTH(slice), &s, &sz, &cap);
639 append_chr(0, &s, &sz, &cap);
// Collects every set slice property of err as a name/value string pair for
// JSON rendering.
643 static void collect_strs_kvs(grpc_error* err, kv_pairs* kvs) {
644 for (size_t which = 0; which < GRPC_ERROR_STR_MAX; ++which) {
645 uint8_t slot = err->strs[which];
646 if (slot != UINT8_MAX) {
647 append_kv(kvs, key_str(static_cast<grpc_error_strs>(which)),
648 fmt_str(*reinterpret_cast<grpc_slice*>(err->arena + slot)));
// Heap-allocated copy of a time property's name, for append_kv.
653 static char* key_time(grpc_error_times which) {
654 return gpr_strdup(error_time_name(which));
// Formats a timestamp as a heap-allocated quoted string "PFXsec.nsec",
// where the prefix encodes the clock type ("!!" for unknown/timespan).
// NOTE(review): the per-clock prefix assignments and `return out;` appear
// to be in lines missing from this copy.
657 static char* fmt_time(gpr_timespec tm) {
659 const char* pfx = "!!";
660 switch (tm.clock_type) {
661 case GPR_CLOCK_MONOTONIC:
664 case GPR_CLOCK_REALTIME:
667 case GPR_CLOCK_PRECISE:
674 gpr_asprintf(&out, "\"%s%" PRId64 ".%09d\"", pfx, tm.tv_sec, tm.tv_nsec);
// Collects every set time property of err as a name/value string pair for
// JSON rendering.
678 static void collect_times_kvs(grpc_error* err, kv_pairs* kvs) {
679 for (size_t which = 0; which < GRPC_ERROR_TIME_MAX; ++which) {
680 uint8_t slot = err->times[which];
681 if (slot != UINT8_MAX) {
682 append_kv(kvs, key_time(static_cast<grpc_error_times>(which)),
683 fmt_time(*reinterpret_cast<gpr_timespec*>(err->arena + slot)));
// Appends each child error's rendered string (recursing via
// grpc_error_string) to the buffer, comma-separated. NOTE(review): the
// `first` flag declaration/update and loop-advance line appear missing
// from this copy.
688 static void add_errs(grpc_error* err, char** s, size_t* sz, size_t* cap) {
689 uint8_t slot = err->first_err;
691 while (slot != UINT8_MAX) {
692 grpc_linked_error* lerr =
693 reinterpret_cast<grpc_linked_error*>(err->arena + slot);
694 if (!first) append_chr(',', s, sz, cap);
696 const char* e = grpc_error_string(lerr->err);
697 append_str(e, s, sz, cap);
// Integrity check: only the tail slot may lack a successor.
698 GPR_ASSERT(err->last_err == slot ? lerr->next == UINT8_MAX
699 : lerr->next != UINT8_MAX);
// Renders the child-error list as a heap-allocated, NUL-terminated JSON
// array string "[...]". NOTE(review): the local buffer declarations and
// `return s;` appear missing from this copy.
704 static char* errs_string(grpc_error* err) {
708 append_chr('[', &s, &sz, &cap);
709 add_errs(err, &s, &sz, &cap);
710 append_chr(']', &s, &sz, &cap);
711 append_chr(0, &s, &sz, &cap);
// qsort comparator: orders kv_pairs lexicographically by key so the
// rendered JSON has deterministic key order.
715 static int cmp_kvs(const void* a, const void* b) {
716 const kv_pair* ka = static_cast<const kv_pair*>(a);
717 const kv_pair* kb = static_cast<const kv_pair*>(b);
718 return strcmp(ka->key, kb->key);
// Joins the collected kv pairs into a heap-allocated, NUL-terminated JSON
// object string "{...}": keys get escaped/quoted, values are appended
// verbatim (they were pre-formatted). Frees each key/value as it goes.
// NOTE(review): the local buffer declarations and `return s;` appear
// missing from this copy.
721 static char* finish_kvs(kv_pairs* kvs) {
726 append_chr('{', &s, &sz, &cap);
727 for (size_t i = 0; i < kvs->num_kvs; i++) {
728 if (i != 0) append_chr(',', &s, &sz, &cap);
729 append_esc_str(reinterpret_cast<const uint8_t*>(kvs->kvs[i].key),
730 strlen(kvs->kvs[i].key), &s, &sz, &cap);
731 gpr_free(kvs->kvs[i].key);
732 append_chr(':', &s, &sz, &cap);
733 append_str(kvs->kvs[i].value, &s, &sz, &cap);
734 gpr_free(kvs->kvs[i].value);
736 append_chr('}', &s, &sz, &cap);
737 append_chr(0, &s, &sz, &cap);
// Public API: renders the error as a JSON-ish string. Special errors
// return static canned strings. Otherwise the result is built once and
// cached in err->atomics.error_string: an acquire load checks the cache,
// and a release CAS publishes the new string; on a lost race the local
// copy is discarded (the gpr_free appears to be in a missing line) and the
// winner's cached string is returned instead. The returned pointer is
// owned by the error and freed in error_destroy.
743 const char* grpc_error_string(grpc_error* err) {
744 GPR_TIMER_SCOPE("grpc_error_string", 0);
745 if (err == GRPC_ERROR_NONE) return no_error_string;
746 if (err == GRPC_ERROR_OOM) return oom_error_string;
747 if (err == GRPC_ERROR_CANCELLED) return cancelled_error_string;
749 void* p = (void*)gpr_atm_acq_load(&err->atomics.error_string);
751 return static_cast<const char*>(p);
// Cache miss: collect all properties into sorted kv pairs and render.
755 memset(&kvs, 0, sizeof(kvs));
757 collect_ints_kvs(err, &kvs);
758 collect_strs_kvs(err, &kvs);
759 collect_times_kvs(err, &kvs);
760 if (err->first_err != UINT8_MAX) {
761 append_kv(&kvs, gpr_strdup("referenced_errors"), errs_string(err));
764 qsort(kvs.kvs, kvs.num_kvs, sizeof(kv_pair), cmp_kvs);
766 char* out = finish_kvs(&kvs);
768 if (!gpr_atm_rel_cas(&err->atomics.error_string, 0, (gpr_atm)out)) {
770 out = (char*)gpr_atm_acq_load(&err->atomics.error_string);
// Builds an error for a failed OS call: description and os_error are the
// strerror(err) text, plus the errno value and the syscall name (copied,
// since call_name may not be static). Note strerror() is not guaranteed
// thread-safe and its result is treated as a static string here.
776 grpc_error* grpc_os_error(const char* file, int line, int err,
777 const char* call_name) {
778 return grpc_error_set_str(
781 grpc_error_create(file, line,
782 grpc_slice_from_static_string(strerror(err)),
784 GRPC_ERROR_INT_ERRNO, err),
785 GRPC_ERROR_STR_OS_ERROR,
786 grpc_slice_from_static_string(strerror(err))),
787 GRPC_ERROR_STR_SYSCALL, grpc_slice_from_copied_string(call_name));
// Windows counterpart of grpc_os_error: builds an error from a WSA error
// code, copying the formatted system message (freed after the slice copy)
// and recording the WSA code and syscall name. NOTE(review): the `return
// error;` line appears missing from this copy.
791 grpc_error* grpc_wsa_error(const char* file, int line, int err,
792 const char* call_name) {
793 char* utf8_message = gpr_format_message(err);
794 grpc_error* error = grpc_error_set_str(
797 grpc_error_create(file, line,
798 grpc_slice_from_static_string("OS Error"), NULL,
800 GRPC_ERROR_INT_WSA_ERROR, err),
801 GRPC_ERROR_STR_OS_ERROR, grpc_slice_from_copied_string(utf8_message)),
802 GRPC_ERROR_STR_SYSCALL, grpc_slice_from_static_string(call_name));
803 gpr_free(utf8_message);
808 bool grpc_log_error(const char* what, grpc_error* error, const char* file,
810 GPR_DEBUG_ASSERT(error != GRPC_ERROR_NONE);
811 const char* msg = grpc_error_string(error);
812 gpr_log(file, line, GPR_LOG_SEVERITY_ERROR, "%s: %s", what, msg);
813 GRPC_ERROR_UNREF(error);