author     Robert Haas <rhaas@postgresql.org>   2020-01-27 11:22:13 -0500
committer  Robert Haas <rhaas@postgresql.org>   2020-01-27 11:22:13 -0500
commit     73ce2a03f30b52d6bfb26bc28f1e3e1aa1637577 (patch)
tree       a6a19f146fb7281f146fec7d413a79032fd1557f /src/backend/utils/adt/jsonfuncs.c
parent     1f3a021730be98b880d94cabbe21de7e4d8136f5 (diff)
Move some code from jsonapi.c to jsonfuncs.c.
Specifically, move those functions that depend on ereport() from jsonapi.c to
jsonfuncs.c, in preparation for allowing jsonapi.c to be used from frontend
code. A few cases where elog(ERROR, ...) is used for can't-happen conditions
are left alone; we can handle those in some other way in frontend code.

Reviewed by Mark Dilger and Andrew Dunstan.

Discussion: http://postgr.es/m/CA+TgmoYfOXhd27MUDGioVh6QtpD0C1K-f6ObSA10AWiHBAL5bA@mail.gmail.com
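The motivation is that ereport() is backend-only; once jsonapi.c is frontend-clean, a frontend caller is expected to check the JsonParseErrorType returned by pg_parse_json() and report any failure itself. The following is a minimal sketch of that calling pattern, not part of the patch: it assumes the three-argument makeJsonLexContextCstringLen() signature and the utils/jsonapi.h header location as of this commit, and the validate_json() wrapper is hypothetical.

/* Hypothetical frontend-style caller: no ereport(), inspect the return code. */
#include "postgres_fe.h"		/* frontend equivalent of postgres.h (assumed setup) */
#include "utils/jsonapi.h"		/* header path as of this commit */

static bool
validate_json(char *str)
{
	JsonLexContext *lex;
	JsonParseErrorType result;

	/* three-argument signature as of this commit; no de-escaping needed just to validate */
	lex = makeJsonLexContextCstringLen(str, strlen(str), false);

	/* nullSemAction walks the input without building anything */
	result = pg_parse_json(lex, &nullSemAction);

	if (result != JSON_SUCCESS)
	{
		/* a frontend caller reports the problem itself instead of ereport()ing */
		fprintf(stderr, "JSON parse failed: %s\n", json_errdetail(result, lex));
		return false;
	}
	return true;
}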
Diffstat (limited to 'src/backend/utils/adt/jsonfuncs.c')
-rw-r--r--   src/backend/utils/adt/jsonfuncs.c   126
1 file changed, 126 insertions(+), 0 deletions(-)
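For backend callers, the patch adds thin wrappers (shown in the diff below) that preserve the existing behavior: makeJsonLexContext() builds a lexer from a text datum, and pg_parse_json_or_ereport() raises an error on any parse failure. A minimal sketch of how a backend caller might use them; the key-counting callback, state, and header placement are assumptions for illustration, not part of the patch.

/* Hypothetical backend caller of the new wrappers (sketch only). */
#include "postgres.h"
#include "utils/jsonapi.h"		/* wrapper prototypes; exact header placement may differ */

/* Counts every object key in the document, at any nesting depth. */
static void
count_keys_object_field_start(void *state, char *fname, bool isnull)
{
	int		   *nkeys = (int *) state;

	(*nkeys)++;
}

static int
count_object_keys(text *json)
{
	int			nkeys = 0;
	/* need_escapes = true so the callback is handed de-escaped key names */
	JsonLexContext *lex = makeJsonLexContext(json, true);
	JsonSemAction sem;

	memset(&sem, 0, sizeof(sem));
	sem.semstate = &nkeys;
	sem.object_field_start = count_keys_object_field_start;

	/* any parse failure ereport()s in here, so there is no return code to check */
	pg_parse_json_or_ereport(lex, &sem);

	return nkeys;
}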
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 9eff5068553..66ea11b971c 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -329,6 +329,8 @@ typedef struct JsObject
hash_destroy((jso)->val.json_hash); \
} while (0)
+static int report_json_context(JsonLexContext *lex);
+
/* semantic action functions for json_object_keys */
static void okeys_object_field_start(void *state, char *fname, bool isnull);
static void okeys_array_start(void *state);
@@ -485,6 +487,37 @@ static void transform_string_values_array_element_start(void *state, bool isnull
static void transform_string_values_scalar(void *state, char *token, JsonTokenType tokentype);
/*
+ * pg_parse_json_or_ereport
+ *
+ * This function is like pg_parse_json, except that it does not return a
+ * JsonParseErrorType. Instead, in case of any failure, this function will
+ * ereport(ERROR).
+ */
+void
+pg_parse_json_or_ereport(JsonLexContext *lex, JsonSemAction *sem)
+{
+ JsonParseErrorType result;
+
+ result = pg_parse_json(lex, sem);
+ if (result != JSON_SUCCESS)
+ json_ereport_error(result, lex);
+}
+
+/*
+ * makeJsonLexContext
+ *
+ * This is like makeJsonLexContextCstringLen, but it accepts a text value
+ * directly.
+ */
+JsonLexContext *
+makeJsonLexContext(text *json, bool need_escapes)
+{
+ return makeJsonLexContextCstringLen(VARDATA_ANY(json),
+ VARSIZE_ANY_EXHDR(json),
+ need_escapes);
+}
+
+/*
* SQL function json_object_keys
*
* Returns the set of keys for the object argument.
@@ -573,6 +606,99 @@ jsonb_object_keys(PG_FUNCTION_ARGS)
SRF_RETURN_DONE(funcctx);
}
+/*
+ * Report a JSON error.
+ */
+void
+json_ereport_error(JsonParseErrorType error, JsonLexContext *lex)
+{
+ if (error == JSON_UNICODE_HIGH_ESCAPE ||
+ error == JSON_UNICODE_CODE_POINT_ZERO)
+ ereport(ERROR,
+ (errcode(ERRCODE_UNTRANSLATABLE_CHARACTER),
+ errmsg("unsupported Unicode escape sequence"),
+ errdetail("%s", json_errdetail(error, lex)),
+ report_json_context(lex)));
+ else
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("invalid input syntax for type %s", "json"),
+ errdetail("%s", json_errdetail(error, lex)),
+ report_json_context(lex)));
+}
+
+/*
+ * Report a CONTEXT line for bogus JSON input.
+ *
+ * lex->token_terminator must be set to identify the spot where we detected
+ * the error. Note that lex->token_start might be NULL, in case we recognized
+ * the error at EOF.
+ *
+ * The return value isn't meaningful, but we make it non-void so that this
+ * can be invoked inside ereport().
+ */
+static int
+report_json_context(JsonLexContext *lex)
+{
+ const char *context_start;
+ const char *context_end;
+ const char *line_start;
+ int line_number;
+ char *ctxt;
+ int ctxtlen;
+ const char *prefix;
+ const char *suffix;
+
+ /* Choose boundaries for the part of the input we will display */
+ context_start = lex->input;
+ context_end = lex->token_terminator;
+ line_start = context_start;
+ line_number = 1;
+ for (;;)
+ {
+ /* Always advance over newlines */
+ if (context_start < context_end && *context_start == '\n')
+ {
+ context_start++;
+ line_start = context_start;
+ line_number++;
+ continue;
+ }
+ /* Otherwise, done as soon as we are close enough to context_end */
+ if (context_end - context_start < 50)
+ break;
+ /* Advance to next multibyte character */
+ if (IS_HIGHBIT_SET(*context_start))
+ context_start += pg_mblen(context_start);
+ else
+ context_start++;
+ }
+
+ /*
+ * We add "..." to indicate that the excerpt doesn't start at the
+ * beginning of the line ... but if we're within 3 characters of the
+ * beginning of the line, we might as well just show the whole line.
+ */
+ if (context_start - line_start <= 3)
+ context_start = line_start;
+
+ /* Get a null-terminated copy of the data to present */
+ ctxtlen = context_end - context_start;
+ ctxt = palloc(ctxtlen + 1);
+ memcpy(ctxt, context_start, ctxtlen);
+ ctxt[ctxtlen] = '\0';
+
+ /*
+ * Show the context, prefixing "..." if not starting at start of line, and
+ * suffixing "..." if not ending at end of line.
+ */
+ prefix = (context_start > line_start) ? "..." : "";
+ suffix = (lex->token_type != JSON_TOKEN_END &&
+ context_end - lex->input < lex->input_length &&
+ *context_end != '\n' && *context_end != '\r') ? "..." : "";
+
+ return errcontext("JSON data, line %d: %s%s%s",
+ line_number, prefix, ctxt, suffix);
+}
+
Datum
json_object_keys(PG_FUNCTION_ARGS)