Diffstat (limited to 'src/backend/utils/adt')
-rw-r--r-- | src/backend/utils/adt/json.c | 39
-rw-r--r-- | src/backend/utils/adt/jsonb.c | 13
-rw-r--r-- | src/backend/utils/adt/jsonfuncs.c | 107
3 files changed, 86 insertions, 73 deletions
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 2c620809b20..55413c0fdf2 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -106,11 +106,11 @@ json_in(PG_FUNCTION_ARGS)
 {
     char       *json = PG_GETARG_CSTRING(0);
     text       *result = cstring_to_text(json);
-    JsonLexContext *lex;
+    JsonLexContext lex;
 
     /* validate it */
-    lex = makeJsonLexContext(result, false);
-    if (!pg_parse_json_or_errsave(lex, &nullSemAction, fcinfo->context))
+    makeJsonLexContext(&lex, result, false);
+    if (!pg_parse_json_or_errsave(&lex, &nullSemAction, fcinfo->context))
         PG_RETURN_NULL();
 
     /* Internal representation is the same as text */
@@ -152,13 +152,14 @@ json_recv(PG_FUNCTION_ARGS)
     StringInfo  buf = (StringInfo) PG_GETARG_POINTER(0);
     char       *str;
     int         nbytes;
-    JsonLexContext *lex;
+    JsonLexContext lex;
 
     str = pq_getmsgtext(buf, buf->len - buf->cursor, &nbytes);
 
     /* Validate it. */
-    lex = makeJsonLexContextCstringLen(str, nbytes, GetDatabaseEncoding(), false);
-    pg_parse_json_or_ereport(lex, &nullSemAction);
+    makeJsonLexContextCstringLen(&lex, str, nbytes, GetDatabaseEncoding(),
+                                 false);
+    pg_parse_json_or_ereport(&lex, &nullSemAction);
 
     PG_RETURN_TEXT_P(cstring_to_text_with_len(str, nbytes));
 }
@@ -1625,14 +1626,16 @@ json_unique_object_field_start(void *_state, char *field, bool isnull)
 bool
 json_validate(text *json, bool check_unique_keys, bool throw_error)
 {
-    JsonLexContext *lex = makeJsonLexContext(json, check_unique_keys);
+    JsonLexContext lex;
     JsonSemAction uniqueSemAction = {0};
     JsonUniqueParsingState state;
     JsonParseErrorType result;
 
+    makeJsonLexContext(&lex, json, check_unique_keys);
+
     if (check_unique_keys)
     {
-        state.lex = lex;
+        state.lex = &lex;
         state.stack = NULL;
         state.id_counter = 0;
         state.unique = true;
@@ -1644,12 +1647,12 @@ json_validate(text *json, bool check_unique_keys, bool throw_error)
         uniqueSemAction.object_end = json_unique_object_end;
     }
 
-    result = pg_parse_json(lex, check_unique_keys ? &uniqueSemAction : &nullSemAction);
+    result = pg_parse_json(&lex, check_unique_keys ? &uniqueSemAction : &nullSemAction);
 
     if (result != JSON_SUCCESS)
     {
         if (throw_error)
-            json_errsave_error(result, lex, NULL);
+            json_errsave_error(result, &lex, NULL);
 
         return false;           /* invalid json */
     }
@@ -1664,6 +1667,9 @@ json_validate(text *json, bool check_unique_keys, bool throw_error)
         return false;           /* not unique keys */
     }
 
+    if (check_unique_keys)
+        freeJsonLexContext(&lex);
+
     return true;                /* ok */
 }
 
@@ -1683,18 +1689,17 @@ Datum
 json_typeof(PG_FUNCTION_ARGS)
 {
     text       *json = PG_GETARG_TEXT_PP(0);
-    JsonLexContext *lex = makeJsonLexContext(json, false);
+    JsonLexContext lex;
     char       *type;
-    JsonTokenType tok;
     JsonParseErrorType result;
 
     /* Lex exactly one token from the input and check its type. */
-    result = json_lex(lex);
+    makeJsonLexContext(&lex, json, false);
+    result = json_lex(&lex);
     if (result != JSON_SUCCESS)
-        json_errsave_error(result, lex, NULL);
-    tok = lex->token_type;
+        json_errsave_error(result, &lex, NULL);
 
-    switch (tok)
+    switch (lex.token_type)
     {
         case JSON_TOKEN_OBJECT_START:
             type = "object";
@@ -1716,7 +1721,7 @@ json_typeof(PG_FUNCTION_ARGS)
             type = "null";
             break;
         default:
-            elog(ERROR, "unexpected json token: %d", tok);
+            elog(ERROR, "unexpected json token: %d", lex.token_type);
     }
 
     PG_RETURN_TEXT_P(cstring_to_text(type));
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 9781852b0cb..b10a60ac665 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -252,13 +252,13 @@ jsonb_typeof(PG_FUNCTION_ARGS)
 static inline Datum
 jsonb_from_cstring(char *json, int len, bool unique_keys, Node *escontext)
 {
-    JsonLexContext *lex;
+    JsonLexContext lex;
     JsonbInState state;
     JsonSemAction sem;
 
     memset(&state, 0, sizeof(state));
     memset(&sem, 0, sizeof(sem));
-    lex = makeJsonLexContextCstringLen(json, len, GetDatabaseEncoding(), true);
+    makeJsonLexContextCstringLen(&lex, json, len, GetDatabaseEncoding(), true);
 
     state.unique_keys = unique_keys;
     state.escontext = escontext;
@@ -271,7 +271,7 @@ jsonb_from_cstring(char *json, int len, bool unique_keys, Node *escontext)
     sem.scalar = jsonb_in_scalar;
     sem.object_field_start = jsonb_in_object_field_start;
 
-    if (!pg_parse_json_or_errsave(lex, &sem, escontext))
+    if (!pg_parse_json_or_errsave(&lex, &sem, escontext))
         return (Datum) 0;
 
     /* after parsing, the item member has the composed jsonb structure */
@@ -755,11 +755,11 @@ datum_to_jsonb_internal(Datum val, bool is_null, JsonbInState *result,
         case JSONTYPE_JSON:
             {
                 /* parse the json right into the existing result object */
-                JsonLexContext *lex;
+                JsonLexContext lex;
                 JsonSemAction sem;
                 text       *json = DatumGetTextPP(val);
 
-                lex = makeJsonLexContext(json, true);
+                makeJsonLexContext(&lex, json, true);
 
                 memset(&sem, 0, sizeof(sem));
 
@@ -772,7 +772,8 @@ datum_to_jsonb_internal(Datum val, bool is_null, JsonbInState *result,
                 sem.scalar = jsonb_in_scalar;
                 sem.object_field_start = jsonb_in_object_field_start;
 
-                pg_parse_json_or_ereport(lex, &sem);
+                pg_parse_json_or_ereport(&lex, &sem);
+                freeJsonLexContext(&lex);
             }
             break;
         case JSONTYPE_JSONB:
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index a4bfa5e4040..0bff272f245 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -526,7 +526,7 @@ pg_parse_json_or_errsave(JsonLexContext *lex, JsonSemAction *sem,
  * directly.
  */
 JsonLexContext *
-makeJsonLexContext(text *json, bool need_escapes)
+makeJsonLexContext(JsonLexContext *lex, text *json, bool need_escapes)
 {
     /*
      * Most callers pass a detoasted datum, but it's not clear that they all
@@ -534,7 +534,8 @@ makeJsonLexContext(text *json, bool need_escapes)
      */
     json = pg_detoast_datum_packed(json);
 
-    return makeJsonLexContextCstringLen(VARDATA_ANY(json),
+    return makeJsonLexContextCstringLen(lex,
+                                        VARDATA_ANY(json),
                                         VARSIZE_ANY_EXHDR(json),
                                         GetDatabaseEncoding(),
                                         need_escapes);
@@ -725,7 +726,7 @@ json_object_keys(PG_FUNCTION_ARGS)
     if (SRF_IS_FIRSTCALL())
     {
         text       *json = PG_GETARG_TEXT_PP(0);
-        JsonLexContext *lex = makeJsonLexContext(json, true);
+        JsonLexContext lex;
         JsonSemAction *sem;
         MemoryContext oldcontext;
 
@@ -735,7 +736,7 @@ json_object_keys(PG_FUNCTION_ARGS)
         state = palloc(sizeof(OkeysState));
         sem = palloc0(sizeof(JsonSemAction));
 
-        state->lex = lex;
+        state->lex = makeJsonLexContext(&lex, json, true);
         state->result_size = 256;
         state->result_count = 0;
         state->sent_count = 0;
@@ -747,12 +748,10 @@ json_object_keys(PG_FUNCTION_ARGS)
         sem->object_field_start = okeys_object_field_start;
         /* remainder are all NULL, courtesy of palloc0 above */
 
-        pg_parse_json_or_ereport(lex, sem);
+        pg_parse_json_or_ereport(&lex, sem);
         /* keys are now in state->result */
 
-        pfree(lex->strval->data);
-        pfree(lex->strval);
-        pfree(lex);
+        freeJsonLexContext(&lex);
         pfree(sem);
 
         MemoryContextSwitchTo(oldcontext);
@@ -1096,13 +1095,13 @@ get_worker(text *json,
            int npath,
            bool normalize_results)
 {
-    JsonLexContext *lex = makeJsonLexContext(json, true);
     JsonSemAction *sem = palloc0(sizeof(JsonSemAction));
     GetState   *state = palloc0(sizeof(GetState));
 
     Assert(npath >= 0);
 
-    state->lex = lex;
+    state->lex = makeJsonLexContext(NULL, json, true);
+
     /* is it "_as_text" variant? */
     state->normalize_results = normalize_results;
     state->npath = npath;
@@ -1140,7 +1139,8 @@ get_worker(text *json,
         sem->array_element_end = get_array_element_end;
     }
 
-    pg_parse_json_or_ereport(lex, sem);
+    pg_parse_json_or_ereport(state->lex, sem);
+    freeJsonLexContext(state->lex);
 
     return state->tresult;
 }
@@ -1842,25 +1842,23 @@ json_array_length(PG_FUNCTION_ARGS)
 {
     text       *json = PG_GETARG_TEXT_PP(0);
     AlenState  *state;
-    JsonLexContext *lex;
+    JsonLexContext lex;
     JsonSemAction *sem;
 
-    lex = makeJsonLexContext(json, false);
     state = palloc0(sizeof(AlenState));
-    sem = palloc0(sizeof(JsonSemAction));
-
+    state->lex = makeJsonLexContext(&lex, json, false);
    /* palloc0 does this for us */
 #if 0
     state->count = 0;
 #endif
-    state->lex = lex;
 
+    sem = palloc0(sizeof(JsonSemAction));
     sem->semstate = (void *) state;
     sem->object_start = alen_object_start;
     sem->scalar = alen_scalar;
     sem->array_element_start = alen_array_element_start;
 
-    pg_parse_json_or_ereport(lex, sem);
+    pg_parse_json_or_ereport(state->lex, sem);
 
     PG_RETURN_INT32(state->count);
 }
@@ -2049,12 +2047,11 @@ static Datum
 each_worker(FunctionCallInfo fcinfo, bool as_text)
 {
     text       *json = PG_GETARG_TEXT_PP(0);
-    JsonLexContext *lex;
+    JsonLexContext lex;
     JsonSemAction *sem;
     ReturnSetInfo *rsi;
     EachState  *state;
 
-    lex = makeJsonLexContext(json, true);
     state = palloc0(sizeof(EachState));
     sem = palloc0(sizeof(JsonSemAction));
 
@@ -2072,14 +2069,15 @@ each_worker(FunctionCallInfo fcinfo, bool as_text)
 
     state->normalize_results = as_text;
     state->next_scalar = false;
-    state->lex = lex;
+    state->lex = makeJsonLexContext(&lex, json, true);
     state->tmp_cxt = AllocSetContextCreate(CurrentMemoryContext,
                                            "json_each temporary cxt",
                                            ALLOCSET_DEFAULT_SIZES);
 
-    pg_parse_json_or_ereport(lex, sem);
+    pg_parse_json_or_ereport(&lex, sem);
 
     MemoryContextDelete(state->tmp_cxt);
+    freeJsonLexContext(&lex);
 
     PG_RETURN_NULL();
 }
@@ -2299,13 +2297,14 @@ static Datum
 elements_worker(FunctionCallInfo fcinfo, const char *funcname, bool as_text)
 {
     text       *json = PG_GETARG_TEXT_PP(0);
-
-    /* elements only needs escaped strings when as_text */
-    JsonLexContext *lex = makeJsonLexContext(json, as_text);
+    JsonLexContext lex;
     JsonSemAction *sem;
     ReturnSetInfo *rsi;
     ElementsState *state;
 
+    /* elements only needs escaped strings when as_text */
+    makeJsonLexContext(&lex, json, as_text);
+
     state = palloc0(sizeof(ElementsState));
     sem = palloc0(sizeof(JsonSemAction));
 
@@ -2323,14 +2322,15 @@ elements_worker(FunctionCallInfo fcinfo, const char *funcname, bool as_text)
     state->function_name = funcname;
     state->normalize_results = as_text;
     state->next_scalar = false;
-    state->lex = lex;
+    state->lex = &lex;
     state->tmp_cxt = AllocSetContextCreate(CurrentMemoryContext,
                                            "json_array_elements temporary cxt",
                                            ALLOCSET_DEFAULT_SIZES);
 
-    pg_parse_json_or_ereport(lex, sem);
+    pg_parse_json_or_ereport(&lex, sem);
 
     MemoryContextDelete(state->tmp_cxt);
+    freeJsonLexContext(&lex);
 
     PG_RETURN_NULL();
 }
@@ -2704,7 +2704,8 @@ populate_array_json(PopulateArrayContext *ctx, char *json, int len)
     PopulateArrayState state;
     JsonSemAction sem;
 
-    state.lex = makeJsonLexContextCstringLen(json, len, GetDatabaseEncoding(), true);
+    state.lex = makeJsonLexContextCstringLen(NULL, json, len,
+                                             GetDatabaseEncoding(), true);
     state.ctx = ctx;
 
     memset(&sem, 0, sizeof(sem));
@@ -2720,7 +2721,7 @@ populate_array_json(PopulateArrayContext *ctx, char *json, int len)
     /* number of dimensions should be already known */
     Assert(ctx->ndims > 0 && ctx->dims);
 
-    pfree(state.lex);
+    freeJsonLexContext(state.lex);
 }
 
 /*
@@ -3547,7 +3548,6 @@ get_json_object_as_hash(char *json, int len, const char *funcname)
     HASHCTL     ctl;
     HTAB       *tab;
     JHashState *state;
-    JsonLexContext *lex = makeJsonLexContextCstringLen(json, len, GetDatabaseEncoding(), true);
     JsonSemAction *sem;
 
     ctl.keysize = NAMEDATALEN;
@@ -3563,7 +3563,8 @@ get_json_object_as_hash(char *json, int len, const char *funcname)
 
     state->function_name = funcname;
     state->hash = tab;
-    state->lex = lex;
+    state->lex = makeJsonLexContextCstringLen(NULL, json, len,
+                                              GetDatabaseEncoding(), true);
 
     sem->semstate = (void *) state;
     sem->array_start = hash_array_start;
@@ -3571,7 +3572,9 @@ get_json_object_as_hash(char *json, int len, const char *funcname)
     sem->object_field_start = hash_object_field_start;
     sem->object_field_end = hash_object_field_end;
 
-    pg_parse_json_or_ereport(lex, sem);
+    pg_parse_json_or_ereport(state->lex, sem);
+
+    freeJsonLexContext(state->lex);
 
     return tab;
 }
@@ -3863,12 +3866,12 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
     if (is_json)
     {
         text       *json = PG_GETARG_TEXT_PP(json_arg_num);
-        JsonLexContext *lex;
+        JsonLexContext lex;
         JsonSemAction *sem;
 
         sem = palloc0(sizeof(JsonSemAction));
 
-        lex = makeJsonLexContext(json, true);
+        makeJsonLexContext(&lex, json, true);
 
         sem->semstate = (void *) state;
         sem->array_start = populate_recordset_array_start;
@@ -3879,9 +3882,12 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
         sem->object_start = populate_recordset_object_start;
         sem->object_end = populate_recordset_object_end;
 
-        state->lex = lex;
+        state->lex = &lex;
 
-        pg_parse_json_or_ereport(lex, sem);
+        pg_parse_json_or_ereport(&lex, sem);
+
+        freeJsonLexContext(&lex);
+        state->lex = NULL;
     }
     else
     {
@@ -4217,16 +4223,15 @@ json_strip_nulls(PG_FUNCTION_ARGS)
 {
     text       *json = PG_GETARG_TEXT_PP(0);
     StripnullState *state;
-    JsonLexContext *lex;
+    JsonLexContext lex;
     JsonSemAction *sem;
 
-    lex = makeJsonLexContext(json, true);
     state = palloc0(sizeof(StripnullState));
     sem = palloc0(sizeof(JsonSemAction));
 
+    state->lex = makeJsonLexContext(&lex, json, true);
     state->strval = makeStringInfo();
     state->skip_next_null = false;
-    state->lex = lex;
 
     sem->semstate = (void *) state;
     sem->object_start = sn_object_start;
@@ -4237,7 +4242,7 @@ json_strip_nulls(PG_FUNCTION_ARGS)
     sem->array_element_start = sn_array_element_start;
     sem->object_field_start = sn_object_field_start;
 
-    pg_parse_json_or_ereport(lex, sem);
+    pg_parse_json_or_ereport(&lex, sem);
 
     PG_RETURN_TEXT_P(cstring_to_text_with_len(state->strval->data,
                                               state->strval->len));
@@ -5433,11 +5438,11 @@ void
 iterate_json_values(text *json, uint32 flags, void *action_state,
                     JsonIterateStringValuesAction action)
 {
-    JsonLexContext *lex = makeJsonLexContext(json, true);
+    JsonLexContext lex;
     JsonSemAction *sem = palloc0(sizeof(JsonSemAction));
     IterateJsonStringValuesState *state = palloc0(sizeof(IterateJsonStringValuesState));
 
-    state->lex = lex;
+    state->lex = makeJsonLexContext(&lex, json, true);
     state->action = action;
     state->action_state = action_state;
     state->flags = flags;
@@ -5446,7 +5451,8 @@ iterate_json_values(text *json, uint32 flags, void *action_state,
     sem->scalar = iterate_values_scalar;
     sem->object_field_start = iterate_values_object_field_start;
 
-    pg_parse_json_or_ereport(lex, sem);
+    pg_parse_json_or_ereport(&lex, sem);
+    freeJsonLexContext(&lex);
 }
 
 /*
@@ -5553,11 +5559,11 @@ text *
 transform_json_string_values(text *json, void *action_state,
                              JsonTransformStringValuesAction transform_action)
 {
-    JsonLexContext *lex = makeJsonLexContext(json, true);
+    JsonLexContext lex;
     JsonSemAction *sem = palloc0(sizeof(JsonSemAction));
     TransformJsonStringValuesState *state = palloc0(sizeof(TransformJsonStringValuesState));
 
-    state->lex = lex;
+    state->lex = makeJsonLexContext(&lex, json, true);
     state->strval = makeStringInfo();
     state->action = transform_action;
     state->action_state = action_state;
@@ -5571,7 +5577,8 @@ transform_json_string_values(text *json, void *action_state,
     sem->array_element_start = transform_string_values_array_element_start;
     sem->object_field_start = transform_string_values_object_field_start;
 
-    pg_parse_json_or_ereport(lex, sem);
+    pg_parse_json_or_ereport(&lex, sem);
+    freeJsonLexContext(&lex);
 
     return cstring_to_text_with_len(state->strval->data, state->strval->len);
 }
@@ -5670,19 +5677,19 @@ transform_string_values_scalar(void *state, char *token, JsonTokenType tokentype
 JsonTokenType
 json_get_first_token(text *json, bool throw_error)
 {
-    JsonLexContext *lex;
+    JsonLexContext lex;
     JsonParseErrorType result;
 
-    lex = makeJsonLexContext(json, false);
+    makeJsonLexContext(&lex, json, false);
 
     /* Lex exactly one token from the input and check its type. */
-    result = json_lex(lex);
+    result = json_lex(&lex);
 
     if (result == JSON_SUCCESS)
-        return lex.token_type;
+        return lex.token_type;
 
     if (throw_error)
-        json_errsave_error(result, lex, NULL);
+        json_errsave_error(result, &lex, NULL);
 
     return JSON_TOKEN_INVALID;  /* invalid json */
 }
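Usage note: the caller pattern this patch converts to is a caller-supplied, typically stack-allocated JsonLexContext, initialized with makeJsonLexContext()/makeJsonLexContextCstringLen() and, when escaped strings were requested, released with freeJsonLexContext(). Below is a minimal sketch of that pattern under the post-patch declarations of JsonLexContext, json_lex(), and makeJsonLexContext() in the backend's JSON headers (common/jsonapi.h and utils/jsonfuncs.h); the function json_is_object_example() and its module boilerplate are hypothetical illustrations, not part of this commit.

    #include "postgres.h"

    #include "common/jsonapi.h"
    #include "fmgr.h"
    #include "utils/jsonfuncs.h"

    PG_MODULE_MAGIC;

    PG_FUNCTION_INFO_V1(json_is_object_example);

    /*
     * Hypothetical example (not part of this commit): report whether a json
     * value's first token opens an object, using the stack-allocated
     * JsonLexContext pattern adopted by this patch.
     */
    Datum
    json_is_object_example(PG_FUNCTION_ARGS)
    {
        text       *json = PG_GETARG_TEXT_PP(0);
        JsonLexContext lex;     /* caller-supplied; the context itself is not palloc'd */

        /* need_escapes = false: no de-escaped string buffer is allocated */
        makeJsonLexContext(&lex, json, false);

        /* lex only the first token, as json_typeof()/json_get_first_token() do */
        if (json_lex(&lex) != JSON_SUCCESS)
            PG_RETURN_BOOL(false);

        /*
         * Callers that pass need_escapes = true release the lexer's buffers
         * with freeJsonLexContext(&lex) after parsing, as the patched call
         * sites above do.
         */
        PG_RETURN_BOOL(lex.token_type == JSON_TOKEN_OBJECT_START);
    }

The point of the pattern, as the diff shows, is that short-lived parses no longer need a separately allocated lexer object, and cleanup of whatever the lexer did allocate is centralized in freeJsonLexContext() instead of ad hoc pfree() calls.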