aboutsummaryrefslogtreecommitdiff
path: root/src/backend/utils/adt/jsonfuncs.c
diff options
context:
space:
mode:
authorAndrew Dunstan <andrew@dunslane.net>2014-03-23 16:40:19 -0400
committerAndrew Dunstan <andrew@dunslane.net>2014-03-23 16:40:19 -0400
commitd9134d0a355cfa447adc80db4505d5931084278a (patch)
treecefe155d0f0f71b9279444a86eab4b1b1facafdb /src/backend/utils/adt/jsonfuncs.c
parentb2b2491b06074e68fc7c96148cb0fdf0c8eb0469 (diff)
downloadpostgresql-d9134d0a355cfa447adc80db4505d5931084278a.tar.gz
postgresql-d9134d0a355cfa447adc80db4505d5931084278a.zip
Introduce jsonb, a structured format for storing json.
The new format accepts exactly the same data as the json type. However, it is stored in a format that does not require reparsing the original text in order to process it, making it much more suitable for indexing and other operations. Insignificant whitespace is discarded, and the order of object keys is not preserved. Neither are duplicate object keys kept - the later value for a given key is the only one stored. The new type has all the functions and operators that the json type has, with the exception of the json generation functions (to_json, json_agg etc.) and with identical semantics. In addition, there are operator classes for hash and btree indexing, and two classes for GIN indexing, that have no equivalent in the json type. This feature grew out of previous work by Oleg Bartunov and Teodor Sigaev, which was intended to provide similar facilities to a nested hstore type, but which in the end proved to have some significant compatibility issues. Authors: Oleg Bartunov, Teodor Sigaev, Peter Geoghegan and Andrew Dunstan. Review: Andres Freund
Diffstat (limited to 'src/backend/utils/adt/jsonfuncs.c')
-rw-r--r--src/backend/utils/adt/jsonfuncs.c1151
1 files changed, 1060 insertions, 91 deletions
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 23203056085..f80eaeb1c6d 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -1,7 +1,7 @@
/*-------------------------------------------------------------------------
*
* jsonfuncs.c
- * Functions to process JSON data type.
+ * Functions to process JSON data types.
*
* Portions Copyright (c) 1996-2014, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
@@ -27,6 +27,7 @@
#include "utils/builtins.h"
#include "utils/hsearch.h"
#include "utils/json.h"
+#include "utils/jsonb.h"
#include "utils/jsonapi.h"
#include "utils/lsyscache.h"
#include "utils/memutils.h"
@@ -47,18 +48,20 @@ static void get_array_element_end(void *state, bool isnull);
static void get_scalar(void *state, char *token, JsonTokenType tokentype);
/* common worker function for json getter functions */
-static inline Datum get_path_all(PG_FUNCTION_ARGS, bool as_text);
+static inline Datum get_path_all(FunctionCallInfo fcinfo, bool as_text);
static inline text *get_worker(text *json, char *field, int elem_index,
char **tpath, int *ipath, int npath,
bool normalize_results);
+static inline Datum get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text);
/* semantic action functions for json_array_length */
static void alen_object_start(void *state);
static void alen_scalar(void *state, char *token, JsonTokenType tokentype);
static void alen_array_element_start(void *state, bool isnull);
-/* common worker for json_each* functions */
-static inline Datum each_worker(PG_FUNCTION_ARGS, bool as_text);
+/* common workers for json{b}_each* functions */
+static inline Datum each_worker(FunctionCallInfo fcinfo, bool as_text);
+static inline Datum each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text);
/* semantic action functions for json_each */
static void each_object_field_start(void *state, char *fname, bool isnull);
@@ -66,8 +69,9 @@ static void each_object_field_end(void *state, char *fname, bool isnull);
static void each_array_start(void *state);
static void each_scalar(void *state, char *token, JsonTokenType tokentype);
-/* common worker for json_each* functions */
-static inline Datum elements_worker(PG_FUNCTION_ARGS, bool as_text);
+/* common workers for json{b}_array_elements_* functions */
+static inline Datum elements_worker(FunctionCallInfo fcinfo, bool as_text);
+static inline Datum elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text);
/* semantic action functions for json_array_elements */
static void elements_object_start(void *state);
@@ -79,7 +83,7 @@ static void elements_scalar(void *state, char *token, JsonTokenType tokentype);
static HTAB *get_json_object_as_hash(text *json, char *funcname, bool use_json_as_text);
/* common worker for populate_record and to_record */
-static inline Datum populate_record_worker(PG_FUNCTION_ARGS,
+static inline Datum populate_record_worker(FunctionCallInfo fcinfo,
bool have_record_arg);
/* semantic action functions for get_json_object_as_hash */
@@ -98,8 +102,13 @@ static void populate_recordset_array_start(void *state);
static void populate_recordset_array_element_start(void *state, bool isnull);
/* worker function for populate_recordset and to_recordset */
-static inline Datum populate_recordset_worker(PG_FUNCTION_ARGS,
+static inline Datum populate_recordset_worker(FunctionCallInfo fcinfo,
bool have_record_arg);
+/* Worker that takes care of common setup for us */
+static JsonbValue *findJsonbValueFromSuperHeaderLen(JsonbSuperHeader sheader,
+ uint32 flags,
+ char *key,
+ uint32 keylen);
/* search type classification for json_get* functions */
typedef enum
@@ -225,18 +234,98 @@ typedef struct PopulateRecordsetState
MemoryContext fn_mcxt; /* used to stash IO funcs */
} PopulateRecordsetState;
+/* Turn a jsonb object into a record */
+static void make_row_from_rec_and_jsonb(Jsonb * element,
+ PopulateRecordsetState *state);
+
/*
- * SQL function json_object-keys
+ * SQL function json_object_keys
*
* Returns the set of keys for the object argument.
*
* This SRF operates in value-per-call mode. It processes the
* object during the first call, and the keys are simply stashed
- * in an array, whise size is expanded as necessary. This is probably
+ * in an array, whose size is expanded as necessary. This is probably
* safe enough for a list of keys of a single object, since they are
* limited in size to NAMEDATALEN and the number of keys is unlikely to
* be so huge that it has major memory implications.
*/
+Datum
+jsonb_object_keys(PG_FUNCTION_ARGS)
+{
+ FuncCallContext *funcctx;
+ OkeysState *state;
+ int i;
+
+ if (SRF_IS_FIRSTCALL())
+ {
+ MemoryContext oldcontext;
+ Jsonb *jb = PG_GETARG_JSONB(0);
+ bool skipNested = false;
+ JsonbIterator *it;
+ JsonbValue v;
+ int r;
+
+ /* only objects have keys; reject scalars and arrays up front */
+ if (JB_ROOT_IS_SCALAR(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_object_keys on a scalar")));
+ else if (JB_ROOT_IS_ARRAY(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_object_keys on an array")));
+
+ funcctx = SRF_FIRSTCALL_INIT();
+ oldcontext = MemoryContextSwitchTo(funcctx->multi_call_memory_ctx);
+
+ state = palloc(sizeof(OkeysState));
+
+ /* JB_ROOT_COUNT is the pair count, an upper bound on key count */
+ state->result_size = JB_ROOT_COUNT(jb);
+ state->result_count = 0;
+ state->sent_count = 0;
+ state->result = palloc(state->result_size * sizeof(char *));
+
+ it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+ /* collect top-level keys only; skipNested avoids descending */
+ while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE)
+ {
+ skipNested = true;
+
+ if (r == WJB_KEY)
+ {
+ char *cstr;
+
+ /*
+ * Allocate room for the key plus its NUL terminator. The
+ * previous coding "v.string.len + 1 * sizeof(char)" only
+ * happened to work because sizeof(char) is 1; parenthesize
+ * so the terminator byte is actually counted.
+ */
+ cstr = palloc((v.string.len + 1) * sizeof(char));
+ memcpy(cstr, v.string.val, v.string.len);
+ cstr[v.string.len] = '\0';
+ state->result[state->result_count++] = cstr;
+ }
+ }
+
+ MemoryContextSwitchTo(oldcontext);
+ funcctx->user_fctx = (void *) state;
+ }
+
+ funcctx = SRF_PERCALL_SETUP();
+ state = (OkeysState *) funcctx->user_fctx;
+
+ /* value-per-call mode: hand back one stashed key per invocation */
+ if (state->sent_count < state->result_count)
+ {
+ char *nxt = state->result[state->sent_count++];
+
+ SRF_RETURN_NEXT(funcctx, CStringGetTextDatum(nxt));
+ }
+
+ /* cleanup to reduce or eliminate memory leaks */
+ for (i = 0; i < state->result_count; i++)
+ pfree(state->result[i]);
+ pfree(state->result);
+ pfree(state);
+
+ SRF_RETURN_DONE(funcctx);
+}
Datum
@@ -350,9 +439,9 @@ okeys_scalar(void *state, char *token, JsonTokenType tokentype)
}
/*
- * json getter functions
+ * json and jsonb getter functions
* these implement the -> ->> #> and #>> operators
- * and the json_extract_path*(json, text, ...) functions
+ * and the json{b?}_extract_path*(json, text, ...) functions
*/
@@ -373,6 +462,51 @@ json_object_field(PG_FUNCTION_ARGS)
}
Datum
+jsonb_object_field(PG_FUNCTION_ARGS)
+{
+ /* Implements jsonb -> text: fetch the value for a top-level key */
+ Jsonb *jb = PG_GETARG_JSONB(0);
+ char *key = text_to_cstring(PG_GETARG_TEXT_P(1));
+ int klen = strlen(key);
+ JsonbIterator *it;
+ JsonbValue v;
+ int r;
+ bool skipNested = false;
+
+ /* the -> text operator is only defined for objects */
+ if (JB_ROOT_IS_SCALAR(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_object_field (jsonb -> text operator) on a scalar")));
+ else if (JB_ROOT_IS_ARRAY(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_object_field (jsonb -> text operator) on an array")));
+
+ Assert(JB_ROOT_IS_OBJECT(jb));
+
+ it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+ /* linear scan of top-level pairs; skipNested keeps us at one level */
+ while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE)
+ {
+ skipNested = true;
+
+ if (r == WJB_KEY)
+ {
+ /* keys are not NUL-terminated, so compare length first */
+ if (klen == v.string.len && strncmp(key, v.string.val, klen) == 0)
+ {
+ /*
+ * The next thing the iterator fetches should be the value, no
+ * matter what shape it is.
+ */
+ (void) JsonbIteratorNext(&it, &v, skipNested);
+ PG_RETURN_JSONB(JsonbValueToJsonb(&v));
+ }
+ }
+ }
+
+ /* key not present: SQL NULL, matching the json -> operator */
+ PG_RETURN_NULL();
+}
+
+Datum
json_object_field_text(PG_FUNCTION_ARGS)
{
text *json = PG_GETARG_TEXT_P(0);
@@ -389,6 +523,74 @@ json_object_field_text(PG_FUNCTION_ARGS)
}
Datum
+jsonb_object_field_text(PG_FUNCTION_ARGS)
+{
+ /* Implements jsonb ->> text: fetch a top-level key's value as text */
+ Jsonb *jb = PG_GETARG_JSONB(0);
+ char *key = text_to_cstring(PG_GETARG_TEXT_P(1));
+ int klen = strlen(key);
+ JsonbIterator *it;
+ JsonbValue v;
+ int r;
+ bool skipNested = false;
+
+ /* the ->> text operator is only defined for objects */
+ if (JB_ROOT_IS_SCALAR(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_object_field_text (jsonb ->> text operator) on a scalar")));
+ else if (JB_ROOT_IS_ARRAY(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_object_field_text (jsonb ->> text operator) on an array")));
+
+ Assert(JB_ROOT_IS_OBJECT(jb))
+
+ it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+ /* linear scan of top-level pairs; skipNested keeps us at one level */
+ while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE)
+ {
+ skipNested = true;
+
+ if (r == WJB_KEY)
+ {
+ /* keys are not NUL-terminated, so compare length first */
+ if (klen == v.string.len && strncmp(key, v.string.val, klen) == 0)
+ {
+ text *result;
+
+ /*
+ * The next thing the iterator fetches should be the value, no
+ * matter what shape it is.
+ */
+ r = JsonbIteratorNext(&it, &v, skipNested);
+
+ /*
+ * if it's a scalar string it needs to be de-escaped,
+ * otherwise just return the text
+ */
+ if (v.type == jbvString)
+ {
+ result = cstring_to_text_with_len(v.string.val, v.string.len);
+ }
+ else if (v.type == jbvNull)
+ {
+ /* a json null becomes an SQL NULL in text mode */
+ PG_RETURN_NULL();
+ }
+ else
+ {
+ /* containers and non-string scalars: serialize to json text */
+ StringInfo jtext = makeStringInfo();
+ Jsonb *tjb = JsonbValueToJsonb(&v);
+
+ (void) JsonbToCString(jtext, VARDATA(tjb), -1);
+ result = cstring_to_text_with_len(jtext->data, jtext->len);
+ }
+ PG_RETURN_TEXT_P(result);
+ }
+ }
+ }
+
+ /* key not present: SQL NULL */
+ PG_RETURN_NULL();
+}
+
+Datum
json_array_element(PG_FUNCTION_ARGS)
{
text *json = PG_GETARG_TEXT_P(0);
@@ -404,6 +606,44 @@ json_array_element(PG_FUNCTION_ARGS)
}
Datum
+jsonb_array_element(PG_FUNCTION_ARGS)
+{
+ /* Implements jsonb -> int: fetch the element at a 0-based index */
+ Jsonb *jb = PG_GETARG_JSONB(0);
+ int element = PG_GETARG_INT32(1);
+ JsonbIterator *it;
+ JsonbValue v;
+ int r;
+ bool skipNested = false;
+ int element_number = 0;
+
+ /* the -> int operator is only defined for arrays */
+ if (JB_ROOT_IS_SCALAR(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_array_element (jsonb -> int operator) on a scalar")));
+ else if (JB_ROOT_IS_OBJECT(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_array_element (jsonb -> int operator) on an object")));
+
+ Assert(JB_ROOT_IS_ARRAY(jb));
+
+ it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+ /* count top-level elements until we reach the requested index */
+ while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE)
+ {
+ skipNested = true;
+
+ if (r == WJB_ELEM)
+ {
+ if (element_number++ == element)
+ PG_RETURN_JSONB(JsonbValueToJsonb(&v));
+ }
+ }
+
+ /* out-of-range subscripts (including negative) fall through to NULL */
+ PG_RETURN_NULL();
+}
+
+Datum
json_array_element_text(PG_FUNCTION_ARGS)
{
text *json = PG_GETARG_TEXT_P(0);
@@ -419,6 +659,69 @@ json_array_element_text(PG_FUNCTION_ARGS)
}
Datum
+jsonb_array_element_text(PG_FUNCTION_ARGS)
+{
+ /* Implements jsonb ->> int: fetch the element at a 0-based index as text */
+ Jsonb *jb = PG_GETARG_JSONB(0);
+ int element = PG_GETARG_INT32(1);
+ JsonbIterator *it;
+ JsonbValue v;
+ int r;
+ bool skipNested = false;
+ int element_number = 0;
+
+
+ /* the ->> int operator is only defined for arrays */
+ if (JB_ROOT_IS_SCALAR(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_array_element_text on a scalar")));
+ else if (JB_ROOT_IS_OBJECT(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_array_element_text on an object")));
+
+ Assert(JB_ROOT_IS_ARRAY(jb));
+
+ it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+ /* count top-level elements until we reach the requested index */
+ while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE)
+ {
+ skipNested = true;
+
+ if (r == WJB_ELEM)
+ {
+ if (element_number++ == element)
+ {
+ /*
+ * if it's a scalar string it needs to be de-escaped,
+ * otherwise just return the text
+ */
+ text *result;
+
+ if (v.type == jbvString)
+ {
+ result = cstring_to_text_with_len(v.string.val, v.string.len);
+ }
+ else if (v.type == jbvNull)
+ {
+ /* a json null becomes an SQL NULL in text mode */
+ PG_RETURN_NULL();
+ }
+ else
+ {
+ /* containers and non-string scalars: serialize to json text */
+ StringInfo jtext = makeStringInfo();
+ Jsonb *tjb = JsonbValueToJsonb(&v);
+
+ (void) JsonbToCString(jtext, VARDATA(tjb), -1);
+ result = cstring_to_text_with_len(jtext->data, jtext->len);
+ }
+ PG_RETURN_TEXT_P(result);
+ }
+ }
+ }
+
+ /* out-of-range subscripts (including negative) fall through to NULL */
+ PG_RETURN_NULL();
+}
+
+Datum
json_extract_path(PG_FUNCTION_ARGS)
{
return get_path_all(fcinfo, false);
@@ -434,9 +737,9 @@ json_extract_path_text(PG_FUNCTION_ARGS)
* common routine for extract_path functions
*/
static inline Datum
-get_path_all(PG_FUNCTION_ARGS, bool as_text)
+get_path_all(FunctionCallInfo fcinfo, bool as_text)
{
- text *json = PG_GETARG_TEXT_P(0);
+ text *json;
ArrayType *path = PG_GETARG_ARRAYTYPE_P(1);
text *result;
Datum *pathtext;
@@ -448,6 +751,8 @@ get_path_all(PG_FUNCTION_ARGS, bool as_text)
long ind;
char *endptr;
+ json = PG_GETARG_TEXT_P(0);
+
if (array_contains_nulls(path))
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
@@ -486,8 +791,9 @@ get_path_all(PG_FUNCTION_ARGS, bool as_text)
result = get_worker(json, NULL, -1, tpath, ipath, npath, as_text);
if (result != NULL)
- PG_RETURN_TEXT_P(result);
+ PG_RETURN_TEXT_P(result);
else
+ /* null is NULL, regardless */
PG_RETURN_NULL();
}
@@ -668,7 +974,7 @@ get_object_field_end(void *state, char *fname, bool isnull)
/*
* make a text object from the string from the prevously noted json
* start up to the end of the previous token (the lexer is by now
- * ahead of us on whatevere came after what we're interested in).
+ * ahead of us on whatever came after what we're interested in).
*/
int len = _state->lex->prev_token_terminator - _state->result_start;
@@ -822,18 +1128,139 @@ get_scalar(void *state, char *token, JsonTokenType tokentype)
}
+Datum
+jsonb_extract_path(PG_FUNCTION_ARGS)
+{
+ /* #> operator / jsonb_extract_path(): result returned as jsonb */
+ return get_jsonb_path_all(fcinfo, false);
+}
+
+Datum
+jsonb_extract_path_text(PG_FUNCTION_ARGS)
+{
+ /* #>> operator / jsonb_extract_path_text(): result returned as text */
+ return get_jsonb_path_all(fcinfo, true);
+}
+
+static inline Datum
+get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text)
+{
+ /*
+ * Common worker for jsonb_extract_path{_text}: walk the path elements
+ * one level at a time, treating each element as an object key or an
+ * array subscript depending on the shape of the current container.
+ */
+ Jsonb *jb = PG_GETARG_JSONB(0);
+ ArrayType *path = PG_GETARG_ARRAYTYPE_P(1);
+ Datum *pathtext;
+ bool *pathnulls;
+ int npath;
+ int i;
+ Jsonb *res;
+ bool have_object = false,
+ have_array = false;
+ JsonbValue *jbvp = NULL;
+ JsonbValue tv;
+ JsonbSuperHeader superHeader;
+
+ if (array_contains_nulls(path))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call function with null path elements")));
+
+ deconstruct_array(path, TEXTOID, -1, false, 'i',
+ &pathtext, &pathnulls, &npath);
+
+ /*
+ * NOTE(review): if npath == 0 the loop below never runs and jbvp stays
+ * NULL, yet it is dereferenced after the loop — looks like an empty
+ * path array would crash; confirm whether callers can pass one.
+ */
+ if (JB_ROOT_IS_OBJECT(jb))
+ have_object = true;
+ else if (JB_ROOT_IS_ARRAY(jb) && !JB_ROOT_IS_SCALAR(jb))
+ have_array = true;
+
+ superHeader = (JsonbSuperHeader) VARDATA(jb);
+
+ for (i = 0; i < npath; i++)
+ {
+ if (have_object)
+ {
+ /* object: binary-search style lookup of the key at this level */
+ jbvp = findJsonbValueFromSuperHeaderLen(superHeader,
+ JB_FOBJECT,
+ VARDATA_ANY(pathtext[i]),
+ VARSIZE_ANY_EXHDR(pathtext[i]));
+ }
+ else if (have_array)
+ {
+ /* array: path element must parse as a non-negative int index */
+ long lindex;
+ uint32 index;
+ char *indextext = TextDatumGetCString(pathtext[i]);
+ char *endptr;
+
+ lindex = strtol(indextext, &endptr, 10);
+ if (*endptr != '\0' || lindex > INT_MAX || lindex < 0)
+ PG_RETURN_NULL();
+ index = (uint32) lindex;
+ jbvp = getIthJsonbValueFromSuperHeader(superHeader, index);
+ }
+ else
+ {
+ /* scalar: only an error if the very first step lands here */
+ if (i == 0)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call extract path from a scalar")));
+ PG_RETURN_NULL();
+ }
+
+ if (jbvp == NULL)
+ PG_RETURN_NULL();
+ else if (i == npath - 1)
+ break;
+
+ /* descend: work out the shape of the container we just entered */
+ if (jbvp->type == jbvBinary)
+ {
+ JsonbIterator *it = JsonbIteratorInit(jbvp->binary.data);
+ int r;
+
+ r = JsonbIteratorNext(&it, &tv, true);
+ superHeader = (JsonbSuperHeader) jbvp->binary.data;
+ have_object = r == WJB_BEGIN_OBJECT;
+ have_array = r == WJB_BEGIN_ARRAY;
+ }
+ else
+ {
+ have_object = jbvp->type == jbvObject;
+ have_array = jbvp->type == jbvArray;
+ }
+ }
+
+ if (as_text)
+ {
+ /* strings are dequoted and json nulls become SQL NULL in text mode */
+ if (jbvp->type == jbvString)
+ PG_RETURN_TEXT_P(cstring_to_text_with_len(jbvp->string.val, jbvp->string.len))
+ else if (jbvp->type == jbvNull)
+ PG_RETURN_NULL();
+ }
+
+ res = JsonbValueToJsonb(jbvp);
+
+ if (as_text)
+ {
+ PG_RETURN_TEXT_P(cstring_to_text(JsonbToCString(NULL,
+ VARDATA(res),
+ VARSIZE(res))));
+ }
+ else
+ {
+ /* not text mode - just hand back the jsonb */
+ PG_RETURN_JSONB(res);
+ }
+}
+
/*
* SQL function json_array_length(json) -> int
*/
Datum
json_array_length(PG_FUNCTION_ARGS)
{
- text *json = PG_GETARG_TEXT_P(0);
+ text *json;
AlenState *state;
- JsonLexContext *lex = makeJsonLexContext(json, false);
+ JsonLexContext *lex;
JsonSemAction *sem;
+ json = PG_GETARG_TEXT_P(0);
+ lex = makeJsonLexContext(json, false);
state = palloc0(sizeof(AlenState));
sem = palloc0(sizeof(JsonSemAction));
@@ -853,6 +1280,23 @@ json_array_length(PG_FUNCTION_ARGS)
PG_RETURN_INT32(state->count);
}
+Datum
+jsonb_array_length(PG_FUNCTION_ARGS)
+{
+ /*
+ * SQL function jsonb_array_length(jsonb) -> int
+ *
+ * Unlike the json variant, no parsing is needed: the element count is
+ * stored in the jsonb header, so this is O(1).
+ */
+ Jsonb *jb = PG_GETARG_JSONB(0);
+
+ if (JB_ROOT_IS_SCALAR(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot get array length of a scalar")));
+ else if (!JB_ROOT_IS_ARRAY(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot get array length of a non-array")));
+
+ PG_RETURN_INT32(JB_ROOT_COUNT(jb));
+}
+
/*
* These next two check ensure that the json is an array (since it can't be
* a scalar or an object).
@@ -909,22 +1353,177 @@ json_each(PG_FUNCTION_ARGS)
}
Datum
+jsonb_each(PG_FUNCTION_ARGS)
+{
+ /* SRF: expand a jsonb object into (key, value jsonb) pairs */
+ return each_worker_jsonb(fcinfo, false);
+}
+
+Datum
json_each_text(PG_FUNCTION_ARGS)
{
return each_worker(fcinfo, true);
}
+Datum
+jsonb_each_text(PG_FUNCTION_ARGS)
+{
+ /* SRF: expand a jsonb object into (key, value text) pairs */
+ return each_worker_jsonb(fcinfo, true);
+}
+
static inline Datum
-each_worker(PG_FUNCTION_ARGS, bool as_text)
+each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text)
{
- text *json = PG_GETARG_TEXT_P(0);
- JsonLexContext *lex = makeJsonLexContext(json, true);
+ /*
+ * Common worker for jsonb_each and jsonb_each_text: materialize a
+ * tuplestore of (key, value) pairs from the top level of an object.
+ * In text mode (as_text) values are dequoted / json-serialized text;
+ * otherwise each value is handed back as jsonb.
+ */
+ Jsonb *jb = PG_GETARG_JSONB(0);
+ ReturnSetInfo *rsi;
+ Tuplestorestate *tuple_store;
+ TupleDesc tupdesc;
+ TupleDesc ret_tdesc;
+ MemoryContext old_cxt,
+ tmp_cxt;
+ bool skipNested = false;
+ JsonbIterator *it;
+ JsonbValue v;
+ int r;
+
+ if (!JB_ROOT_IS_OBJECT(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_each%s on a non-object",
+ as_text ? "_text" : "")));
+
+ rsi = (ReturnSetInfo *) fcinfo->resultinfo;
+
+ /* we only support materialize mode */
+ if (!rsi || !IsA(rsi, ReturnSetInfo) ||
+ (rsi->allowedModes & SFRM_Materialize) == 0 ||
+ rsi->expectedDesc == NULL)
+ ereport(ERROR,
+ (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+ errmsg("set-valued function called in context that "
+ "cannot accept a set")));
+
+ rsi->returnMode = SFRM_Materialize;
+
+ if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
+ ereport(ERROR,
+ (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+ errmsg("function returning record called in context "
+ "that cannot accept type record")));
+
+ /* make the tupdesc and tuplestore in a sufficiently long-lived context */
+ old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
+
+ ret_tdesc = CreateTupleDescCopy(tupdesc);
+ BlessTupleDesc(ret_tdesc);
+ tuple_store =
+ tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
+ false, work_mem);
+
+ MemoryContextSwitchTo(old_cxt);
+
+ /* short-lived context, reset after every emitted tuple */
+ tmp_cxt = AllocSetContextCreate(CurrentMemoryContext,
+ "jsonb_each temporary cxt",
+ ALLOCSET_DEFAULT_MINSIZE,
+ ALLOCSET_DEFAULT_INITSIZE,
+ ALLOCSET_DEFAULT_MAXSIZE);
+
+ it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+ while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE)
+ {
+ skipNested = true;
+
+ if (r == WJB_KEY)
+ {
+ text *key;
+ HeapTuple tuple;
+ Datum values[2];
+ bool nulls[2] = {false, false};
+
+ /* Use the tmp context so we can clean up after each tuple is done */
+ old_cxt = MemoryContextSwitchTo(tmp_cxt);
+
+ key = cstring_to_text_with_len(v.string.val, v.string.len);
+
+ /*
+ * The next thing the iterator fetches should be the value, no
+ * matter what shape it is.
+ */
+ r = JsonbIteratorNext(&it, &v, skipNested);
+
+ values[0] = PointerGetDatum(key);
+
+ if (as_text)
+ {
+ if (v.type == jbvNull)
+ {
+ /* a json null is an sql null in text mode */
+ nulls[1] = true;
+ values[1] = (Datum) NULL;
+ }
+ else
+ {
+ text *sv;
+
+ if (v.type == jbvString)
+ {
+ /* In text mode, scalar strings should be dequoted */
+ sv = cstring_to_text_with_len(v.string.val, v.string.len);
+ }
+ else
+ {
+ /*
+ * Turn anything else into a json string. Note: use a
+ * distinct name for the temporary Jsonb so it does not
+ * shadow the function's "jb" argument.
+ */
+ StringInfo jtext = makeStringInfo();
+ Jsonb *vjb = JsonbValueToJsonb(&v);
+
+ (void) JsonbToCString(jtext, VARDATA(vjb), 2 * v.estSize);
+ sv = cstring_to_text_with_len(jtext->data, jtext->len);
+ }
+
+ values[1] = PointerGetDatum(sv);
+ }
+ }
+ else
+ {
+ /* Not in text mode, just return the Jsonb */
+ Jsonb *val = JsonbValueToJsonb(&v);
+
+ values[1] = PointerGetDatum(val);
+ }
+
+ tuple = heap_form_tuple(ret_tdesc, values, nulls);
+
+ tuplestore_puttuple(tuple_store, tuple);
+
+ /* clean up and switch back */
+ MemoryContextSwitchTo(old_cxt);
+ MemoryContextReset(tmp_cxt);
+ }
+ }
+
+ MemoryContextDelete(tmp_cxt);
+
+ rsi->setResult = tuple_store;
+ rsi->setDesc = ret_tdesc;
+
+ PG_RETURN_NULL();
+}
+
+
+static inline Datum
+each_worker(FunctionCallInfo fcinfo, bool as_text)
+{
+ text *json;
+ JsonLexContext *lex;
JsonSemAction *sem;
ReturnSetInfo *rsi;
MemoryContext old_cxt;
TupleDesc tupdesc;
EachState *state;
+ json = PG_GETARG_TEXT_P(0);
+
+ lex = makeJsonLexContext(json, true);
state = palloc0(sizeof(EachState));
sem = palloc0(sizeof(JsonSemAction));
@@ -941,11 +1540,7 @@ each_worker(PG_FUNCTION_ARGS, bool as_text)
rsi->returnMode = SFRM_Materialize;
- if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
- ereport(ERROR,
- (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("function returning record called in context "
- "that cannot accept type record")));
+ (void) get_call_result_type(fcinfo, NULL, &tupdesc);
/* make these in a sufficiently long-lived memory context */
old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
@@ -1087,6 +1682,146 @@ each_scalar(void *state, char *token, JsonTokenType tokentype)
*
* a lot of this processing is similar to the json_each* functions
*/
+
+Datum
+jsonb_array_elements(PG_FUNCTION_ARGS)
+{
+ /* SRF: expand a jsonb array into one jsonb value per element */
+ return elements_worker_jsonb(fcinfo, false);
+}
+
+Datum
+jsonb_array_elements_text(PG_FUNCTION_ARGS)
+{
+ /* SRF: expand a jsonb array into one text value per element */
+ return elements_worker_jsonb(fcinfo, true);
+}
+
+static inline Datum
+elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text)
+{
+ /*
+ * Common worker for jsonb_array_elements and jsonb_array_elements_text:
+ * materialize a tuplestore with one row per top-level array element.
+ * In text mode (as_text) elements come back as dequoted/serialized
+ * text; otherwise each element is handed back as jsonb.
+ */
+ Jsonb *jb = PG_GETARG_JSONB(0);
+ ReturnSetInfo *rsi;
+ Tuplestorestate *tuple_store;
+ TupleDesc tupdesc;
+ TupleDesc ret_tdesc;
+ MemoryContext old_cxt,
+ tmp_cxt;
+ bool skipNested = false;
+ JsonbIterator *it;
+ JsonbValue v;
+ int r;
+
+ if (JB_ROOT_IS_SCALAR(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot extract elements from a scalar")));
+ else if (!JB_ROOT_IS_ARRAY(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot extract elements from an object")));
+
+ rsi = (ReturnSetInfo *) fcinfo->resultinfo;
+
+ /* we only support materialize mode */
+ if (!rsi || !IsA(rsi, ReturnSetInfo) ||
+ (rsi->allowedModes & SFRM_Materialize) == 0 ||
+ rsi->expectedDesc == NULL)
+ ereport(ERROR,
+ (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+ errmsg("set-valued function called in context that "
+ "cannot accept a set")));
+
+ rsi->returnMode = SFRM_Materialize;
+
+ /* it's a simple type, so don't use get_call_result_type() */
+ tupdesc = rsi->expectedDesc;
+
+ /* make the tupdesc and tuplestore in a sufficiently long-lived context */
+ old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
+
+ ret_tdesc = CreateTupleDescCopy(tupdesc);
+ BlessTupleDesc(ret_tdesc);
+ tuple_store =
+ tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
+ false, work_mem);
+
+ MemoryContextSwitchTo(old_cxt);
+
+ /*
+ * Short-lived context, reset after every emitted tuple. Label it for
+ * this function rather than reusing the jsonb_each label, so memory
+ * context dumps point at the right caller.
+ */
+ tmp_cxt = AllocSetContextCreate(CurrentMemoryContext,
+ "jsonb_array_elements temporary cxt",
+ ALLOCSET_DEFAULT_MINSIZE,
+ ALLOCSET_DEFAULT_INITSIZE,
+ ALLOCSET_DEFAULT_MAXSIZE);
+
+ it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+ while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE)
+ {
+ skipNested = true;
+
+ if (r == WJB_ELEM)
+ {
+ HeapTuple tuple;
+ Datum values[1];
+ bool nulls[1] = {false};
+
+ /* use the tmp context so we can clean up after each tuple is done */
+ old_cxt = MemoryContextSwitchTo(tmp_cxt);
+
+ if (!as_text)
+ {
+ Jsonb *val = JsonbValueToJsonb(&v);
+
+ values[0] = PointerGetDatum(val);
+ }
+ else
+ {
+ if (v.type == jbvNull)
+ {
+ /* a json null is an sql null in text mode */
+ nulls[0] = true;
+ values[0] = (Datum) NULL;
+ }
+ else
+ {
+ text *sv;
+
+ if (v.type == jbvString)
+ {
+ /* in text mode scalar strings should be dequoted */
+ sv = cstring_to_text_with_len(v.string.val, v.string.len);
+ }
+ else
+ {
+ /*
+ * turn anything else into a json string; use a distinct
+ * name so the temporary does not shadow the "jb" argument
+ */
+ StringInfo jtext = makeStringInfo();
+ Jsonb *vjb = JsonbValueToJsonb(&v);
+
+ (void) JsonbToCString(jtext, VARDATA(vjb), 2 * v.estSize);
+ sv = cstring_to_text_with_len(jtext->data, jtext->len);
+ }
+
+ values[0] = PointerGetDatum(sv);
+ }
+ }
+
+ tuple = heap_form_tuple(ret_tdesc, values, nulls);
+
+ tuplestore_puttuple(tuple_store, tuple);
+
+ /* clean up and switch back */
+ MemoryContextSwitchTo(old_cxt);
+ MemoryContextReset(tmp_cxt);
+ }
+ }
+
+ MemoryContextDelete(tmp_cxt);
+
+ rsi->setResult = tuple_store;
+ rsi->setDesc = ret_tdesc;
+
+ PG_RETURN_NULL();
+}
+
Datum
json_array_elements(PG_FUNCTION_ARGS)
{
@@ -1100,7 +1835,7 @@ json_array_elements_text(PG_FUNCTION_ARGS)
}
static inline Datum
-elements_worker(PG_FUNCTION_ARGS, bool as_text)
+elements_worker(FunctionCallInfo fcinfo, bool as_text)
{
text *json = PG_GETARG_TEXT_P(0);
@@ -1270,9 +2005,16 @@ elements_scalar(void *state, char *token, JsonTokenType tokentype)
* which is in turn partly adapted from record_out.
*
* The json is decomposed into a hash table, in which each
- * field in the record is then looked up by name.
+ * field in the record is then looked up by name. For jsonb
+ * we fetch the values direct from the object.
*/
Datum
+jsonb_populate_record(PG_FUNCTION_ARGS)
+{
+ return populate_record_worker(fcinfo, true);
+}
+
+Datum
json_populate_record(PG_FUNCTION_ARGS)
{
return populate_record_worker(fcinfo, true);
@@ -1285,11 +2027,14 @@ json_to_record(PG_FUNCTION_ARGS)
}
static inline Datum
-populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
+populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg)
{
+ Oid argtype;
+ Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, have_record_arg ? 1 : 0);
text *json;
+ Jsonb *jb = NULL;
bool use_json_as_text;
- HTAB *json_hash;
+ HTAB *json_hash = NULL;
HeapTupleHeader rec = NULL;
Oid tupType = InvalidOid;
int32 tupTypmod = -1;
@@ -1301,19 +2046,20 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
int i;
Datum *values;
bool *nulls;
- char fname[NAMEDATALEN];
- JsonHashEntry *hashentry;
+
+ Assert(jtype == JSONOID || jtype == JSONBOID);
+
+ use_json_as_text = PG_ARGISNULL(have_record_arg ? 2 : 1) ? false :
+ PG_GETARG_BOOL(have_record_arg ? 2 : 1);
if (have_record_arg)
{
- Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
-
- use_json_as_text = PG_ARGISNULL(2) ? false : PG_GETARG_BOOL(2);
+ argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
if (!type_is_rowtype(argtype))
ereport(ERROR,
(errcode(ERRCODE_DATATYPE_MISMATCH),
- errmsg("first argument of json_populate_record must be a row type")));
+ errmsg("first argument of json%s_populate_record must be a row type", jtype == JSONBOID ? "b" : "")));
if (PG_ARGISNULL(0))
{
@@ -1340,19 +2086,16 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
tupTypmod = HeapTupleHeaderGetTypMod(rec);
}
- json = PG_GETARG_TEXT_P(1);
+ tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
}
else
- {
- /* json_to_record case */
+ { /* json{b}_to_record case */
use_json_as_text = PG_ARGISNULL(1) ? false : PG_GETARG_BOOL(1);
if (PG_ARGISNULL(0))
PG_RETURN_NULL();
- json = PG_GETARG_TEXT_P(0);
-
if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
ereport(ERROR,
(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
@@ -1362,11 +2105,13 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
"using a column definition list.")));
}
- json_hash = get_json_object_as_hash(json, "json_populate_record",
- use_json_as_text);
-
- if (have_record_arg)
+ if (jtype == JSONOID)
{
+ /* just get the text */
+ json = PG_GETARG_TEXT_P(have_record_arg ? 1 : 0);
+
+ json_hash = get_json_object_as_hash(json, "json_populate_record", use_json_as_text);
+
/*
* if the input json is empty, we can only skip the rest if we were
* passed in a non-null record, since otherwise there may be issues
@@ -1375,8 +2120,14 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
if (hash_get_num_entries(json_hash) == 0 && rec)
PG_RETURN_POINTER(rec);
+ }
+ else
+ {
+ jb = PG_GETARG_JSONB(have_record_arg ? 1 : 0);
- tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+ /* same logic as for json */
+ if (!have_record_arg && rec)
+ PG_RETURN_POINTER(rec);
}
ncolumns = tupdesc->natts;
@@ -1439,7 +2190,9 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
{
ColumnIOData *column_info = &my_extra->columns[i];
Oid column_type = tupdesc->attrs[i]->atttypid;
- char *value;
+ JsonbValue *v = NULL;
+ char fname[NAMEDATALEN];
+ JsonHashEntry *hashentry = NULL;
/* Ignore dropped columns in datatype */
if (tupdesc->attrs[i]->attisdropped)
@@ -1448,9 +2201,20 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
continue;
}
- memset(fname, 0, NAMEDATALEN);
- strncpy(fname, NameStr(tupdesc->attrs[i]->attname), NAMEDATALEN);
- hashentry = hash_search(json_hash, fname, HASH_FIND, NULL);
+ if (jtype == JSONOID)
+ {
+
+ memset(fname, 0, NAMEDATALEN);
+ strncpy(fname, NameStr(tupdesc->attrs[i]->attname), NAMEDATALEN);
+ hashentry = hash_search(json_hash, fname, HASH_FIND, NULL);
+ }
+ else
+ {
+ char *key = NameStr(tupdesc->attrs[i]->attname);
+
+ v = findJsonbValueFromSuperHeaderLen(VARDATA(jb), JB_FOBJECT, key,
+ strlen(key));
+ }
/*
* we can't just skip here if the key wasn't found since we might have
@@ -1460,7 +2224,8 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
* then every field which we don't populate needs to be run through
* the input function just in case it's a domain type.
*/
- if (hashentry == NULL && rec)
+ if (((jtype == JSONOID && hashentry == NULL) ||
+ (jtype == JSONBOID && v == NULL)) && rec)
continue;
/*
@@ -1475,7 +2240,8 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
fcinfo->flinfo->fn_mcxt);
column_info->column_type = column_type;
}
- if (hashentry == NULL || hashentry->isnull)
+ if ((jtype == JSONOID && (hashentry == NULL || hashentry->isnull)) ||
+ (jtype == JSONBOID && (v == NULL || v->type == jbvNull)))
{
/*
* need InputFunctionCall to happen even for nulls, so that domain
@@ -1488,9 +2254,33 @@ populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
}
else
{
- value = hashentry->val;
+ char *s = NULL;
- values[i] = InputFunctionCall(&column_info->proc, value,
+ if (jtype == JSONOID)
+ {
+ /* already done the hard work in the json case */
+ s = hashentry->val;
+ }
+ else
+ {
+ if (v->type == jbvString)
+ s = pnstrdup(v->string.val, v->string.len);
+ else if (v->type == jbvBool)
+ s = pnstrdup((v->boolean) ? "t" : "f", 1);
+ else if (v->type == jbvNumeric)
+ s = DatumGetCString(DirectFunctionCall1(numeric_out,
+ PointerGetDatum(v->numeric)));
+ else if (!use_json_as_text)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot populate with a nested object unless use_json_as_text is true")));
+ else if (v->type == jbvBinary)
+ s = JsonbToCString(NULL, v->binary.data, v->binary.len);
+ else
+ elog(ERROR, "invalid jsonb type");
+ }
+
+ values[i] = InputFunctionCall(&column_info->proc, s,
column_info->typioparam,
tupdesc->attrs[i]->atttypmod);
nulls[i] = false;
@@ -1656,6 +2446,134 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype)
* per object in the array.
*/
Datum
+jsonb_populate_recordset(PG_FUNCTION_ARGS)
+{
+ /* jsonb flavor: delegate to the shared worker with have_record_arg = true */
+ return populate_recordset_worker(fcinfo, true);
+}
+
+/*
+ * make_row_from_rec_and_jsonb
+ *
+ * Build one output row for jsonb_populate_recordset() from a single jsonb
+ * object ("element") plus the optional template record carried in "state",
+ * and append the resulting tuple to the state's tuplestore.
+ */
+static void
+make_row_from_rec_and_jsonb(Jsonb * element, PopulateRecordsetState *state)
+{
+ Datum *values;
+ bool *nulls;
+ int i;
+ RecordIOData *my_extra = state->my_extra;
+ int ncolumns = my_extra->ncolumns;
+ TupleDesc tupdesc = state->ret_tdesc;
+ HeapTupleHeader rec = state->rec;
+ HeapTuple rettuple;
+
+ values = (Datum *) palloc(ncolumns * sizeof(Datum));
+ nulls = (bool *) palloc(ncolumns * sizeof(bool));
+
+ if (state->rec)
+ {
+ HeapTupleData tuple;
+
+ /* Build a temporary HeapTuple control structure */
+ tuple.t_len = HeapTupleHeaderGetDatumLength(state->rec);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = state->rec;
+
+ /* Break down the tuple into fields */
+ heap_deform_tuple(&tuple, tupdesc, values, nulls);
+ }
+ else
+ {
+ /* No template record: start every column out as NULL */
+ for (i = 0; i < ncolumns; ++i)
+ {
+ values[i] = (Datum) 0;
+ nulls[i] = true;
+ }
+ }
+
+ for (i = 0; i < ncolumns; ++i)
+ {
+ ColumnIOData *column_info = &my_extra->columns[i];
+ Oid column_type = tupdesc->attrs[i]->atttypid;
+ JsonbValue *v = NULL;
+ char *key;
+
+ /* Ignore dropped columns in datatype */
+ if (tupdesc->attrs[i]->attisdropped)
+ {
+ nulls[i] = true;
+ continue;
+ }
+
+ /* Look up the jsonb object field whose key matches this column name */
+ key = NameStr(tupdesc->attrs[i]->attname);
+
+ v = findJsonbValueFromSuperHeaderLen(VARDATA(element), JB_FOBJECT,
+ key, strlen(key));
+
+ /*
+ * We can't just skip here if the key wasn't found since we might have
+ * a domain to deal with. If we were passed in a non-null record
+ * datum, we assume that the existing values are valid (if they're
+ * not, then it's not our fault), but if we were passed in a null,
+ * then every field which we don't populate needs to be run through
+ * the input function just in case it's a domain type.
+ */
+ if (v == NULL && rec)
+ continue;
+
+ /*
+ * Prepare to convert the column value from text
+ */
+ if (column_info->column_type != column_type)
+ {
+ getTypeInputInfo(column_type,
+ &column_info->typiofunc,
+ &column_info->typioparam);
+ fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
+ state->fn_mcxt);
+ column_info->column_type = column_type;
+ }
+ if (v == NULL || v->type == jbvNull)
+ {
+ /*
+ * Need InputFunctionCall to happen even for nulls, so that domain
+ * checks are done
+ */
+ values[i] = InputFunctionCall(&column_info->proc, NULL,
+ column_info->typioparam,
+ tupdesc->attrs[i]->atttypmod);
+ nulls[i] = true;
+ }
+ else
+ {
+ /* Render the jsonb value as text for the column's input function */
+ char *s = NULL;
+
+ if (v->type == jbvString)
+ s = pnstrdup(v->string.val, v->string.len);
+ else if (v->type == jbvBool)
+ s = pnstrdup((v->boolean) ? "t" : "f", 1);
+ else if (v->type == jbvNumeric)
+ s = DatumGetCString(DirectFunctionCall1(numeric_out,
+ PointerGetDatum(v->numeric)));
+ else if (!use_json_as_text)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot populate with a nested object unless use_json_as_text is true")));
+ else if (v->type == jbvBinary)
+ s = JsonbToCString(NULL, v->binary.data, v->binary.len);
+ else
+ elog(ERROR, "invalid jsonb type");
+
+ values[i] = InputFunctionCall(&column_info->proc, s,
+ column_info->typioparam,
+ tupdesc->attrs[i]->atttypmod);
+ nulls[i] = false;
+ }
+ }
+
+ rettuple = heap_form_tuple(tupdesc, values, nulls);
+
+ tuplestore_puttuple(state->tuple_store, rettuple);
+}
+
+Datum
json_populate_recordset(PG_FUNCTION_ARGS)
{
return populate_recordset_worker(fcinfo, true);
@@ -1671,10 +2589,10 @@ json_to_recordset(PG_FUNCTION_ARGS)
* common worker for json_populate_recordset() and json_to_recordset()
*/
static inline Datum
-populate_recordset_worker(PG_FUNCTION_ARGS, bool have_record_arg)
+populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg)
{
Oid argtype;
- text *json;
+ Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, have_record_arg ? 1 : 0);
bool use_json_as_text;
ReturnSetInfo *rsi;
MemoryContext old_cxt;
@@ -1684,8 +2602,6 @@ populate_recordset_worker(PG_FUNCTION_ARGS, bool have_record_arg)
TupleDesc tupdesc;
RecordIOData *my_extra;
int ncolumns;
- JsonLexContext *lex;
- JsonSemAction *sem;
PopulateRecordsetState *state;
if (have_record_arg)
@@ -1721,7 +2637,8 @@ populate_recordset_worker(PG_FUNCTION_ARGS, bool have_record_arg)
/*
* get the tupdesc from the result set info - it must be a record type
- * because we already checked that arg1 is a record type.
+ * because we already checked that arg1 is a record type, or we're in a
+ * to_record function which returns a setof record.
*/
if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
ereport(ERROR,
@@ -1729,29 +2646,12 @@ populate_recordset_worker(PG_FUNCTION_ARGS, bool have_record_arg)
errmsg("function returning record called in context "
"that cannot accept type record")));
- state = palloc0(sizeof(PopulateRecordsetState));
- sem = palloc0(sizeof(JsonSemAction));
-
-
- /* make these in a sufficiently long-lived memory context */
- old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
-
- state->ret_tdesc = CreateTupleDescCopy(tupdesc);
- BlessTupleDesc(state->ret_tdesc);
- state->tuple_store =
- tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
- false, work_mem);
-
- MemoryContextSwitchTo(old_cxt);
-
/* if the json is null send back an empty set */
if (have_record_arg)
{
if (PG_ARGISNULL(1))
PG_RETURN_NULL();
- json = PG_GETARG_TEXT_P(1);
-
if (PG_ARGISNULL(0))
rec = NULL;
else
@@ -1759,11 +2659,9 @@ populate_recordset_worker(PG_FUNCTION_ARGS, bool have_record_arg)
}
else
{
- if (PG_ARGISNULL(0))
+ if (PG_ARGISNULL(1))
PG_RETURN_NULL();
- json = PG_GETARG_TEXT_P(0);
-
rec = NULL;
}
@@ -1771,8 +2669,6 @@ populate_recordset_worker(PG_FUNCTION_ARGS, bool have_record_arg)
tupTypmod = tupdesc->tdtypmod;
ncolumns = tupdesc->natts;
- lex = makeJsonLexContext(json, true);
-
/*
* We arrange to look up the needed I/O info just once per series of
* calls, assuming the record type doesn't change underneath us.
@@ -1801,23 +2697,80 @@ populate_recordset_worker(PG_FUNCTION_ARGS, bool have_record_arg)
my_extra->ncolumns = ncolumns;
}
- sem->semstate = (void *) state;
- sem->array_start = populate_recordset_array_start;
- sem->array_element_start = populate_recordset_array_element_start;
- sem->scalar = populate_recordset_scalar;
- sem->object_field_start = populate_recordset_object_field_start;
- sem->object_field_end = populate_recordset_object_field_end;
- sem->object_start = populate_recordset_object_start;
- sem->object_end = populate_recordset_object_end;
+ state = palloc0(sizeof(PopulateRecordsetState));
- state->lex = lex;
+ /* make these in a sufficiently long-lived memory context */
+ old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
+ state->ret_tdesc = CreateTupleDescCopy(tupdesc);;
+ BlessTupleDesc(state->ret_tdesc);
+ state->tuple_store = tuplestore_begin_heap(rsi->allowedModes &
+ SFRM_Materialize_Random,
+ false, work_mem);
+ MemoryContextSwitchTo(old_cxt);
state->my_extra = my_extra;
state->rec = rec;
state->use_json_as_text = use_json_as_text;
state->fn_mcxt = fcinfo->flinfo->fn_mcxt;
- pg_parse_json(lex, sem);
+ if (jtype == JSONOID)
+ {
+ text *json = PG_GETARG_TEXT_P(have_record_arg ? 1 : 0);
+ JsonLexContext *lex;
+ JsonSemAction *sem;
+
+ sem = palloc0(sizeof(JsonSemAction));
+
+ lex = makeJsonLexContext(json, true);
+
+ sem->semstate = (void *) state;
+ sem->array_start = populate_recordset_array_start;
+ sem->array_element_start = populate_recordset_array_element_start;
+ sem->scalar = populate_recordset_scalar;
+ sem->object_field_start = populate_recordset_object_field_start;
+ sem->object_field_end = populate_recordset_object_field_end;
+ sem->object_start = populate_recordset_object_start;
+ sem->object_end = populate_recordset_object_end;
+
+ state->lex = lex;
+
+ pg_parse_json(lex, sem);
+
+ }
+ else
+ {
+ Jsonb *jb;
+ JsonbIterator *it;
+ JsonbValue v;
+ bool skipNested = false;
+ int r;
+
+ Assert(jtype == JSONBOID);
+ jb = PG_GETARG_JSONB(have_record_arg ? 1 : 0);
+
+ if (JB_ROOT_IS_SCALAR(jb) || !JB_ROOT_IS_ARRAY(jb))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("cannot call jsonb_populate_recordset on non-array")));
+
+ it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+ while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE)
+ {
+ skipNested = true;
+
+ if (r == WJB_ELEM)
+ {
+ Jsonb *element = JsonbValueToJsonb(&v);
+
+ if (!JB_ROOT_IS_OBJECT(element))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("jsonb_populate_recordset argument must be an array of objects")));
+ make_row_from_rec_and_jsonb(element, state);
+ }
+ }
+ }
rsi->setResult = state->tuple_store;
rsi->setDesc = state->ret_tdesc;
@@ -2067,3 +3020,19 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull)
hashentry->val = _state->saved_scalar;
}
}
+
+/*
+ * findJsonbValueFromSuperHeader() wrapper that sets up JsonbValue key string.
+ *
+ * Builds a transient jbvString search key from "key"/"keylen" and forwards
+ * to findJsonbValueFromSuperHeader() with the given flags (callers in this
+ * file pass JB_FOBJECT to look up object fields by name).
+ */
+static JsonbValue *
+findJsonbValueFromSuperHeaderLen(JsonbSuperHeader sheader, uint32 flags,
+ char *key, uint32 keylen)
+{
+ JsonbValue k;
+
+ k.type = jbvString;
+ k.string.val = key;
+ k.string.len = keylen;
+
+ return findJsonbValueFromSuperHeader(sheader, flags, NULL, &k);
+}