| /*------------------------------------------------------------------------- |
| * |
| * json.c |
| * JSON data type support. |
| * |
| * Portions Copyright (c) 1996-2021, PostgreSQL Global Development Group |
| * Portions Copyright (c) 1994, Regents of the University of California |
| * |
| * IDENTIFICATION |
| * src/backend/utils/adt/json.c |
| * |
| *------------------------------------------------------------------------- |
| */ |
| #include "postgres.h" |
| |
| #include "catalog/pg_type.h" |
| #include "funcapi.h" |
| #include "libpq/pqformat.h" |
| #include "miscadmin.h" |
| #include "parser/parse_coerce.h" |
| #include "utils/array.h" |
| #include "utils/builtins.h" |
| #include "utils/date.h" |
| #include "utils/datetime.h" |
| #include "utils/json.h" |
| #include "utils/jsonfuncs.h" |
| #include "utils/lsyscache.h" |
| #include "utils/typcache.h" |
| |
typedef enum					/* type categories for datum_to_json */
{
	JSONTYPE_NULL,				/* null, so we didn't bother to identify */
	JSONTYPE_BOOL,				/* boolean (built-in types only) */
	JSONTYPE_NUMERIC,			/* numeric (ditto) */
	JSONTYPE_DATE,				/* we use special formatting for datetimes */
	JSONTYPE_TIMESTAMP,			/* ... and for timestamp */
	JSONTYPE_TIMESTAMPTZ,		/* ... and timestamptz */
	JSONTYPE_JSON,				/* JSON itself (and JSONB) */
	JSONTYPE_ARRAY,				/* array */
	JSONTYPE_COMPOSITE,			/* composite */
	JSONTYPE_CAST,				/* something with an explicit cast to JSON */
	JSONTYPE_OTHER				/* all else */
} JsonTypeCategory;
| |
/*
 * Transition state for json_agg() and json_object_agg().  The output text is
 * accumulated directly in "str"; the category/output-function pairs are
 * determined once, when the first row is seen, and reused for later rows.
 * (json_agg uses only the val_* fields.)
 */
typedef struct JsonAggState
{
	StringInfo	str;			/* accumulated JSON text */
	JsonTypeCategory key_category;	/* categorization of the key argument */
	Oid			key_output_func;	/* key's output (or cast) function */
	JsonTypeCategory val_category;	/* categorization of the value argument */
	Oid			val_output_func;	/* value's output (or cast) function */
} JsonAggState;
| |
/* Forward declarations of local (static) helper routines */
static void composite_to_json(Datum composite, StringInfo result,
							  bool use_line_feeds);
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
							  Datum *vals, bool *nulls, int *valcount,
							  JsonTypeCategory tcategory, Oid outfuncoid,
							  bool use_line_feeds);
static void array_to_json_internal(Datum array, StringInfo result,
								   bool use_line_feeds);
static void json_categorize_type(Oid typoid,
								 JsonTypeCategory *tcategory,
								 Oid *outfuncoid);
static void datum_to_json(Datum val, bool is_null, StringInfo result,
						  JsonTypeCategory tcategory, Oid outfuncoid,
						  bool key_scalar);
static void add_json(Datum val, bool is_null, StringInfo result,
					 Oid val_type, bool key_scalar);
static text *catenate_stringinfo_string(StringInfo buffer, const char *addon);
| |
| /* |
| * Input. |
| */ |
| Datum |
| json_in(PG_FUNCTION_ARGS) |
| { |
| char *json = PG_GETARG_CSTRING(0); |
| text *result = cstring_to_text(json); |
| JsonLexContext *lex; |
| |
| /* validate it */ |
| lex = makeJsonLexContext(result, false); |
| pg_parse_json_or_ereport(lex, &nullSemAction); |
| |
| /* Internal representation is the same as text, for now */ |
| PG_RETURN_TEXT_P(result); |
| } |
| |
| /* |
| * Output. |
| */ |
| Datum |
| json_out(PG_FUNCTION_ARGS) |
| { |
| /* we needn't detoast because text_to_cstring will handle that */ |
| Datum txt = PG_GETARG_DATUM(0); |
| |
| PG_RETURN_CSTRING(TextDatumGetCString(txt)); |
| } |
| |
| /* |
| * Binary send. |
| */ |
| Datum |
| json_send(PG_FUNCTION_ARGS) |
| { |
| text *t = PG_GETARG_TEXT_PP(0); |
| StringInfoData buf; |
| |
| pq_begintypsend(&buf); |
| pq_sendtext(&buf, VARDATA_ANY(t), VARSIZE_ANY_EXHDR(t)); |
| PG_RETURN_BYTEA_P(pq_endtypsend(&buf)); |
| } |
| |
| /* |
| * Binary receive. |
| */ |
| Datum |
| json_recv(PG_FUNCTION_ARGS) |
| { |
| StringInfo buf = (StringInfo) PG_GETARG_POINTER(0); |
| char *str; |
| int nbytes; |
| JsonLexContext *lex; |
| |
| str = pq_getmsgtext(buf, buf->len - buf->cursor, &nbytes); |
| |
| /* Validate it. */ |
| lex = makeJsonLexContextCstringLen(str, nbytes, GetDatabaseEncoding(), false); |
| pg_parse_json_or_ereport(lex, &nullSemAction); |
| |
| PG_RETURN_TEXT_P(cstring_to_text_with_len(str, nbytes)); |
| } |
| |
/*
 * Determine how we want to print values of a given type in datum_to_json.
 *
 * Given the datatype OID, return its JsonTypeCategory, as well as the type's
 * output function OID.  If the returned category is JSONTYPE_CAST, we
 * return the OID of the type->JSON cast function instead.
 */
static void
json_categorize_type(Oid typoid,
					 JsonTypeCategory *tcategory,
					 Oid *outfuncoid)
{
	bool		typisvarlena;

	/* Look through any domain */
	typoid = getBaseType(typoid);

	*outfuncoid = InvalidOid;

	/*
	 * We need to get the output function for everything except date and
	 * timestamp types, array and composite types, booleans, and non-builtin
	 * types where there's a cast to json.
	 */

	switch (typoid)
	{
		case BOOLOID:
			*tcategory = JSONTYPE_BOOL;
			break;

		case INT2OID:
		case INT4OID:
		case INT8OID:
		case FLOAT4OID:
		case FLOAT8OID:
		case NUMERICOID:
			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
			*tcategory = JSONTYPE_NUMERIC;
			break;

		case DATEOID:
			*tcategory = JSONTYPE_DATE;
			break;

		case TIMESTAMPOID:
			*tcategory = JSONTYPE_TIMESTAMP;
			break;

		case TIMESTAMPTZOID:
			*tcategory = JSONTYPE_TIMESTAMPTZ;
			break;

		case JSONOID:
		case JSONBOID:
			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
			*tcategory = JSONTYPE_JSON;
			break;

		default:
			/*
			 * Check for arrays and composites.  Note the array check must
			 * come first: an array of composites is still an array.
			 */
			if (OidIsValid(get_element_type(typoid)) || typoid == ANYARRAYOID
				|| typoid == ANYCOMPATIBLEARRAYOID || typoid == RECORDARRAYOID)
				*tcategory = JSONTYPE_ARRAY;
			else if (type_is_rowtype(typoid))	/* includes RECORDOID */
				*tcategory = JSONTYPE_COMPOSITE;
			else
			{
				/* It's probably the general case ... */
				*tcategory = JSONTYPE_OTHER;
				/* but let's look for a cast to json, if it's not built-in */
				if (typoid >= FirstNormalObjectId)
				{
					Oid			castfunc;
					CoercionPathType ctype;

					ctype = find_coercion_pathway(JSONOID, typoid,
												  COERCION_EXPLICIT,
												  &castfunc);
					if (ctype == COERCION_PATH_FUNC && OidIsValid(castfunc))
					{
						/* cast exists; datum_to_json will invoke it */
						*tcategory = JSONTYPE_CAST;
						*outfuncoid = castfunc;
					}
					else
					{
						/* non builtin type with no cast */
						getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
					}
				}
				else
				{
					/* any other builtin type */
					getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
				}
			}
			break;
	}
}
| |
/*
 * Turn a Datum into JSON text, appending the string to "result".
 *
 * tcategory and outfuncoid are from a previous call to json_categorize_type,
 * except that if is_null is true then they can be invalid.
 *
 * If key_scalar is true, the value is being printed as a key, so insist
 * it's of an acceptable type, and force it to be quoted.
 */
static void
datum_to_json(Datum val, bool is_null, StringInfo result,
			  JsonTypeCategory tcategory, Oid outfuncoid,
			  bool key_scalar)
{
	char	   *outputstr;
	text	   *jsontext;

	/* recursion guard: arrays/composites can nest arbitrarily deep */
	check_stack_depth();

	/* callers are expected to ensure that null keys are not passed in */
	Assert(!(key_scalar && is_null));

	if (is_null)
	{
		appendStringInfoString(result, "null");
		return;
	}

	/* object keys must render as scalars, never as structured values */
	if (key_scalar &&
		(tcategory == JSONTYPE_ARRAY ||
		 tcategory == JSONTYPE_COMPOSITE ||
		 tcategory == JSONTYPE_JSON ||
		 tcategory == JSONTYPE_CAST))
		ereport(ERROR,
				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
				 errmsg("key value must be scalar, not array, composite, or json")));

	switch (tcategory)
	{
		case JSONTYPE_ARRAY:
			array_to_json_internal(val, result, false);
			break;
		case JSONTYPE_COMPOSITE:
			composite_to_json(val, result, false);
			break;
		case JSONTYPE_BOOL:
			outputstr = DatumGetBool(val) ? "true" : "false";
			if (key_scalar)
				escape_json(result, outputstr);
			else
				appendStringInfoString(result, outputstr);
			break;
		case JSONTYPE_NUMERIC:
			outputstr = OidOutputFunctionCall(outfuncoid, val);

			/*
			 * Don't call escape_json for a non-key if it's a valid JSON
			 * number.  (Quoting is forced for non-numeric-looking output
			 * such as NaN/Infinity, and always for keys.)
			 */
			if (!key_scalar && IsValidJsonNumber(outputstr, strlen(outputstr)))
				appendStringInfoString(result, outputstr);
			else
				escape_json(result, outputstr);
			pfree(outputstr);
			break;
		case JSONTYPE_DATE:
			{
				char		buf[MAXDATELEN + 1];

				JsonEncodeDateTime(buf, val, DATEOID, NULL);
				appendStringInfo(result, "\"%s\"", buf);
			}
			break;
		case JSONTYPE_TIMESTAMP:
			{
				char		buf[MAXDATELEN + 1];

				JsonEncodeDateTime(buf, val, TIMESTAMPOID, NULL);
				appendStringInfo(result, "\"%s\"", buf);
			}
			break;
		case JSONTYPE_TIMESTAMPTZ:
			{
				char		buf[MAXDATELEN + 1];

				JsonEncodeDateTime(buf, val, TIMESTAMPTZOID, NULL);
				appendStringInfo(result, "\"%s\"", buf);
			}
			break;
		case JSONTYPE_JSON:
			/* JSON and JSONB output will already be escaped */
			outputstr = OidOutputFunctionCall(outfuncoid, val);
			appendStringInfoString(result, outputstr);
			pfree(outputstr);
			break;
		case JSONTYPE_CAST:
			/* outfuncoid refers to a cast function, not an output function */
			jsontext = DatumGetTextPP(OidFunctionCall1(outfuncoid, val));
			outputstr = text_to_cstring(jsontext);
			appendStringInfoString(result, outputstr);
			pfree(outputstr);
			pfree(jsontext);
			break;
		default:
			/* JSONTYPE_OTHER: render via output function, always quoted */
			outputstr = OidOutputFunctionCall(outfuncoid, val);
			escape_json(result, outputstr);
			pfree(outputstr);
			break;
	}
}
| |
/*
 * Encode 'value' of datetime type 'typid' into JSON string in ISO format using
 * optionally preallocated buffer 'buf'.  Optional 'tzp' determines time-zone
 * offset (in seconds) in which we want to show timestamptz.
 *
 * If 'buf' is NULL, a buffer of MAXDATELEN + 1 bytes is palloc'd; a
 * caller-supplied buffer is assumed to be at least that large (all callers
 * in this file pass MAXDATELEN + 1).  Returns the buffer actually used.
 */
char *
JsonEncodeDateTime(char *buf, Datum value, Oid typid, const int *tzp)
{
	if (!buf)
		buf = palloc(MAXDATELEN + 1);

	switch (typid)
	{
		case DATEOID:
			{
				DateADT		date;
				struct pg_tm tm;

				date = DatumGetDateADT(value);

				/* Same as date_out(), but forcing DateStyle */
				if (DATE_NOT_FINITE(date))
					EncodeSpecialDate(date, buf);
				else
				{
					j2date(date + POSTGRES_EPOCH_JDATE,
						   &(tm.tm_year), &(tm.tm_mon), &(tm.tm_mday));
					EncodeDateOnly(&tm, USE_XSD_DATES, buf);
				}
			}
			break;
		case TIMEOID:
			{
				TimeADT		time = DatumGetTimeADT(value);
				struct pg_tm tt,
						   *tm = &tt;
				fsec_t		fsec;

				/* Same as time_out(), but forcing DateStyle */
				time2tm(time, tm, &fsec);
				EncodeTimeOnly(tm, fsec, false, 0, USE_XSD_DATES, buf);
			}
			break;
		case TIMETZOID:
			{
				TimeTzADT  *time = DatumGetTimeTzADTP(value);
				struct pg_tm tt,
						   *tm = &tt;
				fsec_t		fsec;
				int			tz;

				/* Same as timetz_out(), but forcing DateStyle */
				timetz2tm(time, tm, &fsec, &tz);
				EncodeTimeOnly(tm, fsec, true, tz, USE_XSD_DATES, buf);
			}
			break;
		case TIMESTAMPOID:
			{
				Timestamp	timestamp;
				struct pg_tm tm;
				fsec_t		fsec;

				timestamp = DatumGetTimestamp(value);
				/* Same as timestamp_out(), but forcing DateStyle */
				if (TIMESTAMP_NOT_FINITE(timestamp))
					EncodeSpecialTimestamp(timestamp, buf);
				else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
					EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
				else
					ereport(ERROR,
							(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
							 errmsg("timestamp out of range")));
			}
			break;
		case TIMESTAMPTZOID:
			{
				TimestampTz timestamp;
				struct pg_tm tm;
				int			tz;
				fsec_t		fsec;
				const char *tzn = NULL;

				timestamp = DatumGetTimestampTz(value);

				/*
				 * If a time zone is specified, we apply the time-zone shift,
				 * convert timestamptz to pg_tm as if it were without a time
				 * zone, and then use the specified time zone for converting
				 * the timestamp into a string.
				 */
				if (tzp)
				{
					tz = *tzp;
					timestamp -= (TimestampTz) tz * USECS_PER_SEC;
				}

				/* Same as timestamptz_out(), but forcing DateStyle */
				if (TIMESTAMP_NOT_FINITE(timestamp))
					EncodeSpecialTimestamp(timestamp, buf);
				else if (timestamp2tm(timestamp, tzp ? NULL : &tz, &tm, &fsec,
									  tzp ? NULL : &tzn, NULL) == 0)
				{
					if (tzp)
						tm.tm_isdst = 1;	/* set time-zone presence flag */

					EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
				}
				else
					ereport(ERROR,
							(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
							 errmsg("timestamp out of range")));
			}
			break;
		default:
			elog(ERROR, "unknown jsonb value datetime type oid %u", typid);
			return NULL;		/* not reached; keeps compiler quiet */
	}

	return buf;
}
| |
| /* |
| * Process a single dimension of an array. |
| * If it's the innermost dimension, output the values, otherwise call |
| * ourselves recursively to process the next dimension. |
| */ |
| static void |
| array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals, |
| bool *nulls, int *valcount, JsonTypeCategory tcategory, |
| Oid outfuncoid, bool use_line_feeds) |
| { |
| int i; |
| const char *sep; |
| |
| Assert(dim < ndims); |
| |
| sep = use_line_feeds ? ",\n " : ","; |
| |
| appendStringInfoChar(result, '['); |
| |
| for (i = 1; i <= dims[dim]; i++) |
| { |
| if (i > 1) |
| appendStringInfoString(result, sep); |
| |
| if (dim + 1 == ndims) |
| { |
| datum_to_json(vals[*valcount], nulls[*valcount], result, tcategory, |
| outfuncoid, false); |
| (*valcount)++; |
| } |
| else |
| { |
| /* |
| * Do we want line feeds on inner dimensions of arrays? For now |
| * we'll say no. |
| */ |
| array_dim_to_json(result, dim + 1, ndims, dims, vals, nulls, |
| valcount, tcategory, outfuncoid, false); |
| } |
| } |
| |
| appendStringInfoChar(result, ']'); |
| } |
| |
| /* |
| * Turn an array into JSON. |
| */ |
| static void |
| array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds) |
| { |
| ArrayType *v = DatumGetArrayTypeP(array); |
| Oid element_type = ARR_ELEMTYPE(v); |
| int *dim; |
| int ndim; |
| int nitems; |
| int count = 0; |
| Datum *elements; |
| bool *nulls; |
| int16 typlen; |
| bool typbyval; |
| char typalign; |
| JsonTypeCategory tcategory; |
| Oid outfuncoid; |
| |
| ndim = ARR_NDIM(v); |
| dim = ARR_DIMS(v); |
| nitems = ArrayGetNItems(ndim, dim); |
| |
| if (nitems <= 0) |
| { |
| appendStringInfoString(result, "[]"); |
| return; |
| } |
| |
| get_typlenbyvalalign(element_type, |
| &typlen, &typbyval, &typalign); |
| |
| json_categorize_type(element_type, |
| &tcategory, &outfuncoid); |
| |
| deconstruct_array(v, element_type, typlen, typbyval, |
| typalign, &elements, &nulls, |
| &nitems); |
| |
| array_dim_to_json(result, 0, ndim, dim, elements, nulls, &count, tcategory, |
| outfuncoid, use_line_feeds); |
| |
| pfree(elements); |
| pfree(nulls); |
| } |
| |
/*
 * Turn a composite / record into JSON.
 *
 * Emits an object whose keys are the (escaped) column names; dropped
 * columns are skipped.  Each column value is categorized individually,
 * since a record's columns can all be of different types.
 */
static void
composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
{
	HeapTupleHeader td;
	Oid			tupType;
	int32		tupTypmod;
	TupleDesc	tupdesc;
	HeapTupleData tmptup,
			   *tuple;
	int			i;
	bool		needsep = false;
	const char *sep;

	sep = use_line_feeds ? ",\n " : ",";

	td = DatumGetHeapTupleHeader(composite);

	/* Extract rowtype info and find a tupdesc */
	tupType = HeapTupleHeaderGetTypeId(td);
	tupTypmod = HeapTupleHeaderGetTypMod(td);
	tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);

	/* Build a temporary HeapTuple control structure */
	tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
	tmptup.t_data = td;
	tuple = &tmptup;

	appendStringInfoChar(result, '{');

	for (i = 0; i < tupdesc->natts; i++)
	{
		Datum		val;
		bool		isnull;
		char	   *attname;
		JsonTypeCategory tcategory;
		Oid			outfuncoid;
		Form_pg_attribute att = TupleDescAttr(tupdesc, i);

		/* dropped columns don't appear in the output */
		if (att->attisdropped)
			continue;

		if (needsep)
			appendStringInfoString(result, sep);
		needsep = true;

		attname = NameStr(att->attname);
		escape_json(result, attname);
		appendStringInfoChar(result, ':');

		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);

		if (isnull)
		{
			/* no need to categorize; datum_to_json prints "null" */
			tcategory = JSONTYPE_NULL;
			outfuncoid = InvalidOid;
		}
		else
			json_categorize_type(att->atttypid, &tcategory, &outfuncoid);

		datum_to_json(val, isnull, result, tcategory, outfuncoid, false);
	}

	appendStringInfoChar(result, '}');
	/* release the refcount acquired by lookup_rowtype_tupdesc */
	ReleaseTupleDesc(tupdesc);
}
| |
| /* |
| * Append JSON text for "val" to "result". |
| * |
| * This is just a thin wrapper around datum_to_json. If the same type will be |
| * printed many times, avoid using this; better to do the json_categorize_type |
| * lookups only once. |
| */ |
| static void |
| add_json(Datum val, bool is_null, StringInfo result, |
| Oid val_type, bool key_scalar) |
| { |
| JsonTypeCategory tcategory; |
| Oid outfuncoid; |
| |
| if (val_type == InvalidOid) |
| ereport(ERROR, |
| (errcode(ERRCODE_INVALID_PARAMETER_VALUE), |
| errmsg("could not determine input data type"))); |
| |
| if (is_null) |
| { |
| tcategory = JSONTYPE_NULL; |
| outfuncoid = InvalidOid; |
| } |
| else |
| json_categorize_type(val_type, |
| &tcategory, &outfuncoid); |
| |
| datum_to_json(val, is_null, result, tcategory, outfuncoid, key_scalar); |
| } |
| |
| /* |
| * SQL function array_to_json(row) |
| */ |
| Datum |
| array_to_json(PG_FUNCTION_ARGS) |
| { |
| Datum array = PG_GETARG_DATUM(0); |
| StringInfo result; |
| |
| result = makeStringInfo(); |
| |
| array_to_json_internal(array, result, false); |
| |
| PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); |
| } |
| |
| /* |
| * SQL function array_to_json(row, prettybool) |
| */ |
| Datum |
| array_to_json_pretty(PG_FUNCTION_ARGS) |
| { |
| Datum array = PG_GETARG_DATUM(0); |
| bool use_line_feeds = PG_GETARG_BOOL(1); |
| StringInfo result; |
| |
| result = makeStringInfo(); |
| |
| array_to_json_internal(array, result, use_line_feeds); |
| |
| PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); |
| } |
| |
| /* |
| * SQL function row_to_json(row) |
| */ |
| Datum |
| row_to_json(PG_FUNCTION_ARGS) |
| { |
| Datum array = PG_GETARG_DATUM(0); |
| StringInfo result; |
| |
| result = makeStringInfo(); |
| |
| composite_to_json(array, result, false); |
| |
| PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); |
| } |
| |
| /* |
| * SQL function row_to_json(row, prettybool) |
| */ |
| Datum |
| row_to_json_pretty(PG_FUNCTION_ARGS) |
| { |
| Datum array = PG_GETARG_DATUM(0); |
| bool use_line_feeds = PG_GETARG_BOOL(1); |
| StringInfo result; |
| |
| result = makeStringInfo(); |
| |
| composite_to_json(array, result, use_line_feeds); |
| |
| PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); |
| } |
| |
| /* |
| * SQL function to_json(anyvalue) |
| */ |
| Datum |
| to_json(PG_FUNCTION_ARGS) |
| { |
| Datum val = PG_GETARG_DATUM(0); |
| Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0); |
| StringInfo result; |
| JsonTypeCategory tcategory; |
| Oid outfuncoid; |
| |
| if (val_type == InvalidOid) |
| ereport(ERROR, |
| (errcode(ERRCODE_INVALID_PARAMETER_VALUE), |
| errmsg("could not determine input data type"))); |
| |
| json_categorize_type(val_type, |
| &tcategory, &outfuncoid); |
| |
| result = makeStringInfo(); |
| |
| datum_to_json(val, false, result, tcategory, outfuncoid, false); |
| |
| PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); |
| } |
| |
/*
 * json_agg transition function
 *
 * aggregate input column as a json array value.
 *
 * The transition state is a JsonAggState; it is created on the first call
 * (when argument 0 is NULL) and thereafter accumulated into in place.
 */
Datum
json_agg_transfn(PG_FUNCTION_ARGS)
{
	MemoryContext aggcontext,
				oldcontext;
	JsonAggState *state;
	Datum		val;

	if (!AggCheckCallContext(fcinfo, &aggcontext))
	{
		/* cannot be called directly because of internal-type argument */
		elog(ERROR, "json_agg_transfn called in non-aggregate context");
	}

	if (PG_ARGISNULL(0))
	{
		/* first call: set up the state and categorize the input type once */
		Oid			arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1);

		if (arg_type == InvalidOid)
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("could not determine input data type")));

		/*
		 * Make this state object in a context where it will persist for the
		 * duration of the aggregate call.  MemoryContextSwitchTo is only
		 * needed the first time, as the StringInfo routines make sure they
		 * use the right context to enlarge the object if necessary.
		 */
		oldcontext = MemoryContextSwitchTo(aggcontext);
		state = (JsonAggState *) palloc(sizeof(JsonAggState));
		state->str = makeStringInfo();
		MemoryContextSwitchTo(oldcontext);

		appendStringInfoChar(state->str, '[');
		json_categorize_type(arg_type, &state->val_category,
							 &state->val_output_func);
	}
	else
	{
		/* subsequent calls: just append a separator before the new value */
		state = (JsonAggState *) PG_GETARG_POINTER(0);
		appendStringInfoString(state->str, ", ");
	}

	/* fast path for NULLs */
	if (PG_ARGISNULL(1))
	{
		datum_to_json((Datum) 0, true, state->str, JSONTYPE_NULL,
					  InvalidOid, false);
		PG_RETURN_POINTER(state);
	}

	val = PG_GETARG_DATUM(1);

	/* add some whitespace if structured type and not first item */
	if (!PG_ARGISNULL(0) &&
		(state->val_category == JSONTYPE_ARRAY ||
		 state->val_category == JSONTYPE_COMPOSITE))
	{
		appendStringInfoString(state->str, "\n ");
	}

	datum_to_json(val, false, state->str, state->val_category,
				  state->val_output_func, false);

	/*
	 * The transition type for json_agg() is declared to be "internal", which
	 * is a pass-by-value type the same size as a pointer.  So we can safely
	 * pass the JsonAggState pointer through nodeAgg.c's machinations.
	 */
	PG_RETURN_POINTER(state);
}
| |
| /* |
| * json_agg final function |
| */ |
| Datum |
| json_agg_finalfn(PG_FUNCTION_ARGS) |
| { |
| JsonAggState *state; |
| |
| /* cannot be called directly because of internal-type argument */ |
| Assert(AggCheckCallContext(fcinfo, NULL)); |
| |
| state = PG_ARGISNULL(0) ? |
| NULL : |
| (JsonAggState *) PG_GETARG_POINTER(0); |
| |
| /* NULL result for no rows in, as is standard with aggregates */ |
| if (state == NULL) |
| PG_RETURN_NULL(); |
| |
| /* Else return state with appropriate array terminator added */ |
| PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, "]")); |
| } |
| |
/*
 * json_object_agg transition function.
 *
 * aggregate two input columns as a single json object value.
 *
 * Like json_agg_transfn, the JsonAggState is built on the first call; both
 * the key and value argument types are categorized once and then reused.
 */
Datum
json_object_agg_transfn(PG_FUNCTION_ARGS)
{
	MemoryContext aggcontext,
				oldcontext;
	JsonAggState *state;
	Datum		arg;

	if (!AggCheckCallContext(fcinfo, &aggcontext))
	{
		/* cannot be called directly because of internal-type argument */
		elog(ERROR, "json_object_agg_transfn called in non-aggregate context");
	}

	if (PG_ARGISNULL(0))
	{
		Oid			arg_type;

		/*
		 * Make the StringInfo in a context where it will persist for the
		 * duration of the aggregate call.  Switching context is only needed
		 * for this initial step, as the StringInfo routines make sure they
		 * use the right context to enlarge the object if necessary.
		 */
		oldcontext = MemoryContextSwitchTo(aggcontext);
		state = (JsonAggState *) palloc(sizeof(JsonAggState));
		state->str = makeStringInfo();
		MemoryContextSwitchTo(oldcontext);

		/* argument 1 is the key column */
		arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1);

		if (arg_type == InvalidOid)
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("could not determine data type for argument %d", 1)));

		json_categorize_type(arg_type, &state->key_category,
							 &state->key_output_func);

		/* argument 2 is the value column */
		arg_type = get_fn_expr_argtype(fcinfo->flinfo, 2);

		if (arg_type == InvalidOid)
			ereport(ERROR,
					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
					 errmsg("could not determine data type for argument %d", 2)));

		json_categorize_type(arg_type, &state->val_category,
							 &state->val_output_func);

		appendStringInfoString(state->str, "{ ");
	}
	else
	{
		state = (JsonAggState *) PG_GETARG_POINTER(0);
		appendStringInfoString(state->str, ", ");
	}

	/*
	 * Note: since json_object_agg() is declared as taking type "any", the
	 * parser will not do any type conversion on unknown-type literals (that
	 * is, undecorated strings or NULLs).  Such values will arrive here as
	 * type UNKNOWN, which fortunately does not matter to us, since
	 * unknownout() works fine.
	 */

	if (PG_ARGISNULL(1))
		ereport(ERROR,
				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
				 errmsg("field name must not be null")));

	arg = PG_GETARG_DATUM(1);

	/* key_scalar = true: keys must be scalars and are always quoted */
	datum_to_json(arg, false, state->str, state->key_category,
				  state->key_output_func, true);

	appendStringInfoString(state->str, " : ");

	if (PG_ARGISNULL(2))
		arg = (Datum) 0;
	else
		arg = PG_GETARG_DATUM(2);

	datum_to_json(arg, PG_ARGISNULL(2), state->str, state->val_category,
				  state->val_output_func, false);

	PG_RETURN_POINTER(state);
}
| |
| /* |
| * json_object_agg final function. |
| */ |
| Datum |
| json_object_agg_finalfn(PG_FUNCTION_ARGS) |
| { |
| JsonAggState *state; |
| |
| /* cannot be called directly because of internal-type argument */ |
| Assert(AggCheckCallContext(fcinfo, NULL)); |
| |
| state = PG_ARGISNULL(0) ? NULL : (JsonAggState *) PG_GETARG_POINTER(0); |
| |
| /* NULL result for no rows in, as is standard with aggregates */ |
| if (state == NULL) |
| PG_RETURN_NULL(); |
| |
| /* Else return state with appropriate object terminator added */ |
| PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, " }")); |
| } |
| |
| /* |
| * Helper function for aggregates: return given StringInfo's contents plus |
| * specified trailing string, as a text datum. We need this because aggregate |
| * final functions are not allowed to modify the aggregate state. |
| */ |
| static text * |
| catenate_stringinfo_string(StringInfo buffer, const char *addon) |
| { |
| /* custom version of cstring_to_text_with_len */ |
| int buflen = buffer->len; |
| int addlen = strlen(addon); |
| text *result = (text *) palloc(buflen + addlen + VARHDRSZ); |
| |
| SET_VARSIZE(result, buflen + addlen + VARHDRSZ); |
| memcpy(VARDATA(result), buffer->data, buflen); |
| memcpy(VARDATA(result) + buflen, addon, addlen); |
| |
| return result; |
| } |
| |
| /* |
| * SQL function json_build_object(variadic "any") |
| */ |
| Datum |
| json_build_object(PG_FUNCTION_ARGS) |
| { |
| int nargs; |
| int i; |
| const char *sep = ""; |
| StringInfo result; |
| Datum *args; |
| bool *nulls; |
| Oid *types; |
| |
| /* fetch argument values to build the object */ |
| nargs = extract_variadic_args(fcinfo, 0, false, &args, &types, &nulls); |
| |
| if (nargs < 0) |
| PG_RETURN_NULL(); |
| |
| if (nargs % 2 != 0) |
| ereport(ERROR, |
| (errcode(ERRCODE_INVALID_PARAMETER_VALUE), |
| errmsg("argument list must have even number of elements"), |
| /* translator: %s is a SQL function name */ |
| errhint("The arguments of %s must consist of alternating keys and values.", |
| "json_build_object()"))); |
| |
| result = makeStringInfo(); |
| |
| appendStringInfoChar(result, '{'); |
| |
| for (i = 0; i < nargs; i += 2) |
| { |
| appendStringInfoString(result, sep); |
| sep = ", "; |
| |
| /* process key */ |
| if (nulls[i]) |
| ereport(ERROR, |
| (errcode(ERRCODE_INVALID_PARAMETER_VALUE), |
| errmsg("argument %d cannot be null", i + 1), |
| errhint("Object keys should be text."))); |
| |
| add_json(args[i], false, result, types[i], true); |
| |
| appendStringInfoString(result, " : "); |
| |
| /* process value */ |
| add_json(args[i + 1], nulls[i + 1], result, types[i + 1], false); |
| } |
| |
| appendStringInfoChar(result, '}'); |
| |
| PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); |
| } |
| |
| /* |
| * degenerate case of json_build_object where it gets 0 arguments. |
| */ |
| Datum |
| json_build_object_noargs(PG_FUNCTION_ARGS) |
| { |
| PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2)); |
| } |
| |
| /* |
| * SQL function json_build_array(variadic "any") |
| */ |
| Datum |
| json_build_array(PG_FUNCTION_ARGS) |
| { |
| int nargs; |
| int i; |
| const char *sep = ""; |
| StringInfo result; |
| Datum *args; |
| bool *nulls; |
| Oid *types; |
| |
| /* fetch argument values to build the array */ |
| nargs = extract_variadic_args(fcinfo, 0, false, &args, &types, &nulls); |
| |
| if (nargs < 0) |
| PG_RETURN_NULL(); |
| |
| result = makeStringInfo(); |
| |
| appendStringInfoChar(result, '['); |
| |
| for (i = 0; i < nargs; i++) |
| { |
| appendStringInfoString(result, sep); |
| sep = ", "; |
| add_json(args[i], nulls[i], result, types[i], false); |
| } |
| |
| appendStringInfoChar(result, ']'); |
| |
| PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); |
| } |
| |
| /* |
| * degenerate case of json_build_array where it gets 0 arguments. |
| */ |
| Datum |
| json_build_array_noargs(PG_FUNCTION_ARGS) |
| { |
| PG_RETURN_TEXT_P(cstring_to_text_with_len("[]", 2)); |
| } |
| |
/*
 * SQL function json_object(text[])
 *
 * take a one or two dimensional array of text as key/value pairs
 * for a json object.
 *
 * In the 1-D case the elements alternate key, value, key, value ...;
 * in the 2-D case each row must be exactly [key, value].  Keys may not
 * be NULL; NULL values render as JSON null.
 */
Datum
pg_json_object(PG_FUNCTION_ARGS)
{
	ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
	int			ndims = ARR_NDIM(in_array);
	StringInfoData result;
	Datum	   *in_datums;
	bool	   *in_nulls;
	int			in_count,
				count,
				i;
	text	   *rval;
	char	   *v;

	switch (ndims)
	{
		case 0:
			/* empty array yields an empty object */
			PG_RETURN_DATUM(CStringGetTextDatum("{}"));
			break;

		case 1:
			/* 1-D: pairs are consecutive elements, so length must be even */
			if ((ARR_DIMS(in_array)[0]) % 2)
				ereport(ERROR,
						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
						 errmsg("array must have even number of elements")));
			break;

		case 2:
			/* 2-D: each row is one pair, so rows must be two wide */
			if ((ARR_DIMS(in_array)[1]) != 2)
				ereport(ERROR,
						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
						 errmsg("array must have two columns")));
			break;

		default:
			ereport(ERROR,
					(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
					 errmsg("wrong number of array subscripts")));
	}

	deconstruct_array(in_array,
					  TEXTOID, -1, false, TYPALIGN_INT,
					  &in_datums, &in_nulls, &in_count);

	/* either way, the flattened element list alternates key/value */
	count = in_count / 2;

	initStringInfo(&result);

	appendStringInfoChar(&result, '{');

	for (i = 0; i < count; ++i)
	{
		if (in_nulls[i * 2])
			ereport(ERROR,
					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
					 errmsg("null value not allowed for object key")));

		v = TextDatumGetCString(in_datums[i * 2]);
		if (i > 0)
			appendStringInfoString(&result, ", ");
		escape_json(&result, v);
		appendStringInfoString(&result, " : ");
		pfree(v);
		if (in_nulls[i * 2 + 1])
			appendStringInfoString(&result, "null");
		else
		{
			v = TextDatumGetCString(in_datums[i * 2 + 1]);
			escape_json(&result, v);
			pfree(v);
		}
	}

	appendStringInfoChar(&result, '}');

	pfree(in_datums);
	pfree(in_nulls);

	rval = cstring_to_text_with_len(result.data, result.len);
	pfree(result.data);

	PG_RETURN_TEXT_P(rval);

}
| |
| /* |
| * SQL function json_object(text[], text[]) |
| * |
| * take separate key and value arrays of text to construct a json object |
| * pairwise. |
| */ |
| Datum |
| json_object_two_arg(PG_FUNCTION_ARGS) |
| { |
| ArrayType *key_array = PG_GETARG_ARRAYTYPE_P(0); |
| ArrayType *val_array = PG_GETARG_ARRAYTYPE_P(1); |
| int nkdims = ARR_NDIM(key_array); |
| int nvdims = ARR_NDIM(val_array); |
| StringInfoData result; |
| Datum *key_datums, |
| *val_datums; |
| bool *key_nulls, |
| *val_nulls; |
| int key_count, |
| val_count, |
| i; |
| text *rval; |
| char *v; |
| |
| if (nkdims > 1 || nkdims != nvdims) |
| ereport(ERROR, |
| (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR), |
| errmsg("wrong number of array subscripts"))); |
| |
| if (nkdims == 0) |
| PG_RETURN_DATUM(CStringGetTextDatum("{}")); |
| |
| deconstruct_array(key_array, |
| TEXTOID, -1, false, TYPALIGN_INT, |
| &key_datums, &key_nulls, &key_count); |
| |
| deconstruct_array(val_array, |
| TEXTOID, -1, false, TYPALIGN_INT, |
| &val_datums, &val_nulls, &val_count); |
| |
| if (key_count != val_count) |
| ereport(ERROR, |
| (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR), |
| errmsg("mismatched array dimensions"))); |
| |
| initStringInfo(&result); |
| |
| appendStringInfoChar(&result, '{'); |
| |
| for (i = 0; i < key_count; ++i) |
| { |
| if (key_nulls[i]) |
| ereport(ERROR, |
| (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED), |
| errmsg("null value not allowed for object key"))); |
| |
| v = TextDatumGetCString(key_datums[i]); |
| if (i > 0) |
| appendStringInfoString(&result, ", "); |
| escape_json(&result, v); |
| appendStringInfoString(&result, " : "); |
| pfree(v); |
| if (val_nulls[i]) |
| appendStringInfoString(&result, "null"); |
| else |
| { |
| v = TextDatumGetCString(val_datums[i]); |
| escape_json(&result, v); |
| pfree(v); |
| } |
| } |
| |
| appendStringInfoChar(&result, '}'); |
| |
| pfree(key_datums); |
| pfree(key_nulls); |
| pfree(val_datums); |
| pfree(val_nulls); |
| |
| rval = cstring_to_text_with_len(result.data, result.len); |
| pfree(result.data); |
| |
| PG_RETURN_TEXT_P(rval); |
| } |
| |
| |
| /* |
| * Produce a JSON string literal, properly escaping characters in the text. |
| */ |
| void |
| escape_json(StringInfo buf, const char *str) |
| { |
| const char *p; |
| |
| appendStringInfoCharMacro(buf, '"'); |
| for (p = str; *p; p++) |
| { |
| switch (*p) |
| { |
| case '\b': |
| appendStringInfoString(buf, "\\b"); |
| break; |
| case '\f': |
| appendStringInfoString(buf, "\\f"); |
| break; |
| case '\n': |
| appendStringInfoString(buf, "\\n"); |
| break; |
| case '\r': |
| appendStringInfoString(buf, "\\r"); |
| break; |
| case '\t': |
| appendStringInfoString(buf, "\\t"); |
| break; |
| case '"': |
| appendStringInfoString(buf, "\\\""); |
| break; |
| case '\\': |
| appendStringInfoString(buf, "\\\\"); |
| break; |
| default: |
| if ((unsigned char) *p < ' ') |
| appendStringInfo(buf, "\\u%04x", (int) *p); |
| else |
| appendStringInfoCharMacro(buf, *p); |
| break; |
| } |
| } |
| appendStringInfoCharMacro(buf, '"'); |
| } |
| |
| /* |
| * SQL function json_typeof(json) -> text |
| * |
| * Returns the type of the outermost JSON value as TEXT. Possible types are |
| * "object", "array", "string", "number", "boolean", and "null". |
| * |
| * Performs a single call to json_lex() to get the first token of the supplied |
| * value. This initial token uniquely determines the value's type. As our |
| * input must already have been validated by json_in() or json_recv(), the |
| * initial token should never be JSON_TOKEN_OBJECT_END, JSON_TOKEN_ARRAY_END, |
| * JSON_TOKEN_COLON, JSON_TOKEN_COMMA, or JSON_TOKEN_END. |
| */ |
| Datum |
| json_typeof(PG_FUNCTION_ARGS) |
| { |
| text *json; |
| |
| JsonLexContext *lex; |
| JsonTokenType tok; |
| char *type; |
| JsonParseErrorType result; |
| |
| json = PG_GETARG_TEXT_PP(0); |
| lex = makeJsonLexContext(json, false); |
| |
| /* Lex exactly one token from the input and check its type. */ |
| result = json_lex(lex); |
| if (result != JSON_SUCCESS) |
| json_ereport_error(result, lex); |
| tok = lex->token_type; |
| switch (tok) |
| { |
| case JSON_TOKEN_OBJECT_START: |
| type = "object"; |
| break; |
| case JSON_TOKEN_ARRAY_START: |
| type = "array"; |
| break; |
| case JSON_TOKEN_STRING: |
| type = "string"; |
| break; |
| case JSON_TOKEN_NUMBER: |
| type = "number"; |
| break; |
| case JSON_TOKEN_TRUE: |
| case JSON_TOKEN_FALSE: |
| type = "boolean"; |
| break; |
| case JSON_TOKEN_NULL: |
| type = "null"; |
| break; |
| default: |
| elog(ERROR, "unexpected json token: %d", tok); |
| } |
| |
| PG_RETURN_TEXT_P(cstring_to_text(type)); |
| } |