diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000000..c5e05f5950
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,33 @@
+sudo: required
+addons:
+  apt:
+    packages:
+      - gdb
+      - lcov
+      - libipc-run-perl
+      - libperl-dev
+      - libpython-dev
+      - tcl-dev
+      - libldap2-dev
+      - libicu-dev
+      - docbook
+      - docbook-dsssl
+      - docbook-xsl
+      - libxml2-utils
+      - openjade1.3
+      - opensp
+      - xsltproc
+language: c
+cache: ccache
+before_install:
+  - echo '/tmp/%e-%s-%p.core' | sudo tee /proc/sys/kernel/core_pattern
+script: ./configure --enable-debug --enable-cassert --enable-tap-tests --with-tcl --with-python --with-perl --with-ldap --with-icu && make -j4 all contrib docs && make check-world
+after_failure:
+  - for f in $(find . -name regression.diffs) ; do echo "========= Contents of $f" ; head -1000 $f ; done
+  - |
+    for corefile in $(find /tmp/ -name '*.core' 2>/dev/null) ; do
+      binary=$(gdb -quiet -core $corefile -batch -ex 'info auxv' | grep AT_EXECFN | perl -pe "s/^.*\"(.*)\"\$/\$1/g")
+      echo dumping $corefile for $binary
+      gdb --batch --quiet -ex "thread apply all bt full" -ex "quit" $binary $corefile
+    done
+
diff --git a/appveyor.yml b/appveyor.yml
new file mode 100644
index 0000000000..04e041ad63
--- /dev/null
+++ b/appveyor.yml
@@ -0,0 +1,25 @@
+# appveyor.yml
+install:
+  - cinst winflexbison
+  - '"C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x64'
+
+before_build:
+  - rename c:\ProgramData\chocolatey\bin\win_flex.exe flex.exe
+  - rename c:\ProgramData\chocolatey\bin\win_bison.exe bison.exe
+  - perl buildsetup.pl
+
+build:
+  project: pgsql.sln
+
+before_test:
+  - 'perl -p -i.bak -e "s/^test: tablespace/#test: tablespace/" src/test/regress/serial_schedule'
+  - 'perl -p -i.bak -e "s/^test: tablespace/#test: tablespace/" src/test/regress/parallel_schedule'
+
+test_script:
+  - cd src\tools\msvc && vcregress check
+
+on_failure:
+  - perl dumpregr.pl
+
+configuration:
+  - Release
diff --git a/buildsetup.pl b/buildsetup.pl
new file mode 100644
index 0000000000..23df2fb1aa
--- /dev/null
+++ b/buildsetup.pl
@@ -0,0 +1,38 @@
+# first part of postgres build.pl, just doesn't run msbuild
+
+use strict;
+
+BEGIN
+{
+
+    chdir("../../..") if (-d "../msvc" && -d "../../../src");
+
+}
+
+use lib "src/tools/msvc";
+
+use Cwd;
+
+use Mkvcbuild;
+
+# buildenv.pl is for specifying the build environment settings
+# it should contain lines like:
+# $ENV{PATH} = "c:/path/to/bison/bin;$ENV{PATH}";
+
+if (-e "src/tools/msvc/buildenv.pl")
+{
+    do "src/tools/msvc/buildenv.pl";
+}
+elsif (-e "./buildenv.pl")
+{
+    do "./buildenv.pl";
+}
+
+# set up the project
+our $config;
+do "config_default.pl";
+do "config.pl" if (-f "src/tools/msvc/config.pl");
+
+# print "PATH: $_\n" foreach (split(';',$ENV{PATH}));
+
+Mkvcbuild::mkvcbuild($config);
diff --git a/dumpregr.pl b/dumpregr.pl
new file mode 100644
index 0000000000..08d276b52d
--- /dev/null
+++ b/dumpregr.pl
@@ -0,0 +1,20 @@
+use strict;
+use warnings FATAL => qw(all);
+
+use File::Find;
+
+my $Target = "regression.diffs";
+
+find(\&dump, "src");
+
+sub dump {
+    if ($_ eq $Target) {
+        my $path = $File::Find::name;
+        print "=== \$path ===\\n";
+        open(my $fh, "<", $_) || die "wtf";
+        while (my $line = <$fh>) {
+            print $line;
+            if ($.
> 1000) { last; } + } + } +} diff --git a/src/backend/Makefile b/src/backend/Makefile index 3a58bf6685..92c881af8a 100644 --- a/src/backend/Makefile +++ b/src/backend/Makefile @@ -136,6 +136,9 @@ parser/gram.h: parser/gram.y storage/lmgr/lwlocknames.h: storage/lmgr/generate-lwlocknames.pl storage/lmgr/lwlocknames.txt $(MAKE) -C storage/lmgr lwlocknames.h lwlocknames.c +utils/adt/jsonpath_gram.h: utils/adt/jsonpath_gram.y + $(MAKE) -C utils/adt jsonpath_gram.h + # run this unconditionally to avoid needing to know its dependencies here: submake-catalog-headers: $(MAKE) -C catalog distprep generated-header-symlinks @@ -159,7 +162,7 @@ submake-utils-headers: .PHONY: generated-headers -generated-headers: $(top_builddir)/src/include/parser/gram.h $(top_builddir)/src/include/storage/lwlocknames.h submake-catalog-headers submake-utils-headers +generated-headers: $(top_builddir)/src/include/parser/gram.h $(top_builddir)/src/include/storage/lwlocknames.h $(top_builddir)/src/include/utils/jsonpath_gram.h submake-catalog-headers submake-utils-headers $(top_builddir)/src/include/parser/gram.h: parser/gram.h prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \ @@ -171,6 +174,10 @@ $(top_builddir)/src/include/storage/lwlocknames.h: storage/lmgr/lwlocknames.h cd '$(dir $@)' && rm -f $(notdir $@) && \ $(LN_S) "$$prereqdir/$(notdir $<)" . +$(top_builddir)/src/include/utils/jsonpath_gram.h: utils/adt/jsonpath_gram.h + prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \ + cd '$(dir $@)' && rm -f $(notdir $@) && \ + $(LN_S) "$$prereqdir/$(notdir $<)" . utils/probes.o: utils/probes.d $(SUBDIROBJS) $(DTRACE) $(DTRACEFLAGS) -C -G -s $(call expand_subsys,$^) -o $@ @@ -186,6 +193,7 @@ distprep: $(MAKE) -C replication repl_gram.c repl_scanner.c syncrep_gram.c syncrep_scanner.c $(MAKE) -C storage/lmgr lwlocknames.h lwlocknames.c $(MAKE) -C utils distprep + $(MAKE) -C utils/adt jsonpath_gram.c jsonpath_gram.h jsonpath_scan.c $(MAKE) -C utils/misc guc-file.c $(MAKE) -C utils/sort qsort_tuple.c @@ -310,6 +318,7 @@ maintainer-clean: distclean storage/lmgr/lwlocknames.c \ storage/lmgr/lwlocknames.h \ utils/misc/guc-file.c \ + utils/adt/jsonpath_gram.h \ utils/sort/qsort_tuple.c diff --git a/src/backend/lib/stringinfo.c b/src/backend/lib/stringinfo.c index df7e01f76d..fffc791ea8 100644 --- a/src/backend/lib/stringinfo.c +++ b/src/backend/lib/stringinfo.c @@ -312,3 +312,24 @@ enlargeStringInfo(StringInfo str, int needed) str->maxlen = newlen; } + +/* + * alignStringInfoInt - aling StringInfo to int by adding + * zero padding bytes + */ +void +alignStringInfoInt(StringInfo buf) +{ + switch(INTALIGN(buf->len) - buf->len) + { + case 3: + appendStringInfoCharMacro(buf, 0); + case 2: + appendStringInfoCharMacro(buf, 0); + case 1: + appendStringInfoCharMacro(buf, 0); + default: + break; + } +} + diff --git a/src/backend/utils/adt/.gitignore b/src/backend/utils/adt/.gitignore new file mode 100644 index 0000000000..7fab054407 --- /dev/null +++ b/src/backend/utils/adt/.gitignore @@ -0,0 +1,3 @@ +/jsonpath_gram.h +/jsonpath_gram.c +/jsonpath_scan.c diff --git a/src/backend/utils/adt/Makefile b/src/backend/utils/adt/Makefile index 20eead1798..8db7f98cf6 100644 --- a/src/backend/utils/adt/Makefile +++ b/src/backend/utils/adt/Makefile @@ -17,7 +17,8 @@ OBJS = acl.o amutils.o arrayfuncs.o array_expanded.o array_selfuncs.o \ float.o format_type.o formatting.o genfile.o \ geo_ops.o geo_selfuncs.o geo_spgist.o inet_cidr_ntop.o inet_net_pton.o \ int.o int8.o json.o jsonb.o jsonb_gin.o jsonb_op.o jsonb_util.o \ - jsonfuncs.o like.o lockfuncs.o 
mac.o mac8.o misc.o name.o \ + jsonfuncs.o jsonpath_gram.o jsonpath_scan.o jsonpath.o jsonpath_exec.o jsonpath_json.o \ + like.o lockfuncs.o mac.o mac8.o misc.o name.o \ network.o network_gist.o network_selfuncs.o network_spgist.o \ numeric.o numutils.o oid.o oracle_compat.o \ orderedsetaggs.o partitionfuncs.o pg_locale.o pg_lsn.o \ @@ -32,6 +33,23 @@ OBJS = acl.o amutils.o arrayfuncs.o array_expanded.o array_selfuncs.o \ txid.o uuid.o varbit.o varchar.o varlena.o version.o \ windowfuncs.o xid.o xml.o +jsonpath_gram.c: BISONFLAGS += -d + +jsonpath_scan.c: FLEXFLAGS = -CF -p -p + +jsonpath_gram.h: jsonpath_gram.c ; + +# Force these dependencies to be known even without dependency info built: +jsonpath_gram.o jsonpath_scan.o jsonpath_parser.o: jsonpath_gram.h + +jsonpath_json.o: jsonpath_exec.c + +# jsonpath_gram.c, jsonpath_gram.h, and jsonpath_scan.c are in the distribution +# tarball, so they are not cleaned here. +clean distclean maintainer-clean: + rm -f lex.backup + + like.o: like.c like_match.c varlena.o: varlena.c levenshtein.c diff --git a/src/backend/utils/adt/date.c b/src/backend/utils/adt/date.c index cb6b5e55bf..7d5c8ac6c8 100644 --- a/src/backend/utils/adt/date.c +++ b/src/backend/utils/adt/date.c @@ -40,11 +40,6 @@ #endif -static int tm2time(struct pg_tm *tm, fsec_t fsec, TimeADT *result); -static int tm2timetz(struct pg_tm *tm, fsec_t fsec, int tz, TimeTzADT *result); -static void AdjustTimeForTypmod(TimeADT *time, int32 typmod); - - /* common code for timetypmodin and timetztypmodin */ static int32 anytime_typmodin(bool istz, ArrayType *ta) @@ -1210,7 +1205,7 @@ time_in(PG_FUNCTION_ARGS) /* tm2time() * Convert a tm structure to a time data type. */ -static int +int tm2time(struct pg_tm *tm, fsec_t fsec, TimeADT *result) { *result = ((((tm->tm_hour * MINS_PER_HOUR + tm->tm_min) * SECS_PER_MINUTE) + tm->tm_sec) @@ -1376,7 +1371,7 @@ time_scale(PG_FUNCTION_ARGS) * have a fundamental tie together but rather a coincidence of * implementation. - thomas */ -static void +void AdjustTimeForTypmod(TimeADT *time, int32 typmod) { static const int64 TimeScales[MAX_TIME_PRECISION + 1] = { @@ -1954,7 +1949,7 @@ time_part(PG_FUNCTION_ARGS) /* tm2timetz() * Convert a tm structure to a time data type. */ -static int +int tm2timetz(struct pg_tm *tm, fsec_t fsec, int tz, TimeTzADT *result) { result->time = ((((tm->tm_hour * MINS_PER_HOUR + tm->tm_min) * SECS_PER_MINUTE) + tm->tm_sec) * diff --git a/src/backend/utils/adt/float.c b/src/backend/utils/adt/float.c index c91bb1a305..391b323acf 100644 --- a/src/backend/utils/adt/float.c +++ b/src/backend/utils/adt/float.c @@ -287,7 +287,7 @@ float8in(PG_FUNCTION_ARGS) } /* - * float8in_internal - guts of float8in() + * float8in_internal_safe - guts of float8in() * * This is exposed for use by functions that want a reasonably * platform-independent way of inputting doubles. The behavior is @@ -305,8 +305,8 @@ float8in(PG_FUNCTION_ARGS) * unreasonable amount of extra casting both here and in callers, so we don't. */ double -float8in_internal(char *num, char **endptr_p, - const char *type_name, const char *orig_string) +float8in_internal_safe(char *num, char **endptr_p, const char *type_name, + const char *orig_string, ErrorData **edata) { double val; char *endptr; @@ -320,10 +320,13 @@ float8in_internal(char *num, char **endptr_p, * strtod() on different platforms. 
*/ if (*num == '\0') - ereport(ERROR, - (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), - errmsg("invalid input syntax for type %s: \"%s\"", - type_name, orig_string))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type %s: \"%s\"", + type_name, orig_string))); + return 0; + } errno = 0; val = strtod(num, &endptr); @@ -396,17 +399,21 @@ float8in_internal(char *num, char **endptr_p, char *errnumber = pstrdup(num); errnumber[endptr - num] = '\0'; - ereport(ERROR, - (errcode(ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE), - errmsg("\"%s\" is out of range for type double precision", - errnumber))); + ereport_safe(edata, ERROR, + (errcode(ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE), + errmsg("\"%s\" is out of range for type double precision", + errnumber))); + return 0; } } else - ereport(ERROR, - (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), - errmsg("invalid input syntax for type %s: \"%s\"", - type_name, orig_string))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type %s: \"%s\"", + type_name, orig_string))); + return 0; + } } #ifdef HAVE_BUGGY_SOLARIS_STRTOD else @@ -429,10 +436,13 @@ float8in_internal(char *num, char **endptr_p, if (endptr_p) *endptr_p = endptr; else if (*endptr != '\0') - ereport(ERROR, - (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), - errmsg("invalid input syntax for type %s: \"%s\"", - type_name, orig_string))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type %s: \"%s\"", + type_name, orig_string))); + return 0; + } return val; } diff --git a/src/backend/utils/adt/formatting.c b/src/backend/utils/adt/formatting.c index 2923afe7b6..ec570f2684 100644 --- a/src/backend/utils/adt/formatting.c +++ b/src/backend/utils/adt/formatting.c @@ -86,6 +86,7 @@ #endif #include "catalog/pg_collation.h" +#include "catalog/pg_type.h" #include "mb/pg_wchar.h" #include "utils/builtins.h" #include "utils/date.h" @@ -436,7 +437,8 @@ typedef struct clock, /* 12 or 24 hour clock? 
*/ tzsign, /* +1, -1 or 0 if timezone info is absent */ tzh, - tzm; + tzm, + ff; /* fractional precision */ } TmFromChar; #define ZERO_tmfc(_X) memset(_X, 0, sizeof(TmFromChar)) @@ -596,6 +598,15 @@ typedef enum DCH_Day, DCH_Dy, DCH_D, + DCH_FF1, + DCH_FF2, + DCH_FF3, + DCH_FF4, + DCH_FF5, + DCH_FF6, + DCH_FF7, + DCH_FF8, + DCH_FF9, DCH_FX, /* global suffix */ DCH_HH24, DCH_HH12, @@ -645,6 +656,15 @@ typedef enum DCH_dd, DCH_dy, DCH_d, + DCH_ff1, + DCH_ff2, + DCH_ff3, + DCH_ff4, + DCH_ff5, + DCH_ff6, + DCH_ff7, + DCH_ff8, + DCH_ff9, DCH_fx, DCH_hh24, DCH_hh12, @@ -745,7 +765,16 @@ static const KeyWord DCH_keywords[] = { {"Day", 3, DCH_Day, false, FROM_CHAR_DATE_NONE}, {"Dy", 2, DCH_Dy, false, FROM_CHAR_DATE_NONE}, {"D", 1, DCH_D, true, FROM_CHAR_DATE_GREGORIAN}, - {"FX", 2, DCH_FX, false, FROM_CHAR_DATE_NONE}, /* F */ + {"FF1", 3, DCH_FF1, false, FROM_CHAR_DATE_NONE}, /* F */ + {"FF2", 3, DCH_FF2, false, FROM_CHAR_DATE_NONE}, + {"FF3", 3, DCH_FF3, false, FROM_CHAR_DATE_NONE}, + {"FF4", 3, DCH_FF4, false, FROM_CHAR_DATE_NONE}, + {"FF5", 3, DCH_FF5, false, FROM_CHAR_DATE_NONE}, + {"FF6", 3, DCH_FF6, false, FROM_CHAR_DATE_NONE}, + {"FF7", 3, DCH_FF7, false, FROM_CHAR_DATE_NONE}, + {"FF8", 3, DCH_FF8, false, FROM_CHAR_DATE_NONE}, + {"FF9", 3, DCH_FF9, false, FROM_CHAR_DATE_NONE}, + {"FX", 2, DCH_FX, false, FROM_CHAR_DATE_NONE}, {"HH24", 4, DCH_HH24, true, FROM_CHAR_DATE_NONE}, /* H */ {"HH12", 4, DCH_HH12, true, FROM_CHAR_DATE_NONE}, {"HH", 2, DCH_HH, true, FROM_CHAR_DATE_NONE}, @@ -794,7 +823,16 @@ static const KeyWord DCH_keywords[] = { {"dd", 2, DCH_DD, true, FROM_CHAR_DATE_GREGORIAN}, {"dy", 2, DCH_dy, false, FROM_CHAR_DATE_NONE}, {"d", 1, DCH_D, true, FROM_CHAR_DATE_GREGORIAN}, - {"fx", 2, DCH_FX, false, FROM_CHAR_DATE_NONE}, /* f */ + {"ff1", 3, DCH_FF1, false, FROM_CHAR_DATE_NONE}, /* F */ + {"ff2", 3, DCH_FF2, false, FROM_CHAR_DATE_NONE}, + {"ff3", 3, DCH_FF3, false, FROM_CHAR_DATE_NONE}, + {"ff4", 3, DCH_FF4, false, FROM_CHAR_DATE_NONE}, + {"ff5", 3, DCH_FF5, false, FROM_CHAR_DATE_NONE}, + {"ff6", 3, DCH_FF6, false, FROM_CHAR_DATE_NONE}, + {"ff7", 3, DCH_FF7, false, FROM_CHAR_DATE_NONE}, + {"ff8", 3, DCH_FF8, false, FROM_CHAR_DATE_NONE}, + {"ff9", 3, DCH_FF9, false, FROM_CHAR_DATE_NONE}, + {"fx", 2, DCH_FX, false, FROM_CHAR_DATE_NONE}, {"hh24", 4, DCH_HH24, true, FROM_CHAR_DATE_NONE}, /* h */ {"hh12", 4, DCH_HH12, true, FROM_CHAR_DATE_NONE}, {"hh", 2, DCH_HH, true, FROM_CHAR_DATE_NONE}, @@ -895,10 +933,10 @@ static const int DCH_index[KeyWord_INDEX_SIZE] = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, DCH_A_D, DCH_B_C, DCH_CC, DCH_DAY, -1, - DCH_FX, -1, DCH_HH24, DCH_IDDD, DCH_J, -1, -1, DCH_MI, -1, DCH_OF, + DCH_FF1, -1, DCH_HH24, DCH_IDDD, DCH_J, -1, -1, DCH_MI, -1, DCH_OF, DCH_P_M, DCH_Q, DCH_RM, DCH_SSSS, DCH_TZH, DCH_US, -1, DCH_WW, -1, DCH_Y_YYY, -1, -1, -1, -1, -1, -1, -1, DCH_a_d, DCH_b_c, DCH_cc, - DCH_day, -1, DCH_fx, -1, DCH_hh24, DCH_iddd, DCH_j, -1, -1, DCH_mi, + DCH_day, -1, DCH_ff1, -1, DCH_hh24, DCH_iddd, DCH_j, -1, -1, DCH_mi, -1, -1, DCH_p_m, DCH_q, DCH_rm, DCH_ssss, DCH_tz, DCH_us, -1, DCH_ww, -1, DCH_y_yyy, -1, -1, -1, -1 @@ -962,6 +1000,10 @@ typedef struct NUMProc *L_currency_symbol; } NUMProc; +/* Return flags for DCH_from_char() */ +#define DCH_DATED 0x01 +#define DCH_TIMED 0x02 +#define DCH_ZONED 0x04 /* ---------- * Functions @@ -977,7 +1019,8 @@ static void parse_format(FormatNode *node, const char *str, const KeyWord *kw, static void DCH_to_char(FormatNode *node, bool is_interval, TmToChar *in, char 
*out, Oid collid); -static void DCH_from_char(FormatNode *node, char *in, TmFromChar *out); +static void DCH_from_char(FormatNode *node, char *in, TmFromChar *out, + bool strict); #ifdef DEBUG_TO_FROM_CHAR static void dump_index(const KeyWord *k, const int *index); @@ -994,8 +1037,8 @@ static int from_char_parse_int_len(int *dest, char **src, const int len, FormatN static int from_char_parse_int(int *dest, char **src, FormatNode *node); static int seq_search(char *name, const char *const *array, int type, int max, int *len); static int from_char_seq_search(int *dest, char **src, const char *const *array, int type, int max, FormatNode *node); -static void do_to_timestamp(text *date_txt, text *fmt, - struct pg_tm *tm, fsec_t *fsec); +static void do_to_timestamp(text *date_txt, const char *fmt, int fmt_len, + bool strict, struct pg_tm *tm, fsec_t *fsec, int *fprec, int *flags); static char *fill_str(char *str, int c, int max); static FormatNode *NUM_cache(int len, NUMDesc *Num, text *pars_str, bool *shouldFree); static char *int_to_roman(int number); @@ -2514,17 +2557,39 @@ DCH_to_char(FormatNode *node, bool is_interval, TmToChar *in, char *out, Oid col str_numth(s, s, S_TH_TYPE(n->suffix)); s += strlen(s); break; - case DCH_MS: /* millisecond */ - sprintf(s, "%03d", (int) (in->fsec / INT64CONST(1000))); - if (S_THth(n->suffix)) - str_numth(s, s, S_TH_TYPE(n->suffix)); +#define DCH_to_char_fsec(frac_fmt, frac_val) \ + sprintf(s, frac_fmt, (int) (frac_val)); \ + if (S_THth(n->suffix)) \ + str_numth(s, s, S_TH_TYPE(n->suffix)); \ s += strlen(s); + case DCH_FF1: /* decisecond */ + DCH_to_char_fsec("%01d", in->fsec / INT64CONST(100000)); + break; + case DCH_FF2: /* centisecond */ + DCH_to_char_fsec("%02d", in->fsec / INT64CONST(10000)); + break; + case DCH_FF3: + case DCH_MS: /* millisecond */ + DCH_to_char_fsec("%03d", in->fsec / INT64CONST(1000)); + break; + case DCH_FF4: + DCH_to_char_fsec("%04d", in->fsec / INT64CONST(100)); break; + case DCH_FF5: + DCH_to_char_fsec("%05d", in->fsec / INT64CONST(10)); + break; + case DCH_FF6: case DCH_US: /* microsecond */ - sprintf(s, "%06d", (int) in->fsec); - if (S_THth(n->suffix)) - str_numth(s, s, S_TH_TYPE(n->suffix)); - s += strlen(s); + DCH_to_char_fsec("%06d", in->fsec); + break; +#undef DCH_to_char_fsec + case DCH_FF7: + case DCH_FF8: + case DCH_FF9: + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("datetime formatting field \"%s\" is not supported", + n->key->name))); break; case DCH_SSSS: sprintf(s, "%d", tm->tm_hour * SECS_PER_HOUR + @@ -3007,13 +3072,15 @@ DCH_to_char(FormatNode *node, bool is_interval, TmToChar *in, char *out, Oid col /* ---------- * Process a string as denoted by a list of FormatNodes. * The TmFromChar struct pointed to by 'out' is populated with the results. + * 'strict' enables error reporting when trailing input characters or format + * nodes remain after parsing. * * Note: we currently don't have any to_interval() function, so there * is no need here for INVALID_FOR_INTERVAL checks. 
* ---------- */ static void -DCH_from_char(FormatNode *node, char *in, TmFromChar *out) +DCH_from_char(FormatNode *node, char *in, TmFromChar *out, bool strict) { FormatNode *n; char *s; @@ -3149,8 +3216,18 @@ DCH_from_char(FormatNode *node, char *in, TmFromChar *out) SKIP_THth(s, n->suffix); break; + case DCH_FF1: + case DCH_FF2: + case DCH_FF3: + case DCH_FF4: + case DCH_FF5: + case DCH_FF6: + out->ff = n->key->id - DCH_FF1 + 1; + /* fall through */ case DCH_US: /* microsecond */ - len = from_char_parse_int_len(&out->us, &s, 6, n); + len = from_char_parse_int_len(&out->us, &s, + n->key->id == DCH_US ? 6 : + out->ff, n); out->us *= len == 1 ? 100000 : len == 2 ? 10000 : @@ -3164,6 +3241,14 @@ DCH_from_char(FormatNode *node, char *in, TmFromChar *out) from_char_parse_int(&out->ssss, &s, n); SKIP_THth(s, n->suffix); break; + case DCH_FF7: + case DCH_FF8: + case DCH_FF9: + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("datetime formatting field \"%s\" is not supported", + n->key->name))); + break; case DCH_tz: case DCH_TZ: case DCH_OF: @@ -3374,6 +3459,23 @@ DCH_from_char(FormatNode *node, char *in, TmFromChar *out) } } } + + if (strict) + { + if (n->type != NODE_TYPE_END) + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("input string is too short for datetime format"))); + + while (*s == ' ') + s++; + + if (*s != '\0') + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("trailing characters remain in input string after " + "datetime format"))); + } } /* @@ -3394,6 +3496,112 @@ DCH_prevent_counter_overflow(void) } } +/* Get mask of date/time/zone formatting components present in format nodes. */ +static int +DCH_datetime_type(FormatNode *node) +{ + FormatNode *n; + int flags = 0; + + for (n = node; n->type != NODE_TYPE_END; n++) + { + if (n->type != NODE_TYPE_ACTION) + continue; + + switch (n->key->id) + { + case DCH_FX: + break; + case DCH_A_M: + case DCH_P_M: + case DCH_a_m: + case DCH_p_m: + case DCH_AM: + case DCH_PM: + case DCH_am: + case DCH_pm: + case DCH_HH: + case DCH_HH12: + case DCH_HH24: + case DCH_MI: + case DCH_SS: + case DCH_MS: /* millisecond */ + case DCH_US: /* microsecond */ + case DCH_FF1: + case DCH_FF2: + case DCH_FF3: + case DCH_FF4: + case DCH_FF5: + case DCH_FF6: + case DCH_FF7: + case DCH_FF8: + case DCH_FF9: + case DCH_SSSS: + flags |= DCH_TIMED; + break; + case DCH_tz: + case DCH_TZ: + case DCH_OF: + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("formatting field \"%s\" is only supported in to_char", + n->key->name))); + flags |= DCH_ZONED; + break; + case DCH_TZH: + case DCH_TZM: + flags |= DCH_ZONED; + break; + case DCH_A_D: + case DCH_B_C: + case DCH_a_d: + case DCH_b_c: + case DCH_AD: + case DCH_BC: + case DCH_ad: + case DCH_bc: + case DCH_MONTH: + case DCH_Month: + case DCH_month: + case DCH_MON: + case DCH_Mon: + case DCH_mon: + case DCH_MM: + case DCH_DAY: + case DCH_Day: + case DCH_day: + case DCH_DY: + case DCH_Dy: + case DCH_dy: + case DCH_DDD: + case DCH_IDDD: + case DCH_DD: + case DCH_D: + case DCH_ID: + case DCH_WW: + case DCH_Q: + case DCH_CC: + case DCH_Y_YYY: + case DCH_YYYY: + case DCH_IYYY: + case DCH_YYY: + case DCH_IYY: + case DCH_YY: + case DCH_IY: + case DCH_Y: + case DCH_I: + case DCH_RM: + case DCH_rm: + case DCH_W: + case DCH_J: + flags |= DCH_DATED; + break; + } + } + + return flags; +} + /* select a DCHCacheEntry to hold the given format picture */ static DCHCacheEntry * DCH_cache_getnew(const char *str) @@ -3682,8 +3890,10 @@ 
to_timestamp(PG_FUNCTION_ARGS) int tz; struct pg_tm tm; fsec_t fsec; + int fprec; - do_to_timestamp(date_txt, fmt, &tm, &fsec); + do_to_timestamp(date_txt, VARDATA(fmt), VARSIZE_ANY_EXHDR(fmt), false, + &tm, &fsec, &fprec, NULL); /* Use the specified time zone, if any. */ if (tm.tm_zone) @@ -3701,6 +3911,10 @@ to_timestamp(PG_FUNCTION_ARGS) (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), errmsg("timestamp out of range"))); + /* Use the specified fractional precision, if any. */ + if (fprec) + AdjustTimestampForTypmod(&result, fprec); + PG_RETURN_TIMESTAMP(result); } @@ -3718,7 +3932,8 @@ to_date(PG_FUNCTION_ARGS) struct pg_tm tm; fsec_t fsec; - do_to_timestamp(date_txt, fmt, &tm, &fsec); + do_to_timestamp(date_txt, VARDATA(fmt), VARSIZE_ANY_EXHDR(fmt), false, + &tm, &fsec, NULL, NULL); /* Prevent overflow in Julian-day routines */ if (!IS_VALID_JULIAN(tm.tm_year, tm.tm_mon, tm.tm_mday)) @@ -3739,11 +3954,176 @@ to_date(PG_FUNCTION_ARGS) PG_RETURN_DATEADT(result); } +/* + * Make datetime type from 'date_txt' which is formated at argument 'fmt'. + * Actual datatype (returned in 'typid', 'typmod') is determined by + * presence of date/time/zone components in the format string. + */ +Datum +to_datetime(text *date_txt, const char *fmt, int fmt_len, char *tzname, + bool strict, Oid *typid, int32 *typmod, int *tz) +{ + struct pg_tm tm; + fsec_t fsec; + int fprec = 0; + int flags; + + do_to_timestamp(date_txt, fmt, fmt_len, strict, &tm, &fsec, &fprec, &flags); + + *typmod = fprec ? fprec : -1; /* fractional part precision */ + *tz = 0; + + if (flags & DCH_DATED) + { + if (flags & DCH_TIMED) + { + if (flags & DCH_ZONED) + { + TimestampTz result; + + if (tm.tm_zone) + tzname = (char *) tm.tm_zone; + + if (tzname) + { + int dterr = DecodeTimezone(tzname, tz); + + if (dterr) + DateTimeParseError(dterr, tzname, "timestamptz"); + } + else + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("missing time-zone in timestamptz input string"))); + + *tz = DetermineTimeZoneOffset(&tm, session_timezone); + } + + if (tm2timestamp(&tm, fsec, tz, &result) != 0) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("timestamptz out of range"))); + + AdjustTimestampForTypmod(&result, *typmod); + + *typid = TIMESTAMPTZOID; + return TimestampTzGetDatum(result); + } + else + { + Timestamp result; + + if (tm2timestamp(&tm, fsec, NULL, &result) != 0) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("timestamp out of range"))); + + AdjustTimestampForTypmod(&result, *typmod); + + *typid = TIMESTAMPOID; + return TimestampGetDatum(result); + } + } + else + { + if (flags & DCH_ZONED) + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("datetime format is zoned but not timed"))); + } + else + { + DateADT result; + + /* Prevent overflow in Julian-day routines */ + if (!IS_VALID_JULIAN(tm.tm_year, tm.tm_mon, tm.tm_mday)) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("date out of range: \"%s\"", + text_to_cstring(date_txt)))); + + result = date2j(tm.tm_year, tm.tm_mon, tm.tm_mday) - + POSTGRES_EPOCH_JDATE; + + /* Now check for just-out-of-range dates */ + if (!IS_VALID_DATE(result)) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("date out of range: \"%s\"", + text_to_cstring(date_txt)))); + + *typid = DATEOID; + return DateADTGetDatum(result); + } + } + } + else if (flags & DCH_TIMED) + { + if (flags & DCH_ZONED) + { + TimeTzADT *result = palloc(sizeof(TimeTzADT)); + + if 
(tm.tm_zone) + tzname = (char *) tm.tm_zone; + + if (tzname) + { + int dterr = DecodeTimezone(tzname, tz); + + if (dterr) + DateTimeParseError(dterr, tzname, "timetz"); + } + else + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("missing time-zone in timestamptz input string"))); + + *tz = DetermineTimeZoneOffset(&tm, session_timezone); + } + + if (tm2timetz(&tm, fsec, *tz, result) != 0) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("timetz out of range"))); + + AdjustTimeForTypmod(&result->time, *typmod); + + *typid = TIMETZOID; + return TimeTzADTPGetDatum(result); + } + else + { + TimeADT result; + + if (tm2time(&tm, fsec, &result) != 0) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("time out of range"))); + + AdjustTimeForTypmod(&result, *typmod); + + *typid = TIMEOID; + return TimeADTGetDatum(result); + } + } + else + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("datetime format is not dated and not timed"))); + } + + return (Datum) 0; +} + /* * do_to_timestamp: shared code for to_timestamp and to_date * * Parse the 'date_txt' according to 'fmt', return results as a struct pg_tm - * and fractional seconds. + * and fractional seconds and fractional precision. * * We parse 'fmt' into a list of FormatNodes, which is then passed to * DCH_from_char to populate a TmFromChar with the parsed contents of @@ -3751,14 +4131,20 @@ to_date(PG_FUNCTION_ARGS) * * The TmFromChar is then analysed and converted into the final results in * struct 'tm' and 'fsec'. + * + * Bit mask of date/time/zone formatting components found in 'fmt_str' is + * returned in 'flags'. + * + * 'strict' enables error reporting when trailing characters remain in input or + * format strings after parsing. 
*/ static void -do_to_timestamp(text *date_txt, text *fmt, - struct pg_tm *tm, fsec_t *fsec) +do_to_timestamp(text *date_txt, const char *fmt_str, int fmt_len, bool strict, + struct pg_tm *tm, fsec_t *fsec, int *fprec, int *flags) { FormatNode *format; TmFromChar tmfc; - int fmt_len; + char *fmt_tmp = NULL; char *date_str; int fmask; @@ -3769,15 +4155,15 @@ do_to_timestamp(text *date_txt, text *fmt, *fsec = 0; fmask = 0; /* bit mask for ValidateDate() */ - fmt_len = VARSIZE_ANY_EXHDR(fmt); + if (fmt_len < 0) /* zero-terminated */ + fmt_len = strlen(fmt_str); + else if (fmt_len > 0) /* not zero-terminated */ + fmt_str = fmt_tmp = pnstrdup(fmt_str, fmt_len); if (fmt_len) { - char *fmt_str; bool incache; - fmt_str = text_to_cstring(fmt); - if (fmt_len > DCH_CACHE_SIZE) { /* @@ -3807,13 +4193,18 @@ do_to_timestamp(text *date_txt, text *fmt, /* dump_index(DCH_keywords, DCH_index); */ #endif - DCH_from_char(format, date_str, &tmfc); + DCH_from_char(format, date_str, &tmfc, strict); + + if (flags) + *flags = DCH_datetime_type(format); - pfree(fmt_str); if (!incache) pfree(format); } + if (fmt_tmp) + pfree(fmt_tmp); + DEBUG_TMFC(&tmfc); /* @@ -3991,6 +4382,8 @@ do_to_timestamp(text *date_txt, text *fmt, *fsec += tmfc.ms * 1000; if (tmfc.us) *fsec += tmfc.us; + if (fprec) + *fprec = tmfc.ff; /* fractional precision, if specified */ /* Range-check date fields according to bit mask computed above */ if (fmask != 0) diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c index f47a498228..fb61d684bc 100644 --- a/src/backend/utils/adt/json.c +++ b/src/backend/utils/adt/json.c @@ -106,6 +106,9 @@ static void add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scalar); static text *catenate_stringinfo_string(StringInfo buffer, const char *addon); +static JsonIterator *JsonIteratorInitFromLex(JsonContainer *jc, + JsonLexContext *lex, JsonIterator *parent); + /* the null action object used for pure validation */ static JsonSemAction nullSemAction = { @@ -126,6 +129,22 @@ lex_peek(JsonLexContext *lex) return lex->token_type; } +static inline char * +lex_peek_value(JsonLexContext *lex) +{ + if (lex->token_type == JSON_TOKEN_STRING) + return lex->strval ? 
pstrdup(lex->strval->data) : NULL; + else + { + int len = (lex->token_terminator - lex->token_start); + char *tokstr = palloc(len + 1); + + memcpy(tokstr, lex->token_start, len); + tokstr[len] = '\0'; + return tokstr; + } +} + /* * lex_accept * @@ -141,22 +160,8 @@ lex_accept(JsonLexContext *lex, JsonTokenType token, char **lexeme) if (lex->token_type == token) { if (lexeme != NULL) - { - if (lex->token_type == JSON_TOKEN_STRING) - { - if (lex->strval != NULL) - *lexeme = pstrdup(lex->strval->data); - } - else - { - int len = (lex->token_terminator - lex->token_start); - char *tokstr = palloc(len + 1); + *lexeme = lex_peek_value(lex); - memcpy(tokstr, lex->token_start, len); - tokstr[len] = '\0'; - *lexeme = tokstr; - } - } json_lex(lex); return true; } @@ -1506,7 +1511,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result, { char buf[MAXDATELEN + 1]; - JsonEncodeDateTime(buf, val, DATEOID); + JsonEncodeDateTime(buf, val, DATEOID, NULL); appendStringInfo(result, "\"%s\"", buf); } break; @@ -1514,7 +1519,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result, { char buf[MAXDATELEN + 1]; - JsonEncodeDateTime(buf, val, TIMESTAMPOID); + JsonEncodeDateTime(buf, val, TIMESTAMPOID, NULL); appendStringInfo(result, "\"%s\"", buf); } break; @@ -1522,7 +1527,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result, { char buf[MAXDATELEN + 1]; - JsonEncodeDateTime(buf, val, TIMESTAMPTZOID); + JsonEncodeDateTime(buf, val, TIMESTAMPTZOID, NULL); appendStringInfo(result, "\"%s\"", buf); } break; @@ -1553,7 +1558,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result, * optionally preallocated buffer 'buf'. */ char * -JsonEncodeDateTime(char *buf, Datum value, Oid typid) +JsonEncodeDateTime(char *buf, Datum value, Oid typid, int *tzp) { if (!buf) buf = palloc(MAXDATELEN + 1); @@ -1630,11 +1635,30 @@ JsonEncodeDateTime(char *buf, Datum value, Oid typid) const char *tzn = NULL; timestamp = DatumGetTimestampTz(value); + + /* + * If time-zone is specified, we apply a time-zone shift, + * convert timestamptz to pg_tm as if it was without time-zone, + * and then use specified time-zone for encoding timestamp + * into a string. + */ + if (tzp) + { + tz = *tzp; + timestamp -= (TimestampTz) tz * USECS_PER_SEC; + } + /* Same as timestamptz_out(), but forcing DateStyle */ if (TIMESTAMP_NOT_FINITE(timestamp)) EncodeSpecialTimestamp(timestamp, buf); - else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0) + else if (timestamp2tm(timestamp, tzp ? NULL : &tz, &tm, &fsec, + tzp ? NULL : &tzn, NULL) == 0) + { + if (tzp) + tm.tm_isdst = 1; /* set time-zone presence flag */ + EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf); + } else ereport(ERROR, (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), @@ -2553,3 +2577,804 @@ json_typeof(PG_FUNCTION_ARGS) PG_RETURN_TEXT_P(cstring_to_text(type)); } + +static void +jsonInitContainer(JsonContainerData *jc, char *json, int len, int type, + int size) +{ + if (size < 0 || size > JB_CMASK) + size = JB_CMASK; /* unknown size */ + + jc->data = json; + jc->len = len; + jc->header = type | size; +} + +/* + * Initialize a JsonContainer from a text datum. + */ +static void +jsonInit(JsonContainerData *jc, Datum value) +{ + text *json = DatumGetTextP(value); + JsonLexContext *lex = makeJsonLexContext(json, false); + JsonTokenType tok; + int type; + int size = -1; + + /* Lex exactly one token from the input and check its type. 
*/ + json_lex(lex); + tok = lex_peek(lex); + + switch (tok) + { + case JSON_TOKEN_OBJECT_START: + type = JB_FOBJECT; + lex_accept(lex, tok, NULL); + if (lex_peek(lex) == JSON_TOKEN_OBJECT_END) + size = 0; + break; + case JSON_TOKEN_ARRAY_START: + type = JB_FARRAY; + lex_accept(lex, tok, NULL); + if (lex_peek(lex) == JSON_TOKEN_ARRAY_END) + size = 0; + break; + case JSON_TOKEN_STRING: + case JSON_TOKEN_NUMBER: + case JSON_TOKEN_TRUE: + case JSON_TOKEN_FALSE: + case JSON_TOKEN_NULL: + type = JB_FARRAY | JB_FSCALAR; + size = 1; + break; + default: + elog(ERROR, "unexpected json token: %d", tok); + type = jbvNull; + break; + } + + pfree(lex); + + jsonInitContainer(jc, VARDATA(json), VARSIZE(json) - VARHDRSZ, type, size); +} + +/* + * Wrap JSON text into a palloc()'d Json structure. + */ +Json * +JsonCreate(text *json) +{ + Json *res = palloc0(sizeof(*res)); + + jsonInit((JsonContainerData *) &res->root, PointerGetDatum(json)); + + return res; +} + +static bool +jsonFillValue(JsonIterator **pit, JsonbValue *res, bool skipNested, + JsontIterState nextState) +{ + JsonIterator *it = *pit; + JsonLexContext *lex = it->lex; + JsonTokenType tok = lex_peek(lex); + + switch (tok) + { + case JSON_TOKEN_NULL: + res->type = jbvNull; + break; + + case JSON_TOKEN_TRUE: + res->type = jbvBool; + res->val.boolean = true; + break; + + case JSON_TOKEN_FALSE: + res->type = jbvBool; + res->val.boolean = false; + break; + + case JSON_TOKEN_STRING: + { + char *token = lex_peek_value(lex); + res->type = jbvString; + res->val.string.val = token; + res->val.string.len = strlen(token); + break; + } + + case JSON_TOKEN_NUMBER: + { + char *token = lex_peek_value(lex); + res->type = jbvNumeric; + res->val.numeric = DatumGetNumeric(DirectFunctionCall3( + numeric_in, CStringGetDatum(token), 0, -1)); + break; + } + + case JSON_TOKEN_OBJECT_START: + case JSON_TOKEN_ARRAY_START: + { + JsonContainerData *cont = palloc(sizeof(*cont)); + char *token_start = lex->token_start; + int len; + + if (skipNested) + { + /* find the end of a container for its length calculation */ + if (tok == JSON_TOKEN_OBJECT_START) + parse_object(lex, &nullSemAction); + else + parse_array(lex, &nullSemAction); + + len = lex->token_start - token_start; + } + else + len = lex->input_length - (lex->token_start - lex->input); + + jsonInitContainer(cont, + token_start, len, + tok == JSON_TOKEN_OBJECT_START ? + JB_FOBJECT : JB_FARRAY, + -1); + + res->type = jbvBinary; + res->val.binary.data = (JsonbContainer *) cont; + res->val.binary.len = len; + + if (skipNested) + return false; + + /* recurse into container */ + it->state = nextState; + *pit = JsonIteratorInitFromLex(cont, lex, *pit); + return true; + } + + default: + report_parse_error(JSON_PARSE_VALUE, lex); + } + + lex_accept(lex, tok, NULL); + + return false; +} + +static inline JsonIterator * +JsonIteratorFreeAndGetParent(JsonIterator *it) +{ + JsonIterator *parent = it->parent; + + pfree(it); + + return parent; +} + +/* + * Free a whole stack of JsonIterator iterators. + */ +void +JsonIteratorFree(JsonIterator *it) +{ + while (it) + it = JsonIteratorFreeAndGetParent(it); +} + +/* + * Get next JsonbValue while iterating through JsonContainer. + * + * For more details, see JsonbIteratorNext(). 
+ */ +JsonbIteratorToken +JsonIteratorNext(JsonIterator **pit, JsonbValue *val, bool skipNested) +{ + JsonIterator *it; + + if (*pit == NULL) + return WJB_DONE; + +recurse: + it = *pit; + + /* parse by recursive descent */ + switch (it->state) + { + case JTI_ARRAY_START: + val->type = jbvArray; + val->val.array.nElems = it->isScalar ? 1 : -1; + val->val.array.rawScalar = it->isScalar; + val->val.array.elems = NULL; + it->state = it->isScalar ? JTI_ARRAY_ELEM_SCALAR : JTI_ARRAY_ELEM; + return WJB_BEGIN_ARRAY; + + case JTI_ARRAY_ELEM_SCALAR: + { + (void) jsonFillValue(pit, val, skipNested, JTI_ARRAY_END); + it->state = JTI_ARRAY_END; + return WJB_ELEM; + } + + case JTI_ARRAY_END: + if (!it->parent && lex_peek(it->lex) != JSON_TOKEN_END) + report_parse_error(JSON_PARSE_END, it->lex); + *pit = JsonIteratorFreeAndGetParent(*pit); + return WJB_END_ARRAY; + + case JTI_ARRAY_ELEM: + if (lex_accept(it->lex, JSON_TOKEN_ARRAY_END, NULL)) + { + it->state = JTI_ARRAY_END; + goto recurse; + } + + if (jsonFillValue(pit, val, skipNested, JTI_ARRAY_ELEM_AFTER)) + goto recurse; + + /* fall through */ + + case JTI_ARRAY_ELEM_AFTER: + if (!lex_accept(it->lex, JSON_TOKEN_COMMA, NULL)) + { + if (lex_peek(it->lex) != JSON_TOKEN_ARRAY_END) + report_parse_error(JSON_PARSE_ARRAY_NEXT, it->lex); + } + + if (it->state == JTI_ARRAY_ELEM_AFTER) + { + it->state = JTI_ARRAY_ELEM; + goto recurse; + } + + return WJB_ELEM; + + case JTI_OBJECT_START: + val->type = jbvObject; + val->val.object.nPairs = -1; + val->val.object.pairs = NULL; + val->val.object.uniquified = false; + it->state = JTI_OBJECT_KEY; + return WJB_BEGIN_OBJECT; + + case JTI_OBJECT_KEY: + if (lex_accept(it->lex, JSON_TOKEN_OBJECT_END, NULL)) + { + if (!it->parent && lex_peek(it->lex) != JSON_TOKEN_END) + report_parse_error(JSON_PARSE_END, it->lex); + *pit = JsonIteratorFreeAndGetParent(*pit); + return WJB_END_OBJECT; + } + + if (lex_peek(it->lex) != JSON_TOKEN_STRING) + report_parse_error(JSON_PARSE_OBJECT_START, it->lex); + + (void) jsonFillValue(pit, val, true, JTI_OBJECT_VALUE); + + if (!lex_accept(it->lex, JSON_TOKEN_COLON, NULL)) + report_parse_error(JSON_PARSE_OBJECT_LABEL, it->lex); + + it->state = JTI_OBJECT_VALUE; + return WJB_KEY; + + case JTI_OBJECT_VALUE: + if (jsonFillValue(pit, val, skipNested, JTI_OBJECT_VALUE_AFTER)) + goto recurse; + + /* fall through */ + + case JTI_OBJECT_VALUE_AFTER: + if (!lex_accept(it->lex, JSON_TOKEN_COMMA, NULL)) + { + if (lex_peek(it->lex) != JSON_TOKEN_OBJECT_END) + report_parse_error(JSON_PARSE_OBJECT_NEXT, it->lex); + } + + if (it->state == JTI_OBJECT_VALUE_AFTER) + { + it->state = JTI_OBJECT_KEY; + goto recurse; + } + + it->state = JTI_OBJECT_KEY; + return WJB_VALUE; + + default: + break; + } + + return WJB_DONE; +} + +static JsonIterator * +JsonIteratorInitFromLex(JsonContainer *jc, JsonLexContext *lex, + JsonIterator *parent) +{ + JsonIterator *it = palloc(sizeof(JsonIterator)); + JsonTokenType tok; + + it->container = jc; + it->parent = parent; + it->lex = lex; + + tok = lex_peek(it->lex); + + switch (tok) + { + case JSON_TOKEN_OBJECT_START: + it->isScalar = false; + it->state = JTI_OBJECT_START; + lex_accept(it->lex, tok, NULL); + break; + case JSON_TOKEN_ARRAY_START: + it->isScalar = false; + it->state = JTI_ARRAY_START; + lex_accept(it->lex, tok, NULL); + break; + case JSON_TOKEN_STRING: + case JSON_TOKEN_NUMBER: + case JSON_TOKEN_TRUE: + case JSON_TOKEN_FALSE: + case JSON_TOKEN_NULL: + it->isScalar = true; + it->state = JTI_ARRAY_START; + break; + default: + report_parse_error(JSON_PARSE_VALUE, it->lex); 
+ } + + return it; +} + +/* + * Given a JsonContainer, expand to JsonIterator to iterate over items + * fully expanded to in-memory representation for manipulation. + * + * See JsonbIteratorNext() for notes on memory management. + */ +JsonIterator * +JsonIteratorInit(JsonContainer *jc) +{ + JsonLexContext *lex = makeJsonLexContextCstringLen(jc->data, jc->len, true); + json_lex(lex); + return JsonIteratorInitFromLex(jc, lex, NULL); +} + +/* + * Serialize a single JsonbValue into text buffer. + */ +static void +JsonEncodeJsonbValue(StringInfo buf, JsonbValue *jbv) +{ + check_stack_depth(); + + switch (jbv->type) + { + case jbvNull: + appendBinaryStringInfo(buf, "null", 4); + break; + + case jbvBool: + if (jbv->val.boolean) + appendBinaryStringInfo(buf, "true", 4); + else + appendBinaryStringInfo(buf, "false", 5); + break; + + case jbvNumeric: + appendStringInfoString(buf, DatumGetCString(DirectFunctionCall1( + numeric_out, NumericGetDatum(jbv->val.numeric)))); + break; + + case jbvString: + { + char *str = jbv->val.string.len < 0 ? jbv->val.string.val : + pnstrdup(jbv->val.string.val, jbv->val.string.len); + + escape_json(buf, str); + + if (jbv->val.string.len >= 0) + pfree(str); + + break; + } + + case jbvDatetime: + { + char dtbuf[MAXDATELEN + 1]; + + JsonEncodeDateTime(dtbuf, + jbv->val.datetime.value, + jbv->val.datetime.typid, + &jbv->val.datetime.tz); + + escape_json(buf, dtbuf); + + break; + } + + case jbvArray: + { + int i; + + if (!jbv->val.array.rawScalar) + appendStringInfoChar(buf, '['); + + for (i = 0; i < jbv->val.array.nElems; i++) + { + if (i > 0) + appendBinaryStringInfo(buf, ", ", 2); + + JsonEncodeJsonbValue(buf, &jbv->val.array.elems[i]); + } + + if (!jbv->val.array.rawScalar) + appendStringInfoChar(buf, ']'); + + break; + } + + case jbvObject: + { + int i; + + appendStringInfoChar(buf, '{'); + + for (i = 0; i < jbv->val.object.nPairs; i++) + { + if (i > 0) + appendBinaryStringInfo(buf, ", ", 2); + + JsonEncodeJsonbValue(buf, &jbv->val.object.pairs[i].key); + appendBinaryStringInfo(buf, ": ", 2); + JsonEncodeJsonbValue(buf, &jbv->val.object.pairs[i].value); + } + + appendStringInfoChar(buf, '}'); + break; + } + + case jbvBinary: + { + JsonContainer *json = (JsonContainer *) jbv->val.binary.data; + + appendBinaryStringInfo(buf, json->data, json->len); + break; + } + + default: + elog(ERROR, "unknown jsonb value type: %d", jbv->type); + break; + } +} + +/* + * Turn an in-memory JsonbValue into a json for on-disk storage. 
+ */ +Json * +JsonbValueToJson(JsonbValue *jbv) +{ + StringInfoData buf; + Json *json = palloc0(sizeof(*json)); + int type; + int size; + + if (jbv->type == jbvBinary) + { + /* simply copy the whole container and its data */ + JsonContainer *src = (JsonContainer *) jbv->val.binary.data; + JsonContainerData *dst = (JsonContainerData *) &json->root; + + *dst = *src; + dst->data = memcpy(palloc(src->len), src->data, src->len); + + return json; + } + + initStringInfo(&buf); + + JsonEncodeJsonbValue(&buf, jbv); + + switch (jbv->type) + { + case jbvArray: + type = JB_FARRAY; + size = jbv->val.array.nElems; + break; + + case jbvObject: + type = JB_FOBJECT; + size = jbv->val.object.nPairs; + break; + + default: /* scalar */ + type = JB_FARRAY | JB_FSCALAR; + size = 1; + break; + } + + jsonInitContainer((JsonContainerData *) &json->root, + buf.data, buf.len, type, size); + + return json; +} + +/* Context and semantic actions for JsonGetArraySize() */ +typedef struct JsonGetArraySizeState +{ + int level; + uint32 size; +} JsonGetArraySizeState; + +static void +JsonGetArraySize_array_start(void *state) +{ + ((JsonGetArraySizeState *) state)->level++; +} + +static void +JsonGetArraySize_array_end(void *state) +{ + ((JsonGetArraySizeState *) state)->level--; +} + +static void +JsonGetArraySize_array_element_start(void *state, bool isnull) +{ + JsonGetArraySizeState *s = state; + if (s->level == 1) + s->size++; +} + +/* + * Calculate the size of a json array by iterating through its elements. + */ +uint32 +JsonGetArraySize(JsonContainer *jc) +{ + JsonLexContext *lex = makeJsonLexContextCstringLen(jc->data, jc->len, false); + JsonSemAction sem; + JsonGetArraySizeState state; + + state.level = 0; + state.size = 0; + + memset(&sem, 0, sizeof(sem)); + sem.semstate = &state; + sem.array_start = JsonGetArraySize_array_start; + sem.array_end = JsonGetArraySize_array_end; + sem.array_element_end = JsonGetArraySize_array_element_start; + + json_lex(lex); + parse_array(lex, &sem); + + return state.size; +} + +/* + * Find last key in a json object by name. Returns palloc()'d copy of the + * corresponding value, or NULL if is not found. + */ +static inline JsonbValue * +jsonFindLastKeyInObject(JsonContainer *obj, const JsonbValue *key) +{ + JsonbValue *res = NULL; + JsonbValue jbv; + JsonIterator *it; + JsonbIteratorToken tok; + + Assert(JsonContainerIsObject(obj)); + Assert(key->type == jbvString); + + it = JsonIteratorInit(obj); + + while ((tok = JsonIteratorNext(&it, &jbv, true)) != WJB_DONE) + { + if (tok == WJB_KEY && !lengthCompareJsonbStringValue(key, &jbv)) + { + if (!res) + res = palloc(sizeof(*res)); + + tok = JsonIteratorNext(&it, res, true); + Assert(tok == WJB_VALUE); + } + } + + return res; +} + +/* + * Find scalar element in a array. Returns palloc()'d copy of value or NULL. + */ +static JsonbValue * +jsonFindValueInArray(JsonContainer *array, const JsonbValue *elem) +{ + JsonbValue *val = palloc(sizeof(*val)); + JsonIterator *it; + JsonbIteratorToken tok; + + Assert(JsonContainerIsArray(array)); + Assert(IsAJsonbScalar(elem)); + + it = JsonIteratorInit(array); + + while ((tok = JsonIteratorNext(&it, val, true)) != WJB_DONE) + { + if (tok == WJB_ELEM && val->type == elem->type && + equalsJsonbScalarValue(val, (JsonbValue *) elem)) + { + JsonIteratorFree(it); + return val; + } + } + + pfree(val); + return NULL; +} + +/* + * Find value in object (i.e. the "value" part of some key/value pair in an + * object), or find a matching element if we're looking through an array. 
+ * The "flags" argument allows the caller to specify which container types are + * of interest. If we cannot find the value, return NULL. Otherwise, return + * palloc()'d copy of value. + * + * For more details, see findJsonbValueFromContainer(). + */ +JsonbValue * +findJsonValueFromContainer(JsonContainer *jc, uint32 flags, JsonbValue *key) +{ + Assert((flags & ~(JB_FARRAY | JB_FOBJECT)) == 0); + + if (!JsonContainerSize(jc)) + return NULL; + + if ((flags & JB_FARRAY) && JsonContainerIsArray(jc)) + return jsonFindValueInArray(jc, key); + + if ((flags & JB_FOBJECT) && JsonContainerIsObject(jc)) + return jsonFindLastKeyInObject(jc, key); + + /* Not found */ + return NULL; +} + +/* + * Get i-th element of a json array. + * + * Returns palloc()'d copy of the value, or NULL if it does not exist. + */ +JsonbValue * +getIthJsonValueFromContainer(JsonContainer *array, uint32 index) +{ + JsonbValue *val = palloc(sizeof(JsonbValue)); + JsonIterator *it; + JsonbIteratorToken tok; + + Assert(JsonContainerIsArray(array)); + + it = JsonIteratorInit(array); + + while ((tok = JsonIteratorNext(&it, val, true)) != WJB_DONE) + { + if (tok == WJB_ELEM) + { + if (index-- == 0) + { + JsonIteratorFree(it); + return val; + } + } + } + + pfree(val); + + return NULL; +} + +/* + * Push json JsonbValue into JsonbParseState. + * + * Used for converting an in-memory JsonbValue to a json. For more details, + * see pushJsonbValue(). This function differs from pushJsonbValue() only by + * resetting "uniquified" flag in objects. + */ +JsonbValue * +pushJsonValue(JsonbParseState **pstate, JsonbIteratorToken seq, + JsonbValue *jbval) +{ + JsonIterator *it; + JsonbValue *res = NULL; + JsonbValue v; + JsonbIteratorToken tok; + + if (!jbval || (seq != WJB_ELEM && seq != WJB_VALUE) || + jbval->type != jbvBinary) + { + /* drop through */ + res = pushJsonbValueScalar(pstate, seq, jbval); + + /* reset "uniquified" flag of objects */ + if (seq == WJB_BEGIN_OBJECT) + (*pstate)->contVal.val.object.uniquified = false; + + return res; + } + + /* unpack the binary and add each piece to the pstate */ + it = JsonIteratorInit((JsonContainer *) jbval->val.binary.data); + while ((tok = JsonIteratorNext(&it, &v, false)) != WJB_DONE) + { + res = pushJsonbValueScalar(pstate, tok, + tok < WJB_BEGIN_ARRAY ? &v : NULL); + + /* reset "uniquified" flag of objects */ + if (tok == WJB_BEGIN_OBJECT) + (*pstate)->contVal.val.object.uniquified = false; + } + + return res; +} + +JsonbValue * +JsonExtractScalar(JsonContainer *jbc, JsonbValue *res) +{ + JsonIterator *it = JsonIteratorInit(jbc); + JsonbIteratorToken tok PG_USED_FOR_ASSERTS_ONLY; + JsonbValue tmp; + + tok = JsonIteratorNext(&it, &tmp, true); + Assert(tok == WJB_BEGIN_ARRAY); + Assert(tmp.val.array.nElems == 1 && tmp.val.array.rawScalar); + + tok = JsonIteratorNext(&it, res, true); + Assert(tok == WJB_ELEM); + Assert(IsAJsonbScalar(res)); + + tok = JsonIteratorNext(&it, &tmp, true); + Assert(tok == WJB_END_ARRAY); + + return res; +} + +/* + * Turn a Json into its C-string representation with stripping quotes from + * scalar strings. + */ +char * +JsonUnquote(Json *jb) +{ + if (JsonContainerIsScalar(&jb->root)) + { + JsonbValue v; + + JsonExtractScalar(&jb->root, &v); + + if (v.type == jbvString) + return pnstrdup(v.val.string.val, v.val.string.len); + } + + return pnstrdup(jb->root.data, jb->root.len); +} + +/* + * Turn a JsonContainer into its C-string representation. 
+ */ +char * +JsonToCString(StringInfo out, JsonContainer *jc, int estimated_len) +{ + if (out) + { + appendBinaryStringInfo(out, jc->data, jc->len); + return out->data; + } + else + { + char *str = palloc(jc->len + 1); + + memcpy(str, jc->data, jc->len); + str[jc->len] = 0; + + return str; + } +} diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c index 0ae9d7b9c5..00a7f3a293 100644 --- a/src/backend/utils/adt/jsonb.c +++ b/src/backend/utils/adt/jsonb.c @@ -794,17 +794,17 @@ datum_to_jsonb(Datum val, bool is_null, JsonbInState *result, break; case JSONBTYPE_DATE: jb.type = jbvString; - jb.val.string.val = JsonEncodeDateTime(NULL, val, DATEOID); + jb.val.string.val = JsonEncodeDateTime(NULL, val, DATEOID, NULL); jb.val.string.len = strlen(jb.val.string.val); break; case JSONBTYPE_TIMESTAMP: jb.type = jbvString; - jb.val.string.val = JsonEncodeDateTime(NULL, val, TIMESTAMPOID); + jb.val.string.val = JsonEncodeDateTime(NULL, val, TIMESTAMPOID, NULL); jb.val.string.len = strlen(jb.val.string.val); break; case JSONBTYPE_TIMESTAMPTZ: jb.type = jbvString; - jb.val.string.val = JsonEncodeDateTime(NULL, val, TIMESTAMPTZOID); + jb.val.string.val = JsonEncodeDateTime(NULL, val, TIMESTAMPTZOID, NULL); jb.val.string.len = strlen(jb.val.string.val); break; case JSONBTYPE_JSONCAST: @@ -1857,7 +1857,7 @@ jsonb_object_agg_finalfn(PG_FUNCTION_ARGS) /* * Extract scalar value from raw-scalar pseudo-array jsonb. */ -static bool +JsonbValue * JsonbExtractScalar(JsonbContainer *jbc, JsonbValue *res) { JsonbIterator *it; @@ -1868,7 +1868,7 @@ JsonbExtractScalar(JsonbContainer *jbc, JsonbValue *res) { /* inform caller about actual type of container */ res->type = (JsonContainerIsArray(jbc)) ? jbvArray : jbvObject; - return false; + return NULL; } /* @@ -1891,7 +1891,7 @@ JsonbExtractScalar(JsonbContainer *jbc, JsonbValue *res) tok = JsonbIteratorNext(&it, &tmp, true); Assert(tok == WJB_DONE); - return true; + return res; } /* diff --git a/src/backend/utils/adt/jsonb_gin.c b/src/backend/utils/adt/jsonb_gin.c index c8a27451d2..c11960c03b 100644 --- a/src/backend/utils/adt/jsonb_gin.c +++ b/src/backend/utils/adt/jsonb_gin.c @@ -13,6 +13,7 @@ */ #include "postgres.h" +#include "miscadmin.h" #include "access/gin.h" #include "access/hash.h" #include "access/stratnum.h" @@ -20,6 +21,7 @@ #include "catalog/pg_type.h" #include "utils/builtins.h" #include "utils/jsonb.h" +#include "utils/jsonpath.h" #include "utils/varlena.h" typedef struct PathHashStack @@ -28,9 +30,140 @@ typedef struct PathHashStack struct PathHashStack *parent; } PathHashStack; +typedef enum { eOr, eAnd, eEntry } JsonPathNodeType; + +typedef struct JsonPathNode +{ + JsonPathNodeType type; + union + { + int nargs; + int entryIndex; + Datum entryDatum; + } val; + struct JsonPathNode *args[FLEXIBLE_ARRAY_MEMBER]; +} JsonPathNode; + +typedef struct GinEntries +{ + Datum *buf; + int count; + int allocated; +} GinEntries; + +typedef struct ExtractedPathEntry +{ + struct ExtractedPathEntry *parent; + Datum entry; + JsonPathItemType type; +} ExtractedPathEntry; + +typedef union ExtractedJsonPath +{ + ExtractedPathEntry *entries; + uint32 hash; +} ExtractedJsonPath; + +typedef struct JsonPathExtractionContext +{ + ExtractedJsonPath (*addKey)(ExtractedJsonPath path, char *key, int len); + JsonPath *indexedPaths; + bool pathOps; + bool lax; +} JsonPathExtractionContext; + + static Datum make_text_key(char flag, const char *str, int len); static Datum make_scalar_key(const JsonbValue *scalarVal, bool is_key); +static JsonPathNode 
*gin_extract_jsonpath_expr(JsonPathExtractionContext *cxt, + JsonPathItem *jsp, ExtractedJsonPath path, bool not); + + +static void +gin_entries_init(GinEntries *list, int preallocated) +{ + list->allocated = preallocated; + list->buf = (Datum *) palloc(sizeof(Datum) * list->allocated); + list->count = 0; +} + +static int +gin_entries_add(GinEntries *list, Datum entry) +{ + int id = list->count; + + if (list->count >= list->allocated) + { + + if (list->allocated) + { + list->allocated *= 2; + list->buf = (Datum *) repalloc(list->buf, + sizeof(Datum) * list->allocated); + } + else + { + list->allocated = 8; + list->buf = (Datum *) palloc(sizeof(Datum) * list->allocated); + } + } + + list->buf[list->count++] = entry; + + return id; +} + +/* Append key name to a path. */ +static ExtractedJsonPath +gin_jsonb_ops_add_key(ExtractedJsonPath path, char *key, int len) +{ + ExtractedPathEntry *pentry = palloc(sizeof(*pentry)); + + pentry->parent = path.entries; + + if (key) + { + pentry->entry = make_text_key(JGINFLAG_KEY, key, len); + pentry->type = jpiKey; + } + else + { + pentry->entry = PointerGetDatum(NULL); + pentry->type = len; + } + + path.entries = pentry; + + return path; +} + +/* Combine existing path hash with next key hash. */ +static ExtractedJsonPath +gin_jsonb_path_ops_add_key(ExtractedJsonPath path, char *key, int len) +{ + if (key) + { + JsonbValue jbv; + + jbv.type = jbvString; + jbv.val.string.val = key; + jbv.val.string.len = len; + + JsonbHashScalarValue(&jbv, &path.hash); + } + + return path; +} + +static void +gin_jsonpath_init_context(JsonPathExtractionContext *cxt, bool pathOps, bool lax) +{ + cxt->addKey = pathOps ? gin_jsonb_path_ops_add_key : gin_jsonb_ops_add_key; + cxt->pathOps = pathOps; + cxt->lax = lax; +} + /* * * jsonb_ops GIN opclass support functions @@ -68,12 +201,11 @@ gin_extract_jsonb(PG_FUNCTION_ARGS) { Jsonb *jb = (Jsonb *) PG_GETARG_JSONB_P(0); int32 *nentries = (int32 *) PG_GETARG_POINTER(1); - int total = 2 * JB_ROOT_COUNT(jb); + int total = JB_ROOT_COUNT(jb); JsonbIterator *it; JsonbValue v; JsonbIteratorToken r; - int i = 0; - Datum *entries; + GinEntries entries; /* If the root level is empty, we certainly have no keys */ if (total == 0) @@ -83,30 +215,23 @@ gin_extract_jsonb(PG_FUNCTION_ARGS) } /* Otherwise, use 2 * root count as initial estimate of result size */ - entries = (Datum *) palloc(sizeof(Datum) * total); + gin_entries_init(&entries, 2 * total); it = JsonbIteratorInit(&jb->root); while ((r = JsonbIteratorNext(&it, &v, false)) != WJB_DONE) { - /* Since we recurse into the object, we might need more space */ - if (i >= total) - { - total *= 2; - entries = (Datum *) repalloc(entries, sizeof(Datum) * total); - } - switch (r) { case WJB_KEY: - entries[i++] = make_scalar_key(&v, true); + gin_entries_add(&entries, make_scalar_key(&v, true)); break; case WJB_ELEM: /* Pretend string array elements are keys, see jsonb.h */ - entries[i++] = make_scalar_key(&v, (v.type == jbvString)); + gin_entries_add(&entries, make_scalar_key(&v, v.type == jbvString)); break; case WJB_VALUE: - entries[i++] = make_scalar_key(&v, false); + gin_entries_add(&entries, make_scalar_key(&v, false)); break; default: /* we can ignore structural items */ @@ -114,9 +239,447 @@ gin_extract_jsonb(PG_FUNCTION_ARGS) } } - *nentries = i; + *nentries = entries.count; - PG_RETURN_POINTER(entries); + PG_RETURN_POINTER(entries.buf); +} + + +/* + * Extract JSON path into the 'path' with filters. + * Returns true iff this path is supported by the index opclass. 
+ */ +static bool +gin_extract_jsonpath_path(JsonPathExtractionContext *cxt, JsonPathItem *jsp, + ExtractedJsonPath *path, List **filters) +{ + JsonPathItem next; + + for (;;) + { + switch (jsp->type) + { + case jpiRoot: + path->entries = NULL; /* reset path */ + break; + + case jpiCurrent: + break; + + case jpiKey: + { + int keylen; + char *key = jspGetString(jsp, &keylen); + + *path = cxt->addKey(*path, key, keylen); + break; + } + + case jpiIndexArray: + case jpiAnyArray: + *path = cxt->addKey(*path, NULL, jsp->type); + break; + + case jpiAny: + case jpiAnyKey: + if (cxt->pathOps) + /* jsonb_path_ops doesn't support wildcard paths */ + return false; + + *path = cxt->addKey(*path, NULL, jsp->type); + break; + + case jpiFilter: + { + JsonPathItem arg; + JsonPathNode *filter; + + jspGetArg(jsp, &arg); + + filter = gin_extract_jsonpath_expr(cxt, &arg, *path, false); + + if (filter) + *filters = lappend(*filters, filter); + + break; + } + + default: + /* other path items (like item methods) are not supported */ + return false; + } + + if (!jspGetNext(jsp, &next)) + break; + + jsp = &next; + } + + return true; +} + +/* Append an entry node to the global entry list. */ +static inline JsonPathNode * +gin_jsonpath_make_entry_node(Datum entry) +{ + JsonPathNode *node = palloc(offsetof(JsonPathNode, args)); + + node->type = eEntry; + node->val.entryDatum = entry; + + return node; +} + +static inline JsonPathNode * +gin_jsonpath_make_entry_node_scalar(JsonbValue *scalar, bool iskey) +{ + return gin_jsonpath_make_entry_node(make_scalar_key(scalar, iskey)); +} + +static inline JsonPathNode * +gin_jsonpath_make_expr_node(JsonPathNodeType type, int nargs) +{ + JsonPathNode *node = palloc(offsetof(JsonPathNode, args) + + sizeof(node->args[0]) * nargs); + + node->type = type; + node->val.nargs = nargs; + + return node; +} + +static inline JsonPathNode * +gin_jsonpath_make_expr_node_args(JsonPathNodeType type, List *args) +{ + JsonPathNode *node = gin_jsonpath_make_expr_node(type, list_length(args)); + ListCell *lc; + int i = 0; + + foreach(lc, args) + node->args[i++] = lfirst(lc); + + return node; +} + +static inline JsonPathNode * +gin_jsonpath_make_expr_node_binary(JsonPathNodeType type, + JsonPathNode *arg1, JsonPathNode *arg2) +{ + JsonPathNode *node = gin_jsonpath_make_expr_node(type, 2); + + node->args[0] = arg1; + node->args[1] = arg2; + + return node; +} + +/* + * Extract node from the EXISTS/equality-comparison jsonpath expression. If + * 'scalar' is not NULL this is equality-comparsion, otherwise this is + * EXISTS-predicate. The current path is passed in 'pathcxt'. 
+ */ +static JsonPathNode * +gin_extract_jsonpath_node(JsonPathExtractionContext *cxt, JsonPathItem *jsp, + ExtractedJsonPath path, JsonbValue *scalar) +{ + List *nodes = NIL; /* nodes to be AND-ed */ + + /* filters extracted into 'nodes' */ + if (!gin_extract_jsonpath_path(cxt, jsp, &path, &nodes)) + return NULL; + + if (cxt->pathOps) + { + if (scalar) + { + /* append path hash node for equality queries */ + uint32 hash = path.hash; + JsonPathNode *node; + + JsonbHashScalarValue(scalar, &hash); + + node = gin_jsonpath_make_entry_node(UInt32GetDatum(hash)); + nodes = lappend(nodes, node); + } + /* else: jsonb_path_ops doesn't support EXISTS queries */ + } + else + { + ExtractedPathEntry *pentry; + + /* append path entry nodes */ + for (pentry = path.entries; pentry; pentry = pentry->parent) + { + if (pentry->type == jpiKey) /* only keys are indexed */ + nodes = lappend(nodes, + gin_jsonpath_make_entry_node(pentry->entry)); + } + + if (scalar) + { + /* append scalar node for equality queries */ + JsonPathNode *node; + ExtractedPathEntry *last = path.entries; + GinTernaryValue lastIsArrayAccessor = !last ? GIN_FALSE : + last->type == jpiIndexArray || + last->type == jpiAnyArray ? GIN_TRUE : + last->type == jpiAny ? GIN_MAYBE : GIN_FALSE; + + /* + * Create OR-node when the string scalar can be matched as a key + * and a non-key. It is possible in lax mode where arrays are + * automatically unwrapped, or in strict mode for jpiAny items. + */ + if (scalar->type == jbvString && + (cxt->lax || lastIsArrayAccessor == GIN_MAYBE)) + node = gin_jsonpath_make_expr_node_binary(eOr, + gin_jsonpath_make_entry_node_scalar(scalar, true), + gin_jsonpath_make_entry_node_scalar(scalar, false)); + else + node = gin_jsonpath_make_entry_node_scalar(scalar, + scalar->type == jbvString && + lastIsArrayAccessor == GIN_TRUE); + + nodes = lappend(nodes, node); + } + } + + if (list_length(nodes) <= 0) + return NULL; /* need full scan for EXISTS($) queries without filters */ + + if (list_length(nodes) == 1) + return linitial(nodes); /* avoid extra AND-node */ + + /* construct AND-node for path with filters */ + return gin_jsonpath_make_expr_node_args(eAnd, nodes); +} + +/* Recursively extract nodes from the boolean jsonpath expression. */ +static JsonPathNode * +gin_extract_jsonpath_expr(JsonPathExtractionContext *cxt, JsonPathItem *jsp, + ExtractedJsonPath path, bool not) +{ + check_stack_depth(); + + switch (jsp->type) + { + case jpiAnd: + case jpiOr: + { + JsonPathItem arg; + JsonPathNode *larg; + JsonPathNode *rarg; + JsonPathNodeType type; + + jspGetLeftArg(jsp, &arg); + larg = gin_extract_jsonpath_expr(cxt, &arg, path, not); + + jspGetRightArg(jsp, &arg); + rarg = gin_extract_jsonpath_expr(cxt, &arg, path, not); + + if (!larg || !rarg) + { + if (jsp->type == jpiOr) + return NULL; + return larg ? larg : rarg; + } + + type = not ^ (jsp->type == jpiAnd) ? 
eAnd : eOr; + + return gin_jsonpath_make_expr_node_binary(type, larg, rarg); + } + + case jpiNot: + { + JsonPathItem arg; + + jspGetArg(jsp, &arg); + + return gin_extract_jsonpath_expr(cxt, &arg, path, !not); + } + + case jpiExists: + { + JsonPathItem arg; + + if (not) + return NULL; + + jspGetArg(jsp, &arg); + + return gin_extract_jsonpath_node(cxt, &arg, path, NULL); + } + + case jpiEqual: + { + JsonPathItem leftItem; + JsonPathItem rightItem; + JsonPathItem *pathItem; + JsonPathItem *scalarItem; + JsonbValue scalar; + + if (not) + return NULL; + + jspGetLeftArg(jsp, &leftItem); + jspGetRightArg(jsp, &rightItem); + + if (jspIsScalar(leftItem.type)) + { + scalarItem = &leftItem; + pathItem = &rightItem; + } + else if (jspIsScalar(rightItem.type)) + { + scalarItem = &rightItem; + pathItem = &leftItem; + } + else + return NULL; /* at least one operand should be a scalar */ + + switch (scalarItem->type) + { + case jpiNull: + scalar.type = jbvNull; + break; + case jpiBool: + scalar.type = jbvBool; + scalar.val.boolean = !!*scalarItem->content.value.data; + break; + case jpiNumeric: + scalar.type = jbvNumeric; + scalar.val.numeric = + (Numeric) scalarItem->content.value.data; + break; + case jpiString: + scalar.type = jbvString; + scalar.val.string.val = scalarItem->content.value.data; + scalar.val.string.len = scalarItem->content.value.datalen; + break; + default: + elog(ERROR, "invalid scalar jsonpath item type: %d", + scalarItem->type); + return NULL; + } + + return gin_extract_jsonpath_node(cxt, pathItem, path, &scalar); + } + + default: + return NULL; + } +} + +/* Recursively emit all GIN entries found in the node tree */ +static void +gin_jsonpath_emit_entries(JsonPathNode *node, GinEntries *entries) +{ + check_stack_depth(); + + switch (node->type) + { + case eEntry: + /* replace datum with its index in the array */ + node->val.entryIndex = + gin_entries_add(entries, node->val.entryDatum); + break; + + case eOr: + case eAnd: + { + int i; + + for (i = 0; i < node->val.nargs; i++) + gin_jsonpath_emit_entries(node->args[i], entries); + + break; + } + } +} + +static Datum * +gin_extract_jsonpath_query(JsonPath *jp, StrategyNumber strat, bool pathOps, + int32 *nentries, Pointer **extra_data) +{ + JsonPathExtractionContext cxt; + JsonPathItem root; + JsonPathNode *node; + ExtractedJsonPath path = { 0 }; + GinEntries entries = { 0 }; + + gin_jsonpath_init_context(&cxt, pathOps, (jp->header & JSONPATH_LAX) != 0); + + jspInit(&root, jp); + + node = strat == JsonbJsonpathExistsStrategyNumber + ? 
gin_extract_jsonpath_node(&cxt, &root, path, NULL) + : gin_extract_jsonpath_expr(&cxt, &root, path, false); + + if (!node) + { + *nentries = 0; + return NULL; + } + + gin_jsonpath_emit_entries(node, &entries); + + *nentries = entries.count; + if (!*nentries) + return NULL; + + *extra_data = palloc0(sizeof(**extra_data) * entries.count); + **extra_data = (Pointer) node; + + return entries.buf; +} + +static GinTernaryValue +gin_execute_jsonpath(JsonPathNode *node, void *check, bool ternary) +{ + GinTernaryValue res; + GinTernaryValue v; + int i; + + switch (node->type) + { + case eAnd: + res = GIN_TRUE; + for (i = 0; i < node->val.nargs; i++) + { + v = gin_execute_jsonpath(node->args[i], check, ternary); + if (v == GIN_FALSE) + return GIN_FALSE; + else if (v == GIN_MAYBE) + res = GIN_MAYBE; + } + return res; + + case eOr: + res = GIN_FALSE; + for (i = 0; i < node->val.nargs; i++) + { + v = gin_execute_jsonpath(node->args[i], check, ternary); + if (v == GIN_TRUE) + return GIN_TRUE; + else if (v == GIN_MAYBE) + res = GIN_MAYBE; + } + return res; + + case eEntry: + { + int index = node->val.entryIndex; + bool maybe = ternary + ? ((GinTernaryValue *) check)[index] != GIN_FALSE + : ((bool *) check)[index]; + + return maybe ? GIN_MAYBE : GIN_FALSE; + } + + default: + elog(ERROR, "invalid jsonpath gin node type: %d", node->type); + return GIN_FALSE; + } } Datum @@ -181,6 +744,18 @@ gin_extract_jsonb_query(PG_FUNCTION_ARGS) if (j == 0 && strategy == JsonbExistsAllStrategyNumber) *searchMode = GIN_SEARCH_MODE_ALL; } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + JsonPath *jp = PG_GETARG_JSONPATH_P(0); + Pointer **extra_data = (Pointer **) PG_GETARG_POINTER(4); + + entries = gin_extract_jsonpath_query(jp, strategy, false, nentries, + extra_data); + + if (!entries) + *searchMode = GIN_SEARCH_MODE_ALL; + } else { elog(ERROR, "unrecognized strategy number: %d", strategy); @@ -199,7 +774,7 @@ gin_consistent_jsonb(PG_FUNCTION_ARGS) /* Jsonb *query = PG_GETARG_JSONB_P(2); */ int32 nkeys = PG_GETARG_INT32(3); - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); bool *recheck = (bool *) PG_GETARG_POINTER(5); bool res = true; int32 i; @@ -256,6 +831,15 @@ gin_consistent_jsonb(PG_FUNCTION_ARGS) } } } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + JsonPathNode *node = (JsonPathNode *) extra_data[0]; + + *recheck = true; + res = nkeys <= 0 || + gin_execute_jsonpath(node, check, false) != GIN_FALSE; + } else elog(ERROR, "unrecognized strategy number: %d", strategy); @@ -270,8 +854,7 @@ gin_triconsistent_jsonb(PG_FUNCTION_ARGS) /* Jsonb *query = PG_GETARG_JSONB_P(2); */ int32 nkeys = PG_GETARG_INT32(3); - - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); GinTernaryValue res = GIN_MAYBE; int32 i; @@ -308,6 +891,12 @@ gin_triconsistent_jsonb(PG_FUNCTION_ARGS) } } } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + res = nkeys <= 0 ? 
GIN_MAYBE : + gin_execute_jsonpath((JsonPathNode *) extra_data[0], check, true); + } else elog(ERROR, "unrecognized strategy number: %d", strategy); @@ -331,14 +920,13 @@ gin_extract_jsonb_path(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB_P(0); int32 *nentries = (int32 *) PG_GETARG_POINTER(1); - int total = 2 * JB_ROOT_COUNT(jb); + int total = JB_ROOT_COUNT(jb); JsonbIterator *it; JsonbValue v; JsonbIteratorToken r; PathHashStack tail; PathHashStack *stack; - int i = 0; - Datum *entries; + GinEntries entries; /* If the root level is empty, we certainly have no keys */ if (total == 0) @@ -348,7 +936,7 @@ gin_extract_jsonb_path(PG_FUNCTION_ARGS) } /* Otherwise, use 2 * root count as initial estimate of result size */ - entries = (Datum *) palloc(sizeof(Datum) * total); + gin_entries_init(&entries, 2 * total); /* We keep a stack of partial hashes corresponding to parent key levels */ tail.parent = NULL; @@ -361,13 +949,6 @@ gin_extract_jsonb_path(PG_FUNCTION_ARGS) { PathHashStack *parent; - /* Since we recurse into the object, we might need more space */ - if (i >= total) - { - total *= 2; - entries = (Datum *) repalloc(entries, sizeof(Datum) * total); - } - switch (r) { case WJB_BEGIN_ARRAY: @@ -398,7 +979,7 @@ gin_extract_jsonb_path(PG_FUNCTION_ARGS) /* mix the element or value's hash into the prepared hash */ JsonbHashScalarValue(&v, &stack->hash); /* and emit an index entry */ - entries[i++] = UInt32GetDatum(stack->hash); + gin_entries_add(&entries, UInt32GetDatum(stack->hash)); /* reset hash for next key, value, or sub-object */ stack->hash = stack->parent->hash; break; @@ -419,9 +1000,9 @@ gin_extract_jsonb_path(PG_FUNCTION_ARGS) } } - *nentries = i; + *nentries = entries.count; - PG_RETURN_POINTER(entries); + PG_RETURN_POINTER(entries.buf); } Datum @@ -432,18 +1013,35 @@ gin_extract_jsonb_query_path(PG_FUNCTION_ARGS) int32 *searchMode = (int32 *) PG_GETARG_POINTER(6); Datum *entries; - if (strategy != JsonbContainsStrategyNumber) - elog(ERROR, "unrecognized strategy number: %d", strategy); + if (strategy == JsonbContainsStrategyNumber) + { + /* Query is a jsonb, so just apply gin_extract_jsonb_path ... */ + entries = (Datum *) + DatumGetPointer(DirectFunctionCall2(gin_extract_jsonb_path, + PG_GETARG_DATUM(0), + PointerGetDatum(nentries))); - /* Query is a jsonb, so just apply gin_extract_jsonb_path ... */ - entries = (Datum *) - DatumGetPointer(DirectFunctionCall2(gin_extract_jsonb_path, - PG_GETARG_DATUM(0), - PointerGetDatum(nentries))); + /* ... although "contains {}" requires a full index scan */ + if (*nentries == 0) + *searchMode = GIN_SEARCH_MODE_ALL; + } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + JsonPath *jp = PG_GETARG_JSONPATH_P(0); + Pointer **extra_data = (Pointer **) PG_GETARG_POINTER(4); - /* ... 
although "contains {}" requires a full index scan */ - if (*nentries == 0) - *searchMode = GIN_SEARCH_MODE_ALL; + entries = gin_extract_jsonpath_query(jp, strategy, true, nentries, + extra_data); + + if (!entries) + *searchMode = GIN_SEARCH_MODE_ALL; + } + else + { + elog(ERROR, "unrecognized strategy number: %d", strategy); + entries = NULL; + } PG_RETURN_POINTER(entries); } @@ -456,32 +1054,42 @@ gin_consistent_jsonb_path(PG_FUNCTION_ARGS) /* Jsonb *query = PG_GETARG_JSONB_P(2); */ int32 nkeys = PG_GETARG_INT32(3); - - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); bool *recheck = (bool *) PG_GETARG_POINTER(5); bool res = true; int32 i; - if (strategy != JsonbContainsStrategyNumber) - elog(ERROR, "unrecognized strategy number: %d", strategy); - - /* - * jsonb_path_ops is necessarily lossy, not only because of hash - * collisions but also because it doesn't preserve complete information - * about the structure of the JSON object. Besides, there are some - * special rules around the containment of raw scalars in arrays that are - * not handled here. So we must always recheck a match. However, if not - * all of the keys are present, the tuple certainly doesn't match. - */ - *recheck = true; - for (i = 0; i < nkeys; i++) + if (strategy == JsonbContainsStrategyNumber) { - if (!check[i]) + /* + * jsonb_path_ops is necessarily lossy, not only because of hash + * collisions but also because it doesn't preserve complete information + * about the structure of the JSON object. Besides, there are some + * special rules around the containment of raw scalars in arrays that are + * not handled here. So we must always recheck a match. However, if not + * all of the keys are present, the tuple certainly doesn't match. + */ + *recheck = true; + for (i = 0; i < nkeys; i++) { - res = false; - break; + if (!check[i]) + { + res = false; + break; + } } } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + JsonPathNode *node = (JsonPathNode *) extra_data[0]; + + *recheck = true; + res = nkeys <= 0 || + gin_execute_jsonpath(node, check, false) != GIN_FALSE; + } + else + elog(ERROR, "unrecognized strategy number: %d", strategy); PG_RETURN_BOOL(res); } @@ -494,27 +1102,34 @@ gin_triconsistent_jsonb_path(PG_FUNCTION_ARGS) /* Jsonb *query = PG_GETARG_JSONB_P(2); */ int32 nkeys = PG_GETARG_INT32(3); - - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); GinTernaryValue res = GIN_MAYBE; int32 i; - if (strategy != JsonbContainsStrategyNumber) - elog(ERROR, "unrecognized strategy number: %d", strategy); - - /* - * Note that we never return GIN_TRUE, only GIN_MAYBE or GIN_FALSE; this - * corresponds to always forcing recheck in the regular consistent - * function, for the reasons listed there. - */ - for (i = 0; i < nkeys; i++) + if (strategy == JsonbContainsStrategyNumber) { - if (check[i] == GIN_FALSE) + /* + * Note that we never return GIN_TRUE, only GIN_MAYBE or GIN_FALSE; this + * corresponds to always forcing recheck in the regular consistent + * function, for the reasons listed there. + */ + for (i = 0; i < nkeys; i++) { - res = GIN_FALSE; - break; + if (check[i] == GIN_FALSE) + { + res = GIN_FALSE; + break; + } } } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + res = nkeys <= 0 ? 
GIN_MAYBE : + gin_execute_jsonpath((JsonPathNode *) extra_data[0], check, true); + } + else + elog(ERROR, "unrecognized strategy number: %d", strategy); PG_RETURN_GIN_TERNARY_VALUE(res); } diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c index 713631b04f..8a45cb05ff 100644 --- a/src/backend/utils/adt/jsonb_util.c +++ b/src/backend/utils/adt/jsonb_util.c @@ -15,8 +15,11 @@ #include "access/hash.h" #include "catalog/pg_collation.h" +#include "catalog/pg_type.h" #include "miscadmin.h" #include "utils/builtins.h" +#include "utils/datetime.h" +#include "utils/jsonapi.h" #include "utils/jsonb.h" #include "utils/memutils.h" #include "utils/varlena.h" @@ -36,7 +39,6 @@ static void fillJsonbValue(JsonbContainer *container, int index, char *base_addr, uint32 offset, JsonbValue *result); -static bool equalsJsonbScalarValue(JsonbValue *a, JsonbValue *b); static int compareJsonbScalarValue(JsonbValue *a, JsonbValue *b); static Jsonb *convertToJsonb(JsonbValue *val); static void convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level); @@ -55,12 +57,8 @@ static JsonbParseState *pushState(JsonbParseState **pstate); static void appendKey(JsonbParseState *pstate, JsonbValue *scalarVal); static void appendValue(JsonbParseState *pstate, JsonbValue *scalarVal); static void appendElement(JsonbParseState *pstate, JsonbValue *scalarVal); -static int lengthCompareJsonbStringValue(const void *a, const void *b); static int lengthCompareJsonbPair(const void *a, const void *b, void *arg); static void uniqueifyJsonbObject(JsonbValue *object); -static JsonbValue *pushJsonbValueScalar(JsonbParseState **pstate, - JsonbIteratorToken seq, - JsonbValue *scalarVal); /* * Turn an in-memory JsonbValue into a Jsonb for on-disk storage. @@ -241,6 +239,7 @@ compareJsonbContainers(JsonbContainer *a, JsonbContainer *b) res = (va.val.object.nPairs > vb.val.object.nPairs) ? 1 : -1; break; case jbvBinary: + case jbvDatetime: elog(ERROR, "unexpected jbvBinary value"); } } @@ -542,7 +541,7 @@ pushJsonbValue(JsonbParseState **pstate, JsonbIteratorToken seq, * Do the actual pushing, with only scalar or pseudo-scalar-array values * accepted. */ -static JsonbValue * +JsonbValue * pushJsonbValueScalar(JsonbParseState **pstate, JsonbIteratorToken seq, JsonbValue *scalarVal) { @@ -580,6 +579,7 @@ pushJsonbValueScalar(JsonbParseState **pstate, JsonbIteratorToken seq, (*pstate)->size = 4; (*pstate)->contVal.val.object.pairs = palloc(sizeof(JsonbPair) * (*pstate)->size); + (*pstate)->contVal.val.object.uniquified = true; break; case WJB_KEY: Assert(scalarVal->type == jbvString); @@ -822,6 +822,7 @@ JsonbIteratorNext(JsonbIterator **it, JsonbValue *val, bool skipNested) /* Set v to object on first object call */ val->type = jbvObject; val->val.object.nPairs = (*it)->nElems; + val->val.object.uniquified = true; /* * v->val.object.pairs is not actually set, because we aren't @@ -1295,7 +1296,7 @@ JsonbHashScalarValueExtended(const JsonbValue *scalarVal, uint64 *hash, /* * Are two scalar JsonbValues of the same type a and b equal? 
*/ -static bool +bool equalsJsonbScalarValue(JsonbValue *aScalar, JsonbValue *bScalar) { if (aScalar->type == bScalar->type) @@ -1741,11 +1742,28 @@ convertJsonbScalar(StringInfo buffer, JEntry *jentry, JsonbValue *scalarVal) JENTRY_ISBOOL_TRUE : JENTRY_ISBOOL_FALSE; break; + case jbvDatetime: + { + char buf[MAXDATELEN + 1]; + size_t len; + + JsonEncodeDateTime(buf, + scalarVal->val.datetime.value, + scalarVal->val.datetime.typid, + &scalarVal->val.datetime.tz); + len = strlen(buf); + appendToBuffer(buffer, buf, len); + + *jentry = JENTRY_ISSTRING | len; + } + break; + default: elog(ERROR, "invalid jsonb scalar type"); } } + /* * Compare two jbvString JsonbValue values, a and b. * @@ -1758,7 +1776,7 @@ convertJsonbScalar(StringInfo buffer, JEntry *jentry, JsonbValue *scalarVal) * a and b are first sorted based on their length. If a tie-breaker is * required, only then do we consider string binary equality. */ -static int +int lengthCompareJsonbStringValue(const void *a, const void *b) { const JsonbValue *va = (const JsonbValue *) a; @@ -1822,6 +1840,9 @@ uniqueifyJsonbObject(JsonbValue *object) Assert(object->type == jbvObject); + if (!object->val.object.uniquified) + return; + if (object->val.object.nPairs > 1) qsort_arg(object->val.object.pairs, object->val.object.nPairs, sizeof(JsonbPair), lengthCompareJsonbPair, &hasNonUniq); diff --git a/src/backend/utils/adt/jsonpath.c b/src/backend/utils/adt/jsonpath.c new file mode 100644 index 0000000000..11d457d505 --- /dev/null +++ b/src/backend/utils/adt/jsonpath.c @@ -0,0 +1,871 @@ +/*------------------------------------------------------------------------- + * + * jsonpath.c + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/backend/utils/adt/jsonpath.c + * + *------------------------------------------------------------------------- + */ + +#include "postgres.h" +#include "funcapi.h" +#include "miscadmin.h" +#include "lib/stringinfo.h" +#include "utils/builtins.h" +#include "utils/json.h" +#include "utils/jsonpath.h" + +/*****************************INPUT/OUTPUT************************************/ + +/* + * Convert AST to flat jsonpath type representation + */ +static int +flattenJsonPathParseItem(StringInfo buf, JsonPathParseItem *item, + int nestingLevel, bool insideArraySubscript) +{ + /* position from begining of jsonpath data */ + int32 pos = buf->len - JSONPATH_HDRSZ; + int32 chld; + int32 next; + int argNestingLevel = 0; + + check_stack_depth(); + CHECK_FOR_INTERRUPTS(); + + appendStringInfoChar(buf, (char)(item->type)); + alignStringInfoInt(buf); + + next = (item->next) ? 
buf->len : 0; + + /* + * actual value will be recorded later, after next and + * children processing + */ + appendBinaryStringInfo(buf, (char*)&next /* fake value */, sizeof(next)); + + switch(item->type) + { + case jpiString: + case jpiVariable: + case jpiKey: + appendBinaryStringInfo(buf, (char*)&item->value.string.len, + sizeof(item->value.string.len)); + appendBinaryStringInfo(buf, item->value.string.val, item->value.string.len); + appendStringInfoChar(buf, '\0'); + break; + case jpiNumeric: + appendBinaryStringInfo(buf, (char*)item->value.numeric, + VARSIZE(item->value.numeric)); + break; + case jpiBool: + appendBinaryStringInfo(buf, (char*)&item->value.boolean, + sizeof(item->value.boolean)); + break; + case jpiAnd: + case jpiOr: + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiAdd: + case jpiSub: + case jpiMul: + case jpiDiv: + case jpiMod: + case jpiStartsWith: + case jpiDatetime: + { + int32 left, right; + + left = buf->len; + + /* + * first, reserve place for left/right arg's positions, then + * record both args and sets actual position in reserved places + */ + appendBinaryStringInfo(buf, (char*)&left /* fake value */, sizeof(left)); + right = buf->len; + appendBinaryStringInfo(buf, (char*)&right /* fake value */, sizeof(right)); + + chld = !item->value.args.left ? pos : + flattenJsonPathParseItem(buf, item->value.args.left, + nestingLevel + argNestingLevel, + insideArraySubscript); + *(int32*)(buf->data + left) = chld - pos; + chld = !item->value.args.right ? pos : + flattenJsonPathParseItem(buf, item->value.args.right, + nestingLevel + argNestingLevel, + insideArraySubscript); + *(int32*)(buf->data + right) = chld - pos; + } + break; + case jpiLikeRegex: + { + int32 offs; + + appendBinaryStringInfo(buf, + (char *) &item->value.like_regex.flags, + sizeof(item->value.like_regex.flags)); + offs = buf->len; + appendBinaryStringInfo(buf, (char *) &offs /* fake value */, sizeof(offs)); + + appendBinaryStringInfo(buf, + (char *) &item->value.like_regex.patternlen, + sizeof(item->value.like_regex.patternlen)); + appendBinaryStringInfo(buf, item->value.like_regex.pattern, + item->value.like_regex.patternlen); + appendStringInfoChar(buf, '\0'); + + chld = flattenJsonPathParseItem(buf, item->value.like_regex.expr, + nestingLevel, + insideArraySubscript); + *(int32 *)(buf->data + offs) = chld - pos; + } + break; + case jpiFilter: + argNestingLevel++; + /* fall through */ + case jpiIsUnknown: + case jpiNot: + case jpiPlus: + case jpiMinus: + case jpiExists: + { + int32 arg; + + arg = buf->len; + appendBinaryStringInfo(buf, (char*)&arg /* fake value */, sizeof(arg)); + + chld = flattenJsonPathParseItem(buf, item->value.arg, + nestingLevel + argNestingLevel, + insideArraySubscript); + *(int32*)(buf->data + arg) = chld - pos; + } + break; + case jpiNull: + break; + case jpiRoot: + break; + case jpiAnyArray: + case jpiAnyKey: + break; + case jpiCurrent: + if (nestingLevel <= 0) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("@ is not allowed in root expressions"))); + break; + case jpiLast: + if (!insideArraySubscript) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("LAST is allowed only in array subscripts"))); + break; + case jpiIndexArray: + { + int32 nelems = item->value.array.nelems; + int offset; + int i; + + appendBinaryStringInfo(buf, (char *) &nelems, sizeof(nelems)); + + offset = buf->len; + + appendStringInfoSpaces(buf, sizeof(int32) * 2 * nelems); + + for (i = 0; i < nelems; 
i++) + { + int32 *ppos; + int32 topos; + int32 frompos = + flattenJsonPathParseItem(buf, + item->value.array.elems[i].from, + nestingLevel, true) - pos; + + if (item->value.array.elems[i].to) + topos = flattenJsonPathParseItem(buf, + item->value.array.elems[i].to, + nestingLevel, true) - pos; + else + topos = 0; + + ppos = (int32 *) &buf->data[offset + i * 2 * sizeof(int32)]; + + ppos[0] = frompos; + ppos[1] = topos; + } + } + break; + case jpiAny: + appendBinaryStringInfo(buf, + (char*)&item->value.anybounds.first, + sizeof(item->value.anybounds.first)); + appendBinaryStringInfo(buf, + (char*)&item->value.anybounds.last, + sizeof(item->value.anybounds.last)); + break; + case jpiType: + case jpiSize: + case jpiAbs: + case jpiFloor: + case jpiCeiling: + case jpiDouble: + case jpiKeyValue: + break; + default: + elog(ERROR, "Unknown jsonpath item type: %d", item->type); + } + + if (item->next) + { + chld = flattenJsonPathParseItem(buf, item->next, nestingLevel, + insideArraySubscript) - pos; + *(int32 *)(buf->data + next) = chld; + } + + return pos; +} + +Datum +jsonpath_in(PG_FUNCTION_ARGS) +{ + char *in = PG_GETARG_CSTRING(0); + int32 len = strlen(in); + JsonPathParseResult *jsonpath = parsejsonpath(in, len); + JsonPath *res; + StringInfoData buf; + + initStringInfo(&buf); + enlargeStringInfo(&buf, 4 * len /* estimation */); + + appendStringInfoSpaces(&buf, JSONPATH_HDRSZ); + + if (!jsonpath) + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for jsonpath: \"%s\"", in))); + + flattenJsonPathParseItem(&buf, jsonpath->expr, 0, false); + + res = (JsonPath*)buf.data; + SET_VARSIZE(res, buf.len); + res->header = JSONPATH_VERSION; + if (jsonpath->lax) + res->header |= JSONPATH_LAX; + + PG_RETURN_JSONPATH_P(res); +} + +static void +printOperation(StringInfo buf, JsonPathItemType type) +{ + switch(type) + { + case jpiAnd: + appendBinaryStringInfo(buf, " && ", 4); break; + case jpiOr: + appendBinaryStringInfo(buf, " || ", 4); break; + case jpiEqual: + appendBinaryStringInfo(buf, " == ", 4); break; + case jpiNotEqual: + appendBinaryStringInfo(buf, " != ", 4); break; + case jpiLess: + appendBinaryStringInfo(buf, " < ", 3); break; + case jpiGreater: + appendBinaryStringInfo(buf, " > ", 3); break; + case jpiLessOrEqual: + appendBinaryStringInfo(buf, " <= ", 4); break; + case jpiGreaterOrEqual: + appendBinaryStringInfo(buf, " >= ", 4); break; + case jpiAdd: + appendBinaryStringInfo(buf, " + ", 3); break; + case jpiSub: + appendBinaryStringInfo(buf, " - ", 3); break; + case jpiMul: + appendBinaryStringInfo(buf, " * ", 3); break; + case jpiDiv: + appendBinaryStringInfo(buf, " / ", 3); break; + case jpiMod: + appendBinaryStringInfo(buf, " % ", 3); break; + case jpiStartsWith: + appendBinaryStringInfo(buf, " starts with ", 13); break; + default: + elog(ERROR, "Unknown jsonpath item type: %d", type); + } +} + +static int +operationPriority(JsonPathItemType op) +{ + switch (op) + { + case jpiOr: + return 0; + case jpiAnd: + return 1; + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiStartsWith: + return 2; + case jpiAdd: + case jpiSub: + return 3; + case jpiMul: + case jpiDiv: + case jpiMod: + return 4; + case jpiPlus: + case jpiMinus: + return 5; + default: + return 6; + } +} + +static void +printJsonPathItem(StringInfo buf, JsonPathItem *v, bool inKey, bool printBracketes) +{ + JsonPathItem elem; + int i; + + check_stack_depth(); + + switch(v->type) + { + case jpiNull: + 
appendStringInfoString(buf, "null"); + break; + case jpiKey: + if (inKey) + appendStringInfoChar(buf, '.'); + escape_json(buf, jspGetString(v, NULL)); + break; + case jpiString: + escape_json(buf, jspGetString(v, NULL)); + break; + case jpiVariable: + appendStringInfoChar(buf, '$'); + escape_json(buf, jspGetString(v, NULL)); + break; + case jpiNumeric: + appendStringInfoString(buf, + DatumGetCString(DirectFunctionCall1(numeric_out, + PointerGetDatum(jspGetNumeric(v))))); + break; + case jpiBool: + if (jspGetBool(v)) + appendBinaryStringInfo(buf, "true", 4); + else + appendBinaryStringInfo(buf, "false", 5); + break; + case jpiAnd: + case jpiOr: + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiAdd: + case jpiSub: + case jpiMul: + case jpiDiv: + case jpiMod: + case jpiStartsWith: + if (printBracketes) + appendStringInfoChar(buf, '('); + jspGetLeftArg(v, &elem); + printJsonPathItem(buf, &elem, false, + operationPriority(elem.type) <= + operationPriority(v->type)); + printOperation(buf, v->type); + jspGetRightArg(v, &elem); + printJsonPathItem(buf, &elem, false, + operationPriority(elem.type) <= + operationPriority(v->type)); + if (printBracketes) + appendStringInfoChar(buf, ')'); + break; + case jpiLikeRegex: + if (printBracketes) + appendStringInfoChar(buf, '('); + + jspInitByBuffer(&elem, v->base, v->content.like_regex.expr); + printJsonPathItem(buf, &elem, false, + operationPriority(elem.type) <= + operationPriority(v->type)); + + appendBinaryStringInfo(buf, " like_regex ", 12); + + escape_json(buf, v->content.like_regex.pattern); + + if (v->content.like_regex.flags) + { + appendBinaryStringInfo(buf, " flag \"", 7); + + if (v->content.like_regex.flags & JSP_REGEX_ICASE) + appendStringInfoChar(buf, 'i'); + if (v->content.like_regex.flags & JSP_REGEX_SLINE) + appendStringInfoChar(buf, 's'); + if (v->content.like_regex.flags & JSP_REGEX_MLINE) + appendStringInfoChar(buf, 'm'); + if (v->content.like_regex.flags & JSP_REGEX_WSPACE) + appendStringInfoChar(buf, 'x'); + + appendStringInfoChar(buf, '"'); + } + + if (printBracketes) + appendStringInfoChar(buf, ')'); + break; + case jpiPlus: + case jpiMinus: + if (printBracketes) + appendStringInfoChar(buf, '('); + appendStringInfoChar(buf, v->type == jpiPlus ? 
'+' : '-'); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, + operationPriority(elem.type) <= + operationPriority(v->type)); + if (printBracketes) + appendStringInfoChar(buf, ')'); + break; + case jpiFilter: + appendBinaryStringInfo(buf, "?(", 2); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiNot: + appendBinaryStringInfo(buf, "!(", 2); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiIsUnknown: + appendStringInfoChar(buf, '('); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendBinaryStringInfo(buf, ") is unknown", 12); + break; + case jpiExists: + appendBinaryStringInfo(buf,"exists (", 8); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiCurrent: + Assert(!inKey); + appendStringInfoChar(buf, '@'); + break; + case jpiRoot: + Assert(!inKey); + appendStringInfoChar(buf, '$'); + break; + case jpiLast: + appendBinaryStringInfo(buf, "last", 4); + break; + case jpiAnyArray: + appendBinaryStringInfo(buf, "[*]", 3); + break; + case jpiAnyKey: + if (inKey) + appendStringInfoChar(buf, '.'); + appendStringInfoChar(buf, '*'); + break; + case jpiIndexArray: + appendStringInfoChar(buf, '['); + for (i = 0; i < v->content.array.nelems; i++) + { + JsonPathItem from; + JsonPathItem to; + bool range = jspGetArraySubscript(v, &from, &to, i); + + if (i) + appendStringInfoChar(buf, ','); + + printJsonPathItem(buf, &from, false, false); + + if (range) + { + appendBinaryStringInfo(buf, " to ", 4); + printJsonPathItem(buf, &to, false, false); + } + } + appendStringInfoChar(buf, ']'); + break; + case jpiAny: + if (inKey) + appendStringInfoChar(buf, '.'); + + if (v->content.anybounds.first == 0 && + v->content.anybounds.last == PG_UINT32_MAX) + appendBinaryStringInfo(buf, "**", 2); + else if (v->content.anybounds.first == v->content.anybounds.last) + { + if (v->content.anybounds.first == PG_UINT32_MAX) + appendStringInfo(buf, "**{last}"); + else + appendStringInfo(buf, "**{%u}", v->content.anybounds.first); + } + else if (v->content.anybounds.first == PG_UINT32_MAX) + appendStringInfo(buf, "**{last to %u}", v->content.anybounds.last); + else if (v->content.anybounds.last == PG_UINT32_MAX) + appendStringInfo(buf, "**{%u to last}", v->content.anybounds.first); + else + appendStringInfo(buf, "**{%u to %u}", v->content.anybounds.first, + v->content.anybounds.last); + break; + case jpiType: + appendBinaryStringInfo(buf, ".type()", 7); + break; + case jpiSize: + appendBinaryStringInfo(buf, ".size()", 7); + break; + case jpiAbs: + appendBinaryStringInfo(buf, ".abs()", 6); + break; + case jpiFloor: + appendBinaryStringInfo(buf, ".floor()", 8); + break; + case jpiCeiling: + appendBinaryStringInfo(buf, ".ceiling()", 10); + break; + case jpiDouble: + appendBinaryStringInfo(buf, ".double()", 9); + break; + case jpiDatetime: + appendBinaryStringInfo(buf, ".datetime(", 10); + if (v->content.args.left) + { + jspGetLeftArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + + if (v->content.args.right) + { + appendBinaryStringInfo(buf, ", ", 2); + jspGetRightArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + } + } + appendStringInfoChar(buf, ')'); + break; + case jpiKeyValue: + appendBinaryStringInfo(buf, ".keyvalue()", 11); + break; + default: + elog(ERROR, "Unknown jsonpath item type: %d", v->type); + } + + if (jspGetNext(v, &elem)) + 
printJsonPathItem(buf, &elem, true, true); +} + +Datum +jsonpath_out(PG_FUNCTION_ARGS) +{ + JsonPath *in = PG_GETARG_JSONPATH_P(0); + StringInfoData buf; + JsonPathItem v; + + initStringInfo(&buf); + enlargeStringInfo(&buf, VARSIZE(in) /* estimation */); + + if (!(in->header & JSONPATH_LAX)) + appendBinaryStringInfo(&buf, "strict ", 7); + + jspInit(&v, in); + printJsonPathItem(&buf, &v, false, true); + + PG_RETURN_CSTRING(buf.data); +} + +/********************Support functions for JsonPath****************************/ + +/* + * Support macroses to read stored values + */ + +#define read_byte(v, b, p) do { \ + (v) = *(uint8*)((b) + (p)); \ + (p) += 1; \ +} while(0) \ + +#define read_int32(v, b, p) do { \ + (v) = *(uint32*)((b) + (p)); \ + (p) += sizeof(int32); \ +} while(0) \ + +#define read_int32_n(v, b, p, n) do { \ + (v) = (void *)((b) + (p)); \ + (p) += sizeof(int32) * (n); \ +} while(0) \ + +/* + * Read root node and fill root node representation + */ +void +jspInit(JsonPathItem *v, JsonPath *js) +{ + Assert((js->header & ~JSONPATH_LAX) == JSONPATH_VERSION); + jspInitByBuffer(v, js->data, 0); +} + +/* + * Read node from buffer and fill its representation + */ +void +jspInitByBuffer(JsonPathItem *v, char *base, int32 pos) +{ + v->base = base + pos; + + read_byte(v->type, base, pos); + pos = INTALIGN((uintptr_t)(base + pos)) - (uintptr_t) base; + read_int32(v->nextPos, base, pos); + + switch(v->type) + { + case jpiNull: + case jpiRoot: + case jpiCurrent: + case jpiAnyArray: + case jpiAnyKey: + case jpiType: + case jpiSize: + case jpiAbs: + case jpiFloor: + case jpiCeiling: + case jpiDouble: + case jpiKeyValue: + case jpiLast: + break; + case jpiKey: + case jpiString: + case jpiVariable: + read_int32(v->content.value.datalen, base, pos); + /* follow next */ + case jpiNumeric: + case jpiBool: + v->content.value.data = base + pos; + break; + case jpiAnd: + case jpiOr: + case jpiAdd: + case jpiSub: + case jpiMul: + case jpiDiv: + case jpiMod: + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiStartsWith: + case jpiDatetime: + read_int32(v->content.args.left, base, pos); + read_int32(v->content.args.right, base, pos); + break; + case jpiLikeRegex: + read_int32(v->content.like_regex.flags, base, pos); + read_int32(v->content.like_regex.expr, base, pos); + read_int32(v->content.like_regex.patternlen, base, pos); + v->content.like_regex.pattern = base + pos; + break; + case jpiNot: + case jpiExists: + case jpiIsUnknown: + case jpiPlus: + case jpiMinus: + case jpiFilter: + read_int32(v->content.arg, base, pos); + break; + case jpiIndexArray: + read_int32(v->content.array.nelems, base, pos); + read_int32_n(v->content.array.elems, base, pos, + v->content.array.nelems * 2); + break; + case jpiAny: + read_int32(v->content.anybounds.first, base, pos); + read_int32(v->content.anybounds.last, base, pos); + break; + default: + elog(ERROR, "Unknown jsonpath item type: %d", v->type); + } +} + +void +jspGetArg(JsonPathItem *v, JsonPathItem *a) +{ + Assert( + v->type == jpiFilter || + v->type == jpiNot || + v->type == jpiIsUnknown || + v->type == jpiExists || + v->type == jpiPlus || + v->type == jpiMinus + ); + + jspInitByBuffer(a, v->base, v->content.arg); +} + +bool +jspGetNext(JsonPathItem *v, JsonPathItem *a) +{ + if (jspHasNext(v)) + { + Assert( + v->type == jpiString || + v->type == jpiNumeric || + v->type == jpiBool || + v->type == jpiNull || + v->type == jpiKey || + v->type == jpiAny || + v->type == jpiAnyArray || + 
v->type == jpiAnyKey || + v->type == jpiIndexArray || + v->type == jpiFilter || + v->type == jpiCurrent || + v->type == jpiExists || + v->type == jpiRoot || + v->type == jpiVariable || + v->type == jpiLast || + v->type == jpiAdd || + v->type == jpiSub || + v->type == jpiMul || + v->type == jpiDiv || + v->type == jpiMod || + v->type == jpiPlus || + v->type == jpiMinus || + v->type == jpiEqual || + v->type == jpiNotEqual || + v->type == jpiGreater || + v->type == jpiGreaterOrEqual || + v->type == jpiLess || + v->type == jpiLessOrEqual || + v->type == jpiAnd || + v->type == jpiOr || + v->type == jpiNot || + v->type == jpiIsUnknown || + v->type == jpiType || + v->type == jpiSize || + v->type == jpiAbs || + v->type == jpiFloor || + v->type == jpiCeiling || + v->type == jpiDouble || + v->type == jpiDatetime || + v->type == jpiKeyValue || + v->type == jpiStartsWith + ); + + if (a) + jspInitByBuffer(a, v->base, v->nextPos); + return true; + } + + return false; +} + +void +jspGetLeftArg(JsonPathItem *v, JsonPathItem *a) +{ + Assert( + v->type == jpiAnd || + v->type == jpiOr || + v->type == jpiEqual || + v->type == jpiNotEqual || + v->type == jpiLess || + v->type == jpiGreater || + v->type == jpiLessOrEqual || + v->type == jpiGreaterOrEqual || + v->type == jpiAdd || + v->type == jpiSub || + v->type == jpiMul || + v->type == jpiDiv || + v->type == jpiMod || + v->type == jpiDatetime || + v->type == jpiStartsWith + ); + + jspInitByBuffer(a, v->base, v->content.args.left); +} + +void +jspGetRightArg(JsonPathItem *v, JsonPathItem *a) +{ + Assert( + v->type == jpiAnd || + v->type == jpiOr || + v->type == jpiEqual || + v->type == jpiNotEqual || + v->type == jpiLess || + v->type == jpiGreater || + v->type == jpiLessOrEqual || + v->type == jpiGreaterOrEqual || + v->type == jpiAdd || + v->type == jpiSub || + v->type == jpiMul || + v->type == jpiDiv || + v->type == jpiMod || + v->type == jpiDatetime || + v->type == jpiStartsWith + ); + + jspInitByBuffer(a, v->base, v->content.args.right); +} + +bool +jspGetBool(JsonPathItem *v) +{ + Assert(v->type == jpiBool); + + return (bool)*v->content.value.data; +} + +Numeric +jspGetNumeric(JsonPathItem *v) +{ + Assert(v->type == jpiNumeric); + + return (Numeric)v->content.value.data; +} + +char* +jspGetString(JsonPathItem *v, int32 *len) +{ + Assert( + v->type == jpiKey || + v->type == jpiString || + v->type == jpiVariable + ); + + if (len) + *len = v->content.value.datalen; + return v->content.value.data; +} + +bool +jspGetArraySubscript(JsonPathItem *v, JsonPathItem *from, JsonPathItem *to, + int i) +{ + Assert(v->type == jpiIndexArray); + + jspInitByBuffer(from, v->base, v->content.array.elems[i].from); + + if (!v->content.array.elems[i].to) + return false; + + jspInitByBuffer(to, v->base, v->content.array.elems[i].to); + + return true; +} diff --git a/src/backend/utils/adt/jsonpath_exec.c b/src/backend/utils/adt/jsonpath_exec.c new file mode 100644 index 0000000000..4a7c9a3c74 --- /dev/null +++ b/src/backend/utils/adt/jsonpath_exec.c @@ -0,0 +1,2832 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_exec.c + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/backend/utils/adt/jsonpath_exec.c + * + *------------------------------------------------------------------------- + */ + +#include "postgres.h" +#include "funcapi.h" +#include "miscadmin.h" +#include "catalog/pg_collation.h" +#include "catalog/pg_type.h" +#include "lib/stringinfo.h" +#include "regex/regex.h" +#include 
"utils/builtins.h" +#include "utils/datum.h" +#include "utils/float.h" +#include "utils/formatting.h" +#include "utils/json.h" +#include "utils/jsonpath.h" +#include "utils/varlena.h" + +#ifdef JSONPATH_JSON_C +#define JSONXOID JSONOID +#else +#define JSONXOID JSONBOID + +/* Special pseudo-ErrorData with zero sqlerrcode for existence queries. */ +ErrorData jperNotFound[1]; +#endif + +typedef struct JsonBaseObjectInfo +{ + JsonbContainer *jbc; + int id; +} JsonBaseObjectInfo; + +typedef struct JsonItemStackEntry +{ + JsonbValue *item; + struct JsonItemStackEntry *parent; +} JsonItemStackEntry; + +typedef JsonItemStackEntry *JsonItemStack; + +typedef struct JsonPathExecContext +{ + List *vars; + JsonbValue *root; /* for $ evaluation */ + JsonItemStack stack; /* for @N evaluation */ + JsonBaseObjectInfo baseObject; /* for .keyvalue().id evaluation */ + int generatedObjectId; + int innermostArraySize; /* for LAST array index evaluation */ + bool laxMode; + bool ignoreStructuralErrors; +} JsonPathExecContext; + +/* strict/lax flags is decomposed into four [un]wrap/error flags */ +#define jspStrictAbsenseOfErrors(cxt) (!(cxt)->laxMode) +#define jspAutoUnwrap(cxt) ((cxt)->laxMode) +#define jspAutoWrap(cxt) ((cxt)->laxMode) +#define jspIgnoreStructuralErrors(cxt) ((cxt)->ignoreStructuralErrors) + +typedef struct JsonValueListIterator +{ + ListCell *lcell; +} JsonValueListIterator; + +#define JsonValueListIteratorEnd ((ListCell *) -1) + +static inline JsonPathExecResult recursiveExecute(JsonPathExecContext *cxt, + JsonPathItem *jsp, JsonbValue *jb, + JsonValueList *found); + +static inline JsonPathExecResult recursiveExecuteNested(JsonPathExecContext *cxt, + JsonPathItem *jsp, JsonbValue *jb, + JsonValueList *found); + +static inline JsonPathExecResult recursiveExecuteUnwrap(JsonPathExecContext *cxt, + JsonPathItem *jsp, JsonbValue *jb, JsonValueList *found); + +static inline JsonbValue *wrapItemsInArray(const JsonValueList *items); + + +static inline void +JsonValueListAppend(JsonValueList *jvl, JsonbValue *jbv) +{ + if (jvl->singleton) + { + jvl->list = list_make2(jvl->singleton, jbv); + jvl->singleton = NULL; + } + else if (!jvl->list) + jvl->singleton = jbv; + else + jvl->list = lappend(jvl->list, jbv); +} + +static inline int +JsonValueListLength(const JsonValueList *jvl) +{ + return jvl->singleton ? 1 : list_length(jvl->list); +} + +static inline bool +JsonValueListIsEmpty(JsonValueList *jvl) +{ + return !jvl->singleton && list_length(jvl->list) <= 0; +} + +static inline JsonbValue * +JsonValueListHead(JsonValueList *jvl) +{ + return jvl->singleton ? jvl->singleton : linitial(jvl->list); +} + +static inline List * +JsonValueListGetList(JsonValueList *jvl) +{ + if (jvl->singleton) + return list_make1(jvl->singleton); + + return jvl->list; +} + +/* + * Get the next item from the sequence advancing iterator. + */ +static inline JsonbValue * +JsonValueListNext(const JsonValueList *jvl, JsonValueListIterator *it) +{ + if (it->lcell == JsonValueListIteratorEnd) + return NULL; + + if (it->lcell) + it->lcell = lnext(it->lcell); + else + { + if (jvl->singleton) + { + it->lcell = JsonValueListIteratorEnd; + return jvl->singleton; + } + + it->lcell = list_head(jvl->list); + } + + if (!it->lcell) + { + it->lcell = JsonValueListIteratorEnd; + return NULL; + } + + return lfirst(it->lcell); +} + +#ifndef JSONPATH_JSON_C +/* + * Initialize a binary JsonbValue with the given jsonb container. 
+ */ +static inline JsonbValue * +JsonbInitBinary(JsonbValue *jbv, Jsonb *jb) +{ + jbv->type = jbvBinary; + jbv->val.binary.data = &jb->root; + jbv->val.binary.len = VARSIZE_ANY_EXHDR(jb); + + return jbv; +} +#endif + +/* + * Transform a JsonbValue into a binary JsonbValue by encoding it to a + * binary jsonb container. + */ +static inline JsonbValue * +JsonbWrapInBinary(JsonbValue *jbv, JsonbValue *out) +{ + Jsonb *jb = JsonbValueToJsonb(jbv); + + if (!out) + out = palloc(sizeof(*out)); + + return JsonbInitBinary(out, jb); +} + +static inline void +pushJsonItem(JsonItemStack *stack, JsonItemStackEntry *entry, JsonbValue *item) +{ + entry->item = item; + entry->parent = *stack; + *stack = entry; +} + +static inline void +popJsonItem(JsonItemStack *stack) +{ + *stack = (*stack)->parent; +} + +/********************Execute functions for JsonPath***************************/ + +/* + * Find value of jsonpath variable in a list of passing params + */ +static int +computeJsonPathVariable(JsonPathItem *variable, List *vars, JsonbValue *value) +{ + ListCell *cell; + JsonPathVariable *var = NULL; + bool isNull; + Datum computedValue; + char *varName; + int varNameLength; + int varId = 1; + + Assert(variable->type == jpiVariable); + varName = jspGetString(variable, &varNameLength); + + foreach(cell, vars) + { + var = (JsonPathVariable *) lfirst(cell); + + if (varNameLength == VARSIZE_ANY_EXHDR(var->varName) && + !strncmp(varName, VARDATA_ANY(var->varName), varNameLength)) + break; + + var = NULL; + varId++; + } + + if (var == NULL) + ereport(ERROR, + (errcode(ERRCODE_UNDEFINED_OBJECT), + errmsg("could not find jsonpath variable '%s'", + pnstrdup(varName, varNameLength)))); + + computedValue = var->cb(var->cb_arg, &isNull); + + if (isNull) + { + value->type = jbvNull; + return varId; + } + + switch (var->typid) + { + case BOOLOID: + value->type = jbvBool; + value->val.boolean = DatumGetBool(computedValue); + break; + case NUMERICOID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(computedValue); + break; + break; + case INT2OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + int2_numeric, computedValue)); + break; + case INT4OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + int4_numeric, computedValue)); + break; + case INT8OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + int8_numeric, computedValue)); + break; + case FLOAT4OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + float4_numeric, computedValue)); + break; + case FLOAT8OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + float4_numeric, computedValue)); + break; + case TEXTOID: + case VARCHAROID: + value->type = jbvString; + value->val.string.val = VARDATA_ANY(computedValue); + value->val.string.len = VARSIZE_ANY_EXHDR(computedValue); + break; + case DATEOID: + case TIMEOID: + case TIMETZOID: + case TIMESTAMPOID: + case TIMESTAMPTZOID: + value->type = jbvDatetime; + value->val.datetime.typid = var->typid; + value->val.datetime.typmod = var->typmod; + value->val.datetime.tz = 0; + value->val.datetime.value = computedValue; + break; + case JSONXOID: + { + Jsonb *jb = DatumGetJsonbP(computedValue); + + if (JB_ROOT_IS_SCALAR(jb)) + JsonbExtractScalar(&jb->root, value); + else + JsonbInitBinary(value, jb); + } + break; + case (Oid) -1: /* raw JsonbValue */ + *value = *(JsonbValue *) 
DatumGetPointer(computedValue); + break; + default: + ereport(ERROR, + (errcode(ERRCODE_INVALID_PARAMETER_VALUE), + errmsg("only bool, numeric and text types could be casted to supported jsonpath types"))); + } + + return varId; +} + +/* + * Convert jsonpath's scalar or variable node to actual jsonb value + */ +static int +computeJsonPathItem(JsonPathExecContext *cxt, JsonPathItem *item, JsonbValue *value) +{ + switch(item->type) + { + case jpiNull: + value->type = jbvNull; + break; + case jpiBool: + value->type = jbvBool; + value->val.boolean = jspGetBool(item); + break; + case jpiNumeric: + value->type = jbvNumeric; + value->val.numeric = jspGetNumeric(item); + break; + case jpiString: + value->type = jbvString; + value->val.string.val = jspGetString(item, &value->val.string.len); + break; + case jpiVariable: + return computeJsonPathVariable(item, cxt->vars, value); + default: + elog(ERROR, "Wrong type"); + } + + return 0; +} + + +/* + * Returns jbv* type of of JsonbValue. Note, it never returns + * jbvBinary as is - jbvBinary is used as mark of store naked + * scalar value. To improve readability it defines jbvScalar + * as alias to jbvBinary + */ +#define jbvScalar jbvBinary +static inline int +JsonbType(JsonbValue *jb) +{ + int type = jb->type; + + if (jb->type == jbvBinary) + { + JsonbContainer *jbc = (void *) jb->val.binary.data; + + if (JsonContainerIsScalar(jbc)) + type = jbvScalar; + else if (JsonContainerIsObject(jbc)) + type = jbvObject; + else if (JsonContainerIsArray(jbc)) + type = jbvArray; + else + elog(ERROR, "Unknown container type: 0x%08x", jbc->header); + } + + return type; +} + +/* + * Get the type name of a SQL/JSON item. + */ +static const char * +JsonbTypeName(JsonbValue *jb) +{ + JsonbValue jbvbuf; + + if (jb->type == jbvBinary) + { + JsonbContainer *jbc = (void *) jb->val.binary.data; + + if (JsonContainerIsScalar(jbc)) + jb = JsonbExtractScalar(jbc, &jbvbuf); + else if (JsonContainerIsArray(jbc)) + return "array"; + else if (JsonContainerIsObject(jbc)) + return "object"; + else + elog(ERROR, "Unknown container type: 0x%08x", jbc->header); + } + + switch (jb->type) + { + case jbvObject: + return "object"; + case jbvArray: + return "array"; + case jbvNumeric: + return "number"; + case jbvString: + return "string"; + case jbvBool: + return "boolean"; + case jbvNull: + return "null"; + case jbvDatetime: + switch (jb->val.datetime.typid) + { + case DATEOID: + return "date"; + case TIMEOID: + return "time without time zone"; + case TIMETZOID: + return "time with time zone"; + case TIMESTAMPOID: + return "timestamp without time zone"; + case TIMESTAMPTZOID: + return "timestamp with time zone"; + default: + elog(ERROR, "unknown jsonb value datetime type oid %d", + jb->val.datetime.typid); + } + return "unknown"; + default: + elog(ERROR, "Unknown jsonb value type: %d", jb->type); + return "unknown"; + } +} + +/* + * Returns the size of an array item, or -1 if item is not an array. + */ +static int +JsonbArraySize(JsonbValue *jb) +{ + if (jb->type == jbvArray) + return jb->val.array.nElems; + + if (jb->type == jbvBinary) + { + JsonbContainer *jbc = (void *) jb->val.binary.data; + + if (JsonContainerIsArray(jbc) && !JsonContainerIsScalar(jbc)) + return JsonContainerSize(jbc); + } + + return -1; +} + +/* + * Compare two numerics. + */ +static int +compareNumeric(Numeric a, Numeric b) +{ + return DatumGetInt32( + DirectFunctionCall2( + numeric_cmp, + PointerGetDatum(a), + PointerGetDatum(b) + ) + ); +} + +/* + * Cross-type comparison of two datetime SQL/JSON items. 
If items are + * uncomparable, 'error' flag is set. + */ +static int +compareDatetime(Datum val1, Oid typid1, Datum val2, Oid typid2, bool *error) +{ + PGFunction cmpfunc = NULL; + + switch (typid1) + { + case DATEOID: + switch (typid2) + { + case DATEOID: + cmpfunc = date_cmp; + break; + case TIMESTAMPOID: + cmpfunc = date_cmp_timestamp; + break; + case TIMESTAMPTZOID: + cmpfunc = date_cmp_timestamptz; + break; + case TIMEOID: + case TIMETZOID: + *error = true; + return 0; + } + break; + + case TIMEOID: + switch (typid2) + { + case TIMEOID: + cmpfunc = time_cmp; + break; + case TIMETZOID: + val1 = DirectFunctionCall1(time_timetz, val1); + cmpfunc = timetz_cmp; + break; + case DATEOID: + case TIMESTAMPOID: + case TIMESTAMPTZOID: + *error = true; + return 0; + } + break; + + case TIMETZOID: + switch (typid2) + { + case TIMEOID: + val2 = DirectFunctionCall1(time_timetz, val2); + cmpfunc = timetz_cmp; + break; + case TIMETZOID: + cmpfunc = timetz_cmp; + break; + case DATEOID: + case TIMESTAMPOID: + case TIMESTAMPTZOID: + *error = true; + return 0; + } + break; + + case TIMESTAMPOID: + switch (typid2) + { + case DATEOID: + cmpfunc = timestamp_cmp_date; + break; + case TIMESTAMPOID: + cmpfunc = timestamp_cmp; + break; + case TIMESTAMPTZOID: + cmpfunc = timestamp_cmp_timestamptz; + break; + case TIMEOID: + case TIMETZOID: + *error = true; + return 0; + } + break; + + case TIMESTAMPTZOID: + switch (typid2) + { + case DATEOID: + cmpfunc = timestamptz_cmp_date; + break; + case TIMESTAMPOID: + cmpfunc = timestamptz_cmp_timestamp; + break; + case TIMESTAMPTZOID: + cmpfunc = timestamp_cmp; + break; + case TIMEOID: + case TIMETZOID: + *error = true; + return 0; + } + break; + + default: + elog(ERROR, "unknown SQL/JSON datetime type oid: %d", typid1); + } + + if (!cmpfunc) + elog(ERROR, "unknown SQL/JSON datetime type oid: %d", typid2); + + *error = false; + + return DatumGetInt32(DirectFunctionCall2(cmpfunc, val1, val2)); +} + +/* + * Check equality of two SLQ/JSON items of the same type. + */ +static inline JsonPathBool +checkEquality(JsonbValue *jb1, JsonbValue *jb2, bool not) +{ + bool eq = false; + + if (jb1->type != jb2->type) + { + if (jb1->type == jbvNull || jb2->type == jbvNull) + return not ? jpbTrue : jpbFalse; + + return jpbUnknown; + } + + switch (jb1->type) + { + case jbvNull: + eq = true; + break; + case jbvString: + eq = (jb1->val.string.len == jb2->val.string.len && + memcmp(jb2->val.string.val, jb1->val.string.val, + jb1->val.string.len) == 0); + break; + case jbvBool: + eq = (jb2->val.boolean == jb1->val.boolean); + break; + case jbvNumeric: + eq = (compareNumeric(jb1->val.numeric, jb2->val.numeric) == 0); + break; + case jbvDatetime: + { + bool error; + + eq = compareDatetime(jb1->val.datetime.value, + jb1->val.datetime.typid, + jb2->val.datetime.value, + jb2->val.datetime.typid, + &error) == 0; + + if (error) + return jpbUnknown; + + break; + } + + case jbvBinary: + case jbvObject: + case jbvArray: + return jpbUnknown; + + default: + elog(ERROR, "Unknown jsonb value type %d", jb1->type); + } + + return (not ^ eq) ? jpbTrue : jpbFalse; +} + +/* + * Compare two SLQ/JSON items using comparison operation 'op'. 
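+ *
+ * For example, comparing a numeric item with a string item yields
+ * jpbUnknown because non-null items of different types are not
+ * order-comparable, while comparing a JSON null with any non-null item
+ * yields jpbFalse.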
+ */ +static JsonPathBool +makeCompare(int32 op, JsonbValue *jb1, JsonbValue *jb2) +{ + int cmp; + bool res; + + if (jb1->type != jb2->type) + { + if (jb1->type != jbvNull && jb2->type != jbvNull) + /* non-null items of different types are not order-comparable */ + return jpbUnknown; + + if (jb1->type != jbvNull || jb2->type != jbvNull) + /* comparison of nulls to non-nulls returns always false */ + return jpbFalse; + + /* both values are JSON nulls */ + } + + switch (jb1->type) + { + case jbvNull: + cmp = 0; + break; + case jbvNumeric: + cmp = compareNumeric(jb1->val.numeric, jb2->val.numeric); + break; + case jbvString: + cmp = varstr_cmp(jb1->val.string.val, jb1->val.string.len, + jb2->val.string.val, jb2->val.string.len, + DEFAULT_COLLATION_OID); + break; + case jbvDatetime: + { + bool error; + + cmp = compareDatetime(jb1->val.datetime.value, + jb1->val.datetime.typid, + jb2->val.datetime.value, + jb2->val.datetime.typid, + &error); + + if (error) + return jpbUnknown; + } + break; + default: + return jpbUnknown; + } + + switch (op) + { + case jpiEqual: + res = (cmp == 0); + break; + case jpiNotEqual: + res = (cmp != 0); + break; + case jpiLess: + res = (cmp < 0); + break; + case jpiGreater: + res = (cmp > 0); + break; + case jpiLessOrEqual: + res = (cmp <= 0); + break; + case jpiGreaterOrEqual: + res = (cmp >= 0); + break; + default: + elog(ERROR, "Unknown operation"); + return jpbUnknown; + } + + return res ? jpbTrue : jpbFalse; +} + +static JsonbValue * +copyJsonbValue(JsonbValue *src) +{ + JsonbValue *dst = palloc(sizeof(*dst)); + + *dst = *src; + + return dst; +} + +/* + * Execute next jsonpath item if it does exist. + */ +static inline JsonPathExecResult +recursiveExecuteNext(JsonPathExecContext *cxt, + JsonPathItem *cur, JsonPathItem *next, + JsonbValue *v, JsonValueList *found, bool copy) +{ + JsonPathItem elem; + bool hasNext; + + if (!cur) + hasNext = next != NULL; + else if (next) + hasNext = jspHasNext(cur); + else + { + next = &elem; + hasNext = jspGetNext(cur, next); + } + + if (hasNext) + return recursiveExecute(cxt, next, v, found); + + if (found) + JsonValueListAppend(found, copy ? copyJsonbValue(v) : v); + + return jperOk; +} + +/* + * Execute jsonpath expression and automatically unwrap each array item from + * the resulting sequence in lax mode. + */ +static inline JsonPathExecResult +recursiveExecuteAndUnwrap(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + if (jspAutoUnwrap(cxt)) + { + JsonValueList seq = { 0 }; + JsonValueListIterator it = { 0 }; + JsonPathExecResult res = recursiveExecute(cxt, jsp, jb, &seq); + JsonbValue *item; + + if (jperIsError(res)) + return res; + + while ((item = JsonValueListNext(&seq, &it))) + { + if (item->type == jbvArray) + { + JsonbValue *elem = item->val.array.elems; + JsonbValue *last = elem + item->val.array.nElems; + + for (; elem < last; elem++) + JsonValueListAppend(found, copyJsonbValue(elem)); + } + else if (item->type == jbvBinary && + JsonContainerIsArray(item->val.binary.data)) + { + JsonbValue elem; + JsonbIterator *it = JsonbIteratorInit(item->val.binary.data); + JsonbIteratorToken tok; + + while ((tok = JsonbIteratorNext(&it, &elem, true)) != WJB_DONE) + { + if (tok == WJB_ELEM) + JsonValueListAppend(found, copyJsonbValue(&elem)); + } + } + else + JsonValueListAppend(found, item); + } + + return jperOk; + } + + return recursiveExecute(cxt, jsp, jb, found); +} + +/* + * Execute comparison expression. 
True is returned only if found any pair of + * items from the left and right operand's sequences which is satisfying + * condition. In strict mode all pairs should be comparable, otherwise an error + * is returned. + */ +static JsonPathBool +executeComparison(JsonPathExecContext *cxt, JsonPathItem *jsp, JsonbValue *jb) +{ + JsonPathExecResult res; + JsonPathItem elem; + JsonValueList lseq = { 0 }; + JsonValueList rseq = { 0 }; + JsonValueListIterator lseqit = { 0 }; + JsonbValue *lval; + bool error = false; + bool found = false; + + jspGetLeftArg(jsp, &elem); + res = recursiveExecuteAndUnwrap(cxt, &elem, jb, &lseq); + if (jperIsError(res)) + return jperReplace(res, jpbUnknown); + + jspGetRightArg(jsp, &elem); + res = recursiveExecuteAndUnwrap(cxt, &elem, jb, &rseq); + if (jperIsError(res)) + return jperReplace(res, jpbUnknown); + + while ((lval = JsonValueListNext(&lseq, &lseqit))) + { + JsonValueListIterator rseqit = { 0 }; + JsonbValue *rval; + + while ((rval = JsonValueListNext(&rseq, &rseqit))) + { + JsonPathBool cmp; + + switch (jsp->type) + { + case jpiEqual: + cmp = checkEquality(lval, rval, false); + break; + case jpiNotEqual: + cmp = checkEquality(lval, rval, true); + break; + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + cmp = makeCompare(jsp->type, lval, rval); + break; + default: + elog(ERROR, "Unknown operation"); + cmp = jpbUnknown; + break; + } + + if (cmp == jpbTrue) + { + if (!jspStrictAbsenseOfErrors(cxt)) + return jpbTrue; + + found = true; + } + else if (cmp == jpbUnknown) + { + if (jspStrictAbsenseOfErrors(cxt)) + return jpbUnknown; + + error = true; + } + } + } + + if (found) /* possible only in strict mode */ + return jpbTrue; + + if (error) /* possible only in lax mode */ + return jpbUnknown; + + return jpbFalse; +} + +/* + * Execute binary arithemitc expression on singleton numeric operands. + * Array operands are automatically unwrapped in lax mode. 
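Seen from a distance, the comparison loop above is an existential quantifier over all (left, right) pairs with three-valued per-pair outcomes: a true pair makes the predicate true, an incomparable pair contributes "unknown". Strict mode lets an unknown pair dominate immediately, while lax mode ignores it unless no pair was true. A compact standalone sketch of that result folding, using an invented TriBool in place of JsonPathBool:

#include <stdio.h>

typedef enum { R_FALSE, R_TRUE, R_UNKNOWN } TriBool;

/*
 * Fold per-pair comparison outcomes into one predicate result.  In strict
 * mode any unknown pair makes the whole predicate unknown; in lax mode a
 * true pair wins immediately and unknown pairs only matter when nothing
 * compared true.
 */
static TriBool
fold_pairs(const TriBool *pairs, int n, int strict)
{
    int     found = 0;
    int     error = 0;

    for (int i = 0; i < n; i++)
    {
        if (pairs[i] == R_TRUE)
        {
            if (!strict)
                return R_TRUE;      /* lax mode can stop at the first hit */
            found = 1;
        }
        else if (pairs[i] == R_UNKNOWN)
        {
            if (strict)
                return R_UNKNOWN;   /* strict mode gives up immediately */
            error = 1;
        }
    }

    if (found)
        return R_TRUE;
    if (error)
        return R_UNKNOWN;
    return R_FALSE;
}

int
main(void)
{
    TriBool     pairs[] = {R_FALSE, R_UNKNOWN, R_TRUE};

    printf("lax:    %d\n", fold_pairs(pairs, 3, 0));   /* R_TRUE */
    printf("strict: %d\n", fold_pairs(pairs, 3, 1));   /* R_UNKNOWN */
    return 0;
}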
+ */ +static JsonPathExecResult +executeBinaryArithmExpr(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + JsonPathExecResult jper; + JsonPathItem elem; + JsonValueList lseq = { 0 }; + JsonValueList rseq = { 0 }; + JsonbValue *lval; + JsonbValue *rval; + JsonbValue lvalbuf; + JsonbValue rvalbuf; + Numeric (*func)(Numeric, Numeric, ErrorData **); + Numeric res; + bool hasNext; + ErrorData *edata; + + jspGetLeftArg(jsp, &elem); + + /* XXX by standard unwrapped only operands of multiplicative expressions */ + jper = recursiveExecuteAndUnwrap(cxt, &elem, jb, &lseq); + + if (jper == jperOk) + { + jspGetRightArg(jsp, &elem); + jper = recursiveExecuteAndUnwrap(cxt, &elem, jb, &rseq); /* XXX */ + } + + if (jper != jperOk || + JsonValueListLength(&lseq) != 1 || + JsonValueListLength(&rseq) != 1) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + lval = JsonValueListHead(&lseq); + + if (JsonbType(lval) == jbvScalar) + lval = JsonbExtractScalar(lval->val.binary.data, &lvalbuf); + + if (lval->type != jbvNumeric) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + rval = JsonValueListHead(&rseq); + + if (JsonbType(rval) == jbvScalar) + rval = JsonbExtractScalar(rval->val.binary.data, &rvalbuf); + + if (rval->type != jbvNumeric) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + hasNext = jspGetNext(jsp, &elem); + + if (!found && !hasNext) + return jperOk; + + switch (jsp->type) + { + case jpiAdd: + func = numeric_add_internal; + break; + case jpiSub: + func = numeric_sub_internal; + break; + case jpiMul: + func = numeric_mul_internal; + break; + case jpiDiv: + func = numeric_div_internal; + break; + case jpiMod: + func = numeric_mod_internal; + break; + default: + elog(ERROR, "unknown jsonpath arithmetic operation %d", jsp->type); + func = NULL; + break; + } + + edata = NULL; + res = func(lval->val.numeric, rval->val.numeric, &edata); + + if (edata) + return jperMakeErrorData(edata); + + lval = palloc(sizeof(*lval)); + lval->type = jbvNumeric; + lval->val.numeric = res; + + return recursiveExecuteNext(cxt, jsp, &elem, lval, found, false); +} + +/* + * Execute unary arithmetic expression for each numeric item in its operand's + * sequence. Array operand is automatically unwrapped in lax mode. 
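One design point in the function below is that the arithmetic goes through numeric_*_internal helpers that hand failures back through an ErrorData pointer instead of throwing, so e.g. a division by zero can be converted into a jsonpath error item rather than aborting the whole query. A toy standalone sketch of that error-out-parameter convention (ToyError is invented; the real code uses ErrorData):

#include <stdio.h>

/* Toy stand-in for ErrorData: just an error message, NULL means success */
typedef const char *ToyError;

/* Divide, reporting failure through *err instead of aborting */
static double
toy_div(double a, double b, ToyError *err)
{
    if (b == 0.0)
    {
        *err = "division by zero";
        return 0.0;
    }
    *err = NULL;
    return a / b;
}

int
main(void)
{
    ToyError    err = NULL;
    double      r = toy_div(1.0, 0.0, &err);

    if (err)
        printf("caught soft error: %s\n", err);  /* mapped to a jperMakeErrorData()-style result */
    else
        printf("%g\n", r);
    return 0;
}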
+ */ +static JsonPathExecResult +executeUnaryArithmExpr(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + JsonPathExecResult jper; + JsonPathExecResult jper2; + JsonPathItem elem; + JsonValueList seq = { 0 }; + JsonValueListIterator it = { 0 }; + JsonbValue *val; + bool hasNext; + + jspGetArg(jsp, &elem); + jper = recursiveExecuteAndUnwrap(cxt, &elem, jb, &seq); + + if (jperIsError(jper)) + return jperReplace(jper, jperMakeError(ERRCODE_JSON_NUMBER_NOT_FOUND)); + + jper = jperNotFound; + + hasNext = jspGetNext(jsp, &elem); + + while ((val = JsonValueListNext(&seq, &it))) + { + if (JsonbType(val) == jbvScalar) + JsonbExtractScalar(val->val.binary.data, val); + + if (val->type == jbvNumeric) + { + if (!found && !hasNext) + return jperOk; + } + else if (!found && !hasNext) + continue; /* skip non-numerics processing */ + + if (val->type != jbvNumeric) + return jperMakeError(ERRCODE_JSON_NUMBER_NOT_FOUND); + + switch (jsp->type) + { + case jpiPlus: + break; + case jpiMinus: + val->val.numeric = + DatumGetNumeric(DirectFunctionCall1( + numeric_uminus, NumericGetDatum(val->val.numeric))); + break; + default: + elog(ERROR, "unknown jsonpath arithmetic operation %d", jsp->type); + } + + jper2 = recursiveExecuteNext(cxt, jsp, &elem, val, found, false); + + if (jperIsError(jper2)) + return jper2; + + if (jper2 == jperOk) + { + if (!found) + return jperOk; + jper = jperOk; + } + } + + return jper; +} + +/* + * implements jpiAny node (** operator) + */ +static JsonPathExecResult +recursiveAny(JsonPathExecContext *cxt, JsonPathItem *jsp, JsonbValue *jb, + JsonValueList *found, uint32 level, uint32 first, uint32 last) +{ + JsonPathExecResult res = jperNotFound; + JsonbIterator *it; + int32 r; + JsonbValue v; + + check_stack_depth(); + + if (level > last) + return res; + + it = JsonbIteratorInit(jb->val.binary.data); + + /* + * Recursively iterate over jsonb objects/arrays + */ + while((r = JsonbIteratorNext(&it, &v, true)) != WJB_DONE) + { + if (r == WJB_KEY) + { + r = JsonbIteratorNext(&it, &v, true); + Assert(r == WJB_VALUE); + } + + if (r == WJB_VALUE || r == WJB_ELEM) + { + + if (level >= first || + (first == PG_UINT32_MAX && last == PG_UINT32_MAX && + v.type != jbvBinary)) /* leaves only requested */ + { + /* check expression */ + bool ignoreStructuralErrors = cxt->ignoreStructuralErrors; + + cxt->ignoreStructuralErrors = true; + res = recursiveExecuteNext(cxt, NULL, jsp, &v, found, true); + cxt->ignoreStructuralErrors = ignoreStructuralErrors; + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + + if (level < last && v.type == jbvBinary) + { + res = recursiveAny(cxt, jsp, &v, found, level + 1, first, last); + + if (jperIsError(res)) + break; + + if (res == jperOk && found == NULL) + break; + } + } + } + + return res; +} + +/* + * Execute array subscript expression and convert resulting numeric item to the + * integer type with truncation. 
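In other words a fractional subscript such as $[1.9] addresses element 1: the numeric value is truncated toward zero, not rounded. A tiny standalone equivalent of that conversion using the C library's trunc() (the helper name is invented):

#include <math.h>
#include <stdio.h>

/* Truncate a numeric subscript toward zero, as numeric_trunc(..., 0) does */
static int
subscript_to_index(double subscript)
{
    return (int) trunc(subscript);
}

int
main(void)
{
    printf("%d\n", subscript_to_index(1.9));    /* 1 */
    printf("%d\n", subscript_to_index(-0.5));   /* 0 */
    return 0;
}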
+ */ +static JsonPathExecResult +getArrayIndex(JsonPathExecContext *cxt, JsonPathItem *jsp, JsonbValue *jb, + int32 *index) +{ + JsonbValue *jbv; + JsonValueList found = { 0 }; + JsonbValue tmp; + JsonPathExecResult res = recursiveExecute(cxt, jsp, jb, &found); + + if (jperIsError(res)) + return res; + + if (JsonValueListLength(&found) != 1) + return jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + + jbv = JsonValueListHead(&found); + + if (JsonbType(jbv) == jbvScalar) + jbv = JsonbExtractScalar(jbv->val.binary.data, &tmp); + + if (jbv->type != jbvNumeric) + return jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + + *index = DatumGetInt32(DirectFunctionCall1(numeric_int4, + DirectFunctionCall2(numeric_trunc, + NumericGetDatum(jbv->val.numeric), + Int32GetDatum(0)))); + + return jperOk; +} + +static JsonPathBool +executeStartsWithPredicate(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb) +{ + JsonPathExecResult res; + JsonPathItem elem; + JsonValueList lseq = { 0 }; + JsonValueList rseq = { 0 }; + JsonValueListIterator lit = { 0 }; + JsonbValue *whole; + JsonbValue *initial; + JsonbValue initialbuf; + bool error = false; + bool found = false; + + jspGetRightArg(jsp, &elem); + res = recursiveExecute(cxt, &elem, jb, &rseq); + if (jperIsError(res)) + return jperReplace(res, jpbUnknown); + + if (JsonValueListLength(&rseq) != 1) + return jpbUnknown; + + initial = JsonValueListHead(&rseq); + + if (JsonbType(initial) == jbvScalar) + initial = JsonbExtractScalar(initial->val.binary.data, &initialbuf); + + if (initial->type != jbvString) + return jpbUnknown; + + jspGetLeftArg(jsp, &elem); + res = recursiveExecuteAndUnwrap(cxt, &elem, jb, &lseq); + if (jperIsError(res)) + return jperReplace(res, jpbUnknown); + + while ((whole = JsonValueListNext(&lseq, &lit))) + { + JsonbValue wholebuf; + + if (JsonbType(whole) == jbvScalar) + whole = JsonbExtractScalar(whole->val.binary.data, &wholebuf); + + if (whole->type != jbvString) + { + if (jspStrictAbsenseOfErrors(cxt)) + return jpbUnknown; + + error = true; + } + else if (whole->val.string.len >= initial->val.string.len && + !memcmp(whole->val.string.val, + initial->val.string.val, + initial->val.string.len)) + { + if (!jspStrictAbsenseOfErrors(cxt)) + return jpbTrue; + + found = true; + } + } + + if (found) /* possible only in strict mode */ + return jpbTrue; + + if (error) /* possible only in lax mode */ + return jpbUnknown; + + return jpbFalse; +} + +static JsonPathBool +executeLikeRegexPredicate(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb) +{ + JsonPathExecResult res; + JsonPathItem elem; + JsonValueList seq = { 0 }; + JsonValueListIterator it = { 0 }; + JsonbValue *str; + text *regex; + uint32 flags = jsp->content.like_regex.flags; + int cflags = REG_ADVANCED; + bool error = false; + bool found = false; + + if (flags & JSP_REGEX_ICASE) + cflags |= REG_ICASE; + if (flags & JSP_REGEX_MLINE) + cflags |= REG_NEWLINE; + if (flags & JSP_REGEX_SLINE) + cflags &= ~REG_NEWLINE; + if (flags & JSP_REGEX_WSPACE) + cflags |= REG_EXPANDED; + + regex = cstring_to_text_with_len(jsp->content.like_regex.pattern, + jsp->content.like_regex.patternlen); + + jspInitByBuffer(&elem, jsp->base, jsp->content.like_regex.expr); + res = recursiveExecuteAndUnwrap(cxt, &elem, jb, &seq); + if (jperIsError(res)) + return jperReplace(res, jpbUnknown); + + while ((str = JsonValueListNext(&seq, &it))) + { + JsonbValue strbuf; + + if (JsonbType(str) == jbvScalar) + str = JsonbExtractScalar(str->val.binary.data, &strbuf); + + if (str->type != jbvString) + { + 
if (jspStrictAbsenseOfErrors(cxt)) + return jpbUnknown; + + error = true; + } + else if (RE_compile_and_execute(regex, str->val.string.val, + str->val.string.len, cflags, + DEFAULT_COLLATION_OID, 0, NULL)) + { + if (!jspStrictAbsenseOfErrors(cxt)) + return jpbTrue; + + found = true; + } + } + + if (found) /* possible only in strict mode */ + return jpbTrue; + + if (error) /* possible only in lax mode */ + return jpbUnknown; + + return jpbFalse; +} + +/* + * Try to parse datetime text with the specified datetime template and + * default time-zone 'tzname'. + * Returns 'value' datum, its type 'typid' and 'typmod'. + */ +static bool +tryToParseDatetime(const char *fmt, int fmtlen, text *datetime, char *tzname, + bool strict, Datum *value, Oid *typid, int32 *typmod, int *tz) +{ + MemoryContext mcxt = CurrentMemoryContext; + bool ok = false; + + PG_TRY(); + { + *value = to_datetime(datetime, fmt, fmtlen, tzname, strict, + typid, typmod, tz); + ok = true; + } + PG_CATCH(); + { + if (ERRCODE_TO_CATEGORY(geterrcode()) != ERRCODE_DATA_EXCEPTION) + PG_RE_THROW(); + + FlushErrorState(); + MemoryContextSwitchTo(mcxt); + } + PG_END_TRY(); + + return ok; +} + +/* + * Convert boolean execution status 'res' to a boolean JSON item and execute + * next jsonpath. + */ +static inline JsonPathExecResult +appendBoolResult(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonValueList *found, JsonPathBool res) +{ + JsonPathItem next; + JsonbValue jbv; + bool hasNext = jspGetNext(jsp, &next); + + if (!found && !hasNext) + return jperOk; /* found singleton boolean value */ + + if (res == jpbUnknown) + { + jbv.type = jbvNull; + } + else + { + jbv.type = jbvBool; + jbv.val.boolean = res == jpbTrue; + } + + return recursiveExecuteNext(cxt, jsp, &next, &jbv, found, true); +} + +/* Execute boolean-valued jsonpath expression. */ +static inline JsonPathBool +recursiveExecuteBool(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, bool canHaveNext) +{ + JsonPathItem arg; + JsonPathBool res; + JsonPathBool res2; + + if (!canHaveNext && jspHasNext(jsp)) + elog(ERROR, "boolean jsonpath item can not have next item"); + + switch (jsp->type) + { + case jpiAnd: + jspGetLeftArg(jsp, &arg); + res = recursiveExecuteBool(cxt, &arg, jb, false); + + if (res == jpbFalse) + return jpbFalse; + + /* + * SQL/JSON says that we should check second arg + * in case of jperError + */ + + jspGetRightArg(jsp, &arg); + res2 = recursiveExecuteBool(cxt, &arg, jb, false); + + return res2 == jpbTrue ? res : res2; + + case jpiOr: + jspGetLeftArg(jsp, &arg); + res = recursiveExecuteBool(cxt, &arg, jb, false); + + if (res == jpbTrue) + return jpbTrue; + + jspGetRightArg(jsp, &arg); + res2 = recursiveExecuteBool(cxt, &arg, jb, false); + + return res2 == jpbFalse ? res : res2; + + case jpiNot: + jspGetArg(jsp, &arg); + + res = recursiveExecuteBool(cxt, &arg, jb, false); + + if (res == jpbUnknown) + return jpbUnknown; + + return res == jpbTrue ? jpbFalse : jpbTrue; + + case jpiIsUnknown: + jspGetArg(jsp, &arg); + res = recursiveExecuteBool(cxt, &arg, jb, false); + return res == jpbUnknown ? 
jpbTrue : jpbFalse; + + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + return executeComparison(cxt, jsp, jb); + + case jpiStartsWith: + return executeStartsWithPredicate(cxt, jsp, jb); + + case jpiLikeRegex: + return executeLikeRegexPredicate(cxt, jsp, jb); + + case jpiExists: + jspGetArg(jsp, &arg); + + if (jspStrictAbsenseOfErrors(cxt)) + { + /* + * In strict mode we must get a complete list of values + * to check that there are no errors at all. + */ + JsonValueList vals = { 0 }; + JsonPathExecResult res = + recursiveExecute(cxt, &arg, jb, &vals); + + if (jperIsError(res)) + return jperReplace(res, jpbUnknown); + + return JsonValueListIsEmpty(&vals) ? jpbFalse : jpbTrue; + } + else + { + JsonPathExecResult res = recursiveExecute(cxt, &arg, jb, NULL); + + if (jperIsError(res)) + return jperReplace(res, jpbUnknown); + + return res == jperOk ? jpbTrue : jpbFalse; + } + + default: + elog(ERROR, "invalid boolean jsonpath item type: %d", jsp->type); + return jpbUnknown; + } +} + +static inline JsonPathExecResult +recursiveExecuteNested(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + JsonItemStackEntry current; + JsonPathExecResult res; + + pushJsonItem(&cxt->stack, ¤t, jb); + res = recursiveExecute(cxt, jsp, jb, found); + popJsonItem(&cxt->stack); + + return res; +} + +static inline JsonPathBool +recursiveExecuteBoolNested(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb) +{ + JsonItemStackEntry current; + JsonPathBool res; + + pushJsonItem(&cxt->stack, ¤t, jb); + res = recursiveExecuteBool(cxt, jsp, jb, false); + popJsonItem(&cxt->stack); + + return res; +} + +static inline JsonPathExecResult +recursiveExecuteBase(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jbv, JsonValueList *found) +{ + JsonbValue *v; + JsonbValue vbuf; + bool copy = true; + + if (JsonbType(jbv) == jbvScalar) + { + if (jspHasNext(jsp)) + v = &vbuf; + else + { + v = palloc(sizeof(*v)); + copy = false; + } + + JsonbExtractScalar(jbv->val.binary.data, v); + } + else + v = jbv; + + return recursiveExecuteNext(cxt, jsp, NULL, v, found, copy); +} + +static inline JsonBaseObjectInfo +setBaseObject(JsonPathExecContext *cxt, JsonbValue *jbv, int32 id) +{ + JsonBaseObjectInfo baseObject = cxt->baseObject; + + cxt->baseObject.jbc = jbv->type != jbvBinary ? NULL : + (JsonbContainer *) jbv->val.binary.data; + cxt->baseObject.id = id; + + return baseObject; +} + +/* + * Main executor function: walks on jsonpath structure and tries to find + * correspoding parts of jsonb. Note, jsonb and jsonpath values should be + * avaliable and untoasted during work because JsonPathItem, JsonbValue + * and found could have pointers into input values. If caller wants just to + * check matching of json by jsonpath then it doesn't provide a found arg. + * In this case executor works till first positive result and does not check + * the rest if it is possible. 
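The recurring "if (res == jperOk && !found) break;" checks throughout the executor are what implement that early exit: with no output list, the first positive result settles the question. A standalone sketch of the same NULL-output-means-existence-check convention (all names invented):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/*
 * Scan 'vals' for items equal to 'key'.  If 'out' is NULL the caller only
 * wants to know whether a match exists, so stop at the first one; otherwise
 * collect the index of every match into 'out' and report the count in *nout.
 */
static bool
find_matches(const int *vals, int nvals, int key, int *out, int *nout)
{
    bool    found = false;
    int     n = 0;

    for (int i = 0; i < nvals; i++)
    {
        if (vals[i] != key)
            continue;

        found = true;

        if (out == NULL)
            break;              /* existence check: first hit is enough */

        out[n++] = i;
    }

    if (nout)
        *nout = n;
    return found;
}

int
main(void)
{
    int     vals[] = {5, 7, 5, 9};
    int     idx[4];
    int     n;

    printf("exists: %d\n", find_matches(vals, 4, 5, NULL, NULL));  /* 1 */
    find_matches(vals, 4, 5, idx, &n);
    printf("matches: %d\n", n);                                    /* 2 */
    return 0;
}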
In other case it tries to find all satisfied + * results + */ +static JsonPathExecResult +recursiveExecuteNoUnwrap(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + JsonPathItem elem; + JsonPathExecResult res = jperNotFound; + bool hasNext; + JsonBaseObjectInfo baseObject; + + check_stack_depth(); + CHECK_FOR_INTERRUPTS(); + + switch (jsp->type) + { + /* all boolean item types: */ + case jpiAnd: + case jpiOr: + case jpiNot: + case jpiIsUnknown: + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiExists: + case jpiStartsWith: + case jpiLikeRegex: + { + JsonPathBool st = recursiveExecuteBool(cxt, jsp, jb, true); + + res = appendBoolResult(cxt, jsp, found, st); + break; + } + + case jpiKey: + if (JsonbType(jb) == jbvObject) + { + JsonbValue *v, key; + JsonbValue obj; + + if (jb->type == jbvObject) + jb = JsonbWrapInBinary(jb, &obj); + + key.type = jbvString; + key.val.string.val = jspGetString(jsp, &key.val.string.len); + + v = findJsonbValueFromContainer(jb->val.binary.data, JB_FOBJECT, &key); + + if (v != NULL) + { + res = recursiveExecuteNext(cxt, jsp, NULL, v, found, false); + + if (jspHasNext(jsp) || !found) + pfree(v); /* free value if it was not added to found list */ + } + else if (!jspIgnoreStructuralErrors(cxt)) + { + Assert(found); + res = jperMakeError(ERRCODE_JSON_MEMBER_NOT_FOUND); + } + } + else if (!jspIgnoreStructuralErrors(cxt)) + { + Assert(found); + res = jperMakeError(ERRCODE_JSON_MEMBER_NOT_FOUND); + } + break; + + case jpiRoot: + jb = cxt->root; + baseObject = setBaseObject(cxt, jb, 0); + res = recursiveExecuteBase(cxt, jsp, jb, found); + cxt->baseObject = baseObject; + break; + + case jpiCurrent: + res = recursiveExecuteBase(cxt, jsp, cxt->stack->item, found); + break; + + case jpiAnyArray: + if (JsonbType(jb) == jbvArray) + { + hasNext = jspGetNext(jsp, &elem); + + if (jb->type == jbvArray) + { + JsonbValue *el = jb->val.array.elems; + JsonbValue *last_el = el + jb->val.array.nElems; + + for (; el < last_el; el++) + { + res = recursiveExecuteNext(cxt, jsp, &elem, el, found, true); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + } + else + { + JsonbValue v; + JsonbIterator *it; + JsonbIteratorToken r; + + it = JsonbIteratorInit(jb->val.binary.data); + + while((r = JsonbIteratorNext(&it, &v, true)) != WJB_DONE) + { + if (r == WJB_ELEM) + { + res = recursiveExecuteNext(cxt, jsp, &elem, &v, found, true); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + } + } + } + else if (jspAutoWrap(cxt)) + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, true); + else if (!jspIgnoreStructuralErrors(cxt)) + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + break; + + case jpiIndexArray: + if (JsonbType(jb) == jbvArray || jspAutoWrap(cxt)) + { + int innermostArraySize = cxt->innermostArraySize; + int i; + int size = JsonbArraySize(jb); + bool binary = jb->type == jbvBinary; + bool singleton = size < 0; + + if (singleton) + size = 1; + + cxt->innermostArraySize = size; /* for LAST evaluation */ + + hasNext = jspGetNext(jsp, &elem); + + for (i = 0; i < jsp->content.array.nelems; i++) + { + JsonPathItem from; + JsonPathItem to; + int32 index; + int32 index_from; + int32 index_to; + bool range = jspGetArraySubscript(jsp, &from, &to, i); + + res = getArrayIndex(cxt, &from, jb, &index_from); + + if (jperIsError(res)) + break; + + if (range) + { + res = getArrayIndex(cxt, &to, jb, &index_to); + + if 
(jperIsError(res)) + break; + } + else + index_to = index_from; + + if (!jspIgnoreStructuralErrors(cxt) && + (index_from < 0 || + index_from > index_to || + index_to >= size)) + { + res = jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + break; + } + + if (index_from < 0) + index_from = 0; + + if (index_to >= size) + index_to = size - 1; + + res = jperNotFound; + + for (index = index_from; index <= index_to; index++) + { + JsonbValue *v; + bool copy; + + if (singleton) + { + v = jb; + copy = true; + } + else if (binary) + { + v = getIthJsonbValueFromContainer(jb->val.binary.data, + (uint32) index); + + if (v == NULL) + continue; + + copy = false; + } + else + { + v = &jb->val.array.elems[index]; + copy = true; + } + + res = recursiveExecuteNext(cxt, jsp, &elem, v, found, + copy); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + + cxt->innermostArraySize = innermostArraySize; + } + else if (!jspIgnoreStructuralErrors(cxt)) + { + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + } + break; + + case jpiLast: + { + JsonbValue tmpjbv; + JsonbValue *lastjbv; + int last; + bool hasNext; + + if (cxt->innermostArraySize < 0) + elog(ERROR, + "evaluating jsonpath LAST outside of array subscript"); + + hasNext = jspGetNext(jsp, &elem); + + if (!hasNext && !found) + { + res = jperOk; + break; + } + + last = cxt->innermostArraySize - 1; + + lastjbv = hasNext ? &tmpjbv : palloc(sizeof(*lastjbv)); + + lastjbv->type = jbvNumeric; + lastjbv->val.numeric = DatumGetNumeric(DirectFunctionCall1( + int4_numeric, Int32GetDatum(last))); + + res = recursiveExecuteNext(cxt, jsp, &elem, lastjbv, found, hasNext); + } + break; + case jpiAnyKey: + if (JsonbType(jb) == jbvObject) + { + JsonbIterator *it; + int32 r; + JsonbValue v; + JsonbValue bin; + + if (jb->type == jbvObject) + jb = JsonbWrapInBinary(jb, &bin); + + hasNext = jspGetNext(jsp, &elem); + it = JsonbIteratorInit(jb->val.binary.data); + + while((r = JsonbIteratorNext(&it, &v, true)) != WJB_DONE) + { + if (r == WJB_VALUE) + { + res = recursiveExecuteNext(cxt, jsp, &elem, &v, found, true); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + } + } + else if (!jspIgnoreStructuralErrors(cxt)) + { + Assert(found); + res = jperMakeError(ERRCODE_JSON_OBJECT_NOT_FOUND); + } + break; + case jpiAdd: + case jpiSub: + case jpiMul: + case jpiDiv: + case jpiMod: + res = executeBinaryArithmExpr(cxt, jsp, jb, found); + break; + case jpiPlus: + case jpiMinus: + res = executeUnaryArithmExpr(cxt, jsp, jb, found); + break; + case jpiFilter: + { + JsonPathBool st; + + jspGetArg(jsp, &elem); + st = recursiveExecuteBoolNested(cxt, &elem, jb); + if (st != jpbTrue) + res = jperNotFound; + else + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, true); + break; + } + case jpiAny: + { + JsonbValue jbvbuf; + + hasNext = jspGetNext(jsp, &elem); + + /* first try without any intermediate steps */ + if (jsp->content.anybounds.first == 0) + { + bool ignoreStructuralErrors = cxt->ignoreStructuralErrors; + + cxt->ignoreStructuralErrors = true; + res = recursiveExecuteNext(cxt, jsp, &elem, jb, found, true); + cxt->ignoreStructuralErrors = ignoreStructuralErrors; + + if (res == jperOk && !found) + break; + } + + if (jb->type == jbvArray || jb->type == jbvObject) + jb = JsonbWrapInBinary(jb, &jbvbuf); + + if (jb->type == jbvBinary) + res = recursiveAny(cxt, hasNext ? 
&elem : NULL, jb, found, + 1, + jsp->content.anybounds.first, + jsp->content.anybounds.last); + break; + } + case jpiNull: + case jpiBool: + case jpiNumeric: + case jpiString: + case jpiVariable: + { + JsonbValue vbuf; + JsonbValue *v; + bool hasNext = jspGetNext(jsp, &elem); + int id; + + if (!hasNext && !found) + { + res = jperOk; /* skip evaluation */ + break; + } + + v = hasNext ? &vbuf : palloc(sizeof(*v)); + + id = computeJsonPathItem(cxt, jsp, v); + + baseObject = setBaseObject(cxt, v, id); + res = recursiveExecuteNext(cxt, jsp, &elem, v, found, hasNext); + cxt->baseObject = baseObject; + } + break; + case jpiType: + { + JsonbValue *jbv = palloc(sizeof(*jbv)); + + jbv->type = jbvString; + jbv->val.string.val = pstrdup(JsonbTypeName(jb)); + jbv->val.string.len = strlen(jbv->val.string.val); + + res = recursiveExecuteNext(cxt, jsp, NULL, jbv, found, false); + } + break; + case jpiSize: + { + int size = JsonbArraySize(jb); + + if (size < 0) + { + if (!jspAutoWrap(cxt)) + { + if (!jspIgnoreStructuralErrors(cxt)) + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + break; + } + + size = 1; + } + + jb = palloc(sizeof(*jb)); + + jb->type = jbvNumeric; + jb->val.numeric = + DatumGetNumeric(DirectFunctionCall1(int4_numeric, + Int32GetDatum(size))); + + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, false); + } + break; + case jpiAbs: + case jpiFloor: + case jpiCeiling: + { + JsonbValue jbvbuf; + + if (JsonbType(jb) == jbvScalar) + jb = JsonbExtractScalar(jb->val.binary.data, &jbvbuf); + + if (jb->type == jbvNumeric) + { + Datum datum = NumericGetDatum(jb->val.numeric); + + switch (jsp->type) + { + case jpiAbs: + datum = DirectFunctionCall1(numeric_abs, datum); + break; + case jpiFloor: + datum = DirectFunctionCall1(numeric_floor, datum); + break; + case jpiCeiling: + datum = DirectFunctionCall1(numeric_ceil, datum); + break; + default: + break; + } + + jb = palloc(sizeof(*jb)); + + jb->type = jbvNumeric; + jb->val.numeric = DatumGetNumeric(datum); + + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, false); + } + else + res = jperMakeError(ERRCODE_NON_NUMERIC_JSON_ITEM); + } + break; + case jpiDouble: + { + JsonbValue jbv; + ErrorData *edata = NULL; + + if (JsonbType(jb) == jbvScalar) + jb = JsonbExtractScalar(jb->val.binary.data, &jbv); + + if (jb->type == jbvNumeric) + { + /* only check success of numeric to double cast */ + (void) numeric_float8_internal(jb->val.numeric, &edata); + } + else if (jb->type == jbvString) + { + /* cast string as double */ + char *str = pnstrdup(jb->val.string.val, + jb->val.string.len); + double val; + + val = float8in_internal_safe(str, NULL, "double precision", + str, &edata); + pfree(str); + + if (!edata) + { + jb = &jbv; + jb->type = jbvNumeric; + jb->val.numeric = float8_numeric_internal(val, &edata); + } + } + else + { + res = jperMakeError(ERRCODE_NON_NUMERIC_JSON_ITEM); + break; + } + + if (edata) + { + if (ERRCODE_TO_CATEGORY(edata->sqlerrcode) != + ERRCODE_DATA_EXCEPTION) + ThrowErrorData(edata); + + FreeErrorData(edata); + res = jperMakeError(ERRCODE_NON_NUMERIC_JSON_ITEM); + } + else + { + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, true); + } + } + break; + case jpiDatetime: + { + JsonbValue jbvbuf; + Datum value; + text *datetime; + Oid typid; + int32 typmod = -1; + int tz; + bool hasNext; + + if (JsonbType(jb) == jbvScalar) + jb = JsonbExtractScalar(jb->val.binary.data, &jbvbuf); + + res = jperMakeError(ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION); + + if (jb->type != jbvString) + break; + + datetime = 
cstring_to_text_with_len(jb->val.string.val, + jb->val.string.len); + + if (jsp->content.args.left) + { + char *template_str; + int template_len; + char *tzname = NULL; + + jspGetLeftArg(jsp, &elem); + + if (elem.type != jpiString) + elog(ERROR, "invalid jsonpath item type for .datetime() argument"); + + template_str = jspGetString(&elem, &template_len); + + if (jsp->content.args.right) + { + JsonValueList tzlist = { 0 }; + JsonPathExecResult tzres; + JsonbValue *tzjbv; + + jspGetRightArg(jsp, &elem); + tzres = recursiveExecuteNoUnwrap(cxt, &elem, jb, + &tzlist); + + if (jperIsError(tzres)) + return tzres; + + if (JsonValueListLength(&tzlist) != 1) + break; + + tzjbv = JsonValueListHead(&tzlist); + + if (tzjbv->type != jbvString) + break; + + tzname = pnstrdup(tzjbv->val.string.val, + tzjbv->val.string.len); + } + + if (tryToParseDatetime(template_str, template_len, datetime, + tzname, false, + &value, &typid, &typmod, &tz)) + res = jperOk; + + if (tzname) + pfree(tzname); + } + else + { + const char *templates[] = { + "yyyy-mm-dd HH24:MI:SS TZH:TZM", + "yyyy-mm-dd HH24:MI:SS TZH", + "yyyy-mm-dd HH24:MI:SS", + "yyyy-mm-dd", + "HH24:MI:SS TZH:TZM", + "HH24:MI:SS TZH", + "HH24:MI:SS" + }; + int i; + + for (i = 0; i < sizeof(templates) / sizeof(*templates); i++) + { + if (tryToParseDatetime(templates[i], -1, datetime, + NULL, true, &value, &typid, + &typmod, &tz)) + { + res = jperOk; + break; + } + } + } + + pfree(datetime); + + if (jperIsError(res)) + break; + + hasNext = jspGetNext(jsp, &elem); + + if (!hasNext && !found) + break; + + jb = hasNext ? &jbvbuf : palloc(sizeof(*jb)); + + jb->type = jbvDatetime; + jb->val.datetime.value = value; + jb->val.datetime.typid = typid; + jb->val.datetime.typmod = typmod; + jb->val.datetime.tz = tz; + + res = recursiveExecuteNext(cxt, jsp, &elem, jb, found, hasNext); + } + break; + case jpiKeyValue: + if (JsonbType(jb) != jbvObject) + res = jperMakeError(ERRCODE_JSON_OBJECT_NOT_FOUND); + else + { + int32 r; + JsonbValue bin; + JsonbValue key; + JsonbValue val; + JsonbValue idval; + JsonbValue obj; + JsonbValue keystr; + JsonbValue valstr; + JsonbValue idstr; + JsonbIterator *it; + JsonbParseState *ps = NULL; + int64 id; + + hasNext = jspGetNext(jsp, &elem); + + if (jb->type == jbvBinary + ? !JsonContainerSize(jb->val.binary.data) + : !jb->val.object.nPairs) + { + res = jperNotFound; + break; + } + + /* make template object */ + obj.type = jbvBinary; + + keystr.type = jbvString; + keystr.val.string.val = "key"; + keystr.val.string.len = 3; + + valstr.type = jbvString; + valstr.val.string.val = "value"; + valstr.val.string.len = 5; + + idstr.type = jbvString; + idstr.val.string.val = "id"; + idstr.val.string.len = 2; + + if (jb->type == jbvObject) + jb = JsonbWrapInBinary(jb, &bin); + + id = jb->type != jbvBinary ? 
0 : +#ifdef JSONPATH_JSON_C + (int64)((char *)((JsonContainer *) jb->val.binary.data)->data - + (char *) cxt->baseObject.jbc->data); +#else + (int64)((char *) jb->val.binary.data - + (char *) cxt->baseObject.jbc); +#endif + id += (int64) cxt->baseObject.id * INT64CONST(10000000000); + + idval.type = jbvNumeric; + idval.val.numeric = DatumGetNumeric(DirectFunctionCall1(int8_numeric, Int64GetDatum(id))); + + it = JsonbIteratorInit(jb->val.binary.data); + + while ((r = JsonbIteratorNext(&it, &key, true)) != WJB_DONE) + { + if (r == WJB_KEY) + { + Jsonb *jsonb; + JsonbValue *keyval; + + res = jperOk; + + if (!hasNext && !found) + break; + + r = JsonbIteratorNext(&it, &val, true); + Assert(r == WJB_VALUE); + + pushJsonbValue(&ps, WJB_BEGIN_OBJECT, NULL); + + pushJsonbValue(&ps, WJB_KEY, &keystr); + pushJsonbValue(&ps, WJB_VALUE, &key); + + pushJsonbValue(&ps, WJB_KEY, &valstr); + pushJsonbValue(&ps, WJB_VALUE, &val); + + pushJsonbValue(&ps, WJB_KEY, &idstr); + pushJsonbValue(&ps, WJB_VALUE, &idval); + + keyval = pushJsonbValue(&ps, WJB_END_OBJECT, NULL); + + jsonb = JsonbValueToJsonb(keyval); + + JsonbInitBinary(&obj, jsonb); + + baseObject = setBaseObject(cxt, &obj, + cxt->generatedObjectId++); + + res = recursiveExecuteNext(cxt, jsp, &elem, &obj, found, true); + + cxt->baseObject = baseObject; + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + } + } + break; + default: + elog(ERROR, "unrecognized jsonpath item type: %d", jsp->type); + } + + return res; +} + +/* + * Unwrap current array item and execute jsonpath for each of its elements. + */ +static JsonPathExecResult +recursiveExecuteUnwrapArray(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + JsonPathExecResult res = jperNotFound; + + if (jb->type == jbvArray) + { + JsonbValue *elem = jb->val.array.elems; + JsonbValue *last = elem + jb->val.array.nElems; + + for (; elem < last; elem++) + { + res = recursiveExecuteNoUnwrap(cxt, jsp, elem, found); + + if (jperIsError(res)) + break; + if (res == jperOk && !found) + break; + } + } + else + { + JsonbValue v; + JsonbIterator *it; + JsonbIteratorToken tok; + + it = JsonbIteratorInit(jb->val.binary.data); + + while ((tok = JsonbIteratorNext(&it, &v, true)) != WJB_DONE) + { + if (tok == WJB_ELEM) + { + res = recursiveExecuteNoUnwrap(cxt, jsp, &v, found); + if (jperIsError(res)) + break; + if (res == jperOk && !found) + break; + } + } + } + + return res; +} + +/* + * Execute jsonpath with unwrapping of current item if it is an array. + */ +static inline JsonPathExecResult +recursiveExecuteUnwrap(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + if (jspAutoUnwrap(cxt) && JsonbType(jb) == jbvArray) + return recursiveExecuteUnwrapArray(cxt, jsp, jb, found); + + return recursiveExecuteNoUnwrap(cxt, jsp, jb, found); +} + +/* + * Wrap a non-array SQL/JSON item into an array for applying array subscription + * path steps in lax mode. + */ +static inline JsonbValue * +wrapItem(JsonbValue *jbv) +{ + JsonbParseState *ps = NULL; + JsonbValue jbvbuf; + + switch (JsonbType(jbv)) + { + case jbvArray: + /* Simply return an array item. */ + return jbv; + + case jbvScalar: + /* Extract scalar value from singleton pseudo-array. */ + jbv = JsonbExtractScalar(jbv->val.binary.data, &jbvbuf); + break; + + case jbvObject: + /* + * Need to wrap object into a binary JsonbValue for its unpacking + * in pushJsonbValue(). 
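The id computed above packs two ingredients into one stable 64-bit value: an identifier of the base object and the byte offset of the current container within it, combined as base_id * 10^10 + offset (the constant simply assumes no single jsonb comes anywhere near 10 GB). A minimal standalone version of that packing:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Pack a base-object id and a byte offset into one stable int64 id */
static int64_t
keyvalue_id(int64_t base_object_id, int64_t container_offset)
{
    return base_object_id * INT64_C(10000000000) + container_offset;
}

int
main(void)
{
    /* object at offset 120 inside base object #3 */
    printf("%" PRId64 "\n", keyvalue_id(3, 120));   /* 30000000120 */
    return 0;
}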
+ */ + if (jbv->type != jbvBinary) + jbv = JsonbWrapInBinary(jbv, &jbvbuf); + break; + + default: + /* Ordinary scalars can be pushed directly. */ + break; + } + + pushJsonbValue(&ps, WJB_BEGIN_ARRAY, NULL); + pushJsonbValue(&ps, WJB_ELEM, jbv); + jbv = pushJsonbValue(&ps, WJB_END_ARRAY, NULL); + + return JsonbWrapInBinary(jbv, NULL); +} + +/* + * Execute jsonpath with automatic unwrapping of current item in lax mode. + */ +static inline JsonPathExecResult +recursiveExecute(JsonPathExecContext *cxt, JsonPathItem *jsp, JsonbValue *jb, + JsonValueList *found) +{ + if (jspAutoUnwrap(cxt)) + { + switch (jsp->type) + { + case jpiKey: + case jpiAnyKey: + /* case jpiAny: */ + case jpiFilter: + /* all methods excluding type() and size() */ + case jpiAbs: + case jpiFloor: + case jpiCeiling: + case jpiDouble: + case jpiDatetime: + case jpiKeyValue: + return recursiveExecuteUnwrap(cxt, jsp, jb, found); + + default: + break; + } + } + + return recursiveExecuteNoUnwrap(cxt, jsp, jb, found); +} + + +/* + * Public interface to jsonpath executor + */ +JsonPathExecResult +executeJsonPath(JsonPath *path, List *vars, Jsonb *json, JsonValueList *foundJson) +{ + JsonPathExecContext cxt; + JsonPathItem jsp; + JsonbValue jbv; + JsonItemStackEntry root; + + jspInit(&jsp, path); + + cxt.vars = vars; + cxt.laxMode = (path->header & JSONPATH_LAX) != 0; + cxt.ignoreStructuralErrors = cxt.laxMode; + cxt.root = JsonbInitBinary(&jbv, json); + cxt.stack = NULL; + cxt.baseObject.jbc = NULL; + cxt.baseObject.id = 0; + cxt.generatedObjectId = list_length(vars) + 1; + cxt.innermostArraySize = -1; + + pushJsonItem(&cxt.stack, &root, cxt.root); + + if (jspStrictAbsenseOfErrors(&cxt) && !foundJson) + { + /* + * In strict mode we must get a complete list of values to check + * that there are no errors at all. + */ + JsonValueList vals = { 0 }; + JsonPathExecResult res = recursiveExecute(&cxt, &jsp, &jbv, &vals); + + if (jperIsError(res)) + return res; + + return JsonValueListIsEmpty(&vals) ? jperNotFound : jperOk; + } + + return recursiveExecute(&cxt, &jsp, &jbv, foundJson); +} + +static Datum +returnDATUM(void *arg, bool *isNull) +{ + *isNull = false; + return PointerGetDatum(arg); +} + +static Datum +returnNULL(void *arg, bool *isNull) +{ + *isNull = true; + return Int32GetDatum(0); +} + +/* + * Convert jsonb object into list of vars for executor + */ +static List* +makePassingVars(Jsonb *jb) +{ + JsonbValue v; + JsonbIterator *it; + int32 r; + List *vars = NIL; + + it = JsonbIteratorInit(&jb->root); + + r = JsonbIteratorNext(&it, &v, true); + + if (r != WJB_BEGIN_OBJECT) + ereport(ERROR, + (errcode(ERRCODE_INVALID_PARAMETER_VALUE), + errmsg("json containing jsonpath variables is not an object"))); + + while ((r = JsonbIteratorNext(&it, &v, true)) != WJB_DONE) + { + if (r == WJB_KEY) + { + JsonPathVariable *jpv = palloc0(sizeof(*jpv)); + + jpv->varName = cstring_to_text_with_len(v.val.string.val, + v.val.string.len); + + JsonbIteratorNext(&it, &v, true); + + /* Datums are copied from jsonb into the current memory context. 
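The JsonPathVariable values built here are handed to the executor through the small callback interface shown above (returnDATUM and returnNULL): the callback produces the value and reports nullness through a separate flag, so a JSON null variable needs no sentinel value. A toy standalone sketch of that callback shape (toy_value_cb and the int payload are invented; the real callbacks return Datum):

#include <stdbool.h>
#include <stdio.h>

/* Toy callback shape: produce a value and report nullness out-of-band */
typedef int (*toy_value_cb) (void *arg, bool *isnull);

static int
return_value(void *arg, bool *isnull)
{
    *isnull = false;
    return *(int *) arg;
}

static int
return_null(void *arg, bool *isnull)
{
    (void) arg;
    *isnull = true;
    return 0;                   /* value is ignored when *isnull is set */
}

int
main(void)
{
    int         forty_two = 42;
    bool        isnull;
    toy_value_cb cb = return_value;
    int         v = cb(&forty_two, &isnull);

    printf("%d (null=%d)\n", v, isnull);

    cb = return_null;
    cb(NULL, &isnull);
    printf("null=%d\n", isnull);
    return 0;
}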
*/ + jpv->cb = returnDATUM; + + switch (v.type) + { + case jbvBool: + jpv->typid = BOOLOID; + jpv->cb_arg = DatumGetPointer(BoolGetDatum(v.val.boolean)); + break; + case jbvNull: + jpv->cb = returnNULL; + break; + case jbvString: + jpv->typid = TEXTOID; + jpv->cb_arg = cstring_to_text_with_len(v.val.string.val, + v.val.string.len); + break; + case jbvNumeric: + jpv->typid = NUMERICOID; + jpv->cb_arg = DatumGetPointer( + datumCopy(NumericGetDatum(v.val.numeric), false, -1)); + break; + case jbvBinary: + jpv->typid = JSONXOID; + jpv->cb_arg = DatumGetPointer(JsonbPGetDatum(JsonbValueToJsonb(&v))); + break; + default: + elog(ERROR, "invalid jsonb value type"); + } + + vars = lappend(vars, jpv); + } + } + + return vars; +} + +static void +throwJsonPathError(JsonPathExecResult res) +{ + int err; + if (!jperIsError(res)) + return; + + if (jperIsErrorData(res)) + ThrowErrorData(jperGetErrorData(res)); + + err = jperGetError(res); + + switch (err) + { + case ERRCODE_JSON_ARRAY_NOT_FOUND: + ereport(ERROR, + (errcode(err), + errmsg("SQL/JSON array not found"))); + break; + case ERRCODE_JSON_OBJECT_NOT_FOUND: + ereport(ERROR, + (errcode(err), + errmsg("SQL/JSON object not found"))); + break; + case ERRCODE_JSON_MEMBER_NOT_FOUND: + ereport(ERROR, + (errcode(err), + errmsg("SQL/JSON member not found"))); + break; + case ERRCODE_JSON_NUMBER_NOT_FOUND: + ereport(ERROR, + (errcode(err), + errmsg("SQL/JSON number not found"))); + break; + case ERRCODE_JSON_SCALAR_REQUIRED: + ereport(ERROR, + (errcode(err), + errmsg("SQL/JSON scalar required"))); + break; + case ERRCODE_SINGLETON_JSON_ITEM_REQUIRED: + ereport(ERROR, + (errcode(err), + errmsg("Singleton SQL/JSON item required"))); + break; + case ERRCODE_NON_NUMERIC_JSON_ITEM: + ereport(ERROR, + (errcode(err), + errmsg("Non-numeric SQL/JSON item"))); + break; + case ERRCODE_INVALID_JSON_SUBSCRIPT: + ereport(ERROR, + (errcode(err), + errmsg("Invalid SQL/JSON subscript"))); + break; + case ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION: + ereport(ERROR, + (errcode(err), + errmsg("Invalid argument for SQL/JSON datetime function"))); + break; + default: + ereport(ERROR, + (errcode(err), + errmsg("Unknown SQL/JSON error"))); + break; + } +} + +static Datum +jsonb_jsonpath_exists(PG_FUNCTION_ARGS) +{ + Jsonb *jb = PG_GETARG_JSONB_P(0); + JsonPath *jp = PG_GETARG_JSONPATH_P(1); + JsonPathExecResult res; + List *vars = NIL; + + if (PG_NARGS() == 3) + vars = makePassingVars(PG_GETARG_JSONB_P(2)); + + res = executeJsonPath(jp, vars, jb, NULL); + + PG_FREE_IF_COPY(jb, 0); + PG_FREE_IF_COPY(jp, 1); + + if (jperIsError(res)) + { + jperFree(res); + PG_RETURN_NULL(); + } + + PG_RETURN_BOOL(res == jperOk); +} + +Datum +jsonb_jsonpath_exists2(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_exists(fcinfo); +} + +Datum +jsonb_jsonpath_exists3(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_exists(fcinfo); +} + +static inline Datum +jsonb_jsonpath_predicate(FunctionCallInfo fcinfo, List *vars) +{ + Jsonb *jb = PG_GETARG_JSONB_P(0); + JsonPath *jp = PG_GETARG_JSONPATH_P(1); + JsonbValue *jbv; + JsonValueList found = { 0 }; + JsonPathExecResult res; + + res = executeJsonPath(jp, vars, jb, &found); + + throwJsonPathError(res); + + if (JsonValueListLength(&found) != 1) + throwJsonPathError(jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED)); + + jbv = JsonValueListHead(&found); + + if (JsonbType(jbv) == jbvScalar) + JsonbExtractScalar(jbv->val.binary.data, jbv); + + PG_FREE_IF_COPY(jb, 0); + PG_FREE_IF_COPY(jp, 1); + + if (jbv->type == jbvNull) + PG_RETURN_NULL(); + + if (jbv->type 
!= jbvBool) + PG_RETURN_NULL(); /* XXX */ + + PG_RETURN_BOOL(jbv->val.boolean); +} + +Datum +jsonb_jsonpath_predicate2(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_predicate(fcinfo, NIL); +} + +Datum +jsonb_jsonpath_predicate3(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_predicate(fcinfo, + makePassingVars(PG_GETARG_JSONB_P(2))); +} + +static Datum +jsonb_jsonpath_query(FunctionCallInfo fcinfo) +{ + FuncCallContext *funcctx; + List *found; + JsonbValue *v; + ListCell *c; + + if (SRF_IS_FIRSTCALL()) + { + JsonPath *jp = PG_GETARG_JSONPATH_P(1); + Jsonb *jb; + JsonPathExecResult res; + MemoryContext oldcontext; + List *vars = NIL; + JsonValueList found = { 0 }; + + funcctx = SRF_FIRSTCALL_INIT(); + oldcontext = MemoryContextSwitchTo(funcctx->multi_call_memory_ctx); + + jb = PG_GETARG_JSONB_P_COPY(0); + if (PG_NARGS() == 3) + vars = makePassingVars(PG_GETARG_JSONB_P(2)); + + res = executeJsonPath(jp, vars, jb, &found); + + if (jperIsError(res)) + throwJsonPathError(res); + + PG_FREE_IF_COPY(jp, 1); + + funcctx->user_fctx = JsonValueListGetList(&found); + + MemoryContextSwitchTo(oldcontext); + } + + funcctx = SRF_PERCALL_SETUP(); + found = funcctx->user_fctx; + + c = list_head(found); + + if (c == NULL) + SRF_RETURN_DONE(funcctx); + + v = lfirst(c); + funcctx->user_fctx = list_delete_first(found); + + SRF_RETURN_NEXT(funcctx, JsonbPGetDatum(JsonbValueToJsonb(v))); +} + +Datum +jsonb_jsonpath_query2(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_query(fcinfo); +} + +Datum +jsonb_jsonpath_query3(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_query(fcinfo); +} + +static Datum +jsonb_jsonpath_query_wrapped(FunctionCallInfo fcinfo, List *vars) +{ + Jsonb *jb = PG_GETARG_JSONB_P(0); + JsonPath *jp = PG_GETARG_JSONPATH_P(1); + JsonValueList found = { 0 }; + JsonPathExecResult res; + int size; + + res = executeJsonPath(jp, vars, jb, &found); + + if (jperIsError(res)) + throwJsonPathError(res); + + size = JsonValueListLength(&found); + + if (size == 0) + PG_RETURN_NULL(); + + if (size == 1) + PG_RETURN_JSONB_P(JsonbValueToJsonb(JsonValueListHead(&found))); + + PG_RETURN_JSONB_P(JsonbValueToJsonb(wrapItemsInArray(&found))); +} + +Datum +jsonb_jsonpath_query_wrapped2(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_query_wrapped(fcinfo, NIL); +} + +Datum +jsonb_jsonpath_query_wrapped3(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_query_wrapped(fcinfo, + makePassingVars(PG_GETARG_JSONB_P(2))); +} + +/* Construct a JSON array from the item list */ +static inline JsonbValue * +wrapItemsInArray(const JsonValueList *items) +{ + JsonbParseState *ps = NULL; + JsonValueListIterator it = { 0 }; + JsonbValue *jbv; + + pushJsonbValue(&ps, WJB_BEGIN_ARRAY, NULL); + + while ((jbv = JsonValueListNext(items, &it))) + { + JsonbValue bin; + + if (jbv->type == jbvBinary && + JsonContainerIsScalar(jbv->val.binary.data)) + JsonbExtractScalar(jbv->val.binary.data, jbv); + + if (jbv->type == jbvObject || jbv->type == jbvArray) + jbv = JsonbWrapInBinary(jbv, &bin); + + pushJsonbValue(&ps, WJB_ELEM, jbv); + } + + return pushJsonbValue(&ps, WJB_END_ARRAY, NULL); +} diff --git a/src/backend/utils/adt/jsonpath_gram.y b/src/backend/utils/adt/jsonpath_gram.y new file mode 100644 index 0000000000..3856a06ba2 --- /dev/null +++ b/src/backend/utils/adt/jsonpath_gram.y @@ -0,0 +1,495 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_gram.y + * Grammar definitions for jsonpath datatype + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * 
src/backend/utils/adt/jsonpath_gram.y + * + *------------------------------------------------------------------------- + */ + +%{ +#include "postgres.h" + +#include "fmgr.h" +#include "catalog/pg_collation.h" +#include "miscadmin.h" +#include "nodes/pg_list.h" +#include "regex/regex.h" +#include "utils/builtins.h" +#include "utils/jsonpath.h" + +#include "utils/jsonpath_scanner.h" + +/* + * Bison doesn't allocate anything that needs to live across parser calls, + * so we can easily have it use palloc instead of malloc. This prevents + * memory leaks if we error out during parsing. Note this only works with + * bison >= 2.0. However, in bison 1.875 the default is to use alloca() + * if possible, so there's not really much problem anyhow, at least if + * you're building with gcc. + */ +#define YYMALLOC palloc +#define YYFREE pfree + +static JsonPathParseItem* +makeItemType(int type) +{ + JsonPathParseItem* v = palloc(sizeof(*v)); + + CHECK_FOR_INTERRUPTS(); + + v->type = type; + v->next = NULL; + + return v; +} + +static JsonPathParseItem* +makeItemString(string *s) +{ + JsonPathParseItem *v; + + if (s == NULL) + { + v = makeItemType(jpiNull); + } + else + { + v = makeItemType(jpiString); + v->value.string.val = s->val; + v->value.string.len = s->len; + } + + return v; +} + +static JsonPathParseItem* +makeItemVariable(string *s) +{ + JsonPathParseItem *v; + + v = makeItemType(jpiVariable); + v->value.string.val = s->val; + v->value.string.len = s->len; + + return v; +} + +static JsonPathParseItem* +makeItemKey(string *s) +{ + JsonPathParseItem *v; + + v = makeItemString(s); + v->type = jpiKey; + + return v; +} + +static JsonPathParseItem* +makeItemNumeric(string *s) +{ + JsonPathParseItem *v; + + v = makeItemType(jpiNumeric); + v->value.numeric = + DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(s->val), 0, -1)); + + return v; +} + +static JsonPathParseItem* +makeItemBool(bool val) { + JsonPathParseItem *v = makeItemType(jpiBool); + + v->value.boolean = val; + + return v; +} + +static JsonPathParseItem* +makeItemBinary(int type, JsonPathParseItem* la, JsonPathParseItem *ra) +{ + JsonPathParseItem *v = makeItemType(type); + + v->value.args.left = la; + v->value.args.right = ra; + + return v; +} + +static JsonPathParseItem* +makeItemUnary(int type, JsonPathParseItem* a) +{ + JsonPathParseItem *v; + + if (type == jpiPlus && a->type == jpiNumeric && !a->next) + return a; + + if (type == jpiMinus && a->type == jpiNumeric && !a->next) + { + v = makeItemType(jpiNumeric); + v->value.numeric = + DatumGetNumeric(DirectFunctionCall1(numeric_uminus, + NumericGetDatum(a->value.numeric))); + return v; + } + + v = makeItemType(type); + + v->value.arg = a; + + return v; +} + +static JsonPathParseItem* +makeItemList(List *list) +{ + JsonPathParseItem *head, *end; + ListCell *cell = list_head(list); + + head = end = (JsonPathParseItem *) lfirst(cell); + + if (!lnext(cell)) + return head; + + /* append items to the end of already existing list */ + while (end->next) + end = end->next; + + for_each_cell(cell, lnext(cell)) + { + JsonPathParseItem *c = (JsonPathParseItem *) lfirst(cell); + + end->next = c; + end = c; + } + + return head; +} + +static JsonPathParseItem* +makeIndexArray(List *list) +{ + JsonPathParseItem *v = makeItemType(jpiIndexArray); + ListCell *cell; + int i = 0; + + Assert(list_length(list) > 0); + v->value.array.nelems = list_length(list); + + v->value.array.elems = palloc(sizeof(v->value.array.elems[0]) * v->value.array.nelems); + + foreach(cell, list) + { + 
JsonPathParseItem *jpi = lfirst(cell); + + Assert(jpi->type == jpiSubscript); + + v->value.array.elems[i].from = jpi->value.args.left; + v->value.array.elems[i++].to = jpi->value.args.right; + } + + return v; +} + +static JsonPathParseItem* +makeAny(int first, int last) +{ + JsonPathParseItem *v = makeItemType(jpiAny); + + v->value.anybounds.first = (first >= 0) ? first : PG_UINT32_MAX; + v->value.anybounds.last = (last >= 0) ? last : PG_UINT32_MAX; + + return v; +} + +static JsonPathParseItem * +makeItemLikeRegex(JsonPathParseItem *expr, string *pattern, string *flags) +{ + JsonPathParseItem *v = makeItemType(jpiLikeRegex); + int i; + int cflags = REG_ADVANCED; + + v->value.like_regex.expr = expr; + v->value.like_regex.pattern = pattern->val; + v->value.like_regex.patternlen = pattern->len; + v->value.like_regex.flags = 0; + + for (i = 0; flags && i < flags->len; i++) + { + switch (flags->val[i]) + { + case 'i': + v->value.like_regex.flags |= JSP_REGEX_ICASE; + cflags |= REG_ICASE; + break; + case 's': + v->value.like_regex.flags &= ~JSP_REGEX_MLINE; + v->value.like_regex.flags |= JSP_REGEX_SLINE; + cflags |= REG_NEWLINE; + break; + case 'm': + v->value.like_regex.flags &= ~JSP_REGEX_SLINE; + v->value.like_regex.flags |= JSP_REGEX_MLINE; + cflags &= ~REG_NEWLINE; + break; + case 'x': + v->value.like_regex.flags |= JSP_REGEX_WSPACE; + cflags |= REG_EXPANDED; + break; + default: + yyerror(NULL, "unrecognized flag of LIKE_REGEX predicate"); + break; + } + } + + /* check regex validity */ + (void) RE_compile_and_cache(cstring_to_text_with_len(pattern->val, pattern->len), + cflags, DEFAULT_COLLATION_OID); + + return v; +} + +%} + +/* BISON Declarations */ +%pure-parser +%expect 0 +%name-prefix="jsonpath_yy" +%error-verbose +%parse-param {JsonPathParseResult **result} + +%union { + string str; + List *elems; /* list of JsonPathParseItem */ + List *indexs; /* list of integers */ + JsonPathParseItem *value; + JsonPathParseResult *result; + JsonPathItemType optype; + bool boolean; + int integer; +} + +%token TO_P NULL_P TRUE_P FALSE_P IS_P UNKNOWN_P EXISTS_P +%token IDENT_P STRING_P NUMERIC_P INT_P VARIABLE_P +%token OR_P AND_P NOT_P +%token LESS_P LESSEQUAL_P EQUAL_P NOTEQUAL_P GREATEREQUAL_P GREATER_P +%token ANY_P STRICT_P LAX_P LAST_P STARTS_P WITH_P LIKE_REGEX_P FLAG_P +%token ABS_P SIZE_P TYPE_P FLOOR_P DOUBLE_P CEILING_P DATETIME_P +%token KEYVALUE_P + +%type result + +%type scalar_value path_primary expr array_accessor + any_path accessor_op key predicate delimited_predicate + index_elem starts_with_initial datetime_template opt_datetime_template + expr_or_predicate + +%type accessor_expr + +%type index_list + +%type comp_op method + +%type mode + +%type key_name + +%type any_level + +%left OR_P +%left AND_P +%right NOT_P +%left '+' '-' +%left '*' '/' '%' +%left UMINUS +%nonassoc '(' ')' + +/* Grammar follows */ +%% + +result: + mode expr_or_predicate { + *result = palloc(sizeof(JsonPathParseResult)); + (*result)->expr = $2; + (*result)->lax = $1; + } + | /* EMPTY */ { *result = NULL; } + ; + +expr_or_predicate: + expr { $$ = $1; } + | predicate { $$ = $1; } + ; + +mode: + STRICT_P { $$ = false; } + | LAX_P { $$ = true; } + | /* EMPTY */ { $$ = true; } + ; + +scalar_value: + STRING_P { $$ = makeItemString(&$1); } + | NULL_P { $$ = makeItemString(NULL); } + | TRUE_P { $$ = makeItemBool(true); } + | FALSE_P { $$ = makeItemBool(false); } + | NUMERIC_P { $$ = makeItemNumeric(&$1); } + | INT_P { $$ = makeItemNumeric(&$1); } + | VARIABLE_P { $$ = makeItemVariable(&$1); } + ; + +comp_op: + 
EQUAL_P { $$ = jpiEqual; } + | NOTEQUAL_P { $$ = jpiNotEqual; } + | LESS_P { $$ = jpiLess; } + | GREATER_P { $$ = jpiGreater; } + | LESSEQUAL_P { $$ = jpiLessOrEqual; } + | GREATEREQUAL_P { $$ = jpiGreaterOrEqual; } + ; + +delimited_predicate: + '(' predicate ')' { $$ = $2; } + | EXISTS_P '(' expr ')' { $$ = makeItemUnary(jpiExists, $3); } + ; + +predicate: + delimited_predicate { $$ = $1; } + | expr comp_op expr { $$ = makeItemBinary($2, $1, $3); } + | predicate AND_P predicate { $$ = makeItemBinary(jpiAnd, $1, $3); } + | predicate OR_P predicate { $$ = makeItemBinary(jpiOr, $1, $3); } + | NOT_P delimited_predicate { $$ = makeItemUnary(jpiNot, $2); } + | '(' predicate ')' IS_P UNKNOWN_P { $$ = makeItemUnary(jpiIsUnknown, $2); } + | expr STARTS_P WITH_P starts_with_initial + { $$ = makeItemBinary(jpiStartsWith, $1, $4); } + | expr LIKE_REGEX_P STRING_P { $$ = makeItemLikeRegex($1, &$3, NULL); } + | expr LIKE_REGEX_P STRING_P FLAG_P STRING_P + { $$ = makeItemLikeRegex($1, &$3, &$5); } + ; + +starts_with_initial: + STRING_P { $$ = makeItemString(&$1); } + | VARIABLE_P { $$ = makeItemVariable(&$1); } + ; + +path_primary: + scalar_value { $$ = $1; } + | '$' { $$ = makeItemType(jpiRoot); } + | '@' { $$ = makeItemType(jpiCurrent); } + | LAST_P { $$ = makeItemType(jpiLast); } + ; + +accessor_expr: + path_primary { $$ = list_make1($1); } + | '.' key { $$ = list_make2(makeItemType(jpiCurrent), $2); } + | '(' expr ')' accessor_op { $$ = list_make2($2, $4); } + | '(' predicate ')' accessor_op { $$ = list_make2($2, $4); } + | accessor_expr accessor_op { $$ = lappend($1, $2); } + ; + +expr: + accessor_expr { $$ = makeItemList($1); } + | '(' expr ')' { $$ = $2; } + | '+' expr %prec UMINUS { $$ = makeItemUnary(jpiPlus, $2); } + | '-' expr %prec UMINUS { $$ = makeItemUnary(jpiMinus, $2); } + | expr '+' expr { $$ = makeItemBinary(jpiAdd, $1, $3); } + | expr '-' expr { $$ = makeItemBinary(jpiSub, $1, $3); } + | expr '*' expr { $$ = makeItemBinary(jpiMul, $1, $3); } + | expr '/' expr { $$ = makeItemBinary(jpiDiv, $1, $3); } + | expr '%' expr { $$ = makeItemBinary(jpiMod, $1, $3); } + ; + +index_elem: + expr { $$ = makeItemBinary(jpiSubscript, $1, NULL); } + | expr TO_P expr { $$ = makeItemBinary(jpiSubscript, $1, $3); } + ; + +index_list: + index_elem { $$ = list_make1($1); } + | index_list ',' index_elem { $$ = lappend($1, $3); } + ; + +array_accessor: + '[' '*' ']' { $$ = makeItemType(jpiAnyArray); } + | '[' index_list ']' { $$ = makeIndexArray($2); } + ; + +any_level: + INT_P { $$ = pg_atoi($1.val, 4, 0); } + | LAST_P { $$ = -1; } + ; + +any_path: + ANY_P { $$ = makeAny(0, -1); } + | ANY_P '{' any_level '}' { $$ = makeAny($3, $3); } + | ANY_P '{' any_level TO_P any_level '}' { $$ = makeAny($3, $5); } + ; + +accessor_op: + '.' key { $$ = $2; } + | '.' '*' { $$ = makeItemType(jpiAnyKey); } + | array_accessor { $$ = $1; } + | '.' any_path { $$ = $2; } + | '.' method '(' ')' { $$ = makeItemType($2); } + | '.' DATETIME_P '(' opt_datetime_template ')' + { $$ = makeItemBinary(jpiDatetime, $4, NULL); } + | '.' DATETIME_P '(' datetime_template ',' expr ')' + { $$ = makeItemBinary(jpiDatetime, $4, $6); } + | '?' 
'(' predicate ')' { $$ = makeItemUnary(jpiFilter, $3); } + ; + +datetime_template: + STRING_P { $$ = makeItemString(&$1); } + ; + +opt_datetime_template: + datetime_template { $$ = $1; } + | /* EMPTY */ { $$ = NULL; } + ; + +key: + key_name { $$ = makeItemKey(&$1); } + ; + +key_name: + IDENT_P + | STRING_P + | TO_P + | NULL_P + | TRUE_P + | FALSE_P + | IS_P + | UNKNOWN_P + | EXISTS_P + | STRICT_P + | LAX_P + | ABS_P + | SIZE_P + | TYPE_P + | FLOOR_P + | DOUBLE_P + | CEILING_P + | DATETIME_P + | KEYVALUE_P + | LAST_P + | STARTS_P + | WITH_P + | LIKE_REGEX_P + | FLAG_P + ; + +method: + ABS_P { $$ = jpiAbs; } + | SIZE_P { $$ = jpiSize; } + | TYPE_P { $$ = jpiType; } + | FLOOR_P { $$ = jpiFloor; } + | DOUBLE_P { $$ = jpiDouble; } + | CEILING_P { $$ = jpiCeiling; } + | KEYVALUE_P { $$ = jpiKeyValue; } + ; +%% + diff --git a/src/backend/utils/adt/jsonpath_json.c b/src/backend/utils/adt/jsonpath_json.c new file mode 100644 index 0000000000..91b3e7b8b2 --- /dev/null +++ b/src/backend/utils/adt/jsonpath_json.c @@ -0,0 +1,22 @@ +#define JSONPATH_JSON_C + +#include "postgres.h" + +#include "catalog/pg_type.h" +#include "utils/json.h" +#include "utils/jsonapi.h" +#include "utils/jsonb.h" +#include "utils/builtins.h" + +#include "utils/jsonpath_json.h" + +#define jsonb_jsonpath_exists2 json_jsonpath_exists2 +#define jsonb_jsonpath_exists3 json_jsonpath_exists3 +#define jsonb_jsonpath_predicate2 json_jsonpath_predicate2 +#define jsonb_jsonpath_predicate3 json_jsonpath_predicate3 +#define jsonb_jsonpath_query2 json_jsonpath_query2 +#define jsonb_jsonpath_query3 json_jsonpath_query3 +#define jsonb_jsonpath_query_wrapped2 json_jsonpath_query_wrapped2 +#define jsonb_jsonpath_query_wrapped3 json_jsonpath_query_wrapped3 + +#include "jsonpath_exec.c" diff --git a/src/backend/utils/adt/jsonpath_scan.l b/src/backend/utils/adt/jsonpath_scan.l new file mode 100644 index 0000000000..8101ffb265 --- /dev/null +++ b/src/backend/utils/adt/jsonpath_scan.l @@ -0,0 +1,623 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_scan.l + * Lexical parser for jsonpath datatype + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/backend/utils/adt/jsonpath_scan.l + * + *------------------------------------------------------------------------- + */ + +%{ +#include "postgres.h" +#include "mb/pg_wchar.h" +#include "nodes/pg_list.h" +#include "utils/jsonpath_scanner.h" + +static string scanstring; + +/* No reason to constrain amount of data slurped */ +/* #define YY_READ_BUF_SIZE 16777216 */ + +/* Handles to the buffer that the lexer uses internally */ +static YY_BUFFER_STATE scanbufhandle; +static char *scanbuf; +static int scanbuflen; + +static void addstring(bool init, char *s, int l); +static void addchar(bool init, char s); +static int checkSpecialVal(void); /* examine scanstring for the special value */ + +static void parseUnicode(char *s, int l); +static void parseHexChars(char *s, int l); + +/* Avoid exit() on fatal scanner errors (a bit ugly -- see yy_fatal_error) */ +#undef fprintf +#define fprintf(file, fmt, msg) fprintf_to_ereport(fmt, msg) + +static void +fprintf_to_ereport(const char *fmt, const char *msg) +{ + ereport(ERROR, (errmsg_internal("%s", msg))); +} + +#define yyerror jsonpath_yyerror +%} + +%option 8bit +%option never-interactive +%option nodefault +%option noinput +%option nounput +%option noyywrap +%option warn +%option prefix="jsonpath_yy" +%option bison-bridge +%option noyyalloc +%option noyyrealloc +%option noyyfree + 
+%x xQUOTED +%x xNONQUOTED +%x xVARQUOTED +%x xSINGLEQUOTED +%x xCOMMENT + +special [\?\%\$\.\[\]\{\}\(\)\|\&\!\=\<\>\@\#\,\*:\-\+\/] +any [^\?\%\$\.\[\]\{\}\(\)\|\&\!\=\<\>\@\#\,\*:\-\+\/\\\"\' \t\n\r\f] +blank [ \t\n\r\f] +hex_dig [0-9A-Fa-f] +unicode \\u({hex_dig}{4}|\{{hex_dig}{1,6}\}) +hex_char \\x{hex_dig}{2} + + +%% + +\&\& { return AND_P; } + +\|\| { return OR_P; } + +\! { return NOT_P; } + +\*\* { return ANY_P; } + +\< { return LESS_P; } + +\<\= { return LESSEQUAL_P; } + +\=\= { return EQUAL_P; } + +\<\> { return NOTEQUAL_P; } + +\!\= { return NOTEQUAL_P; } + +\>\= { return GREATEREQUAL_P; } + +\> { return GREATER_P; } + +\${any}+ { + addstring(true, yytext + 1, yyleng - 1); + addchar(false, '\0'); + yylval->str = scanstring; + return VARIABLE_P; + } + +\$\" { + addchar(true, '\0'); + BEGIN xVARQUOTED; + } + +{special} { return *yytext; } + +{blank}+ { /* ignore */ } + +\/\* { + addchar(true, '\0'); + BEGIN xCOMMENT; + } + +[0-9]+(\.[0-9]+)?[eE][+-]?[0-9]+ /* float */ { + addstring(true, yytext, yyleng); + addchar(false, '\0'); + yylval->str = scanstring; + return NUMERIC_P; + } + +\.[0-9]+[eE][+-]?[0-9]+ /* float */ { + addstring(true, yytext, yyleng); + addchar(false, '\0'); + yylval->str = scanstring; + return NUMERIC_P; + } + +([0-9]+)?\.[0-9]+ { + addstring(true, yytext, yyleng); + addchar(false, '\0'); + yylval->str = scanstring; + return NUMERIC_P; + } + +[0-9]+ { + addstring(true, yytext, yyleng); + addchar(false, '\0'); + yylval->str = scanstring; + return INT_P; + } + +{any}+ { + addstring(true, yytext, yyleng); + BEGIN xNONQUOTED; + } + +\" { + addchar(true, '\0'); + BEGIN xQUOTED; + } + +\' { + addchar(true, '\0'); + BEGIN xSINGLEQUOTED; + } + +\\ { + yyless(0); + addchar(true, '\0'); + BEGIN xNONQUOTED; + } + +{any}+ { + addstring(false, yytext, yyleng); + } + +{blank}+ { + yylval->str = scanstring; + BEGIN INITIAL; + return checkSpecialVal(); + } + + +\/\* { + yylval->str = scanstring; + BEGIN xCOMMENT; + } + +({special}|\"|\') { + yylval->str = scanstring; + yyless(0); + BEGIN INITIAL; + return checkSpecialVal(); + } + +<> { + yylval->str = scanstring; + BEGIN INITIAL; + return checkSpecialVal(); + } + +\\[\"\'\\] { addchar(false, yytext[1]); } + +\\b { addchar(false, '\b'); } + +\\f { addchar(false, '\f'); } + +\\n { addchar(false, '\n'); } + +\\r { addchar(false, '\r'); } + +\\t { addchar(false, '\t'); } + +\\v { addchar(false, '\v'); } + +{unicode}+ { parseUnicode(yytext, yyleng); } + +{hex_char}+ { parseHexChars(yytext, yyleng); } + +\\x { yyerror(NULL, "Hex character sequence is invalid"); } + +\\u { yyerror(NULL, "Unicode sequence is invalid"); } + +\\. 
{ yyerror(NULL, "Escape sequence is invalid"); } + +\\ { yyerror(NULL, "Unexpected end after backslash"); } + +<> { yyerror(NULL, "Unexpected end of quoted string"); } + +\" { + yylval->str = scanstring; + BEGIN INITIAL; + return STRING_P; + } + +\" { + yylval->str = scanstring; + BEGIN INITIAL; + return VARIABLE_P; + } + +\' { + yylval->str = scanstring; + BEGIN INITIAL; + return STRING_P; + } + +[^\\\"]+ { addstring(false, yytext, yyleng); } + +[^\\\']+ { addstring(false, yytext, yyleng); } + +<> { yyterminate(); } + +\*\/ { BEGIN INITIAL; } + +[^\*]+ { } + +\* { } + +<> { yyerror(NULL, "Unexpected end of comment"); } + +%% + +void +jsonpath_yyerror(JsonPathParseResult **result, const char *message) +{ + if (*yytext == YY_END_OF_BUFFER_CHAR) + { + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("bad jsonpath representation"), + /* translator: %s is typically "syntax error" */ + errdetail("%s at end of input", message))); + } + else + { + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("bad jsonpath representation"), + /* translator: first %s is typically "syntax error" */ + errdetail("%s at or near \"%s\"", message, yytext))); + } +} + +typedef struct keyword +{ + int16 len; + bool lowercase; + int val; + char *keyword; +} keyword; + +/* + * Array of key words should be sorted by length and then + * alphabetical order + */ + +static keyword keywords[] = { + { 2, false, IS_P, "is"}, + { 2, false, TO_P, "to"}, + { 3, false, ABS_P, "abs"}, + { 3, false, LAX_P, "lax"}, + { 4, false, FLAG_P, "flag"}, + { 4, false, LAST_P, "last"}, + { 4, true, NULL_P, "null"}, + { 4, false, SIZE_P, "size"}, + { 4, true, TRUE_P, "true"}, + { 4, false, TYPE_P, "type"}, + { 4, false, WITH_P, "with"}, + { 5, true, FALSE_P, "false"}, + { 5, false, FLOOR_P, "floor"}, + { 6, false, DOUBLE_P, "double"}, + { 6, false, EXISTS_P, "exists"}, + { 6, false, STARTS_P, "starts"}, + { 6, false, STRICT_P, "strict"}, + { 7, false, CEILING_P, "ceiling"}, + { 7, false, UNKNOWN_P, "unknown"}, + { 8, false, DATETIME_P, "datetime"}, + { 8, false, KEYVALUE_P, "keyvalue"}, + { 10,false, LIKE_REGEX_P, "like_regex"}, +}; + +static int +checkSpecialVal() +{ + int res = IDENT_P; + int diff; + keyword *StopLow = keywords, + *StopHigh = keywords + lengthof(keywords), + *StopMiddle; + + if (scanstring.len > keywords[lengthof(keywords) - 1].len) + return res; + + while(StopLow < StopHigh) + { + StopMiddle = StopLow + ((StopHigh - StopLow) >> 1); + + if (StopMiddle->len == scanstring.len) + diff = pg_strncasecmp(StopMiddle->keyword, scanstring.val, scanstring.len); + else + diff = StopMiddle->len - scanstring.len; + + if (diff < 0) + StopLow = StopMiddle + 1; + else if (diff > 0) + StopHigh = StopMiddle; + else + { + if (StopMiddle->lowercase) + diff = strncmp(StopMiddle->keyword, scanstring.val, scanstring.len); + + if (diff == 0) + res = StopMiddle->val; + + break; + } + } + + return res; +} + +/* + * Called before any actual parsing is done + */ +static void +jsonpath_scanner_init(const char *str, int slen) +{ + if (slen <= 0) + slen = strlen(str); + + /* + * Might be left over after ereport() + */ + yy_init_globals(); + + /* + * Make a scan buffer with special termination needed by flex. 
+ */ + + scanbuflen = slen; + scanbuf = palloc(slen + 2); + memcpy(scanbuf, str, slen); + scanbuf[slen] = scanbuf[slen + 1] = YY_END_OF_BUFFER_CHAR; + scanbufhandle = yy_scan_buffer(scanbuf, slen + 2); + + BEGIN(INITIAL); +} + + +/* + * Called after parsing is done to clean up after jsonpath_scanner_init() + */ +static void +jsonpath_scanner_finish(void) +{ + yy_delete_buffer(scanbufhandle); + pfree(scanbuf); +} + +static void +addstring(bool init, char *s, int l) { + if (init) { + scanstring.total = 32; + scanstring.val = palloc(scanstring.total); + scanstring.len = 0; + } + + if (s && l) { + while(scanstring.len + l + 1 >= scanstring.total) { + scanstring.total *= 2; + scanstring.val = repalloc(scanstring.val, scanstring.total); + } + + memcpy(scanstring.val + scanstring.len, s, l); + scanstring.len += l; + } +} + +static void +addchar(bool init, char s) { + if (init) + { + scanstring.total = 32; + scanstring.val = palloc(scanstring.total); + scanstring.len = 0; + } + else if(scanstring.len + 1 >= scanstring.total) + { + scanstring.total *= 2; + scanstring.val = repalloc(scanstring.val, scanstring.total); + } + + scanstring.val[ scanstring.len ] = s; + if (s != '\0') + scanstring.len++; +} + +JsonPathParseResult * +parsejsonpath(const char *str, int len) { + JsonPathParseResult *parseresult; + + jsonpath_scanner_init(str, len); + + if (jsonpath_yyparse((void*)&parseresult) != 0) + jsonpath_yyerror(NULL, "bugus input"); + + jsonpath_scanner_finish(); + + return parseresult; +} + +static int +hexval(char c) +{ + if (c >= '0' && c <= '9') + return c - '0'; + if (c >= 'a' && c <= 'f') + return c - 'a' + 0xA; + if (c >= 'A' && c <= 'F') + return c - 'A' + 0xA; + elog(ERROR, "invalid hexadecimal digit"); + return 0; /* not reached */ +} + +static void +addUnicodeChar(int ch) +{ + /* + * For UTF8, replace the escape sequence by the actual + * utf8 character in lex->strval. Do this also for other + * encodings if the escape designates an ASCII character, + * otherwise raise an error. + */ + + if (ch == 0) + { + /* We can't allow this, since our TEXT type doesn't */ + ereport(ERROR, + (errcode(ERRCODE_UNTRANSLATABLE_CHARACTER), + errmsg("unsupported Unicode escape sequence"), + errdetail("\\u0000 cannot be converted to text."))); + } + else if (GetDatabaseEncoding() == PG_UTF8) + { + char utf8str[5]; + int utf8len; + + unicode_to_utf8(ch, (unsigned char *) utf8str); + utf8len = pg_utf_mblen((unsigned char *) utf8str); + addstring(false, utf8str, utf8len); + } + else if (ch <= 0x007f) + { + /* + * This is the only way to designate things like a + * form feed character in JSON, so it's useful in all + * encodings. 
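For illustration (not from the patch): addstring() and addchar() above grow the shared scanstring buffer by doubling whenever the next append would not fit. A freestanding sketch of the same scheme, with malloc/realloc standing in for palloc/repalloc:

/* sketch of the doubling string buffer used by the scanner (illustrative only) */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

typedef struct
{
	char   *val;
	int		len;
	int		total;
} strbuf;

static void
sb_init(strbuf *sb)
{
	sb->total = 32;
	sb->val = malloc(sb->total);
	sb->len = 0;
}

static void
sb_append(strbuf *sb, const char *s, int l)
{
	while (sb->len + l + 1 >= sb->total)	/* keep room for a terminator */
	{
		sb->total *= 2;
		sb->val = realloc(sb->val, sb->total);
	}
	memcpy(sb->val + sb->len, s, l);
	sb->len += l;
	sb->val[sb->len] = '\0';
}

int
main(void)
{
	strbuf	sb;

	sb_init(&sb);
	sb_append(&sb, "$.foo", 5);
	sb_append(&sb, "[*]", 3);
	printf("%s (len=%d, total=%d)\n", sb.val, sb.len, sb.total);	/* $.foo[*] */
	free(sb.val);
	return 0;
}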
+ */ + addchar(false, (char) ch); + } + else + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode escape values cannot be used for code point values above 007F when the server encoding is not UTF8."))); + } +} + +static void +addUnicode(int ch, int *hi_surrogate) +{ + if (ch >= 0xd800 && ch <= 0xdbff) + { + if (*hi_surrogate != -1) + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode high surrogate must not follow a high surrogate."))); + *hi_surrogate = (ch & 0x3ff) << 10; + return; + } + else if (ch >= 0xdc00 && ch <= 0xdfff) + { + if (*hi_surrogate == -1) + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode low surrogate must follow a high surrogate."))); + ch = 0x10000 + *hi_surrogate + (ch & 0x3ff); + *hi_surrogate = -1; + } + else if (*hi_surrogate != -1) + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode low surrogate must follow a high surrogate."))); + } + + addUnicodeChar(ch); +} + +/* + * parseUnicode was adopted from json_lex_string() in + * src/backend/utils/adt/json.c + */ +static void +parseUnicode(char *s, int l) +{ + int i; + int hi_surrogate = -1; + + for (i = 2; i < l; i += 2) /* skip '\u' */ + { + int ch = 0; + int j; + + if (s[i] == '{') /* parse '\u{XX...}' */ + { + while (s[++i] != '}' && i < l) + ch = (ch << 4) | hexval(s[i]); + i++; /* ski p '}' */ + } + else /* parse '\uXXXX' */ + { + for (j = 0; j < 4 && i < l; j++) + ch = (ch << 4) | hexval(s[i++]); + } + + addUnicode(ch, &hi_surrogate); + } + + if (hi_surrogate != -1) + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode low surrogate must follow a high surrogate."))); + } +} + +static void +parseHexChars(char *s, int l) +{ + int i; + + Assert(l % 4 /* \xXX */ == 0); + + for (i = 0; i < l / 4; i++) + { + int ch = (hexval(s[i * 4 + 2]) << 4) | hexval(s[i * 4 + 3]); + + addUnicodeChar(ch); + } +} + +/* + * Interface functions to make flex use palloc() instead of malloc(). + * It'd be better to make these static, but flex insists otherwise. 
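For illustration (not from the patch): addUnicode() above combines a surrogate pair by keeping the low 10 bits of each half, shifting the high surrogate's bits left by 10, and adding 0x10000. A minimal sketch of that arithmetic:

/* sketch of the surrogate-pair arithmetic in addUnicode() (illustrative only) */
#include <stdio.h>

static int
combine_surrogates(int hi, int lo)
{
	/* hi is expected in U+D800..DBFF, lo in U+DC00..DFFF (the real code checks) */
	return 0x10000 + ((hi & 0x3ff) << 10) + (lo & 0x3ff);
}

int
main(void)
{
	/* a string containing \ud83d\ude00 designates U+1F600 */
	printf("U+%X\n", combine_surrogates(0xd83d, 0xde00));
	return 0;
}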
+ */ + +void * +jsonpath_yyalloc(yy_size_t bytes) +{ + return palloc(bytes); +} + +void * +jsonpath_yyrealloc(void *ptr, yy_size_t bytes) +{ + if (ptr) + return repalloc(ptr, bytes); + else + return palloc(bytes); +} + +void +jsonpath_yyfree(void *ptr) +{ + if (ptr) + pfree(ptr); +} + diff --git a/src/backend/utils/adt/numeric.c b/src/backend/utils/adt/numeric.c index 444e575e1d..8893878e26 100644 --- a/src/backend/utils/adt/numeric.c +++ b/src/backend/utils/adt/numeric.c @@ -466,14 +466,15 @@ static void free_var(NumericVar *var); static void zero_var(NumericVar *var); static const char *set_var_from_str(const char *str, const char *cp, - NumericVar *dest); + NumericVar *dest, ErrorData **edata); static void set_var_from_num(Numeric value, NumericVar *dest); static void init_var_from_num(Numeric num, NumericVar *dest); static void set_var_from_var(const NumericVar *value, NumericVar *dest); static char *get_str_from_var(const NumericVar *var); static char *get_str_from_var_sci(const NumericVar *var, int rscale); -static Numeric make_result(const NumericVar *var); +static inline Numeric make_result(const NumericVar *var); +static Numeric make_result_safe(const NumericVar *var, ErrorData **edata); static void apply_typmod(NumericVar *var, int32 typmod); @@ -510,12 +511,12 @@ static void mul_var(const NumericVar *var1, const NumericVar *var2, int rscale); static void div_var(const NumericVar *var1, const NumericVar *var2, NumericVar *result, - int rscale, bool round); + int rscale, bool round, ErrorData **edata); static void div_var_fast(const NumericVar *var1, const NumericVar *var2, NumericVar *result, int rscale, bool round); static int select_div_scale(const NumericVar *var1, const NumericVar *var2); static void mod_var(const NumericVar *var1, const NumericVar *var2, - NumericVar *result); + NumericVar *result, ErrorData **edata); static void ceil_var(const NumericVar *var, NumericVar *result); static void floor_var(const NumericVar *var, NumericVar *result); @@ -616,7 +617,7 @@ numeric_in(PG_FUNCTION_ARGS) init_var(&value); - cp = set_var_from_str(str, cp, &value); + cp = set_var_from_str(str, cp, &value, NULL); /* * We duplicate a few lines of code here because we would like to @@ -1579,14 +1580,14 @@ compute_bucket(Numeric operand, Numeric bound1, Numeric bound2, sub_var(&operand_var, &bound1_var, &operand_var); sub_var(&bound2_var, &bound1_var, &bound2_var); div_var(&operand_var, &bound2_var, result_var, - select_div_scale(&operand_var, &bound2_var), true); + select_div_scale(&operand_var, &bound2_var), true, NULL); } else { sub_var(&bound1_var, &operand_var, &operand_var); sub_var(&bound1_var, &bound2_var, &bound1_var); div_var(&operand_var, &bound1_var, result_var, - select_div_scale(&operand_var, &bound1_var), true); + select_div_scale(&operand_var, &bound1_var), true, NULL); } mul_var(result_var, count_var, result_var, @@ -2386,17 +2387,9 @@ hash_numeric_extended(PG_FUNCTION_ARGS) * ---------------------------------------------------------------------- */ - -/* - * numeric_add() - - * - * Add two numerics - */ -Datum -numeric_add(PG_FUNCTION_ARGS) +Numeric +numeric_add_internal(Numeric num1, Numeric num2, ErrorData **edata) { - Numeric num1 = PG_GETARG_NUMERIC(0); - Numeric num2 = PG_GETARG_NUMERIC(1); NumericVar arg1; NumericVar arg2; NumericVar result; @@ -2406,7 +2399,7 @@ numeric_add(PG_FUNCTION_ARGS) * Handle NaN */ if (NUMERIC_IS_NAN(num1) || NUMERIC_IS_NAN(num2)) - PG_RETURN_NUMERIC(make_result(&const_nan)); + return make_result_safe(&const_nan, edata); /* * Unpack the 
values, let add_var() compute the result and return it. @@ -2417,24 +2410,31 @@ numeric_add(PG_FUNCTION_ARGS) init_var(&result); add_var(&arg1, &arg2, &result); - res = make_result(&result); + res = make_result_safe(&result, edata); free_var(&result); - PG_RETURN_NUMERIC(res); + return res; } - /* - * numeric_sub() - + * numeric_add() - * - * Subtract one numeric from another + * Add two numerics */ Datum -numeric_sub(PG_FUNCTION_ARGS) +numeric_add(PG_FUNCTION_ARGS) { Numeric num1 = PG_GETARG_NUMERIC(0); Numeric num2 = PG_GETARG_NUMERIC(1); + Numeric res = numeric_add_internal(num1, num2, NULL); + + PG_RETURN_NUMERIC(res); +} + +Numeric +numeric_sub_internal(Numeric num1, Numeric num2, ErrorData **edata) +{ NumericVar arg1; NumericVar arg2; NumericVar result; @@ -2444,7 +2444,7 @@ numeric_sub(PG_FUNCTION_ARGS) * Handle NaN */ if (NUMERIC_IS_NAN(num1) || NUMERIC_IS_NAN(num2)) - PG_RETURN_NUMERIC(make_result(&const_nan)); + return make_result_safe(&const_nan, edata); /* * Unpack the values, let sub_var() compute the result and return it. @@ -2455,24 +2455,31 @@ numeric_sub(PG_FUNCTION_ARGS) init_var(&result); sub_var(&arg1, &arg2, &result); - res = make_result(&result); + res = make_result_safe(&result, edata); free_var(&result); - PG_RETURN_NUMERIC(res); + return res; } - /* - * numeric_mul() - + * numeric_sub() - * - * Calculate the product of two numerics + * Subtract one numeric from another */ Datum -numeric_mul(PG_FUNCTION_ARGS) +numeric_sub(PG_FUNCTION_ARGS) { Numeric num1 = PG_GETARG_NUMERIC(0); Numeric num2 = PG_GETARG_NUMERIC(1); + Numeric res = numeric_sub_internal(num1, num2, NULL); + + PG_RETURN_NUMERIC(res); +} + +Numeric +numeric_mul_internal(Numeric num1, Numeric num2, ErrorData **edata) +{ NumericVar arg1; NumericVar arg2; NumericVar result; @@ -2482,7 +2489,7 @@ numeric_mul(PG_FUNCTION_ARGS) * Handle NaN */ if (NUMERIC_IS_NAN(num1) || NUMERIC_IS_NAN(num2)) - PG_RETURN_NUMERIC(make_result(&const_nan)); + return make_result_safe(&const_nan, edata); /* * Unpack the values, let mul_var() compute the result and return it. 
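For illustration (not from the patch): the refactoring above splits each arithmetic function into a *_internal() worker taking an optional ErrorData ** out-parameter and a thin fmgr wrapper that passes NULL, so the existing SQL functions keep throwing while the jsonpath executor can capture the error and continue. A freestanding analogue of that calling convention, with invented names and a plain struct standing in for ErrorData:

/* sketch of the report-or-throw worker/wrapper split (illustrative only) */
#include <stdio.h>
#include <stdlib.h>

typedef struct
{
	int			code;
	const char *message;
} Error;

static int
div_internal(int a, int b, Error **edata)
{
	if (b == 0)
	{
		static Error e = {1, "division by zero"};

		if (edata)
		{
			*edata = &e;		/* report "softly"; caller decides what to do */
			return 0;
		}
		fprintf(stderr, "ERROR: %s\n", e.message);	/* "throw" */
		exit(1);
	}
	return a / b;
}

/* thin public wrapper: keeps the original always-throwing behaviour */
static int
div_public(int a, int b)
{
	return div_internal(a, b, NULL);
}

int
main(void)
{
	Error	   *err = NULL;

	(void) div_internal(7, 0, &err);
	if (err)
		printf("caught: %s\n", err->message);
	printf("%d\n", div_public(7, 2));	/* 3 */
	return 0;
}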
@@ -2497,24 +2504,31 @@ numeric_mul(PG_FUNCTION_ARGS) init_var(&result); mul_var(&arg1, &arg2, &result, arg1.dscale + arg2.dscale); - res = make_result(&result); + res = make_result_safe(&result, edata); free_var(&result); - PG_RETURN_NUMERIC(res); + return res; } - /* - * numeric_div() - + * numeric_mul() - * - * Divide one numeric into another + * Calculate the product of two numerics */ Datum -numeric_div(PG_FUNCTION_ARGS) +numeric_mul(PG_FUNCTION_ARGS) { Numeric num1 = PG_GETARG_NUMERIC(0); Numeric num2 = PG_GETARG_NUMERIC(1); + Numeric res = numeric_mul_internal(num1, num2, NULL); + + PG_RETURN_NUMERIC(res); +} + +Numeric +numeric_div_internal(Numeric num1, Numeric num2, ErrorData **edata) +{ NumericVar arg1; NumericVar arg2; NumericVar result; @@ -2525,7 +2539,7 @@ numeric_div(PG_FUNCTION_ARGS) * Handle NaN */ if (NUMERIC_IS_NAN(num1) || NUMERIC_IS_NAN(num2)) - PG_RETURN_NUMERIC(make_result(&const_nan)); + return make_result_safe(&const_nan, edata); /* * Unpack the arguments @@ -2543,12 +2557,30 @@ numeric_div(PG_FUNCTION_ARGS) /* * Do the divide and return the result */ - div_var(&arg1, &arg2, &result, rscale, true); + div_var(&arg1, &arg2, &result, rscale, true, edata); - res = make_result(&result); + if (edata && *edata) + res = NULL; /* error occured */ + else + res = make_result_safe(&result, edata); free_var(&result); + return res; +} + +/* + * numeric_div() - + * + * Divide one numeric into another + */ +Datum +numeric_div(PG_FUNCTION_ARGS) +{ + Numeric num1 = PG_GETARG_NUMERIC(0); + Numeric num2 = PG_GETARG_NUMERIC(1); + Numeric res = numeric_div_internal(num1, num2, NULL); + PG_RETURN_NUMERIC(res); } @@ -2585,7 +2617,7 @@ numeric_div_trunc(PG_FUNCTION_ARGS) /* * Do the divide and return the result */ - div_var(&arg1, &arg2, &result, 0, false); + div_var(&arg1, &arg2, &result, 0, false, NULL); res = make_result(&result); @@ -2594,36 +2626,43 @@ numeric_div_trunc(PG_FUNCTION_ARGS) PG_RETURN_NUMERIC(res); } - -/* - * numeric_mod() - - * - * Calculate the modulo of two numerics - */ -Datum -numeric_mod(PG_FUNCTION_ARGS) +Numeric +numeric_mod_internal(Numeric num1, Numeric num2, ErrorData **edata) { - Numeric num1 = PG_GETARG_NUMERIC(0); - Numeric num2 = PG_GETARG_NUMERIC(1); Numeric res; NumericVar arg1; NumericVar arg2; NumericVar result; if (NUMERIC_IS_NAN(num1) || NUMERIC_IS_NAN(num2)) - PG_RETURN_NUMERIC(make_result(&const_nan)); + return make_result_safe(&const_nan, edata); init_var_from_num(num1, &arg1); init_var_from_num(num2, &arg2); init_var(&result); - mod_var(&arg1, &arg2, &result); + mod_var(&arg1, &arg2, &result, edata); - res = make_result(&result); + res = make_result_safe(&result, edata); free_var(&result); + return res; +} + +/* + * numeric_mod() - + * + * Calculate the modulo of two numerics + */ +Datum +numeric_mod(PG_FUNCTION_ARGS) +{ + Numeric num1 = PG_GETARG_NUMERIC(0); + Numeric num2 = PG_GETARG_NUMERIC(1); + Numeric res = numeric_mod_internal(num1, num2, NULL); + PG_RETURN_NUMERIC(res); } @@ -3227,55 +3266,73 @@ numeric_int2(PG_FUNCTION_ARGS) } -Datum -float8_numeric(PG_FUNCTION_ARGS) +Numeric +float8_numeric_internal(float8 val, ErrorData **edata) { - float8 val = PG_GETARG_FLOAT8(0); Numeric res; NumericVar result; char buf[DBL_DIG + 100]; if (isnan(val)) - PG_RETURN_NUMERIC(make_result(&const_nan)); + return make_result_safe(&const_nan, edata); if (isinf(val)) - ereport(ERROR, - (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), - errmsg("cannot convert infinity to numeric"))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + 
errmsg("cannot convert infinity to numeric"))); + return NULL; + } snprintf(buf, sizeof(buf), "%.*g", DBL_DIG, val); init_var(&result); /* Assume we need not worry about leading/trailing spaces */ - (void) set_var_from_str(buf, buf, &result); + (void) set_var_from_str(buf, buf, &result, edata); - res = make_result(&result); + res = make_result_safe(&result, edata); free_var(&result); - PG_RETURN_NUMERIC(res); + return res; } - Datum -numeric_float8(PG_FUNCTION_ARGS) +float8_numeric(PG_FUNCTION_ARGS) +{ + float8 val = PG_GETARG_FLOAT8(0); + Numeric res = float8_numeric_internal(val, NULL); + + PG_RETURN_NUMERIC(res); +} + +float8 +numeric_float8_internal(Numeric num, ErrorData **edata) { - Numeric num = PG_GETARG_NUMERIC(0); char *tmp; - Datum result; + float8 result; if (NUMERIC_IS_NAN(num)) - PG_RETURN_FLOAT8(get_float8_nan()); + return get_float8_nan(); tmp = DatumGetCString(DirectFunctionCall1(numeric_out, NumericGetDatum(num))); - result = DirectFunctionCall1(float8in, CStringGetDatum(tmp)); + result = float8in_internal_safe(tmp, NULL, "double precison", tmp, edata); pfree(tmp); - PG_RETURN_DATUM(result); + return result; +} + +Datum +numeric_float8(PG_FUNCTION_ARGS) +{ + Numeric num = PG_GETARG_NUMERIC(0); + float8 result = numeric_float8_internal(num, NULL); + + PG_RETURN_FLOAT8(result); } @@ -3319,7 +3376,7 @@ float4_numeric(PG_FUNCTION_ARGS) init_var(&result); /* Assume we need not worry about leading/trailing spaces */ - (void) set_var_from_str(buf, buf, &result); + (void) set_var_from_str(buf, buf, &result, NULL); res = make_result(&result); @@ -4894,7 +4951,7 @@ numeric_stddev_internal(NumericAggState *state, else mul_var(&vN, &vN, &vNminus1, 0); /* N * N */ rscale = select_div_scale(&vsumX2, &vNminus1); - div_var(&vsumX2, &vNminus1, &vsumX, rscale, true); /* variance */ + div_var(&vsumX2, &vNminus1, &vsumX, rscale, true, NULL); /* variance */ if (!variance) sqrt_var(&vsumX, &vsumX, rscale); /* stddev */ @@ -5620,7 +5677,8 @@ zero_var(NumericVar *var) * reports. (Typically cp would be the same except advanced over spaces.) 
*/ static const char * -set_var_from_str(const char *str, const char *cp, NumericVar *dest) +set_var_from_str(const char *str, const char *cp, NumericVar *dest, + ErrorData **edata) { bool have_dp = false; int i; @@ -5658,10 +5716,13 @@ set_var_from_str(const char *str, const char *cp, NumericVar *dest) } if (!isdigit((unsigned char) *cp)) - ereport(ERROR, - (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), - errmsg("invalid input syntax for type %s: \"%s\"", - "numeric", str))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type %s: \"%s\"", + "numeric", str))); + return NULL; + } decdigits = (unsigned char *) palloc(strlen(cp) + DEC_DIGITS * 2); @@ -5682,10 +5743,13 @@ set_var_from_str(const char *str, const char *cp, NumericVar *dest) else if (*cp == '.') { if (have_dp) - ereport(ERROR, - (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), - errmsg("invalid input syntax for type %s: \"%s\"", - "numeric", str))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type %s: \"%s\"", + "numeric", str))); + return NULL; + } have_dp = true; cp++; } @@ -5706,10 +5770,14 @@ set_var_from_str(const char *str, const char *cp, NumericVar *dest) cp++; exponent = strtol(cp, &endptr, 10); if (endptr == cp) - ereport(ERROR, - (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), - errmsg("invalid input syntax for type %s: \"%s\"", - "numeric", str))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type %s: \"%s\"", + "numeric", str))); + return NULL; + } + cp = endptr; /* @@ -5721,9 +5789,13 @@ set_var_from_str(const char *str, const char *cp, NumericVar *dest) * for consistency use the same ereport errcode/text as make_result(). */ if (exponent >= INT_MAX / 2 || exponent <= -(INT_MAX / 2)) - ereport(ERROR, - (errcode(ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE), - errmsg("value overflows numeric format"))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE), + errmsg("value overflows numeric format"))); + return NULL; + } + dweight += (int) exponent; dscale -= (int) exponent; if (dscale < 0) @@ -6065,7 +6137,7 @@ get_str_from_var_sci(const NumericVar *var, int rscale) init_var(&significand); power_var_int(&const_ten, exponent, &denominator, denom_scale); - div_var(var, &denominator, &significand, rscale, true); + div_var(var, &denominator, &significand, rscale, true, NULL); sig_out = get_str_from_var(&significand); free_var(&denominator); @@ -6087,15 +6159,14 @@ get_str_from_var_sci(const NumericVar *var, int rscale) return str; } - /* - * make_result() - + * make_result_safe() - * * Create the packed db numeric format in palloc()'d memory from * a variable. 
*/ static Numeric -make_result(const NumericVar *var) +make_result_safe(const NumericVar *var, ErrorData **edata) { Numeric result; NumericDigit *digits = var->digits; @@ -6166,14 +6237,22 @@ make_result(const NumericVar *var) /* Check for overflow of int16 fields */ if (NUMERIC_WEIGHT(result) != weight || NUMERIC_DSCALE(result) != var->dscale) - ereport(ERROR, - (errcode(ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE), - errmsg("value overflows numeric format"))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE), + errmsg("value overflows numeric format"))); + return NULL; + } dump_numeric("make_result()", result); return result; } +static inline Numeric +make_result(const NumericVar *var) +{ + return make_result_safe(var, NULL); +} /* * apply_typmod() - @@ -7051,7 +7130,7 @@ mul_var(const NumericVar *var1, const NumericVar *var2, NumericVar *result, */ static void div_var(const NumericVar *var1, const NumericVar *var2, NumericVar *result, - int rscale, bool round) + int rscale, bool round, ErrorData **edata) { int div_ndigits; int res_ndigits; @@ -7076,9 +7155,12 @@ div_var(const NumericVar *var1, const NumericVar *var2, NumericVar *result, * unnormalized divisor. */ if (var2ndigits == 0 || var2->digits[0] == 0) - ereport(ERROR, - (errcode(ERRCODE_DIVISION_BY_ZERO), - errmsg("division by zero"))); + { + ereport_safe(edata, ERROR, + (errcode(ERRCODE_DIVISION_BY_ZERO), + errmsg("division by zero"))); + return; + } /* * Now result zero check @@ -7699,7 +7781,8 @@ select_div_scale(const NumericVar *var1, const NumericVar *var2) * Calculate the modulo of two numerics at variable level */ static void -mod_var(const NumericVar *var1, const NumericVar *var2, NumericVar *result) +mod_var(const NumericVar *var1, const NumericVar *var2, NumericVar *result, + ErrorData **edata) { NumericVar tmp; @@ -7711,7 +7794,10 @@ mod_var(const NumericVar *var1, const NumericVar *var2, NumericVar *result) * div_var can be persuaded to give us trunc(x/y) directly. * ---------- */ - div_var(var1, var2, &tmp, 0, false); + div_var(var1, var2, &tmp, 0, false, edata); + + if (edata && *edata) + return; /* error occured */ mul_var(var2, &tmp, &tmp, var2->dscale); @@ -8364,7 +8450,7 @@ power_var_int(const NumericVar *base, int exp, NumericVar *result, int rscale) round_var(result, rscale); return; case -1: - div_var(&const_one, base, result, rscale, true); + div_var(&const_one, base, result, rscale, true, NULL); return; case 2: mul_var(base, base, result, rscale); diff --git a/src/backend/utils/adt/regexp.c b/src/backend/utils/adt/regexp.c index 171fcc8a44..4ba9d6010f 100644 --- a/src/backend/utils/adt/regexp.c +++ b/src/backend/utils/adt/regexp.c @@ -133,7 +133,7 @@ static Datum build_regexp_split_result(regexp_matches_ctx *splitctx); * Pattern is given in the database encoding. We internally convert to * an array of pg_wchar, which is what Spencer's regex package wants. */ -static regex_t * +regex_t * RE_compile_and_cache(text *text_re, int cflags, Oid collation) { int text_re_len = VARSIZE_ANY_EXHDR(text_re); @@ -339,7 +339,7 @@ RE_execute(regex_t *re, char *dat, int dat_len, * Both pattern and data are given in the database encoding. We internally * convert to array of pg_wchar which is what Spencer's regex package wants. 
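For illustration (not from the patch): mod_var() above only needs div_var() with rscale = 0 and round = false because, given a truncating division, the remainder follows from the identity x % y = x - trunc(x/y) * y. A tiny sketch of that identity on doubles:

/* sketch of the truncating-division remainder identity (illustrative only) */
#include <stdio.h>
#include <math.h>

static double
trunc_mod(double x, double y)
{
	return x - trunc(x / y) * y;
}

int
main(void)
{
	printf("%g\n", trunc_mod(7.3, 2.0));	/* 1.3 */
	printf("%g\n", trunc_mod(-7.0, 2.0));	/* -1, sign follows the dividend */
	return 0;
}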
*/ -static bool +bool RE_compile_and_execute(text *text_re, char *dat, int dat_len, int cflags, Oid collation, int nmatch, regmatch_t *pmatch) diff --git a/src/backend/utils/adt/timestamp.c b/src/backend/utils/adt/timestamp.c index 449164ae7e..fcc6d23e64 100644 --- a/src/backend/utils/adt/timestamp.c +++ b/src/backend/utils/adt/timestamp.c @@ -70,7 +70,6 @@ typedef struct static TimeOffset time2t(const int hour, const int min, const int sec, const fsec_t fsec); static Timestamp dt2local(Timestamp dt, int timezone); -static void AdjustTimestampForTypmod(Timestamp *time, int32 typmod); static void AdjustIntervalForTypmod(Interval *interval, int32 typmod); static TimestampTz timestamp2timestamptz(Timestamp timestamp); static Timestamp timestamptz2timestamp(TimestampTz timestamp); @@ -330,7 +329,7 @@ timestamp_scale(PG_FUNCTION_ARGS) * AdjustTimestampForTypmod --- round off a timestamp to suit given typmod * Works for either timestamp or timestamptz. */ -static void +void AdjustTimestampForTypmod(Timestamp *time, int32 typmod) { static const int64 TimestampScales[MAX_TIMESTAMP_PRECISION + 1] = { diff --git a/src/backend/utils/errcodes.txt b/src/backend/utils/errcodes.txt index 788f88129b..1ef95c3cd4 100644 --- a/src/backend/utils/errcodes.txt +++ b/src/backend/utils/errcodes.txt @@ -206,6 +206,22 @@ Section: Class 22 - Data Exception 2200N E ERRCODE_INVALID_XML_CONTENT invalid_xml_content 2200S E ERRCODE_INVALID_XML_COMMENT invalid_xml_comment 2200T E ERRCODE_INVALID_XML_PROCESSING_INSTRUCTION invalid_xml_processing_instruction +22030 E ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE duplicate_json_object_key_value +22031 E ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION invalid_argument_for_json_datetime_function +22032 E ERRCODE_INVALID_JSON_TEXT invalid_json_text +22033 E ERRCODE_INVALID_JSON_SUBSCRIPT invalid_json_subscript +22034 E ERRCODE_MORE_THAN_ONE_JSON_ITEM more_than_one_json_item +22035 E ERRCODE_NO_JSON_ITEM no_json_item +22036 E ERRCODE_NON_NUMERIC_JSON_ITEM non_numeric_json_item +22037 E ERRCODE_NON_UNIQUE_KEYS_IN_JSON_OBJECT non_unique_keys_in_json_object +22038 E ERRCODE_SINGLETON_JSON_ITEM_REQUIRED singleton_json_item_required +22039 E ERRCODE_JSON_ARRAY_NOT_FOUND json_array_not_found +2203A E ERRCODE_JSON_MEMBER_NOT_FOUND json_member_not_found +2203B E ERRCODE_JSON_NUMBER_NOT_FOUND json_number_not_found +2203C E ERRCODE_JSON_OBJECT_NOT_FOUND object_not_found +2203F E ERRCODE_JSON_SCALAR_REQUIRED json_scalar_required +2203D E ERRCODE_TOO_MANY_JSON_ARRAY_ELEMENTS too_many_json_array_elements +2203E E ERRCODE_TOO_MANY_JSON_OBJECT_MEMBERS too_many_json_object_members Section: Class 23 - Integrity Constraint Violation diff --git a/src/include/catalog/pg_amop.dat b/src/include/catalog/pg_amop.dat index 075a54c4ac..b2d226f475 100644 --- a/src/include/catalog/pg_amop.dat +++ b/src/include/catalog/pg_amop.dat @@ -1433,11 +1433,23 @@ { amopfamily => 'gin/jsonb_ops', amoplefttype => 'jsonb', amoprighttype => '_text', amopstrategy => '11', amopopr => '?&(jsonb,_text)', amopmethod => 'gin' }, +{ amopfamily => 'gin/jsonb_ops', amoplefttype => 'jsonb', + amoprighttype => 'jsonpath', amopstrategy => '15', + amopopr => '@?(jsonb,jsonpath)', amopmethod => 'gin' }, +{ amopfamily => 'gin/jsonb_ops', amoplefttype => 'jsonb', + amoprighttype => 'jsonpath', amopstrategy => '16', + amopopr => '@~(jsonb,jsonpath)', amopmethod => 'gin' }, # GIN jsonb_path_ops { amopfamily => 'gin/jsonb_path_ops', amoplefttype => 'jsonb', amoprighttype => 'jsonb', amopstrategy => '7', amopopr => '@>(jsonb,jsonb)', 
amopmethod => 'gin' }, +{ amopfamily => 'gin/jsonb_path_ops', amoplefttype => 'jsonb', + amoprighttype => 'jsonpath', amopstrategy => '15', + amopopr => '@?(jsonb,jsonpath)', amopmethod => 'gin' }, +{ amopfamily => 'gin/jsonb_path_ops', amoplefttype => 'jsonb', + amoprighttype => 'jsonpath', amopstrategy => '16', + amopopr => '@~(jsonb,jsonpath)', amopmethod => 'gin' }, # SP-GiST range_ops { amopfamily => 'spgist/range_ops', amoplefttype => 'anyrange', diff --git a/src/include/catalog/pg_operator.dat b/src/include/catalog/pg_operator.dat index ce23c2f0aa..e08057a6b2 100644 --- a/src/include/catalog/pg_operator.dat +++ b/src/include/catalog/pg_operator.dat @@ -3195,5 +3195,33 @@ { oid => '3287', descr => 'delete path', oprname => '#-', oprleft => 'jsonb', oprright => '_text', oprresult => 'jsonb', oprcode => 'jsonb_delete_path' }, +{ oid => '6075', descr => 'jsonpath items', + oprname => '@*', oprleft => 'jsonb', oprright => 'jsonpath', + oprresult => 'jsonb', oprcode => 'jsonpath_query(jsonb,jsonpath)' }, +{ oid => '6076', descr => 'jsonpath exists', + oprname => '@?', oprleft => 'jsonb', oprright => 'jsonpath', + oprresult => 'bool', oprcode => 'jsonpath_exists(jsonb,jsonpath)', + oprrest => 'contsel', oprjoin => 'contjoinsel' }, +{ oid => '6107', descr => 'jsonpath predicate', + oprname => '@~', oprleft => 'jsonb', oprright => 'jsonpath', + oprresult => 'bool', oprcode => 'jsonpath_predicate(jsonb,jsonpath)', + oprrest => 'contsel', oprjoin => 'contjoinsel' }, +{ oid => '6122', descr => 'jsonpath items wrapped', + oprname => '@#', oprleft => 'jsonb', oprright => 'jsonpath', + oprresult => 'jsonb', oprcode => 'jsonpath_query_wrapped(jsonb,jsonpath)' }, +{ oid => '6070', descr => 'jsonpath items', + oprname => '@*', oprleft => 'json', oprright => 'jsonpath', + oprresult => 'json', oprcode => 'jsonpath_query(json,jsonpath)' }, +{ oid => '6071', descr => 'jsonpath exists', + oprname => '@?', oprleft => 'json', oprright => 'jsonpath', + oprresult => 'bool', oprcode => 'jsonpath_exists(json,jsonpath)', + oprrest => 'contsel', oprjoin => 'contjoinsel' }, +{ oid => '6108', descr => 'jsonpath predicate', + oprname => '@~', oprleft => 'json', oprright => 'jsonpath', + oprresult => 'bool', oprcode => 'jsonpath_predicate(json,jsonpath)', + oprrest => 'contsel', oprjoin => 'contjoinsel' }, +{ oid => '6123', descr => 'jsonpath items wrapped', + oprname => '@#', oprleft => 'json', oprright => 'jsonpath', + oprresult => 'json', oprcode => 'jsonpath_query_wrapped(json,jsonpath)' }, ] diff --git a/src/include/catalog/pg_proc.dat b/src/include/catalog/pg_proc.dat index 4026018ba9..74f49e5e59 100644 --- a/src/include/catalog/pg_proc.dat +++ b/src/include/catalog/pg_proc.dat @@ -9103,6 +9103,71 @@ proname => 'jsonb_insert', prorettype => 'jsonb', proargtypes => 'jsonb _text jsonb bool', prosrc => 'jsonb_insert' }, +# jsonpath +{ oid => '6052', descr => 'I/O', + proname => 'jsonpath_in', prorettype => 'jsonpath', proargtypes => 'cstring', + prosrc => 'jsonpath_in' }, +{ oid => '6053', descr => 'I/O', + proname => 'jsonpath_out', prorettype => 'cstring', proargtypes => 'jsonpath', + prosrc => 'jsonpath_out' }, +{ oid => '6054', descr => 'implementation of @? 
operator', + proname => 'jsonpath_exists', prorettype => 'bool', + proargtypes => 'jsonb jsonpath', prosrc => 'jsonb_jsonpath_exists2' }, +{ oid => '6055', descr => 'implementation of @* operator', + proname => 'jsonpath_query', prorows => '1000', proretset => 't', + prorettype => 'jsonb', proargtypes => 'jsonb jsonpath', + prosrc => 'jsonb_jsonpath_query2' }, +{ oid => '6124', descr => 'implementation of @# operator', + proname => 'jsonpath_query_wrapped', prorettype => 'jsonb', + proargtypes => 'jsonb jsonpath', prosrc => 'jsonb_jsonpath_query_wrapped2' }, +{ oid => '6056', descr => 'jsonpath exists test', + proname => 'jsonpath_exists', prorettype => 'bool', + proargtypes => 'jsonb jsonpath jsonb', prosrc => 'jsonb_jsonpath_exists3' }, +{ oid => '6057', descr => 'jsonpath query', + proname => 'jsonpath_query', prorows => '1000', proretset => 't', + prorettype => 'jsonb', proargtypes => 'jsonb jsonpath jsonb', + prosrc => 'jsonb_jsonpath_query3' }, +{ oid => '6125', descr => 'jsonpath query with conditional wrapper', + proname => 'jsonpath_query_wrapped', prorettype => 'jsonb', + proargtypes => 'jsonb jsonpath jsonb', + prosrc => 'jsonb_jsonpath_query_wrapped3' }, +{ oid => '6073', descr => 'implementation of @~ operator', + proname => 'jsonpath_predicate', prorettype => 'bool', + proargtypes => 'jsonb jsonpath', prosrc => 'jsonb_jsonpath_predicate2' }, +{ oid => '6074', descr => 'jsonpath predicate test', + proname => 'jsonpath_predicate', prorettype => 'bool', + proargtypes => 'jsonb jsonpath jsonb', + prosrc => 'jsonb_jsonpath_predicate3' }, + +{ oid => '6043', descr => 'implementation of @? operator', + proname => 'jsonpath_exists', prorettype => 'bool', + proargtypes => 'json jsonpath', prosrc => 'json_jsonpath_exists2' }, +{ oid => '6044', descr => 'implementation of @* operator', + proname => 'jsonpath_query', prorows => '1000', proretset => 't', + prorettype => 'json', proargtypes => 'json jsonpath', + prosrc => 'json_jsonpath_query2' }, +{ oid => '6126', descr => 'implementation of @# operator', + proname => 'jsonpath_query_wrapped', prorettype => 'json', + proargtypes => 'json jsonpath', prosrc => 'json_jsonpath_query_wrapped2' }, +{ oid => '6045', descr => 'jsonpath exists test', + proname => 'jsonpath_exists', prorettype => 'bool', + proargtypes => 'json jsonpath json', prosrc => 'json_jsonpath_exists3' }, +{ oid => '6046', descr => 'jsonpath query', + proname => 'jsonpath_query', prorows => '1000', proretset => 't', + prorettype => 'json', proargtypes => 'json jsonpath json', + prosrc => 'json_jsonpath_query3' }, +{ oid => '6127', descr => 'jsonpath query with conditional wrapper', + proname => 'jsonpath_query_wrapped', prorettype => 'json', + proargtypes => 'json jsonpath json', + prosrc => 'json_jsonpath_query_wrapped3' }, +{ oid => '6049', descr => 'implementation of @~ operator', + proname => 'jsonpath_predicate', prorettype => 'bool', + proargtypes => 'json jsonpath', prosrc => 'json_jsonpath_predicate2' }, +{ oid => '6069', descr => 'jsonpath predicate test', + proname => 'jsonpath_predicate', prorettype => 'bool', + proargtypes => 'json jsonpath json', + prosrc => 'json_jsonpath_predicate3' }, + # txid { oid => '2939', descr => 'I/O', proname => 'txid_snapshot_in', prorettype => 'txid_snapshot', diff --git a/src/include/catalog/pg_type.dat b/src/include/catalog/pg_type.dat index d295eae1b9..e7ae4ccc0c 100644 --- a/src/include/catalog/pg_type.dat +++ b/src/include/catalog/pg_type.dat @@ -441,6 +441,11 @@ typname => 'jsonb', typlen => '-1', typbyval => 'f', typcategory 
=> 'U', typinput => 'jsonb_in', typoutput => 'jsonb_out', typreceive => 'jsonb_recv', typsend => 'jsonb_send', typalign => 'i', typstorage => 'x' }, +{ oid => '6050', array_type_oid => '6051', descr => 'JSON path', + typname => 'jsonpath', typlen => '-1', typbyval => 'f', typcategory => 'U', + typarray => '_jsonpath', typinput => 'jsonpath_in', + typoutput => 'jsonpath_out', typreceive => '-', typsend => '-', + typalign => 'i', typstorage => 'x' }, { oid => '2970', array_type_oid => '2949', descr => 'txid snapshot', typname => 'txid_snapshot', typlen => '-1', typbyval => 'f', diff --git a/src/include/lib/stringinfo.h b/src/include/lib/stringinfo.h index 8551237fc6..ff1ecb20ef 100644 --- a/src/include/lib/stringinfo.h +++ b/src/include/lib/stringinfo.h @@ -157,4 +157,10 @@ extern void appendBinaryStringInfoNT(StringInfo str, */ extern void enlargeStringInfo(StringInfo str, int needed); +/*------------------------ + * alignStringInfoInt + * Add padding zero bytes to align StringInfo + */ +extern void alignStringInfoInt(StringInfo buf); + #endif /* STRINGINFO_H */ diff --git a/src/include/regex/regex.h b/src/include/regex/regex.h index 27fdc09040..4b1e80ddd9 100644 --- a/src/include/regex/regex.h +++ b/src/include/regex/regex.h @@ -173,4 +173,9 @@ extern int pg_regprefix(regex_t *, pg_wchar **, size_t *); extern void pg_regfree(regex_t *); extern size_t pg_regerror(int, const regex_t *, char *, size_t); +extern regex_t *RE_compile_and_cache(text *text_re, int cflags, Oid collation); +extern bool RE_compile_and_execute(text *text_re, char *dat, int dat_len, + int cflags, Oid collation, + int nmatch, regmatch_t *pmatch); + #endif /* _REGEX_H_ */ diff --git a/src/include/utils/.gitignore b/src/include/utils/.gitignore index 05cfa7a8d6..e0705e1aa7 100644 --- a/src/include/utils/.gitignore +++ b/src/include/utils/.gitignore @@ -3,3 +3,4 @@ /probes.h /errcodes.h /header-stamp +/jsonpath_gram.h diff --git a/src/include/utils/date.h b/src/include/utils/date.h index eb6d2a16fe..10cc822752 100644 --- a/src/include/utils/date.h +++ b/src/include/utils/date.h @@ -76,5 +76,8 @@ extern TimeTzADT *GetSQLCurrentTime(int32 typmod); extern TimeADT GetSQLLocalTime(int32 typmod); extern int time2tm(TimeADT time, struct pg_tm *tm, fsec_t *fsec); extern int timetz2tm(TimeTzADT *time, struct pg_tm *tm, fsec_t *fsec, int *tzp); +extern int tm2time(struct pg_tm *tm, fsec_t fsec, TimeADT *result); +extern int tm2timetz(struct pg_tm *tm, fsec_t fsec, int tz, TimeTzADT *result); +extern void AdjustTimeForTypmod(TimeADT *time, int32 typmod); #endif /* DATE_H */ diff --git a/src/include/utils/datetime.h b/src/include/utils/datetime.h index de9e9ade5c..165f0e7965 100644 --- a/src/include/utils/datetime.h +++ b/src/include/utils/datetime.h @@ -338,4 +338,6 @@ extern TimeZoneAbbrevTable *ConvertTimeZoneAbbrevs(struct tzEntry *abbrevs, int n); extern void InstallTimeZoneAbbrevs(TimeZoneAbbrevTable *tbl); +extern void AdjustTimestampForTypmod(Timestamp *time, int32 typmod); + #endif /* DATETIME_H */ diff --git a/src/include/utils/elog.h b/src/include/utils/elog.h index 33c6b53e27..42a834c241 100644 --- a/src/include/utils/elog.h +++ b/src/include/utils/elog.h @@ -143,6 +143,25 @@ #define TEXTDOMAIN NULL +/* + * ereport_safe() -- special macro for copying error info into the specified + * ErrorData **edata (if it is non-NULL) instead of throwing it. This is + * intended for handling of errors of categories like ERRCODE_DATA_EXCEPTION + * without PG_TRY/PG_CATCH, but not for errors like ERRCODE_OUT_OF_MEMORY. 
+ */ +#define ereport_safe(edata, elevel, rest) \ + do { \ + if (edata) { \ + if (errstart(elevel, __FILE__, __LINE__, PG_FUNCNAME_MACRO, TEXTDOMAIN)) { \ + (void)(rest); \ + *(edata) = CopyErrorData(); \ + FlushErrorState(); \ + } \ + } else { \ + ereport(elevel, rest); \ + } \ + } while (0) + extern bool errstart(int elevel, const char *filename, int lineno, const char *funcname, const char *domain); extern void errfinish(int dummy,...); diff --git a/src/include/utils/float.h b/src/include/utils/float.h index 05e1b27637..d082bdcdd3 100644 --- a/src/include/utils/float.h +++ b/src/include/utils/float.h @@ -38,8 +38,11 @@ extern PGDLLIMPORT int extra_float_digits; * Utility functions in float.c */ extern int is_infinite(float8 val); -extern float8 float8in_internal(char *num, char **endptr_p, - const char *type_name, const char *orig_string); +extern float8 float8in_internal_safe(char *num, char **endptr_p, + const char *type_name, const char *orig_string, + ErrorData **edata); +#define float8in_internal(num, endptr_p, type_name, orig_string) \ + float8in_internal_safe(num, endptr_p, type_name, orig_string, NULL) extern char *float8out_internal(float8 num); extern int float4_cmp_internal(float4 a, float4 b); extern int float8_cmp_internal(float8 a, float8 b); diff --git a/src/include/utils/formatting.h b/src/include/utils/formatting.h index a9f5548b46..6db5b3fd89 100644 --- a/src/include/utils/formatting.h +++ b/src/include/utils/formatting.h @@ -28,4 +28,7 @@ extern char *asc_tolower(const char *buff, size_t nbytes); extern char *asc_toupper(const char *buff, size_t nbytes); extern char *asc_initcap(const char *buff, size_t nbytes); +extern Datum to_datetime(text *datetxt, const char *fmt, int fmt_len, char *tzn, + bool strict, Oid *typid, int32 *typmod, int *tz); + #endif diff --git a/src/include/utils/jsonapi.h b/src/include/utils/jsonapi.h index 6b483a15a6..6ef601f061 100644 --- a/src/include/utils/jsonapi.h +++ b/src/include/utils/jsonapi.h @@ -15,6 +15,7 @@ #define JSONAPI_H #include "jsonb.h" +#include "access/htup.h" #include "lib/stringinfo.h" typedef enum @@ -93,6 +94,48 @@ typedef struct JsonSemAction json_scalar_action scalar; } JsonSemAction; +typedef enum +{ + JTI_ARRAY_START, + JTI_ARRAY_ELEM, + JTI_ARRAY_ELEM_SCALAR, + JTI_ARRAY_ELEM_AFTER, + JTI_ARRAY_END, + JTI_OBJECT_START, + JTI_OBJECT_KEY, + JTI_OBJECT_VALUE, + JTI_OBJECT_VALUE_AFTER, +} JsontIterState; + +typedef struct JsonContainerData +{ + uint32 header; + int len; + char *data; +} JsonContainerData; + +typedef const JsonContainerData JsonContainer; + +typedef struct Json +{ + JsonContainer root; +} Json; + +typedef struct JsonIterator +{ + struct JsonIterator *parent; + JsonContainer *container; + JsonLexContext *lex; + JsontIterState state; + bool isScalar; +} JsonIterator; + +#define DatumGetJsonP(datum) JsonCreate(DatumGetTextP(datum)) +#define DatumGetJsonPCopy(datum) JsonCreate(DatumGetTextPCopy(datum)) + +#define JsonPGetDatum(json) \ + PointerGetDatum(cstring_to_text_with_len((json)->root.data, (json)->root.len)) + /* * parse_json will parse the string in the lex calling the * action functions in sem at the appropriate points. 
It is @@ -161,6 +204,24 @@ extern Jsonb *transform_jsonb_string_values(Jsonb *jsonb, void *action_state, extern text *transform_json_string_values(text *json, void *action_state, JsonTransformStringValuesAction transform_action); -extern char *JsonEncodeDateTime(char *buf, Datum value, Oid typid); +extern char *JsonEncodeDateTime(char *buf, Datum value, Oid typid, int *tz); + +extern Json *JsonCreate(text *json); +extern JsonbIteratorToken JsonIteratorNext(JsonIterator **pit, JsonbValue *val, + bool skipNested); +extern JsonIterator *JsonIteratorInit(JsonContainer *jc); +extern void JsonIteratorFree(JsonIterator *it); +extern uint32 JsonGetArraySize(JsonContainer *jc); +extern Json *JsonbValueToJson(JsonbValue *jbv); +extern JsonbValue *JsonExtractScalar(JsonContainer *jbc, JsonbValue *res); +extern char *JsonUnquote(Json *jb); +extern char *JsonToCString(StringInfo out, JsonContainer *jc, + int estimated_len); +extern JsonbValue *pushJsonValue(JsonbParseState **pstate, + JsonbIteratorToken tok, JsonbValue *jbv); +extern JsonbValue *findJsonValueFromContainer(JsonContainer *jc, uint32 flags, + JsonbValue *key); +extern JsonbValue *getIthJsonValueFromContainer(JsonContainer *array, + uint32 index); #endif /* JSONAPI_H */ diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h index 27873d4d10..404ed70169 100644 --- a/src/include/utils/jsonb.h +++ b/src/include/utils/jsonb.h @@ -34,6 +34,9 @@ typedef enum #define JsonbExistsStrategyNumber 9 #define JsonbExistsAnyStrategyNumber 10 #define JsonbExistsAllStrategyNumber 11 +#define JsonbJsonpathExistsStrategyNumber 15 +#define JsonbJsonpathPredicateStrategyNumber 16 + /* * In the standard jsonb_ops GIN opclass for jsonb, we choose to index both @@ -66,8 +69,10 @@ typedef enum /* Convenience macros */ #define DatumGetJsonbP(d) ((Jsonb *) PG_DETOAST_DATUM(d)) +#define DatumGetJsonbPCopy(d) ((Jsonb *) PG_DETOAST_DATUM_COPY(d)) #define JsonbPGetDatum(p) PointerGetDatum(p) #define PG_GETARG_JSONB_P(x) DatumGetJsonbP(PG_GETARG_DATUM(x)) +#define PG_GETARG_JSONB_P_COPY(x) DatumGetJsonbPCopy(PG_GETARG_DATUM(x)) #define PG_RETURN_JSONB_P(x) PG_RETURN_POINTER(x) typedef struct JsonbPair JsonbPair; @@ -219,10 +224,10 @@ typedef struct } Jsonb; /* convenience macros for accessing the root container in a Jsonb datum */ -#define JB_ROOT_COUNT(jbp_) (*(uint32 *) VARDATA(jbp_) & JB_CMASK) -#define JB_ROOT_IS_SCALAR(jbp_) ((*(uint32 *) VARDATA(jbp_) & JB_FSCALAR) != 0) -#define JB_ROOT_IS_OBJECT(jbp_) ((*(uint32 *) VARDATA(jbp_) & JB_FOBJECT) != 0) -#define JB_ROOT_IS_ARRAY(jbp_) ((*(uint32 *) VARDATA(jbp_) & JB_FARRAY) != 0) +#define JB_ROOT_COUNT(jbp_) JsonContainerSize(&(jbp_)->root) +#define JB_ROOT_IS_SCALAR(jbp_) JsonContainerIsScalar(&(jbp_)->root) +#define JB_ROOT_IS_OBJECT(jbp_) JsonContainerIsObject(&(jbp_)->root) +#define JB_ROOT_IS_ARRAY(jbp_) JsonContainerIsArray(&(jbp_)->root) enum jbvType @@ -236,7 +241,14 @@ enum jbvType jbvArray = 0x10, jbvObject, /* Binary (i.e. struct Jsonb) jbvArray/jbvObject */ - jbvBinary + jbvBinary, + /* + * Virtual types. + * + * These types are used only for in-memory JSON processing and serialized + * into JSON strings when outputted to json/jsonb. + */ + jbvDatetime = 0x20, }; /* @@ -269,6 +281,8 @@ struct JsonbValue struct { int nPairs; /* 1 pair, 2 elements */ + bool uniquified; /* Should we sort pairs by key name and + * remove duplicate keys? 
*/ JsonbPair *pairs; } object; /* Associative container type */ @@ -277,11 +291,20 @@ struct JsonbValue int len; JsonbContainer *data; } binary; /* Array or object, in on-disk format */ + + struct + { + Datum value; + Oid typid; + int32 typmod; + int tz; + } datetime; } val; }; -#define IsAJsonbScalar(jsonbval) ((jsonbval)->type >= jbvNull && \ - (jsonbval)->type <= jbvBool) +#define IsAJsonbScalar(jsonbval) (((jsonbval)->type >= jbvNull && \ + (jsonbval)->type <= jbvBool) || \ + (jsonbval)->type == jbvDatetime) /* * Key/value pair within an Object. @@ -355,6 +378,8 @@ typedef struct JsonbIterator /* Support functions */ extern uint32 getJsonbOffset(const JsonbContainer *jc, int index); extern uint32 getJsonbLength(const JsonbContainer *jc, int index); +extern int lengthCompareJsonbStringValue(const void *a, const void *b); +extern bool equalsJsonbScalarValue(JsonbValue *a, JsonbValue *b); extern int compareJsonbContainers(JsonbContainer *a, JsonbContainer *b); extern JsonbValue *findJsonbValueFromContainer(JsonbContainer *sheader, uint32 flags, @@ -363,6 +388,8 @@ extern JsonbValue *getIthJsonbValueFromContainer(JsonbContainer *sheader, uint32 i); extern JsonbValue *pushJsonbValue(JsonbParseState **pstate, JsonbIteratorToken seq, JsonbValue *jbVal); +extern JsonbValue *pushJsonbValueScalar(JsonbParseState **pstate, + JsonbIteratorToken seq,JsonbValue *scalarVal); extern JsonbIterator *JsonbIteratorInit(JsonbContainer *container); extern JsonbIteratorToken JsonbIteratorNext(JsonbIterator **it, JsonbValue *val, bool skipNested); @@ -379,5 +406,6 @@ extern char *JsonbToCString(StringInfo out, JsonbContainer *in, extern char *JsonbToCStringIndent(StringInfo out, JsonbContainer *in, int estimated_len); +extern JsonbValue *JsonbExtractScalar(JsonbContainer *jbc, JsonbValue *res); #endif /* __JSONB_H__ */ diff --git a/src/include/utils/jsonpath.h b/src/include/utils/jsonpath.h new file mode 100644 index 0000000000..b3cf4c2657 --- /dev/null +++ b/src/include/utils/jsonpath.h @@ -0,0 +1,292 @@ +/*------------------------------------------------------------------------- + * + * jsonpath.h + * Definitions of jsonpath datatype + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/include/utils/jsonpath.h + * + *------------------------------------------------------------------------- + */ + +#ifndef JSONPATH_H +#define JSONPATH_H + +#include "fmgr.h" +#include "utils/jsonb.h" +#include "nodes/pg_list.h" + +typedef struct +{ + int32 vl_len_; /* varlena header (do not touch directly!) 
*/ + uint32 header; /* version and flags (see below) */ + char data[FLEXIBLE_ARRAY_MEMBER]; +} JsonPath; + +#define JSONPATH_VERSION (0x01) +#define JSONPATH_LAX (0x80000000) +#define JSONPATH_HDRSZ (offsetof(JsonPath, data)) + +#define DatumGetJsonPathP(d) ((JsonPath *) DatumGetPointer(PG_DETOAST_DATUM(d))) +#define DatumGetJsonPathPCopy(d) ((JsonPath *) DatumGetPointer(PG_DETOAST_DATUM_COPY(d))) +#define PG_GETARG_JSONPATH_P(x) DatumGetJsonPathP(PG_GETARG_DATUM(x)) +#define PG_GETARG_JSONPATH_P_COPY(x) DatumGetJsonPathPCopy(PG_GETARG_DATUM(x)) +#define PG_RETURN_JSONPATH_P(p) PG_RETURN_POINTER(p) + +#define jspIsScalar(type) ((type) >= jpiNull && (type) <= jpiBool) + +/* + * All node's type of jsonpath expression + */ +typedef enum JsonPathItemType { + jpiNull = jbvNull, /* NULL literal */ + jpiString = jbvString, /* string literal */ + jpiNumeric = jbvNumeric, /* numeric literal */ + jpiBool = jbvBool, /* boolean literal: TRUE or FALSE */ + jpiAnd, /* predicate && predicate */ + jpiOr, /* predicate || predicate */ + jpiNot, /* ! predicate */ + jpiIsUnknown, /* (predicate) IS UNKNOWN */ + jpiEqual, /* expr == expr */ + jpiNotEqual, /* expr != expr */ + jpiLess, /* expr < expr */ + jpiGreater, /* expr > expr */ + jpiLessOrEqual, /* expr <= expr */ + jpiGreaterOrEqual, /* expr >= expr */ + jpiAdd, /* expr + expr */ + jpiSub, /* expr - expr */ + jpiMul, /* expr * expr */ + jpiDiv, /* expr / expr */ + jpiMod, /* expr % expr */ + jpiPlus, /* + expr */ + jpiMinus, /* - expr */ + jpiAnyArray, /* [*] */ + jpiAnyKey, /* .* */ + jpiIndexArray, /* [subscript, ...] */ + jpiAny, /* .** */ + jpiKey, /* .key */ + jpiCurrent, /* @ */ + jpiRoot, /* $ */ + jpiVariable, /* $variable */ + jpiFilter, /* ? (predicate) */ + jpiExists, /* EXISTS (expr) predicate */ + jpiType, /* .type() item method */ + jpiSize, /* .size() item method */ + jpiAbs, /* .abs() item method */ + jpiFloor, /* .floor() item method */ + jpiCeiling, /* .ceiling() item method */ + jpiDouble, /* .double() item method */ + jpiDatetime, /* .datetime() item method */ + jpiKeyValue, /* .keyvalue() item method */ + jpiSubscript, /* array subscript: 'expr' or 'expr TO expr' */ + jpiLast, /* LAST array subscript */ + jpiStartsWith, /* STARTS WITH predicate */ + jpiLikeRegex, /* LIKE_REGEX predicate */ +} JsonPathItemType; + +/* XQuery regex mode flags for LIKE_REGEX predicate */ +#define JSP_REGEX_ICASE 0x01 /* i flag, case insensitive */ +#define JSP_REGEX_SLINE 0x02 /* s flag, single-line mode */ +#define JSP_REGEX_MLINE 0x04 /* m flag, multi-line mode */ +#define JSP_REGEX_WSPACE 0x08 /* x flag, expanded syntax */ + +/* + * Support functions to parse/construct binary value. + * Unlike many other representation of expression the first/main + * node is not an operation but left operand of expression. 
That + * allows to implement cheep follow-path descending in jsonb + * structure and then execute operator with right operand + */ + +typedef struct JsonPathItem { + JsonPathItemType type; + + /* position form base to next node */ + int32 nextPos; + + /* + * pointer into JsonPath value to current node, all + * positions of current are relative to this base + */ + char *base; + + union { + /* classic operator with two operands: and, or etc */ + struct { + int32 left; + int32 right; + } args; + + /* any unary operation */ + int32 arg; + + /* storage for jpiIndexArray: indexes of array */ + struct { + int32 nelems; + struct { + int32 from; + int32 to; + } *elems; + } array; + + /* jpiAny: levels */ + struct { + uint32 first; + uint32 last; + } anybounds; + + struct { + char *data; /* for bool, numeric and string/key */ + int32 datalen; /* filled only for string/key */ + } value; + + struct { + int32 expr; + char *pattern; + int32 patternlen; + uint32 flags; + } like_regex; + } content; +} JsonPathItem; + +#define jspHasNext(jsp) ((jsp)->nextPos > 0) + +extern void jspInit(JsonPathItem *v, JsonPath *js); +extern void jspInitByBuffer(JsonPathItem *v, char *base, int32 pos); +extern bool jspGetNext(JsonPathItem *v, JsonPathItem *a); +extern void jspGetArg(JsonPathItem *v, JsonPathItem *a); +extern void jspGetLeftArg(JsonPathItem *v, JsonPathItem *a); +extern void jspGetRightArg(JsonPathItem *v, JsonPathItem *a); +extern Numeric jspGetNumeric(JsonPathItem *v); +extern bool jspGetBool(JsonPathItem *v); +extern char * jspGetString(JsonPathItem *v, int32 *len); +extern bool jspGetArraySubscript(JsonPathItem *v, JsonPathItem *from, + JsonPathItem *to, int i); + +/* + * Parsing + */ + +typedef struct JsonPathParseItem JsonPathParseItem; + +struct JsonPathParseItem { + JsonPathItemType type; + JsonPathParseItem *next; /* next in path */ + + union { + + /* classic operator with two operands: and, or etc */ + struct { + JsonPathParseItem *left; + JsonPathParseItem *right; + } args; + + /* any unary operation */ + JsonPathParseItem *arg; + + /* storage for jpiIndexArray: indexes of array */ + struct { + int nelems; + struct + { + JsonPathParseItem *from; + JsonPathParseItem *to; + } *elems; + } array; + + /* jpiAny: levels */ + struct { + uint32 first; + uint32 last; + } anybounds; + + struct { + JsonPathParseItem *expr; + char *pattern; /* could not be not null-terminated */ + uint32 patternlen; + uint32 flags; + } like_regex; + + /* scalars */ + Numeric numeric; + bool boolean; + struct { + uint32 len; + char *val; /* could not be not null-terminated */ + } string; + } value; +}; + +typedef struct JsonPathParseResult +{ + JsonPathParseItem *expr; + bool lax; +} JsonPathParseResult; + +extern JsonPathParseResult* parsejsonpath(const char *str, int len); + +/* + * Evaluation of jsonpath + */ + +/* Result of jsonpath predicate evaluation */ +typedef enum JsonPathBool +{ + jpbFalse = 0, + jpbTrue = 1, + jpbUnknown = 2 +} JsonPathBool; + +/* Result of jsonpath evaluation */ +typedef ErrorData *JsonPathExecResult; + +/* Special pseudo-ErrorData with zero sqlerrcode for existence queries. */ +extern ErrorData jperNotFound[1]; + +#define jperOk NULL +#define jperIsError(jper) ((jper) && (jper)->sqlerrcode) +#define jperIsErrorData(jper) ((jper) && (jper)->elevel > 0) +#define jperGetError(jper) ((jper)->sqlerrcode) +#define jperMakeErrorData(edata) (edata) +#define jperGetErrorData(jper) (jper) +#define jperFree(jper) ((jper) && (jper)->sqlerrcode ? \ + (jper)->elevel > 0 ? 
FreeErrorData(jper) : pfree(jper) : (void) 0) +#define jperReplace(jper1, jper2) (jperFree(jper1), (jper2)) + +/* Returns special SQL/JSON ErrorData with zero elevel */ +static inline JsonPathExecResult +jperMakeError(int sqlerrcode) +{ + ErrorData *edata = palloc0(sizeof(*edata)); + + edata->sqlerrcode = sqlerrcode; + + return edata; +} + +typedef Datum (*JsonPathVariable_cb)(void *, bool *); + +typedef struct JsonPathVariable { + text *varName; + Oid typid; + int32 typmod; + JsonPathVariable_cb cb; + void *cb_arg; +} JsonPathVariable; + + + +typedef struct JsonValueList +{ + JsonbValue *singleton; + List *list; +} JsonValueList; + +JsonPathExecResult executeJsonPath(JsonPath *path, + List *vars, /* list of JsonPathVariable */ + Jsonb *json, + JsonValueList *foundJson); + +#endif diff --git a/src/include/utils/jsonpath_json.h b/src/include/utils/jsonpath_json.h new file mode 100644 index 0000000000..064d77ef6b --- /dev/null +++ b/src/include/utils/jsonpath_json.h @@ -0,0 +1,106 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_json.h + * Jsonpath support for json datatype + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/include/utils/jsonpath_json.h + * + *------------------------------------------------------------------------- + */ + +#ifndef JSONPATH_JSON_H +#define JSONPATH_JSON_H + +/* redefine jsonb structures */ +#define Jsonb Json +#define JsonbContainer JsonContainer +#define JsonbIterator JsonIterator + +/* redefine jsonb functions */ +#define findJsonbValueFromContainer(jc, flags, jbv) \ + findJsonValueFromContainer((JsonContainer *)(jc), flags, jbv) +#define getIthJsonbValueFromContainer(jc, i) \ + getIthJsonValueFromContainer((JsonContainer *)(jc), i) +#define pushJsonbValue pushJsonValue +#define JsonbIteratorInit(jc) JsonIteratorInit((JsonContainer *)(jc)) +#define JsonbIteratorNext JsonIteratorNext +#define JsonbValueToJsonb JsonbValueToJson +#define JsonbToCString JsonToCString +#define JsonbUnquote JsonUnquote +#define JsonbExtractScalar(jc, jbv) JsonExtractScalar((JsonContainer *)(jc), jbv) + +/* redefine jsonb macros */ +#undef JsonContainerSize +#define JsonContainerSize(jc) \ + ((((JsonContainer *)(jc))->header & JB_CMASK) == JB_CMASK && \ + JsonContainerIsArray(jc) \ + ? 
JsonGetArraySize((JsonContainer *)(jc)) \ + : ((JsonContainer *)(jc))->header & JB_CMASK) + + +#undef DatumGetJsonbP +#define DatumGetJsonbP(d) DatumGetJsonP(d) + +#undef DatumGetJsonbPCopy +#define DatumGetJsonbPCopy(d) DatumGetJsonPCopy(d) + +#undef JsonbPGetDatum +#define JsonbPGetDatum(json) JsonPGetDatum(json) + +#undef PG_GETARG_JSONB_P +#define PG_GETARG_JSONB_P(n) DatumGetJsonP(PG_GETARG_DATUM(n)) + +#undef PG_GETARG_JSONB_P_COPY +#define PG_GETARG_JSONB_P_COPY(n) DatumGetJsonPCopy(PG_GETARG_DATUM(n)) + +#undef PG_RETURN_JSONB_P +#define PG_RETURN_JSONB_P(json) PG_RETURN_DATUM(JsonPGetDatum(json)) + + +#ifdef DatumGetJsonb +#undef DatumGetJsonb +#define DatumGetJsonb(d) DatumGetJsonbP(d) +#endif + +#ifdef DatumGetJsonbCopy +#undef DatumGetJsonbCopy +#define DatumGetJsonbCopy(d) DatumGetJsonbPCopy(d) +#endif + +#ifdef JsonbGetDatum +#undef JsonbGetDatum +#define JsonbGetDatum(json) JsonbPGetDatum(json) +#endif + +#ifdef PG_GETARG_JSONB +#undef PG_GETARG_JSONB +#define PG_GETARG_JSONB(n) PG_GETARG_JSONB_P(n) +#endif + +#ifdef PG_GETARG_JSONB_COPY +#undef PG_GETARG_JSONB_COPY +#define PG_GETARG_JSONB_COPY(n) PG_GETARG_JSONB_P_COPY(n) +#endif + +#ifdef PG_RETURN_JSONB +#undef PG_RETURN_JSONB +#define PG_RETURN_JSONB(json) PG_RETURN_JSONB_P(json) +#endif + +/* redefine global jsonpath functions */ +#define executeJsonPath executeJsonPathJson + +static inline JsonbValue * +JsonbInitBinary(JsonbValue *jbv, Json *jb) +{ + jbv->type = jbvBinary; + jbv->val.binary.data = (void *) &jb->root; + jbv->val.binary.len = jb->root.len; + + return jbv; +} + +#endif /* JSONPATH_JSON_H */ diff --git a/src/include/utils/jsonpath_scanner.h b/src/include/utils/jsonpath_scanner.h new file mode 100644 index 0000000000..1c8447f6bf --- /dev/null +++ b/src/include/utils/jsonpath_scanner.h @@ -0,0 +1,30 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_scanner.h + * jsonpath scanner & parser support + * + * Portions Copyright (c) 1996-2017, PostgreSQL Global Development Group + * + * src/include/utils/jsonpath_scanner.h + * + *------------------------------------------------------------------------- + */ + +#ifndef JSONPATH_SCANNER_H +#define JSONPATH_SCANNER_H + +/* struct string is shared between scan and gram */ +typedef struct string { + char *val; + int len; + int total; +} string; + +#include "utils/jsonpath.h" +#include "utils/jsonpath_gram.h" + +/* flex 2.5.4 doesn't bother with a decl for this */ +extern int jsonpath_yylex(YYSTYPE * yylval_param); +extern void jsonpath_yyerror(JsonPathParseResult **result, const char *message); + +#endif diff --git a/src/include/utils/numeric.h b/src/include/utils/numeric.h index cd8da8bdc2..6e3e3f002b 100644 --- a/src/include/utils/numeric.h +++ b/src/include/utils/numeric.h @@ -61,4 +61,13 @@ int32 numeric_maximum_size(int32 typmod); extern char *numeric_out_sci(Numeric num, int scale); extern char *numeric_normalize(Numeric num); +/* Functions for safe handling of numeric errors without PG_TRY/PG_CATCH */ +extern Numeric numeric_add_internal(Numeric n1, Numeric n2, ErrorData **edata); +extern Numeric numeric_sub_internal(Numeric n1, Numeric n2, ErrorData **edata); +extern Numeric numeric_mul_internal(Numeric n1, Numeric n2, ErrorData **edata); +extern Numeric numeric_div_internal(Numeric n1, Numeric n2, ErrorData **edata); +extern Numeric numeric_mod_internal(Numeric n1, Numeric n2, ErrorData **edata); +extern Numeric float8_numeric_internal(float8 val, ErrorData **edata); +extern float8 numeric_float8_internal(Numeric 
num, ErrorData **edata); + #endif /* _PG_NUMERIC_H_ */ diff --git a/src/test/regress/expected/horology.out b/src/test/regress/expected/horology.out index b2b4577333..fa27d74d4d 100644 --- a/src/test/regress/expected/horology.out +++ b/src/test/regress/expected/horology.out @@ -2786,6 +2786,92 @@ SELECT to_timestamp('2011-12-18 11:38 20', 'YYYY-MM-DD HH12:MI TZM'); Sun Dec 18 03:18:00 2011 PST (1 row) +SELECT i, to_timestamp('2018-11-02 12:34:56', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; + i | to_timestamp +---+------------------------------ + 1 | Fri Nov 02 12:34:56 2018 PDT + 2 | Fri Nov 02 12:34:56 2018 PDT + 3 | Fri Nov 02 12:34:56 2018 PDT + 4 | Fri Nov 02 12:34:56 2018 PDT + 5 | Fri Nov 02 12:34:56 2018 PDT + 6 | Fri Nov 02 12:34:56 2018 PDT +(6 rows) + +SELECT i, to_timestamp('2018-11-02 12:34:56.1', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; + i | to_timestamp +---+-------------------------------- + 1 | Fri Nov 02 12:34:56.1 2018 PDT + 2 | Fri Nov 02 12:34:56.1 2018 PDT + 3 | Fri Nov 02 12:34:56.1 2018 PDT + 4 | Fri Nov 02 12:34:56.1 2018 PDT + 5 | Fri Nov 02 12:34:56.1 2018 PDT + 6 | Fri Nov 02 12:34:56.1 2018 PDT +(6 rows) + +SELECT i, to_timestamp('2018-11-02 12:34:56.12', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; + i | to_timestamp +---+--------------------------------- + 1 | Fri Nov 02 12:34:56.1 2018 PDT + 2 | Fri Nov 02 12:34:56.12 2018 PDT + 3 | Fri Nov 02 12:34:56.12 2018 PDT + 4 | Fri Nov 02 12:34:56.12 2018 PDT + 5 | Fri Nov 02 12:34:56.12 2018 PDT + 6 | Fri Nov 02 12:34:56.12 2018 PDT +(6 rows) + +SELECT i, to_timestamp('2018-11-02 12:34:56.123', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; + i | to_timestamp +---+---------------------------------- + 1 | Fri Nov 02 12:34:56.1 2018 PDT + 2 | Fri Nov 02 12:34:56.12 2018 PDT + 3 | Fri Nov 02 12:34:56.123 2018 PDT + 4 | Fri Nov 02 12:34:56.123 2018 PDT + 5 | Fri Nov 02 12:34:56.123 2018 PDT + 6 | Fri Nov 02 12:34:56.123 2018 PDT +(6 rows) + +SELECT i, to_timestamp('2018-11-02 12:34:56.1234', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; + i | to_timestamp +---+----------------------------------- + 1 | Fri Nov 02 12:34:56.1 2018 PDT + 2 | Fri Nov 02 12:34:56.12 2018 PDT + 3 | Fri Nov 02 12:34:56.123 2018 PDT + 4 | Fri Nov 02 12:34:56.1234 2018 PDT + 5 | Fri Nov 02 12:34:56.1234 2018 PDT + 6 | Fri Nov 02 12:34:56.1234 2018 PDT +(6 rows) + +SELECT i, to_timestamp('2018-11-02 12:34:56.12345', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; + i | to_timestamp +---+------------------------------------ + 1 | Fri Nov 02 12:34:56.1 2018 PDT + 2 | Fri Nov 02 12:34:56.12 2018 PDT + 3 | Fri Nov 02 12:34:56.123 2018 PDT + 4 | Fri Nov 02 12:34:56.1235 2018 PDT + 5 | Fri Nov 02 12:34:56.12345 2018 PDT + 6 | Fri Nov 02 12:34:56.12345 2018 PDT +(6 rows) + +SELECT i, to_timestamp('2018-11-02 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; + i | to_timestamp +---+------------------------------------- + 1 | Fri Nov 02 12:34:56.1 2018 PDT + 2 | Fri Nov 02 12:34:56.12 2018 PDT + 3 | Fri Nov 02 12:34:56.123 2018 PDT + 4 | Fri Nov 02 12:34:56.1235 2018 PDT + 5 | Fri Nov 02 12:34:56.12346 2018 PDT + 6 | Fri Nov 02 12:34:56.123456 2018 PDT +(6 rows) + +SELECT i, to_timestamp('2018-11-02 12:34:56.123456789', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; +ERROR: date/time field value out of range: "2018-11-02 12:34:56.123456789" +-- FF7, FF8, FF9 are not supported +SELECT to_timestamp('123', 
'FF7'); +ERROR: datetime formatting field "FF7" is not supported +SELECT to_timestamp('123', 'FF8'); +ERROR: datetime formatting field "FF8" is not supported +SELECT to_timestamp('123', 'FF9'); +ERROR: datetime formatting field "FF9" is not supported -- -- Check handling of multiple spaces in format and/or input -- diff --git a/src/test/regress/expected/json_jsonpath.out b/src/test/regress/expected/json_jsonpath.out new file mode 100644 index 0000000000..942bf6bd99 --- /dev/null +++ b/src/test/regress/expected/json_jsonpath.out @@ -0,0 +1,1732 @@ +select json '{"a": 12}' @? '$.a.b'; + ?column? +---------- + f +(1 row) + +select json '{"a": 12}' @? '$.b'; + ?column? +---------- + f +(1 row) + +select json '{"a": {"a": 12}}' @? '$.a.a'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"a": 12}}' @? '$.*.a'; + ?column? +---------- + t +(1 row) + +select json '{"b": {"a": 12}}' @? '$.*.a'; + ?column? +---------- + t +(1 row) + +select json '{}' @? '$.*'; + ?column? +---------- + f +(1 row) + +select json '{"a": 1}' @? '$.*'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? 'lax $.**{1}'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? 'lax $.**{2}'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? 'lax $.**{3}'; + ?column? +---------- + f +(1 row) + +select json '[]' @? '$[*]'; + ?column? +---------- + f +(1 row) + +select json '[1]' @? '$[*]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$[1]'; + ?column? +---------- + f +(1 row) + +select json '[1]' @? 'strict $[1]'; + ?column? +---------- + +(1 row) + +select json '[1]' @* 'strict $[1]'; +ERROR: Invalid SQL/JSON subscript +select json '[1]' @? '$[0]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$[0.3]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$[0.5]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$[0.9]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$[1.2]'; + ?column? +---------- + f +(1 row) + +select json '[1]' @? 'strict $[1.2]'; + ?column? +---------- + +(1 row) + +select json '[1]' @* 'strict $[1.2]'; +ERROR: Invalid SQL/JSON subscript +select json '{}' @* 'strict $[0.3]'; +ERROR: SQL/JSON array not found +select json '{}' @? 'lax $[0.3]'; + ?column? +---------- + t +(1 row) + +select json '{}' @* 'strict $[1.2]'; +ERROR: SQL/JSON array not found +select json '{}' @? 'lax $[1.2]'; + ?column? +---------- + f +(1 row) + +select json '{}' @* 'strict $[-2 to 3]'; +ERROR: SQL/JSON array not found +select json '{}' @? 'lax $[-2 to 3]'; + ?column? +---------- + t +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] > @.b[*])'; + ?column? +---------- + f +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,"5"]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,"5"]}' @? 'strict $ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + f +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,null]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select json '1' @? '$ ? ((@ == "1") is unknown)'; + ?column? +---------- + t +(1 row) + +select json '1' @? '$ ? ((@ == 1) is unknown)'; + ?column? +---------- + f +(1 row) + +select json '[{"a": 1}, {"a": 2}]' @? '$[0 to 1] ? (@.a > 1)'; + ?column? +---------- + t +(1 row) + +select json '{"a": 12, "b": {"a": 13}}' @* '$.a'; + ?column? 
+---------- + 12 +(1 row) + +select json '{"a": 12, "b": {"a": 13}}' @* '$.b'; + ?column? +----------- + {"a": 13} +(1 row) + +select json '{"a": 12, "b": {"a": 13}}' @* '$.*'; + ?column? +----------- + 12 + {"a": 13} +(2 rows) + +select json '{"a": 12, "b": {"a": 13}}' @* 'lax $.*.a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[*].a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[*].*'; + ?column? +---------- + 13 + 14 +(2 rows) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0].a'; + ?column? +---------- +(0 rows) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[1].a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[2].a'; + ?column? +---------- +(0 rows) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0,1].a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0 to 10].a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}, "ccc", true]' @* '$[2.5 - 1 to $.size() - 2]'; + ?column? +----------- + {"a": 13} + {"b": 14} + "ccc" +(3 rows) + +select json '1' @* 'lax $[0]'; + ?column? +---------- + 1 +(1 row) + +select json '1' @* 'lax $[*]'; + ?column? +---------- + 1 +(1 row) + +select json '{}' @* 'lax $[0]'; + ?column? +---------- + {} +(1 row) + +select json '[1]' @* 'lax $[0]'; + ?column? +---------- + 1 +(1 row) + +select json '[1]' @* 'lax $[*]'; + ?column? +---------- + 1 +(1 row) + +select json '[1,2,3]' @* 'lax $[*]'; + ?column? +---------- + 1 + 2 + 3 +(3 rows) + +select json '[]' @* '$[last]'; + ?column? +---------- +(0 rows) + +select json '[]' @* 'strict $[last]'; +ERROR: Invalid SQL/JSON subscript +select json '[1]' @* '$[last]'; + ?column? +---------- + 1 +(1 row) + +select json '{}' @* 'lax $[last]'; + ?column? +---------- + {} +(1 row) + +select json '[1,2,3]' @* '$[last]'; + ?column? +---------- + 3 +(1 row) + +select json '[1,2,3]' @* '$[last - 1]'; + ?column? +---------- + 2 +(1 row) + +select json '[1,2,3]' @* '$[last ? (@.type() == "number")]'; + ?column? +---------- + 3 +(1 row) + +select json '[1,2,3]' @* '$[last ? (@.type() == "string")]'; +ERROR: Invalid SQL/JSON subscript +select * from jsonpath_query(json '{"a": 10}', '$'); + jsonpath_query +---------------- + {"a": 10} +(1 row) + +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)'); +ERROR: could not find jsonpath variable 'value' +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + {"a": 10} +(1 row) + +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)', '{"value" : 8}'); + jsonpath_query +---------------- +(0 rows) + +select * from jsonpath_query(json '{"a": 10}', '$.a ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 +(1 row) + +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$[*] ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 + 11 + 12 +(3 rows) + +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$[0,1] ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 + 11 +(2 rows) + +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$[0 to 2] ? (@ < $value)', '{"value" : 15}'); + jsonpath_query +---------------- + 10 + 11 + 12 +(3 rows) + +select * from jsonpath_query(json '[1,"1",2,"2",null]', '$[*] ? 
(@ == "1")'); + jsonpath_query +---------------- + "1" +(1 row) + +select * from jsonpath_query(json '[1,"1",2,"2",null]', '$[*] ? (@ == $value)', '{"value" : "1"}'); + jsonpath_query +---------------- + "1" +(1 row) + +select json '[1, "2", null]' @* '$[*] ? (@ != null)'; + ?column? +---------- + 1 + "2" +(2 rows) + +select json '[1, "2", null]' @* '$[*] ? (@ == null)'; + ?column? +---------- + null +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**'; + ?column? +----------------- + {"a": {"b": 1}} + {"b": 1} + 1 +(3 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**{0}'; + ?column? +----------------- + {"a": {"b": 1}} +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{0 to last}'; + ?column? +----------------- + {"a": {"b": 1}} + {"b": 1} + 1 +(3 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1}'; + ?column? +---------- + {"b": 1} +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1 to last}'; + ?column? +---------- + {"b": 1} + 1 +(2 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**{2}'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{2 to last}'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{3 to last}'; + ?column? +---------- +(0 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{0}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{0 to last}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1 to last}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1 to 2}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0 to last}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1 to last}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1 to 2}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{2 to 3}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{0}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{1}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{0 to last}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{1 to last}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{1 to 2}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{0}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1}.b ? ( @ > 0)'; + ?column? 
+---------- + f +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{0 to last}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1 to last}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1 to 2}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{2 to 3}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.x))'; + ?column? +---------- + {"x": 2} +(1 row) + +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.y))'; + ?column? +---------- +(0 rows) + +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.x ? (@ >= 2) ))'; + ?column? +---------- + {"x": 2} +(1 row) + +--test ternary logic +select + x, y, + jsonpath_query( + json '[true, false, null]', + '$[*] ? (@ == true && ($x == true && $y == true) || + @ == false && !($x == true && $y == true) || + @ == null && ($x == true && $y == true) is unknown)', + json_build_object('x', x, 'y', y) + ) as "x && y" +from + (values (json 'true'), ('false'), ('"null"')) x(x), + (values (json 'true'), ('false'), ('"null"')) y(y); + x | y | x && y +--------+--------+-------- + true | true | true + true | false | false + true | "null" | null + false | true | false + false | false | false + false | "null" | false + "null" | true | null + "null" | false | false + "null" | "null" | null +(9 rows) + +select + x, y, + jsonpath_query( + json '[true, false, null]', + '$[*] ? (@ == true && ($x == true || $y == true) || + @ == false && !($x == true || $y == true) || + @ == null && ($x == true || $y == true) is unknown)', + json_build_object('x', x, 'y', y) + ) as "x || y" +from + (values (json 'true'), ('false'), ('"null"')) x(x), + (values (json 'true'), ('false'), ('"null"')) y(y); + x | y | x || y +--------+--------+-------- + true | true | true + true | false | true + true | "null" | true + false | true | true + false | false | false + false | "null" | null + "null" | true | true + "null" | false | null + "null" | "null" | null +(9 rows) + +select json '{"a": 1, "b": 1}' @? '$ ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$ ? (.a == .b)'; + ?column? +---------- + f +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$.c ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$.c ? ($.c.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$.* ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"a": 1, "b": 1}' @? '$.** ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$.** ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == 1 + 1)'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == (1 + 1))'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == .b + 1)'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == (.b + 1))'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == - 1)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == -1)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": -1, "b": 1}}' @? 
'$.** ? (.a == -.b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == - .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 0, "b": 1}}' @? '$.** ? (.a == 1 - .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @? '$.** ? (.a == 1 - - .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 0, "b": 1}}' @? '$.** ? (.a == 1 - +.b)'; + ?column? +---------- + t +(1 row) + +select json '[1,2,3]' @? '$ ? (+@[*] > +2)'; + ?column? +---------- + t +(1 row) + +select json '[1,2,3]' @? '$ ? (+@[*] > +3)'; + ?column? +---------- + f +(1 row) + +select json '[1,2,3]' @? '$ ? (-@[*] < -2)'; + ?column? +---------- + t +(1 row) + +select json '[1,2,3]' @? '$ ? (-@[*] < -3)'; + ?column? +---------- + f +(1 row) + +select json '1' @? '$ ? ($ > 0)'; + ?column? +---------- + t +(1 row) + +-- arithmetic errors +select json '[1,2,0,3]' @* '$[*] ? (2 / @ > 0)'; + ?column? +---------- + 1 + 2 + 3 +(3 rows) + +select json '[1,2,0,3]' @* '$[*] ? ((2 / @ > 0) is unknown)'; + ?column? +---------- + 0 +(1 row) + +select json '0' @* '1 / $'; +ERROR: division by zero +-- unwrapping of operator arguments in lax mode +select json '{"a": [2]}' @* 'lax $.a * 3'; + ?column? +---------- + 6 +(1 row) + +select json '{"a": [2]}' @* 'lax $.a + 3'; + ?column? +---------- + 5 +(1 row) + +select json '{"a": [2, 3, 4]}' @* 'lax -$.a'; + ?column? +---------- + -2 + -3 + -4 +(3 rows) + +-- should fail +select json '{"a": [1, 2]}' @* 'lax $.a * 3'; +ERROR: Singleton SQL/JSON item required +-- extension: boolean expressions +select json '2' @* '$ > 1'; + ?column? +---------- + true +(1 row) + +select json '2' @* '$ <= 1'; + ?column? +---------- + false +(1 row) + +select json '2' @* '$ == "2"'; + ?column? +---------- + null +(1 row) + +select json '2' @~ '$ > 1'; + ?column? +---------- + t +(1 row) + +select json '2' @~ '$ <= 1'; + ?column? +---------- + f +(1 row) + +select json '2' @~ '$ == "2"'; + ?column? +---------- + +(1 row) + +select json '2' @~ '1'; + ?column? +---------- + +(1 row) + +select json '{}' @~ '$'; + ?column? +---------- + +(1 row) + +select json '[]' @~ '$'; + ?column? +---------- + +(1 row) + +select json '[1,2,3]' @~ '$[*]'; +ERROR: Singleton SQL/JSON item required +select json '[]' @~ '$[*]'; +ERROR: Singleton SQL/JSON item required +select jsonpath_predicate(json '[[1, true], [2, false]]', 'strict $[*] ? (@[0] > $x) [1]', '{"x": 1}'); + jsonpath_predicate +-------------------- + f +(1 row) + +select jsonpath_predicate(json '[[1, true], [2, false]]', 'strict $[*] ? (@[0] < $x) [1]', '{"x": 2}'); + jsonpath_predicate +-------------------- + t +(1 row) + +select json '[null,1,true,"a",[],{}]' @* '$.type()'; + ?column? +---------- + "array" +(1 row) + +select json '[null,1,true,"a",[],{}]' @* 'lax $.type()'; + ?column? +---------- + "array" +(1 row) + +select json '[null,1,true,"a",[],{}]' @* '$[*].type()'; + ?column? +----------- + "null" + "number" + "boolean" + "string" + "array" + "object" +(6 rows) + +select json 'null' @* 'null.type()'; + ?column? +---------- + "null" +(1 row) + +select json 'null' @* 'true.type()'; + ?column? +----------- + "boolean" +(1 row) + +select json 'null' @* '123.type()'; + ?column? +---------- + "number" +(1 row) + +select json 'null' @* '"123".type()'; + ?column? +---------- + "string" +(1 row) + +select json '{"a": 2}' @* '($.a - 5).abs() + 10'; + ?column? +---------- + 13 +(1 row) + +select json '{"a": 2.5}' @* '-($.a * $.a).floor() + 10'; + ?column? 
+---------- + 4 +(1 row) + +select json '[1, 2, 3]' @* '($[*] > 2) ? (@ == true)'; + ?column? +---------- + true +(1 row) + +select json '[1, 2, 3]' @* '($[*] > 3).type()'; + ?column? +----------- + "boolean" +(1 row) + +select json '[1, 2, 3]' @* '($[*].a > 3).type()'; + ?column? +----------- + "boolean" +(1 row) + +select json '[1, 2, 3]' @* 'strict ($[*].a > 3).type()'; + ?column? +---------- + "null" +(1 row) + +select json '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'strict $[*].size()'; +ERROR: SQL/JSON array not found +select json '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'lax $[*].size()'; + ?column? +---------- + 1 + 1 + 1 + 1 + 0 + 1 + 3 + 1 + 1 +(9 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].abs()'; + ?column? +---------- + 0 + 1 + 2 + 3.4 + 5.6 +(5 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].floor()'; + ?column? +---------- + 0 + 1 + -2 + -4 + 5 +(5 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling()'; + ?column? +---------- + 0 + 1 + -2 + -3 + 6 +(5 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs()'; + ?column? +---------- + 0 + 1 + 2 + 3 + 6 +(5 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs().type()'; + ?column? +---------- + "number" + "number" + "number" + "number" + "number" +(5 rows) + +select json '[{},1]' @* '$[*].keyvalue()'; +ERROR: SQL/JSON object not found +select json '{}' @* '$.keyvalue()'; + ?column? +---------- +(0 rows) + +select json '{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}' @* '$.keyvalue()'; + ?column? +---------------------------------------------- + {"key": "a", "value": 1, "id": 0} + {"key": "b", "value": [1, 2], "id": 0} + {"key": "c", "value": {"a": "bbb"}, "id": 0} +(3 rows) + +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* '$[*].keyvalue()'; + ?column? +----------------------------------------------- + {"key": "a", "value": 1, "id": 1} + {"key": "b", "value": [1, 2], "id": 1} + {"key": "c", "value": {"a": "bbb"}, "id": 24} +(3 rows) + +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'strict $.keyvalue()'; +ERROR: SQL/JSON object not found +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'lax $.keyvalue()'; + ?column? +----------------------------------------------- + {"key": "a", "value": 1, "id": 1} + {"key": "b", "value": [1, 2], "id": 1} + {"key": "c", "value": {"a": "bbb"}, "id": 24} +(3 rows) + +select json 'null' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select json 'true' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select json '[]' @* '$.double()'; + ?column? +---------- +(0 rows) + +select json '[]' @* 'strict $.double()'; +ERROR: Non-numeric SQL/JSON item +select json '{}' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select json '1.23' @* '$.double()'; + ?column? +---------- + 1.23 +(1 row) + +select json '"1.23"' @* '$.double()'; + ?column? +---------- + 1.23 +(1 row) + +select json '"1.23aaa"' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select json '["", "a", "abc", "abcabc"]' @* '$[*] ? (@ starts with "abc")'; + ?column? +---------- + "abc" + "abcabc" +(2 rows) + +select json '["", "a", "abc", "abcabc"]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------------------------- + ["", "a", "abc", "abcabc"] +(1 row) + +select json '["", "a", "abd", "abdabc"]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------- +(0 rows) + +select json '["abc", "abcabc", null, 1]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? 
+---------- +(0 rows) + +select json '["abc", "abcabc", null, 1]' @* 'strict $ ? ((@[*] starts with "abc") is unknown)'; + ?column? +---------------------------- + ["abc", "abcabc", null, 1] +(1 row) + +select json '[[null, 1, "abc", "abcabc"]]' @* 'lax $ ? (@[*] starts with "abc")'; + ?column? +---------------------------- + [null, 1, "abc", "abcabc"] +(1 row) + +select json '[[null, 1, "abd", "abdabc"]]' @* 'lax $ ? ((@[*] starts with "abc") is unknown)'; + ?column? +---------------------------- + [null, 1, "abd", "abdabc"] +(1 row) + +select json '[null, 1, "abd", "abdabc"]' @* 'lax $[*] ? ((@ starts with "abc") is unknown)'; + ?column? +---------- + null + 1 +(2 rows) + +select json '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c")'; + ?column? +---------- + "abc" + "abdacb" +(2 rows) + +select json '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c" flag "i")'; + ?column? +---------- + "abc" + "aBdC" + "abdacb" +(3 rows) + +select json 'null' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json 'true' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '[]' @* '$.datetime()'; + ?column? +---------- +(0 rows) + +select json '[]' @* 'strict $.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '{}' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '""' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '"10-03-2017"' @* '$.datetime("dd-mm-yyyy")'; + ?column? +-------------- + "2017-03-10" +(1 row) + +select json '"10-03-2017"' @* '$.datetime("dd-mm-yyyy").type()'; + ?column? +---------- + "date" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy")'; + ?column? +-------------- + "2017-03-10" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy").type()'; + ?column? +---------- + "date" +(1 row) + +select json '"10-03-2017 12:34"' @* ' $.datetime("dd-mm-yyyy HH24:MI").type()'; + ?column? +------------------------------- + "timestamp without time zone" +(1 row) + +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select json '"12:34:56"' @* '$.datetime("HH24:MI:SS").type()'; + ?column? +-------------------------- + "time without time zone" +(1 row) + +select json '"12:34:56 +05:20"' @* '$.datetime("HH24:MI:SS TZH:TZM").type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +set time zone '+00'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; + ?column? +----------------------- + "2017-03-10T12:34:00" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+00")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+00:00" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+00:12")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+00:12" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "-00:12:34")'; + ?column? 
+-------------------------------- + "2017-03-10T12:34:00-00:12:34" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "UTC")'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+05:00" +(1 row) + +select json '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +----------------------------- + "2017-03-10T12:34:00-05:00" +(1 row) + +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+05:20" +(1 row) + +select json '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +----------------------------- + "2017-03-10T12:34:00-05:20" +(1 row) + +select json '"12:34"' @* '$.datetime("HH24:MI")'; + ?column? +------------ + "12:34:00" +(1 row) + +select json '"12:34"' @* '$.datetime("HH24:MI TZH")'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '"12:34"' @* '$.datetime("HH24:MI TZH", "+00")'; + ?column? +------------------ + "12:34:00+00:00" +(1 row) + +select json '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +------------------ + "12:34:00+05:00" +(1 row) + +select json '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +------------------ + "12:34:00-05:00" +(1 row) + +select json '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00+05:20" +(1 row) + +select json '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00-05:20" +(1 row) + +set time zone '+10'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; + ?column? +----------------------- + "2017-03-10T12:34:00" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+10")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+10:00" +(1 row) + +select json '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+05:00" +(1 row) + +select json '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +----------------------------- + "2017-03-10T12:34:00-05:00" +(1 row) + +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+05:20" +(1 row) + +select json '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +----------------------------- + "2017-03-10T12:34:00-05:20" +(1 row) + +select json '"12:34"' @* '$.datetime("HH24:MI")'; + ?column? +------------ + "12:34:00" +(1 row) + +select json '"12:34"' @* '$.datetime("HH24:MI TZH")'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '"12:34"' @* '$.datetime("HH24:MI TZH", "+10")'; + ?column? +------------------ + "12:34:00+10:00" +(1 row) + +select json '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +------------------ + "12:34:00+05:00" +(1 row) + +select json '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +------------------ + "12:34:00-05:00" +(1 row) + +select json '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? 
+------------------ + "12:34:00+05:20" +(1 row) + +select json '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00-05:20" +(1 row) + +set time zone default; +select json '"2017-03-10"' @* '$.datetime().type()'; + ?column? +---------- + "date" +(1 row) + +select json '"2017-03-10"' @* '$.datetime()'; + ?column? +-------------- + "2017-03-10" +(1 row) + +select json '"2017-03-10 12:34:56"' @* '$.datetime().type()'; + ?column? +------------------------------- + "timestamp without time zone" +(1 row) + +select json '"2017-03-10 12:34:56"' @* '$.datetime()'; + ?column? +----------------------- + "2017-03-10T12:34:56" +(1 row) + +select json '"2017-03-10 12:34:56 +3"' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select json '"2017-03-10 12:34:56 +3"' @* '$.datetime()'; + ?column? +----------------------------- + "2017-03-10T12:34:56+03:00" +(1 row) + +select json '"2017-03-10 12:34:56 +3:10"' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select json '"2017-03-10 12:34:56 +3:10"' @* '$.datetime()'; + ?column? +----------------------------- + "2017-03-10T12:34:56+03:10" +(1 row) + +select json '"12:34:56"' @* '$.datetime().type()'; + ?column? +-------------------------- + "time without time zone" +(1 row) + +select json '"12:34:56"' @* '$.datetime()'; + ?column? +------------ + "12:34:56" +(1 row) + +select json '"12:34:56 +3"' @* '$.datetime().type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +select json '"12:34:56 +3"' @* '$.datetime()'; + ?column? +------------------ + "12:34:56+03:00" +(1 row) + +select json '"12:34:56 +3:10"' @* '$.datetime().type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +select json '"12:34:56 +3:10"' @* '$.datetime()'; + ?column? +------------------ + "12:34:56+03:10" +(1 row) + +set time zone '+00'; +-- date comparison +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +----------------------------- + "2017-03-10" + "2017-03-10T00:00:00" + "2017-03-10T03:00:00+03:00" +(3 rows) + +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +----------------------------- + "2017-03-10" + "2017-03-11" + "2017-03-10T00:00:00" + "2017-03-10T12:34:56" + "2017-03-10T03:00:00+03:00" +(5 rows) + +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +----------------------------- + "2017-03-09" + "2017-03-10T01:02:03+04:00" +(2 rows) + +-- time comparison +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))'; + ?column? 
+------------------ + "12:35:00" + "12:35:00+00:00" +(2 rows) + +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))'; + ?column? +------------------ + "12:35:00" + "12:36:00" + "12:35:00+00:00" +(3 rows) + +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))'; + ?column? +------------------ + "12:34:00" + "12:35:00+01:00" + "13:35:00+01:00" +(3 rows) + +-- timetz comparison +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +------------------ + "12:35:00+01:00" +(1 row) + +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ >= "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +------------------ + "12:35:00+01:00" + "12:36:00+01:00" + "12:35:00-02:00" + "11:35:00" + "12:35:00" +(5 rows) + +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +------------------ + "12:34:00+01:00" + "12:35:00+02:00" + "10:35:00" +(3 rows) + +-- timestamp comparison +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +----------------------------- + "2017-03-10T12:35:00" + "2017-03-10T13:35:00+01:00" +(2 rows) + +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +----------------------------- + "2017-03-10T12:35:00" + "2017-03-10T12:36:00" + "2017-03-10T13:35:00+01:00" + "2017-03-10T12:35:00-01:00" + "2017-03-11" +(5 rows) + +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +----------------------------- + "2017-03-10T12:34:00" + "2017-03-10T12:35:00+01:00" + "2017-03-10" +(3 rows) + +-- timestamptz comparison +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? 
+----------------------------- + "2017-03-10T12:35:00+01:00" + "2017-03-10T11:35:00" +(2 rows) + +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +----------------------------- + "2017-03-10T12:35:00+01:00" + "2017-03-10T12:36:00+01:00" + "2017-03-10T12:35:00-02:00" + "2017-03-10T11:35:00" + "2017-03-10T12:35:00" + "2017-03-11" +(6 rows) + +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +----------------------------- + "2017-03-10T12:34:00+01:00" + "2017-03-10T12:35:00+02:00" + "2017-03-10T10:35:00" + "2017-03-10" +(4 rows) + +set time zone default; +-- jsonpath operators +SELECT json '[{"a": 1}, {"a": 2}]' @* '$[*]'; + ?column? +---------- + {"a": 1} + {"a": 2} +(2 rows) + +SELECT json '[{"a": 1}, {"a": 2}]' @* '$[*] ? (@.a > 10)'; + ?column? +---------- +(0 rows) + +SELECT json '[{"a": 1}, {"a": 2}]' @# '$[*].a'; + ?column? +---------- + [1, 2] +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @# '$[*].a ? (@ == 1)'; + ?column? +---------- + 1 +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @# '$[*].a ? (@ > 10)'; + ?column? +---------- + +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @? '$[*] ? (@.a > 1)'; + ?column? +---------- + t +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @? '$[*].a ? (@ > 2)'; + ?column? +---------- + f +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 1'; + ?column? +---------- + t +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 2'; + ?column? +---------- + f +(1 row) + diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out index f045e08538..92cf4c79c7 100644 --- a/src/test/regress/expected/jsonb.out +++ b/src/test/regress/expected/jsonb.out @@ -2718,6 +2718,114 @@ SELECT count(*) FROM testjsonb WHERE j ?& ARRAY['public','disabled']; 42 (1 row) +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public)'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.bar)'; + count +------- + 0 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) || exists($.disabled)'; + count +------- + 337 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) && exists($.disabled)'; + count +------- + 42 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? 
'$.wait ? ("CC" == @)'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + count +------- + 0 +(1 row) + CREATE INDEX jidx ON testjsonb USING gin (j); SET enable_seqscan = off; SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}'; @@ -2793,6 +2901,196 @@ SELECT count(*) FROM testjsonb WHERE j ?& ARRAY['public','disabled']; 42 (1 row) +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; + QUERY PLAN +----------------------------------------------------------------- + Aggregate + -> Bitmap Heap Scan on testjsonb + Recheck Cond: (j @~ '($."wait" == null)'::jsonpath) + -> Bitmap Index Scan on jidx + Index Cond: (j @~ '($."wait" == null)'::jsonpath) +(5 rows) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.wait == null))'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.wait ? (@ == null))'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "foo"'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "bar"'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.array[*] == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array ? (@[*] == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array[*] ? (@ == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public)'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.bar)'; + count +------- + 0 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) || exists($.disabled)'; + count +------- + 337 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) && exists($.disabled)'; + count +------- + 42 +(1 row) + +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + QUERY PLAN +------------------------------------------------------------------- + Aggregate + -> Bitmap Heap Scan on testjsonb + Recheck Cond: (j @? '$."wait"?(@ == null)'::jsonpath) + -> Bitmap Index Scan on jidx + Index Cond: (j @? '$."wait"?(@ == null)'::jsonpath) +(5 rows) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? 
("CC" == @)'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.array[*] == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.array ? (@[*] == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.array[*] ? (@ == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + count +------- + 0 +(1 row) + -- array exists - array elements should behave as keys (for GIN index scans too) CREATE INDEX jidx_array ON testjsonb USING gin((j->'array')); SELECT count(*) from testjsonb WHERE j->'array' ? 'bar'; @@ -2943,6 +3241,161 @@ SELECT count(*) FROM testjsonb WHERE j @> '{}'; 1012 (1 row) +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.wait == null))'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.wait ? (@ == null))'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "foo"'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "bar"'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.array[*] == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array ? (@[*] == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array[*] ? (@ == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; + count +------- + 1012 +(1 row) + +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + QUERY PLAN +------------------------------------------------------------------- + Aggregate + -> Bitmap Heap Scan on testjsonb + Recheck Cond: (j @? '$."wait"?(@ == null)'::jsonpath) + -> Bitmap Index Scan on jidx + Index Cond: (j @? '$."wait"?(@ == null)'::jsonpath) +(5 rows) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? 
(@.array[*] == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.array ? (@[*] == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.array[*] ? (@ == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + count +------- + 0 +(1 row) + RESET enable_seqscan; DROP INDEX jidx; -- nested tests diff --git a/src/test/regress/expected/jsonb_jsonpath.out b/src/test/regress/expected/jsonb_jsonpath.out new file mode 100644 index 0000000000..f93c930473 --- /dev/null +++ b/src/test/regress/expected/jsonb_jsonpath.out @@ -0,0 +1,1711 @@ +select jsonb '{"a": 12}' @? '$.a.b'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": 12}' @? '$.b'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": {"a": 12}}' @? '$.a.a'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"a": 12}}' @? '$.*.a'; + ?column? +---------- + t +(1 row) + +select jsonb '{"b": {"a": 12}}' @? '$.*.a'; + ?column? +---------- + t +(1 row) + +select jsonb '{}' @? '$.*'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": 1}' @? '$.*'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{1}'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{2}'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{3}'; + ?column? +---------- + f +(1 row) + +select jsonb '[]' @? '$[*]'; + ?column? +---------- + f +(1 row) + +select jsonb '[1]' @? '$[*]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$[1]'; + ?column? +---------- + f +(1 row) + +select jsonb '[1]' @? 'strict $[1]'; + ?column? +---------- + +(1 row) + +select jsonb '[1]' @* 'strict $[1]'; +ERROR: Invalid SQL/JSON subscript +select jsonb '[1]' @? '$[0]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$[0.3]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$[0.5]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$[0.9]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$[1.2]'; + ?column? +---------- + f +(1 row) + +select jsonb '[1]' @? 'strict $[1.2]'; + ?column? +---------- + +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] > @.b[*])'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,"5"]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,"5"]}' @? 'strict $ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,null]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select jsonb '1' @? '$ ? ((@ == "1") is unknown)'; + ?column? +---------- + t +(1 row) + +select jsonb '1' @? '$ ? ((@ == 1) is unknown)'; + ?column? +---------- + f +(1 row) + +select jsonb '[{"a": 1}, {"a": 2}]' @? '$[0 to 1] ? (@.a > 1)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.a'; + ?column? +---------- + 12 +(1 row) + +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.b'; + ?column? +----------- + {"a": 13} +(1 row) + +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.*'; + ?column? 
+----------- + 12 + {"a": 13} +(2 rows) + +select jsonb '{"a": 12, "b": {"a": 13}}' @* 'lax $.*.a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[*].a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[*].*'; + ?column? +---------- + 13 + 14 +(2 rows) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0].a'; + ?column? +---------- +(0 rows) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[1].a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[2].a'; + ?column? +---------- +(0 rows) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0,1].a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0 to 10].a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}, "ccc", true]' @* '$[2.5 - 1 to $.size() - 2]'; + ?column? +----------- + {"a": 13} + {"b": 14} + "ccc" +(3 rows) + +select jsonb '1' @* 'lax $[0]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '1' @* 'lax $[*]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[1]' @* 'lax $[0]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[1]' @* 'lax $[*]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[1,2,3]' @* 'lax $[*]'; + ?column? +---------- + 1 + 2 + 3 +(3 rows) + +select jsonb '[]' @* '$[last]'; + ?column? +---------- +(0 rows) + +select jsonb '[]' @* 'strict $[last]'; +ERROR: Invalid SQL/JSON subscript +select jsonb '[1]' @* '$[last]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[1,2,3]' @* '$[last]'; + ?column? +---------- + 3 +(1 row) + +select jsonb '[1,2,3]' @* '$[last - 1]'; + ?column? +---------- + 2 +(1 row) + +select jsonb '[1,2,3]' @* '$[last ? (@.type() == "number")]'; + ?column? +---------- + 3 +(1 row) + +select jsonb '[1,2,3]' @* '$[last ? (@.type() == "string")]'; +ERROR: Invalid SQL/JSON subscript +select * from jsonpath_query(jsonb '{"a": 10}', '$'); + jsonpath_query +---------------- + {"a": 10} +(1 row) + +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)'); +ERROR: could not find jsonpath variable 'value' +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + {"a": 10} +(1 row) + +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)', '{"value" : 8}'); + jsonpath_query +---------------- +(0 rows) + +select * from jsonpath_query(jsonb '{"a": 10}', '$.a ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 +(1 row) + +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$[*] ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 + 11 + 12 +(3 rows) + +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$[0,1] ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 + 11 +(2 rows) + +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$[0 to 2] ? (@ < $value)', '{"value" : 15}'); + jsonpath_query +---------------- + 10 + 11 + 12 +(3 rows) + +select * from jsonpath_query(jsonb '[1,"1",2,"2",null]', '$[*] ? (@ == "1")'); + jsonpath_query +---------------- + "1" +(1 row) + +select * from jsonpath_query(jsonb '[1,"1",2,"2",null]', '$[*] ? (@ == $value)', '{"value" : "1"}'); + jsonpath_query +---------------- + "1" +(1 row) + +select * from jsonpath_query(jsonb '[1, "2", null]', '$[*] ? 
(@ != null)'); + jsonpath_query +---------------- + 1 + "2" +(2 rows) + +select * from jsonpath_query(jsonb '[1, "2", null]', '$[*] ? (@ == null)'); + jsonpath_query +---------------- + null +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**'; + ?column? +----------------- + {"a": {"b": 1}} + {"b": 1} + 1 +(3 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0}'; + ?column? +----------------- + {"a": {"b": 1}} +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0 to last}'; + ?column? +----------------- + {"a": {"b": 1}} + {"b": 1} + 1 +(3 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1}'; + ?column? +---------- + {"b": 1} +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1 to last}'; + ?column? +---------- + {"b": 1} + 1 +(2 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{2}'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{2 to last}'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{3 to last}'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0 to last}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1 to last}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1 to 2}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0 to last}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1 to last}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1 to 2}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{2 to 3}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{0}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{1}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{0 to last}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{1 to last}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{1 to 2}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{0}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{0 to last}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1 to last}.b ? ( @ > 0)'; + ?column? 
+---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1 to 2}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{2 to 3}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.x))'; + ?column? +---------- + {"x": 2} +(1 row) + +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.y))'; + ?column? +---------- +(0 rows) + +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.x ? (@ >= 2) ))'; + ?column? +---------- + {"x": 2} +(1 row) + +--test ternary logic +select + x, y, + jsonpath_query( + jsonb '[true, false, null]', + '$[*] ? (@ == true && ($x == true && $y == true) || + @ == false && !($x == true && $y == true) || + @ == null && ($x == true && $y == true) is unknown)', + jsonb_build_object('x', x, 'y', y) + ) as "x && y" +from + (values (jsonb 'true'), ('false'), ('"null"')) x(x), + (values (jsonb 'true'), ('false'), ('"null"')) y(y); + x | y | x && y +--------+--------+-------- + true | true | true + true | false | false + true | "null" | null + false | true | false + false | false | false + false | "null" | false + "null" | true | null + "null" | false | false + "null" | "null" | null +(9 rows) + +select + x, y, + jsonpath_query( + jsonb '[true, false, null]', + '$[*] ? (@ == true && ($x == true || $y == true) || + @ == false && !($x == true || $y == true) || + @ == null && ($x == true || $y == true) is unknown)', + jsonb_build_object('x', x, 'y', y) + ) as "x || y" +from + (values (jsonb 'true'), ('false'), ('"null"')) x(x), + (values (jsonb 'true'), ('false'), ('"null"')) y(y); + x | y | x || y +--------+--------+-------- + true | true | true + true | false | true + true | "null" | true + false | true | true + false | false | false + false | "null" | null + "null" | true | true + "null" | false | null + "null" | "null" | null +(9 rows) + +select jsonb '{"a": 1, "b":1}' @? '$ ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$ ? (.a == .b)'; + ?column? +---------- + f +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.c ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.c ? ($.c.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.* ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": 1, "b":1}' @? '$.** ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.** ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == 1 + 1)'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == (1 + 1))'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == .b + 1)'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == (.b + 1))'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == - 1)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == -1)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == -.b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == - .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 0, "b":1}}' @? 
'$.** ? (.a == 1 - .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @? '$.** ? (.a == 1 - - .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 0, "b":1}}' @? '$.** ? (.a == 1 - +.b)'; + ?column? +---------- + t +(1 row) + +select jsonb '[1,2,3]' @? '$ ? (+@[*] > +2)'; + ?column? +---------- + t +(1 row) + +select jsonb '[1,2,3]' @? '$ ? (+@[*] > +3)'; + ?column? +---------- + f +(1 row) + +select jsonb '[1,2,3]' @? '$ ? (-@[*] < -2)'; + ?column? +---------- + t +(1 row) + +select jsonb '[1,2,3]' @? '$ ? (-@[*] < -3)'; + ?column? +---------- + f +(1 row) + +select jsonb '1' @? '$ ? ($ > 0)'; + ?column? +---------- + t +(1 row) + +-- arithmetic errors +select jsonb '[1,2,0,3]' @* '$[*] ? (2 / @ > 0)'; + ?column? +---------- + 1 + 2 + 3 +(3 rows) + +select jsonb '[1,2,0,3]' @* '$[*] ? ((2 / @ > 0) is unknown)'; + ?column? +---------- + 0 +(1 row) + +select jsonb '0' @* '1 / $'; +ERROR: division by zero +-- unwrapping of operator arguments in lax mode +select jsonb '{"a": [2]}' @* 'lax $.a * 3'; + ?column? +---------- + 6 +(1 row) + +select jsonb '{"a": [2]}' @* 'lax $.a + 3'; + ?column? +---------- + 5 +(1 row) + +select jsonb '{"a": [2, 3, 4]}' @* 'lax -$.a'; + ?column? +---------- + -2 + -3 + -4 +(3 rows) + +-- should fail +select jsonb '{"a": [1, 2]}' @* 'lax $.a * 3'; +ERROR: Singleton SQL/JSON item required +-- extension: boolean expressions +select jsonb '2' @* '$ > 1'; + ?column? +---------- + true +(1 row) + +select jsonb '2' @* '$ <= 1'; + ?column? +---------- + false +(1 row) + +select jsonb '2' @* '$ == "2"'; + ?column? +---------- + null +(1 row) + +select jsonb '2' @~ '$ > 1'; + ?column? +---------- + t +(1 row) + +select jsonb '2' @~ '$ <= 1'; + ?column? +---------- + f +(1 row) + +select jsonb '2' @~ '$ == "2"'; + ?column? +---------- + +(1 row) + +select jsonb '2' @~ '1'; + ?column? +---------- + +(1 row) + +select jsonb '{}' @~ '$'; + ?column? +---------- + +(1 row) + +select jsonb '[]' @~ '$'; + ?column? +---------- + +(1 row) + +select jsonb '[1,2,3]' @~ '$[*]'; +ERROR: Singleton SQL/JSON item required +select jsonb '[]' @~ '$[*]'; +ERROR: Singleton SQL/JSON item required +select jsonpath_predicate(jsonb '[[1, true], [2, false]]', 'strict $[*] ? (@[0] > $x) [1]', '{"x": 1}'); + jsonpath_predicate +-------------------- + f +(1 row) + +select jsonpath_predicate(jsonb '[[1, true], [2, false]]', 'strict $[*] ? (@[0] < $x) [1]', '{"x": 2}'); + jsonpath_predicate +-------------------- + t +(1 row) + +select jsonb '[null,1,true,"a",[],{}]' @* '$.type()'; + ?column? +---------- + "array" +(1 row) + +select jsonb '[null,1,true,"a",[],{}]' @* 'lax $.type()'; + ?column? +---------- + "array" +(1 row) + +select jsonb '[null,1,true,"a",[],{}]' @* '$[*].type()'; + ?column? +----------- + "null" + "number" + "boolean" + "string" + "array" + "object" +(6 rows) + +select jsonb 'null' @* 'null.type()'; + ?column? +---------- + "null" +(1 row) + +select jsonb 'null' @* 'true.type()'; + ?column? +----------- + "boolean" +(1 row) + +select jsonb 'null' @* '123.type()'; + ?column? +---------- + "number" +(1 row) + +select jsonb 'null' @* '"123".type()'; + ?column? +---------- + "string" +(1 row) + +select jsonb '{"a": 2}' @* '($.a - 5).abs() + 10'; + ?column? +---------- + 13 +(1 row) + +select jsonb '{"a": 2.5}' @* '-($.a * $.a).floor() + 10'; + ?column? +---------- + 4 +(1 row) + +select jsonb '[1, 2, 3]' @* '($[*] > 2) ? (@ == true)'; + ?column? +---------- + true +(1 row) + +select jsonb '[1, 2, 3]' @* '($[*] > 3).type()'; + ?column? 
+----------- + "boolean" +(1 row) + +select jsonb '[1, 2, 3]' @* '($[*].a > 3).type()'; + ?column? +----------- + "boolean" +(1 row) + +select jsonb '[1, 2, 3]' @* 'strict ($[*].a > 3).type()'; + ?column? +---------- + "null" +(1 row) + +select jsonb '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'strict $[*].size()'; +ERROR: SQL/JSON array not found +select jsonb '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'lax $[*].size()'; + ?column? +---------- + 1 + 1 + 1 + 1 + 0 + 1 + 3 + 1 + 1 +(9 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].abs()'; + ?column? +---------- + 0 + 1 + 2 + 3.4 + 5.6 +(5 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].floor()'; + ?column? +---------- + 0 + 1 + -2 + -4 + 5 +(5 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling()'; + ?column? +---------- + 0 + 1 + -2 + -3 + 6 +(5 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs()'; + ?column? +---------- + 0 + 1 + 2 + 3 + 6 +(5 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs().type()'; + ?column? +---------- + "number" + "number" + "number" + "number" + "number" +(5 rows) + +select jsonb '[{},1]' @* '$[*].keyvalue()'; +ERROR: SQL/JSON object not found +select jsonb '{}' @* '$.keyvalue()'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}' @* '$.keyvalue()'; + ?column? +---------------------------------------------- + {"id": 0, "key": "a", "value": 1} + {"id": 0, "key": "b", "value": [1, 2]} + {"id": 0, "key": "c", "value": {"a": "bbb"}} +(3 rows) + +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* '$[*].keyvalue()'; + ?column? +----------------------------------------------- + {"id": 12, "key": "a", "value": 1} + {"id": 12, "key": "b", "value": [1, 2]} + {"id": 72, "key": "c", "value": {"a": "bbb"}} +(3 rows) + +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'strict $.keyvalue()'; +ERROR: SQL/JSON object not found +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'lax $.keyvalue()'; + ?column? +----------------------------------------------- + {"id": 12, "key": "a", "value": 1} + {"id": 12, "key": "b", "value": [1, 2]} + {"id": 72, "key": "c", "value": {"a": "bbb"}} +(3 rows) + +select jsonb 'null' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb 'true' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb '[]' @* '$.double()'; + ?column? +---------- +(0 rows) + +select jsonb '[]' @* 'strict $.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb '{}' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb '1.23' @* '$.double()'; + ?column? +---------- + 1.23 +(1 row) + +select jsonb '"1.23"' @* '$.double()'; + ?column? +---------- + 1.23 +(1 row) + +select jsonb '"1.23aaa"' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb '["", "a", "abc", "abcabc"]' @* '$[*] ? (@ starts with "abc")'; + ?column? +---------- + "abc" + "abcabc" +(2 rows) + +select jsonb '["", "a", "abc", "abcabc"]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------------------------- + ["", "a", "abc", "abcabc"] +(1 row) + +select jsonb '["", "a", "abd", "abdabc"]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------- +(0 rows) + +select jsonb '["abc", "abcabc", null, 1]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------- +(0 rows) + +select jsonb '["abc", "abcabc", null, 1]' @* 'strict $ ? ((@[*] starts with "abc") is unknown)'; + ?column? 
+---------------------------- + ["abc", "abcabc", null, 1] +(1 row) + +select jsonb '[[null, 1, "abc", "abcabc"]]' @* 'lax $ ? (@[*] starts with "abc")'; + ?column? +---------------------------- + [null, 1, "abc", "abcabc"] +(1 row) + +select jsonb '[[null, 1, "abd", "abdabc"]]' @* 'lax $ ? ((@[*] starts with "abc") is unknown)'; + ?column? +---------------------------- + [null, 1, "abd", "abdabc"] +(1 row) + +select jsonb '[null, 1, "abd", "abdabc"]' @* 'lax $[*] ? ((@ starts with "abc") is unknown)'; + ?column? +---------- + null + 1 +(2 rows) + +select jsonb '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c")'; + ?column? +---------- + "abc" + "abdacb" +(2 rows) + +select jsonb '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c" flag "i")'; + ?column? +---------- + "abc" + "aBdC" + "abdacb" +(3 rows) + +select jsonb 'null' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb 'true' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '1' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '[]' @* '$.datetime()'; + ?column? +---------- +(0 rows) + +select jsonb '[]' @* 'strict $.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '{}' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '""' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '"10-03-2017"' @* '$.datetime("dd-mm-yyyy")'; + ?column? +-------------- + "2017-03-10" +(1 row) + +select jsonb '"10-03-2017"' @* '$.datetime("dd-mm-yyyy").type()'; + ?column? +---------- + "date" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy")'; + ?column? +-------------- + "2017-03-10" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy").type()'; + ?column? +---------- + "date" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* ' $.datetime("dd-mm-yyyy HH24:MI").type()'; + ?column? +------------------------------- + "timestamp without time zone" +(1 row) + +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select jsonb '"12:34:56"' @* '$.datetime("HH24:MI:SS").type()'; + ?column? +-------------------------- + "time without time zone" +(1 row) + +select jsonb '"12:34:56 +05:20"' @* '$.datetime("HH24:MI:SS TZH:TZM").type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +set time zone '+00'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; + ?column? +----------------------- + "2017-03-10T12:34:00" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+00")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+00:00" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+00:12")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+00:12" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "-00:12:34")'; + ?column? 
+-------------------------------- + "2017-03-10T12:34:00-00:12:34" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "UTC")'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+05:00" +(1 row) + +select jsonb '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +----------------------------- + "2017-03-10T12:34:00-05:00" +(1 row) + +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+05:20" +(1 row) + +select jsonb '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +----------------------------- + "2017-03-10T12:34:00-05:20" +(1 row) + +select jsonb '"12:34"' @* '$.datetime("HH24:MI")'; + ?column? +------------ + "12:34:00" +(1 row) + +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH")'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH", "+00")'; + ?column? +------------------ + "12:34:00+00:00" +(1 row) + +select jsonb '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +------------------ + "12:34:00+05:00" +(1 row) + +select jsonb '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +------------------ + "12:34:00-05:00" +(1 row) + +select jsonb '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00+05:20" +(1 row) + +select jsonb '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00-05:20" +(1 row) + +set time zone '+10'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; + ?column? +----------------------- + "2017-03-10T12:34:00" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+10")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+10:00" +(1 row) + +select jsonb '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+05:00" +(1 row) + +select jsonb '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +----------------------------- + "2017-03-10T12:34:00-05:00" +(1 row) + +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +----------------------------- + "2017-03-10T12:34:00+05:20" +(1 row) + +select jsonb '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +----------------------------- + "2017-03-10T12:34:00-05:20" +(1 row) + +select jsonb '"12:34"' @* '$.datetime("HH24:MI")'; + ?column? +------------ + "12:34:00" +(1 row) + +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH")'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH", "+10")'; + ?column? +------------------ + "12:34:00+10:00" +(1 row) + +select jsonb '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +------------------ + "12:34:00+05:00" +(1 row) + +select jsonb '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +------------------ + "12:34:00-05:00" +(1 row) + +select jsonb '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? 
+------------------ + "12:34:00+05:20" +(1 row) + +select jsonb '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00-05:20" +(1 row) + +set time zone default; +select jsonb '"2017-03-10"' @* '$.datetime().type()'; + ?column? +---------- + "date" +(1 row) + +select jsonb '"2017-03-10"' @* '$.datetime()'; + ?column? +-------------- + "2017-03-10" +(1 row) + +select jsonb '"2017-03-10 12:34:56"' @* '$.datetime().type()'; + ?column? +------------------------------- + "timestamp without time zone" +(1 row) + +select jsonb '"2017-03-10 12:34:56"' @* '$.datetime()'; + ?column? +----------------------- + "2017-03-10T12:34:56" +(1 row) + +select jsonb '"2017-03-10 12:34:56 +3"' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select jsonb '"2017-03-10 12:34:56 +3"' @* '$.datetime()'; + ?column? +----------------------------- + "2017-03-10T12:34:56+03:00" +(1 row) + +select jsonb '"2017-03-10 12:34:56 +3:10"' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select jsonb '"2017-03-10 12:34:56 +3:10"' @* '$.datetime()'; + ?column? +----------------------------- + "2017-03-10T12:34:56+03:10" +(1 row) + +select jsonb '"12:34:56"' @* '$.datetime().type()'; + ?column? +-------------------------- + "time without time zone" +(1 row) + +select jsonb '"12:34:56"' @* '$.datetime()'; + ?column? +------------ + "12:34:56" +(1 row) + +select jsonb '"12:34:56 +3"' @* '$.datetime().type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +select jsonb '"12:34:56 +3"' @* '$.datetime()'; + ?column? +------------------ + "12:34:56+03:00" +(1 row) + +select jsonb '"12:34:56 +3:10"' @* '$.datetime().type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +select jsonb '"12:34:56 +3:10"' @* '$.datetime()'; + ?column? +------------------ + "12:34:56+03:10" +(1 row) + +set time zone '+00'; +-- date comparison +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +----------------------------- + "2017-03-10" + "2017-03-10T00:00:00" + "2017-03-10T03:00:00+03:00" +(3 rows) + +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +----------------------------- + "2017-03-10" + "2017-03-11" + "2017-03-10T00:00:00" + "2017-03-10T12:34:56" + "2017-03-10T03:00:00+03:00" +(5 rows) + +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +----------------------------- + "2017-03-09" + "2017-03-10T01:02:03+04:00" +(2 rows) + +-- time comparison +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))'; + ?column? 
+------------------ + "12:35:00" + "12:35:00+00:00" +(2 rows) + +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))'; + ?column? +------------------ + "12:35:00" + "12:36:00" + "12:35:00+00:00" +(3 rows) + +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))'; + ?column? +------------------ + "12:34:00" + "12:35:00+01:00" + "13:35:00+01:00" +(3 rows) + +-- timetz comparison +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +------------------ + "12:35:00+01:00" +(1 row) + +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ >= "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +------------------ + "12:35:00+01:00" + "12:36:00+01:00" + "12:35:00-02:00" + "11:35:00" + "12:35:00" +(5 rows) + +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +------------------ + "12:34:00+01:00" + "12:35:00+02:00" + "10:35:00" +(3 rows) + +-- timestamp comparison +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +----------------------------- + "2017-03-10T12:35:00" + "2017-03-10T13:35:00+01:00" +(2 rows) + +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +----------------------------- + "2017-03-10T12:35:00" + "2017-03-10T12:36:00" + "2017-03-10T13:35:00+01:00" + "2017-03-10T12:35:00-01:00" + "2017-03-11" +(5 rows) + +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +----------------------------- + "2017-03-10T12:34:00" + "2017-03-10T12:35:00+01:00" + "2017-03-10" +(3 rows) + +-- timestamptz comparison +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? 
(@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +----------------------------- + "2017-03-10T12:35:00+01:00" + "2017-03-10T11:35:00" +(2 rows) + +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +----------------------------- + "2017-03-10T12:35:00+01:00" + "2017-03-10T12:36:00+01:00" + "2017-03-10T12:35:00-02:00" + "2017-03-10T11:35:00" + "2017-03-10T12:35:00" + "2017-03-11" +(6 rows) + +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +----------------------------- + "2017-03-10T12:34:00+01:00" + "2017-03-10T12:35:00+02:00" + "2017-03-10T10:35:00" + "2017-03-10" +(4 rows) + +set time zone default; +-- jsonpath operators +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '$[*]'; + ?column? +---------- + {"a": 1} + {"a": 2} +(2 rows) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '$[*] ? (@.a > 10)'; + ?column? +---------- +(0 rows) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @# '$[*].a'; + ?column? +---------- + [1, 2] +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @# '$[*].a ? (@ == 1)'; + ?column? +---------- + 1 +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @# '$[*].a ? (@ > 10)'; + ?column? +---------- + +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @? '$[*].a ? (@ > 1)'; + ?column? +---------- + t +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @? '$[*] ? (@.a > 2)'; + ?column? +---------- + f +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 1'; + ?column? +---------- + t +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 2'; + ?column? 
+---------- + f +(1 row) + diff --git a/src/test/regress/expected/jsonpath.out b/src/test/regress/expected/jsonpath.out new file mode 100644 index 0000000000..193fc6841a --- /dev/null +++ b/src/test/regress/expected/jsonpath.out @@ -0,0 +1,800 @@ +--jsonpath io +select ''::jsonpath; +ERROR: invalid input syntax for jsonpath: "" +LINE 1: select ''::jsonpath; + ^ +select '$'::jsonpath; + jsonpath +---------- + $ +(1 row) + +select 'strict $'::jsonpath; + jsonpath +---------- + strict $ +(1 row) + +select 'lax $'::jsonpath; + jsonpath +---------- + $ +(1 row) + +select '$.a'::jsonpath; + jsonpath +---------- + $."a" +(1 row) + +select '$.a.v'::jsonpath; + jsonpath +----------- + $."a"."v" +(1 row) + +select '$.a.*'::jsonpath; + jsonpath +---------- + $."a".* +(1 row) + +select '$.*[*]'::jsonpath; + jsonpath +---------- + $.*[*] +(1 row) + +select '$.a[*]'::jsonpath; + jsonpath +---------- + $."a"[*] +(1 row) + +select '$.a[*][*]'::jsonpath; + jsonpath +------------- + $."a"[*][*] +(1 row) + +select '$[*]'::jsonpath; + jsonpath +---------- + $[*] +(1 row) + +select '$[0]'::jsonpath; + jsonpath +---------- + $[0] +(1 row) + +select '$[*][0]'::jsonpath; + jsonpath +---------- + $[*][0] +(1 row) + +select '$[*].a'::jsonpath; + jsonpath +---------- + $[*]."a" +(1 row) + +select '$[*][0].a.b'::jsonpath; + jsonpath +----------------- + $[*][0]."a"."b" +(1 row) + +select '$.a.**.b'::jsonpath; + jsonpath +-------------- + $."a".**."b" +(1 row) + +select '$.a.**{2}.b'::jsonpath; + jsonpath +----------------- + $."a".**{2}."b" +(1 row) + +select '$.a.**{2 to 2}.b'::jsonpath; + jsonpath +----------------- + $."a".**{2}."b" +(1 row) + +select '$.a.**{2 to 5}.b'::jsonpath; + jsonpath +---------------------- + $."a".**{2 to 5}."b" +(1 row) + +select '$.a.**{0 to 5}.b'::jsonpath; + jsonpath +---------------------- + $."a".**{0 to 5}."b" +(1 row) + +select '$.a.**{5 to last}.b'::jsonpath; + jsonpath +------------------------- + $."a".**{5 to last}."b" +(1 row) + +select '$+1'::jsonpath; + jsonpath +---------- + ($ + 1) +(1 row) + +select '$-1'::jsonpath; + jsonpath +---------- + ($ - 1) +(1 row) + +select '$--+1'::jsonpath; + jsonpath +---------- + ($ - -1) +(1 row) + +select '$.a/+-1'::jsonpath; + jsonpath +-------------- + ($."a" / -1) +(1 row) + +select '"\b\f\r\n\t\v\"\''\\"'::jsonpath; + jsonpath +------------------------- + "\b\f\r\n\t\u000b\"'\\" +(1 row) + +select '''\b\f\r\n\t\v\"\''\\'''::jsonpath; + jsonpath +------------------------- + "\b\f\r\n\t\u000b\"'\\" +(1 row) + +select '"\x50\u0067\u{53}\u{051}\u{00004C}"'::jsonpath; + jsonpath +---------- + "PgSQL" +(1 row) + +select '''\x50\u0067\u{53}\u{051}\u{00004C}'''::jsonpath; + jsonpath +---------- + "PgSQL" +(1 row) + +select '$.foo\x50\u0067\u{53}\u{051}\u{00004C}\t\"bar'::jsonpath; + jsonpath +--------------------- + $."fooPgSQL\t\"bar" +(1 row) + +select '$.g ? ($.a == 1)'::jsonpath; + jsonpath +-------------------- + $."g"?($."a" == 1) +(1 row) + +select '$.g ? (@ == 1)'::jsonpath; + jsonpath +---------------- + $."g"?(@ == 1) +(1 row) + +select '$.g ? (.a == 1)'::jsonpath; + jsonpath +-------------------- + $."g"?(@."a" == 1) +(1 row) + +select '$.g ? (@.a == 1)'::jsonpath; + jsonpath +-------------------- + $."g"?(@."a" == 1) +(1 row) + +select '$.g ? (@.a == 1 || @.a == 4)'::jsonpath; + jsonpath +---------------------------------- + $."g"?(@."a" == 1 || @."a" == 4) +(1 row) + +select '$.g ? (@.a == 1 && @.a == 4)'::jsonpath; + jsonpath +---------------------------------- + $."g"?(@."a" == 1 && @."a" == 4) +(1 row) + +select '$.g ? 
(@.a == 1 || @.a == 4 && @.b == 7)'::jsonpath; + jsonpath +------------------------------------------------ + $."g"?(@."a" == 1 || @."a" == 4 && @."b" == 7) +(1 row) + +select '$.g ? (@.a == 1 || !(@.a == 4) && @.b == 7)'::jsonpath; + jsonpath +--------------------------------------------------- + $."g"?(@."a" == 1 || !(@."a" == 4) && @."b" == 7) +(1 row) + +select '$.g ? (@.a == 1 || !(@.x >= 123 || @.a == 4) && @.b == 7)'::jsonpath; + jsonpath +------------------------------------------------------------------- + $."g"?(@."a" == 1 || !(@."x" >= 123 || @."a" == 4) && @."b" == 7) +(1 row) + +select '$.g ? (.x >= @[*]?(@.a > "abc"))'::jsonpath; + jsonpath +--------------------------------------- + $."g"?(@."x" >= @[*]?(@."a" > "abc")) +(1 row) + +select '$.g ? ((@.x >= 123 || @.a == 4) is unknown)'::jsonpath; + jsonpath +------------------------------------------------- + $."g"?((@."x" >= 123 || @."a" == 4) is unknown) +(1 row) + +select '$.g ? (exists (.x))'::jsonpath; + jsonpath +------------------------ + $."g"?(exists (@."x")) +(1 row) + +select '$.g ? (exists (@.x ? (@ == 14)))'::jsonpath; + jsonpath +---------------------------------- + $."g"?(exists (@."x"?(@ == 14))) +(1 row) + +select '$.g ? (exists (.x ? (@ == 14)))'::jsonpath; + jsonpath +---------------------------------- + $."g"?(exists (@."x"?(@ == 14))) +(1 row) + +select '$.g ? ((@.x >= 123 || @.a == 4) && exists (.x ? (@ == 14)))'::jsonpath; + jsonpath +------------------------------------------------------------------ + $."g"?((@."x" >= 123 || @."a" == 4) && exists (@."x"?(@ == 14))) +(1 row) + +select '$.g ? (+@.x >= +-(+@.a + 2))'::jsonpath; + jsonpath +------------------------------------ + $."g"?(+@."x" >= +(-(+@."a" + 2))) +(1 row) + +select '$a'::jsonpath; + jsonpath +---------- + $"a" +(1 row) + +select '$a.b'::jsonpath; + jsonpath +---------- + $"a"."b" +(1 row) + +select '$a[*]'::jsonpath; + jsonpath +---------- + $"a"[*] +(1 row) + +select '$.g ? (@.zip == $zip)'::jsonpath; + jsonpath +--------------------------- + $."g"?(@."zip" == $"zip") +(1 row) + +select '$.a[1,2, 3 to 16]'::jsonpath; + jsonpath +-------------------- + $."a"[1,2,3 to 16] +(1 row) + +select '$.a[$a + 1, ($b[*]) to -($[0] * 2)]'::jsonpath; + jsonpath +---------------------------------------- + $."a"[$"a" + 1,$"b"[*] to -($[0] * 2)] +(1 row) + +select '$.a[$.a.size() - 3]'::jsonpath; + jsonpath +------------------------- + $."a"[$."a".size() - 3] +(1 row) + +select 'last'::jsonpath; +ERROR: LAST is allowed only in array subscripts +LINE 1: select 'last'::jsonpath; + ^ +select '"last"'::jsonpath; + jsonpath +---------- + "last" +(1 row) + +select '$.last'::jsonpath; + jsonpath +---------- + $."last" +(1 row) + +select '$ ? (last > 0)'::jsonpath; +ERROR: LAST is allowed only in array subscripts +LINE 1: select '$ ? (last > 0)'::jsonpath; + ^ +select '$[last]'::jsonpath; + jsonpath +---------- + $[last] +(1 row) + +select '$[$[0] ? 
(last > 0)]'::jsonpath; + jsonpath +-------------------- + $[$[0]?(last > 0)] +(1 row) + +select 'null.type()'::jsonpath; + jsonpath +------------- + null.type() +(1 row) + +select '1.type()'::jsonpath; + jsonpath +---------- + 1.type() +(1 row) + +select '"aaa".type()'::jsonpath; + jsonpath +-------------- + "aaa".type() +(1 row) + +select 'true.type()'::jsonpath; + jsonpath +------------- + true.type() +(1 row) + +select '$.datetime()'::jsonpath; + jsonpath +-------------- + $.datetime() +(1 row) + +select '$.datetime("datetime template")'::jsonpath; + jsonpath +--------------------------------- + $.datetime("datetime template") +(1 row) + +select '$ ? (@ starts with "abc")'::jsonpath; + jsonpath +------------------------- + $?(@ starts with "abc") +(1 row) + +select '$ ? (@ starts with $var)'::jsonpath; + jsonpath +-------------------------- + $?(@ starts with $"var") +(1 row) + +select '$ ? (@ like_regex "(invalid pattern")'::jsonpath; +ERROR: invalid regular expression: parentheses () not balanced +LINE 1: select '$ ? (@ like_regex "(invalid pattern")'::jsonpath; + ^ +select '$ ? (@ like_regex "pattern")'::jsonpath; + jsonpath +---------------------------- + $?(@ like_regex "pattern") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "")'::jsonpath; + jsonpath +---------------------------- + $?(@ like_regex "pattern") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "i")'::jsonpath; + jsonpath +------------------------------------- + $?(@ like_regex "pattern" flag "i") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "is")'::jsonpath; + jsonpath +-------------------------------------- + $?(@ like_regex "pattern" flag "is") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "isim")'::jsonpath; + jsonpath +-------------------------------------- + $?(@ like_regex "pattern" flag "im") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "xsms")'::jsonpath; + jsonpath +-------------------------------------- + $?(@ like_regex "pattern" flag "sx") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "a")'::jsonpath; +ERROR: bad jsonpath representation +LINE 1: select '$ ? (@ like_regex "pattern" flag "a")'::jsonpath; + ^ +DETAIL: unrecognized flag of LIKE_REGEX predicate at or near """ +select '$ < 1'::jsonpath; + jsonpath +---------- + ($ < 1) +(1 row) + +select '($ < 1) || $.a.b <= $x'::jsonpath; + jsonpath +------------------------------ + ($ < 1 || $."a"."b" <= $"x") +(1 row) + +select '@ + 1'::jsonpath; +ERROR: @ is not allowed in root expressions +LINE 1: select '@ + 1'::jsonpath; + ^ +select '($).a.b'::jsonpath; + jsonpath +----------- + $."a"."b" +(1 row) + +select '($.a.b).c.d'::jsonpath; + jsonpath +------------------- + $."a"."b"."c"."d" +(1 row) + +select '($.a.b + -$.x.y).c.d'::jsonpath; + jsonpath +---------------------------------- + ($."a"."b" + -$."x"."y")."c"."d" +(1 row) + +select '(-+$.a.b).c.d'::jsonpath; + jsonpath +------------------------- + (-(+$."a"."b"))."c"."d" +(1 row) + +select '1 + ($.a.b + 2).c.d'::jsonpath; + jsonpath +------------------------------- + (1 + ($."a"."b" + 2)."c"."d") +(1 row) + +select '1 + ($.a.b > 2).c.d'::jsonpath; + jsonpath +------------------------------- + (1 + ($."a"."b" > 2)."c"."d") +(1 row) + +select '($)'::jsonpath; + jsonpath +---------- + $ +(1 row) + +select '(($))'::jsonpath; + jsonpath +---------- + $ +(1 row) + +select '((($ + 1)).a + ((2)).b ? 
((((@ > 1)) || (exists(@.c)))))'::jsonpath; + jsonpath +------------------------------------------------- + (($ + 1)."a" + 2."b"?(@ > 1 || exists (@."c"))) +(1 row) + +select '$ ? (@.a < 1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < -1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < -1) +(1 row) + +select '$ ? (@.a < +1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < .1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 0.1) +(1 row) + +select '$ ? (@.a < -.1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < -0.1) +(1 row) + +select '$ ? (@.a < +.1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 0.1) +(1 row) + +select '$ ? (@.a < 0.1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 0.1) +(1 row) + +select '$ ? (@.a < -0.1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < -0.1) +(1 row) + +select '$ ? (@.a < +0.1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 0.1) +(1 row) + +select '$ ? (@.a < 10.1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < 10.1) +(1 row) + +select '$ ? (@.a < -10.1)'::jsonpath; + jsonpath +------------------- + $?(@."a" < -10.1) +(1 row) + +select '$ ? (@.a < +10.1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < 10.1) +(1 row) + +select '$ ? (@.a < 1e1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < 10) +(1 row) + +select '$ ? (@.a < -1e1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < -10) +(1 row) + +select '$ ? (@.a < +1e1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < 10) +(1 row) + +select '$ ? (@.a < .1e1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < -.1e1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < -1) +(1 row) + +select '$ ? (@.a < +.1e1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < 0.1e1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < -0.1e1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < -1) +(1 row) + +select '$ ? (@.a < +0.1e1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < 10.1e1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 101) +(1 row) + +select '$ ? (@.a < -10.1e1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < -101) +(1 row) + +select '$ ? (@.a < +10.1e1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 101) +(1 row) + +select '$ ? (@.a < 1e-1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 0.1) +(1 row) + +select '$ ? (@.a < -1e-1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < -0.1) +(1 row) + +select '$ ? (@.a < +1e-1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 0.1) +(1 row) + +select '$ ? (@.a < .1e-1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < 0.01) +(1 row) + +select '$ ? (@.a < -.1e-1)'::jsonpath; + jsonpath +------------------- + $?(@."a" < -0.01) +(1 row) + +select '$ ? (@.a < +.1e-1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < 0.01) +(1 row) + +select '$ ? (@.a < 0.1e-1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < 0.01) +(1 row) + +select '$ ? (@.a < -0.1e-1)'::jsonpath; + jsonpath +------------------- + $?(@."a" < -0.01) +(1 row) + +select '$ ? (@.a < +0.1e-1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < 0.01) +(1 row) + +select '$ ? (@.a < 10.1e-1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < 1.01) +(1 row) + +select '$ ? 
(@.a < -10.1e-1)'::jsonpath; + jsonpath +------------------- + $?(@."a" < -1.01) +(1 row) + +select '$ ? (@.a < +10.1e-1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < 1.01) +(1 row) + +select '$ ? (@.a < 1e+1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < 10) +(1 row) + +select '$ ? (@.a < -1e+1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < -10) +(1 row) + +select '$ ? (@.a < +1e+1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < 10) +(1 row) + +select '$ ? (@.a < .1e+1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < -.1e+1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < -1) +(1 row) + +select '$ ? (@.a < +.1e+1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < 0.1e+1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < -0.1e+1)'::jsonpath; + jsonpath +---------------- + $?(@."a" < -1) +(1 row) + +select '$ ? (@.a < +0.1e+1)'::jsonpath; + jsonpath +--------------- + $?(@."a" < 1) +(1 row) + +select '$ ? (@.a < 10.1e+1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 101) +(1 row) + +select '$ ? (@.a < -10.1e+1)'::jsonpath; + jsonpath +------------------ + $?(@."a" < -101) +(1 row) + +select '$ ? (@.a < +10.1e+1)'::jsonpath; + jsonpath +----------------- + $?(@."a" < 101) +(1 row) + diff --git a/src/test/regress/expected/opr_sanity.out b/src/test/regress/expected/opr_sanity.out index c073a5ac3f..dd7b142c18 100644 --- a/src/test/regress/expected/opr_sanity.out +++ b/src/test/regress/expected/opr_sanity.out @@ -1845,6 +1845,8 @@ ORDER BY 1, 2, 3; 2742 | 9 | ? 2742 | 10 | ?| 2742 | 11 | ?& + 2742 | 15 | @? + 2742 | 16 | @~ 3580 | 1 | < 3580 | 1 | << 3580 | 2 | &< @@ -1910,7 +1912,7 @@ ORDER BY 1, 2, 3; 4000 | 26 | >> 4000 | 27 | >>= 4000 | 28 | ^@ -(123 rows) +(125 rows) -- Check that all opclass search operators have selectivity estimators. 
-- This is not absolutely required, but it seems a reasonable thing diff --git a/src/test/regress/expected/timestamp.out b/src/test/regress/expected/timestamp.out index 4a2fabddd9..43759bd099 100644 --- a/src/test/regress/expected/timestamp.out +++ b/src/test/regress/expected/timestamp.out @@ -1597,6 +1597,29 @@ SELECT '' AS to_char_11, to_char(d1, 'FMIYYY FMIYY FMIY FMI FMIW FMIDDD FMID') | 2001 1 1 1 1 1 1 (65 rows) +SELECT '' AS to_char_12, to_char(d, 'FF1 FF2 FF3 FF4 FF5 FF6 MS US') + FROM (VALUES + ('2018-11-02 12:34:56'::timestamp), + ('2018-11-02 12:34:56.78'), + ('2018-11-02 12:34:56.78901'), + ('2018-11-02 12:34:56.78901234') + ) d(d); + to_char_12 | to_char +------------+---------------------------------------- + | 0 00 000 0000 00000 000000 000 000000 + | 7 78 780 7800 78000 780000 780 780000 + | 7 78 789 7890 78901 789010 789 789010 + | 7 78 789 7890 78901 789012 789 789012 +(4 rows) + +-- FF7-FF9 are not supported +SELECT to_char('2018-11-02 12:34:56'::timestamp, 'FF7'); +ERROR: datetime formatting field "FF7" is not supported +SELECT to_char('2018-11-02 12:34:56'::timestamp, 'FF8'); +ERROR: datetime formatting field "FF8" is not supported +SELECT to_char('2018-11-02 12:34:56'::timestamp, 'FF9'); +ERROR: datetime formatting field "FF9" is not supported + -- timestamp numeric fields constructor SELECT make_timestamp(2014,12,28,6,30,45.887); make_timestamp diff --git a/src/test/regress/expected/timestamptz.out b/src/test/regress/expected/timestamptz.out index 2340f30794..f4d1385196 100644 --- a/src/test/regress/expected/timestamptz.out +++ b/src/test/regress/expected/timestamptz.out @@ -1699,6 +1699,28 @@ SELECT '' AS to_char_11, to_char(d1, 'FMIYYY FMIYY FMIY FMI FMIW FMIDDD FMID') | 2001 1 1 1 1 1 1 (66 rows) +SELECT '' AS to_char_12, to_char(d, 'FF1 FF2 FF3 FF4 FF5 FF6 MS US') + FROM (VALUES + ('2018-11-02 12:34:56'::timestamptz), + ('2018-11-02 12:34:56.78'), + ('2018-11-02 12:34:56.78901'), + ('2018-11-02 12:34:56.78901234') + ) d(d); + to_char_12 | to_char +------------+---------------------------------------- + | 0 00 000 0000 00000 000000 000 000000 + | 7 78 780 7800 78000 780000 780 780000 + | 7 78 789 7890 78901 789010 789 789010 + | 7 78 789 7890 78901 789012 789 789012 +(4 rows) + +-- FF7-FF9 are not supported +SELECT to_char('2018-11-02 12:34:56'::timestamptz, 'FF7'); +ERROR: datetime formatting field "FF7" is not supported +SELECT to_char('2018-11-02 12:34:56'::timestamptz, 'FF8'); +ERROR: datetime formatting field "FF8" is not supported +SELECT to_char('2018-11-02 12:34:56'::timestamptz, 'FF9'); +ERROR: datetime formatting field "FF9" is not supported -- Check OF, TZH, TZM with various zone offsets, particularly fractional hours SET timezone = '00:00'; SELECT to_char(now(), 'OF') as "OF", to_char(now(), 'TZH:TZM') as "TZH:TZM"; diff --git a/src/test/regress/parallel_schedule b/src/test/regress/parallel_schedule index 289c658483..05c7b2d31f 100644 --- a/src/test/regress/parallel_schedule +++ b/src/test/regress/parallel_schedule @@ -104,7 +104,12 @@ test: publication subscription # ---------- # Another group of parallel tests # ---------- -test: select_views portals_p2 foreign_key cluster dependency guc bitmapops combocid tsearch tsdicts foreign_data window xmlmap functional_deps advisory_lock json jsonb json_encoding indirect_toast equivclass +test: select_views portals_p2 foreign_key cluster dependency guc bitmapops combocid tsearch tsdicts foreign_data window xmlmap functional_deps advisory_lock indirect_toast equivclass + +# ---------- +# Another group of 
parallel tests (JSON related) +# ---------- +test: json jsonb json_encoding jsonpath json_jsonpath jsonb_jsonpath # ---------- # Another group of parallel tests diff --git a/src/test/regress/serial_schedule b/src/test/regress/serial_schedule index bc43b18c62..b5d1505a14 100644 --- a/src/test/regress/serial_schedule +++ b/src/test/regress/serial_schedule @@ -156,6 +156,9 @@ test: advisory_lock test: json test: jsonb test: json_encoding +test: jsonpath +test: json_jsonpath +test: jsonb_jsonpath test: indirect_toast test: equivclass test: plancache diff --git a/src/test/regress/sql/horology.sql b/src/test/regress/sql/horology.sql index e356dd563e..ef34323be4 100644 --- a/src/test/regress/sql/horology.sql +++ b/src/test/regress/sql/horology.sql @@ -402,6 +402,20 @@ SELECT to_timestamp('2011-12-18 11:38 +05:20', 'YYYY-MM-DD HH12:MI TZH:TZM'); SELECT to_timestamp('2011-12-18 11:38 -05:20', 'YYYY-MM-DD HH12:MI TZH:TZM'); SELECT to_timestamp('2011-12-18 11:38 20', 'YYYY-MM-DD HH12:MI TZM'); +SELECT i, to_timestamp('2018-11-02 12:34:56', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; +SELECT i, to_timestamp('2018-11-02 12:34:56.1', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; +SELECT i, to_timestamp('2018-11-02 12:34:56.12', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; +SELECT i, to_timestamp('2018-11-02 12:34:56.123', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; +SELECT i, to_timestamp('2018-11-02 12:34:56.1234', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; +SELECT i, to_timestamp('2018-11-02 12:34:56.12345', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; +SELECT i, to_timestamp('2018-11-02 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; +SELECT i, to_timestamp('2018-11-02 12:34:56.123456789', 'YYYY-MM-DD HH24:MI:SS.FF' || i) FROM generate_series(1, 6) i; + +-- FF7, FF8, FF9 are not supported +SELECT to_timestamp('123', 'FF7'); +SELECT to_timestamp('123', 'FF8'); +SELECT to_timestamp('123', 'FF9'); + -- -- Check handling of multiple spaces in format and/or input -- diff --git a/src/test/regress/sql/json_jsonpath.sql b/src/test/regress/sql/json_jsonpath.sql new file mode 100644 index 0000000000..824f510199 --- /dev/null +++ b/src/test/regress/sql/json_jsonpath.sql @@ -0,0 +1,379 @@ +select json '{"a": 12}' @? '$.a.b'; +select json '{"a": 12}' @? '$.b'; +select json '{"a": {"a": 12}}' @? '$.a.a'; +select json '{"a": {"a": 12}}' @? '$.*.a'; +select json '{"b": {"a": 12}}' @? '$.*.a'; +select json '{}' @? '$.*'; +select json '{"a": 1}' @? '$.*'; +select json '{"a": {"b": 1}}' @? 'lax $.**{1}'; +select json '{"a": {"b": 1}}' @? 'lax $.**{2}'; +select json '{"a": {"b": 1}}' @? 'lax $.**{3}'; +select json '[]' @? '$[*]'; +select json '[1]' @? '$[*]'; +select json '[1]' @? '$[1]'; +select json '[1]' @? 'strict $[1]'; +select json '[1]' @* 'strict $[1]'; +select json '[1]' @? '$[0]'; +select json '[1]' @? '$[0.3]'; +select json '[1]' @? '$[0.5]'; +select json '[1]' @? '$[0.9]'; +select json '[1]' @? '$[1.2]'; +select json '[1]' @? 'strict $[1.2]'; +select json '[1]' @* 'strict $[1.2]'; +select json '{}' @* 'strict $[0.3]'; +select json '{}' @? 'lax $[0.3]'; +select json '{}' @* 'strict $[1.2]'; +select json '{}' @? 'lax $[1.2]'; +select json '{}' @* 'strict $[-2 to 3]'; +select json '{}' @? 'lax $[-2 to 3]'; + +select json '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] > @.b[*])'; +select json '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? 
(@.a[*] >= @.b[*])'; +select json '{"a": [1,2,3], "b": [3,4,"5"]}' @? '$ ? (@.a[*] >= @.b[*])'; +select json '{"a": [1,2,3], "b": [3,4,"5"]}' @? 'strict $ ? (@.a[*] >= @.b[*])'; +select json '{"a": [1,2,3], "b": [3,4,null]}' @? '$ ? (@.a[*] >= @.b[*])'; +select json '1' @? '$ ? ((@ == "1") is unknown)'; +select json '1' @? '$ ? ((@ == 1) is unknown)'; +select json '[{"a": 1}, {"a": 2}]' @? '$[0 to 1] ? (@.a > 1)'; + +select json '{"a": 12, "b": {"a": 13}}' @* '$.a'; +select json '{"a": 12, "b": {"a": 13}}' @* '$.b'; +select json '{"a": 12, "b": {"a": 13}}' @* '$.*'; +select json '{"a": 12, "b": {"a": 13}}' @* 'lax $.*.a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[*].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[*].*'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[1].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[2].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0,1].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0 to 10].a'; +select json '[12, {"a": 13}, {"b": 14}, "ccc", true]' @* '$[2.5 - 1 to $.size() - 2]'; +select json '1' @* 'lax $[0]'; +select json '1' @* 'lax $[*]'; +select json '{}' @* 'lax $[0]'; +select json '[1]' @* 'lax $[0]'; +select json '[1]' @* 'lax $[*]'; +select json '[1,2,3]' @* 'lax $[*]'; +select json '[]' @* '$[last]'; +select json '[]' @* 'strict $[last]'; +select json '[1]' @* '$[last]'; +select json '{}' @* 'lax $[last]'; +select json '[1,2,3]' @* '$[last]'; +select json '[1,2,3]' @* '$[last - 1]'; +select json '[1,2,3]' @* '$[last ? (@.type() == "number")]'; +select json '[1,2,3]' @* '$[last ? (@.type() == "string")]'; + +select * from jsonpath_query(json '{"a": 10}', '$'); +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)'); +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)', '{"value" : 13}'); +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)', '{"value" : 8}'); +select * from jsonpath_query(json '{"a": 10}', '$.a ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$[*] ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$[0,1] ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$[0 to 2] ? (@ < $value)', '{"value" : 15}'); +select * from jsonpath_query(json '[1,"1",2,"2",null]', '$[*] ? (@ == "1")'); +select * from jsonpath_query(json '[1,"1",2,"2",null]', '$[*] ? (@ == $value)', '{"value" : "1"}'); +select json '[1, "2", null]' @* '$[*] ? (@ != null)'; +select json '[1, "2", null]' @* '$[*] ? (@ == null)'; + +select json '{"a": {"b": 1}}' @* 'lax $.**'; +select json '{"a": {"b": 1}}' @* 'lax $.**{0}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{0 to last}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1 to last}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{2}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{2 to last}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{3 to last}'; +select json '{"a": {"b": 1}}' @* 'lax $.**.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{0}.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1}.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{0 to last}.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1 to last}.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1 to 2}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**.b ? 
(@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0 to last}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1 to last}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1 to 2}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{2 to 3}.b ? (@ > 0)'; + +select json '{"a": {"b": 1}}' @? '$.**.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{0}.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{1}.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{0 to last}.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{1 to last}.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{1 to 2}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{0}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{0 to last}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1 to last}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1 to 2}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{2 to 3}.b ? ( @ > 0)'; + +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.x))'; +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.y))'; +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.x ? (@ >= 2) ))'; + +--test ternary logic +select + x, y, + jsonpath_query( + json '[true, false, null]', + '$[*] ? (@ == true && ($x == true && $y == true) || + @ == false && !($x == true && $y == true) || + @ == null && ($x == true && $y == true) is unknown)', + json_build_object('x', x, 'y', y) + ) as "x && y" +from + (values (json 'true'), ('false'), ('"null"')) x(x), + (values (json 'true'), ('false'), ('"null"')) y(y); + +select + x, y, + jsonpath_query( + json '[true, false, null]', + '$[*] ? (@ == true && ($x == true || $y == true) || + @ == false && !($x == true || $y == true) || + @ == null && ($x == true || $y == true) is unknown)', + json_build_object('x', x, 'y', y) + ) as "x || y" +from + (values (json 'true'), ('false'), ('"null"')) x(x), + (values (json 'true'), ('false'), ('"null"')) y(y); + +select json '{"a": 1, "b": 1}' @? '$ ? (.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$ ? (.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$.c ? (.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$.c ? ($.c.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$.* ? (.a == .b)'; +select json '{"a": 1, "b": 1}' @? '$.** ? (.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$.** ? (.a == .b)'; + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == 1 + 1)'; +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == (1 + 1))'; +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == .b + 1)'; +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == (.b + 1))'; +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == - 1)'; +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == -1)'; +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == -.b)'; +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == - .b)'; +select json '{"c": {"a": 0, "b": 1}}' @? '$.** ? (.a == 1 - .b)'; +select json '{"c": {"a": 2, "b": 1}}' @? '$.** ? (.a == 1 - - .b)'; +select json '{"c": {"a": 0, "b": 1}}' @? '$.** ? (.a == 1 - +.b)'; +select json '[1,2,3]' @? '$ ? (+@[*] > +2)'; +select json '[1,2,3]' @? '$ ? (+@[*] > +3)'; +select json '[1,2,3]' @? 
'$ ? (-@[*] < -2)'; +select json '[1,2,3]' @? '$ ? (-@[*] < -3)'; +select json '1' @? '$ ? ($ > 0)'; + +-- arithmetic errors +select json '[1,2,0,3]' @* '$[*] ? (2 / @ > 0)'; +select json '[1,2,0,3]' @* '$[*] ? ((2 / @ > 0) is unknown)'; +select json '0' @* '1 / $'; + +-- unwrapping of operator arguments in lax mode +select json '{"a": [2]}' @* 'lax $.a * 3'; +select json '{"a": [2]}' @* 'lax $.a + 3'; +select json '{"a": [2, 3, 4]}' @* 'lax -$.a'; +-- should fail +select json '{"a": [1, 2]}' @* 'lax $.a * 3'; + +-- extension: boolean expressions +select json '2' @* '$ > 1'; +select json '2' @* '$ <= 1'; +select json '2' @* '$ == "2"'; + +select json '2' @~ '$ > 1'; +select json '2' @~ '$ <= 1'; +select json '2' @~ '$ == "2"'; +select json '2' @~ '1'; +select json '{}' @~ '$'; +select json '[]' @~ '$'; +select json '[1,2,3]' @~ '$[*]'; +select json '[]' @~ '$[*]'; +select jsonpath_predicate(json '[[1, true], [2, false]]', 'strict $[*] ? (@[0] > $x) [1]', '{"x": 1}'); +select jsonpath_predicate(json '[[1, true], [2, false]]', 'strict $[*] ? (@[0] < $x) [1]', '{"x": 2}'); + +select json '[null,1,true,"a",[],{}]' @* '$.type()'; +select json '[null,1,true,"a",[],{}]' @* 'lax $.type()'; +select json '[null,1,true,"a",[],{}]' @* '$[*].type()'; +select json 'null' @* 'null.type()'; +select json 'null' @* 'true.type()'; +select json 'null' @* '123.type()'; +select json 'null' @* '"123".type()'; + +select json '{"a": 2}' @* '($.a - 5).abs() + 10'; +select json '{"a": 2.5}' @* '-($.a * $.a).floor() + 10'; +select json '[1, 2, 3]' @* '($[*] > 2) ? (@ == true)'; +select json '[1, 2, 3]' @* '($[*] > 3).type()'; +select json '[1, 2, 3]' @* '($[*].a > 3).type()'; +select json '[1, 2, 3]' @* 'strict ($[*].a > 3).type()'; + +select json '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'strict $[*].size()'; +select json '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'lax $[*].size()'; + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].abs()'; +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].floor()'; +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling()'; +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs()'; +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs().type()'; + +select json '[{},1]' @* '$[*].keyvalue()'; +select json '{}' @* '$.keyvalue()'; +select json '{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}' @* '$.keyvalue()'; +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* '$[*].keyvalue()'; +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'strict $.keyvalue()'; +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'lax $.keyvalue()'; + +select json 'null' @* '$.double()'; +select json 'true' @* '$.double()'; +select json '[]' @* '$.double()'; +select json '[]' @* 'strict $.double()'; +select json '{}' @* '$.double()'; +select json '1.23' @* '$.double()'; +select json '"1.23"' @* '$.double()'; +select json '"1.23aaa"' @* '$.double()'; + +select json '["", "a", "abc", "abcabc"]' @* '$[*] ? (@ starts with "abc")'; +select json '["", "a", "abc", "abcabc"]' @* 'strict $ ? (@[*] starts with "abc")'; +select json '["", "a", "abd", "abdabc"]' @* 'strict $ ? (@[*] starts with "abc")'; +select json '["abc", "abcabc", null, 1]' @* 'strict $ ? (@[*] starts with "abc")'; +select json '["abc", "abcabc", null, 1]' @* 'strict $ ? ((@[*] starts with "abc") is unknown)'; +select json '[[null, 1, "abc", "abcabc"]]' @* 'lax $ ? (@[*] starts with "abc")'; +select json '[[null, 1, "abd", "abdabc"]]' @* 'lax $ ? 
((@[*] starts with "abc") is unknown)'; +select json '[null, 1, "abd", "abdabc"]' @* 'lax $[*] ? ((@ starts with "abc") is unknown)'; + +select json '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c")'; +select json '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c" flag "i")'; + +select json 'null' @* '$.datetime()'; +select json 'true' @* '$.datetime()'; +select json '[]' @* '$.datetime()'; +select json '[]' @* 'strict $.datetime()'; +select json '{}' @* '$.datetime()'; +select json '""' @* '$.datetime()'; + +select json '"10-03-2017"' @* '$.datetime("dd-mm-yyyy")'; +select json '"10-03-2017"' @* '$.datetime("dd-mm-yyyy").type()'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy").type()'; + +select json '"10-03-2017 12:34"' @* ' $.datetime("dd-mm-yyyy HH24:MI").type()'; +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()'; +select json '"12:34:56"' @* '$.datetime("HH24:MI:SS").type()'; +select json '"12:34:56 +05:20"' @* '$.datetime("HH24:MI:SS TZH:TZM").type()'; + +set time zone '+00'; + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+00")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+00:12")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "-00:12:34")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "UTC")'; +select json '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select json '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select json '"12:34"' @* '$.datetime("HH24:MI")'; +select json '"12:34"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34"' @* '$.datetime("HH24:MI TZH", "+00")'; +select json '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; +select json '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + +set time zone '+10'; + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+10")'; +select json '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select json '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select json '"12:34"' @* '$.datetime("HH24:MI")'; +select json '"12:34"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34"' @* '$.datetime("HH24:MI TZH", "+10")'; +select json '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; +select json '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + +set time zone default; + +select json '"2017-03-10"' @* '$.datetime().type()'; 
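+-- Note: these template-free datetime() calls presumably rely on automatic
+-- format recognition; the paired type() calls check which of date, time,
+-- timetz, timestamp or timestamptz was inferred from the input string.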
+select json '"2017-03-10"' @* '$.datetime()'; +select json '"2017-03-10 12:34:56"' @* '$.datetime().type()'; +select json '"2017-03-10 12:34:56"' @* '$.datetime()'; +select json '"2017-03-10 12:34:56 +3"' @* '$.datetime().type()'; +select json '"2017-03-10 12:34:56 +3"' @* '$.datetime()'; +select json '"2017-03-10 12:34:56 +3:10"' @* '$.datetime().type()'; +select json '"2017-03-10 12:34:56 +3:10"' @* '$.datetime()'; +select json '"12:34:56"' @* '$.datetime().type()'; +select json '"12:34:56"' @* '$.datetime()'; +select json '"12:34:56 +3"' @* '$.datetime().type()'; +select json '"12:34:56 +3"' @* '$.datetime()'; +select json '"12:34:56 +3:10"' @* '$.datetime().type()'; +select json '"12:34:56 +3:10"' @* '$.datetime()'; + +set time zone '+00'; + +-- date comparison +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))'; +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))'; +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))'; + +-- time comparison +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))'; +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))'; +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))'; + +-- timetz comparison +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))'; +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ >= "12:35 +1".datetime("HH24:MI TZH"))'; +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))'; + +-- timestamp comparison +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? 
(@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + +-- timestamptz comparison +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + +set time zone default; + +-- jsonpath operators + +SELECT json '[{"a": 1}, {"a": 2}]' @* '$[*]'; +SELECT json '[{"a": 1}, {"a": 2}]' @* '$[*] ? (@.a > 10)'; + +SELECT json '[{"a": 1}, {"a": 2}]' @# '$[*].a'; +SELECT json '[{"a": 1}, {"a": 2}]' @# '$[*].a ? (@ == 1)'; +SELECT json '[{"a": 1}, {"a": 2}]' @# '$[*].a ? (@ > 10)'; + +SELECT json '[{"a": 1}, {"a": 2}]' @? '$[*] ? (@.a > 1)'; +SELECT json '[{"a": 1}, {"a": 2}]' @? '$[*].a ? (@ > 2)'; + +SELECT json '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 1'; +SELECT json '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 2'; diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql index bd82fd13f7..1430a98ac3 100644 --- a/src/test/regress/sql/jsonb.sql +++ b/src/test/regress/sql/jsonb.sql @@ -735,6 +735,24 @@ SELECT count(*) FROM testjsonb WHERE j ? 'public'; SELECT count(*) FROM testjsonb WHERE j ? 'bar'; SELECT count(*) FROM testjsonb WHERE j ?| ARRAY['public','disabled']; SELECT count(*) FROM testjsonb WHERE j ?& ARRAY['public','disabled']; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.bar)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) || exists($.disabled)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) && exists($.disabled)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? 
(@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; +SELECT count(*) FROM testjsonb WHERE j @? '$'; +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; CREATE INDEX jidx ON testjsonb USING gin (j); SET enable_seqscan = off; @@ -753,6 +771,39 @@ SELECT count(*) FROM testjsonb WHERE j ? 'bar'; SELECT count(*) FROM testjsonb WHERE j ?| ARRAY['public','disabled']; SELECT count(*) FROM testjsonb WHERE j ?& ARRAY['public','disabled']; +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.wait == null))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.wait ? (@ == null))'; +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "foo"'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "bar"'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.array[*] == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array ? (@[*] == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array[*] ? (@ == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.bar)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) || exists($.disabled)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) && exists($.disabled)'; +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.array[*] == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$.array ? (@[*] == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$.array[*] ? (@ == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$'; +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + -- array exists - array elements should behave as keys (for GIN index scans too) CREATE INDEX jidx_array ON testjsonb USING gin((j->'array')); SELECT count(*) from testjsonb WHERE j->'array' ? 'bar'; @@ -802,6 +853,34 @@ SELECT count(*) FROM testjsonb WHERE j @> '{"age":25.0}'; -- exercise GIN_SEARCH_MODE_ALL SELECT count(*) FROM testjsonb WHERE j @> '{}'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.wait == null))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.wait ? 
(@ == null))'; +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "foo"'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "bar"'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.array[*] == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array ? (@[*] == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array[*] ? (@ == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; + +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.array[*] == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$.array ? (@[*] == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$.array[*] ? (@ == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$'; +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + RESET enable_seqscan; DROP INDEX jidx; diff --git a/src/test/regress/sql/jsonb_jsonpath.sql b/src/test/regress/sql/jsonb_jsonpath.sql new file mode 100644 index 0000000000..43f34ef5d3 --- /dev/null +++ b/src/test/regress/sql/jsonb_jsonpath.sql @@ -0,0 +1,385 @@ +select jsonb '{"a": 12}' @? '$.a.b'; +select jsonb '{"a": 12}' @? '$.b'; +select jsonb '{"a": {"a": 12}}' @? '$.a.a'; +select jsonb '{"a": {"a": 12}}' @? '$.*.a'; +select jsonb '{"b": {"a": 12}}' @? '$.*.a'; +select jsonb '{}' @? '$.*'; +select jsonb '{"a": 1}' @? '$.*'; +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{1}'; +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{2}'; +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{3}'; +select jsonb '[]' @? '$[*]'; +select jsonb '[1]' @? '$[*]'; +select jsonb '[1]' @? '$[1]'; +select jsonb '[1]' @? 'strict $[1]'; +select jsonb '[1]' @* 'strict $[1]'; +select jsonb '[1]' @? '$[0]'; +select jsonb '[1]' @? '$[0.3]'; +select jsonb '[1]' @? '$[0.5]'; +select jsonb '[1]' @? '$[0.9]'; +select jsonb '[1]' @? '$[1.2]'; +select jsonb '[1]' @? 'strict $[1.2]'; +select jsonb '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] > @.b[*])'; +select jsonb '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] >= @.b[*])'; +select jsonb '{"a": [1,2,3], "b": [3,4,"5"]}' @? '$ ? (@.a[*] >= @.b[*])'; +select jsonb '{"a": [1,2,3], "b": [3,4,"5"]}' @? 'strict $ ? (@.a[*] >= @.b[*])'; +select jsonb '{"a": [1,2,3], "b": [3,4,null]}' @? '$ ? (@.a[*] >= @.b[*])'; +select jsonb '1' @? '$ ? ((@ == "1") is unknown)'; +select jsonb '1' @? '$ ? ((@ == 1) is unknown)'; +select jsonb '[{"a": 1}, {"a": 2}]' @? '$[0 to 1] ? 
(@.a > 1)'; + +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.a'; +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.b'; +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.*'; +select jsonb '{"a": 12, "b": {"a": 13}}' @* 'lax $.*.a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[*].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[*].*'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[1].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[2].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0,1].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $[0 to 10].a'; +select jsonb '[12, {"a": 13}, {"b": 14}, "ccc", true]' @* '$[2.5 - 1 to $.size() - 2]'; +select jsonb '1' @* 'lax $[0]'; +select jsonb '1' @* 'lax $[*]'; +select jsonb '[1]' @* 'lax $[0]'; +select jsonb '[1]' @* 'lax $[*]'; +select jsonb '[1,2,3]' @* 'lax $[*]'; +select jsonb '[]' @* '$[last]'; +select jsonb '[]' @* 'strict $[last]'; +select jsonb '[1]' @* '$[last]'; +select jsonb '[1,2,3]' @* '$[last]'; +select jsonb '[1,2,3]' @* '$[last - 1]'; +select jsonb '[1,2,3]' @* '$[last ? (@.type() == "number")]'; +select jsonb '[1,2,3]' @* '$[last ? (@.type() == "string")]'; + +select * from jsonpath_query(jsonb '{"a": 10}', '$'); +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)'); +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)', '{"value" : 13}'); +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)', '{"value" : 8}'); +select * from jsonpath_query(jsonb '{"a": 10}', '$.a ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$[*] ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$[0,1] ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$[0 to 2] ? (@ < $value)', '{"value" : 15}'); +select * from jsonpath_query(jsonb '[1,"1",2,"2",null]', '$[*] ? (@ == "1")'); +select * from jsonpath_query(jsonb '[1,"1",2,"2",null]', '$[*] ? (@ == $value)', '{"value" : "1"}'); +select * from jsonpath_query(jsonb '[1, "2", null]', '$[*] ? (@ != null)'); +select * from jsonpath_query(jsonb '[1, "2", null]', '$[*] ? (@ == null)'); + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0 to last}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1 to last}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{2}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{2 to last}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{3 to last}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0}.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1}.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0 to last}.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1 to last}.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1 to 2}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0 to last}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1 to last}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1 to 2}.b ? 
(@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{2 to 3}.b ? (@ > 0)'; + +select jsonb '{"a": {"b": 1}}' @? '$.**.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{0}.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{1}.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{0 to last}.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{1 to last}.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{1 to 2}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{0}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{0 to last}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1 to last}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1 to 2}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{2 to 3}.b ? ( @ > 0)'; + +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.x))'; +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.y))'; +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.x ? (@ >= 2) ))'; + +--test ternary logic +select + x, y, + jsonpath_query( + jsonb '[true, false, null]', + '$[*] ? (@ == true && ($x == true && $y == true) || + @ == false && !($x == true && $y == true) || + @ == null && ($x == true && $y == true) is unknown)', + jsonb_build_object('x', x, 'y', y) + ) as "x && y" +from + (values (jsonb 'true'), ('false'), ('"null"')) x(x), + (values (jsonb 'true'), ('false'), ('"null"')) y(y); + +select + x, y, + jsonpath_query( + jsonb '[true, false, null]', + '$[*] ? (@ == true && ($x == true || $y == true) || + @ == false && !($x == true || $y == true) || + @ == null && ($x == true || $y == true) is unknown)', + jsonb_build_object('x', x, 'y', y) + ) as "x || y" +from + (values (jsonb 'true'), ('false'), ('"null"')) x(x), + (values (jsonb 'true'), ('false'), ('"null"')) y(y); + +select jsonb '{"a": 1, "b":1}' @? '$ ? (.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$ ? (.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.c ? (.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.c ? ($.c.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.* ? (.a == .b)'; +select jsonb '{"a": 1, "b":1}' @? '$.** ? (.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.** ? (.a == .b)'; + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == 1 + 1)'; +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == (1 + 1))'; +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == .b + 1)'; +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == (.b + 1))'; +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == - 1)'; +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == -1)'; +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == -.b)'; +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == - .b)'; +select jsonb '{"c": {"a": 0, "b":1}}' @? '$.** ? (.a == 1 - .b)'; +select jsonb '{"c": {"a": 2, "b":1}}' @? '$.** ? (.a == 1 - - .b)'; +select jsonb '{"c": {"a": 0, "b":1}}' @? '$.** ? (.a == 1 - +.b)'; +select jsonb '[1,2,3]' @? '$ ? (+@[*] > +2)'; +select jsonb '[1,2,3]' @? '$ ? (+@[*] > +3)'; +select jsonb '[1,2,3]' @? '$ ? (-@[*] < -2)'; +select jsonb '[1,2,3]' @? '$ ? (-@[*] < -3)'; +select jsonb '1' @? '$ ? ($ > 0)'; + +-- arithmetic errors +select jsonb '[1,2,0,3]' @* '$[*] ? (2 / @ > 0)'; +select jsonb '[1,2,0,3]' @* '$[*] ? 
((2 / @ > 0) is unknown)'; +select jsonb '0' @* '1 / $'; + +-- unwrapping of operator arguments in lax mode +select jsonb '{"a": [2]}' @* 'lax $.a * 3'; +select jsonb '{"a": [2]}' @* 'lax $.a + 3'; +select jsonb '{"a": [2, 3, 4]}' @* 'lax -$.a'; +-- should fail +select jsonb '{"a": [1, 2]}' @* 'lax $.a * 3'; + +-- extension: boolean expressions +select jsonb '2' @* '$ > 1'; +select jsonb '2' @* '$ <= 1'; +select jsonb '2' @* '$ == "2"'; + +select jsonb '2' @~ '$ > 1'; +select jsonb '2' @~ '$ <= 1'; +select jsonb '2' @~ '$ == "2"'; +select jsonb '2' @~ '1'; +select jsonb '{}' @~ '$'; +select jsonb '[]' @~ '$'; +select jsonb '[1,2,3]' @~ '$[*]'; +select jsonb '[]' @~ '$[*]'; +select jsonpath_predicate(jsonb '[[1, true], [2, false]]', 'strict $[*] ? (@[0] > $x) [1]', '{"x": 1}'); +select jsonpath_predicate(jsonb '[[1, true], [2, false]]', 'strict $[*] ? (@[0] < $x) [1]', '{"x": 2}'); + +select jsonb '[null,1,true,"a",[],{}]' @* '$.type()'; +select jsonb '[null,1,true,"a",[],{}]' @* 'lax $.type()'; +select jsonb '[null,1,true,"a",[],{}]' @* '$[*].type()'; +select jsonb 'null' @* 'null.type()'; +select jsonb 'null' @* 'true.type()'; +select jsonb 'null' @* '123.type()'; +select jsonb 'null' @* '"123".type()'; + +select jsonb '{"a": 2}' @* '($.a - 5).abs() + 10'; +select jsonb '{"a": 2.5}' @* '-($.a * $.a).floor() + 10'; +select jsonb '[1, 2, 3]' @* '($[*] > 2) ? (@ == true)'; +select jsonb '[1, 2, 3]' @* '($[*] > 3).type()'; +select jsonb '[1, 2, 3]' @* '($[*].a > 3).type()'; +select jsonb '[1, 2, 3]' @* 'strict ($[*].a > 3).type()'; + +select jsonb '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'strict $[*].size()'; +select jsonb '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'lax $[*].size()'; + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].abs()'; +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].floor()'; +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling()'; +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs()'; +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs().type()'; + +select jsonb '[{},1]' @* '$[*].keyvalue()'; +select jsonb '{}' @* '$.keyvalue()'; +select jsonb '{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}' @* '$.keyvalue()'; +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* '$[*].keyvalue()'; +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'strict $.keyvalue()'; +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'lax $.keyvalue()'; + +select jsonb 'null' @* '$.double()'; +select jsonb 'true' @* '$.double()'; +select jsonb '[]' @* '$.double()'; +select jsonb '[]' @* 'strict $.double()'; +select jsonb '{}' @* '$.double()'; +select jsonb '1.23' @* '$.double()'; +select jsonb '"1.23"' @* '$.double()'; +select jsonb '"1.23aaa"' @* '$.double()'; + +select jsonb '["", "a", "abc", "abcabc"]' @* '$[*] ? (@ starts with "abc")'; +select jsonb '["", "a", "abc", "abcabc"]' @* 'strict $ ? (@[*] starts with "abc")'; +select jsonb '["", "a", "abd", "abdabc"]' @* 'strict $ ? (@[*] starts with "abc")'; +select jsonb '["abc", "abcabc", null, 1]' @* 'strict $ ? (@[*] starts with "abc")'; +select jsonb '["abc", "abcabc", null, 1]' @* 'strict $ ? ((@[*] starts with "abc") is unknown)'; +select jsonb '[[null, 1, "abc", "abcabc"]]' @* 'lax $ ? (@[*] starts with "abc")'; +select jsonb '[[null, 1, "abd", "abdabc"]]' @* 'lax $ ? ((@[*] starts with "abc") is unknown)'; +select jsonb '[null, 1, "abd", "abdabc"]' @* 'lax $[*] ? 
((@ starts with "abc") is unknown)'; + +select jsonb '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c")'; +select jsonb '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c" flag "i")'; + +select jsonb 'null' @* '$.datetime()'; +select jsonb 'true' @* '$.datetime()'; +select jsonb '1' @* '$.datetime()'; +select jsonb '[]' @* '$.datetime()'; +select jsonb '[]' @* 'strict $.datetime()'; +select jsonb '{}' @* '$.datetime()'; +select jsonb '""' @* '$.datetime()'; + +select jsonb '"10-03-2017"' @* '$.datetime("dd-mm-yyyy")'; +select jsonb '"10-03-2017"' @* '$.datetime("dd-mm-yyyy").type()'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy").type()'; + +select jsonb '"10-03-2017 12:34"' @* ' $.datetime("dd-mm-yyyy HH24:MI").type()'; +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()'; +select jsonb '"12:34:56"' @* '$.datetime("HH24:MI:SS").type()'; +select jsonb '"12:34:56 +05:20"' @* '$.datetime("HH24:MI:SS TZH:TZM").type()'; + +set time zone '+00'; + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+00")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+00:12")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "-00:12:34")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "UTC")'; +select jsonb '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select jsonb '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH", "+00")'; +select jsonb '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; +select jsonb '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + +set time zone '+10'; + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH", "+10")'; +select jsonb '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select jsonb '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH", "+10")'; +select jsonb '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; +select jsonb '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + +set time zone default; + +select jsonb '"2017-03-10"' @* '$.datetime().type()'; +select jsonb 
'"2017-03-10"' @* '$.datetime()'; +select jsonb '"2017-03-10 12:34:56"' @* '$.datetime().type()'; +select jsonb '"2017-03-10 12:34:56"' @* '$.datetime()'; +select jsonb '"2017-03-10 12:34:56 +3"' @* '$.datetime().type()'; +select jsonb '"2017-03-10 12:34:56 +3"' @* '$.datetime()'; +select jsonb '"2017-03-10 12:34:56 +3:10"' @* '$.datetime().type()'; +select jsonb '"2017-03-10 12:34:56 +3:10"' @* '$.datetime()'; +select jsonb '"12:34:56"' @* '$.datetime().type()'; +select jsonb '"12:34:56"' @* '$.datetime()'; +select jsonb '"12:34:56 +3"' @* '$.datetime().type()'; +select jsonb '"12:34:56 +3"' @* '$.datetime()'; +select jsonb '"12:34:56 +3:10"' @* '$.datetime().type()'; +select jsonb '"12:34:56 +3:10"' @* '$.datetime()'; + +set time zone '+00'; + +-- date comparison +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))'; +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))'; +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))'; + +-- time comparison +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))'; +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))'; +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))'; + +-- timetz comparison +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))'; +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ >= "12:35 +1".datetime("HH24:MI TZH"))'; +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))'; + +-- timestamp comparison +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? 
(@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + +-- timestamptz comparison +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + +set time zone default; + +-- jsonpath operators + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '$[*]'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '$[*] ? (@.a > 10)'; + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @# '$[*].a'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @# '$[*].a ? (@ == 1)'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @# '$[*].a ? (@ > 10)'; + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @? '$[*].a ? (@ > 1)'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @? '$[*] ? 
(@.a > 2)'; + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 1'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 2'; diff --git a/src/test/regress/sql/jsonpath.sql b/src/test/regress/sql/jsonpath.sql new file mode 100644 index 0000000000..8a3ea423b8 --- /dev/null +++ b/src/test/regress/sql/jsonpath.sql @@ -0,0 +1,146 @@ +--jsonpath io + +select ''::jsonpath; +select '$'::jsonpath; +select 'strict $'::jsonpath; +select 'lax $'::jsonpath; +select '$.a'::jsonpath; +select '$.a.v'::jsonpath; +select '$.a.*'::jsonpath; +select '$.*[*]'::jsonpath; +select '$.a[*]'::jsonpath; +select '$.a[*][*]'::jsonpath; +select '$[*]'::jsonpath; +select '$[0]'::jsonpath; +select '$[*][0]'::jsonpath; +select '$[*].a'::jsonpath; +select '$[*][0].a.b'::jsonpath; +select '$.a.**.b'::jsonpath; +select '$.a.**{2}.b'::jsonpath; +select '$.a.**{2 to 2}.b'::jsonpath; +select '$.a.**{2 to 5}.b'::jsonpath; +select '$.a.**{0 to 5}.b'::jsonpath; +select '$.a.**{5 to last}.b'::jsonpath; +select '$+1'::jsonpath; +select '$-1'::jsonpath; +select '$--+1'::jsonpath; +select '$.a/+-1'::jsonpath; + +select '"\b\f\r\n\t\v\"\''\\"'::jsonpath; +select '''\b\f\r\n\t\v\"\''\\'''::jsonpath; +select '"\x50\u0067\u{53}\u{051}\u{00004C}"'::jsonpath; +select '''\x50\u0067\u{53}\u{051}\u{00004C}'''::jsonpath; +select '$.foo\x50\u0067\u{53}\u{051}\u{00004C}\t\"bar'::jsonpath; + +select '$.g ? ($.a == 1)'::jsonpath; +select '$.g ? (@ == 1)'::jsonpath; +select '$.g ? (.a == 1)'::jsonpath; +select '$.g ? (@.a == 1)'::jsonpath; +select '$.g ? (@.a == 1 || @.a == 4)'::jsonpath; +select '$.g ? (@.a == 1 && @.a == 4)'::jsonpath; +select '$.g ? (@.a == 1 || @.a == 4 && @.b == 7)'::jsonpath; +select '$.g ? (@.a == 1 || !(@.a == 4) && @.b == 7)'::jsonpath; +select '$.g ? (@.a == 1 || !(@.x >= 123 || @.a == 4) && @.b == 7)'::jsonpath; +select '$.g ? (.x >= @[*]?(@.a > "abc"))'::jsonpath; +select '$.g ? ((@.x >= 123 || @.a == 4) is unknown)'::jsonpath; +select '$.g ? (exists (.x))'::jsonpath; +select '$.g ? (exists (@.x ? (@ == 14)))'::jsonpath; +select '$.g ? (exists (.x ? (@ == 14)))'::jsonpath; +select '$.g ? ((@.x >= 123 || @.a == 4) && exists (.x ? (@ == 14)))'::jsonpath; +select '$.g ? (+@.x >= +-(+@.a + 2))'::jsonpath; + +select '$a'::jsonpath; +select '$a.b'::jsonpath; +select '$a[*]'::jsonpath; +select '$.g ? (@.zip == $zip)'::jsonpath; +select '$.a[1,2, 3 to 16]'::jsonpath; +select '$.a[$a + 1, ($b[*]) to -($[0] * 2)]'::jsonpath; +select '$.a[$.a.size() - 3]'::jsonpath; +select 'last'::jsonpath; +select '"last"'::jsonpath; +select '$.last'::jsonpath; +select '$ ? (last > 0)'::jsonpath; +select '$[last]'::jsonpath; +select '$[$[0] ? (last > 0)]'::jsonpath; + +select 'null.type()'::jsonpath; +select '1.type()'::jsonpath; +select '"aaa".type()'::jsonpath; +select 'true.type()'::jsonpath; +select '$.datetime()'::jsonpath; +select '$.datetime("datetime template")'::jsonpath; + +select '$ ? (@ starts with "abc")'::jsonpath; +select '$ ? (@ starts with $var)'::jsonpath; + +select '$ ? (@ like_regex "(invalid pattern")'::jsonpath; +select '$ ? (@ like_regex "pattern")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "i")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "is")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "isim")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "xsms")'::jsonpath; +select '$ ? 
(@ like_regex "pattern" flag "a")'::jsonpath; + +select '$ < 1'::jsonpath; +select '($ < 1) || $.a.b <= $x'::jsonpath; +select '@ + 1'::jsonpath; + +select '($).a.b'::jsonpath; +select '($.a.b).c.d'::jsonpath; +select '($.a.b + -$.x.y).c.d'::jsonpath; +select '(-+$.a.b).c.d'::jsonpath; +select '1 + ($.a.b + 2).c.d'::jsonpath; +select '1 + ($.a.b > 2).c.d'::jsonpath; +select '($)'::jsonpath; +select '(($))'::jsonpath; +select '((($ + 1)).a + ((2)).b ? ((((@ > 1)) || (exists(@.c)))))'::jsonpath; + +select '$ ? (@.a < 1)'::jsonpath; +select '$ ? (@.a < -1)'::jsonpath; +select '$ ? (@.a < +1)'::jsonpath; +select '$ ? (@.a < .1)'::jsonpath; +select '$ ? (@.a < -.1)'::jsonpath; +select '$ ? (@.a < +.1)'::jsonpath; +select '$ ? (@.a < 0.1)'::jsonpath; +select '$ ? (@.a < -0.1)'::jsonpath; +select '$ ? (@.a < +0.1)'::jsonpath; +select '$ ? (@.a < 10.1)'::jsonpath; +select '$ ? (@.a < -10.1)'::jsonpath; +select '$ ? (@.a < +10.1)'::jsonpath; +select '$ ? (@.a < 1e1)'::jsonpath; +select '$ ? (@.a < -1e1)'::jsonpath; +select '$ ? (@.a < +1e1)'::jsonpath; +select '$ ? (@.a < .1e1)'::jsonpath; +select '$ ? (@.a < -.1e1)'::jsonpath; +select '$ ? (@.a < +.1e1)'::jsonpath; +select '$ ? (@.a < 0.1e1)'::jsonpath; +select '$ ? (@.a < -0.1e1)'::jsonpath; +select '$ ? (@.a < +0.1e1)'::jsonpath; +select '$ ? (@.a < 10.1e1)'::jsonpath; +select '$ ? (@.a < -10.1e1)'::jsonpath; +select '$ ? (@.a < +10.1e1)'::jsonpath; +select '$ ? (@.a < 1e-1)'::jsonpath; +select '$ ? (@.a < -1e-1)'::jsonpath; +select '$ ? (@.a < +1e-1)'::jsonpath; +select '$ ? (@.a < .1e-1)'::jsonpath; +select '$ ? (@.a < -.1e-1)'::jsonpath; +select '$ ? (@.a < +.1e-1)'::jsonpath; +select '$ ? (@.a < 0.1e-1)'::jsonpath; +select '$ ? (@.a < -0.1e-1)'::jsonpath; +select '$ ? (@.a < +0.1e-1)'::jsonpath; +select '$ ? (@.a < 10.1e-1)'::jsonpath; +select '$ ? (@.a < -10.1e-1)'::jsonpath; +select '$ ? (@.a < +10.1e-1)'::jsonpath; +select '$ ? (@.a < 1e+1)'::jsonpath; +select '$ ? (@.a < -1e+1)'::jsonpath; +select '$ ? (@.a < +1e+1)'::jsonpath; +select '$ ? (@.a < .1e+1)'::jsonpath; +select '$ ? (@.a < -.1e+1)'::jsonpath; +select '$ ? (@.a < +.1e+1)'::jsonpath; +select '$ ? (@.a < 0.1e+1)'::jsonpath; +select '$ ? (@.a < -0.1e+1)'::jsonpath; +select '$ ? (@.a < +0.1e+1)'::jsonpath; +select '$ ? (@.a < 10.1e+1)'::jsonpath; +select '$ ? (@.a < -10.1e+1)'::jsonpath; +select '$ ? 
(@.a < +10.1e+1)'::jsonpath; diff --git a/src/test/regress/sql/timestamp.sql b/src/test/regress/sql/timestamp.sql index b7957cbb9a..f9d4ccb3ee 100644 --- a/src/test/regress/sql/timestamp.sql +++ b/src/test/regress/sql/timestamp.sql @@ -228,5 +228,18 @@ SELECT '' AS to_char_10, to_char(d1, 'IYYY IYY IY I IW IDDD ID') SELECT '' AS to_char_11, to_char(d1, 'FMIYYY FMIYY FMIY FMI FMIW FMIDDD FMID') FROM TIMESTAMP_TBL; +SELECT '' AS to_char_12, to_char(d, 'FF1 FF2 FF3 FF4 FF5 FF6 MS US') + FROM (VALUES + ('2018-11-02 12:34:56'::timestamp), + ('2018-11-02 12:34:56.78'), + ('2018-11-02 12:34:56.78901'), + ('2018-11-02 12:34:56.78901234') + ) d(d); + +-- FF7-FF9 are not supported +SELECT to_char('2018-11-02 12:34:56'::timestamp, 'FF7'); +SELECT to_char('2018-11-02 12:34:56'::timestamp, 'FF8'); +SELECT to_char('2018-11-02 12:34:56'::timestamp, 'FF9'); + -- timestamp numeric fields constructor SELECT make_timestamp(2014,12,28,6,30,45.887); diff --git a/src/test/regress/sql/timestamptz.sql b/src/test/regress/sql/timestamptz.sql index f17d153fcc..8a8f95a7d6 100644 --- a/src/test/regress/sql/timestamptz.sql +++ b/src/test/regress/sql/timestamptz.sql @@ -248,6 +248,19 @@ SELECT '' AS to_char_10, to_char(d1, 'IYYY IYY IY I IW IDDD ID') SELECT '' AS to_char_11, to_char(d1, 'FMIYYY FMIYY FMIY FMI FMIW FMIDDD FMID') FROM TIMESTAMPTZ_TBL; +SELECT '' AS to_char_12, to_char(d, 'FF1 FF2 FF3 FF4 FF5 FF6 MS US') + FROM (VALUES + ('2018-11-02 12:34:56'::timestamptz), + ('2018-11-02 12:34:56.78'), + ('2018-11-02 12:34:56.78901'), + ('2018-11-02 12:34:56.78901234') + ) d(d); + +-- FF7-FF9 are not supported +SELECT to_char('2018-11-02 12:34:56'::timestamptz, 'FF7'); +SELECT to_char('2018-11-02 12:34:56'::timestamptz, 'FF8'); +SELECT to_char('2018-11-02 12:34:56'::timestamptz, 'FF9'); + -- Check OF, TZH, TZM with various zone offsets, particularly fractional hours SET timezone = '00:00'; SELECT to_char(now(), 'OF') as "OF", to_char(now(), 'TZH:TZM') as "TZH:TZM"; diff --git a/src/tools/msvc/Mkvcbuild.pm b/src/tools/msvc/Mkvcbuild.pm index b562044fa7..9b80e69e55 100644 --- a/src/tools/msvc/Mkvcbuild.pm +++ b/src/tools/msvc/Mkvcbuild.pm @@ -177,6 +177,8 @@ sub mkvcbuild 'src/backend/replication', 'repl_scanner.l', 'repl_gram.y', 'syncrep_scanner.l', 'syncrep_gram.y'); + $postgres->AddFiles('src/backend/utils/adt', 'jsonpath_scan.l', + 'jsonpath_gram.y'); $postgres->AddDefine('BUILDING_DLL'); $postgres->AddLibrary('secur32.lib'); $postgres->AddLibrary('ws2_32.lib'); diff --git a/src/tools/msvc/Solution.pm b/src/tools/msvc/Solution.pm index 68cf812f01..9998e16249 100644 --- a/src/tools/msvc/Solution.pm +++ b/src/tools/msvc/Solution.pm @@ -329,6 +329,24 @@ sub GenerateFiles ); } + if (IsNewer( + 'src/backend/utils/adt/jsonpath_gram.h', + 'src/backend/utils/adt/jsonpath_gram.y')) + { + print "Generating jsonpath_gram.h...\n"; + chdir('src/backend/utils/adt'); + system('perl ../../../tools/msvc/pgbison.pl jsonpath_gram.y'); + chdir('../../../..'); + } + + if (IsNewer( + 'src/include/utils/jsonpath_gram.h', + 'src/backend/utils/adt/jsonpath_gram.h')) + { + copyFile('src/backend/utils/adt/jsonpath_gram.h', + 'src/include/utils/jsonpath_gram.h'); + } + if ($self->{options}->{python} && IsNewer( 'src/pl/plpython/spiexceptions.h',