Diffstat (limited to 'src/test')
 src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl | 13
 src/test/modules/test_json_parser/t/002_inline.pl                       | 15
 src/test/modules/test_json_parser/t/003_test_semantic.pl                | 11
 src/test/modules/test_json_parser/test_json_parser_incremental.c        | 37
 4 files changed, 54 insertions(+), 22 deletions(-)
diff --git a/src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl b/src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl
index 8cc42e8e292..0c663b8e689 100644
--- a/src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl
+++ b/src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl
@@ -13,21 +13,24 @@ use FindBin;
my $test_file = "$FindBin::RealBin/../tiny.json";
-my @exes =
- ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
+my @exes = (
+ [ "test_json_parser_incremental", ],
+ [ "test_json_parser_incremental", "-o", ],
+ [ "test_json_parser_incremental_shlib", ],
+ [ "test_json_parser_incremental_shlib", "-o", ]);
foreach my $exe (@exes)
{
- note "testing executable $exe";
+ note "testing executable @$exe";
# Test the usage error
- my ($stdout, $stderr) = run_command([ $exe, "-c", 10 ]);
+ my ($stdout, $stderr) = run_command([ @$exe, "-c", 10 ]);
like($stderr, qr/Usage:/, 'error message if not enough arguments');
# Test that we get success for small chunk sizes from 64 down to 1.
for (my $size = 64; $size > 0; $size--)
{
- ($stdout, $stderr) = run_command([ $exe, "-c", $size, $test_file ]);
+ ($stdout, $stderr) = run_command([ @$exe, "-c", $size, $test_file ]);
like($stdout, qr/SUCCESS/, "chunk size $size: test succeeds");
is($stderr, "", "chunk size $size: no error output");
diff --git a/src/test/modules/test_json_parser/t/002_inline.pl b/src/test/modules/test_json_parser/t/002_inline.pl
index 5b6c6dc4ae7..71c462b3191 100644
--- a/src/test/modules/test_json_parser/t/002_inline.pl
+++ b/src/test/modules/test_json_parser/t/002_inline.pl
@@ -13,7 +13,7 @@ use Test::More;
use File::Temp qw(tempfile);
my $dir = PostgreSQL::Test::Utils::tempdir;
-my $exe;
+my @exe;
sub test
{
@@ -35,7 +35,7 @@ sub test
foreach my $size (reverse(1 .. $chunk))
{
- my ($stdout, $stderr) = run_command([ $exe, "-c", $size, $fname ]);
+ my ($stdout, $stderr) = run_command([ @exe, "-c", $size, $fname ]);
if (defined($params{error}))
{
@@ -53,13 +53,16 @@ sub test
}
}
-my @exes =
- ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
+my @exes = (
+ [ "test_json_parser_incremental", ],
+ [ "test_json_parser_incremental", "-o", ],
+ [ "test_json_parser_incremental_shlib", ],
+ [ "test_json_parser_incremental_shlib", "-o", ]);
foreach (@exes)
{
- $exe = $_;
- note "testing executable $exe";
+ @exe = @$_;
+ note "testing executable @exe";
test("number", "12345");
test("string", '"hello"');
diff --git a/src/test/modules/test_json_parser/t/003_test_semantic.pl b/src/test/modules/test_json_parser/t/003_test_semantic.pl
index c11480172d3..c57ccdb6602 100644
--- a/src/test/modules/test_json_parser/t/003_test_semantic.pl
+++ b/src/test/modules/test_json_parser/t/003_test_semantic.pl
@@ -16,14 +16,17 @@ use File::Temp qw(tempfile);
my $test_file = "$FindBin::RealBin/../tiny.json";
my $test_out = "$FindBin::RealBin/../tiny.out";
-my @exes =
- ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
+my @exes = (
+ [ "test_json_parser_incremental", ],
+ [ "test_json_parser_incremental", "-o", ],
+ [ "test_json_parser_incremental_shlib", ],
+ [ "test_json_parser_incremental_shlib", "-o", ]);
foreach my $exe (@exes)
{
- note "testing executable $exe";
+ note "testing executable @$exe";
- my ($stdout, $stderr) = run_command([ $exe, "-s", $test_file ]);
+ my ($stdout, $stderr) = run_command([ @$exe, "-s", $test_file ]);
is($stderr, "", "no error output");
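
All three TAP scripts above switch from plain executable names to command-array references, so each binary is now exercised both with and without the new -o switch. A minimal sketch of that pattern, assuming the PostgreSQL::Test::Utils helpers these scripts already use (the chunk size and file name below are illustrative only, and the _shlib variants are omitted for brevity):

use strict;
use warnings;
use PostgreSQL::Test::Utils;
use Test::More;

# Each entry is a complete command prefix; "-o" makes the lexer own its tokens.
my @exes = (
	[ "test_json_parser_incremental" ],
	[ "test_json_parser_incremental", "-o" ]);

foreach my $exe (@exes)
{
	# @$exe expands the command prefix ahead of the remaining arguments, so the
	# second entry runs: test_json_parser_incremental -o -c 64 tiny.json
	my ($stdout, $stderr) = run_command([ @$exe, "-c", 64, "tiny.json" ]);
	like($stdout, qr/SUCCESS/, "@$exe: test succeeds");
	is($stderr, "", "@$exe: no error output");
}

done_testing();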
diff --git a/src/test/modules/test_json_parser/test_json_parser_incremental.c b/src/test/modules/test_json_parser/test_json_parser_incremental.c
index 294e5f74eac..0b02b5203bf 100644
--- a/src/test/modules/test_json_parser/test_json_parser_incremental.c
+++ b/src/test/modules/test_json_parser/test_json_parser_incremental.c
@@ -18,6 +18,10 @@
* If the -s flag is given, the program does semantic processing. This should
* just mirror back the json, albeit with white space changes.
*
+ * If the -o flag is given, the JSONLEX_CTX_OWNS_TOKENS flag is set. (This can
+ * be used in combination with a leak sanitizer; without the option, the parser
+ * may leak memory with invalid JSON.)
+ *
* The argument specifies the file containing the JSON input.
*
*-------------------------------------------------------------------------
@@ -72,6 +76,8 @@ static JsonSemAction sem = {
.scalar = do_scalar
};
+static bool lex_owns_tokens = false;
+
int
main(int argc, char **argv)
{
@@ -88,10 +94,11 @@ main(int argc, char **argv)
char *testfile;
int c;
bool need_strings = false;
+ int ret = 0;
pg_logging_init(argv[0]);
- while ((c = getopt(argc, argv, "c:s")) != -1)
+ while ((c = getopt(argc, argv, "c:os")) != -1)
{
switch (c)
{
@@ -100,6 +107,9 @@ main(int argc, char **argv)
if (chunk_size > BUFSIZE)
pg_fatal("chunk size cannot exceed %d", BUFSIZE);
break;
+ case 'o': /* switch token ownership */
+ lex_owns_tokens = true;
+ break;
case 's': /* do semantic processing */
testsem = &sem;
sem.semstate = palloc(sizeof(struct DoState));
@@ -112,7 +122,7 @@ main(int argc, char **argv)
if (optind < argc)
{
- testfile = pg_strdup(argv[optind]);
+ testfile = argv[optind];
optind++;
}
else
@@ -122,6 +132,7 @@ main(int argc, char **argv)
}
makeJsonLexContextIncremental(&lex, PG_UTF8, need_strings);
+ setJsonLexContextOwnsTokens(&lex, lex_owns_tokens);
initStringInfo(&json);
if ((json_file = fopen(testfile, PG_BINARY_R)) == NULL)
@@ -160,7 +171,8 @@ main(int argc, char **argv)
if (result != JSON_INCOMPLETE)
{
fprintf(stderr, "%s\n", json_errdetail(result, &lex));
- exit(1);
+ ret = 1;
+ goto cleanup;
}
resetStringInfo(&json);
}
@@ -172,15 +184,21 @@ main(int argc, char **argv)
if (result != JSON_SUCCESS)
{
fprintf(stderr, "%s\n", json_errdetail(result, &lex));
- exit(1);
+ ret = 1;
+ goto cleanup;
}
if (!need_strings)
printf("SUCCESS!\n");
break;
}
}
+
+cleanup:
fclose(json_file);
- exit(0);
+ freeJsonLexContext(&lex);
+ free(json.data);
+
+ return ret;
}
/*
@@ -230,7 +248,8 @@ do_object_field_start(void *state, char *fname, bool isnull)
static JsonParseErrorType
do_object_field_end(void *state, char *fname, bool isnull)
{
- /* nothing to do really */
+ if (!lex_owns_tokens)
+ free(fname);
return JSON_SUCCESS;
}
@@ -291,6 +310,9 @@ do_scalar(void *state, char *token, JsonTokenType tokentype)
else
printf("%s", token);
+ if (!lex_owns_tokens)
+ free(token);
+
return JSON_SUCCESS;
}
@@ -343,7 +365,8 @@ usage(const char *progname)
{
fprintf(stderr, "Usage: %s [OPTION ...] testfile\n", progname);
fprintf(stderr, "Options:\n");
- fprintf(stderr, " -c chunksize size of piece fed to parser (default 64)n");
+ fprintf(stderr, " -c chunksize size of piece fed to parser (default 64)\n");
+ fprintf(stderr, " -o set JSONLEX_CTX_OWNS_TOKENS for leak checking\n");
fprintf(stderr, " -s do semantic processing\n");
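
As the updated usage() output shows, -o can be combined with the existing switches. A hypothetical TAP-style check of the semantic path with token ownership enabled, mirroring 003_test_semantic.pl above (the file path is illustrative only):

# With -o, the lexer owns the tokens, so do_object_field_end() and do_scalar()
# skip their free() calls and the lexer releases the tokens itself.
my ($stdout, $stderr) =
	run_command([ "test_json_parser_incremental", "-o", "-s", "tiny.json" ]);
is($stderr, "", "-o -s: no error output");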
}