Diffstat (limited to 'src')
-rw-r--r--  src/backend/access/transam/xlogbackup.c | 34
-rw-r--r--  src/backend/backup/basebackup.c         | 10
-rw-r--r--  src/backend/commands/subscriptioncmds.c | 35
-rw-r--r--  src/backend/utils/adt/json.c            | 53
-rw-r--r--  src/backend/utils/adt/jsonb.c           |  9
-rw-r--r--  src/backend/utils/adt/jsonfuncs.c       | 18
-rw-r--r--  src/backend/utils/adt/multirangetypes.c | 13
-rw-r--r--  src/backend/utils/adt/rangetypes.c      | 18
-rw-r--r--  src/backend/utils/adt/ruleutils.c       | 17
-rw-r--r--  src/backend/utils/adt/xml.c             | 34
10 files changed, 127 insertions(+), 114 deletions(-)
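
Every hunk below follows the same pattern: a heap-allocated StringInfo obtained from makeStringInfo() becomes a stack-allocated StringInfoData initialized with initStringInfo(), so only the data buffer is palloc'd rather than the buffer plus a separately allocated header. A minimal sketch of the two idioms, for orientation only (hypothetical function name, not part of the patch):

#include "postgres.h"
#include "lib/stringinfo.h"

static char *
sketch_both_idioms(void)
{
	/* before: header and buffer are both palloc'd */
	StringInfo	heap_buf = makeStringInfo();
	/* after: header lives on the stack, only the buffer is palloc'd */
	StringInfoData stack_buf;

	appendStringInfoString(heap_buf, "hello");
	destroyStringInfo(heap_buf);	/* frees heap_buf->data and the header */

	initStringInfo(&stack_buf);
	appendStringInfoString(&stack_buf, "hello");
	return stack_buf.data;			/* or pfree(stack_buf.data) when done with it */
}
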
diff --git a/src/backend/access/transam/xlogbackup.c b/src/backend/access/transam/xlogbackup.c
index cda4b38b7d6..8a8a2a7b326 100644
--- a/src/backend/access/transam/xlogbackup.c
+++ b/src/backend/access/transam/xlogbackup.c
@@ -31,18 +31,19 @@ build_backup_content(BackupState *state, bool ishistoryfile)
char startstrbuf[128];
char startxlogfile[MAXFNAMELEN]; /* backup start WAL file */
XLogSegNo startsegno;
- StringInfo result = makeStringInfo();
- char *data;
+ StringInfoData result;
Assert(state != NULL);
+ initStringInfo(&result);
+
/* Use the log timezone here, not the session timezone */
pg_strftime(startstrbuf, sizeof(startstrbuf), "%Y-%m-%d %H:%M:%S %Z",
pg_localtime(&state->starttime, log_timezone));
XLByteToSeg(state->startpoint, startsegno, wal_segment_size);
XLogFileName(startxlogfile, state->starttli, startsegno, wal_segment_size);
- appendStringInfo(result, "START WAL LOCATION: %X/%08X (file %s)\n",
+ appendStringInfo(&result, "START WAL LOCATION: %X/%08X (file %s)\n",
LSN_FORMAT_ARGS(state->startpoint), startxlogfile);
if (ishistoryfile)
@@ -52,18 +53,18 @@ build_backup_content(BackupState *state, bool ishistoryfile)
XLByteToSeg(state->stoppoint, stopsegno, wal_segment_size);
XLogFileName(stopxlogfile, state->stoptli, stopsegno, wal_segment_size);
- appendStringInfo(result, "STOP WAL LOCATION: %X/%08X (file %s)\n",
+ appendStringInfo(&result, "STOP WAL LOCATION: %X/%08X (file %s)\n",
LSN_FORMAT_ARGS(state->stoppoint), stopxlogfile);
}
- appendStringInfo(result, "CHECKPOINT LOCATION: %X/%08X\n",
+ appendStringInfo(&result, "CHECKPOINT LOCATION: %X/%08X\n",
LSN_FORMAT_ARGS(state->checkpointloc));
- appendStringInfoString(result, "BACKUP METHOD: streamed\n");
- appendStringInfo(result, "BACKUP FROM: %s\n",
+ appendStringInfoString(&result, "BACKUP METHOD: streamed\n");
+ appendStringInfo(&result, "BACKUP FROM: %s\n",
state->started_in_recovery ? "standby" : "primary");
- appendStringInfo(result, "START TIME: %s\n", startstrbuf);
- appendStringInfo(result, "LABEL: %s\n", state->name);
- appendStringInfo(result, "START TIMELINE: %u\n", state->starttli);
+ appendStringInfo(&result, "START TIME: %s\n", startstrbuf);
+ appendStringInfo(&result, "LABEL: %s\n", state->name);
+ appendStringInfo(&result, "START TIMELINE: %u\n", state->starttli);
if (ishistoryfile)
{
@@ -73,22 +74,19 @@ build_backup_content(BackupState *state, bool ishistoryfile)
pg_strftime(stopstrfbuf, sizeof(stopstrfbuf), "%Y-%m-%d %H:%M:%S %Z",
pg_localtime(&state->stoptime, log_timezone));
- appendStringInfo(result, "STOP TIME: %s\n", stopstrfbuf);
- appendStringInfo(result, "STOP TIMELINE: %u\n", state->stoptli);
+ appendStringInfo(&result, "STOP TIME: %s\n", stopstrfbuf);
+ appendStringInfo(&result, "STOP TIMELINE: %u\n", state->stoptli);
}
/* either both istartpoint and istarttli should be set, or neither */
Assert(XLogRecPtrIsInvalid(state->istartpoint) == (state->istarttli == 0));
if (!XLogRecPtrIsInvalid(state->istartpoint))
{
- appendStringInfo(result, "INCREMENTAL FROM LSN: %X/%08X\n",
+ appendStringInfo(&result, "INCREMENTAL FROM LSN: %X/%08X\n",
LSN_FORMAT_ARGS(state->istartpoint));
- appendStringInfo(result, "INCREMENTAL FROM TLI: %u\n",
+ appendStringInfo(&result, "INCREMENTAL FROM TLI: %u\n",
state->istarttli);
}
- data = result->data;
- pfree(result);
-
- return data;
+ return result.data;
}
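
In build_backup_content() above, the buffer can now be returned directly: initStringInfo() allocates result.data with palloc() in the current memory context, and only the StringInfoData header sits on the stack, so the header needs no cleanup and the old copy-the-pointer-then-pfree-the-header dance goes away. Roughly (an illustrative sketch with a hypothetical function name, not part of the patch):

static char *
build_label(void)
{
	StringInfoData result;

	initStringInfo(&result);			/* result.data is palloc'd in the caller's context */
	appendStringInfoString(&result, "LABEL: demo\n");
	return result.data;					/* header goes out of scope; the buffer survives */
}
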
diff --git a/src/backend/backup/basebackup.c b/src/backend/backup/basebackup.c
index bb7d90aa5d9..2be4e069816 100644
--- a/src/backend/backup/basebackup.c
+++ b/src/backend/backup/basebackup.c
@@ -239,7 +239,7 @@ perform_base_backup(basebackup_options *opt, bbsink *sink,
TimeLineID endtli;
backup_manifest_info manifest;
BackupState *backup_state;
- StringInfo tablespace_map;
+ StringInfoData tablespace_map;
/* Initial backup state, insofar as we know it now. */
state.tablespaces = NIL;
@@ -263,11 +263,11 @@ perform_base_backup(basebackup_options *opt, bbsink *sink,
/* Allocate backup related variables. */
backup_state = (BackupState *) palloc0(sizeof(BackupState));
- tablespace_map = makeStringInfo();
+ initStringInfo(&tablespace_map);
basebackup_progress_wait_checkpoint();
do_pg_backup_start(opt->label, opt->fastcheckpoint, &state.tablespaces,
- backup_state, tablespace_map);
+ backup_state, &tablespace_map);
state.startptr = backup_state->startpoint;
state.starttli = backup_state->starttli;
@@ -342,7 +342,7 @@ perform_base_backup(basebackup_options *opt, bbsink *sink,
if (opt->sendtblspcmapfile)
{
sendFileWithContent(sink, TABLESPACE_MAP,
- tablespace_map->data, -1, &manifest);
+ tablespace_map.data, -1, &manifest);
sendtblspclinks = false;
}
@@ -399,7 +399,7 @@ perform_base_backup(basebackup_options *opt, bbsink *sink,
endtli = backup_state->stoptli;
/* Deallocate backup-related variables. */
- destroyStringInfo(tablespace_map);
+ pfree(tablespace_map.data);
pfree(backup_state);
}
PG_END_ENSURE_ERROR_CLEANUP(do_pg_abort_backup, BoolGetDatum(false));
diff --git a/src/backend/commands/subscriptioncmds.c b/src/backend/commands/subscriptioncmds.c
index 1f45444b499..3d29818badd 100644
--- a/src/backend/commands/subscriptioncmds.c
+++ b/src/backend/commands/subscriptioncmds.c
@@ -491,20 +491,20 @@ static void
check_publications(WalReceiverConn *wrconn, List *publications)
{
WalRcvExecResult *res;
- StringInfo cmd;
+ StringInfoData cmd;
TupleTableSlot *slot;
List *publicationsCopy = NIL;
Oid tableRow[1] = {TEXTOID};
- cmd = makeStringInfo();
- appendStringInfoString(cmd, "SELECT t.pubname FROM\n"
+ initStringInfo(&cmd);
+ appendStringInfoString(&cmd, "SELECT t.pubname FROM\n"
" pg_catalog.pg_publication t WHERE\n"
" t.pubname IN (");
- GetPublicationsStr(publications, cmd, true);
- appendStringInfoChar(cmd, ')');
+ GetPublicationsStr(publications, &cmd, true);
+ appendStringInfoChar(&cmd, ')');
- res = walrcv_exec(wrconn, cmd->data, 1, tableRow);
- destroyStringInfo(cmd);
+ res = walrcv_exec(wrconn, cmd.data, 1, tableRow);
+ pfree(cmd.data);
if (res->status != WALRCV_OK_TUPLES)
ereport(ERROR,
@@ -535,15 +535,17 @@ check_publications(WalReceiverConn *wrconn, List *publications)
if (list_length(publicationsCopy))
{
/* Prepare the list of non-existent publication(s) for error message. */
- StringInfo pubnames = makeStringInfo();
+ StringInfoData pubnames;
+
+ initStringInfo(&pubnames);
- GetPublicationsStr(publicationsCopy, pubnames, false);
+ GetPublicationsStr(publicationsCopy, &pubnames, false);
ereport(WARNING,
errcode(ERRCODE_UNDEFINED_OBJECT),
errmsg_plural("publication %s does not exist on the publisher",
"publications %s do not exist on the publisher",
list_length(publicationsCopy),
- pubnames->data));
+ pubnames.data));
}
}
@@ -2885,12 +2887,13 @@ fetch_relation_list(WalReceiverConn *wrconn, List *publications)
int server_version = walrcv_server_version(wrconn);
bool check_columnlist = (server_version >= 150000);
int column_count = check_columnlist ? 4 : 3;
- StringInfo pub_names = makeStringInfo();
+ StringInfoData pub_names;
initStringInfo(&cmd);
+ initStringInfo(&pub_names);
/* Build the pub_names comma-separated string. */
- GetPublicationsStr(publications, pub_names, true);
+ GetPublicationsStr(publications, &pub_names, true);
/* Get the list of relations from the publisher */
if (server_version >= 160000)
@@ -2917,7 +2920,7 @@ fetch_relation_list(WalReceiverConn *wrconn, List *publications)
" FROM pg_publication\n"
" WHERE pubname IN ( %s )) AS gpt\n"
" ON gpt.relid = c.oid\n",
- pub_names->data);
+ pub_names.data);
/* From version 19, inclusion of sequences in the target is supported */
if (server_version >= 190000)
@@ -2926,7 +2929,7 @@ fetch_relation_list(WalReceiverConn *wrconn, List *publications)
" SELECT DISTINCT s.schemaname, s.sequencename, " CppAsString2(RELKIND_SEQUENCE) "::\"char\" AS relkind, NULL::int2vector AS attrs\n"
" FROM pg_catalog.pg_publication_sequences s\n"
" WHERE s.pubname IN ( %s )",
- pub_names->data);
+ pub_names.data);
}
else
{
@@ -2939,10 +2942,10 @@ fetch_relation_list(WalReceiverConn *wrconn, List *publications)
appendStringInfo(&cmd, "FROM pg_catalog.pg_publication_tables t\n"
" WHERE t.pubname IN ( %s )",
- pub_names->data);
+ pub_names.data);
}
- destroyStringInfo(pub_names);
+ pfree(pub_names.data);
res = walrcv_exec(wrconn, cmd.data, column_count, tableRow);
pfree(cmd.data);
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 88a612b041d..06dd62f0008 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -631,13 +631,13 @@ Datum
array_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
- StringInfo result;
+ StringInfoData result;
- result = makeStringInfo();
+ initStringInfo(&result);
- array_to_json_internal(array, result, false);
+ array_to_json_internal(array, &result, false);
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+ PG_RETURN_TEXT_P(cstring_to_text_with_len(result.data, result.len));
}
/*
@@ -648,13 +648,13 @@ array_to_json_pretty(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
- StringInfo result;
+ StringInfoData result;
- result = makeStringInfo();
+ initStringInfo(&result);
- array_to_json_internal(array, result, use_line_feeds);
+ array_to_json_internal(array, &result, use_line_feeds);
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+ PG_RETURN_TEXT_P(cstring_to_text_with_len(result.data, result.len));
}
/*
@@ -664,13 +664,13 @@ Datum
row_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
- StringInfo result;
+ StringInfoData result;
- result = makeStringInfo();
+ initStringInfo(&result);
- composite_to_json(array, result, false);
+ composite_to_json(array, &result, false);
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+ PG_RETURN_TEXT_P(cstring_to_text_with_len(result.data, result.len));
}
/*
@@ -681,13 +681,13 @@ row_to_json_pretty(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
- StringInfo result;
+ StringInfoData result;
- result = makeStringInfo();
+ initStringInfo(&result);
- composite_to_json(array, result, use_line_feeds);
+ composite_to_json(array, &result, use_line_feeds);
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+ PG_RETURN_TEXT_P(cstring_to_text_with_len(result.data, result.len));
}
/*
@@ -763,12 +763,13 @@ to_json(PG_FUNCTION_ARGS)
Datum
datum_to_json(Datum val, JsonTypeCategory tcategory, Oid outfuncoid)
{
- StringInfo result = makeStringInfo();
+ StringInfoData result;
- datum_to_json_internal(val, false, result, tcategory, outfuncoid,
+ initStringInfo(&result);
+ datum_to_json_internal(val, false, &result, tcategory, outfuncoid,
false);
- return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
+ return PointerGetDatum(cstring_to_text_with_len(result.data, result.len));
}
/*
@@ -1347,25 +1348,25 @@ json_build_array_worker(int nargs, const Datum *args, const bool *nulls, const O
{
int i;
const char *sep = "";
- StringInfo result;
+ StringInfoData result;
- result = makeStringInfo();
+ initStringInfo(&result);
- appendStringInfoChar(result, '[');
+ appendStringInfoChar(&result, '[');
for (i = 0; i < nargs; i++)
{
if (absent_on_null && nulls[i])
continue;
- appendStringInfoString(result, sep);
+ appendStringInfoString(&result, sep);
sep = ", ";
- add_json(args[i], nulls[i], result, types[i], false);
+ add_json(args[i], nulls[i], &result, types[i], false);
}
- appendStringInfoChar(result, ']');
+ appendStringInfoChar(&result, ']');
- return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
+ return PointerGetDatum(cstring_to_text_with_len(result.data, result.len));
}
/*
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index da94d424d61..9399cdb491a 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -125,15 +125,16 @@ jsonb_send(PG_FUNCTION_ARGS)
{
Jsonb *jb = PG_GETARG_JSONB_P(0);
StringInfoData buf;
- StringInfo jtext = makeStringInfo();
+ StringInfoData jtext;
int version = 1;
- (void) JsonbToCString(jtext, &jb->root, VARSIZE(jb));
+ initStringInfo(&jtext);
+ (void) JsonbToCString(&jtext, &jb->root, VARSIZE(jb));
pq_begintypsend(&buf);
pq_sendint8(&buf, version);
- pq_sendtext(&buf, jtext->data, jtext->len);
- destroyStringInfo(jtext);
+ pq_sendtext(&buf, jtext.data, jtext.len);
+ pfree(jtext.data);
PG_RETURN_BYTEA_P(pq_endtypsend(&buf));
}
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 41862872e8a..8898f0f90a1 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -4507,14 +4507,16 @@ json_strip_nulls(PG_FUNCTION_ARGS)
text *json = PG_GETARG_TEXT_PP(0);
bool strip_in_arrays = PG_NARGS() == 2 ? PG_GETARG_BOOL(1) : false;
StripnullState *state;
+ StringInfoData strbuf;
JsonLexContext lex;
JsonSemAction *sem;
state = palloc0(sizeof(StripnullState));
sem = palloc0(sizeof(JsonSemAction));
+ initStringInfo(&strbuf);
state->lex = makeJsonLexContext(&lex, json, true);
- state->strval = makeStringInfo();
+ state->strval = &strbuf;
state->skip_next_null = false;
state->strip_in_arrays = strip_in_arrays;
@@ -4607,11 +4609,12 @@ Datum
jsonb_pretty(PG_FUNCTION_ARGS)
{
Jsonb *jb = PG_GETARG_JSONB_P(0);
- StringInfo str = makeStringInfo();
+ StringInfoData str;
- JsonbToCStringIndent(str, &jb->root, VARSIZE(jb));
+ initStringInfo(&str);
+ JsonbToCStringIndent(&str, &jb->root, VARSIZE(jb));
- PG_RETURN_TEXT_P(cstring_to_text_with_len(str->data, str->len));
+ PG_RETURN_TEXT_P(cstring_to_text_with_len(str.data, str.len));
}
/*
@@ -5846,7 +5849,7 @@ transform_jsonb_string_values(Jsonb *jsonb, void *action_state,
* Iterate over a json, and apply a specified JsonTransformStringValuesAction
* to every string value or element. Any necessary context for a
* JsonTransformStringValuesAction can be passed in the action_state variable.
- * Function returns a StringInfo, which is a copy of an original json with
+ * Function returns a Text Datum, which is a copy of an original json with
* transformed values.
*/
text *
@@ -5856,9 +5859,12 @@ transform_json_string_values(text *json, void *action_state,
JsonLexContext lex;
JsonSemAction *sem = palloc0(sizeof(JsonSemAction));
TransformJsonStringValuesState *state = palloc0(sizeof(TransformJsonStringValuesState));
+ StringInfoData strbuf;
+
+ initStringInfo(&strbuf);
state->lex = makeJsonLexContext(&lex, json, true);
- state->strval = makeStringInfo();
+ state->strval = &strbuf;
state->action = transform_action;
state->action_state = action_state;
diff --git a/src/backend/utils/adt/multirangetypes.c b/src/backend/utils/adt/multirangetypes.c
index 84733dc5019..95e9539591e 100644
--- a/src/backend/utils/adt/multirangetypes.c
+++ b/src/backend/utils/adt/multirangetypes.c
@@ -378,17 +378,18 @@ multirange_send(PG_FUNCTION_ARGS)
{
MultirangeType *multirange = PG_GETARG_MULTIRANGE_P(0);
Oid mltrngtypoid = MultirangeTypeGetOid(multirange);
- StringInfo buf = makeStringInfo();
+ StringInfoData buf;
RangeType **ranges;
int32 range_count;
MultirangeIOData *cache;
+ initStringInfo(&buf);
cache = get_multirange_io_data(fcinfo, mltrngtypoid, IOFunc_send);
/* construct output */
- pq_begintypsend(buf);
+ pq_begintypsend(&buf);
- pq_sendint32(buf, multirange->rangeCount);
+ pq_sendint32(&buf, multirange->rangeCount);
multirange_deserialize(cache->typcache->rngtype, multirange, &range_count, &ranges);
for (int i = 0; i < range_count; i++)
@@ -399,11 +400,11 @@ multirange_send(PG_FUNCTION_ARGS)
range = RangeTypePGetDatum(ranges[i]);
outputbytes = SendFunctionCall(&cache->typioproc, range);
- pq_sendint32(buf, VARSIZE(outputbytes) - VARHDRSZ);
- pq_sendbytes(buf, VARDATA(outputbytes), VARSIZE(outputbytes) - VARHDRSZ);
+ pq_sendint32(&buf, VARSIZE(outputbytes) - VARHDRSZ);
+ pq_sendbytes(&buf, VARDATA(outputbytes), VARSIZE(outputbytes) - VARHDRSZ);
}
- PG_RETURN_BYTEA_P(pq_endtypsend(buf));
+ PG_RETURN_BYTEA_P(pq_endtypsend(&buf));
}
/*
diff --git a/src/backend/utils/adt/rangetypes.c b/src/backend/utils/adt/rangetypes.c
index 0b2ad8b0975..0e451e4693b 100644
--- a/src/backend/utils/adt/rangetypes.c
+++ b/src/backend/utils/adt/rangetypes.c
@@ -264,7 +264,7 @@ Datum
range_send(PG_FUNCTION_ARGS)
{
RangeType *range = PG_GETARG_RANGE_P(0);
- StringInfo buf = makeStringInfo();
+ StringInfoData buf;
RangeIOData *cache;
char flags;
RangeBound lower;
@@ -273,6 +273,8 @@ range_send(PG_FUNCTION_ARGS)
check_stack_depth(); /* recurses when subtype is a range type */
+ initStringInfo(&buf);
+
cache = get_range_io_data(fcinfo, RangeTypeGetOid(range), IOFunc_send);
/* deserialize */
@@ -280,9 +282,9 @@ range_send(PG_FUNCTION_ARGS)
flags = range_get_flags(range);
/* construct output */
- pq_begintypsend(buf);
+ pq_begintypsend(&buf);
- pq_sendbyte(buf, flags);
+ pq_sendbyte(&buf, flags);
if (RANGE_HAS_LBOUND(flags))
{
@@ -290,8 +292,8 @@ range_send(PG_FUNCTION_ARGS)
uint32 bound_len = VARSIZE(bound) - VARHDRSZ;
char *bound_data = VARDATA(bound);
- pq_sendint32(buf, bound_len);
- pq_sendbytes(buf, bound_data, bound_len);
+ pq_sendint32(&buf, bound_len);
+ pq_sendbytes(&buf, bound_data, bound_len);
}
if (RANGE_HAS_UBOUND(flags))
@@ -300,11 +302,11 @@ range_send(PG_FUNCTION_ARGS)
uint32 bound_len = VARSIZE(bound) - VARHDRSZ;
char *bound_data = VARDATA(bound);
- pq_sendint32(buf, bound_len);
- pq_sendbytes(buf, bound_data, bound_len);
+ pq_sendint32(&buf, bound_len);
+ pq_sendbytes(&buf, bound_data, bound_len);
}
- PG_RETURN_BYTEA_P(pq_endtypsend(buf));
+ PG_RETURN_BYTEA_P(pq_endtypsend(&buf));
}
/*
diff --git a/src/backend/utils/adt/ruleutils.c b/src/backend/utils/adt/ruleutils.c
index 5398679cce2..556ab057e5a 100644
--- a/src/backend/utils/adt/ruleutils.c
+++ b/src/backend/utils/adt/ruleutils.c
@@ -13711,25 +13711,26 @@ char *
get_range_partbound_string(List *bound_datums)
{
deparse_context context;
- StringInfo buf = makeStringInfo();
+ StringInfoData buf;
ListCell *cell;
char *sep;
+ initStringInfo(&buf);
memset(&context, 0, sizeof(deparse_context));
- context.buf = buf;
+ context.buf = &buf;
- appendStringInfoChar(buf, '(');
+ appendStringInfoChar(&buf, '(');
sep = "";
foreach(cell, bound_datums)
{
PartitionRangeDatum *datum =
lfirst_node(PartitionRangeDatum, cell);
- appendStringInfoString(buf, sep);
+ appendStringInfoString(&buf, sep);
if (datum->kind == PARTITION_RANGE_DATUM_MINVALUE)
- appendStringInfoString(buf, "MINVALUE");
+ appendStringInfoString(&buf, "MINVALUE");
else if (datum->kind == PARTITION_RANGE_DATUM_MAXVALUE)
- appendStringInfoString(buf, "MAXVALUE");
+ appendStringInfoString(&buf, "MAXVALUE");
else
{
Const *val = castNode(Const, datum->value);
@@ -13738,7 +13739,7 @@ get_range_partbound_string(List *bound_datums)
}
sep = ", ";
}
- appendStringInfoChar(buf, ')');
+ appendStringInfoChar(&buf, ')');
- return buf->data;
+ return buf.data;
}
diff --git a/src/backend/utils/adt/xml.c b/src/backend/utils/adt/xml.c
index 35c915573a1..41e775570ec 100644
--- a/src/backend/utils/adt/xml.c
+++ b/src/backend/utils/adt/xml.c
@@ -2133,7 +2133,7 @@ xml_errorHandler(void *data, PgXmlErrorPtr error)
node->type == XML_ELEMENT_NODE) ? node->name : NULL;
int domain = error->domain;
int level = error->level;
- StringInfo errorBuf;
+ StringInfoData errorBuf;
/*
* Defend against someone passing us a bogus context struct.
@@ -2210,16 +2210,16 @@ xml_errorHandler(void *data, PgXmlErrorPtr error)
}
/* Prepare error message in errorBuf */
- errorBuf = makeStringInfo();
+ initStringInfo(&errorBuf);
if (error->line > 0)
- appendStringInfo(errorBuf, "line %d: ", error->line);
+ appendStringInfo(&errorBuf, "line %d: ", error->line);
if (name != NULL)
- appendStringInfo(errorBuf, "element %s: ", name);
+ appendStringInfo(&errorBuf, "element %s: ", name);
if (error->message != NULL)
- appendStringInfoString(errorBuf, error->message);
+ appendStringInfoString(&errorBuf, error->message);
else
- appendStringInfoString(errorBuf, "(no message provided)");
+ appendStringInfoString(&errorBuf, "(no message provided)");
/*
* Append context information to errorBuf.
@@ -2237,11 +2237,11 @@ xml_errorHandler(void *data, PgXmlErrorPtr error)
xmlGenericErrorFunc errFuncSaved = xmlGenericError;
void *errCtxSaved = xmlGenericErrorContext;
- xmlSetGenericErrorFunc(errorBuf,
+ xmlSetGenericErrorFunc(&errorBuf,
(xmlGenericErrorFunc) appendStringInfo);
/* Add context information to errorBuf */
- appendStringInfoLineSeparator(errorBuf);
+ appendStringInfoLineSeparator(&errorBuf);
xmlParserPrintFileContext(input);
@@ -2250,7 +2250,7 @@ xml_errorHandler(void *data, PgXmlErrorPtr error)
}
/* Get rid of any trailing newlines in errorBuf */
- chopStringInfoNewlines(errorBuf);
+ chopStringInfoNewlines(&errorBuf);
/*
* Legacy error handling mode. err_occurred is never set, we just add the
@@ -2263,10 +2263,10 @@ xml_errorHandler(void *data, PgXmlErrorPtr error)
if (xmlerrcxt->strictness == PG_XML_STRICTNESS_LEGACY)
{
appendStringInfoLineSeparator(&xmlerrcxt->err_buf);
- appendBinaryStringInfo(&xmlerrcxt->err_buf, errorBuf->data,
- errorBuf->len);
+ appendBinaryStringInfo(&xmlerrcxt->err_buf, errorBuf.data,
+ errorBuf.len);
- destroyStringInfo(errorBuf);
+ pfree(errorBuf.data);
return;
}
@@ -2281,23 +2281,23 @@ xml_errorHandler(void *data, PgXmlErrorPtr error)
if (level >= XML_ERR_ERROR)
{
appendStringInfoLineSeparator(&xmlerrcxt->err_buf);
- appendBinaryStringInfo(&xmlerrcxt->err_buf, errorBuf->data,
- errorBuf->len);
+ appendBinaryStringInfo(&xmlerrcxt->err_buf, errorBuf.data,
+ errorBuf.len);
xmlerrcxt->err_occurred = true;
}
else if (level >= XML_ERR_WARNING)
{
ereport(WARNING,
- (errmsg_internal("%s", errorBuf->data)));
+ (errmsg_internal("%s", errorBuf.data)));
}
else
{
ereport(NOTICE,
- (errmsg_internal("%s", errorBuf->data)));
+ (errmsg_internal("%s", errorBuf.data)));
}
- destroyStringInfo(errorBuf);
+ pfree(errorBuf.data);
}