Commits

Andrew Dunstan committed 759a252

pgindent run (with a fixed pgindent)

Comments (0)

Files changed (3)

src/backend/utils/adt/json.c

 #include "utils/jsonapi.h"
 #include "utils/typcache.h"
 
-/* 
+/*
  * The context of the parser is maintained by the recursive descent
  * mechanism, but is passed explicitly to the error reporting routine
  * for better diagnostics.
  */
-typedef enum                    /* contexts of JSON parser */
+typedef enum					/* contexts of JSON parser */
 {
-    JSON_PARSE_VALUE,           /* expecting a value */
-	JSON_PARSE_STRING,          /* expecting a string (for a field name) */
-    JSON_PARSE_ARRAY_START,     /* saw '[', expecting value or ']' */
-    JSON_PARSE_ARRAY_NEXT,      /* saw array element, expecting ',' or ']' */
-    JSON_PARSE_OBJECT_START,    /* saw '{', expecting label or '}' */
-    JSON_PARSE_OBJECT_LABEL,    /* saw object label, expecting ':' */
-    JSON_PARSE_OBJECT_NEXT,     /* saw object value, expecting ',' or '}' */
-    JSON_PARSE_OBJECT_COMMA,    /* saw object ',', expecting next label */
-	JSON_PARSE_END              /* saw the end of a document, expect nothing */
-} JsonParseContext;
+	JSON_PARSE_VALUE,			/* expecting a value */
+	JSON_PARSE_STRING,			/* expecting a string (for a field name) */
+	JSON_PARSE_ARRAY_START,		/* saw '[', expecting value or ']' */
+	JSON_PARSE_ARRAY_NEXT,		/* saw array element, expecting ',' or ']' */
+	JSON_PARSE_OBJECT_START,	/* saw '{', expecting label or '}' */
+	JSON_PARSE_OBJECT_LABEL,	/* saw object label, expecting ':' */
+	JSON_PARSE_OBJECT_NEXT,		/* saw object value, expecting ',' or '}' */
+	JSON_PARSE_OBJECT_COMMA,	/* saw object ',', expecting next label */
+	JSON_PARSE_END				/* saw the end of a document, expect nothing */
+}	JsonParseContext;
 
 static void json_validate_cstring(char *input);
 static void json_lex(JsonLexContext *lex);
 static void parse_array(JsonLexContext *lex, JsonSemAction sem);
 static void report_parse_error(JsonParseContext ctx, JsonLexContext *lex);
 static void report_invalid_token(JsonLexContext *lex);
-static int report_json_context(JsonLexContext *lex);
+static int	report_json_context(JsonLexContext *lex);
 static char *extract_mb_char(char *s);
 static void composite_to_json(Datum composite, StringInfo result,
-							  bool use_line_feeds);
+				  bool use_line_feeds);
 static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
 				  Datum *vals, bool *nulls, int *valcount,
 				  TYPCATEGORY tcategory, Oid typoutputfunc,
 				  bool use_line_feeds);
 static void array_to_json_internal(Datum array, StringInfo result,
-								   bool use_line_feeds);
+					   bool use_line_feeds);
 
 /* the null action object used for pure validation */
-static jsonSemAction nullSemAction = 
-{ 
+static jsonSemAction nullSemAction =
+{
 	NULL, NULL, NULL, NULL, NULL,
 	NULL, NULL, NULL, NULL, NULL
 };
 static inline JsonTokenType
 lex_peek(JsonLexContext *lex)
 {
-    return lex->token_type;
+	return lex->token_type;
 }
 
-static inline bool 
+static inline bool
 lex_accept(JsonLexContext *lex, JsonTokenType token, char **lexeme)
 {
-    if (lex->token_type == token) 
+	if (lex->token_type == token)
 	{
 		if (lexeme != NULL)
 		{
 			}
 			else
 			{
-				int len = (lex->token_terminator - lex->token_start);
-				char *tokstr = palloc(len+1);
-				memcpy(tokstr,lex->token_start,len);
+				int			len = (lex->token_terminator - lex->token_start);
+				char	   *tokstr = palloc(len + 1);
+
+				memcpy(tokstr, lex->token_start, len);
 				tokstr[len] = '\0';
 				*lexeme = tokstr;
 			}
 		}
-        json_lex(lex);
-        return true;
-    }
-    return false;
+		json_lex(lex);
+		return true;
+	}
+	return false;
 }
 
-static inline void 
+static inline void
 lex_expect(JsonParseContext ctx, JsonLexContext *lex, JsonTokenType token)
 {
-    if (! lex_accept(lex,token,NULL))
+	if (!lex_accept(lex, token, NULL))
 		report_parse_error(ctx, lex);;
 }
 
  */
 
 JsonLexContext *
-makeJsonLexContext(char * json, bool need_escapes)
+makeJsonLexContext(char *json, bool need_escapes)
 {
-	JsonLexContext * lex = palloc0(sizeof(JsonLexContext));
-	
+	JsonLexContext *lex = palloc0(sizeof(JsonLexContext));
+
 	lex->input = lex->token_terminator = lex->line_start = json;
 	lex->line_number = 1;
 	if (need_escapes)
 void
 pg_parse_json(JsonLexContext *lex, JsonSemAction sem)
 {
-
-    /* get the initial token */
-    json_lex(lex);
+	/* get the initial token */
+	json_lex(lex);
 
 
-    /* parse by recursive descent */
-    if (lex_peek(lex) == JSON_TOKEN_OBJECT_START)
-        parse_object(lex, sem);
-    else if (lex_peek(lex) == JSON_TOKEN_ARRAY_START)
-        parse_array(lex, sem);
-    else
-        parse_scalar(lex, sem);/* json can be a bare scalar */
+	/* parse by recursive descent */
+	if (lex_peek(lex) == JSON_TOKEN_OBJECT_START)
+		parse_object(lex, sem);
+	else if (lex_peek(lex) == JSON_TOKEN_ARRAY_START)
+		parse_array(lex, sem);
+	else
+		parse_scalar(lex, sem); /* json can be a bare scalar */
 
-    lex_expect(JSON_PARSE_END, lex, JSON_TOKEN_END);
+	lex_expect(JSON_PARSE_END, lex, JSON_TOKEN_END);
 
 }
 
 static void
 parse_scalar(JsonLexContext *lex, JsonSemAction sem)
 {
-    char *val = NULL;
-    json_scalar_action sfunc = sem->scalar;
-    JsonTokenType tok = lex_peek(lex);
-
-    if (lex_accept(lex, JSON_TOKEN_TRUE, &val) ||
-        lex_accept(lex, JSON_TOKEN_FALSE, &val) ||
-        lex_accept(lex, JSON_TOKEN_NULL, &val) ||
-        lex_accept(lex, JSON_TOKEN_NUMBER, &val) ||
-        lex_accept(lex, JSON_TOKEN_STRING, &val))
-    {
-        if (sfunc != NULL)
-            (*sfunc) (sem->semstate, val, tok);
-    }
-    else
-    {
-        report_parse_error(JSON_PARSE_VALUE, lex);
-    }
+	char	   *val = NULL;
+	json_scalar_action sfunc = sem->scalar;
+	JsonTokenType tok = lex_peek(lex);
+
+	if (lex_accept(lex, JSON_TOKEN_TRUE, &val) ||
+		lex_accept(lex, JSON_TOKEN_FALSE, &val) ||
+		lex_accept(lex, JSON_TOKEN_NULL, &val) ||
+		lex_accept(lex, JSON_TOKEN_NUMBER, &val) ||
+		lex_accept(lex, JSON_TOKEN_STRING, &val))
+	{
+		if (sfunc != NULL)
+			(*sfunc) (sem->semstate, val, tok);
+	}
+	else
+	{
+		report_parse_error(JSON_PARSE_VALUE, lex);
+	}
 }
 
 static void
 parse_object_field(JsonLexContext *lex, JsonSemAction sem)
 {
+	char	   *fname = NULL;	/* keep compiler quiet */
+	json_ofield_action ostart = sem->object_field_start;
+	json_ofield_action oend = sem->object_field_end;
+	bool		isnull;
 
-    char *fname = NULL; /* keep compiler quiet */
-    json_ofield_action ostart = sem->object_field_start;
-    json_ofield_action oend = sem->object_field_end;
-    bool isnull;
-
-    if (! lex_accept(lex, JSON_TOKEN_STRING, &fname))
-        report_parse_error(JSON_PARSE_STRING, lex); 
+	if (!lex_accept(lex, JSON_TOKEN_STRING, &fname))
+		report_parse_error(JSON_PARSE_STRING, lex);
 
-    lex_expect(JSON_PARSE_OBJECT_LABEL, lex, JSON_TOKEN_COLON);
+	lex_expect(JSON_PARSE_OBJECT_LABEL, lex, JSON_TOKEN_COLON);
 
-    isnull = lex_peek(lex) == JSON_TOKEN_NULL;
+	isnull = lex_peek(lex) == JSON_TOKEN_NULL;
 
-    if (ostart != NULL)
-        (*ostart) (sem->semstate, fname, isnull);
+	if (ostart != NULL)
+		(*ostart) (sem->semstate, fname, isnull);
 
-    if (lex_peek(lex) == JSON_TOKEN_OBJECT_START)
-        parse_object(lex, sem);
-    else if (lex_peek(lex) == JSON_TOKEN_ARRAY_START)
-        parse_array(lex,sem);
-    else
-        parse_scalar(lex, sem);
+	if (lex_peek(lex) == JSON_TOKEN_OBJECT_START)
+		parse_object(lex, sem);
+	else if (lex_peek(lex) == JSON_TOKEN_ARRAY_START)
+		parse_array(lex, sem);
+	else
+		parse_scalar(lex, sem);
 
-    if (oend != NULL)
-        (*oend) (sem->semstate, fname, isnull);
+	if (oend != NULL)
+		(*oend) (sem->semstate, fname, isnull);
 
 	if (fname != NULL)
 		pfree(fname);
 }
 
-static void 
+static void
 parse_object(JsonLexContext *lex, JsonSemAction sem)
 {
-    
-    json_struct_action ostart = sem->object_start;
-    json_struct_action oend = sem->object_end;
+	json_struct_action ostart = sem->object_start;
+	json_struct_action oend = sem->object_end;
 
-    if (ostart != NULL)
-        (*ostart) (sem->semstate);
+	if (ostart != NULL)
+		(*ostart) (sem->semstate);
 
 	lex->lex_level++;
 
 	/* we know this will succeed, just clearing the token */
-    lex_expect(JSON_PARSE_OBJECT_START, lex, JSON_TOKEN_OBJECT_START);
-    if (lex_peek(lex) == JSON_TOKEN_STRING)
-    {
-        parse_object_field(lex, sem);
-
-        while (lex_accept(lex,JSON_TOKEN_COMMA,NULL))
-                parse_object_field(lex, sem);
-        
-    }
+	lex_expect(JSON_PARSE_OBJECT_START, lex, JSON_TOKEN_OBJECT_START);
+	if (lex_peek(lex) == JSON_TOKEN_STRING)
+	{
+		parse_object_field(lex, sem);
+
+		while (lex_accept(lex, JSON_TOKEN_COMMA, NULL))
+			parse_object_field(lex, sem);
+
+	}
 	else if (lex_peek(lex) != JSON_TOKEN_OBJECT_END)
 	{
 		/* case of an invalid initial token inside the object */
 		report_parse_error(JSON_PARSE_OBJECT_START, lex);
 	}
 
-    lex_expect(JSON_PARSE_OBJECT_NEXT, lex, JSON_TOKEN_OBJECT_END);
+	lex_expect(JSON_PARSE_OBJECT_NEXT, lex, JSON_TOKEN_OBJECT_END);
 
 	lex->lex_level--;
 
-    if (oend != NULL)
-        (*oend) (sem->semstate);
+	if (oend != NULL)
+		(*oend) (sem->semstate);
 }
 
 static void
 parse_array_element(JsonLexContext *lex, JsonSemAction sem)
 {
-    json_aelem_action astart = sem->array_element_start;
-    json_aelem_action aend = sem->array_element_end;
-    bool isnull;
+	json_aelem_action astart = sem->array_element_start;
+	json_aelem_action aend = sem->array_element_end;
+	bool		isnull;
 
-    isnull = lex_peek(lex) == JSON_TOKEN_NULL;
+	isnull = lex_peek(lex) == JSON_TOKEN_NULL;
 
-    if (astart != NULL)
-        (*astart) (sem->semstate, isnull);
+	if (astart != NULL)
+		(*astart) (sem->semstate, isnull);
 
-    if (lex_peek(lex) == JSON_TOKEN_OBJECT_START)
-        parse_object(lex, sem);
-    else if (lex_peek(lex) == JSON_TOKEN_ARRAY_START)
-        parse_array(lex, sem);
-    else
-        parse_scalar(lex, sem);
+	if (lex_peek(lex) == JSON_TOKEN_OBJECT_START)
+		parse_object(lex, sem);
+	else if (lex_peek(lex) == JSON_TOKEN_ARRAY_START)
+		parse_array(lex, sem);
+	else
+		parse_scalar(lex, sem);
 
-    if (aend != NULL)
-        (*aend) (sem->semstate, isnull);
+	if (aend != NULL)
+		(*aend) (sem->semstate, isnull);
 }
 
-static void 
+static void
 parse_array(JsonLexContext *lex, JsonSemAction sem)
 {
-    json_struct_action astart = sem->array_start;
-    json_struct_action aend = sem->array_end;
+	json_struct_action astart = sem->array_start;
+	json_struct_action aend = sem->array_end;
 
-    if (astart != NULL)
-        (*astart) (sem->semstate);
+	if (astart != NULL)
+		(*astart) (sem->semstate);
 
 	lex->lex_level++;
 
-    lex_expect(JSON_PARSE_ARRAY_START, lex, JSON_TOKEN_ARRAY_START);
-    if (lex_peek(lex) != JSON_TOKEN_ARRAY_END)
-    {
+	lex_expect(JSON_PARSE_ARRAY_START, lex, JSON_TOKEN_ARRAY_START);
+	if (lex_peek(lex) != JSON_TOKEN_ARRAY_END)
+	{
 
-        parse_array_element(lex, sem);
+		parse_array_element(lex, sem);
 
-        while (lex_accept(lex,JSON_TOKEN_COMMA,NULL))
-            parse_array_element(lex, sem);
-    }
+		while (lex_accept(lex, JSON_TOKEN_COMMA, NULL))
+			parse_array_element(lex, sem);
+	}
 
-    lex_expect(JSON_PARSE_ARRAY_NEXT, lex, JSON_TOKEN_ARRAY_END);
+	lex_expect(JSON_PARSE_ARRAY_NEXT, lex, JSON_TOKEN_ARRAY_END);
 
 	lex->lex_level--;
 
-    if (aend != NULL)
-        (*aend) (sem->semstate);
+	if (aend != NULL)
+		(*aend) (sem->semstate);
 }
 
 /*
 static void
 json_validate_cstring(char *input)
 {
-
 	JsonLexContext *lex = makeJsonLexContext(input, false);
 
 	pg_parse_json(lex, NullSemAction);
 		lex->token_start = NULL;
 		lex->prev_token_terminator = lex->token_terminator;
 		lex->token_terminator = s;
-		lex->token_type =  JSON_TOKEN_END;
+		lex->token_type = JSON_TOKEN_END;
 	}
 	else if (strchr("{}[],:", s[0]))
 	{
 		lex->token_terminator = s + 1;
 		switch (s[0])
 		{
-			case '{': 
-				lex->token_type = JSON_TOKEN_OBJECT_START; 
+			case '{':
+				lex->token_type = JSON_TOKEN_OBJECT_START;
 				break;
 			case '}':
-				lex->token_type = JSON_TOKEN_OBJECT_END; 
+				lex->token_type = JSON_TOKEN_OBJECT_END;
 				break;
-			case '[': 
-				lex->token_type = JSON_TOKEN_ARRAY_START; 
+			case '[':
+				lex->token_type = JSON_TOKEN_ARRAY_START;
 				break;
 			case ']':
-				lex->token_type = JSON_TOKEN_ARRAY_END; 
+				lex->token_type = JSON_TOKEN_ARRAY_END;
 				break;
 			case ',':
-				lex->token_type = JSON_TOKEN_COMMA; 
+				lex->token_type = JSON_TOKEN_COMMA;
 				break;
 			case ':':
-				lex->token_type = JSON_TOKEN_COLON; 
+				lex->token_type = JSON_TOKEN_COLON;
 				break;
 			default:
 				break;
 	}
 	else
 	{
-		char   *p;
+		char	   *p;
 
 		/*
-		 * We're not dealing with a string, number, legal punctuation mark,
-		 * or end of string.  The only legal tokens we might find here are
-		 * true, false, and null, but for error reporting purposes we scan
-		 * until we see a non-alphanumeric character.  That way, we can report
-		 * the whole word as an unexpected token, rather than just some
+		 * We're not dealing with a string, number, legal punctuation mark, or
+		 * end of string.  The only legal tokens we might find here are true,
+		 * false, and null, but for error reporting purposes we scan until we
+		 * see a non-alphanumeric character.  That way, we can report the
+		 * whole word as an unexpected token, rather than just some
 		 * unintuitive prefix thereof.
 		 */
- 		for (p = s; JSON_ALPHANUMERIC_CHAR(*p); p++)
-			/* skip */ ;
+		for (p = s; JSON_ALPHANUMERIC_CHAR(*p); p++)
+			 /* skip */ ;
 
 		/*
 		 * We got some sort of unexpected punctuation or an otherwise
 {
 	char	   *s;
 
-	if (lex-> strval != NULL)
+	if (lex->strval != NULL)
 		resetStringInfo(lex->strval);
 
 	for (s = lex->token_start + 1; *s != '"'; s++)
 				}
 				if (lex->strval != NULL)
 				{
-					char utf8str[5];
-					int utf8len;
-					char *converted;
+					char		utf8str[5];
+					int			utf8len;
+					char	   *converted;
 
-					unicode_to_utf8(ch, (unsigned char *)utf8str);
-					utf8len = pg_utf_mblen((unsigned char *)utf8str);
+					unicode_to_utf8(ch, (unsigned char *) utf8str);
+					utf8len = pg_utf_mblen((unsigned char *) utf8str);
 					utf8str[utf8len] = '\0';
 					converted = pg_any_to_server(utf8str, 1, PG_UTF8);
 					appendStringInfoString(lex->strval, converted);
 					if (converted != utf8str)
 						pfree(converted);
-					
+
 				}
 			}
 			else if (lex->strval != NULL)
 			{
-				switch(*s)
+				switch (*s)
 				{
 					case '"':
 					case '\\':
 					case '/':
-						appendStringInfoChar(lex->strval,*s);
+						appendStringInfoChar(lex->strval, *s);
 						break;
 					case 'b':
-						appendStringInfoChar(lex->strval,'\b');
+						appendStringInfoChar(lex->strval, '\b');
 						break;
 					case 'f':
-						appendStringInfoChar(lex->strval,'\f');
+						appendStringInfoChar(lex->strval, '\f');
 						break;
 					case 'n':
-						appendStringInfoChar(lex->strval,'\n');
+						appendStringInfoChar(lex->strval, '\n');
 						break;
 					case 'r':
-						appendStringInfoChar(lex->strval,'\r');
+						appendStringInfoChar(lex->strval, '\r');
 						break;
 					case 't':
-						appendStringInfoChar(lex->strval,'\t');
+						appendStringInfoChar(lex->strval, '\t');
 						break;
 					default:
 						/* Not a valid string escape, so error out. */
 						ereport(ERROR,
 								(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
 								 errmsg("invalid input syntax for type json"),
-								 errdetail("Escape sequence \"\\%s\" is invalid.",
-										   extract_mb_char(s)),
+							errdetail("Escape sequence \"\\%s\" is invalid.",
+									  extract_mb_char(s)),
 								 report_json_context(lex)));
 				}
 			}
 			else if (strchr("\"\\/bfnrt", *s) == NULL)
-            {
-				/* 
+			{
+				/*
 				 * Simpler processing if we're not bothered about de-escaping
 				 */
-                lex->token_terminator = s + pg_mblen(s);
-                ereport(ERROR,
-                        (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
-                         errmsg("invalid input syntax for type json"),
-                         errdetail("Escape sequence \"\\%s\" is invalid.",
-                                   extract_mb_char(s)),
-                         report_json_context(lex)));
-            }
+				lex->token_terminator = s + pg_mblen(s);
+				ereport(ERROR,
+						(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+						 errmsg("invalid input syntax for type json"),
+						 errdetail("Escape sequence \"\\%s\" is invalid.",
+								   extract_mb_char(s)),
+						 report_json_context(lex)));
+			}
 
 		}
 		else if (lex->strval != NULL)
 		{
-			appendStringInfoChar(lex->strval,*s);
+			appendStringInfoChar(lex->strval, *s);
 		}
 
 	}
  * (1) An optional minus sign ('-').
  *
  * (2) Either a single '0', or a string of one or more digits that does not
- *     begin with a '0'.
+ *	   begin with a '0'.
  *
  * (3) An optional decimal part, consisting of a period ('.') followed by
- *     one or more digits.  (Note: While this part can be omitted
- *     completely, it's not OK to have only the decimal point without
- *     any digits afterwards.)
+ *	   one or more digits.	(Note: While this part can be omitted
+ *	   completely, it's not OK to have only the decimal point without
+ *	   any digits afterwards.)
  *
  * (4) An optional exponent part, consisting of 'e' or 'E', optionally
- *     followed by '+' or '-', followed by one or more digits.  (Note:
- *     As with the decimal part, if 'e' or 'E' is present, it must be
- *     followed by at least one digit.)
+ *	   followed by '+' or '-', followed by one or more digits.	(Note:
+ *	   As with the decimal part, if 'e' or 'E' is present, it must be
+ *	   followed by at least one digit.)
  *
  * The 's' argument to this function points to the ostensible beginning
  * of part 2 - i.e. the character after any optional minus sign, and the
 static void
 json_lex_number(JsonLexContext *lex, char *s)
 {
-	bool	error = false;
-	char   *p;
+	bool		error = false;
+	char	   *p;
 
 	/* Part (1): leading sign indicator. */
 	/* Caller already did this for us; so do nothing. */
 	}
 
 	/*
-	 * Check for trailing garbage.  As in json_lex(), any alphanumeric stuff
+	 * Check for trailing garbage.	As in json_lex(), any alphanumeric stuff
 	 * here should be considered part of the token for error-reporting
 	 * purposes.
 	 */
 				ereport(ERROR,
 						(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
 						 errmsg("invalid input syntax for type json"),
-						 errdetail("Expected \",\" or \"]\", but found \"%s\".",
-								   token),
+					  errdetail("Expected \",\" or \"]\", but found \"%s\".",
+								token),
 						 report_json_context(lex)));
 				break;
 			case JSON_PARSE_OBJECT_START:
 				ereport(ERROR,
 						(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
 						 errmsg("invalid input syntax for type json"),
-						 errdetail("Expected string or \"}\", but found \"%s\".",
-								   token),
+					 errdetail("Expected string or \"}\", but found \"%s\".",
+							   token),
 						 report_json_context(lex)));
 				break;
 			case JSON_PARSE_OBJECT_LABEL:
 				ereport(ERROR,
 						(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
 						 errmsg("invalid input syntax for type json"),
-						 errdetail("Expected \",\" or \"}\", but found \"%s\".",
-								   token),
+					  errdetail("Expected \",\" or \"}\", but found \"%s\".",
+								token),
 						 report_json_context(lex)));
 				break;
 			case JSON_PARSE_OBJECT_COMMA:

src/backend/utils/adt/jsonfuncs.c

 	Oid			typiofunc;
 	Oid			typioparam;
 	FmgrInfo	proc;
-}	ColumnIOData;
+} ColumnIOData;
 
 typedef struct RecordIOData
 {
 	int32		record_typmod;
 	int			ncolumns;
 	ColumnIOData columns[1];	/* VARIABLE LENGTH ARRAY */
-}	RecordIOData;
+} RecordIOData;
 
 /* state for populate_recordset */
 typedef struct populateRecordsetState
 {
 	JsonLexContext *lex;
-	HTAB *json_hash;
-	char * saved_scalar;
+	HTAB	   *json_hash;
+	char	   *saved_scalar;
 	Tuplestorestate *tuple_store;
-	TupleDesc ret_tdesc;
+	TupleDesc	ret_tdesc;
 	HeapTupleHeader rec;
 	RecordIOData *my_extra;
-	MemoryContext fn_mcxt; /* used to stash IO funcs */
-} populateRecordsetState, *PopulateRecordsetState;
+	MemoryContext fn_mcxt;		/* used to stash IO funcs */
+}	populateRecordsetState, *PopulateRecordsetState;
 
 /*
  * SQL function json_object_keys
 	{
 		text	   *json = PG_GETARG_TEXT_P(0);
 		char	   *jsonstr = text_to_cstring(json);
-		JsonLexContext *lex = makeJsonLexContext(jsonstr,true);
+		JsonLexContext *lex = makeJsonLexContext(jsonstr, true);
 		JsonSemAction sem;
 
 		MemoryContext oldcontext;
 {
 	GetState	_state = (GetState) state;
 	bool		get_next = false;
-	int lex_level = _state->lex->lex_level;
+	int			lex_level = _state->lex->lex_level;
 
-	if ( lex_level == 1 && _state->search_type == JSON_SEARCH_OBJECT &&
+	if (lex_level == 1 && _state->search_type == JSON_SEARCH_OBJECT &&
 		strcmp(fname, _state->search_term) == 0)
 	{
 		get_next = true;
 {
 	GetState	_state = (GetState) state;
 	bool		get_last = false;
-	int         lex_level = _state->lex->lex_level;
+	int			lex_level = _state->lex->lex_level;
 
 	if (lex_level == 1 && _state->search_type == JSON_SEARCH_OBJECT &&
 		strcmp(fname, _state->search_term) == 0)
 get_array_start(void *state)
 {
 	GetState	_state = (GetState) state;
-	int lex_level = _state->lex->lex_level;
+	int			lex_level = _state->lex->lex_level;
 
 	if (lex_level == 0 && _state->search_type == JSON_SEARCH_OBJECT)
 		ereport(ERROR,
 				(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
 				 errmsg("cannot call json_get(fieldname) on a non-object")));
-	else if (_state->search_type == JSON_SEARCH_PATH && 
+	else if (_state->search_type == JSON_SEARCH_PATH &&
 			 lex_level <= _state->npath)
 		_state->array_level_index[lex_level] = -1;
 }
 {
 	GetState	_state = (GetState) state;
 	bool		get_next = false;
-	int         lex_level = _state->lex->lex_level;
+	int			lex_level = _state->lex->lex_level;
 
 	if (lex_level == 1 && _state->search_type == JSON_SEARCH_ARRAY)
 	{
 {
 	GetState	_state = (GetState) state;
 	bool		get_last = false;
-	int         lex_level = _state->lex->lex_level;
+	int			lex_level = _state->lex->lex_level;
 
 	if (lex_level == 1 && _state->search_type == JSON_SEARCH_ARRAY &&
 		_state->array_index == _state->search_index)
 	char	   *jsonstr = text_to_cstring(json);
 
 	AlenState	state;
-	JsonLexContext *lex = makeJsonLexContext(jsonstr,false);
+	JsonLexContext *lex = makeJsonLexContext(jsonstr, false);
 	JsonSemAction sem;
 
 	state = palloc0(sizeof(alenState));
 	json_hash = get_json_object_as_hash(jsonstr, "json_populate_record");
 
 	/*
-	 * if the input json is empty, we can only skip the rest if we were
-	 * passed in a non-null record, since otherwise there may be issues with
-	 * domain nulls.
+	 * if the input json is empty, we can only skip the rest if we were passed
+	 * in a non-null record, since otherwise there may be issues with domain
+	 * nulls.
 	 */
 	if (hash_get_num_entries(json_hash) == 0 && rec)
 		PG_RETURN_POINTER(rec);
 
 	rsi->returnMode = SFRM_Materialize;
 
-	/* 
+	/*
 	 * get the tupdesc from the result set info - it must be a record type
 	 * because we already checked that arg1 is a record type.
 	 */
 
 }
 
-static void populate_recordset_object_start(void *state)
+static void
+populate_recordset_object_start(void *state)
 {
 	PopulateRecordsetState _state = (PopulateRecordsetState) state;
-	int lex_level = _state->lex->lex_level;
+	int			lex_level = _state->lex->lex_level;
 	HASHCTL		ctl;
 
 	if (lex_level == 0)
 		ereport(ERROR,
 				(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
-			   errmsg("cannot call populate_recordset on an object")));
+				 errmsg("cannot call populate_recordset on an object")));
 	else if (lex_level > 1)
 		ereport(ERROR,
 				(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
-			   errmsg("cannot call populate_recordset with nested objects")));
+			  errmsg("cannot call populate_recordset with nested objects")));
 
 	/* set up a new hash for this entry */
 	memset(&ctl, 0, sizeof(ctl));
 									HASH_ELEM | HASH_CONTEXT);
 }
 
-static void populate_recordset_object_end(void *state)
+static void
+populate_recordset_object_end(void *state)
 {
 	PopulateRecordsetState _state = (PopulateRecordsetState) state;
-	HTAB *json_hash = _state->json_hash;
-	Datum *values;
-	bool *nulls;
-	char fname[NAMEDATALEN];
-	int i;
+	HTAB	   *json_hash = _state->json_hash;
+	Datum	   *values;
+	bool	   *nulls;
+	char		fname[NAMEDATALEN];
+	int			i;
 	RecordIOData *my_extra = _state->my_extra;
-	int ncolumns = my_extra->ncolumns;
-	TupleDesc tupdesc = _state->ret_tdesc;
+	int			ncolumns = my_extra->ncolumns;
+	TupleDesc	tupdesc = _state->ret_tdesc;
 	JsonHashEntry hashentry;
 	HeapTupleHeader rec = _state->rec;
 	HeapTuple	rettuple;
 	}
 
 	rettuple = heap_form_tuple(tupdesc, values, nulls);
-	
+
 	tuplestore_puttuple(_state->tuple_store, rettuple);
 
 	hash_destroy(json_hash);
 }
 
-static void populate_recordset_array_element_start(void *state, bool isnull)
+static void
+populate_recordset_array_element_start(void *state, bool isnull)
 {
 	PopulateRecordsetState _state = (PopulateRecordsetState) state;
-	if (_state->lex->lex_level == 1 && 
+
+	if (_state->lex->lex_level == 1 &&
 		_state->lex->token_type != JSON_TOKEN_OBJECT_START)
 		ereport(ERROR,
 				(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
-			   errmsg("must call populate_recordset on an array of objects")));
+			 errmsg("must call populate_recordset on an array of objects")));
 }
 
-static void populate_recordset_array_start(void *state)
+static void
+populate_recordset_array_start(void *state)
 {
 	PopulateRecordsetState _state = (PopulateRecordsetState) state;
+
 	if (_state->lex->lex_level != 0)
 		ereport(ERROR,
 				(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
 			   errmsg("cannot call populate_recordset with nested arrays")));
 }
 
-static void populate_recordset_scalar(void *state, char *token, JsonTokenType tokentype)
+static void
+populate_recordset_scalar(void *state, char *token, JsonTokenType tokentype)
 {
 	PopulateRecordsetState _state = (PopulateRecordsetState) state;
 
 	if (_state->lex->lex_level == 0)
 		ereport(ERROR,
 				(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
-			   errmsg("cannot call populate_recordset on a scalar")));
+				 errmsg("cannot call populate_recordset on a scalar")));
 
 	_state->saved_scalar = token;
 }
 	{
 		ereport(ERROR,
 				(errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
-		errmsg("cannot call populate_recordset on a nested object")));
+			   errmsg("cannot call populate_recordset on a nested object")));
 	}
 }
 
 	hashentry->isnull = isnull;
 	hashentry->val = _state->saved_scalar;
 }
-

src/include/utils/jsonapi.h

 	char	   *token_terminator;
 	char	   *prev_token_terminator;
 	JsonTokenType token_type;
-	int         lex_level;
+	int			lex_level;
 	int			line_number;
 	char	   *line_start;
 	StringInfo	strval;
-}	JsonLexContext;
+} JsonLexContext;
 
 typedef void (*json_struct_action) (void *state);
 typedef void (*json_ofield_action) (void *state, char *fname, bool isnull);
  * points to. If the action pointers are NULL the parser
  * does nothing and just continues.
  */
-extern void pg_parse_json(JsonLexContext * lex, JsonSemAction sem);
+extern void pg_parse_json(JsonLexContext *lex, JsonSemAction sem);
 
 /* constructor for JsonLexContext, with or without strval element */
-extern JsonLexContext *makeJsonLexContext(char * json, bool need_escapes);
+extern JsonLexContext *makeJsonLexContext(char *json, bool need_escapes);
 
 #endif   /* JSONAPI_H */
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.