Sun, 22 Dec 2024 11:10:11 +0100
fix missing errno.h include
/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
 *
 * Copyright 2024 Mike Becker, Olaf Wintermann All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright
 *      notice, this list of conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright
 *      notice, this list of conditions and the following disclaimer in the
 *      documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "cx/json.h"

#include <string.h>
#include <ctype.h>
#include <assert.h>
#include <stdio.h>
#include <errno.h>

/*
 * RFC 8259
 * https://tools.ietf.org/html/rfc8259
 */

static CxJsonValue cx_json_value_nothing = {.type = CX_JSON_NOTHING};

static void token_destroy(CxJsonToken *token) {
    if (token->allocated) {
        cx_strfree(&token->content);
    }
}

static int num_isexp(const char *content, size_t length, size_t pos) {
    if (pos >= length) {
        return 0;
    }

    int ok = 0;
    for (size_t i = pos; i < length; i++) {
        char c = content[i];
        if (isdigit(c)) {
            ok = 1;
        } else if (i == pos) {
            if (!(c == '+' || c == '-')) {
                return 0;
            }
        } else {
            return 0;
        }
    }

    return ok;
}

static CxJsonTokenType token_numbertype(const char *content, size_t length) {
    if (length == 0) return CX_JSON_TOKEN_ERROR;

    if (content[0] != '-' && !isdigit(content[0])) {
        return CX_JSON_TOKEN_ERROR;
    }

    CxJsonTokenType type = CX_JSON_TOKEN_INTEGER;
    for (size_t i = 1; i < length; i++) {
        if (content[i] == '.') {
            if (type == CX_JSON_TOKEN_NUMBER) {
                return CX_JSON_TOKEN_ERROR; // more than one decimal separator
            }
            type = CX_JSON_TOKEN_NUMBER;
        } else if (content[i] == 'e' || content[i] == 'E') {
            return num_isexp(content, length, i + 1) ?
                   CX_JSON_TOKEN_NUMBER : CX_JSON_TOKEN_ERROR;
        } else if (!isdigit(content[i])) {
            return CX_JSON_TOKEN_ERROR; // char is not a digit, decimal separator or exponent sep
        }
    }
    return type;
}

static CxJsonToken token_create(CxJson *json, bool isstring, size_t start, size_t end) {
    cxmutstr str = cx_mutstrn(json->buffer.space + start, end - start);
    bool allocated = false;
    if (json->uncompleted.tokentype != CX_JSON_NO_TOKEN) {
        allocated = true;
        str = cx_strcat_m(json->uncompleted.content, 1, str);
        if (str.ptr == NULL) {
            return (CxJsonToken){CX_JSON_NO_TOKEN, false, {NULL, 0}};
        }
    }
    json->uncompleted = (CxJsonToken){0};
    CxJsonTokenType ttype;
    if (isstring) {
        ttype = CX_JSON_TOKEN_STRING;
    } else {
        cxstring s = cx_strcast(str);
        if (!cx_strcmp(s, CX_STR("true"))
                || !cx_strcmp(s, CX_STR("false"))
                || !cx_strcmp(s, CX_STR("null"))) {
            ttype = CX_JSON_TOKEN_LITERAL;
        } else {
            ttype = token_numbertype(str.ptr, str.length);
        }
    }
    if (ttype == CX_JSON_TOKEN_ERROR) {
        if (allocated) {
            cx_strfree(&str);
        }
        return (CxJsonToken){CX_JSON_TOKEN_ERROR, false, {NULL, 0}};
    }
    return (CxJsonToken){ttype, allocated, str};
}

static CxJsonTokenType char2ttype(char c) {
    switch (c) {
        case '[': {
            return CX_JSON_TOKEN_BEGIN_ARRAY;
        }
        case '{': {
            return CX_JSON_TOKEN_BEGIN_OBJECT;
        }
        case ']': {
            return CX_JSON_TOKEN_END_ARRAY;
        }
        case '}': {
            return CX_JSON_TOKEN_END_OBJECT;
        }
        case ':': {
            return CX_JSON_TOKEN_NAME_SEPARATOR;
        }
        case ',': {
            return CX_JSON_TOKEN_VALUE_SEPARATOR;
        }
        case '"': {
            return CX_JSON_TOKEN_STRING;
        }
        default: {
            if (isspace(c)) {
                return CX_JSON_TOKEN_SPACE;
            }
        }
    }
    return CX_JSON_NO_TOKEN;
}

static enum cx_json_status token_parse_next(CxJson *json, CxJsonToken *result) {
    // check if there is data in the buffer
    if (cxBufferEof(&json->buffer)) {
        return json->uncompleted.tokentype == CX_JSON_NO_TOKEN ?
               CX_JSON_NO_DATA : CX_JSON_INCOMPLETE_DATA;
    }

    // current token type and start index
    CxJsonTokenType ttype = json->uncompleted.tokentype;
    size_t token_start = json->buffer.pos;
    for (size_t i = json->buffer.pos; i < json->buffer.size; i++) {
        char c = json->buffer.space[i];
        if (ttype != CX_JSON_TOKEN_STRING) {
            // currently non-string token
            CxJsonTokenType ctype = char2ttype(c);
            // start of new token?
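            // The dispatch below distinguishes four cases: whitespace is
            // skipped, a structural character ('[', ']', '{', '}', ':', ',')
            // becomes a single-char token right away, '"' opens a string,
            // and any other character starts a literal/number token that is
            // finished at the next character which itself starts a token.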
            if (ttype == CX_JSON_NO_TOKEN) {
                if (ctype == CX_JSON_TOKEN_SPACE) {
                    json->buffer.pos++;
                    continue;
                } else if (ctype == CX_JSON_TOKEN_STRING) {
                    // begin string
                    ttype = CX_JSON_TOKEN_STRING;
                    token_start = i;
                } else if (ctype != CX_JSON_NO_TOKEN) {
                    // single-char token
                    json->buffer.pos = i + 1;
                    *result = (CxJsonToken){ctype, false, {NULL, 0}};
                    return CX_JSON_NO_ERROR;
                } else {
                    ttype = CX_JSON_TOKEN_LITERAL; // number or literal
                    token_start = i;
                }
            } else {
                // finish token
                if (ctype != CX_JSON_NO_TOKEN) {
                    *result = token_create(json, false, token_start, i);
                    if (result->tokentype == CX_JSON_NO_TOKEN) {
                        return CX_JSON_BUFFER_ALLOC_FAILED;
                    }
                    if (result->tokentype == CX_JSON_TOKEN_ERROR) {
                        return CX_JSON_FORMAT_ERROR_NUMBER;
                    }
                    json->buffer.pos = i;
                    return CX_JSON_NO_ERROR;
                }
            }
        } else {
            // currently inside a string
            if (json->tokenizer_escape) {
                json->tokenizer_escape = false;
            } else {
                if (c == '"') {
                    *result = token_create(json, true, token_start, i + 1);
                    if (result->tokentype == CX_JSON_NO_TOKEN) {
                        return CX_JSON_BUFFER_ALLOC_FAILED;
                    }
                    json->buffer.pos = i + 1;
                    return CX_JSON_NO_ERROR;
                } else if (c == '\\') {
                    json->tokenizer_escape = true;
                }
            }
        }
    }

    if (ttype != CX_JSON_NO_TOKEN) {
        // uncompleted token
        size_t uncompleted_len = json->buffer.size - token_start;
        if (json->uncompleted.tokentype == CX_JSON_NO_TOKEN) {
            // current token is uncompleted
            // save current token content
            CxJsonToken uncompleted = {
                    ttype, true,
                    cx_strdup(cx_strn(json->buffer.space + token_start, uncompleted_len))
            };
            if (uncompleted.content.ptr == NULL) {
                return CX_JSON_BUFFER_ALLOC_FAILED;
            }
            json->uncompleted = uncompleted;
        } else {
            // previously we also had an uncompleted token
            // combine the uncompleted token with the current token
            assert(json->uncompleted.allocated);
            cxmutstr str = cx_strcat_m(json->uncompleted.content, 1,
                    cx_strn(json->buffer.space + token_start, uncompleted_len));
            if (str.ptr == NULL) {
                return CX_JSON_BUFFER_ALLOC_FAILED;
            }
            json->uncompleted.content = str;
        }
        // advance the buffer position - we saved the stuff in the uncompleted token
        json->buffer.pos += uncompleted_len;
    }

    return CX_JSON_INCOMPLETE_DATA;
}

static cxmutstr unescape_string(const CxAllocator *a, cxmutstr str) {
    // TODO: support more escape sequences
    // we know that the unescaped string will be shorter by at least 2 chars
    cxmutstr result;
    result.length = 0;
    result.ptr = cxMalloc(a, str.length - 1);
    if (result.ptr == NULL) {
        return result;
    }

    bool u = false;
    for (size_t i = 1; i < str.length - 1; i++) {
        char c = str.ptr[i];
        if (u) {
            u = false;
            if (c == 'n') {
                c = '\n';
            } else if (c == 't') {
                c = '\t';
            }
            result.ptr[result.length++] = c;
        } else {
            if (c == '\\') {
                u = true;
            } else {
                result.ptr[result.length++] = c;
            }
        }
    }
    result.ptr[result.length] = 0;

    return result;
}

static int parse_number(cxmutstr str, void *value, bool asint) {
    char *endptr = NULL;
    if (str.length > 30) {
        return 1;
    }
    // the buffer guarantees that we are working on a copied string
    char c = str.ptr[str.length];
    str.ptr[str.length] = 0;
    if (asint) {
        errno = 0;
        long long v = strtoll(str.ptr, &endptr, 10);
        if (errno == ERANGE) {
            return 1;
        }
        *((int64_t*)value) = (int64_t) v;
    } else {
        // TODO: proper JSON spec number parser
        // TODO: also return an error when loss of precision is high
        double v = strtod(str.ptr, &endptr);
        *((double*)value) = v;
    }
    // recover from the hack
    str.ptr[str.length] = c;
    return endptr != &str.ptr[str.length];
}

static CxJsonValue* create_json_value(CxJson *json, CxJsonValueType type) {
    CxJsonValue *v = cxMalloc(json->allocator, sizeof(CxJsonValue));
    if (v == NULL) {
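        // out of memory - json_parse() reports this as CX_JSON_VALUE_ALLOC_FAILED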
        return NULL;
    }

    // initialize the value
    if (type == CX_JSON_ARRAY) {
        cx_array_initialize_a(json->allocator, v->value.array.array, 16);
        if (v->value.array.array == NULL) {
            cxFree(json->allocator, v);
            return NULL;
        }
    } else if (type == CX_JSON_OBJECT) {
        cx_array_initialize_a(json->allocator, v->value.object.values, 16);
        if (v->value.object.values == NULL) {
            cxFree(json->allocator, v);
            return NULL;
        }
    } else {
        memset(v, 0, sizeof(CxJsonValue));
    }
    v->type = type;
    v->allocator = json->allocator;

    // add the new value to a possible parent
    CxArrayReallocator value_realloc = cx_array_reallocator(json->allocator, NULL);
    if (json->vbuf_size > 0) {
        CxJsonValue *parent = json->vbuf[json->vbuf_size - 1];
        if (parent->type == CX_JSON_ARRAY) {
            cx_array_simple_add_a(&value_realloc, parent->value.array.array, v);
        } else if (parent->type == CX_JSON_OBJECT) {
            assert(parent->value.object.values_size > 0);
            assert(parent->value.object.values[parent->value.object.values_size - 1].value == NULL);
            parent->value.object.values[parent->value.object.values_size - 1].value = v;
        } else {
            assert(false);
        }
    }

    // add the new value to the stack, if it is an array or object
    if (type == CX_JSON_ARRAY || type == CX_JSON_OBJECT) {
        CxArrayReallocator vbuf_realloc = cx_array_reallocator(NULL, json->vbuf_internal);
        if (cx_array_simple_add_a(&vbuf_realloc, json->vbuf, v)) {
            cxFree(json->allocator, v);
            return NULL;
        }
    }

    // if currently no value is parsed, this is now the value of interest
    if (json->parsed == NULL) {
        json->parsed = v;
    }

    return v;
}

#define JP_STATE_VALUE_BEGIN            0
#define JP_STATE_VALUE_END              10
#define JP_STATE_VALUE_BEGIN_OBJ        1
#define JP_STATE_OBJ_SEP_OR_CLOSE       11
#define JP_STATE_VALUE_BEGIN_AR         2
#define JP_STATE_ARRAY_SEP_OR_CLOSE     12
#define JP_STATE_OBJ_NAME_OR_CLOSE      5
#define JP_STATE_OBJ_NAME               6
#define JP_STATE_OBJ_COLON              7

void cxJsonInit(CxJson *json, const CxAllocator *allocator) {
    if (allocator == NULL) {
        allocator = cxDefaultAllocator;
    }

    memset(json, 0, sizeof(CxJson));
    json->allocator = allocator;

    json->states = json->states_internal;
    json->states_capacity = cx_nmemb(json->states_internal);
    json->states[0] = JP_STATE_VALUE_BEGIN;
    json->states_size = 1;

    json->vbuf = json->vbuf_internal;
    json->vbuf_capacity = cx_nmemb(json->vbuf_internal);

    cxBufferInit(&json->buffer, NULL, 256, NULL, CX_BUFFER_AUTO_EXTEND);
}

void cxJsonDestroy(CxJson *json) {
    cxBufferDestroy(&json->buffer);
    if (json->states != json->states_internal) {
        free(json->states);
    }
    if (json->vbuf != json->vbuf_internal) {
        free(json->vbuf);
    }
    cxJsonValueFree(json->parsed);
    json->parsed = NULL;
}

int cxJsonFilln(CxJson *json, const char *buf, size_t size) {
    // we use the UCX buffer to write the data
    // but reset the position immediately to enable parsing
    size_t old_pos = json->buffer.pos;
    cxBufferSeek(&json->buffer, 0, SEEK_END);
    size_t written = cxBufferWrite(buf, 1, size, &json->buffer);
    if (0 == cxBufferTerminate(&json->buffer)) {
        written++;
    }
    json->buffer.pos = old_pos;
    return written != size + 1;
}

static void json_add_state(CxJson *json, int state) {
    // we have guaranteed the necessary space with cx_array_simple_reserve()
    // therefore, we can safely add the state in the simplest way possible
    json->states[json->states_size++] = state;
}

#define return_rec(code) \
    token_destroy(&token); \
    return code

static enum cx_json_status json_parse(CxJson *json) {
    // Reserve a pointer for a possibly read value
    CxJsonValue *vbuf = NULL;

    // grab the next token
    CxJsonToken token;
    {
        enum cx_json_status ret = token_parse_next(json, &token);
        if (ret != CX_JSON_NO_ERROR) {
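            // no (complete) token available yet, or the tokenizer reported an error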
            return ret;
        }
    }

    // pop the current state
    assert(json->states_size > 0);
    int state = json->states[--json->states_size];

    // guarantee that at least two more states fit on the stack
    CxArrayReallocator state_realloc = cx_array_reallocator(NULL, json->states_internal);
    if (cx_array_simple_reserve_a(&state_realloc, json->states, 2)) {
        return CX_JSON_BUFFER_ALLOC_FAILED;
    }

    // 0  JP_STATE_VALUE_BEGIN        value begin
    // 10 JP_STATE_VALUE_END          expect value end
    // 1  JP_STATE_VALUE_BEGIN_OBJ    value begin (inside object)
    // 11 JP_STATE_OBJ_SEP_OR_CLOSE   object, expect separator, objclose
    // 2  JP_STATE_VALUE_BEGIN_AR     value begin (inside array)
    // 12 JP_STATE_ARRAY_SEP_OR_CLOSE array, expect separator or arrayclose
    // 5  JP_STATE_OBJ_NAME_OR_CLOSE  object, expect name or objclose
    // 6  JP_STATE_OBJ_NAME           object, expect name
    // 7  JP_STATE_OBJ_COLON          object, expect ':'
    if (state < 3) {
        // push expected end state to the stack
        json_add_state(json, 10 + state);

        switch (token.tokentype) {
            case CX_JSON_TOKEN_BEGIN_ARRAY: {
                if (create_json_value(json, CX_JSON_ARRAY) == NULL) {
                    return_rec(CX_JSON_VALUE_ALLOC_FAILED);
                }
                json_add_state(json, JP_STATE_VALUE_BEGIN_AR);
                return_rec(CX_JSON_NO_ERROR);
            }
            case CX_JSON_TOKEN_BEGIN_OBJECT: {
                if (create_json_value(json, CX_JSON_OBJECT) == NULL) {
                    return_rec(CX_JSON_VALUE_ALLOC_FAILED);
                }
                json_add_state(json, JP_STATE_OBJ_NAME_OR_CLOSE);
                return_rec(CX_JSON_NO_ERROR);
            }
            case CX_JSON_TOKEN_STRING: {
                if ((vbuf = create_json_value(json, CX_JSON_STRING)) == NULL) {
                    return_rec(CX_JSON_VALUE_ALLOC_FAILED);
                }
                cxmutstr str = unescape_string(json->allocator, token.content);
                if (str.ptr == NULL) {
                    return_rec(CX_JSON_VALUE_ALLOC_FAILED);
                }
                vbuf->value.string = str;
                return_rec(CX_JSON_NO_ERROR);
            }
            case CX_JSON_TOKEN_INTEGER:
            case CX_JSON_TOKEN_NUMBER: {
                int type = token.tokentype == CX_JSON_TOKEN_INTEGER ?
                        CX_JSON_INTEGER : CX_JSON_NUMBER;
                if (NULL == (vbuf = create_json_value(json, type))) {
                    return_rec(CX_JSON_VALUE_ALLOC_FAILED);
                }
                if (parse_number(token.content, &vbuf->value, type == CX_JSON_INTEGER)) {
                    return_rec(CX_JSON_FORMAT_ERROR_NUMBER);
                }
                return_rec(CX_JSON_NO_ERROR);
            }
            case CX_JSON_TOKEN_LITERAL: {
                if ((vbuf = create_json_value(json, CX_JSON_LITERAL)) == NULL) {
                    return_rec(CX_JSON_VALUE_ALLOC_FAILED);
                }
                if (0 == cx_strcmp(cx_strcast(token.content), cx_str("true"))) {
                    vbuf->value.literal = CX_JSON_TRUE;
                } else if (0 == cx_strcmp(cx_strcast(token.content), cx_str("false"))) {
                    vbuf->value.literal = CX_JSON_FALSE;
                } else {
                    vbuf->value.literal = CX_JSON_NULL;
                }
                return_rec(CX_JSON_NO_ERROR);
            }
            default: {
                return_rec(CX_JSON_FORMAT_ERROR_UNEXPECTED_TOKEN);
            }
        }
    } else if (state == JP_STATE_ARRAY_SEP_OR_CLOSE) {
        // expect ',' or ']'
        if (token.tokentype == CX_JSON_TOKEN_VALUE_SEPARATOR) {
            json_add_state(json, JP_STATE_VALUE_BEGIN_AR);
            return_rec(CX_JSON_NO_ERROR);
        } else if (token.tokentype == CX_JSON_TOKEN_END_ARRAY) {
            // discard the array from the value buffer
            json->vbuf_size--;
            return_rec(CX_JSON_NO_ERROR);
        } else {
            return_rec(CX_JSON_FORMAT_ERROR_UNEXPECTED_TOKEN);
        }
    } else if (state == JP_STATE_OBJ_NAME_OR_CLOSE || state == JP_STATE_OBJ_NAME) {
        if (state == JP_STATE_OBJ_NAME_OR_CLOSE && token.tokentype == CX_JSON_TOKEN_END_OBJECT) {
            // discard the obj from the value buffer
            json->vbuf_size--;
            return_rec(CX_JSON_NO_ERROR);
        } else {
            // expect string
            if (token.tokentype != CX_JSON_TOKEN_STRING) {
                return_rec(CX_JSON_FORMAT_ERROR_UNEXPECTED_TOKEN);
            }
            // add new entry
            cxmutstr name = unescape_string(json->allocator, token.content);
            if (name.ptr == NULL) {
                return_rec(CX_JSON_VALUE_ALLOC_FAILED);
            }
            CxJsonObjValue kv = {name, NULL};
            assert(json->vbuf_size > 0);
            CxJsonValue *parent = json->vbuf[json->vbuf_size - 1];
            assert(parent != NULL);
            assert(parent->type == CX_JSON_OBJECT);
            CxArrayReallocator value_realloc = cx_array_reallocator(json->allocator, NULL);
            if (cx_array_simple_add_a(&value_realloc, parent->value.object.values, kv)) {
                return_rec(CX_JSON_VALUE_ALLOC_FAILED);
            }
            // next state
            json_add_state(json, JP_STATE_OBJ_COLON);
            return_rec(CX_JSON_NO_ERROR);
        }
    } else if (state == JP_STATE_OBJ_COLON) {
        // expect ':'
        if (token.tokentype != CX_JSON_TOKEN_NAME_SEPARATOR) {
            return_rec(CX_JSON_FORMAT_ERROR_UNEXPECTED_TOKEN);
        }
        // next state
        json_add_state(json, JP_STATE_VALUE_BEGIN_OBJ);
        return_rec(CX_JSON_NO_ERROR);
    } else if (state == JP_STATE_OBJ_SEP_OR_CLOSE) {
        // expect ',' or '}'
        if (token.tokentype == CX_JSON_TOKEN_VALUE_SEPARATOR) {
            json_add_state(json, JP_STATE_OBJ_NAME);
            return_rec(CX_JSON_NO_ERROR);
        } else if (token.tokentype == CX_JSON_TOKEN_END_OBJECT) {
            // discard the obj from the value buffer
            json->vbuf_size--;
            return_rec(CX_JSON_NO_ERROR);
        } else {
            return_rec(CX_JSON_FORMAT_ERROR_UNEXPECTED_TOKEN);
        }
    } else {
        // should be unreachable
        assert(false);
        return_rec(-1);
    }
}
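/*
 * For illustration: parsing the document {"a":[1]} makes the state stack
 * evolve as follows (bottom to top, using the JP_STATE_* numbers above).
 *
 *   start        0           expect a value
 *   read '{'     10 5        object created, expect name or '}'
 *   read "a"     10 7        entry added, expect ':'
 *   read ':'     10 1        expect the member value
 *   read '['     10 11 2     array created, expect a value
 *   read 1       10 11 12    integer stored, expect ',' or ']'
 *   read ']'     10 11       array popped from the value buffer
 *   read '}'     10          object popped, final state reached
 *
 * cxJsonNext() below drives json_parse() token by token until only
 * JP_STATE_VALUE_END remains on the stack.
 */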
CxJsonStatus cxJsonNext(CxJson *json, CxJsonValue **value) {
    // initialize output value
    *value = &cx_json_value_nothing;

    // parse data
    CxJsonStatus result;
    do {
        result = json_parse(json);
        cxBufferShiftLeft(&json->buffer, json->buffer.pos);
        if (result == CX_JSON_NO_ERROR && json->states_size == 1) {
            // final state reached
            assert(json->states[0] == JP_STATE_VALUE_END);
            assert(json->vbuf_size == 0);

            // write output value
            *value = json->parsed;
            json->parsed = NULL;

            // re-initialize state machine
            json->states[0] = JP_STATE_VALUE_BEGIN;

            return CX_JSON_NO_ERROR;
        }
    } while (result == CX_JSON_NO_ERROR);

    // the parser might think there is no data
    // but when we did not reach the final state,
    // we know that there must be more to come
    if (result == CX_JSON_NO_DATA && json->states_size > 1) {
        return CX_JSON_INCOMPLETE_DATA;
    }

    return result;
}

void cxJsonValueFree(CxJsonValue *value) {
    if (value == NULL || value->type == CX_JSON_NOTHING) return;
    switch (value->type) {
        case CX_JSON_OBJECT: {
            CxJsonObject obj = value->value.object;
            for (size_t i = 0; i < obj.values_size; i++) {
                cxJsonValueFree(obj.values[i].value);
                cx_strfree_a(value->allocator, &obj.values[i].name);
            }
            cxFree(value->allocator, obj.values);
            break;
        }
        case CX_JSON_ARRAY: {
            CxJsonArray array = value->value.array;
            for (size_t i = 0; i < array.array_size; i++) {
                cxJsonValueFree(array.array[i]);
            }
            cxFree(value->allocator, array.array);
            break;
        }
        case CX_JSON_STRING: {
            cxFree(value->allocator, value->value.string.ptr);
            break;
        }
        default: {
            break;
        }
    }
    cxFree(value->allocator, value);
}

CxJsonValue *cxJsonArrGet(const CxJsonValue *value, size_t index) {
    if (index >= value->value.array.array_size) {
        return &cx_json_value_nothing;
    }
    return value->value.array.array[index];
}

static void *cx_json_iter_current(const void *it) {
    const CxIterator *iter = it;
    return *(CxJsonValue**)iter->elem_handle;
}

static bool cx_json_iter_valid(const void *it) {
    const CxIterator *iter = it;
    return iter->index < iter->elem_count;
}

static void cx_json_iter_next(void *it) {
    CxIterator *iter = it;
    iter->index++;
    iter->elem_handle = (char *) iter->elem_handle + sizeof(void *);
}

CxIterator cxJsonArrIter(const CxJsonValue *value) {
    CxIterator iter;
    iter.index = 0;
    iter.elem_count = value->value.array.array_size;
    iter.src_handle.m = value->value.array.array;
    iter.elem_handle = iter.src_handle.m;
    iter.elem_size = sizeof(CxJsonValue*);
    iter.base.valid = cx_json_iter_valid;
    iter.base.current = cx_json_iter_current;
    iter.base.next = cx_json_iter_next;
    iter.base.remove = false;
    iter.base.mutating = false;
    return iter;
}

CxJsonValue *cx_json_obj_get_cxstr(const CxJsonValue *value, cxstring name) {
    const CxJsonObject *obj = &(value->value.object);
    // TODO: think about sorting the object so that we can use binary search here
    for (size_t i = 0; i < obj->values_size; i++) {
        if (0 == cx_strcmp(name, cx_strcast(obj->values[i].name))) {
            return obj->values[i].value;
        }
    }
    return &cx_json_value_nothing;
}
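/*
 * Minimal usage sketch (illustration only, not part of this translation unit).
 * It relies only on the functions implemented in this file; the chunk source
 * read_chunk() is a hypothetical placeholder for whatever delivers input.
 *
 *     CxJson json;
 *     cxJsonInit(&json, NULL); // NULL selects cxDefaultAllocator
 *
 *     char chunk[512];
 *     size_t n;
 *     while ((n = read_chunk(chunk, sizeof(chunk))) > 0) {
 *         if (cxJsonFilln(&json, chunk, n)) break; // buffer allocation failed
 *
 *         CxJsonValue *value;
 *         CxJsonStatus status;
 *         while ((status = cxJsonNext(&json, &value)) == CX_JSON_NO_ERROR) {
 *             // process the completed value, then release it
 *             cxJsonValueFree(value);
 *         }
 *         // CX_JSON_NO_DATA / CX_JSON_INCOMPLETE_DATA: feed more input
 *     }
 *
 *     cxJsonDestroy(&json);
 */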