Revision 9c5a8433cee3f03ecd67de0046c41ed9b01d1993 authored by Eric Blake on 11 June 2018, 21:39:26 UTC, committed by Michael Roth on 21 June 2018, 01:45:07 UTC
Commit a290f085 exposed a latent bug in qemu-img map introduced
during the conversion of block status to be byte-based.  Earlier in
commit 5e344dd8, the internal interface get_block_status() switched
to take byte-based parameters, but still called a sector-based
block layer function; as such, rounding was added in the lone
caller to obey the contract.  However, commit 237d78f8 changed
get_block_status() to truly be byte-based, at which point rounding
to sector boundaries can result in calling bdrv_block_status() with
'bytes == 0' (a coding error) when the boundary between data and a
hole falls mid-sector (true for the past-EOF implicit hole present
in POSIX files).  Fix things by removing the rounding, which is no
longer necessary.
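
A minimal arithmetic sketch of the failure mode (the ALIGN_DOWN macro, the
1000- and 1024-byte sizes, and the standalone main() are illustrative
assumptions for demonstration, not the actual qemu-img map code):

/*
 * Illustrative sketch only: once the data/hole boundary sits mid-sector,
 * aligning the remaining byte count down to sector granularity degenerates
 * to a zero-byte request, which bdrv_block_status() treats as a coding error.
 */
#include <assert.h>
#include <inttypes.h>
#include <stdio.h>

#define SECTOR_SIZE 512
#define ALIGN_DOWN(n, m) ((n) / (m) * (m))

int main(void)
{
    int64_t length = 1024;  /* image length as seen by the block layer */
    int64_t offset = 1000;  /* data ends here; implicit past-EOF hole follows */
    int64_t remaining = length - offset;                  /* 24 bytes */
    int64_t rounded = ALIGN_DOWN(remaining, SECTOR_SIZE); /* 0 bytes */

    printf("remaining=%" PRId64 " rounded=%" PRId64 "\n", remaining, rounded);

    /* Pre-fix behaviour: the rounded 0-byte request violates the
     * 'bytes != 0' expectation.  Post-fix behaviour: the unrounded
     * 24-byte request is perfectly valid for a byte-based interface. */
    assert(remaining > 0);
    return 0;
}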

See also https://bugzilla.redhat.com/1589738

Fixes: 237d78f8
Reported-by: Dan Kenigsberg <danken@redhat.com>
Reported-by: Nir Soffer <nsoffer@redhat.com>
Reported-by: Maor Lipchuk <mlipchuk@redhat.com>
CC: qemu-stable@nongnu.org
Signed-off-by: Eric Blake <eblake@redhat.com>
Signed-off-by: Kevin Wolf <kwolf@redhat.com>
(cherry picked from commit e0b371ed5e2db079051139136fd0478728b6a58f)
Signed-off-by: Michael Roth <mdroth@linux.vnet.ibm.com>
json-streamer.c
/*
 * JSON streaming support
 *
 * Copyright IBM, Corp. 2009
 *
 * Authors:
 *  Anthony Liguori   <aliguori@us.ibm.com>
 *
 * This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
 * See the COPYING.LIB file in the top-level directory.
 *
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "qapi/qmp/json-lexer.h"
#include "qapi/qmp/json-streamer.h"

#define MAX_TOKEN_SIZE (64ULL << 20)  /* 64 MiB of token text per message */
#define MAX_TOKEN_COUNT (2ULL << 20)  /* at most ~2 million tokens per message */
#define MAX_NESTING (1ULL << 10)      /* at most 1024 levels of {} / [] nesting */

static void json_message_free_token(void *token, void *opaque)
{
    g_free(token);
}

static void json_message_free_tokens(JSONMessageParser *parser)
{
    if (parser->tokens) {
        g_queue_foreach(parser->tokens, json_message_free_token, NULL);
        g_queue_free(parser->tokens);
        parser->tokens = NULL;
    }
}

/*
 * Lexer callback: queue up tokens for the message being assembled and,
 * once braces and brackets balance out (or an error or limit is hit),
 * hand the accumulated token queue to parser->emit.
 */
static void json_message_process_token(JSONLexer *lexer, GString *input,
                                       JSONTokenType type, int x, int y)
{
    JSONMessageParser *parser = container_of(lexer, JSONMessageParser, lexer);
    JSONToken *token;
    GQueue *tokens;

    switch (type) {
    case JSON_LCURLY:
        parser->brace_count++;
        break;
    case JSON_RCURLY:
        parser->brace_count--;
        break;
    case JSON_LSQUARE:
        parser->bracket_count++;
        break;
    case JSON_RSQUARE:
        parser->bracket_count--;
        break;
    default:
        break;
    }

    token = g_malloc(sizeof(JSONToken) + input->len + 1);
    token->type = type;
    memcpy(token->str, input->str, input->len);
    token->str[input->len] = 0;
    token->x = x;
    token->y = y;

    parser->token_size += input->len;

    g_queue_push_tail(parser->tokens, token);

    if (type == JSON_ERROR) {
        goto out_emit_bad;
    } else if (parser->brace_count < 0 ||
        parser->bracket_count < 0 ||
        (parser->brace_count == 0 &&
         parser->bracket_count == 0)) {
        goto out_emit;
    } else if (parser->token_size > MAX_TOKEN_SIZE ||
               g_queue_get_length(parser->tokens) > MAX_TOKEN_COUNT ||
               parser->bracket_count + parser->brace_count > MAX_NESTING) {
        /* Security consideration, we limit total memory allocated per object
         * and the maximum recursion depth that a message can force.
         */
        goto out_emit_bad;
    }

    return;

out_emit_bad:
    /*
     * Clear out token list and tell the parser to emit an error
     * indication by passing it a NULL list
     */
    json_message_free_tokens(parser);
out_emit:
    /* send current list of tokens to parser and reset tokenizer */
    parser->brace_count = 0;
    parser->bracket_count = 0;
    /* parser->emit takes ownership of parser->tokens.  Remove our own
     * reference to parser->tokens before handing it out to parser->emit.
     */
    tokens = parser->tokens;
    parser->tokens = g_queue_new();
    parser->emit(parser, tokens);
    parser->token_size = 0;
}

void json_message_parser_init(JSONMessageParser *parser,
                              void (*func)(JSONMessageParser *, GQueue *))
{
    parser->emit = func;
    parser->brace_count = 0;
    parser->bracket_count = 0;
    parser->tokens = g_queue_new();
    parser->token_size = 0;

    json_lexer_init(&parser->lexer, json_message_process_token);
}

int json_message_parser_feed(JSONMessageParser *parser,
                             const char *buffer, size_t size)
{
    return json_lexer_feed(&parser->lexer, buffer, size);
}

int json_message_parser_flush(JSONMessageParser *parser)
{
    return json_lexer_flush(&parser->lexer);
}

void json_message_parser_destroy(JSONMessageParser *parser)
{
    json_lexer_destroy(&parser->lexer);
    json_message_free_tokens(parser);
}
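
A minimal usage sketch of the streamer API above (the helper names
handle_message(), free_token() and example_feed() are hypothetical, and the
snippet assumes the same headers as json-streamer.c).  Per the code above,
the emit callback owns the token queue it receives and gets a NULL queue
when lexing failed or one of the MAX_* limits was exceeded:

/* Hypothetical example, not part of json-streamer.c. */
static void free_token(void *token, void *opaque)
{
    g_free(token);
}

static void handle_message(JSONMessageParser *parser, GQueue *tokens)
{
    if (!tokens) {
        /* Lexing error or one of the MAX_* limits was exceeded. */
        return;
    }
    /* A real consumer would hand the tokens to the JSON parser here.
     * The callback owns the queue, so release it when done. */
    g_queue_foreach(tokens, free_token, NULL);
    g_queue_free(tokens);
}

static void example_feed(const char *buf, size_t len)
{
    JSONMessageParser parser;

    json_message_parser_init(&parser, handle_message);
    json_message_parser_feed(&parser, buf, len);
    json_message_parser_flush(&parser);
    json_message_parser_destroy(&parser);
}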