Browse Source

tokenizer.c: Support question mark tokens.

master
Reece H. Dunn 8 years ago
parent
commit
128ceaff6a
3 changed files with 21 additions and 2 deletions
  1. 8
    1
      src/libespeak-ng/tokenizer.c
  2. 1
    0
      src/libespeak-ng/tokenizer.h
  3. 12
    1
      tests/tokenizer.c

+ 8
- 1
src/libespeak-ng/tokenizer.c View File

ESPEAKNG_CTYPE_LOWERCASE,
ESPEAKNG_CTYPE_UPPERCASE,
ESPEAKNG_CTYPE_FULL_STOP,
ESPEAKNG_CTYPE_QUESTION_MARK,
} espeakng_CTYPE;


#define ESPEAKNG_CTYPE_PROPERTY_MASK 0x800000000000C001ull
#define ESPEAKNG_CTYPE_PROPERTY_MASK 0xC00000000000C001ull


// Reference: http://www.unicode.org/reports/tr14/tr14-32.html -- Unicode Line Breaking Algorithm
static espeakng_CTYPE codepoint_type(uint32_t c)
return ESPEAKNG_CTYPE_UPPERCASE;
case ESPEAKNG_PROPERTY_FULL_STOP:
return ESPEAKNG_CTYPE_FULL_STOP;
case ESPEAKNG_PROPERTY_QUESTION_MARK:
return ESPEAKNG_CTYPE_QUESTION_MARK;
}


// 4. Classify the remaining codepoints.
current += utf8_out(c, current);
*current = '\0';
return ESPEAKNG_TOKEN_FULL_STOP;
case ESPEAKNG_CTYPE_QUESTION_MARK:
current += utf8_out(c, current);
*current = '\0';
return ESPEAKNG_TOKEN_QUESTION_MARK;
default:
current += utf8_out(c, current);
*current = '\0';

+ 1
- 0
src/libespeak-ng/tokenizer.h View File

ESPEAKNG_TOKEN_WORD_MIXEDCASE,
ESPEAKNG_TOKEN_WORD_CAPITALIZED,
ESPEAKNG_TOKEN_FULL_STOP,
ESPEAKNG_TOKEN_QUESTION_MARK,
} espeak_ng_TOKEN_TYPE;


espeak_ng_TOKEN_TYPE

+ 12
- 1
tests/tokenizer.c View File

espeak_ng_TOKENIZER *tokenizer = create_tokenizer();
espeak_ng_TEXT_DECODER *decoder = create_text_decoder();


assert(text_decoder_decode_string(decoder, ".", -1, ESPEAKNG_ENCODING_US_ASCII) == ENS_OK);
assert(text_decoder_decode_string(decoder, ". ?", -1, ESPEAKNG_ENCODING_US_ASCII) == ENS_OK);
assert(tokenizer_reset(tokenizer, decoder, ESPEAKNG_TOKENIZER_OPTION_TEXT) == 1);


assert(tokenizer_read_next_token(tokenizer) == ESPEAKNG_TOKEN_FULL_STOP);
assert(tokenizer_get_token_text(tokenizer) != NULL);
assert(strcmp(tokenizer_get_token_text(tokenizer), ".") == 0);


assert(tokenizer_read_next_token(tokenizer) == ESPEAKNG_TOKEN_WHITESPACE);
assert(tokenizer_get_token_text(tokenizer) != NULL);
assert(strcmp(tokenizer_get_token_text(tokenizer), " ") == 0);

assert(tokenizer_read_next_token(tokenizer) == ESPEAKNG_TOKEN_QUESTION_MARK);
assert(tokenizer_get_token_text(tokenizer) != NULL);
assert(strcmp(tokenizer_get_token_text(tokenizer), "?") == 0);

assert(tokenizer_read_next_token(tokenizer) == ESPEAKNG_TOKEN_END_OF_BUFFER);
assert(tokenizer_get_token_text(tokenizer) != NULL);
assert(*tokenizer_get_token_text(tokenizer) == '\0');
case ESPEAKNG_TOKEN_FULL_STOP:
printf("full stop : %s\n", tokenizer_get_token_text(tokenizer));
break;
case ESPEAKNG_TOKEN_QUESTION_MARK:
printf("question mark : %s\n", tokenizer_get_token_text(tokenizer));
break;
}
}



Loading…
Cancel
Save