Restrict char values to the ASCII range.
picrin accepts the code `(char->integer (integer->char 128))`, but the result is machine-dependent, since the C `char` type may be either signed or unsigned, as permitted by the C standard. Unfortunately, the result in the signed case is incompatible with Unicode-aware implementations. For interoperability, we should reject values beyond the ASCII range for now. Ref: #211 Signed-off-by: Masanori Ogino <masanori.ogino@gmail.com>
This commit is contained in:
parent
917704e9a4
commit
993d1813a1
|
@ -22,6 +22,7 @@ pic_char_char_to_integer(pic_state *pic)
|
||||||
char c;
|
char c;
|
||||||
|
|
||||||
pic_get_args(pic, "c", &c);
|
pic_get_args(pic, "c", &c);
|
||||||
|
assert((c & 0x80) == 0);
|
||||||
|
|
||||||
return pic_int_value(pic, c);
|
return pic_int_value(pic, c);
|
||||||
}
|
}
|
||||||
|
@ -33,7 +34,7 @@ pic_char_integer_to_char(pic_state *pic)
|
||||||
|
|
||||||
pic_get_args(pic, "i", &i);
|
pic_get_args(pic, "i", &i);
|
||||||
|
|
||||||
if (i < 0 || i > 255) {
|
if (i < 0 || i > 127) {
|
||||||
pic_error(pic, "integer->char: integer out of char range", 1, pic_int_value(pic, i));
|
pic_error(pic, "integer->char: integer out of char range", 1, pic_int_value(pic, i));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue