Unicode handling is still broken; the pattern matching appears to be the culprit. The working hypothesis is that the codepoint-decoding path mishandles invalid UTF-8 input. This commit adds nn_unicode_validate checks in nni_gpu_set and nni_gpu_fill (rejecting invalid UTF-8 with a "invalid utf-8" error), and adds a missing return after luaL_argerror in testLuaArch_unicode_char.
This commit is contained in:
IonutParau 2025-06-27 21:45:35 +02:00
parent 6c2a4fdc11
commit 4b3d6edbd7
2 changed files with 10 additions and 0 deletions

View File

@@ -134,6 +134,11 @@ void nni_gpu_set(nni_gpu *gpu, void *_, nn_component *component, nn_computer *co
return;
}
if(!nn_unicode_validate(s)) {
nn_setCError(computer, "invalid utf-8");
return;
}
int current = 0;
while(s[current]) {
int codepoint = nn_unicode_codepointAt(s, current);
@@ -294,6 +299,10 @@ void nni_gpu_fill(nni_gpu *gpu, void *_, nn_component *component, nn_computer *c
nn_setCError(computer, "bad argument #5 (character expected)");
return;
}
if(!nn_unicode_validate(s)) {
nn_setCError(computer, "invalid utf-8");
return;
}
int codepoint = nn_unicode_codepointAt(s, 0);

View File

@@ -483,6 +483,7 @@ int testLuaArch_unicode_char(lua_State *L) {
if(!lua_isinteger(L, idx)) {
nn_free(codepoints);
luaL_argerror(L, idx, "integer expected");
return 0;
}
codepoints[i] = lua_tointeger(L, idx);
}