Make terminal ui binaries work well everywhere
Here are some screenshots of an emulator TUI program that was compiled on Linux, then scp'd to Windows, Mac, and FreeBSD. https://justine.storage.googleapis.com/blinkenlights-cmdexe.png https://justine.storage.googleapis.com/blinkenlights-imac.png https://justine.storage.googleapis.com/blinkenlights-freebsd.png https://justine.storage.googleapis.com/blinkenlights-lisp.png How is it even possible that we have a nontrivial UI binary that just works on Mac, Windows, Linux, and BSD? Surely a first-ever achievement. Fixed many bugs. Bootstrapped John McCarthy's metacircular evaluator on bare metal in half the size of Altair BASIC (about 2.5kb) and ran it in the emulator for fun and profit.
This commit is contained in:
@ -19,7 +19,8 @@
|
||||
╚─────────────────────────────────────────────────────────────────────────────*/
|
||||
#include "libc/conv/conv.h"
|
||||
#include "libc/str/str.h"
|
||||
#include "libc/str/tpencode.h"
|
||||
#include "libc/str/tpenc.h"
|
||||
#include "libc/str/utf16.h"
|
||||
|
||||
/**
 * Transcodes UTF-16 to UTF-8.
 *
 * @param dst receives the nul-terminated UTF-8 output (written only if
 *     dstsize is nonzero; output is truncated to fit, always terminated)
 * @param dstsize is the byte capacity of dst, including the NUL
 * @param src is the nul-terminated UTF-16 input
 * @return number of bytes written excluding NUL
 */
size_t tprecode16to8(char *dst, size_t dstsize, const char16_t *src) {
  size_t i;
  uint64_t w;
  wint_t x, y;
  i = 0;
  if (dstsize) {
    for (;;) {
      if (!(x = *src++)) break;      /* NUL terminator: end of input */
      if (IsUtf16Cont(x)) continue;  /* drop stray trailing surrogate */
      if (!IsUcs2(x)) {
        /* leading surrogate: need its pair to form the code point */
        if (!(y = *src++)) break;    /* input truncated mid-pair */
        x = MergeUtf16(x, y);
      }
      /* tpenc() packs the UTF-8 byte sequence into a word, low byte
         first; emit bytes while reserving one slot for the NUL */
      w = tpenc(x);
      while (w && i + 1 < dstsize) {
        dst[i++] = w & 0xFF;
        w >>= 8;
      }
    }
    dst[i] = 0;
  }
  return i;
}
|
||||
|
||||
Reference in New Issue
Block a user