[PATCH v2 2/4] gdi32: Avoid best fit chars when mapping unicode character to a glyph index.
Dmitry Timoshkov
dmitry at baikal.ru
Tue Dec 15 06:04:46 CST 2020
This part of the series fixes the GetGlyphIndices() path by passing WC_NO_BEST_FIT_CHARS to WideCharToMultiByte(), so characters without an exact mapping in the font's codepage are not silently substituted with a best-fit character.
Signed-off-by: Dmitry Timoshkov <dmitry at baikal.ru>
---
dlls/gdi32/font.c | 2 +-
dlls/gdi32/tests/font.c | 2 --
2 files changed, 1 insertion(+), 3 deletions(-)
diff --git a/dlls/gdi32/font.c b/dlls/gdi32/font.c
index 00bbeb0a11..9380d3efbe 100644
--- a/dlls/gdi32/font.c
+++ b/dlls/gdi32/font.c
@@ -3223,7 +3223,7 @@ static DWORD CDECL font_GetGlyphIndices( PHYSDEV dev, const WCHAR *str, INT coun
if (str[i] >= 0xf020 && str[i] <= 0xf100) glyph = str[i] - 0xf000;
else if (str[i] < 0x100) glyph = str[i];
}
- else if (WideCharToMultiByte( physdev->font->codepage, 0, &str[i], 1,
+ else if (WideCharToMultiByte( physdev->font->codepage, WC_NO_BEST_FIT_CHARS, &str[i], 1,
&ch, 1, NULL, &used ) && !used)
glyph = (unsigned char)ch;
}
diff --git a/dlls/gdi32/tests/font.c b/dlls/gdi32/tests/font.c
index c630b65e5e..5eb069bf31 100644
--- a/dlls/gdi32/tests/font.c
+++ b/dlls/gdi32/tests/font.c
@@ -1690,14 +1690,12 @@ static void test_GetGlyphIndices(void)
charcount = GetGlyphIndicesW(hdc, c, ARRAY_SIZE(c), glyphs, GGI_MARK_NONEXISTING_GLYPHS);
ok(charcount == ARRAY_SIZE(c), "got %u\n", charcount);
ok(glyphs[0] == 0x001f || glyphs[0] == 0xffff /* Vista */, "got %#x\n", glyphs[0]);
-todo_wine
ok(glyphs[1] == 0x001f || glyphs[1] == 0xffff /* Vista */, "got %#x\n", glyphs[1]);
glyphs[0] = glyphs[1] = 0;
charcount = GetGlyphIndicesW(hdc, c, ARRAY_SIZE(c), glyphs, 0);
ok(charcount == ARRAY_SIZE(c), "got %u\n", charcount);
ok(glyphs[0] == textm.tmDefaultChar || glyphs[0] == 0x20 /* CJK Windows */, "got %#x\n", glyphs[0]);
-todo_wine
ok(glyphs[1] == textm.tmDefaultChar || glyphs[1] == 0x20 /* CJK Windows */, "got %#x\n", glyphs[1]);
DeleteObject(SelectObject(hdc, hOldFont));
--
2.29.2
More information about the wine-devel
mailing list