libutf: Change return type of utftorunestr to size_t
It returns the size of the rune array, so size_t is the right type to use here.
commit 28063c02f4
parent 6902aad435
@@ -1,12 +1,12 @@
 /* See LICENSE file for copyright and license details. */
 #include "../utf.h"
 
-int
+size_t
 utftorunestr(const char *str, Rune *r)
 {
-	int i, n;
+	size_t i, n;
 
-	for(i = 0; (n = chartorune(&r[i], str)) && r[i]; i++)
+	for (i = 0; (n = chartorune(&r[i], str)) && r[i]; i++)
 		str += n;
 
 	return i;
utf.h
@@ -59,7 +59,7 @@ int isxdigitrune(Rune);
 Rune tolowerrune(Rune);
 Rune toupperrune(Rune);
 
-int utftorunestr(const char*, Rune *);
+size_t utftorunestr(const char *, Rune *);
 
 int fgetrune(Rune *, FILE *);
 int efgetrune(Rune *, FILE *, const char *);
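
For context, a minimal caller-side sketch of the new signature (not part of the commit): it assumes libutf is built and its utf.h is reachable on the include path, and the sample string and buffer size are arbitrary choices for illustration.

/* Sketch only: convert a UTF-8 string to runes and print each code point. */
#include <stdio.h>

#include "utf.h"	/* assumed include path for libutf's Rune and utftorunestr() */

int
main(void)
{
	Rune r[64];	/* arbitrary size; must also hold the terminating NUL rune */
	size_t i, n;

	/* utftorunestr() now reports the number of runes written as a size_t,
	 * so the count can be stored without a narrowing conversion. */
	n = utftorunestr("héllo", r);

	for (i = 0; i < n; i++)
		printf("U+%04X\n", (unsigned int)r[i]);

	return 0;
}

Since the count can never be negative, size_t also matches the type typically used for loop bounds and index arithmetic over the rune buffer.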