Mirror: The highly customizable and versatile GraphQL client with which you add on features like normalized caching as you grow.

fix(core): Fix multibyte character decoding by using `stream` option (#3767)

Co-authored-by: Phil Pluckthun <phil@kitten.sh>

i110 53b822bc 9bb7ef8f

Changed files
+28 -18
.changeset
packages
core
src
internal
+5
.changeset/nervous-flies-confess.md
···
+---
+'@urql/core': patch
+---
+
+Fix `fetchSource` not text-decoding response chunks as streams, which could cause UTF-8 decoding to break.
+23 -18
packages/core/src/internal/fetchSource.ts
···
* The implementation in this file needs to make certain accommodations for:
* - The Web Fetch API
* - Non-browser or polyfill Fetch APIs
- * - Node.js-like Fetch implementations (see `toString` below)
+ * - Node.js-like Fetch implementations
*
* GraphQL over SSE has a reference implementation, which supports non-HTTP/2
* modes and is a faithful implementation of the spec.
···
import type { Operation, OperationResult, ExecutionResult } from '../types';
import { makeResult, makeErrorResult, mergeResultPatch } from '../utils';
-const decoder = typeof TextDecoder !== 'undefined' ? new TextDecoder() : null;
const boundaryHeaderRe = /boundary="?([^=";]+)"?/i;
const eventStreamRe = /data: ?([^\n]+)/;
type ChunkData = Buffer | Uint8Array;
-// NOTE: We're avoiding referencing the `Buffer` global here to prevent
-// auto-polyfilling in Webpack
-const toString = (input: Buffer | ArrayBuffer): string =>
-  input.constructor.name === 'Buffer'
-    ? (input as Buffer).toString()
-    : decoder!.decode(input as ArrayBuffer);
-
-async function* streamBody(response: Response): AsyncIterableIterator<string> {
+async function* streamBody(
+  response: Response
+): AsyncIterableIterator<ChunkData> {
if (response.body![Symbol.asyncIterator]) {
-    for await (const chunk of response.body! as any)
-      yield toString(chunk as ChunkData);
+    for await (const chunk of response.body! as any) yield chunk as ChunkData;
} else {
const reader = response.body!.getReader();
let result: ReadableStreamReadResult<ChunkData>;
try {
-      while (!(result = await reader.read()).done) yield toString(result.value);
+      while (!(result = await reader.read()).done) yield result.value;
} finally {
reader.cancel();
}
}
}
-async function* split(
-  chunks: AsyncIterableIterator<string>,
+async function* streamToBoundedChunks(
+  chunks: AsyncIterableIterator<ChunkData>,
boundary: string
): AsyncIterableIterator<string> {
+  const decoder = typeof TextDecoder !== 'undefined' ? new TextDecoder() : null;
let buffer = '';
let boundaryIndex: number;
for await (const chunk of chunks) {
-    buffer += chunk;
+    // NOTE: We're avoiding referencing the `Buffer` global here to prevent
+    // auto-polyfilling in Webpack
+    buffer +=
+      chunk.constructor.name === 'Buffer'
+        ? (chunk as Buffer).toString()
+        : decoder!.decode(chunk as ArrayBuffer, { stream: true });
while ((boundaryIndex = buffer.indexOf(boundary)) > -1) {
yield buffer.slice(0, boundaryIndex);
buffer = buffer.slice(boundaryIndex + boundary.length);
···
response: Response
): AsyncIterableIterator<ExecutionResult> {
let payload: any;
-  for await (const chunk of split(streamBody(response), '\n\n')) {
+  for await (const chunk of streamToBoundedChunks(
+    streamBody(response),
+    '\n\n'
+  )) {
const match = chunk.match(eventStreamRe);
if (match) {
const chunk = match[1];
···
const boundary = '--' + (boundaryHeader ? boundaryHeader[1] : '-');
let isPreamble = true;
let payload: any;
-  for await (let chunk of split(streamBody(response), '\r\n' + boundary)) {
+  for await (let chunk of streamToBoundedChunks(
+    streamBody(response),
+    '\r\n' + boundary
+  )) {
if (isPreamble) {
isPreamble = false;
const preambleIndex = chunk.indexOf(boundary);