
Commit 9e82a19

jnxyp (Yipeng Xin) authored and Yipeng Xin committed
fix(testitall): Update context too large endpoints
- replace the payload string with a randomly generated string to avoid compression
- rename the endpoint to /response-too-large
- update testitall.actions.json and testitall.openapi.json to reflect the changes
1 parent d96cf89 commit 9e82a19

File tree

7 files changed: +1400 -1334 lines changed


integrations/extensions/starter-kits/testitall/mock-server/.env-sample

+4-1
@@ -1,8 +1,11 @@
 API_SERVER_PORT=4000
 
-# The response size limit for WA extensions, in bytes; Used by the Context too large tests. Default is 102400 bytes (100 KB).
+# The response size limit for WA extension calls, in bytes; Used by the Context too large tests. Default is 102400 bytes (100 KB).
 RESPONSE_SIZE_LIMIT=102400
 
+# The session size limit for WA stateful sessions, in bytes; Used by the Context almost too large tests. Default is 130000 bytes (127 KB).
+SESSION_SIZE_LIMIT=130000
+
 # [Authentication Credentials]
 # Basic Authentication
 AUTH_USERNAME=WA_USERNAME
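
These two variables are the knobs the size-limit tests turn; a minimal sketch of how a server might load and parse them (the dotenv import and the fallback defaults are assumptions for illustration, not code from the starter kit):

import 'dotenv/config'; // assumption: the .env values are loaded with the dotenv package

// Fall back to the documented defaults when the variables are unset.
const RESPONSE_SIZE_LIMIT = parseInt(process.env.RESPONSE_SIZE_LIMIT, 10) || 102400; // 100 KB
const SESSION_SIZE_LIMIT = parseInt(process.env.SESSION_SIZE_LIMIT, 10) || 130000;   // ~127 KB

console.log({ RESPONSE_SIZE_LIMIT, SESSION_SIZE_LIMIT });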

integrations/extensions/starter-kits/testitall/mock-server/server/controllers/test-controller.js

+5-7
@@ -1,3 +1,5 @@
+import { generateHighEntropyString } from '../utils.js';
+
 const LONG_MESSAGE = `This is a long message intended to demonstrate streaming large messages in smaller chunks. Breaking down messages into smaller parts helps simulate real-time data transmission over a network. This technique is particularly useful for streaming large files or continuous data streams like logs, chat messages, or live updates. By sending small chunks, we ensure the data is processed and displayed incrementally, providing a smoother and more responsive user experience. Each chunk represents a portion of the entire message, and they are sent sequentially with a slight delay to mimic real-world streaming scenarios. This example uses a delay of 100 milliseconds between each chunk to achieve this effect.`;
 
 // HTTP Methods
@@ -154,21 +156,17 @@ export function errorTest(req, res) {
   });
 }
 
-
-export function contextTooLargeTest(req, res) {
-  const fakeData = 'x'.repeat(parseInt(process.env.RESPONSE_SIZE_LIMIT, 10) * 5); // Default: 100KB * 5 = 650KB
+export function responseTooLargeTest(req, res) {
   const response = {
-    data: fakeData
+    data: generateHighEntropyString(parseInt(process.env.RESPONSE_SIZE_LIMIT, 10) * 5) // Default: 100KB * 5 = 650KB
   }
 
   return res.status(200).send(response);
 }
 
 export function contextAlmostTooLargeTest(req, res) {
-  const fakeData = 'x'.repeat(parseInt(process.env.RESPONSE_SIZE_LIMIT, 10) - 1024); // Default: 100KB - 1KB = 99KB
-
   const response = {
-    data: fakeData
+    data: generateHighEntropyString(parseInt(process.env.SESSION_SIZE_LIMIT, 10) / 2) // Default: 130KB / 2 = 65KB
   }
 
   return res.status(200).send(response);
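
The switch from 'x'.repeat to generateHighEntropyString is what keeps the oversized response oversized on the wire: a run of identical characters deflates to almost nothing, while random characters barely compress. A minimal sketch of that difference, assuming Node's built-in zlib and that utils.js lives at server/utils.js as the controller's '../utils.js' import suggests (this check is illustrative, not part of the commit):

import zlib from 'node:zlib';
import { generateHighEntropyString } from './server/utils.js'; // assumed path, run from the mock-server directory

const size = 102400 * 5; // default RESPONSE_SIZE_LIMIT * 5 = 512000 bytes (~500 KB)
const repeated = 'x'.repeat(size);
const random = generateHighEntropyString(size);

console.log('repeated payload gzipped:', zlib.gzipSync(repeated).length); // collapses to well under 1 KB
console.log('random payload gzipped:  ', zlib.gzipSync(random).length);   // stays at several hundred KB, well above the limit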

integrations/extensions/starter-kits/testitall/mock-server/server/routes/test-route.js

+7-1
@@ -10,13 +10,19 @@ router.get('', testController.getTest);
 router.put('', testController.putTest);
 router.post('', testController.postTest);
 router.patch('', testController.patchTest);
+
 router.post('/error', testController.errorTest);
 router.post('/params/:path_param', testController.postTest);
 router.post('/auth_header', testController.authHeaderTest);
+
 router.post('/arrays-root', testController.arraysRootTest);
 router.post('/arrays-object', testController.arraysInObjectTest);
-router.post('/context-too-large', testController.contextTooLargeTest);
+
+router.post('/response-too-large', testController.responseTooLargeTest);
+router.post('/context-too-large', testController.responseTooLargeTest); // for backwards compatibility
+
 router.post('/context-almost-too-large', testController.contextAlmostTooLargeTest);
+
 router.post('/non-json-response', testController.successfulPostWithNonJSONResponse);
 
 router.post('/advanced/properties-counter', testController.propertiesByCounterTest);
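
A rough way to exercise the renamed route and its backwards-compatible alias, assuming the mock server is running locally on the default API_SERVER_PORT=4000, Node 18+ for the global fetch, and a /test mount point for this router (the mount point is not shown in this diff, so adjust the base URL as needed):

const base = 'http://localhost:4000/test'; // assumed mount point

for (const path of ['/response-too-large', '/context-too-large']) {
  const res = await fetch(base + path, { method: 'POST' });
  const body = await res.text();
  console.log(path, res.status, `${body.length} bytes`); // both aliases should return the same ~500 KB payload
}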

integrations/extensions/starter-kits/testitall/mock-server/server/utils.js

+14-0
@@ -0,0 +1,14 @@
+/**
+ * High entropy string generator, used to generate the body of the response for the contextTooLargeTest and contextAlmostTooLargeTest endpoints
+ * to avoid being compressed by the server for more accurate testing results
+ * @param {number} length The length of the string to generate
+ * @returns {string} The generated string
+ */
+export function generateHighEntropyString(length) {
+  const charset = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()_+-=[]{}|;:",.<>?/~`'; // Large character set
+  let result = '';
+  for (let i = 0; i < length; i++) {
+    result += charset[Math.floor(Math.random() * charset.length)];
+  }
+  return result;
+}
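
As a back-of-the-envelope check on why this generator defeats compression (illustrative numbers, not part of the commit): the charset above has 92 characters, so each generated character carries roughly log2(92) ≈ 6.5 bits of entropy, and no lossless compressor can shrink the payload much below ~81% of its raw size.

const charsetSize = 92;                          // letters, digits, and the punctuation in the charset above
const bitsPerChar = Math.log2(charsetSize);      // ≈ 6.52 bits of entropy per character
const bestCaseRatio = bitsPerChar / 8;           // ≈ 0.81, the floor for any lossless compressor on this data
console.log(Math.round(512000 * bestCaseRatio)); // ≈ 417000 bytes, still far above the 102400-byte response limit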
