Skip to content

Commit 550ec5a

Browse files
ysyneu authored and claude committed
fix: write curl payload to temp file to avoid ARG_MAX limit
The upload script passed the full JSON payload as a shell argument to curl, which exceeded the OS argument-size limit when batching many docs. Use --data-binary @tmpfile instead. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 7f9cef7 commit 550ec5a

1 file changed

Lines changed: 12 additions & 2 deletions

File tree

scripts/upload.sh

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -174,12 +174,17 @@ upload_batch() {
174174
return 0
175175
fi
176176

177+
local tmpfile
178+
tmpfile=$(mktemp)
179+
echo "$payload" > "$tmpfile"
180+
177181
local response
178182
response=$(curl -sS --connect-timeout 30 --max-time 120 \
179183
-X POST "$MEILI_ENDPOINT/indexes/$MEILI_INDEX/documents?primaryKey=id" \
180184
-H "Authorization: Bearer $MEILI_API_KEY" \
181185
-H "Content-Type: application/json" \
182-
--data-binary "$payload")
186+
--data-binary "@$tmpfile")
187+
rm -f "$tmpfile"
183188

184189
if echo "$response" | jq -e '.taskUid' > /dev/null 2>&1; then
185190
echo "Uploaded batch of $count documents (taskUid: $(echo "$response" | jq -r '.taskUid'))"
@@ -201,12 +206,17 @@ delete_documents() {
201206
return 0
202207
fi
203208

209+
local tmpfile
210+
tmpfile=$(mktemp)
211+
echo "$ids_json" > "$tmpfile"
212+
204213
local response
205214
response=$(curl -sS --connect-timeout 30 --max-time 60 \
206215
-X POST "$MEILI_ENDPOINT/indexes/$MEILI_INDEX/documents/delete-batch" \
207216
-H "Authorization: Bearer $MEILI_API_KEY" \
208217
-H "Content-Type: application/json" \
209-
--data-binary "$ids_json")
218+
--data-binary "@$tmpfile")
219+
rm -f "$tmpfile"
210220

211221
if echo "$response" | jq -e '.taskUid' > /dev/null 2>&1; then
212222
echo "Deleted $count documents (taskUid: $(echo "$response" | jq -r '.taskUid'))"

0 commit comments

Comments (0)