import axios from 'axios';
import https from 'https';
import fs from 'fs';
import readline from 'readline';
import path from 'path';
import FormData from 'form-data';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// ─── CONFIG ────────────────────────────────────────────────────────
const API_VERSION = '2023-10';
const exportDir = path.join(__dirname, 'exports');
const JSONL_FILENAME = 'bulk_inventory.jsonl';

// handle → desired total quantity, populated from the Turn14 feed before the
// Shopify product export is matched against it.
const desiredInventoryByHandle = {};

// Credentials are assigned at runtime by the top-level flow below or by
// syncTurn14Inventory().
// FIXME(security): live tokens were hard-coded here (and still appear as
// fallbacks in the main flow) — rotate them and supply via environment only.
let SHOP;
let ACCESS_TOKEN;
let TURN14_ACCESS_TOKEN;

/**
 * Split `array` into consecutive chunks of at most `size` elements.
 * @param {Array} array - source array (not mutated)
 * @param {number} size - positive chunk length
 * @returns {Array<Array>} chunks in original order
 */
function chunkArray(array, size) {
  const result = [];
  for (let i = 0; i < array.length; i += size) {
    result.push(array.slice(i, i + size));
  }
  return result;
}

// ─── HELPERS ───────────────────────────────────────────────────────

/** Filesystem-safe UTC timestamp, e.g. "20240101_120000". */
function timestamp() {
  const now = new Date();
  return now.toISOString().replace(/[-:]/g, '').replace('T', '_').split('.')[0];
}

// ─── STAGE FILE UPLOAD ─────────────────────────────────────────────

/**
 * Create a staged-upload target for a bulk-mutation JSONL file.
 * Currently unused — the staged-upload path in the main flow is disabled.
 * @returns {Promise<{uploadUrl: string, parameters: Array<{name: string, value: string}>, stagedUploadPath: string}>}
 * @throws when Shopify reports userErrors or the target lacks a "key" parameter
 */
async function getStagedUploadPath() {
  const mutation = `
    mutation {
      stagedUploadsCreate(input:[{
        resource: BULK_MUTATION_VARIABLES,
        filename: "${JSONL_FILENAME}",
        mimeType: "text/jsonl",
        httpMethod: POST
      }]){
        stagedTargets { url resourceUrl parameters { name value } }
        userErrors { field message }
      }
    }
  `;
  const resp = await axios.post(
    `https://${SHOP}/admin/api/${API_VERSION}/graphql.json`,
    { query: mutation },
    { headers: { 'X-Shopify-Access-Token': ACCESS_TOKEN } }
  );
  const result = resp.data.data.stagedUploadsCreate;
  if (result.userErrors.length) {
    throw new Error('Staged upload error: ' + JSON.stringify(result.userErrors));
  }
  const target = result.stagedTargets[0];
  const keyParam = target.parameters.find(p => p.name === 'key');
  if (!keyParam) throw new Error('Missing "key" parameter in staged upload target');
  return {
    uploadUrl: target.url,
    parameters: target.parameters,
    stagedUploadPath: keyParam.value,
  };
}

/**
 * POST the JSONL file to the staged-upload URL as multipart/form-data.
 * The staged parameters must be appended before the file part.
 */
async function uploadJSONLFile(uploadUrl, params, filePath) {
  const form = new FormData();
  params.forEach(({ name, value }) => form.append(name, value));
  form.append('file', fs.createReadStream(filePath));
  await axios.post(uploadUrl, form, { headers: form.getHeaders() });
  console.log('✅ Uploaded JSONL file to Shopify');
}

/**
 * Kick off a bulkOperationRunMutation that applies the uploaded JSONL of
 * inventoryAdjustQuantity inputs. Currently unused (disabled in main flow).
 */
async function runBulkMutation(stagedUploadPath) {
  console.log('Resource URL', stagedUploadPath);
  const mutation = `
    mutation bulkInventoryAdjust($input: InventoryAdjustQuantityInput!) {
      inventoryAdjustQuantity(input: $input) {
        inventoryLevel { id available }
        userErrors { field message }
      }
    }
  `;
  const wrappedMutation = `
    mutation {
      bulkOperationRunMutation(
        mutation: """${mutation}""",
        stagedUploadPath: "${stagedUploadPath}"
      ) {
        bulkOperation { id status }
        userErrors { field message }
      }
    }
  `;
  const resp = await axios.post(
    `https://${SHOP}/admin/api/${API_VERSION}/graphql.json`,
    { query: wrappedMutation },
    { headers: { 'X-Shopify-Access-Token': ACCESS_TOKEN } }
  );
  console.log('📦 Mutation Response:', JSON.stringify(resp.data, null, 2));
  const result = resp.data.data?.bulkOperationRunMutation;
  if (!result) throw new Error('bulkOperationRunMutation failed.');
  if (result.userErrors.length) {
    console.error('❌ Mutation Errors:', result.userErrors);
  } else {
    console.log('✅ Bulk inventory update started with ID:', result.bulkOperation.id);
  }
}

// ─── EXPORT / NDJSON LOGIC ─────────────────────────────────────────

/**
 * Start a Shopify bulk export of all products with their variants'
 * inventory-item IDs. Result is fetched later via pollUntilReady().
 * @throws when the bulkOperationRunQuery reports userErrors
 */
async function startBulkExport() {
  console.log('[1] Starting bulk export...');
  const mutation = `
    mutation {
      bulkOperationRunQuery(
        query: """
        {
          products {
            edges {
              node {
                id
                handle
                variants {
                  edges {
                    node {
                      id
                      inventoryItem { id }
                    }
                  }
                }
              }
            }
          }
        }
        """
      ) {
        bulkOperation { id status }
        userErrors { field message }
      }
    }
  `;
  const resp = await axios.post(
    `https://${SHOP}/admin/api/${API_VERSION}/graphql.json`,
    { query: mutation },
    { headers: { 'X-Shopify-Access-Token': ACCESS_TOKEN } }
  );
  const err = resp.data.data.bulkOperationRunQuery.userErrors;
  if (err.length) throw new Error(JSON.stringify(err));
  console.log('[1] ✔️ Export started:', resp.data.data.bulkOperationRunQuery.bulkOperation.id);
}

/**
 * Poll currentBulkOperation every 5 s until it completes.
 * @returns {Promise<string>} URL of the finished NDJSON export
 * @throws when the bulk operation reports FAILED
 */
async function pollUntilReady() {
  const query = `{ currentBulkOperation { status url errorCode } }`;
  while (true) {
    const resp = await axios.post(
      `https://${SHOP}/admin/api/${API_VERSION}/graphql.json`,
      { query },
      { headers: { 'X-Shopify-Access-Token': ACCESS_TOKEN } }
    );
    const op = resp.data.data.currentBulkOperation;
    console.log(`[2] Status: ${op.status}`);
    if (op.status === 'COMPLETED') {
      console.log('[2] ✔️ Completed export. File URL:', op.url);
      return op.url;
    }
    if (op.status === 'FAILED') {
      throw new Error('Bulk export failed: ' + op.errorCode);
    }
    await new Promise(r => setTimeout(r, 5000));
  }
}

/**
 * Stream the export URL to a timestamped .ndjson file in exportDir.
 * @returns {Promise<string>} path of the downloaded file
 */
async function downloadToFile(fileUrl) {
  const fname = path.join(exportDir, `${timestamp()}_bulk_export.ndjson`);
  const fileStream = fs.createWriteStream(fname);
  await new Promise((resolve, reject) => {
    const req = https.get(fileUrl, res => {
      res.pipe(fileStream);
      fileStream.on('finish', () => { fileStream.close(resolve); });
      fileStream.on('error', reject);
    });
    // fix: without this, a DNS/socket failure left the promise pending forever
    req.on('error', reject);
  });
  console.log(`[3] ✔️ Downloaded export to ${fname}`);
  return fname;
}

/**
 * Parse the NDJSON export into handle → [{variantId, inventoryItemId}],
 * keeping only handles present in `desiredMap`. Relies on parent product
 * records appearing before their variant records (Shopify's __parentId order).
 * Also writes a timestamped handle_map.json snapshot for debugging.
 * @param {string} ndjsonPath - downloaded export file
 * @param {Object} [desiredMap=desiredInventoryByHandle] - handle → quantity
 *   filter; parameter added so callers with their own map (syncTurn14Inventory)
 *   no longer silently use the module-level map.
 * @returns {Promise<Object>} the handle map
 */
async function buildHandleMap(ndjsonPath, desiredMap = desiredInventoryByHandle) {
  console.log('[4] Building handle→inventory map...');
  const handleMap = {};
  const productIdToHandle = {};
  const rl = readline.createInterface({
    input: fs.createReadStream(ndjsonPath),
    crlfDelay: Infinity,
  });
  for await (const line of rl) {
    try {
      const rec = JSON.parse(line);
      if (rec.handle && rec.id) {
        productIdToHandle[rec.id] = rec.handle;
      }
      if (rec.__parentId && rec.inventoryItem?.id) {
        const handle = productIdToHandle[rec.__parentId];
        if (handle && desiredMap[handle] != null) {
          if (!handleMap[handle]) handleMap[handle] = [];
          handleMap[handle].push({
            variantId: rec.id,
            inventoryItemId: rec.inventoryItem.id,
          });
        }
      }
    } catch (e) {
      console.error('⚠️ JSON parse error:', e.message);
    }
  }
  console.log(`[4] ✔️ Matched ${Object.keys(handleMap).length} handles`);
  const mapPath = path.join(exportDir, `${timestamp()}_handle_map.json`);
  fs.writeFileSync(mapPath, JSON.stringify(handleMap, null, 2));
  return handleMap;
}

/**
 * Fetch the shop's locations and return the first one's GID.
 * NOTE(review): pinned to API 2024-01 while the rest of the file uses
 * API_VERSION (2023-10) — confirm this divergence is intentional.
 * @returns {Promise<string>} location GID, e.g. "gid://shopify/Location/…"
 */
async function getLocationID() {
  console.log('[0] Building Location ID..');
  const client = axios.create({
    baseURL: `https://${SHOP}/admin/api/2024-01/graphql.json`,
    headers: {
      'X-Shopify-Access-Token': ACCESS_TOKEN,
      'Content-Type': 'application/json',
    },
  });
  const locationsRes = await client.post('', {
    query: `
      query {
        locations(first: 10) {
          edges { node { id name } }
        }
      }
    `,
  });
  const locations = locationsRes.data.data.locations.edges;
  // Use your logic to pick the right location; first one for now.
  const locationId = locations[0].node.id;
  console.log('Location ID - ', locationId);
  return locationId;
}

/**
 * Build the flat list of inventory updates and mirror it to a JSONL file
 * (one {input: …} object per line, the shape bulkOperationRunMutation expects).
 * @param {Object} handleMap - handle → [{variantId, inventoryItemId}]
 * @param {Object} desiredInventoryByHandle - handle → quantity
 * @param {string} outputPath - JSONL destination
 * @param {string} locationId - Shopify location GID applied to every update
 * @returns {Array<{inventoryItemId: string, quantity: number, locationId: string}>}
 */
function writeBulkInventoryJSONL(handleMap, desiredInventoryByHandle, outputPath, locationId) {
  const stream = fs.createWriteStream(outputPath);
  const inventoryUpdates = [];
  for (const [handle, items] of Object.entries(handleMap)) {
    const qty = desiredInventoryByHandle[handle];
    if (qty == null) continue;
    for (const { inventoryItemId } of items) {
      inventoryUpdates.push({ inventoryItemId, quantity: qty, locationId });
    }
  }
  for (const entry of inventoryUpdates) {
    stream.write(JSON.stringify({ input: entry }) + '\n');
  }
  // NOTE: end() is not awaited; the file is only consumed by the disabled
  // staged-upload path, so a partially flushed file is currently harmless.
  stream.end();
  console.log(`✅ JSONL: wrote ${inventoryUpdates.length} inventory adjustments to ${outputPath}`);
  // fix: removed a stray `4` expression statement and a pointless alias var
  return inventoryUpdates;
}

/**
 * Push one batch of absolute quantities via inventorySetQuantities.
 * Errors are logged (not thrown) so remaining batches still run.
 * @param {Array<{inventoryItemId: string, locationId: string, quantity: number}>} batch
 * @param {number} index - 1-based batch number, for log messages only
 */
async function updateInventoryBatch(batch, index) {
  // fix: use GraphQL variables instead of string-interpolating every item into
  // the query text — smaller payload and no risk of a malformed document.
  const mutation = `
    mutation inventorySetQuantities($input: InventorySetQuantitiesInput!) {
      inventorySetQuantities(input: $input) {
        inventoryAdjustmentGroup {
          id
          createdAt
          reason
          changes { name delta }
        }
        userErrors { field message }
      }
    }
  `;
  const variables = {
    input: {
      name: 'available', // or "on_hand"
      reason: 'correction',
      ignoreCompareQuantity: true,
      quantities: batch.map(({ inventoryItemId, locationId, quantity }) => ({
        inventoryItemId,
        locationId,
        quantity,
      })),
    },
  };
  let json;
  try {
    const res = await axios.post(
      `https://${SHOP}/admin/api/2025-07/graphql.json`,
      { query: mutation, variables },
      {
        headers: {
          'Content-Type': 'application/json',
          'X-Shopify-Access-Token': ACCESS_TOKEN,
        },
      }
    );
    json = res.data;
  } catch (error) {
    console.error(`Batch ${index} - Axios Error:`, error.response?.data || error.message);
    return;
  }
  if (json.errors) {
    console.error(`Batch ${index} - GraphQL Errors:`, JSON.stringify(json.errors, null, 2));
  }
  if (json.data?.inventorySetQuantities?.userErrors?.length) {
    console.error(`Batch ${index} - User Errors:`, JSON.stringify(json.data.inventorySetQuantities.userErrors, null, 2));
  } else {
    console.log(`✅ Batch ${index} updated successfully.`);
  }
}

// ─── MAIN FLOW ─────────────────────────────────────────────────────
// Runs on import (top-level await).
// FIXME(security): the fallback tokens below were committed to source control
// and must be rotated; prefer supplying them via the environment.
try {
  SHOP = process.env.SHOPIFY_SHOP ?? 'veloxautomotive.myshopify.com';
  ACCESS_TOKEN = process.env.SHOPIFY_ACCESS_TOKEN ?? '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';
  TURN14_ACCESS_TOKEN = process.env.TURN14_ACCESS_TOKEN ?? 'd5c95efd2a7dd59a09b98671a88bdc38149b4c14';
  const BATCH_SIZE = 1000;

  const turn14Res = await fetch(`https://turn14.data4autos.com/v1/inventory/allupdates`, {
    headers: {
      Authorization: `Bearer ${TURN14_ACCESS_TOKEN}`,
      "Content-Type": "application/json",
    },
  });
  const turn14Data = await turn14Res.json();
  for (const item of turn14Data) {
    desiredInventoryByHandle[item.id] = item.totalQuantity;
  }
  console.log(`✅ Loaded ${Object.keys(desiredInventoryByHandle).length} inventory items from Turn14`);

  if (!fs.existsSync(exportDir)) fs.mkdirSync(exportDir);

  const locationId = await getLocationID();
  await startBulkExport();
  const url = await pollUntilReady();
  const ndjsonPath = await downloadToFile(url);
  const handleMap = await buildHandleMap(ndjsonPath);
  const jsonlPath = path.join(exportDir, JSONL_FILENAME);
  const latestInventoryData = writeBulkInventoryJSONL(handleMap, desiredInventoryByHandle, jsonlPath, locationId);
  console.log(latestInventoryData);

  // fix: this loop used to run as a floating promise with its own .catch(),
  // so batch failures never reached the outer catch / non-zero exit.
  const batches = chunkArray(latestInventoryData, BATCH_SIZE);
  for (let i = 0; i < batches.length; i++) {
    console.log(`🔄 Updating batch ${i + 1} of ${batches.length}`);
    await updateInventoryBatch(batches[i], i + 1);
  }
  console.log('✅ All inventory batches processed.');

  // Disabled staged-upload / bulk-mutation path:
  // const { uploadUrl, stagedUploadPath, parameters } = await getStagedUploadPath();
  // await uploadJSONLFile(uploadUrl, parameters, jsonlPath);
  // await runBulkMutation(stagedUploadPath);
} catch (err) {
  console.error('🚨 Error:', err);
  process.exit(1);
}

/**
 * Full Turn14 → Shopify inventory sync for the given shop credentials:
 * pulls the Turn14 feed, bulk-exports Shopify products, matches handles and
 * pushes absolute quantities in batches of 1000.
 * NOTE(review): on failure this exits the whole process rather than throwing —
 * confirm callers rely on that before changing it.
 * @param {string} shop - myshopify.com domain
 * @param {string} accessToken - Shopify Admin API token
 * @param {string} turn14AccessToken - Turn14 bearer token
 */
async function syncTurn14Inventory(shop, accessToken, turn14AccessToken) {
  try {
    SHOP = shop;
    ACCESS_TOKEN = accessToken;
    TURN14_ACCESS_TOKEN = turn14AccessToken;
    const BATCH_SIZE = 1000;
    const exportDir = './exported_inventory'; // shadows module-level exports dir for mkdir/jsonl only
    const JSONL_FILENAME = 'inventory_data.jsonl';
    const desiredInventoryByHandle = {};

    // Fetch Turn14 inventory data
    const turn14Res = await fetch(`https://turn14.data4autos.com/v1/inventory/allupdates`, {
      headers: {
        Authorization: `Bearer ${turn14AccessToken}`,
        "Content-Type": "application/json",
      },
    });
    const turn14Data = await turn14Res.json();
    for (const item of turn14Data) {
      desiredInventoryByHandle[item.id] = item.totalQuantity;
    }
    console.log(`✅ Loaded ${Object.keys(desiredInventoryByHandle).length} inventory items from Turn14`);

    if (!fs.existsSync(exportDir)) fs.mkdirSync(exportDir);

    // These helpers read the module-level SHOP/ACCESS_TOKEN assigned above.
    const locationId = await getLocationID();
    await startBulkExport();
    const url = await pollUntilReady();
    const ndjsonPath = await downloadToFile(url);
    // fix: pass this function's own desired map explicitly — previously
    // buildHandleMap read the module-level map and ignored the data loaded here.
    const handleMap = await buildHandleMap(ndjsonPath, desiredInventoryByHandle);
    const jsonlPath = path.join(exportDir, JSONL_FILENAME);
    const latestInventoryData = writeBulkInventoryJSONL(handleMap, desiredInventoryByHandle, jsonlPath, locationId);
    console.log(latestInventoryData);

    // Run updates in batches
    const batches = chunkArray(latestInventoryData, BATCH_SIZE);
    for (let i = 0; i < batches.length; i++) {
      console.log(`🔄 Updating batch ${i + 1} of ${batches.length}`);
      await updateInventoryBatch(batches[i], i + 1);
    }
    console.log('✅ All inventory batches processed.');
  } catch (err) {
    console.error('🚨 Error:', err);
    process.exit(1);
  }
}

// fix: this file is an ES module (import.meta.url above); `module.exports`
// threw a ReferenceError at load time.
export { syncTurn14Inventory };