diff --git a/.dev.example.vars b/.dev.example.vars index 7a23b9d..9032a1a 100644 --- a/.dev.example.vars +++ b/.dev.example.vars @@ -6,4 +6,5 @@ UPSTASH_REDIS_REST_TOKEN="" UPSTASH_REDIS_REST_URL="http://localhost:8079" # KRL stuff -KRL_ENDPOINT_BASE_URL="https://api-partner.krl.co.id/krl-webs/v1" \ No newline at end of file +KRL_ENDPOINT_BASE_URL="https://api-partner.krl.co.id/krl-webs/v1" +KAI_BEARER_TOKEN="" diff --git a/README.md b/README.md index 0877765..7d7a45c 100644 --- a/README.md +++ b/README.md @@ -39,19 +39,21 @@ cp .dev.example.vars .dev.vars 4. Generate `UPSTASH_REDIS_REST_TOKEN` using `openssl rand -hex 32` and copy it to your `.dev.vars` file -5. Run database locally +5. Add `KAI_BEARER_TOKEN` to your `.dev.vars` file (grab the latest Bearer token from the official web app network request) + +6. Run database locally ```bash docker-compose up -d ``` -6. Run the database migration +7. Run the database migration ```bash bun run migrate:apply ``` -7. 
Sync the data and populate it into your local database (once only as you needed) +8. Sync the data and populate it into your local database (once only as you needed) ```bash # Please do this in order @@ -83,21 +85,23 @@ bun run sync:station bun run sync:schedule ``` -6. Add `COMULINE_ENV` to your `.production.vars` file +5. Add `COMULINE_ENV` to your `.production.vars` file ``` COMULINE_ENV=production ``` -6. Create a new Redis database in [Upstash](https://upstash.com/) and copy the value of `UPSTASH_REDIS_REST_TOKEN` and `UPSTASH_REDIS_REST_URL` to your `.production.vars` file +6. Add `KAI_BEARER_TOKEN` to your `.production.vars` file (use the latest Bearer token from the official web app) + +7. Create a new Redis database in [Upstash](https://upstash.com/) and copy the value of `UPSTASH_REDIS_REST_TOKEN` and `UPSTASH_REDIS_REST_URL` to your `.production.vars` file -7. Save your `.production.vars` file to your environment variables in your Cloudflare Workers using `wrangler` +8. Save your `.production.vars` file to your environment variables in your Cloudflare Workers using `wrangler` ```bash bunx wrangler secret put --env production $(cat .production.vars) ``` -8. Deploy the API to Cloudflare Workers +9. 
Deploy the API to Cloudflare Workers ```bash bun run deploy diff --git a/src/sync/headers.ts b/src/sync/headers.ts index a5d570a..a4907ee 100644 --- a/src/sync/headers.ts +++ b/src/sync/headers.ts @@ -1,9 +1,10 @@ +const KAI_BEARER_TOKEN = process.env.KAI_BEARER_TOKEN?.trim() + export const KAI_HEADERS = { "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:132.0) Gecko/20100101 Firefox/132.0", Accept: "application/json, text/javascript, */*; q=0.01", "Accept-Language": "en-US,en;q=0.5", - Authorization: - "Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIzIiwianRpIjoiMDYzNWIyOGMzYzg3YTY3ZTRjYWE4YTI0MjYxZGYwYzIxNjYzODA4NWM2NWU4ZjhiYzQ4OGNlM2JiZThmYWNmODU4YzY0YmI0MjgyM2EwOTUiLCJpYXQiOjE3MjI2MTc1MTQsIm5iZiI6MTcyMjYxNzUxNCwiZXhwIjoxNzU0MTUzNTE0LCJzdWIiOiI1Iiwic2NvcGVzIjpbXX0.Jz_sedcMtaZJ4dj0eWVc4_pr_wUQ3s1-UgpopFGhEmJt_iGzj6BdnOEEhcDDdIz-gydQL5ek0S_36v5h6P_X3OQyII3JmHp1SEDJMwrcy4FCY63-jGnhPBb4sprqUFruDRFSEIs1cNQ-3rv3qRDzJtGYc_bAkl2MfgZj85bvt2DDwBWPraZuCCkwz2fJvox-6qz6P7iK9YdQq8AjJfuNdl7t_1hMHixmtDG0KooVnfBV7PoChxvcWvs8FOmtYRdqD7RSEIoOXym2kcwqK-rmbWf9VuPQCN5gjLPimL4t2TbifBg5RWNIAAuHLcYzea48i3okbhkqGGlYTk3iVMU6Hf_Jruns1WJr3A961bd4rny62lNXyGPgNLRJJKedCs5lmtUTr4gZRec4Pz_MqDzlEYC3QzRAOZv0Ergp8-W1Vrv5gYyYNr-YQNdZ01mc7JH72N2dpU9G00K5kYxlcXDNVh8520-R-MrxYbmiFGVlNF2BzEH8qq6Ko9m0jT0NiKEOjetwegrbNdNq_oN4KmHvw2sHkGWY06rUeciYJMhBF1JZuRjj3JTwBUBVXcYZMFtwUAoikVByzKuaZZeTo1AtCiSjejSHNdpLxyKk_SFUzog5MOkUN1ktAhFnBFoz6SlWAJBJIS-lHYsdFLSug2YNiaNllkOUsDbYkiDtmPc9XWc", + Authorization: `Bearer ${KAI_BEARER_TOKEN}`, Priority: "u=0", } diff --git a/src/sync/schedule.ts b/src/sync/schedule.ts index c32028f..8e03687 100644 --- a/src/sync/schedule.ts +++ b/src/sync/schedule.ts @@ -48,6 +48,25 @@ const sync = async () => { ), }) + const normalizeStationKey = (name: string) => { + let normalized = name + .toUpperCase() + .replace(/VIA\\s*MRI/g, "") + .replace(/VIAMRI/g, "") + .replace(/\\s+/g, "") + .replace(/[^A-Z0-9]/g, "") + + if (normalized === "TANJUNGPRIUK") { + normalized 
= "TANJUNGPRIOK" + } + + return normalized + } + + const stationIdByKey = new Map( + stations.map((station) => [normalizeStationKey(station.name), station.id]), + ) + for (let i = 0; i < totalBatches; i++) { const start = i * batchSizes const end = start + batchSizes @@ -57,7 +76,7 @@ const sync = async () => { batch.map(async ({ id, metadata }) => { await sleep(5000) - const url = `${process.env.KRL_ENDPOINT_BASE_URL}/schedule?stationid=${id}&timefrom=00:00&timeto=23:00` + const url = `${process.env.KRL_ENDPOINT_BASE_URL}/schedules?stationid=${id}&timefrom=00:00&timeto=23:00` console.info(`[SYNC][SCHEDULE][${id}] Send preflight`) const optionsResponse = await fetch(url, { @@ -94,52 +113,64 @@ const sync = async () => { if (!parsed.success) { console.error(`[SYNC][SCHEDULE][${id}] Error parse`) } else { - const values = parsed.data.data.map((d) => { - let [origin, destination] = d.route_name.split("-") - - const fixName = (name: string) => { - switch (name) { - case "TANJUNGPRIUK": - return "TANJUNG PRIOK" - case "JAKARTAKOTA": - return "JAKARTA KOTA" - case "KAMPUNGBANDAN": - return "KAMPUNG BANDAN" - case "TANAHABANG": - return "TANAH ABANG" - case "PARUNGPANJANG": - return "PARUNG PANJANG" - case "BANDARASOEKARNOHATTA": - return "BANDARA SOEKARNO HATTA" - default: - return name + const missingRoutes: string[] = [] + + const values = parsed.data.data + .map((d) => { + const routeParts = d.route_name.split("-") + if (routeParts.length < 2) { + missingRoutes.push(d.route_name) + return null } - } - origin = fixName(origin) - destination = fixName(destination) - - return { - id: `sc_krl_${id}_${d.train_id}`.toLowerCase(), - station_id: id, - station_origin_id: stations.find( - ({ name }) => name === origin, - )?.id!, - station_destination_id: stations.find( - ({ name }) => name === destination, - )?.id!, - train_id: d.train_id, - line: d.ka_name, - route: d.route_name, - departs_at: parseTime(d.time_est).toISOString(), - arrives_at: 
parseTime(d.dest_time).toISOString(), - metadata: { - origin: { - color: d.color, + const originRaw = routeParts[0].trim() + const destinationRaw = + routeParts[routeParts.length - 1].trim() + + const originKey = normalizeStationKey(originRaw) + const destinationKey = normalizeStationKey(destinationRaw) + + const originId = stationIdByKey.get(originKey) + const destinationId = stationIdByKey.get(destinationKey) + + if (!originId || !destinationId) { + missingRoutes.push(d.route_name) + return null + } + + return { + id: `sc_krl_${id}_${d.train_id}`.toLowerCase(), + station_id: id, + station_origin_id: originId, + station_destination_id: destinationId, + train_id: d.train_id, + line: d.ka_name, + route: d.route_name, + departs_at: parseTime(d.time_est).toISOString(), + arrives_at: parseTime(d.dest_time).toISOString(), + metadata: { + origin: { + color: d.color, + }, }, - }, - } satisfies NewSchedule - }) + } satisfies NewSchedule + }) + .filter((value): value is NewSchedule => value !== null) + + if (missingRoutes.length > 0) { + console.warn( + `[SYNC][SCHEDULE][${id}] Skipped ${missingRoutes.length} rows with unknown stations. Sample: ${missingRoutes + .slice(0, 3) + .join(", ")}`, + ) + } + + if (values.length === 0) { + console.info( + `[SYNC][SCHEDULE][${id}] No valid schedule rows to insert`, + ) + return + } const insert = await db .insert(scheduleTable) @@ -185,14 +216,25 @@ const sync = async () => { `[SYNC][SCHEDULE][${id}] Updated station schedule availability status`, ) } else { - const err = await req.json() - const txt = await req.text() + const raw = await req.text() + let err: unknown = null + if (raw) { + try { + err = JSON.parse(raw) + } catch { + err = raw + } + } console.error( `[SYNC][SCHEDULE][${id}] Error fetch schedule data. Trace: ${JSON.stringify( - err, - )}. Status: ${req.status}. Req: ${txt}`, + err ?? "", + )}. Status: ${req.status}.`, + ) + throw new Error( + typeof err === "string" && err.length + ? 
err + : `Request failed with status ${req.status}`, ) - throw new Error(JSON.stringify(err)) } }), )