Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
64 commits
Select commit Hold shift + click to select a range
8e16505
move db schemas
abielzulio Sep 9, 2024
fef6e60
refactor: inject deps serverless neon
abielzulio Sep 9, 2024
bfd65bd
wip kv
abielzulio Sep 9, 2024
72820c2
init hono, openapi, scalar
abielzulio Sep 10, 2024
734c04b
wip: station route
abielzulio Sep 10, 2024
d89c398
wip
abielzulio Sep 11, 2024
9b30c1f
use neon instead d1
abielzulio Sep 13, 2024
0db2d90
script: change port
abielzulio Nov 10, 2024
83a32b8
fix: consistent station schema
abielzulio Nov 10, 2024
018a748
fix: proper sql station
abielzulio Nov 10, 2024
ce5a9b5
refactor: station controller
abielzulio Nov 10, 2024
4070dc8
refactor: station sync
abielzulio Nov 11, 2024
0862809
feat: sync schedule
abielzulio Nov 11, 2024
2becfe7
feat: schedule api
abielzulio Nov 11, 2024
823b8f1
fix: unify handle err metadata
abielzulio Nov 11, 2024
8a5fe43
refactor: rename to table
abielzulio Nov 11, 2024
6d43f9d
refactor: better station and schedule schema
abielzulio Nov 11, 2024
f7865ee
feat: route
abielzulio Nov 11, 2024
eff14df
refactor: response schema
abielzulio Nov 12, 2024
06b5f3d
fix: response generation
abielzulio Nov 12, 2024
1895baf
fix: route response schema
abielzulio Nov 12, 2024
51599ca
fix: add desc
abielzulio Nov 12, 2024
c931ccb
feat: init redis upstash
abielzulio Nov 12, 2024
96cc5c4
fix: add more stations
abielzulio Nov 12, 2024
0847f5d
fix: add redis env
abielzulio Nov 12, 2024
37f7212
fix: schedule sync station name
abielzulio Nov 12, 2024
8f316fd
fix: remove unused
abielzulio Nov 12, 2024
cc9d54b
fix: string date mode
abielzulio Nov 12, 2024
5c4400b
fix: desc
abielzulio Nov 12, 2024
f59e3c7
fix: db on delete
abielzulio Nov 12, 2024
d2e5ef7
fix: date string mode
abielzulio Nov 12, 2024
d9309d6
feat: cache func
abielzulio Nov 12, 2024
080217d
refactor: get rid of elysia
abielzulio Nov 12, 2024
e9a36ea
fix: main file, run cmds
abielzulio Nov 12, 2024
9cae214
chore: update, remove unused
abielzulio Nov 12, 2024
78b1c24
fix: rename schema
abielzulio Nov 12, 2024
d5c8e35
fix: move utils
abielzulio Nov 12, 2024
b4dbf26
refactor: table schema
abielzulio Nov 12, 2024
75c2e9f
remove unused
abielzulio Nov 12, 2024
bcdc988
feat: run sync cli
abielzulio Nov 12, 2024
af84349
fix: deploy cli
abielzulio Nov 12, 2024
3eabccf
perf: prepared statement
abielzulio Nov 12, 2024
1a13e5e
fix: remove unused
abielzulio Nov 12, 2024
9a6b6f8
perf: train idx
abielzulio Nov 12, 2024
04ebd66
remove unused
abielzulio Nov 12, 2024
d7e7d9a
fix: strict trail
abielzulio Nov 13, 2024
1cfe62e
fix: verbose
abielzulio Nov 13, 2024
2e1542b
refactor: move types
abielzulio Nov 13, 2024
d11af43
feat: aliases path
abielzulio Nov 13, 2024
7b577d0
chore: update wrangler
abielzulio Nov 13, 2024
babecf4
feat: redirect /
abielzulio Nov 13, 2024
b0e0713
refactor: use timestamp instead for arrival and departutre
abielzulio Nov 13, 2024
1e5e190
feat: date not null
abielzulio Nov 13, 2024
83a80d2
fix: not null date
abielzulio Nov 13, 2024
1dc9ad2
fix
abielzulio Nov 13, 2024
4961097
fix: derp missing
abielzulio Nov 13, 2024
3812318
fix: change station metadata
abielzulio Nov 13, 2024
7f2c387
feat: add tsup and build cmd
abielzulio Nov 14, 2024
fb0b4b4
feat: add cors
abielzulio Nov 14, 2024
463057d
fix: define typesafe metadata
abielzulio Nov 24, 2024
24a0f28
fix: `station_origin_id` & `station_destination_id` not null
abielzulio Nov 24, 2024
b756bfe
fix: date
abielzulio Nov 26, 2024
250edd0
feat: add github actions
abielzulio Nov 30, 2024
6bd135d
add docs
abielzulio Nov 30, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .dev.example.vars
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
DATABASE_URL="postgresql://comuline:password@localhost:5432/comuline"
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Comment: Hardcoded sensitive information in .dev.example.vars

Solution: Use environment variables or a secure vault service to manage sensitive information instead of hardcoding it in the source code.
!! Make sure the following suggestion is correct before committing it !!

Suggested change
DATABASE_URL="postgresql://comuline:password@localhost:5432/comuline"
DATABASE_URL="postgresql://username:password@localhost:5432/comuline" # Use environment variables instead

Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Comment: Hardcoded sensitive information in environment variables.

Solution: Use environment variables or secret management tools to handle sensitive information securely.
!! Make sure the following suggestion is correct before committing it !!

Suggested change
DATABASE_URL="postgresql://comuline:password@localhost:5432/comuline"
DATABASE_URL="postgresql://<username>:<password>@localhost:5432/comuline" # Use environment variables for <username> and <password>

Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Comment: Hardcoded sensitive information in environment variables.

Solution: Use environment variables or secrets management tools to store sensitive information securely.
!! Make sure the following suggestion is correct before committing it !!

Suggested change
DATABASE_URL="postgresql://comuline:password@localhost:5432/comuline"
DATABASE_URL="${DATABASE_URL}"

COMULINE_ENV="development"

# Take token from .env.db
UPSTASH_REDIS_REST_TOKEN=""
UPSTASH_REDIS_REST_URL="http://localhost:8079"
9 changes: 8 additions & 1 deletion .env.db
Original file line number Diff line number Diff line change
@@ -1,3 +1,10 @@
# PostgreSQL
POSTGRES_USER="comuline"
POSTGRES_PASSWORD="password"
POSTGRES_DB="comuline"
POSTGRES_DB="comuline"

# Redis
SRH_MODE=env
# openssl rand -base64 32
SRH_TOKEN="1Pf91ZNy5LDTKG621uhX/E73P8RmhVZu43kIV/WCmHg="
SRH_CONNECTION_STRING=redis://redis:6379
6 changes: 0 additions & 6 deletions .env.example

This file was deleted.

28 changes: 28 additions & 0 deletions .github/workflows/api.build.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# CI: compile-check the API on every pull request targeting main.
name: Build API on Pull Request

on:
  pull_request:
    branches: ["main"]
    types: [opened, reopened, synchronize]

# Cancel any in-flight run for the same PR when new commits arrive,
# so only the latest push is built.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build-test:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Install Bun
        uses: oven-sh/setup-bun@v2
        with:
          bun-version: "latest"

      - name: Install package deps
        run: bun i

      - name: Build package
        run: bun run build
28 changes: 28 additions & 0 deletions .github/workflows/api.deploy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# CD: build and deploy the API to Cloudflare Workers on every push to main.
name: Deploy API to Cloudflare Workers

on:
  push:
    branches: ["main"]

jobs:
  deploy:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Installing Bun
        uses: oven-sh/setup-bun@v2
        with:
          # Pin the Bun channel explicitly so this workflow resolves the same
          # runtime as the PR build workflow (api.build.yml).
          bun-version: "latest"

      - name: Install package deps
        run: bun i

      - name: Build package
        run: bun run build

      - name: Deploy to Cloudflare Workers
        uses: cloudflare/wrangler-action@v3
        with:
          # Both secrets must be configured in the repository settings.
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy --minify src/index.ts
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -41,3 +41,9 @@ yarn-error.log*
package-lock.json
**/*.bun
.env
wrangler.toml
.wrangler
.dev.vars

# build
.dist
18 changes: 0 additions & 18 deletions Dockerfile

This file was deleted.

105 changes: 34 additions & 71 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
# @comuline/api

An API to get the schedule of KRL commuter line in Jakarta and Yogyakarta using [Elsyia](https://elysiajs.com/) and [Bun](https://bun.sh/), deployed to [Render](https://render.com/). This API is primarily used on the [web app](https://comuline.com/) ([source code](https://github.com/comuline/web)).
An API to get the schedule of KRL commuter line in Jakarta and Yogyakarta using [Hono](https://hono.dev/) and [Bun](https://bun.sh/), deployed to [Cloudflare Workers](https://workers.cloudflare.com/). This API is primarily used on the [web app](https://comuline.com/) ([source code](https://github.com/comuline/web)).

### How does it work?

This API uses a daily cron job (at 00:00) to fetch the schedule of KRL commuter line in Jakarta and Yogyakarta from the official website of PT. KAI. The data is then processed and stored in a PostgreSQL database and cached in a Redis (for every once read request). All endpoints can be found in the [docs](https://www.api.comuline.com/docs).

### Technology stacks

1. [Elsyia](https://elysiajs.com/) API framework
1. [Hono](https://hono.dev/) API framework
2. [Bun](https://bun.sh/) runtime
3. PostgresSQL ([Neon](https://neon.tech/))
4. Redis ([Upstash](https://upstash.com/))
5. [Render](https://render.com/) deployment platform
3. (Serverless) PostgreSQL ([Neon](https://neon.tech/))
4. (Serverless) Redis ([Upstash](https://upstash.com/))
5. [Cloudflare Workers](https://workers.cloudflare.com/) deployment platform
6. [Drizzle](https://orm.drizzle.team/) ORM

## Getting Started
Expand All @@ -31,113 +31,76 @@ git clone https://github.com/comuline/api.git
bun install
```

3. Run database locally
3. Copy the `.dev.example.vars` to `.dev.vars`

```bash
docker-compose up -d
```
cp .dev.example.vars .dev.vars
```

4. Copy the `.env.example` to `.env`
4. Generate `UPSTASH_REDIS_REST_TOKEN` using `openssl rand -base64 32` (the same command documented in `.env.db`) and copy it to your `.dev.vars` file

```
cp .env.example .env
5. Run database locally

```bash
docker-compose up -d
```

5. Run the database migration
6. Run the database migration

```bash
bun db:generate && bun db:migrate
bun run migrate:apply
```

6. Sync the data and populate it into your local database (once only as you needed)
7. Sync the data and populate it into your local database (once only as you needed)

```bash
# Please do this in order
# 1. Sync station data and wait until it's done
curl --request POST --url http://localhost:3001/v1/station/
bun run sync:station
# 2. Sync schedule data
curl --request POST --url http://localhost:3001/v1/schedule/
bun run sync:schedule
```

### Deployment

1. Create a new PostgreSQL database in [Neon](https://neon.tech/) and copy the connection string value as `DATABASE_URL` in your `.env` file
1. Create a new PostgreSQL database in [Neon](https://neon.tech/) and copy the connection string value as `DATABASE_URL` in your `.production.vars` file

2. Run the database migration

```bash
bun db:generate && bun db:migrate
bun run migrate:apply
```

3. Sync the data and populate it into your remote database (once only as you needed)

```bash
# Please do this in order
# 1. Sync station data and wait until it's done
curl --request POST --url http://localhost:3001/v1/station/
bun run sync:station
# 2. Sync schedule data
curl --request POST --url http://localhost:3001/v1/schedule/

bun run sync:schedule
```

4. Generate `SYNC_TOKEN` (This is used in production level only to prevent unauthorized access to your `POST /v1/station` and `POST /v1/schedule` endpoint)
4. Add `COMULINE_ENV` to your `.production.vars` file

```bash
openssl rand -base64 32
# Copy the output value as a `SYNC_TOKEN`
```
COMULINE_ENV=production
```

2. Create a new Redis database in [Upstash](https://upstash.com/) and copy the connection string value as `REDIS_URL`
5. Create a new Redis database in [Upstash](https://upstash.com/) and copy the value of `UPSTASH_REDIS_REST_TOKEN` and `UPSTASH_REDIS_REST_URL` to your `.production.vars` file

3. Create a `Web Service` in [Render](https://render.com/), copy the `DATABASE_URL`, `REDIS_URL`, and `SYNC_TOKEN` as environment variables, and deploy the application.
6. Save your `.production.vars` file to your environment variables in your Cloudflare Workers using `wrangler`

```bash
bunx wrangler secret put --env production $(cat .production.vars)
```

4. Set the cron job to fetch the schedule data using [Cron-Job](https://cron-job.org/en/). Don't forget to set the `SYNC_TOKEN` as a header in your request. Add the `?from_cron=true` query parameter to flag the request as a cron job request.
7. Deploy the API to Cloudflare Workers

```bash
# Example
curl --request POST --url https://your-service-name.onrender.com/v1/station?from_cron=true -H "Authorization: Bearer ${SYNC_TOKEN}"
curl --request POST --url https://your-service-name.onrender.com/v1/schedule?from_cron=true -H "Authorization: Bearer ${SYNC_TOKEN}"
bun run deploy
```

### Database schema

> **Station**

| Column Name | Data Type | Description |
| ------------ | --------- | ------------------------------- |
| id | TEXT | Primary key (Station ID) |
| name | TEXT | Station name |
| daop | INTEGER | Station regional operation code |
| fgEnable | BOOLEAN | - |
| haveSchedule | BOOLEAN | Schedule availability status |
| updatedAt | TEXT | Last updated date |

> **Schedule**

| Column Name | Data Type | Description |
| --------------- | --------- | ----------------------------------- |
| id | TEXT | Primary key (Station ID + Train ID) |
| stationId | TEXT | Station ID |
| trainId | TEXT | Train ID |
| line | TEXT | Train commuter line |
| route | TEXT | Train route |
| color | TEXT | Commuter line color |
| destination | TEXT | Train destination |
| timeEstimated | TIME | Estimated time |
| destinationTime | TIME | Destination time |
| updatedAt | TEXT | Last updated date |

> **Sync**

| Column Name | Data Type | Description |
| ----------- | --------- | -------------------------------------- |
| id | TEXT | Primary key (Sync ID) |
| n | BIGINT | n of sync |
| type | ENUM | Sync type (manual, cron) |
| status | ENUM | Sync status (PENDING, SUCCESS, FAILED) |
| item | ENUM | Sync item (station, schedule) |
| duration | BIGINT | Sync duration |
| message | TEXT | Sync message (if status failed) |
| startedAt | TEXT | Sync started date |
| endedAt | TEXT | Sync ended date |
| createdAt | TEXT | Sync created date |
> TBD
Binary file modified bun.lockb
Binary file not shown.
29 changes: 21 additions & 8 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,21 +1,34 @@
version: "3.9"

services:
# Serverless PostgreSQL
postgres:
image: "postgres:15.2-alpine"
restart: always
container_name: "comuline-db"
ports:
- "5432:5432"
volumes:
- db:/var/lib/postgresql/data
env_file:
- ./.env.db
pg_proxy:
image: ghcr.io/neondatabase/wsproxy:latest
environment:
APPEND_PORT: "postgres:5432"
ALLOW_ADDR_REGEX: ".*"
LOG_TRAFFIC: "true"
ports:
- "5433:80"
depends_on:
- postgres

# Serverless Redis
redis:
image: redis
container_name: "comuline-cache"
ports:
- "6379:6379"

volumes:
db:
serverless-redis-http:
ports:
- "8079:80"
image: hiett/serverless-redis-http:latest
env_file:
- ./.env.db
depends_on:
- redis
7 changes: 2 additions & 5 deletions drizzle.config.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,7 @@
import type { Config } from "drizzle-kit"

export default {
out: "./drizzle/migrations",
dialect: "postgresql",
schema: "./src/db/schema",
out: "./src/db/migrations",
driver: "pg",
dbCredentials: {
connectionString: process.env.DATABASE_URL!,
},
} satisfies Config
57 changes: 57 additions & 0 deletions drizzle/migrations/0000_talented_daimon_hellstrom.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
-- drizzle-kit generated migration (do not hand-edit the statements: they must
-- stay in sync with drizzle's snapshot metadata).
-- Idempotent enum creation: swallow duplicate_object so re-running the
-- migration against an existing database is a no-op.
DO $$ BEGIN
CREATE TYPE "public"."station_type" AS ENUM('KRL', 'MRT', 'LRT');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
-- One row per (station, train) departure. "id" is the application-built key;
-- see the PR commit "fix: `station_origin_id` & `station_destination_id` not null"
-- for later tightening of the nullable FK columns below.
CREATE TABLE IF NOT EXISTS "schedule" (
"id" text PRIMARY KEY NOT NULL,
"station_id" text NOT NULL,
-- NOTE(review): origin/destination are nullable here; a follow-up commit in
-- this PR makes them NOT NULL — confirm which migration is authoritative.
"station_origin_id" text,
"station_destination_id" text,
"train_id" text NOT NULL,
"line" text NOT NULL,
"route" text NOT NULL,
"time_departure" time NOT NULL,
"time_at_destination" time NOT NULL,
"metadata" jsonb,
-- NOTE(review): created_at/updated_at default to now() but are nullable;
-- consider NOT NULL if no writer ever supplies NULL explicitly.
"created_at" timestamp with time zone DEFAULT now(),
"updated_at" timestamp with time zone DEFAULT now(),
-- NOTE(review): redundant with the PRIMARY KEY on "id" (PK already implies
-- a unique index); harmless but creates a duplicate index.
CONSTRAINT "schedule_id_unique" UNIQUE("id")
);
Comment on lines +7 to +21
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Comment: Use of 'IF NOT EXISTS' in SQL migrations can lead to performance issues

Solution: Consider handling migrations in a way that ensures the database schema is in the desired state without relying on 'IF NOT EXISTS'.
!! Make sure the following suggestion is correct before committing it !!

Suggested change
CREATE TABLE IF NOT EXISTS "schedule" (
"id" text PRIMARY KEY NOT NULL,
"station_id" text NOT NULL,
"station_origin_id" text,
"station_destination_id" text,
"train_id" text NOT NULL,
"line" text NOT NULL,
"route" text NOT NULL,
"time_departure" time NOT NULL,
"time_at_destination" time NOT NULL,
"metadata" jsonb,
"created_at" timestamp with time zone DEFAULT now(),
"updated_at" timestamp with time zone DEFAULT now(),
CONSTRAINT "schedule_id_unique" UNIQUE("id")
);
DROP TABLE IF EXISTS "schedule"; CREATE TABLE "schedule" ( ... );

--> statement-breakpoint
-- Station master table: "uid" is the surrogate primary key, "id" the natural
-- station code (referenced by schedule FKs below), "type" the transit system.
CREATE TABLE IF NOT EXISTS "station" (
"uid" text PRIMARY KEY NOT NULL,
"id" text NOT NULL,
"name" text NOT NULL,
"type" "station_type" NOT NULL,
"metadata" jsonb,
"created_at" timestamp with time zone DEFAULT now(),
"updated_at" timestamp with time zone DEFAULT now(),
-- NOTE(review): "station_uid_unique" is redundant with the PRIMARY KEY on "uid".
CONSTRAINT "station_uid_unique" UNIQUE("uid"),
CONSTRAINT "station_id_unique" UNIQUE("id")
);
--> statement-breakpoint
-- Idempotent FK creation: duplicate_object is swallowed so the migration can
-- be replayed safely. All three FKs use ON DELETE/UPDATE no action, i.e. a
-- referenced station cannot be deleted while schedules point at it.
DO $$ BEGIN
ALTER TABLE "schedule" ADD CONSTRAINT "schedule_station_id_station_id_fk" FOREIGN KEY ("station_id") REFERENCES "public"."station"("id") ON DELETE no action ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "schedule" ADD CONSTRAINT "schedule_station_origin_id_station_id_fk" FOREIGN KEY ("station_origin_id") REFERENCES "public"."station"("id") ON DELETE no action ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "schedule" ADD CONSTRAINT "schedule_station_destination_id_station_id_fk" FOREIGN KEY ("station_destination_id") REFERENCES "public"."station"("id") ON DELETE no action ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
-- NOTE(review): "schedule_idx" and "station_uidx" duplicate the indexes the
-- PRIMARY KEY / UNIQUE constraints already create; only the lookup indexes on
-- schedule.station_id, station.id and station.type add new access paths.
CREATE UNIQUE INDEX IF NOT EXISTS "schedule_idx" ON "schedule" USING btree ("id");--> statement-breakpoint
CREATE INDEX IF NOT EXISTS "schedule_station_idx" ON "schedule" USING btree ("station_id");--> statement-breakpoint
CREATE UNIQUE INDEX IF NOT EXISTS "station_uidx" ON "station" USING btree ("uid");--> statement-breakpoint
CREATE INDEX IF NOT EXISTS "station_idx" ON "station" USING btree ("id");--> statement-breakpoint
CREATE INDEX IF NOT EXISTS "station_type_idx" ON "station" USING btree ("type");
Loading