Skip to content

Commit

Permalink
feat: 30-day retention for tracks
Browse files Browse the repository at this point in the history
  • Loading branch information
FelixNgFender committed Oct 18, 2024
1 parent 5149f19 commit 8bfd8bd
Show file tree
Hide file tree
Showing 6 changed files with 190 additions and 47 deletions.
3 changes: 2 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,6 @@
"typescript.tsdk": "node_modules/typescript/lib",
"files.associations": {
"*.css": "tailwindcss"
}
},
"makefile.configureOnOpen": false
}
21 changes: 21 additions & 0 deletions cleanup.Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Builds the scheduled-cleanup container: installs npm dependencies in an
# isolated stage, then runs scripts/cron-monthly-cleanup.mjs as a non-root user.
FROM node:20-alpine AS base

# Install dependencies only when needed
FROM base AS deps
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY package.json package-lock.json ./
# Drop the "prepare" lifecycle script (typically husky) so npm install does not
# try to set up git hooks inside the image.
RUN ["npm", "pkg", "delete", "scripts.prepare"]
# RUN [ "npm", "ci", "--omit=dev" ]
# NOTE(review): full `npm i` installs devDependencies too — presumably because
# the cleanup script needs packages listed there; confirm, then prefer the
# commented `npm ci --omit=dev` above for a smaller, lockfile-exact image.
RUN [ "npm", "i" ]

# Runtime stage: only node_modules, package.json, and the scripts directory.
FROM base AS cleanup
WORKDIR /app
COPY --from=deps /app/node_modules node_modules/
COPY package.json .
COPY scripts scripts
# assumes this build arg is the host platform's private-networking toggle
# (e.g. Railway) — TODO confirm; it is declared but not read in this file.
ARG ENABLE_ALPINE_PRIVATE_NETWORKING
ENV NODE_ENV=production
# Run unprivileged; the base image ships a `node` user.
USER node
CMD [ "node", "scripts/cron-monthly-cleanup.mjs" ]
2 changes: 1 addition & 1 deletion migration.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM node:20-alpine as base
FROM node:20-alpine AS base

# Install dependencies only when needed
FROM base AS deps
Expand Down
108 changes: 63 additions & 45 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
"db:introspect": "drizzle-kit introspect:pg",
"db:generate": "drizzle-kit generate:pg",
"db:migrate": "node -r dotenv/config scripts/migrate.mjs",
"cleanup": "node -r dotenv/config scripts/cron-monthly-cleanup.mjs",
"email:dev": "email dev --port 3001 --dir src/components/emails"
},
"dependencies": {
Expand Down
102 changes: 102 additions & 0 deletions scripts/cron-monthly-cleanup.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
/**
 * Monthly cleanup job for the 30-day track-retention policy.
 *
 * Empties the configured S3 bucket (all objects and incomplete multipart
 * uploads) and deletes every row from the `asset` and `track` tables.
 * Destructive by design — intended to run as a scheduled container
 * (see cleanup.Dockerfile). Exits 0 on success, 1 on any failure.
 */
import { sql } from 'drizzle-orm';
import { drizzle } from 'drizzle-orm/postgres-js';
import * as Minio from 'minio';
import postgres from 'postgres';

// Fail fast with a message naming exactly which variables are missing.
const requiredEnv = [
    'DATABASE_URL',
    'S3_ENDPOINT',
    'S3_PORT',
    'S3_USE_SSL',
    'S3_ACCESS_KEY',
    'S3_SECRET_KEY',
    'S3_BUCKET_NAME',
];
const missing = requiredEnv.filter((name) => !process.env[name]);
if (missing.length > 0) {
    throw new Error(
        `S3 and database environment variables are not set: ${missing.join(', ')}`,
    );
}

/**
 * Drain a readable object stream into an array.
 *
 * The original attached 'data'/'end' handlers and then fell through to
 * `process.exit(0)`, killing the process before the handlers ran; wrapping
 * the stream in a promise lets main() await completion, and a stream
 * 'error' now rejects (fails the job) instead of only being logged.
 *
 * @param {import('stream').Readable} stream - MinIO listing stream.
 * @returns {Promise<Array<object>>} every emitted item, in order.
 */
const collect = (stream) =>
    new Promise((resolve, reject) => {
        const items = [];
        stream.on('data', (item) => items.push(item));
        stream.on('error', reject);
        stream.on('end', () => resolve(items));
    });

const main = async () => {
    const bucket = process.env.S3_BUCKET_NAME;
    const pgClient = postgres(process.env.DATABASE_URL);
    const db = drizzle(pgClient);
    const fileStorage = new Minio.Client({
        endPoint: process.env.S3_ENDPOINT,
        port: Number.parseInt(process.env.S3_PORT, 10),
        useSSL: 'true' === process.env.S3_USE_SSL,
        accessKey: process.env.S3_ACCESS_KEY,
        secretKey: process.env.S3_SECRET_KEY,
    });

    try {
        // Refuse to run against a misconfigured bucket rather than silently
        // "cleaning" nothing. (The original also had a stray no-op
        // `fileStorage.removeObjects;` expression here — removed.)
        const exists = await fileStorage.bucketExists(bucket);
        if (!exists) {
            throw new Error(`Bucket ${bucket} does not exist`);
        }
        console.info(`Bucket ${bucket} exists.`);

        // Remove all objects from the bucket. removeObjects accepts the full
        // name list and batches deletes internally.
        const objects = await collect(fileStorage.listObjects(bucket));
        const objectNames = objects
            .map((obj) => obj.name)
            .filter((name) => Boolean(name));
        console.log(
            `Removing ${objectNames.length} objects from bucket ${bucket}`,
        );
        if (objectNames.length > 0) {
            await fileStorage.removeObjects(bucket, objectNames);
        }
        console.log(
            `Removed ${objectNames.length} objects from bucket ${bucket}`,
        );

        // Abort any incomplete multipart uploads so their parts stop
        // consuming storage. No bulk API — remove one upload at a time.
        const incompleteUploads = await collect(
            fileStorage.listIncompleteUploads(bucket),
        );
        console.log(
            `Removing ${incompleteUploads.length} incomplete uploads from bucket ${bucket}`,
        );
        for (const upload of incompleteUploads) {
            await fileStorage.removeIncompleteUpload(bucket, upload.key);
        }
        console.log(
            `Removed ${incompleteUploads.length} incomplete uploads from bucket ${bucket}`,
        );

        // Delete assets and tracks tables.
        // Raw SQL on purpose — keep table names in sync with src/infra/schema.
        await db.transaction(async (tx) => {
            await tx.execute(sql`DELETE FROM asset`);
            await tx.execute(sql`DELETE FROM track`);
        });
    } finally {
        // Close the postgres pool so the process can exit cleanly; the
        // original relied on process.exit(0) inside main(), which also raced
        // ahead of the still-pending stream handlers.
        await pgClient.end();
    }
};

try {
    await main();
    process.exit(0);
} catch (error) {
    console.error(error);
    process.exit(1);
}

0 comments on commit 8bfd8bd

Please sign in to comment.