diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..b41bcf98 --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +# **.mjs \ No newline at end of file diff --git a/__tests__/csvAlgorithm.test.ts b/__tests__/csvAlgorithm.test.ts new file mode 100644 index 00000000..4b304bc5 --- /dev/null +++ b/__tests__/csvAlgorithm.test.ts @@ -0,0 +1,48 @@ +import { + matchCanonicalTrack, + sortTracks, +} from '@utils/csv-ingestion/csvAlgorithm'; + +describe('csvAlgorithm track matching', () => { + it('matches tracks case-insensitively to canonical names', () => { + expect(matchCanonicalTrack('best hardware hack')).toBe( + 'Best Hardware Hack' + ); + expect(matchCanonicalTrack('Best hardware hack')).toBe( + 'Best Hardware Hack' + ); + }); + + it('does not attempt to correct spelling', () => { + expect(matchCanonicalTrack('Best Hardwre Hack')).toBeNull(); + expect(matchCanonicalTrack('Best Assistive Technlogy')).toBeNull(); + }); + + it('ingests all opt-in tracks and does not cap length', () => { + const tracks = sortTracks( + 'best hardware hack', + '', + '', + 'Best Use of Gemini API; Best Use of MongoDB Atlas, Best Use of Vectara | Best Use of Auth0' + ); + + expect(tracks).toEqual([ + 'Best Hardware Hack', + 'Best Use of Gemini API', + 'Best Use of MongoDB Atlas', + 'Best Use of Vectara', + 'Best Use of Auth0', + ]); + }); + + it('filters out excluded tracks', () => { + const tracks = sortTracks( + 'Best Hack for Social Good', + "Hacker's Choice Award", + '', + 'Best Hack for Social Good, Best Hardware Hack' + ); + + expect(tracks).toEqual(['Best Hardware Hack']); + }); +}); diff --git a/__tests__/csvValidation.test.ts b/__tests__/csvValidation.test.ts new file mode 100644 index 00000000..4489f749 --- /dev/null +++ b/__tests__/csvValidation.test.ts @@ -0,0 +1,90 @@ +import { validateCsvBlob } from '@utils/csv-ingestion/csvAlgorithm'; + +describe('csvAlgorithm validation', () => { + it("silently ignores 'N/A' without warnings", async () => { + const csv = + 'Table Number,Project Status,Project Title,Track #1 (Primary Track),Track #2,Track #3,Opt-In Prizes\n' + + '12,Submitted (Gallery/Visible),Test Project,Best Beginner Hack,N/A,,\n'; + + const blob = new Blob([csv], { type: 'text/csv' }); + const res = await validateCsvBlob(blob); + + expect(res.ok).toBe(true); + expect(res.report.errorRows).toBe(0); + expect(res.report.warningRows).toBe(0); + expect(res.report.issues).toEqual([]); + }); + + it('treats duplicate tracks as warnings (non-blocking)', async () => { + const csv = + 'Table Number,Project Status,Project Title,Track #1 (Primary Track),Track #2,Track #3,Opt-In Prizes\n' + + '87,Submitted (Gallery/Visible),PartyPal,Best UI/UX Design,Best UI/UX Design,,\n'; + + const blob = new Blob([csv], { type: 'text/csv' }); + const res = await validateCsvBlob(blob); + + expect(res.ok).toBe(true); + expect(res.report.errorRows).toBe(0); + expect(res.report.warningRows).toBe(1); + expect(res.report.issues[0].severity).toBe('warning'); + expect(res.report.issues[0].duplicateTracks).toEqual(['Best UI/UX Design']); + }); + + it('detects duplicate teamNumbers as errors', async () => { + const csv = + 'Table Number,Project Status,Project Title,Track #1 (Primary Track),Track #2,Track #3,Opt-In Prizes\n' + + '42,Submitted (Gallery/Visible),Project A,Best Hardware Hack,,,\n' + + '42,Submitted (Gallery/Visible),Project B,Best UI/UX Design,,,\n'; + + const blob = new Blob([csv], { type: 'text/csv' }); + const res = await validateCsvBlob(blob); + + // Should have 1 error issue: second row 
with same teamNumber is flagged + expect(res.ok).toBe(false); + expect(res.report.errorRows).toBe(1); + expect(res.report.issues.length).toBe(1); + expect(res.report.issues[0].severity).toBe('error'); + expect(res.report.issues[0].duplicateTeamNumber).toBe(42); + expect(res.report.issues[0].teamNumber).toBe(42); + }); + + it('detects multiple duplicate teamNumbers', async () => { + const csv = + 'Table Number,Project Status,Project Title,Track #1 (Primary Track),Track #2,Track #3,Opt-In Prizes\n' + + '10,Submitted (Gallery/Visible),Project A,Best Hardware Hack,,,\n' + + '10,Submitted (Gallery/Visible),Project B,Best UI/UX Design,,,\n' + + '20,Submitted (Gallery/Visible),Project C,Best Beginner Hack,,,\n' + + '20,Submitted (Gallery/Visible),Project D,Best Use of AWS,,,\n'; + + const blob = new Blob([csv], { type: 'text/csv' }); + const res = await validateCsvBlob(blob); + + expect(res.ok).toBe(false); + expect(res.report.errorRows).toBe(2); + expect(res.report.issues.length).toBe(2); + + // Both should be errors with duplicateTeamNumber set + const duplicateIssues = res.report.issues.filter( + (i) => i.duplicateTeamNumber !== undefined + ); + expect(duplicateIssues.length).toBe(2); + expect(duplicateIssues[0].duplicateTeamNumber).toBe(10); + expect(duplicateIssues[1].duplicateTeamNumber).toBe(20); + }); + + it('does not flag unique teamNumbers as duplicates', async () => { + const csv = + 'Table Number,Project Status,Project Title,Track #1 (Primary Track),Track #2,Track #3,Opt-In Prizes\n' + + '10,Submitted (Gallery/Visible),Project A,Best Hardware Hack,,,\n' + + '11,Submitted (Gallery/Visible),Project B,Best UI/UX Design,,,\n' + + '12,Submitted (Gallery/Visible),Project C,Best Beginner Hack,,,\n'; + + const blob = new Blob([csv], { type: 'text/csv' }); + const res = await validateCsvBlob(blob); + + expect(res.ok).toBe(true); + expect(res.report.errorRows).toBe(0); + expect(res.report.warningRows).toBe(0); + expect(res.report.issues).toEqual([]); + }); +}); diff --git a/__tests__/logic/checkTeamsPopulated.test.ts b/__tests__/logic/checkTeamsPopulated.test.ts new file mode 100644 index 00000000..4a8d3d55 --- /dev/null +++ b/__tests__/logic/checkTeamsPopulated.test.ts @@ -0,0 +1,88 @@ +import { db } from '../../jest.setup'; +import checkTeamsPopulated from '@actions/logic/checkTeamsPopulated'; +import * as mongoClient from '@utils/mongodb/mongoClient.mjs'; + +beforeEach(async () => { + await db.collection('teams').deleteMany({}); +}); + +describe('checkTeamsPopulated', () => { + it('should return populated false and count 0 when no teams exist', async () => { + const result = await checkTeamsPopulated(); + expect(result.ok).toBe(true); + expect(result.populated).toBe(false); + expect(result.count).toBe(0); + expect(result.error).toBe(null); + }); + + it('should return populated true and correct count when teams exist', async () => { + await db.collection('teams').insertMany( + [ + { + name: 'Team 1', + teamNumber: 1, + tableNumber: 1, + tracks: ['Best Hardware Hack'], + active: true, + }, + { + name: 'Team 2', + teamNumber: 2, + tableNumber: 2, + tracks: ['Data Science/Machine Learning'], + active: true, + }, + { + name: 'Team 3', + teamNumber: 3, + tableNumber: 3, + tracks: ['Beginner'], + active: false, + }, + ], + { bypassDocumentValidation: true } + ); + + const result = await checkTeamsPopulated(); + expect(result.ok).toBe(true); + expect(result.populated).toBe(true); + expect(result.count).toBe(3); + expect(result.error).toBe(null); + }); + + it('should return populated true and count 1 
when exactly one team exists', async () => { + await db.collection('teams').insertOne( + { + name: 'Solo Team', + teamNumber: 1, + tableNumber: 1, + tracks: ['Best Hardware Hack'], + active: true, + }, + { bypassDocumentValidation: true } + ); + + const result = await checkTeamsPopulated(); + expect(result.ok).toBe(true); + expect(result.populated).toBe(true); + expect(result.count).toBe(1); + expect(result.error).toBe(null); + }); + + it('should handle database errors gracefully', async () => { + // Mock the getDatabase to throw an error + const mockGetDatabase = jest + .spyOn(mongoClient, 'getDatabase') + .mockRejectedValue(new Error('Database connection failed')); + + const result = await checkTeamsPopulated(); + + expect(result.ok).toBe(false); + expect(result.populated).toBe(false); + expect(result.count).toBe(0); + expect(result.error).toBe('Database connection failed'); + + // Restore the mock + mockGetDatabase.mockRestore(); + }); +}); diff --git a/__tests__/logic/ingestTeams.test.ts b/__tests__/logic/ingestTeams.test.ts new file mode 100644 index 00000000..78e862af --- /dev/null +++ b/__tests__/logic/ingestTeams.test.ts @@ -0,0 +1,180 @@ +import { db } from '../../jest.setup'; +import ingestTeams from '@actions/logic/ingestTeams'; +import ParsedRecord from '@typeDefs/parsedRecord'; +import { CreateManyTeams } from '@datalib/teams/createTeams'; + +// Mock the CreateManyTeams function +jest.mock('@datalib/teams/createTeams', () => ({ + CreateManyTeams: jest.fn(), +})); + +const mockCreateManyTeams = CreateManyTeams as jest.MockedFunction< + typeof CreateManyTeams +>; + +beforeEach(async () => { + await db.collection('teams').deleteMany({}); + jest.clearAllMocks(); +}); + +describe('ingestTeams', () => { + it('should delegate to CreateManyTeams with the provided body', async () => { + const mockTeams: ParsedRecord[] = [ + { + name: 'Team 1', + teamNumber: 1, + tableNumber: 1, + tracks: ['Track A'], + active: true, + }, + { + name: 'Team 2', + teamNumber: 2, + tableNumber: 2, + tracks: ['Track B'], + active: true, + }, + ]; + + const mockResponse = { + ok: true, + body: mockTeams, + error: null, + status: 201, + }; + + mockCreateManyTeams.mockResolvedValue(mockResponse); + + const result = await ingestTeams(mockTeams); + + expect(mockCreateManyTeams).toHaveBeenCalledTimes(1); + expect(mockCreateManyTeams).toHaveBeenCalledWith(mockTeams); + expect(result).toEqual(mockResponse); + }); + + it('should handle empty body by delegating to CreateManyTeams', async () => { + const emptyBody = []; + const mockResponse = { + ok: false, + body: null, + error: 'No Content Provided', + }; + + mockCreateManyTeams.mockResolvedValue(mockResponse); + + const result = await ingestTeams(emptyBody); + + expect(mockCreateManyTeams).toHaveBeenCalledTimes(1); + expect(mockCreateManyTeams).toHaveBeenCalledWith(emptyBody); + expect(result).toEqual(mockResponse); + }); + + it('should handle CreateManyTeams errors gracefully', async () => { + const mockTeams: ParsedRecord[] = [ + { + name: 'Invalid Team', + teamNumber: 999, + tableNumber: 999, + tracks: ['Invalid Track'], + active: true, + }, + ]; + + const mockResponse = { + ok: false, + body: null, + error: 'Invalid track', + }; + + mockCreateManyTeams.mockResolvedValue(mockResponse); + + const result = await ingestTeams(mockTeams); + + expect(mockCreateManyTeams).toHaveBeenCalledTimes(1); + expect(result).toEqual(mockResponse); + }); + + it('should handle duplicate team numbers by delegating to CreateManyTeams', async () => { + const mockTeams: ParsedRecord[] 
= [ + { + name: 'Team 1', + teamNumber: 1, + tableNumber: 1, + tracks: ['Track A'], + active: true, + }, + { + name: 'Team 1 Duplicate', + teamNumber: 1, + tableNumber: 2, + tracks: ['Track B'], + active: true, + }, + ]; + + const mockResponse = { + ok: false, + body: null, + error: 'Request contains duplicate team number(s)', + }; + + mockCreateManyTeams.mockResolvedValue(mockResponse); + + const result = await ingestTeams(mockTeams); + + expect(mockCreateManyTeams).toHaveBeenCalledTimes(1); + expect(result).toEqual(mockResponse); + }); + + it('should handle single team ingestion', async () => { + const mockTeam: ParsedRecord[] = [ + { + name: 'Solo Team', + teamNumber: 1, + tableNumber: 1, + tracks: ['Track A', 'Track B'], + active: true, + }, + ]; + + const mockResponse = { + ok: true, + body: mockTeam, + error: null, + status: 201, + }; + + mockCreateManyTeams.mockResolvedValue(mockResponse); + + const result = await ingestTeams(mockTeam); + + expect(mockCreateManyTeams).toHaveBeenCalledTimes(1); + expect(result).toEqual(mockResponse); + }); + + it('should preserve all team properties when delegating', async () => { + const mockTeamsWithAllProps: ParsedRecord[] = [ + { + name: 'Complete Team', + teamNumber: 42, + tableNumber: 10, + tracks: ['Track A', 'Track B', 'Track C'], + active: false, + }, + ]; + + const mockResponse = { + ok: true, + body: mockTeamsWithAllProps, + error: null, + status: 201, + }; + + mockCreateManyTeams.mockResolvedValue(mockResponse); + + const result = await ingestTeams(mockTeamsWithAllProps); + + expect(mockCreateManyTeams).toHaveBeenCalledWith(mockTeamsWithAllProps); + expect(result).toEqual(mockResponse); + }); +}); diff --git a/__tests__/logic/validateCSV.test.ts b/__tests__/logic/validateCSV.test.ts new file mode 100644 index 00000000..cfa1fb59 --- /dev/null +++ b/__tests__/logic/validateCSV.test.ts @@ -0,0 +1,285 @@ +import validateCSV from '@actions/logic/validateCSV'; +import { validateCsvBlob } from '@utils/csv-ingestion/csvAlgorithm'; +import ParsedRecord from '@typeDefs/parsedRecord'; + +// Mock the validateCsvBlob function +jest.mock('@utils/csv-ingestion/csvAlgorithm', () => ({ + validateCsvBlob: jest.fn(), +})); + +const mockValidateCsvBlob = validateCsvBlob as jest.MockedFunction< + typeof validateCsvBlob +>; + +beforeEach(() => { + jest.clearAllMocks(); +}); + +describe('validateCSV', () => { + it('should return error when no file is provided', async () => { + const formData = new FormData(); + + const result = await validateCSV(formData); + + expect(result.ok).toBe(false); + expect(result.body).toBe(null); + expect(result.validBody).toBe(null); + expect(result.report).toBe(null); + expect(result.error).toBe('Missing file'); + expect(mockValidateCsvBlob).not.toHaveBeenCalled(); + }); + + it('should delegate to validateCsvBlob with a valid file', async () => { + const mockCsvContent = + 'Table Number,Project Title,Track #1\n1,Test Project,Track A'; + const file = new File([mockCsvContent], 'test.csv', { type: 'text/csv' }); + const formData = new FormData(); + formData.append('file', file); + + const mockTeams: ParsedRecord[] = [ + { + name: 'Test Project', + teamNumber: 1, + tableNumber: 1, + tracks: ['Track A'], + active: true, + }, + ]; + + const mockValidationResponse = { + ok: true, + body: mockTeams, + validBody: mockTeams, + report: { + totalTeamsParsed: 1, + validTeams: 1, + errorRows: 0, + warningRows: 0, + unknownTracks: [], + issues: [], + }, + error: null, + }; + + mockValidateCsvBlob.mockResolvedValue(mockValidationResponse); + + 
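+    // validateCSV reads the uploaded File out of the FormData and forwards its
+    // contents to validateCsvBlob as a Blob, which is why the mock is asserted
+    // to receive a Blob below.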
const result = await validateCSV(formData); + + expect(mockValidateCsvBlob).toHaveBeenCalledTimes(1); + expect(mockValidateCsvBlob).toHaveBeenCalledWith(expect.any(Blob)); + expect(result).toEqual(mockValidationResponse); + }); + + it('should handle validation errors from validateCsvBlob', async () => { + const mockCsvContent = + 'Table Number,Project Title,Track #1\n,Invalid,BadTrack'; + const file = new File([mockCsvContent], 'invalid.csv', { + type: 'text/csv', + }); + const formData = new FormData(); + formData.append('file', file); + + const mockValidationResponse = { + ok: false, + body: null, + validBody: null, + report: { + totalTeamsParsed: 1, + validTeams: 0, + errorRows: 1, + warningRows: 0, + unknownTracks: ['BadTrack'], + issues: [ + { + rowIndex: 1, + teamNumberRaw: '', + severity: 'error' as const, + invalidTracks: ['BadTrack'], + excludedTracks: [], + duplicateTracks: [], + autoFixedTracks: [], + missingFields: ['Table Number'], + contactEmails: [], + contactNames: [], + memberEmails: [], + memberNames: [], + memberColumnsFromTeamMember1: [], + }, + ], + }, + error: 'CSV validation failed. Fix errors and re-validate.', + }; + + mockValidateCsvBlob.mockResolvedValue(mockValidationResponse); + + const result = await validateCSV(formData); + + expect(result.ok).toBe(false); + expect(result.body).toBe(null); + expect(result.validBody).toBe(null); + expect(result.report?.errorRows).toBe(1); + expect(result.error).toBe( + 'CSV validation failed. Fix errors and re-validate.' + ); + }); + + it('should preserve all response fields from validateCsvBlob', async () => { + const mockCsvContent = + 'Table Number,Project Title,Track #1\n1,Test,Track A'; + const file = new File([mockCsvContent], 'test.csv', { type: 'text/csv' }); + const formData = new FormData(); + formData.append('file', file); + + const mockTeams: ParsedRecord[] = [ + { + name: 'Test', + teamNumber: 1, + tableNumber: 1, + tracks: ['Track A'], + active: true, + }, + ]; + + const mockValidationResponse = { + ok: true, + body: mockTeams, + validBody: mockTeams, + report: { + totalTeamsParsed: 1, + validTeams: 1, + errorRows: 0, + warningRows: 0, + unknownTracks: [], + issues: [], + }, + error: null, + }; + + mockValidateCsvBlob.mockResolvedValue(mockValidationResponse); + + const result = await validateCSV(formData); + + expect(result.ok).toBe(mockValidationResponse.ok); + expect(result.body).toEqual(mockValidationResponse.body); + expect(result.validBody).toEqual(mockValidationResponse.validBody); + expect(result.report).toEqual(mockValidationResponse.report); + expect(result.error).toBe(mockValidationResponse.error); + }); + + it('should handle files with different MIME types', async () => { + const mockCsvContent = 'Table Number,Project Title\n1,Test'; + const file = new File([mockCsvContent], 'test.txt', { + type: 'text/plain', + }); + const formData = new FormData(); + formData.append('file', file); + + const mockTeams: ParsedRecord[] = [ + { + name: 'Test', + teamNumber: 1, + tableNumber: 1, + tracks: ['Track A'], + active: true, + }, + ]; + + const mockValidationResponse = { + ok: true, + body: mockTeams, + validBody: mockTeams, + report: { + totalTeamsParsed: 1, + validTeams: 1, + errorRows: 0, + warningRows: 0, + unknownTracks: [], + issues: [], + }, + error: null, + }; + + mockValidateCsvBlob.mockResolvedValue(mockValidationResponse); + + const result = await validateCSV(formData); + + expect(mockValidateCsvBlob).toHaveBeenCalledWith(expect.any(Blob)); + expect(result).toEqual(mockValidationResponse); + }); + + 
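+  // Note: `body` carries every parsed row while `validBody` excludes rows with
+  // errors; the partial-validation test below exercises that distinction.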
it('should handle partial validation with both valid and invalid teams', async () => { + const mockCsvContent = + 'Table Number,Project Title,Track #1\n1,Valid Team,Track A\n,Invalid Team,Track B'; + const file = new File([mockCsvContent], 'mixed.csv', { type: 'text/csv' }); + const formData = new FormData(); + formData.append('file', file); + + const mockAllTeams: ParsedRecord[] = [ + { + name: 'Valid Team', + teamNumber: 1, + tableNumber: 1, + tracks: ['Track A'], + active: true, + }, + { + name: 'Invalid Team', + teamNumber: NaN, + tableNumber: 2, + tracks: ['Track B'], + active: true, + }, + ]; + + const mockValidTeams: ParsedRecord[] = [ + { + name: 'Valid Team', + teamNumber: 1, + tableNumber: 1, + tracks: ['Track A'], + active: true, + }, + ]; + + const mockValidationResponse = { + ok: false, + body: mockAllTeams, + validBody: mockValidTeams, + report: { + totalTeamsParsed: 2, + validTeams: 1, + errorRows: 1, + warningRows: 0, + unknownTracks: [], + issues: [ + { + rowIndex: 2, + teamNumberRaw: '', + severity: 'error' as const, + invalidTracks: [], + excludedTracks: [], + duplicateTracks: [], + autoFixedTracks: [], + missingFields: ['Table Number'], + contactEmails: [], + contactNames: [], + memberEmails: [], + memberNames: [], + memberColumnsFromTeamMember1: [], + }, + ], + }, + error: 'CSV validation failed. Fix errors and re-validate.', + }; + + mockValidateCsvBlob.mockResolvedValue(mockValidationResponse); + + const result = await validateCSV(formData); + + expect(result.ok).toBe(false); + expect(result.body).toEqual(mockAllTeams); + expect(result.validBody).toEqual(mockValidTeams); + expect(result.report?.validTeams).toBe(1); + expect(result.report?.errorRows).toBe(1); + }); +}); diff --git a/app/(api)/_actions/logic/checkTeamsPopulated.ts b/app/(api)/_actions/logic/checkTeamsPopulated.ts new file mode 100644 index 00000000..5c4eaa39 --- /dev/null +++ b/app/(api)/_actions/logic/checkTeamsPopulated.ts @@ -0,0 +1,14 @@ +'use server'; + +import { getDatabase } from '@utils/mongodb/mongoClient.mjs'; + +export default async function checkTeamsPopulated() { + try { + const db = await getDatabase(); + const count = await db.collection('teams').countDocuments({}); + return { ok: true, populated: count > 0, count, error: null }; + } catch (e) { + const error = e as Error; + return { ok: false, populated: false, count: 0, error: error.message }; + } +} diff --git a/app/(api)/_actions/logic/ingestTeams.ts b/app/(api)/_actions/logic/ingestTeams.ts new file mode 100644 index 00000000..421b4a4e --- /dev/null +++ b/app/(api)/_actions/logic/ingestTeams.ts @@ -0,0 +1,21 @@ +'use server'; + +import { CreateManyTeams } from '@datalib/teams/createTeams'; +import ParsedRecord from '@typeDefs/parsedRecord'; +import Team from '@typeDefs/team'; + +export default async function ingestTeams(body: ParsedRecord[]) { + const res = await CreateManyTeams(body); + + if (!res.ok || !Array.isArray(res.body)) return res; + + const serializedBody = (res.body as Team[]).map((team) => ({ + ...team, + _id: team._id ? 
String(team._id) : undefined, + })); + + return { + ...res, + body: serializedBody, + }; +} diff --git a/app/(api)/_actions/logic/validateCSV.ts b/app/(api)/_actions/logic/validateCSV.ts new file mode 100644 index 00000000..b8eca9c5 --- /dev/null +++ b/app/(api)/_actions/logic/validateCSV.ts @@ -0,0 +1,28 @@ +'use server'; + +import { validateCsvBlob } from '@utils/csv-ingestion/csvAlgorithm'; + +export default async function validateCSV(formData: FormData) { + const file = formData.get('file') as File | null; + if (!file) { + return { + ok: false, + body: null, + validBody: null, + report: null, + error: 'Missing file', + }; + } + + const data = await file.arrayBuffer(); + const blob = new Blob([data], { type: file.type }); + + const res = await validateCsvBlob(blob); + return { + ok: res.ok, + body: res.body, + validBody: res.validBody, + report: res.report, + error: res.error, + }; +} diff --git a/app/(api)/_utils/csv-ingestion/csvAlgorithm.ts b/app/(api)/_utils/csv-ingestion/csvAlgorithm.ts index 403041df..7cfddb72 100644 --- a/app/(api)/_utils/csv-ingestion/csvAlgorithm.ts +++ b/app/(api)/_utils/csv-ingestion/csvAlgorithm.ts @@ -8,84 +8,461 @@ const filteredTracks = [ "Hacker's Choice Award", 'N/A', ]; -const validTracks: string[] = trackData.tracks.filter( - (t) => !filteredTracks.includes(t) + +export type CsvTrackAutoFix = { + raw: string; + canonical: string; +}; + +export type CsvRowIssue = { + rowIndex: number; // 1-based, counting CSV rows processed + teamNumberRaw?: string; + teamNumber?: number; + projectTitle?: string; + contactEmails: string[]; + contactNames: string[]; + memberEmails: string[]; + memberNames: string[]; + severity: 'error' | 'warning'; + invalidTracks: string[]; + excludedTracks: string[]; + duplicateTracks: string[]; + autoFixedTracks: CsvTrackAutoFix[]; + missingFields: string[]; + memberColumnsFromTeamMember1: Array<{ header: string; value: string }>; + duplicateTeamNumber?: number; +}; + +export type CsvValidationReport = { + totalTeamsParsed: number; + validTeams: number; + errorRows: number; + warningRows: number; + unknownTracks: string[]; + issues: CsvRowIssue[]; +}; + +type TrackMatchCandidate = { + canonical: string; + normalized: string; +}; + +function normalizeTrackName(value: string): string { + // Only do trimming and case-insensitive matching. + return value.trim().toLowerCase(); +} + +const filteredTrackSet = new Set(filteredTracks.map(normalizeTrackName)); + +const validTracks: string[] = (trackData.tracks as string[]).filter( + (t) => !filteredTrackSet.has(normalizeTrackName(t)) ); -function sortTracks( +const trackCandidates: TrackMatchCandidate[] = validTracks.map((canonical) => ({ + canonical, + normalized: normalizeTrackName(canonical), +})); + +const normalizedToCanonical = new Map(); +for (const candidate of trackCandidates) { + if (!normalizedToCanonical.has(candidate.normalized)) { + normalizedToCanonical.set(candidate.normalized, candidate.canonical); + } +} + +export function matchCanonicalTrack(raw: string): string | null { + const normalized = normalizeTrackName(raw); + if (!normalized) return null; + if (filteredTrackSet.has(normalized)) return null; + + const exact = normalizedToCanonical.get(normalized); + if (exact) return exact; + + return null; +} + +function splitOptInTracks(value: string): string[] { + // CSV exports vary; tolerate commas/semicolons/pipes/newlines. 
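+  // For example (mirroring the unit tests), an opt-in cell such as
+  // 'Best Use of Gemini API; Best Use of MongoDB Atlas, Best Use of Vectara | Best Use of Auth0'
+  // splits into four separate track names after trimming.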
+ return value + .split(/[,;|\n\r]+/g) + .map((t) => t.trim()) + .filter(Boolean); +} + +function isSubmittedNonDraft(status: unknown): boolean { + const s = String(status ?? '') + .trim() + .toLowerCase(); + if (!s) return false; + if (s.includes('draft')) return false; + return s.includes('submitted'); +} + +function extractContactInfoFromRow(data: Record): { + contactEmails: string[]; + contactNames: string[]; + memberEmails: string[]; + memberNames: string[]; +} { + const contactEmails = new Set(); + const contactNames = new Set(); + const memberEmails = new Set(); + const memberNames = new Set(); + + const looksLikeUrl = (value: string) => /^https?:\/\//i.test(value); + + for (const [key, value] of Object.entries(data)) { + const k = key.toLowerCase(); + const v = String(value ?? '').trim(); + if (!v) continue; + + const isTrackColumn = + k.includes('track') || k.includes('opt-in') || k.includes('prize'); + const isProjectTitle = k.includes('project title'); + const isContactish = + k.includes('contact') || k.includes('submitter') || k.includes('owner'); + + if (k.includes('email') || k.includes('e-mail')) { + v.split(/[\s,;|]+/g) + .map((s) => s.trim()) + .filter(Boolean) + .filter((s) => s.includes('@')) + .forEach((email) => { + memberEmails.add(email); + if (isContactish) contactEmails.add(email); + }); + continue; + } + + // Names + const isNameColumn = k.includes('name'); + const isMemberishColumn = + k.includes('member') || + k.includes('teammate') || + k.includes('team member') || + k.includes('participant'); + const isProbablyNotAName = + k.includes('school') || + k.includes('major') || + k.includes('diet') || + k.includes('shirt') || + k.includes('pronoun') || + k.includes('role') || + k.includes('github') || + k.includes('linkedin') || + k.includes('devpost') || + k.includes('portfolio') || + k.includes('phone'); + + if (!isTrackColumn && !isProjectTitle && !looksLikeUrl(v)) { + if (isNameColumn) { + memberNames.add(v); + if (isContactish) contactNames.add(v); + } else if (isMemberishColumn && !isProbablyNotAName) { + memberNames.add(v); + } + } + } + + return { + contactEmails: Array.from(contactEmails).sort(), + contactNames: Array.from(contactNames).sort(), + memberEmails: Array.from(memberEmails).sort(), + memberNames: Array.from(memberNames).sort(), + }; +} + +function canonicalHeaderKey(value: string): string { + return String(value ?? '') + .trim() + .toLowerCase() + .replace(/[^a-z0-9]/g, ''); +} + +function extractMemberColumnsFromTeamMember1( + data: Record, + headers: string[] | null, + startIndex: number +): Array<{ header: string; value: string }> { + if (!headers || startIndex < 0 || startIndex >= headers.length) return []; + const rows: Array<{ header: string; value: string }> = []; + for (let i = startIndex; i < headers.length; i += 1) { + const header = headers[i]; + rows.push({ header, value: String(data[header] ?? 
'').trim() }); + } + return rows; +} + +function validateAndCanonicalizeTracks(rawTracks: string[]): { + canonicalTracks: string[]; + invalidTracks: string[]; + excludedTracks: string[]; + duplicateTracks: string[]; + autoFixedTracks: CsvTrackAutoFix[]; +} { + const canonicalTracks: string[] = []; + const invalidTracks: string[] = []; + const excludedTracks: string[] = []; + const duplicateTracks: string[] = []; + const autoFixedTracks: CsvTrackAutoFix[] = []; + + const seen = new Set(); + const excludedSet = new Set(filteredTracks.map((t) => normalizeTrackName(t))); + const silentlyIgnoredSet = new Set(['n/a']); + + for (const raw of rawTracks) { + const trimmed = String(raw ?? '').trim(); + if (!trimmed) continue; + + const normalized = normalizeTrackName(trimmed); + if (silentlyIgnoredSet.has(normalized)) continue; + + if (excludedSet.has(normalized)) { + excludedTracks.push(trimmed); + continue; + } + + const canonical = matchCanonicalTrack(trimmed); + if (!canonical) { + invalidTracks.push(trimmed); + continue; + } + + if (seen.has(canonical)) { + duplicateTracks.push(canonical); + continue; + } + + if (trimmed !== canonical) { + autoFixedTracks.push({ raw: trimmed, canonical }); + } + + seen.add(canonical); + canonicalTracks.push(canonical); + } + + return { + canonicalTracks, + invalidTracks, + excludedTracks, + duplicateTracks, + autoFixedTracks, + }; +} + +function validateTracksFromColumns( track1: string, track2: string, track3: string, - chosentracks: string -): string[] { - const initialTracks = [track1, track2, track3] - .map((t) => t.trim()) - .filter( - (t) => - validTracks.includes(t) && - t !== 'Best Hack for Social Good' && - t !== "Hacker's Choice Award" - ); // explicitly filter it out again - - const existingTrackSet = new Set(initialTracks); - - if (chosentracks.length > 1) { - chosentracks - .split(',') - .map((t) => t.trim()) - .forEach((track) => { - if ( - validTracks.includes(track) && - !existingTrackSet.has(track) && - track !== 'Best Hack for Social Good' // explicitly filter it out - ) { - initialTracks.push(track); - existingTrackSet.add(track); - } - }); - } + optIns: string +): { + canonicalTracks: string[]; + invalidTracks: string[]; + excludedTracks: string[]; + duplicateTracks: string[]; + autoFixedTracks: CsvTrackAutoFix[]; +} { + const primaryRaw = [track1, track2, track3]; + const optInRaw = splitOptInTracks(optIns); + + // First pass: validate primary tracks. + const primary = validateAndCanonicalizeTracks(primaryRaw); + + // Second pass: validate opt-ins, but *ignore* entries that merely repeat a primary track. 
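+  // e.g. a team listing 'Best Hardware Hack' as Track #1 and again under
+  // Opt-In Prizes is not reported as a duplicate track; only repeats within
+  // the primary columns or within the opt-in list itself are flagged.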
+ const optIn = validateAndCanonicalizeTracks(optInRaw); - if (initialTracks.length > 4) { - initialTracks.length = 4; + const primarySet = new Set(primary.canonicalTracks); + const mergedTracks: string[] = [...primary.canonicalTracks]; + const mergedSeen = new Set(mergedTracks); + + for (const t of optIn.canonicalTracks) { + if (mergedSeen.has(t)) continue; + mergedTracks.push(t); + mergedSeen.add(t); } - const tracksSet = Array.from(new Set(initialTracks)); + // Duplicates: + // - keep duplicates inside primary columns + // - keep duplicates inside opt-in list + // - DO NOT report duplicates that are just opt-in repeating a primary track + const optInDuplicatesNotInPrimary = optIn.duplicateTracks.filter( + (t) => !primarySet.has(t) + ); - return tracksSet; + return { + canonicalTracks: mergedTracks, + invalidTracks: [...primary.invalidTracks, ...optIn.invalidTracks], + excludedTracks: [...primary.excludedTracks, ...optIn.excludedTracks], + duplicateTracks: [ + ...primary.duplicateTracks, + ...optInDuplicatesNotInPrimary, + ], + autoFixedTracks: [...primary.autoFixedTracks, ...optIn.autoFixedTracks], + }; } -export default async function csvAlgorithm( - blob: Blob -): Promise<{ ok: boolean; body: ParsedRecord[] | null; error: string | null }> { +export function sortTracks( + track1: string, + track2: string, + track3: string, + chosenTracks: string +): string[] { + const ordered: string[] = []; + const seen = new Set(); + + const maybeAdd = (raw: string) => { + const canonical = matchCanonicalTrack(raw); + if (!canonical) return; + if (seen.has(canonical)) return; + ordered.push(canonical); + seen.add(canonical); + }; + + [track1, track2, track3].forEach(maybeAdd); + + if (chosenTracks && chosenTracks.trim().length > 0) { + for (const optIn of splitOptInTracks(chosenTracks)) { + maybeAdd(optIn); + } + } + + return ordered; +} + +export async function validateCsvBlob(blob: Blob): Promise<{ + ok: boolean; + body: ParsedRecord[] | null; + validBody: ParsedRecord[] | null; + report: CsvValidationReport; + error: string | null; +}> { + const issues: CsvRowIssue[] = []; + const unknownTrackSet = new Set(); + const output: ParsedRecord[] = []; + const seenTeamNumbers = new Map(); // teamNumber -> first rowIndex where seen + try { - const parsePromise = new Promise((resolve, reject) => { - const output: ParsedRecord[] = []; + const results = await new Promise((resolve, reject) => { + let rowIndex = 0; + let headers: string[] | null = null; + let teamMember1StartIndex = -1; const parseBlob = async () => { const buffer = Buffer.from(await blob.arrayBuffer()); const stream = Readable.from(buffer.toString()); - // let i = 0; + stream .pipe(csv()) + .on('headers', (h: string[]) => { + headers = h; + const target = canonicalHeaderKey('Team member 1 first name'); + teamMember1StartIndex = h.findIndex( + (header) => canonicalHeaderKey(header) === target + ); + }) .on('data', (data) => { + rowIndex += 1; + if ( data['Table Number'] !== '' && - data['Project Status'] === 'Submitted (Gallery/Visible)' + isSubmittedNonDraft(data['Project Status']) ) { + const projectTitle = data['Project Title']; + const tableNumberRaw = data['Table Number']; + const parsedTeamNumber = parseInt(tableNumberRaw); + + const { contactEmails, contactNames, memberEmails, memberNames } = + extractContactInfoFromRow(data); + + const memberColumnsFromTeamMember1 = + extractMemberColumnsFromTeamMember1( + data, + headers, + teamMember1StartIndex + ); + const track1 = data['Track #1 (Primary Track)'] ?? 
''; const track2 = data['Track #2'] ?? ''; const track3 = data['Track #3'] ?? ''; const optIns = data['Opt-In Prizes'] ?? ''; - const tracksInOrder = sortTracks(track1, track2, track3, optIns); + const { + canonicalTracks, + invalidTracks, + excludedTracks, + duplicateTracks, + autoFixedTracks, + } = validateTracksFromColumns(track1, track2, track3, optIns); + + invalidTracks.forEach((t) => unknownTrackSet.add(t)); + + const missingFields: string[] = []; + if (!projectTitle || String(projectTitle).trim().length === 0) { + missingFields.push('Project Title'); + } + if (!Number.isFinite(parsedTeamNumber)) { + missingFields.push('Table Number'); + } + if (canonicalTracks.length === 0) { + missingFields.push('Tracks'); + } + + // Check for duplicate teamNumber + let isDuplicateTeamNumber = false; + if (Number.isFinite(parsedTeamNumber)) { + if (seenTeamNumbers.has(parsedTeamNumber)) { + isDuplicateTeamNumber = true; + } else { + seenTeamNumbers.set(parsedTeamNumber, rowIndex); + } + } + + if ( + invalidTracks.length > 0 || + missingFields.length > 0 || + excludedTracks.length > 0 || + duplicateTracks.length > 0 || + autoFixedTracks.length > 0 || + isDuplicateTeamNumber + ) { + const severity = + invalidTracks.length > 0 || + missingFields.length > 0 || + isDuplicateTeamNumber + ? 'error' + : 'warning'; + issues.push({ + rowIndex, + teamNumberRaw: tableNumberRaw, + teamNumber: Number.isFinite(parsedTeamNumber) + ? parsedTeamNumber + : undefined, + projectTitle, + contactEmails, + contactNames, + memberEmails, + memberNames, + severity, + invalidTracks, + excludedTracks, + duplicateTracks, + autoFixedTracks, + missingFields, + memberColumnsFromTeamMember1, + duplicateTeamNumber: isDuplicateTeamNumber + ? parsedTeamNumber + : undefined, + }); + } output.push({ - name: data['Project Title'], - teamNumber: parseInt(data['Table Number']), - tableNumber: 0, // doing it later (on end) - tracks: tracksInOrder, + name: projectTitle, + teamNumber: parsedTeamNumber, + tableNumber: 0, // assigned after ordering + tracks: canonicalTracks, active: true, - }); + _rowIndex: rowIndex, // Store original CSV row index for error filtering + } as any); } }) .on('end', () => { @@ -106,12 +483,82 @@ export default async function csvAlgorithm( }) .on('error', (error) => reject(error)); }; + parseBlob().catch(reject); }); - const results: ParsedRecord[] = await parsePromise; + const errorRows = issues.filter((i) => i.severity === 'error').length; + const warningRows = issues.filter((i) => i.severity === 'warning').length; + + // Use rowIndex-based filtering to avoid NaN equality issues with Set.has() + const errorRowIndexes = new Set( + issues.filter((i) => i.severity === 'error').map((i) => i.rowIndex) + ); + + const validBody = results.filter((team) => { + // Use rowIndex stored on team object (set before reordering) + const rowIdx = (team as any)._rowIndex; + if (rowIdx === undefined) return true; + return !errorRowIndexes.has(rowIdx); + }); + + // Remove _rowIndex field before returning (only needed for internal filtering) + const cleanResults = results.map((team) => { + const { _rowIndex: _, ...clean } = team as any; + return clean as typeof team; + }); + const cleanValidBody = validBody.map((team) => { + const { _rowIndex: _, ...clean } = team as any; + return clean as typeof team; + }); + + const report: CsvValidationReport = { + totalTeamsParsed: cleanResults.length, + validTeams: cleanValidBody.length, + errorRows, + warningRows, + unknownTracks: Array.from(unknownTrackSet).sort(), + issues, + }; + + const ok 
= report.errorRows === 0; + return { + ok, + body: cleanResults, + validBody: cleanValidBody, + report, + error: ok ? null : 'CSV validation failed. Fix errors and re-validate.', + }; + } catch (e) { + const error = e as Error; + const report: CsvValidationReport = { + totalTeamsParsed: 0, + validTeams: 0, + errorRows: 0, + warningRows: 0, + unknownTracks: [], + issues: [], + }; + return { + ok: false, + body: null, + validBody: null, + report, + error: error.message, + }; + } +} + +export default async function csvAlgorithm( + blob: Blob +): Promise<{ ok: boolean; body: ParsedRecord[] | null; error: string | null }> { + try { + const validated = await validateCsvBlob(blob); + if (!validated.ok) { + return { ok: false, body: null, error: validated.error }; + } - return { ok: true, body: results, error: null }; + return { ok: true, body: validated.validBody, error: null }; } catch (e) { const error = e as Error; return { ok: false, body: null, error: error.message }; diff --git a/app/(pages)/admin/csv/page.tsx b/app/(pages)/admin/csv/page.tsx index 59266e7a..dcd91d33 100644 --- a/app/(pages)/admin/csv/page.tsx +++ b/app/(pages)/admin/csv/page.tsx @@ -1,29 +1,386 @@ 'use client'; -import ingestCSV from '@actions/logic/ingestCSV'; -import React, { useState } from 'react'; +import validateCSV from '@actions/logic/validateCSV'; +import ingestTeams from '@actions/logic/ingestTeams'; +import checkTeamsPopulated from '@actions/logic/checkTeamsPopulated'; +import React, { useEffect, useState } from 'react'; +import ParsedRecord from '@typeDefs/parsedRecord'; +import { + CsvValidationReport, + CsvRowIssue, +} from '@utils/csv-ingestion/csvAlgorithm'; + +type ValidationResponse = { + ok: boolean; + body: ParsedRecord[] | null; + validBody: ParsedRecord[] | null; + report: CsvValidationReport | null; + error: string | null; +}; export default function CsvIngestion() { const [pending, setPending] = useState(false); + const [validating, setValidating] = useState(false); const [response, setResponse] = useState(''); + const [validation, setValidation] = useState(null); + const [file, setFile] = useState(null); + + const [teamsAlreadyPopulated, setTeamsAlreadyPopulated] = useState<{ + populated: boolean; + count: number; + } | null>(null); + + useEffect(() => { + let alive = true; + (async () => { + try { + const res = (await checkTeamsPopulated()) as { + populated: boolean; + count: number; + }; + if (alive) setTeamsAlreadyPopulated(res); + } catch { + // non-blocking + } + })(); + return () => { + alive = false; + }; + }, []); + + const validateHandler = async () => { + if (!file) { + setResponse('Please choose a CSV file first.'); + return; + } + setValidating(true); + const formData = new FormData(); + formData.append('file', file); + + try { + const res = (await validateCSV(formData)) as ValidationResponse; + setValidation(res); + setResponse(''); + } catch (error) { + console.error('Error validating CSV file:', error); + setResponse( + 'An error occurred while validating the CSV file. Please try again.' + ); + } finally { + setValidating(false); + } + }; + + const uploadValidHandler = async () => { + if (!validation?.validBody) return; + setPending(true); + try { + const res = await ingestTeams(validation.validBody); + setResponse(JSON.stringify(res, null, 2)); + } catch (error: unknown) { + const message = + error instanceof Error + ? 
error.message + : 'An unknown error occurred during upload.'; + setResponse(`Error uploading teams: ${message}`); + } finally { + setPending(false); + } + }; + + const uploadAllHandler = async () => { + if (!validation?.body) return; + + const errors = validation.report?.errorRows ?? 0; + if (errors > 0) { + const ok = window.confirm( + `There are ${errors} error rows. Force upload ALL teams anyway?` + ); + if (!ok) return; + } - const handler = async (event: React.FormEvent) => { - event.preventDefault(); setPending(true); - const formData = new FormData(event.currentTarget); - const res = await ingestCSV(formData); - setResponse(JSON.stringify(res)); - setPending(false); + try { + const res = await ingestTeams(validation.body); + setResponse(JSON.stringify(res, null, 2)); + } catch (error: unknown) { + const message = + error instanceof Error + ? error.message + : 'An unknown error occurred during upload.'; + setResponse(`Error uploading teams: ${message}`); + } finally { + setPending(false); + } + }; + + const canonKey = (value: string) => + String(value ?? '') + .trim() + .toLowerCase() + .replace(/[^a-z0-9]/g, ''); + + const buildTeamMemberLines = (issue: CsvRowIssue): string[] => { + const cols = Array.isArray(issue?.memberColumnsFromTeamMember1) + ? issue.memberColumnsFromTeamMember1 + : []; + + const findByHeaderPrefix = (prefix: string): string => { + const p = canonKey(prefix); + for (const c of cols) { + const header = String(c?.header ?? ''); + const value = String(c?.value ?? '').trim(); + if (!header) continue; + const hk = canonKey(header); + if (hk.startsWith(p)) return value; + } + return ''; + }; + + const lines: string[] = []; + for (let n = 1; n <= 4; n += 1) { + const first = findByHeaderPrefix(`Team member ${n} first name`); + const last = findByHeaderPrefix(`Team member ${n} last name`); + const email = + findByHeaderPrefix(`Team member ${n} email`) || + findByHeaderPrefix(`Team member ${n} e-mail`); + + const fullName = `${first} ${last}`.trim().replace(/\s+/g, ' '); + if (!fullName && !email) continue; + + const namePart = fullName || '(no name)'; + const emailPart = email ? ` — ${email}` : ''; + lines.push(`${namePart}${emailPart}`); + } + + return lines; + }; + + const buildCopyText = (severity: 'error' | 'warning') => { + if (!validation?.report) return ''; + + const rows = validation.report.issues + .filter((i: CsvRowIssue) => i.severity === severity) + .map((i: CsvRowIssue) => { + const header = `Team ${i.teamNumberRaw} — ${i.projectTitle}`; + + const submitterName = i.contactNames?.length + ? `Submitter: ${i.contactNames.join(', ')}` + : ''; + const submitterEmail = i.contactEmails?.length + ? `Submitter Email: ${i.contactEmails.join(', ')}` + : ''; + + const memberLines = buildTeamMemberLines(i); + const membersBlock = memberLines.length + ? ['Members:', ...memberLines.map((l) => ` ${l}`)].join('\n') + : ''; + + return [header, submitterName, submitterEmail, membersBlock] + .filter(Boolean) + .join('\n'); + }); + + return rows.join('\n\n'); + }; + + const copyToClipboard = async (text: string) => { + if (!text) return; + try { + await navigator.clipboard.writeText(text); + setResponse('Copied to clipboard.'); + } catch { + setResponse(text); + } }; return (
+ {teamsAlreadyPopulated?.populated ? ( +
+
Teams database already populated
+
+ Found {teamsAlreadyPopulated.count} existing team records. Uploading + again may create duplicates. +
+
+ ) : null}
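+      {/* Informational only: an already-populated teams collection does not
+          block validation or upload. */}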

Upload CSV:

-
- - -
-

{pending ? 'parsing CSV and creating teams...' : response}

+

+ Step 1: Validate the CSV and review issues. Step 2: Upload to insert + teams. +

+ +
+ { + const next = e.target.files?.[0] ?? null; + setFile(next); + setValidation(null); + setResponse(''); + }} + /> + + + + +
+ + {validation?.report && ( +
+

Validation Results

+

+ Parsed: {validation.report.totalTeamsParsed} teams. Valid:{' '} + {validation.report.validTeams}. Errors:{' '} + {validation.report.errorRows}. Warnings:{' '} + {validation.report.warningRows}. +

+ + {validation.report.errorRows > 0 && ( +
+

Errors

+ +
    + {validation.report.issues + .filter((i: CsvRowIssue) => i.severity === 'error') + .map((i: CsvRowIssue) => { + const memberLines = buildTeamMemberLines(i); + + return ( +
  • + Team {i.teamNumberRaw} — {i.projectTitle} + {i.contactNames?.length ? ( + <> (Submitter: {i.contactNames.join(', ')}) + ) : null} + {i.missingFields?.length ? ( + <> (Missing: {i.missingFields.join(', ')}) + ) : null} + {i.duplicateTeamNumber !== undefined ? ( + <> + {' '} + (Duplicate Table Number: {i.duplicateTeamNumber}) + + ) : null} + {i.invalidTracks?.length ? ( + <> (Invalid tracks: {i.invalidTracks.join(', ')}) + ) : null} + {memberLines.length ? ( +
    +                              {memberLines
    +                                .map((l) => `Member: ${l}`)
    +                                .join('\n')}
    +                            
    + ) : null} +
  • + ); + })} +
+
+ )} + + {validation.report.warningRows > 0 && ( +
+

Warnings

+ +
    + {validation.report.issues + .filter((i: CsvRowIssue) => i.severity === 'warning') + .map((i: CsvRowIssue) => { + const teamMemberLines = buildTeamMemberLines(i); + + return ( +
  • + Team {i.teamNumberRaw} — {i.projectTitle} + {i.contactNames?.length ? ( + <> (Submitter: {i.contactNames.join(', ')}) + ) : null} + {i.duplicateTeamNumber !== undefined ? ( + <> + {' '} + (Duplicate Table Number: {i.duplicateTeamNumber}) + + ) : null} + {i.duplicateTracks?.length ? ( + <> (Duplicates: {i.duplicateTracks.join(', ')}) + ) : null} + {i.excludedTracks?.length ? ( + <> (Excluded: {i.excludedTracks.join(', ')}) + ) : null} + {i.autoFixedTracks?.length ? ( + <> (Auto-fixed casing/spacing) + ) : null} + {teamMemberLines.length ? ( +
    +                              {teamMemberLines
    +                                .map((l) => `Member: ${l}`)
    +                                .join('\n')}
    +                            
    + ) : null} +
  • + ); + })} +
+
+ )} + +
+ + Raw report (JSON) + +
+                {JSON.stringify(validation.report, null, 2)}
+              
+
+
+ )} + + {validation?.report && ( +
+ + + +
+ )} + +
+          {pending ? 'creating teams...' : response}
+        
); diff --git a/migrations/20250420035636-update-teams.mjs b/migrations/20250420035636-update-teams.mjs index 906b3f66..8f50b4ff 100644 --- a/migrations/20250420035636-update-teams.mjs +++ b/migrations/20250420035636-update-teams.mjs @@ -1,67 +1,66 @@ -import fs from 'fs'; -import path from 'path'; +import fs from "fs"; +import path from "path"; const dataPath = path.resolve( process.cwd(), - 'app/_data/db_validation_data.json' + "app/_data/db_validation_data.json" ); -const data = JSON.parse(fs.readFileSync(dataPath, 'utf8')); +const data = JSON.parse(fs.readFileSync(dataPath, "utf8")); const tracks = [...new Set(data.tracks)]; export const up = async (db) => { await db.command({ - collMod: 'teams', + collMod: "teams", validator: { $jsonSchema: { - bsonType: 'object', - title: 'Teams Object Validation', - required: ['teamNumber', 'tableNumber', 'name', 'tracks', 'active'], + bsonType: "object", + title: "Teams Object Validation", + required: ["teamNumber", "tableNumber", "name", "tracks", "active"], properties: { _id: { - bsonType: 'objectId', - description: '_id must be an ObjectId', + bsonType: "objectId", + description: "_id must be an ObjectId", }, teamNumber: { - bsonType: 'int', - description: 'teamNumber must be an integer', + bsonType: "int", + description: "teamNumber must be an integer", }, tableNumber: { - bsonType: 'int', - description: 'tableNumber must be an integer', + bsonType: "int", + description: "tableNumber must be an integer", }, name: { - bsonType: 'string', - description: 'name must be a string', + bsonType: "string", + description: "name must be a string", }, tracks: { - bsonType: 'array', - maxItems: 6, + bsonType: "array", items: { enum: tracks, - description: 'track must be one of the valid tracks', + description: "track must be one of the valid tracks", }, - description: 'tracks must be an array of strings', + description: "tracks must be an array of strings", }, reports: { - bsonType: 'array', + bsonType: "array", items: { - bsonType: 'object', - required: ['timestamp', 'judge_id'], + bsonType: "object", + required: ["timestamp", "judge_id"], properties: { timestamp: { - bsonType: 'number', - description: 'Timestamp in milliseconds since epoch', + bsonType: "number", + description: "Timestamp in milliseconds since epoch", }, judge_id: { - bsonType: 'string', - description: 'ID of the judge', + bsonType: "string", + description: "ID of the judge", }, }, }, }, active: { - bsonType: 'bool', - description: 'active must be a boolean', + bsonType: "bool", + description: "active must be a boolean", }, }, additionalProperties: false, @@ -72,41 +71,40 @@ export const up = async (db) => { export const down = async (db) => { await db.command({ - collMod: 'teams', + collMod: "teams", validator: { $jsonSchema: { - bsonType: 'object', - title: 'Teams Object Validation', - required: ['teamNumber', 'tableNumber', 'name', 'tracks', 'active'], + bsonType: "object", + title: "Teams Object Validation", + required: ["teamNumber", "tableNumber", "name", "tracks", "active"], properties: { _id: { - bsonType: 'objectId', - description: '_id must be an ObjectId', + bsonType: "objectId", + description: "_id must be an ObjectId", }, teamNumber: { - bsonType: 'int', - description: 'teamNumber must be an integer', + bsonType: "int", + description: "teamNumber must be an integer", }, tableNumber: { - bsonType: 'int', - description: 'tableNumber must be an integer', + bsonType: "int", + description: "tableNumber must be an integer", }, name: { - bsonType: 'string', - description: 'name must 
be a string', + bsonType: "string", + description: "name must be a string", }, tracks: { - bsonType: 'array', - maxItems: 6, + bsonType: "array", items: { enum: tracks, - description: 'track must be one of the valid tracks', + description: "track must be one of the valid tracks", }, - description: 'tracks must be an array of strings', + description: "tracks must be an array of strings", }, active: { - bsonType: 'bool', - description: 'active must be a boolean', + bsonType: "bool", + description: "active must be a boolean", }, }, additionalProperties: false, diff --git a/migrations/20260105090000-remove-team-tracks-limit.mjs b/migrations/20260105090000-remove-team-tracks-limit.mjs new file mode 100644 index 00000000..da017981 --- /dev/null +++ b/migrations/20260105090000-remove-team-tracks-limit.mjs @@ -0,0 +1,133 @@ +import fs from 'fs'; +import path from 'path'; + +const dataPath = path.resolve( + process.cwd(), + 'app/_data/db_validation_data.json' +); +const data = JSON.parse(fs.readFileSync(dataPath, 'utf8')); +const tracks = [...new Set(data.tracks)]; + +export const up = async (db) => { + await db.command({ + collMod: 'teams', + validator: { + $jsonSchema: { + bsonType: 'object', + title: 'Teams Object Validation', + required: ['teamNumber', 'tableNumber', 'name', 'tracks', 'active'], + properties: { + _id: { + bsonType: 'objectId', + description: '_id must be an ObjectId', + }, + teamNumber: { + bsonType: 'int', + description: 'teamNumber must be an integer', + }, + tableNumber: { + bsonType: 'int', + description: 'tableNumber must be an integer', + }, + name: { + bsonType: 'string', + description: 'name must be a string', + }, + tracks: { + bsonType: 'array', + items: { + enum: tracks, + description: 'track must be one of the valid tracks', + }, + description: 'tracks must be an array of strings', + }, + reports: { + bsonType: 'array', + items: { + bsonType: 'object', + required: ['timestamp', 'judge_id'], + properties: { + timestamp: { + bsonType: 'number', + description: 'Timestamp in milliseconds since epoch', + }, + judge_id: { + bsonType: 'string', + description: 'ID of the judge', + }, + }, + }, + }, + active: { + bsonType: 'bool', + description: 'active must be a boolean', + }, + }, + additionalProperties: false, + }, + }, + }); +}; + +export const down = async (db) => { + // Re-introduce the previous 6-track cap if needed. 
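+  // The validator below is identical to the up() schema except that it
+  // restores the maxItems: 6 cap on the tracks array.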
+ await db.command({ + collMod: 'teams', + validator: { + $jsonSchema: { + bsonType: 'object', + title: 'Teams Object Validation', + required: ['teamNumber', 'tableNumber', 'name', 'tracks', 'active'], + properties: { + _id: { + bsonType: 'objectId', + description: '_id must be an ObjectId', + }, + teamNumber: { + bsonType: 'int', + description: 'teamNumber must be an integer', + }, + tableNumber: { + bsonType: 'int', + description: 'tableNumber must be an integer', + }, + name: { + bsonType: 'string', + description: 'name must be a string', + }, + tracks: { + bsonType: 'array', + maxItems: 6, + items: { + enum: tracks, + description: 'track must be one of the valid tracks', + }, + description: 'tracks must be an array of strings', + }, + reports: { + bsonType: 'array', + items: { + bsonType: 'object', + required: ['timestamp', 'judge_id'], + properties: { + timestamp: { + bsonType: 'number', + description: 'Timestamp in milliseconds since epoch', + }, + judge_id: { + bsonType: 'string', + description: 'ID of the judge', + }, + }, + }, + }, + active: { + bsonType: 'bool', + description: 'active must be a boolean', + }, + }, + additionalProperties: false, + }, + }, + }); +}; diff --git a/migrations/create-teams.mjs b/migrations/create-teams.mjs index 4929a4bb..330eb2ec 100644 --- a/migrations/create-teams.mjs +++ b/migrations/create-teams.mjs @@ -34,7 +34,6 @@ export async function up(db) { }, tracks: { bsonType: 'array', - maxItems: 6, items: { enum: tracks, description: 'track must be one of the valid tracks',