diff --git a/README.md b/README.md
index 023069e..b187e96 100644
--- a/README.md
+++ b/README.md
@@ -5,3 +5,39 @@
 Super simple project management tool for developers.
 
 Born out of frustration with Jira.
+
+## Self-hosting
+
+### Setup
+
+1. Copy the `.env.example` files to `.env`:
+
+```bash
+cp packages/backend/.env.example packages/backend/.env
+cp packages/frontend/.env.example packages/frontend/.env
+```
+
+2. Required values in the backend `.env`:
+   - `DATABASE_URL`
+   - `JWT_SECRET`
+   - `RESEND_API_KEY` and `EMAIL_FROM` for verification emails
+   - `SEED_PASSWORD` if you plan to run `reset-and-seed`
+
+3. Required values in the frontend `.env`:
+   - `VITE_SERVER_URL`
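+
+For example, the two files might look like this (illustrative values only; the Postgres URL matches the defaults of `packages/backend/setup-docker-postgres`, and the backend's `start` script listens on port 3000):
+
+```bash
+# packages/backend/.env
+DATABASE_URL=postgres://eussi:password@localhost:5432/issue
+JWT_SECRET=replace-with-a-long-random-string
+RESEND_API_KEY=re_xxxxxxxxxxxx
+EMAIL_FROM=sprint@example.com
+SEED_PASSWORD=replace-with-a-demo-password
+
+# packages/frontend/.env
+VITE_SERVER_URL=http://localhost:3000
+```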
+
+### Notes
+
+- OpenCode is optional. The app runs without it, but the AI helper needs OpenCode (no OpenCode login is required).
+- S3 is optional. Without it, image uploads will not work.
+- Stripe credentials are not needed at this stage.
+
+### Database seeding
+
+Run the seed script to create demo data:
+
+```bash
+bun reset-and-seed
+```
+
+This seeds demo organisations, projects, issues, and users. Demo users are created and can be used without email verification. `SEED_PASSWORD` must be set in `packages/backend/.env`.
diff --git a/packages/backend/package.json b/packages/backend/package.json
index 7a072aa..7e1bfab 100644
--- a/packages/backend/package.json
+++ b/packages/backend/package.json
@@ -10,8 +10,8 @@
     "start": "bun src/index.ts --PORT=3000",
     "db:start": "docker compose up -d",
     "db:stop": "docker compose down",
-    "db:migrate": "npx drizzle-kit generate && npx drizzle-kit migrate",
-    "db:push": "npx drizzle-kit push",
+    "db:migrate": "bunx drizzle-kit generate && bunx drizzle-kit migrate",
+    "db:push": "bunx drizzle-kit push",
     "db:reset": "bun scripts/db-reset.ts",
     "db:seed": "bun scripts/db-seed.ts"
   },
diff --git a/packages/backend/scripts/db-reset.ts b/packages/backend/scripts/db-reset.ts
index fb0ea03..75bb849 100644
--- a/packages/backend/scripts/db-reset.ts
+++ b/packages/backend/scripts/db-reset.ts
@@ -29,7 +29,7 @@ async function resetDatabase() {
 
   // run migrations to recreate tables
   console.log("running migrations...");
-  execSync("npx drizzle-kit migrate", {
+  execSync("bunx drizzle-kit migrate", {
     stdio: "inherit",
     cwd: `${import.meta.dir}/..`,
   });
diff --git a/packages/backend/scripts/import-org.ts b/packages/backend/scripts/import-org.ts
new file mode 100644
index 0000000..07cebe6
--- /dev/null
+++ b/packages/backend/scripts/import-org.ts
@@ -0,0 +1,405 @@
+import "dotenv/config";
+import { readFile } from "node:fs/promises";
+import {
+  Issue,
+  IssueAssignee,
+  IssueComment,
+  Organisation,
+  OrganisationMember,
+  Project,
+  Sprint,
+  TimedSession,
+  User,
+} from "@sprint/shared";
+import { and, eq, inArray } from "drizzle-orm";
+import { drizzle } from "drizzle-orm/node-postgres";
+import { z } from "zod";
+
+const DATABASE_URL = process.env.DATABASE_URL;
+
+if (!DATABASE_URL) {
+  console.error("DATABASE_URL is not set");
+  process.exit(1);
+}
+
+const db = drizzle({
+  connection: {
+    connectionString: DATABASE_URL,
+  },
+});
+
+const timestampLikeSchema = z.union([z.string(), z.date()]);
+const optionalTimestampLikeSchema = z.union([z.string(), z.date(), z.null(), z.undefined()]);
+
+const importSchema = z
+  .object({
+    organisation: z
+      .object({
+        id: z.number(),
+        name: z.string(),
+        slug: z.string(),
+        description: z.string().nullable().optional(),
+        iconURL: z.string().nullable().optional(),
+        statuses: z.record(z.string()),
+        issueTypes: z.record(z.object({ icon: z.string(), color: z.string() })),
+        features: z.record(z.boolean()),
+        createdAt: optionalTimestampLikeSchema,
+        updatedAt: optionalTimestampLikeSchema,
+      })
+      .passthrough(),
+    members: z.array(
+      z
+        .object({
+          id: z.number(),
+          organisationId: z.number(),
+          userId: z.number(),
+          role: z.string(),
+          createdAt: optionalTimestampLikeSchema,
+        })
+        .passthrough(),
+    ),
+    projects: z.array(
+      z
+        .object({
+          id: z.number(),
+          key: z.string(),
+          name: z.string(),
+          organisationId: z.number(),
+          creatorId: z.number(),
+        })
+        .passthrough(),
+    ),
+    sprints: z.array(
+      z
+        .object({
+          id: z.number(),
+          projectId: z.number(),
+          name: z.string(),
+          color: z.string(),
+          startDate: timestampLikeSchema,
+          endDate: timestampLikeSchema,
+          createdAt: optionalTimestampLikeSchema,
+        })
+        .passthrough(),
+    ),
+    issues: z.array(
+      z
+        .object({
+          id: z.number(),
+          projectId: z.number(),
+          number: z.number(),
+          type: z.string(),
+          status: z.string(),
+          title: z.string(),
+          description: z.string(),
+          creatorId: z.number(),
+          sprintId: z.number().nullable().optional(),
+        })
+        .passthrough(),
+    ),
+    issueAssignees: z.array(
+      z
+        .object({
+          id: z.number(),
+          issueId: z.number(),
+          userId: z.number(),
+          assignedAt: optionalTimestampLikeSchema,
+        })
+        .passthrough(),
+    ),
+    issueComments: z.array(
+      z
+        .object({
+          id: z.number(),
+          issueId: z.number(),
+          userId: z.number(),
+          body: z.string(),
+          createdAt: optionalTimestampLikeSchema,
+          updatedAt: optionalTimestampLikeSchema,
+        })
+        .passthrough(),
+    ),
+    timedSessions: z.array(
+      z
+        .object({
+          id: z.number(),
+          issueId: z.number().nullable().optional(),
+          userId: z.number(),
+          timestamps: z.array(timestampLikeSchema),
+          endedAt: optionalTimestampLikeSchema,
+          createdAt: optionalTimestampLikeSchema,
+        })
+        .passthrough(),
+    ),
+  })
+  .passthrough();
+
+function toDate(value: unknown, fieldName: string) {
+  if (value === null || value === undefined) {
+    throw new Error(`${fieldName} is required`);
+  }
+
+  if (value instanceof Date) {
+    return value;
+  }
+
+  const parsed = new Date(String(value));
+  if (Number.isNaN(parsed.getTime())) {
+    throw new Error(`${fieldName} is not a valid date`);
+  }
+
+  return parsed;
+}
+
+function toOptionalDate(value: unknown) {
+  if (value === null || value === undefined) {
+    return undefined;
+  }
+
+  return toDate(value, "date");
+}
+
+function toNullableDate(value: unknown) {
+  if (value === null || value === undefined) {
+    return null;
+  }
+
+  return toDate(value, "date");
+}
+
+async function importOrg(filePath: string) {
+  const raw = await readFile(filePath, "utf-8");
+  const parsedJson = JSON.parse(raw) as unknown;
+  const parsed = importSchema.safeParse(parsedJson);
+
+  if (!parsed.success) {
+    throw new Error(`invalid export JSON format: ${parsed.error.issues[0]?.message ?? "unknown error"}`);
+  }
+
+  const data = parsed.data;
+
+  const referencedUserIds = new Set<number>();
+  for (const member of data.members) referencedUserIds.add(member.userId);
+  for (const project of data.projects) referencedUserIds.add(project.creatorId);
+  for (const issue of data.issues) referencedUserIds.add(issue.creatorId);
+  for (const assignee of data.issueAssignees) referencedUserIds.add(assignee.userId);
+  for (const comment of data.issueComments) referencedUserIds.add(comment.userId);
+  for (const session of data.timedSessions) referencedUserIds.add(session.userId);
+
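+  // Users themselves are not part of the export; every referenced user id must
+  // already exist in the target database or the import is aborted below.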
+  const userIds = [...referencedUserIds];
+  const existingUserIds =
+    userIds.length > 0
+      ? new Set<number>(
+          (await db.select({ id: User.id }).from(User).where(inArray(User.id, userIds))).map(
+            (u) => u.id,
+          ),
+        )
+      : new Set<number>();
+
+  const missingUserIds = userIds.filter((id) => !existingUserIds.has(id));
+  if (missingUserIds.length > 0) {
+    throw new Error(
+      `cannot import org because these user ids do not exist in this database: ${missingUserIds.join(", ")}`,
+    );
+  }
+
+  const existingOrg = await db
+    .select({ id: Organisation.id })
+    .from(Organisation)
+    .where(eq(Organisation.slug, data.organisation.slug))
+    .limit(1);
+
+  if (existingOrg[0]) {
+    throw new Error(`organisation slug already exists: ${data.organisation.slug}`);
+  }
+
+  await db.transaction(async (tx) => {
+    const importedOrganisation = {
+      name: data.organisation.name,
+      slug: data.organisation.slug,
+      description: data.organisation.description ?? null,
+      iconURL: data.organisation.iconURL ?? null,
+      statuses: data.organisation.statuses,
+      issueTypes: data.organisation.issueTypes,
+      features: data.organisation.features,
+      createdAt: toOptionalDate(data.organisation.createdAt),
+      updatedAt: toOptionalDate(data.organisation.updatedAt),
+    };
+
+    const [newOrg] = await tx
+      .insert(Organisation)
+      .values(importedOrganisation)
+      .returning({ id: Organisation.id });
+    if (!newOrg) {
+      throw new Error("failed to create organisation");
+    }
+
+    const oldProjectIdToNewId = new Map<number, number>();
+    const oldSprintIdToNewId = new Map<number, number>();
+    const oldIssueIdToNewId = new Map<number, number>();
+
+    for (const project of data.projects) {
+      const [createdProject] = await tx
+        .insert(Project)
+        .values({
+          key: project.key,
+          name: project.name,
+          creatorId: project.creatorId,
+          organisationId: newOrg.id,
+        })
+        .returning({ id: Project.id });
+
+      if (!createdProject) {
+        throw new Error(`failed to create project from export project id ${project.id}`);
+      }
+
+      oldProjectIdToNewId.set(project.id, createdProject.id);
+    }
+
+    for (const sprint of data.sprints) {
+      const mappedProjectId = oldProjectIdToNewId.get(sprint.projectId);
+      if (!mappedProjectId) {
+        throw new Error(`sprint ${sprint.id} references missing project ${sprint.projectId}`);
+      }
+
+      const [createdSprint] = await tx
+        .insert(Sprint)
+        .values({
+          name: sprint.name,
+          color: sprint.color,
+          projectId: mappedProjectId,
+          startDate: toDate(sprint.startDate, `sprint ${sprint.id} startDate`),
+          endDate: toDate(sprint.endDate, `sprint ${sprint.id} endDate`),
+          createdAt: toOptionalDate(sprint.createdAt),
+        })
+        .returning({ id: Sprint.id });
+
+      if (!createdSprint) {
+        throw new Error(`failed to create sprint from export sprint id ${sprint.id}`);
+      }
+
+      oldSprintIdToNewId.set(sprint.id, createdSprint.id);
+    }
+
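+    // Issues keep their exported number, type, status, and text; only the
+    // project and sprint foreign keys are remapped to the newly created rows.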
+    for (const issue of data.issues) {
+      const mappedProjectId = oldProjectIdToNewId.get(issue.projectId);
+      if (!mappedProjectId) {
+        throw new Error(`issue ${issue.id} references missing project ${issue.projectId}`);
+      }
+
+      const mappedSprintId = issue.sprintId ? oldSprintIdToNewId.get(issue.sprintId) : undefined;
+      if (issue.sprintId && !mappedSprintId) {
+        throw new Error(`issue ${issue.id} references missing sprint ${issue.sprintId}`);
+      }
+
+      const [createdIssue] = await tx
+        .insert(Issue)
+        .values({
+          number: issue.number,
+          type: issue.type,
+          status: issue.status,
+          title: issue.title,
+          description: issue.description,
+          creatorId: issue.creatorId,
+          projectId: mappedProjectId,
+          sprintId: mappedSprintId ?? null,
+        })
+        .returning({ id: Issue.id });
+
+      if (!createdIssue) {
+        throw new Error(`failed to create issue from export issue id ${issue.id}`);
+      }
+
+      oldIssueIdToNewId.set(issue.id, createdIssue.id);
+    }
+
+    if (data.members.length > 0) {
+      await tx.insert(OrganisationMember).values(
+        data.members.map((member) => ({
+          userId: member.userId,
+          role: member.role,
+          organisationId: newOrg.id,
+          createdAt: toOptionalDate(member.createdAt),
+        })),
+      );
+    }
+
+    for (const assignee of data.issueAssignees) {
+      const mappedIssueId = oldIssueIdToNewId.get(assignee.issueId);
+      if (!mappedIssueId) {
+        throw new Error(`issue assignee references missing issue ${assignee.issueId}`);
+      }
+
+      await tx
+        .insert(IssueAssignee)
+        .values({
+          userId: assignee.userId,
+          issueId: mappedIssueId,
+          assignedAt: toOptionalDate(assignee.assignedAt),
+        })
+        .onConflictDoNothing();
+    }
+
+    for (const comment of data.issueComments) {
+      const mappedIssueId = oldIssueIdToNewId.get(comment.issueId);
+      if (!mappedIssueId) {
+        throw new Error(`issue comment references missing issue ${comment.issueId}`);
+      }
+
+      await tx.insert(IssueComment).values({
+        userId: comment.userId,
+        body: comment.body,
+        issueId: mappedIssueId,
+        createdAt: toOptionalDate(comment.createdAt),
+        updatedAt: toOptionalDate(comment.updatedAt),
+      });
+    }
+
+    for (const session of data.timedSessions) {
+      let mappedIssueId: number | null | undefined = null;
+      if (session.issueId !== null && session.issueId !== undefined) {
+        mappedIssueId = oldIssueIdToNewId.get(session.issueId);
+        if (!mappedIssueId) {
+          throw new Error(`timed session references missing issue ${session.issueId}`);
+        }
+      }
+
+      await tx.insert(TimedSession).values({
+        userId: session.userId,
+        issueId: mappedIssueId,
+        timestamps: session.timestamps.map((ts, index) =>
+          toDate(ts, `timed session timestamp ${index}`),
+        ),
+        endedAt: toNullableDate(session.endedAt),
+        createdAt: toOptionalDate(session.createdAt),
+      });
+    }
+
+    const ownerExists = await tx
+      .select({ id: OrganisationMember.id })
+      .from(OrganisationMember)
+      .where(
+        and(eq(OrganisationMember.organisationId, newOrg.id), eq(OrganisationMember.role, "owner")),
+      )
+      .limit(1);
+
+    if (!ownerExists[0]) {
+      throw new Error("imported organisation has no owner member");
+    }
+  });
+}
+
+async function run() {
+  const filePath = Bun.argv[2] ?? "org.json";
+
+  try {
+    await importOrg(filePath);
+    console.log(`organisation import successful from ${filePath}`);
+    process.exit(0);
+  } catch (error) {
+    console.error("organisation import failed:", error);
+    process.exit(1);
+  }
+}
+
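+// usage (from packages/backend): bun scripts/import-org.ts [path/to/export.json]; defaults to ./org.json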
+run();
diff --git a/packages/backend/setup-docker-postgres b/packages/backend/setup-docker-postgres
new file mode 100755
index 0000000..9a236ac
--- /dev/null
+++ b/packages/backend/setup-docker-postgres
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+CONTAINER_NAME="${CONTAINER_NAME:-sprint-postgres}"
+POSTGRES_IMAGE="${POSTGRES_IMAGE:-postgres:16-alpine}"
+POSTGRES_USER="${POSTGRES_USER:-eussi}"
+POSTGRES_PASSWORD="${POSTGRES_PASSWORD:-password}"
+POSTGRES_DB="${POSTGRES_DB:-issue}"
+POSTGRES_PORT="${POSTGRES_PORT:-5432}"
+
+if ! command -v docker >/dev/null 2>&1; then
+  echo "docker is required but not installed."
+  exit 1
+fi
+
+if docker ps -a --filter "name=^/${CONTAINER_NAME}$" --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
+  docker update --restart unless-stopped "${CONTAINER_NAME}" >/dev/null
+  docker start "${CONTAINER_NAME}" >/dev/null || true
+  echo "container '${CONTAINER_NAME}' already exists and is running (or starting)."
+else
+  docker run -d \
+    --name "${CONTAINER_NAME}" \
+    --restart unless-stopped \
+    -e "POSTGRES_USER=${POSTGRES_USER}" \
+    -e "POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" \
+    -e "POSTGRES_DB=${POSTGRES_DB}" \
+    -p "${POSTGRES_PORT}:5432" \
+    "${POSTGRES_IMAGE}" >/dev/null
+  echo "container '${CONTAINER_NAME}' created and started."
+fi
+
+echo "connection string: postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost:${POSTGRES_PORT}/${POSTGRES_DB}"
diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts
index 63a37a7..93f11fc 100644
--- a/packages/backend/src/index.ts
+++ b/packages/backend/src/index.ts
@@ -51,7 +51,7 @@ const main = async () => {
       "/user/by-username": withGlobalAuthed(withAuth(routes.userByUsername)),
       "/user/update": withGlobalAuthed(withAuth(withCSRF(routes.userUpdate))),
-      "/user/upload-avatar": withGlobalAuthed(withAuth(routes.userUploadAvatar)),
+      "/user/upload-avatar": withGlobal(routes.userUploadAvatar),
       "/issue/create": withGlobalAuthed(withAuth(withCSRF(routes.issueCreate))),
       "/issue/by-id": withGlobalAuthed(withAuth(routes.issueById)),
diff --git a/packages/backend/src/routes/user/upload-avatar.ts b/packages/backend/src/routes/user/upload-avatar.ts
index 1490da9..472bd85 100644
--- a/packages/backend/src/routes/user/upload-avatar.ts
+++ b/packages/backend/src/routes/user/upload-avatar.ts
@@ -1,6 +1,6 @@
 import { randomUUID } from "node:crypto";
+import type { BunRequest } from "bun";
 import sharp from "sharp";
-import type { AuthedRequest } from "../../auth/middleware";
 // import { getSubscriptionByUserId } from "../../db/queries";
 import { s3Client, s3Endpoint, s3PublicUrl } from "../../s3";
@@ -17,7 +17,7 @@ async function isAnimatedGIF(buffer: Buffer): Promise<boolean> {
   }
 }
 
-export default async function uploadAvatar(req: AuthedRequest) {
+export default async function uploadAvatar(req: BunRequest) {
   if (req.method !== "POST") {
     return new Response("method not allowed", { status: 405 });
   }
diff --git a/packages/frontend/package.json b/packages/frontend/package.json
index 9c2b92e..6920556 100644
--- a/packages/frontend/package.json
+++ b/packages/frontend/package.json
@@ -7,7 +7,7 @@
     "host": "NODE_ENV=production vite --host",
     "build": "tsc && vite build",
     "preview": "vite preview",
-    "tauri": "export __NV_DISABLE_EXPLICIT_SYNC=1 && tauri dev"
+    "tauri": "tauri dev"
   },
   "dependencies": {
     "@iconify/react": "^6.0.2",
diff --git a/packages/frontend/src-tauri/Cargo.lock b/packages/frontend/src-tauri/Cargo.lock
index 3a6f9da..73fc4ff 100644
--- a/packages/frontend/src-tauri/Cargo.lock
+++ b/packages/frontend/src-tauri/Cargo.lock
@@ -1700,17 +1700,6 @@ dependencies = [
  "once_cell",
 ]
 
-[[package]]
-name = "issue"
-version = "0.1.0"
-dependencies = [
- "serde",
- "serde_json",
- "tauri",
- "tauri-build",
- "tauri-plugin-opener",
-]
-
 [[package]]
 name = "itoa"
 version = "1.0.15"
@@ -3384,6 +3373,17 @@ dependencies = [
  "system-deps",
 ]
 
+[[package]]
+name = "sprint"
+version = "0.1.0"
+dependencies = [
+ "serde",
+ "serde_json",
+ "tauri",
+ "tauri-build",
+ "tauri-plugin-opener",
+]
+
 [[package]]
 name = "stable_deref_trait"
 version = "1.2.1"
diff --git a/packages/frontend/src-tauri/src/main.rs b/packages/frontend/src-tauri/src/main.rs
index 40fc77b..f5bf94e 100644
--- a/packages/frontend/src-tauri/src/main.rs
+++ b/packages/frontend/src-tauri/src/main.rs
@@ -2,5 +2,5 @@
 #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
 
 fn main() {
-    issue_lib::run()
+    sprint_lib::run()
 }
diff --git a/packages/frontend/src/components/issue-details.tsx b/packages/frontend/src/components/issue-details.tsx
index aae6748..9416164 100644
--- a/packages/frontend/src/components/issue-details.tsx
+++ b/packages/frontend/src/components/issue-details.tsx
@@ -389,7 +389,7 @@ export function IssueDetails({
       )}
-
+
           {organisation?.Organisation.features.issueTypes && Object.keys(issueTypes).length > 0 && (
-
+
             {membersWithTimeTracking.map((member) => (
           {selectedProject ? (
-
+
             {sprints.map((sprintItem) => {
               const dateRange = getSprintDateRange(sprintItem);
               const isCurrent = isCurrentSprint(sprintItem);
diff --git a/packages/frontend/src/components/sprint-form.tsx b/packages/frontend/src/components/sprint-form.tsx
index 7e67606..f4646f3 100644
--- a/packages/frontend/src/components/sprint-form.tsx
+++ b/packages/frontend/src/components/sprint-form.tsx
@@ -42,11 +42,30 @@ const addDays = (date: Date, days: number) => {
   return next;
 };
 
-const getDefaultDates = () => {
-  const today = new Date();
+const getDefaultDates = (sprints: SprintRecord[]) => {
+  if (sprints.length === 0) {
+    const today = new Date();
+    return {
+      start: getStartOfDay(today),
+      end: getEndOfDay(addDays(today, 6)),
+    };
+  }
+
+  const latest = sprints.reduce((current, sprint) => {
+    const currentEnd = new Date(current.endDate).getTime();
+    const sprintEnd = new Date(sprint.endDate).getTime();
+    if (sprintEnd !== currentEnd) {
+      return sprintEnd > currentEnd ? sprint : current;
+    }
+    const currentStart = new Date(current.startDate).getTime();
+    const sprintStart = new Date(sprint.startDate).getTime();
+    return sprintStart > currentStart ? sprint : current;
+  }, sprints[0]);
+
+  const start = getStartOfDay(addDays(new Date(latest.endDate), 1));
   return {
-    start: getStartOfDay(today),
-    end: getEndOfDay(addDays(today, 14)),
+    start,
+    end: getEndOfDay(addDays(start, 6)),
   };
 };
 
@@ -78,11 +97,11 @@ export function SprintForm({
   const open = isControlled ? controlledOpen : internalOpen;
   const setOpen = isControlled ? (controlledOnOpenChange ?? (() => {})) : setInternalOpen;
 
-  const { start, end } = getDefaultDates();
+  const defaultDates = useMemo(() => getDefaultDates(sprints), [sprints]);
   const [name, setName] = useState("");
   const [colour, setColour] = useState(DEFAULT_SPRINT_COLOUR);
-  const [startDate, setStartDate] = useState(start);
-  const [endDate, setEndDate] = useState(end);
+  const [startDate, setStartDate] = useState(defaultDates.start);
+  const [endDate, setEndDate] = useState(defaultDates.end);
   const [submitAttempted, setSubmitAttempted] = useState(false);
   const [submitting, setSubmitting] = useState(false);
   const [error, setError] = useState(null);
@@ -107,7 +126,7 @@
   }, [endDate, startDate, submitAttempted]);
 
   const reset = () => {
-    const defaults = getDefaultDates();
+    const defaults = getDefaultDates(sprints);
     setName("");
     setColour(DEFAULT_SPRINT_COLOUR);
     setStartDate(defaults.start);
diff --git a/todo.md b/todo.md
index 7efa4e8..75eab78 100644
--- a/todo.md
+++ b/todo.md
@@ -3,7 +3,14 @@
 - trial system (IN HOUSE)
 
 - BUGS:
+- existing sprints are a little unclear in calendar - make it "bright coloured bars"
 
 - FEATURES:
+- ALLOW PARALLEL SPRINTS (ENABLE/DISABLE IN ORG SETTINGS)?
+- org should be able to disable ai chat
+- users should be able to disable ai chat
+- closing sprints
+  - sprint is closed by owner/admin
+  - all issues in sprint are moved to new sprint
 
 # LOW PRIORITY