diff --git a/.gitignore b/.gitignore
index d75edea..becda0c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
venv
-__pycache__
\ No newline at end of file
+__pycache__
+node_modules
\ No newline at end of file
diff --git a/Makefile b/Makefile
index c1b4ad6..a4b8778 100644
--- a/Makefile
+++ b/Makefile
@@ -6,15 +6,17 @@ COMPOSE=docker-compose -p $(PROJECT_NAME)
check-docker:
@docker info >nul 2>&1 || (echo. && echo ERROR: Docker is not running. Please start Docker Desktop and try again. && echo. && exit /b 1)
-## up: Start the mysql, api (port 5000), and api-test (port 5001) containers and seed default locations
+## up: Start mysql, api (port 5000), and api-test (port 5001); waits briefly, then verifies the mysql_api container is running. Data is seeded automatically when the API loads src.api.app (seed_data).
.PHONY: up
up: check-docker
$(COMPOSE) up -d
@echo "Waiting for services to be ready..."
@timeout /t 5 /nobreak >nul 2>&1 || sleep 5 2>/dev/null || true
- @$(COMPOSE) exec api python src/scripts/seed_locations.py
+ @echo Checking API container status...
+ @docker inspect mysql_api --format "{{.State.Status}}" 2>nul >nul || (echo. && echo ======================================== && echo ERROR: API container 'mysql_api' does not exist! && echo ======================================== && echo. && echo Try running: make build && echo. && exit /b 1)
+ @docker inspect mysql_api --format "{{.State.Status}}" 2>nul | findstr /C:"running" >nul 2>&1 || (echo. && echo ======================================== && echo ERROR: API container is not running! && echo ======================================== && echo. && echo Container status: && docker inspect mysql_api --format "Status: {{.State.Status}} (Exit Code: {{.State.ExitCode}})" 2>nul && echo. && echo Container logs: && echo. && docker logs mysql_api 2>&1 && echo. && echo ======================================== && echo. && exit /b 1)
-## up-empty: Start the mysql, api (port 5000), and api-test (port 5001) containers without seeding data
+## up-empty: Run docker-compose up -d only, with no wait or API health checks. Seeding still runs when each API process starts (same as up).
.PHONY: up-empty
up-empty: check-docker
$(COMPOSE) up -d
@@ -34,9 +36,9 @@ status: check-docker
logs: check-docker
$(COMPOSE) logs -f
-## mysql: Open a mysql shell into the mysql container
-.PHONY: mysql
-mysql: check-docker
+## mysql-shell: Open a mysql shell into the mysql container
+.PHONY: mysql-shell
+mysql-shell: check-docker
$(COMPOSE) exec db mysql -u mysqluser -pmysqlpassword mydb
## build: Build or rebuild services
@@ -57,17 +59,7 @@ install:
sudo apt-get install -y docker.io docker-compose make python3-pip
pip3 install -r requirements.txt
-## locations: Generate inventory-locations.json from generate-locations.js
-.PHONY: locations
-locations:
- @cd milventory && node generate-locations.js
-
## milventory: Start the milventory React app
.PHONY: milventory
milventory:
@cd milventory && npm install && npm start
-
-## help: Show this help menu
-help:
- @echo "Available commands:"
- @grep -E '^##' Makefile | sed 's/## //'
diff --git a/README.md b/README.md
index 9ccc2d5..107309e 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ Inventory management system for MIL with database-backed storage.
```bash
make up
```
- This starts MySQL and the Flask API, and seeds initial data.
+ This starts MySQL and the Flask API. Initial data is seeded when the API process loads (`src/scripts/seed_data.py` via `src.api.app`).
2. **Start frontend** (in a new terminal):
```bash
@@ -32,23 +32,21 @@ Inventory management system for MIL with database-backed storage.
- Email: `test@ufl.edu`
- Password: `test`
-## Available Commands
+## Available Commands (ordered by importance)
-- `make up` - Start all services and seed data
-- `make up-empty` - Start services without seeding
-- `make down` - Stop services (keeps data)
-- `make clean` - Stop services and remove all data
+- `make up` - Start all backend services
- `make milventory` - Start React frontend
-- `make mysql` - Open MySQL shell
+- `make down` - Stop backend services (keeps data)
+- `make clean` - Stop backend services and remove all data
- `make logs` - View service logs
- `make status` - Check service status
-- `make help` - Show all commands
+- `make mysql-shell` - Open MySQL shell
## Project Structure
- `milventory/` - React frontend application
- `src/api/` - Flask backend API
-- `src/sql/` - Database schema definitions
+- `src/tables/` - Database schema definitions (`table_*.sql` per domain)
- `src/scripts/` - Database seeding and utility scripts
## Development
diff --git a/milventory/generate-locations.js b/milventory/generate-locations.js
deleted file mode 100644
index 2eca059..0000000
--- a/milventory/generate-locations.js
+++ /dev/null
@@ -1,361 +0,0 @@
-// Script to generate inventory-locations.json
-// Run with: node generate-locations.js > public/inventory-locations.json
-// Or: node generate-locations.js (and redirect output manually)
-
-const fs = require('fs');
-const path = require('path');
-
-// Inventory bounds configuration
-const inventoryBounds = {
- "viewBox": {
- "x": 0,
- "y": 0,
- "width": 4000,
- "height": 4000
- },
- "room": {
- "x": 80,
- "y": 80,
- "width": 3600,
- "height": 3840,
- "rx": 18,
- "ry": 18
- }
-};
-
-// Configuration constants
-const drawerSize = 150;
-const drawerSpacing = 5;
-const topStartX = 720;
-const topY = 80;
-
-// Top drawers A-K (11 drawers)
-// Every 3rd drawer (C, F, I) is double width
-const drawerLabels = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K'];
-const topDrawers = [];
-let currentX = topStartX;
-
-drawerLabels.forEach((label, index) => {
- const isThird = (index + 1) % 3 === 0; // C, F, I (indices 2, 5, 8)
- const width = isThird ? drawerSize * 2 : drawerSize;
-
- topDrawers.push({
- title: `Drawer ${label}`,
- x: currentX,
- y: topY,
- width: width,
- height: drawerSize,
- fill: 'var(--drawer)',
- inventory: []
- });
-
- // Move X position for next drawer
- currentX += width + drawerSpacing;
-});
-
-// Top cabinets 1-4 (each is 2 drawers wide, positioned below drawers)
-// 2 drawers of space between each cabinet
-const cabinetWidth = drawerSize * 2 + drawerSpacing;
-const cabinetHeight = drawerSize + drawerSpacing;
-const cabinetSpacing = drawerSize * 2 + drawerSpacing*2; // 2 drawers of space between cabinets
-const cabinetY = topY + drawerSize + 20;
-
-const topCabinets = [];
-let cabinetX = topStartX;
-for (let i = 0; i < 4; i++) {
- topCabinets.push({
- title: `Cabinet ${i + 1}`,
- x: cabinetX,
- y: cabinetY,
- width: cabinetWidth,
- height: cabinetHeight,
- fill: 'var(--table)',
- inventory: []
- });
- // Move X position for next cabinet (cabinet width + 2 drawers of space)
- cabinetX += cabinetWidth + cabinetSpacing;
-}
-
-// Right side drawers L-AA
-// Note: Drawer N is taller (height 205), which affects spacing
-const rightX = 3500;
-const rightDrawerLabels = ['L', 'M', 'N', 'O', 'P', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA'];
-const rightDrawers = [];
-let rightYStart = 840;
-let rightY = rightYStart; // Starting Y position
-
-rightDrawerLabels.forEach((label, index) => {
- const height = label === 'N' ? 2*drawerSize + drawerSpacing : drawerSize; // Drawer N is taller
- rightDrawers.push({
- title: `Drawer ${label}`,
- x: rightX,
- y: rightY,
- width: drawerSize,
- height: height,
- fill: 'var(--drawer)',
- inventory: []
- });
- // Move Y position for next drawer (accounting for drawer height and spacing)
- rightY += height + drawerSpacing;
-});
-
-// Right side cabinets 5-12 (positioned to the left of drawers, aligned with drawer pairs)
-const rightCabinetWidth = drawerSize + drawerSpacing; // 105
-const rightCabinetHeight = drawerSize * 2 + drawerSpacing; // 205
-const rightCabinetX = rightX - rightCabinetWidth - 20; // 1825
-
-const rightCabinets = [];
-// Cabinets align with drawer positions (every 2 drawers, starting with L-M)
-let cabinetYPos = rightYStart; // Start aligned with Drawer L
-for (let i = 0; i < 8; i++) {
- rightCabinets.push({
- title: `Cabinet ${i + 5}`,
- x: rightCabinetX,
- y: cabinetYPos,
- width: rightCabinetWidth,
- height: rightCabinetHeight,
- fill: 'var(--table)',
- inventory: []
- });
- // Move to next cabinet position (every 2 drawers)
- // Account for Drawer N being taller
- if (i === 0) {
- // After Cabinet 5 (L-M), skip to after N
- cabinetYPos = rightYStart + drawerSize*2 + drawerSpacing*2; // After Drawer N
- } else {
- cabinetYPos += (drawerSize + drawerSpacing) * 2; // Normal spacing
- }
-}
-
-// Workbench
-const workbench = {
- title: 'Workbench',
- x: 140,
- y: 1680,
- width: 350,
- height: 520,
- fill: '#e7ebf3',
- isWorkbench: true,
- inventory: []
-};
-
-// Tall Cabinets 100-103 (File Cabinets)
-const tallCabinetX = 140;
-const tallCabinetWidth = 240;
-const tallCabinetHeight = 300;
-const tallCabinetSpacing = 305; // Vertical spacing
-const tallCabinetStartY = 2260;
-
-const tallCabinets = [];
-for (let i = 0; i < 4; i++) {
- tallCabinets.push({
- title: `Tall Cabinet ${103 - i}`, // 103, 102, 101, 100
- x: tallCabinetX,
- y: tallCabinetStartY + i * tallCabinetSpacing,
- width: tallCabinetWidth,
- height: tallCabinetHeight,
- fill: 'var(--files)',
- inventory: []
- });
-}
-
-// Tall Cabinet 104 (above right drawer section)
-// Calculate x position from room right border
-const roomRightBorder = inventoryBounds.room.x + inventoryBounds.room.width; // 80 + 3600 = 3680
-const tallCabinet104X = roomRightBorder - tallCabinetWidth - 30; // 3680 - 240 - 20 = 3420
-
-const tallCabinet104 = {
- title: 'Tall Cabinet 104',
- x: tallCabinet104X,
- y: rightYStart - tallCabinetHeight - 20, // Above the right drawers
- width: tallCabinetWidth,
- height: tallCabinetHeight,
- fill: 'var(--files)',
- inventory: []
-};
-
-// Tables A-H (2 columns x 4 rows)
-// Pattern: A,B in col1 rows 0,1; C,D in col2 rows 0,1; E,F in col1 rows 2,3; G,H in col2 rows 2,3
-const tableWidth = 720;
-const tableHeight = 300;
-const tableCol1X = 800;
-const tableCol2X = 2100;
-const tableRows = [1080, 1385, 2160, 2465]; // 4 rows
-
-const tables = [];
-const tableLabels = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'];
-
-tableLabels.forEach((label, index) => {
- // Pattern: pairs alternate columns, then move to next row pair
- // A,B (0,1): col0 rows 0,1
- // C,D (2,3): col1 rows 0,1
- // E,F (4,5): col0 rows 2,3
- // G,H (6,7): col1 rows 2,3
- const pairIndex = Math.floor(index / 2); // 0,0,1,1,2,2,3,3
- const colIndex = pairIndex % 2; // 0,0,1,1,0,0,1,1
- const rowInPair = index % 2; // 0,1,0,1,0,1,0,1
- const rowIndex = Math.floor(pairIndex / 2) * 2 + rowInPair; // 0,1,0,1,2,3,2,3
-
- tables.push({
- title: `Table ${label}`,
- x: colIndex === 0 ? tableCol1X : tableCol2X,
- y: tableRows[rowIndex],
- width: tableWidth,
- height: tableHeight,
- fill: 'var(--table)',
- inventory: []
- });
-});
-
-// Tables I-J (2 tables horizontally at bottom of room)
-const roomBottomY = 80 + 3840 - 300 - 20; // Room height - table height - margin
-const bottomTableY = roomBottomY; // ~3600
-const bottomTableSpacing = 50; // Space between the two tables
-const bottomTableStartX = 1400; // Start position for first table
-
-const bottomTables = [
- {
- title: 'Table I',
- x: bottomTableStartX,
- y: bottomTableY,
- width: tableWidth,
- height: tableHeight,
- fill: 'var(--table)',
- inventory: []
- },
- {
- title: 'Table J',
- x: bottomTableStartX + tableWidth + bottomTableSpacing,
- y: bottomTableY,
- width: tableWidth,
- height: tableHeight,
- fill: 'var(--table)',
- inventory: []
- }
-];
-
-// Combine all boxes in the desired order
-const allBoxes = [
- ...topDrawers,
- ...topCabinets,
- ...rightDrawers,
- ...rightCabinets,
- workbench,
- ...tallCabinets,
- tallCabinet104,
- ...tables,
- ...bottomTables
-];
-
-// Create the full JSON structure
-const output = {
- "inventory-bounds": inventoryBounds,
- "boxes": allBoxes
-};
-
-// Format function to align columns
-function formatInventoryData(data) {
- // Define attribute order
- const attributeOrder = ['title', 'x', 'y', 'width', 'height', 'fill', 'isWorkbench', 'inventory'];
-
- // Find the maximum width for each attribute's value across all boxes
- const maxValueWidths = {};
-
- attributeOrder.forEach(attr => {
- let maxWidth = 0;
- data.boxes.forEach(box => {
- if (box[attr] !== undefined) {
- const value = box[attr];
- let valueStr;
-
- if (Array.isArray(value)) {
- valueStr = JSON.stringify(value);
- } else if (typeof value === 'string') {
- valueStr = `"${value}"`;
- } else if (typeof value === 'boolean') {
- valueStr = String(value);
- } else {
- valueStr = String(value);
- }
-
- maxWidth = Math.max(maxWidth, valueStr.length);
- }
- });
- maxValueWidths[attr] = maxWidth;
- });
-
- // Format each box with aligned columns (all attributes on one line)
- const formatBox = (box) => {
- const parts = [];
-
- attributeOrder.forEach(attr => {
- if (box[attr] !== undefined) {
- const value = box[attr];
- let valueStr;
-
- if (Array.isArray(value)) {
- valueStr = JSON.stringify(value);
- } else if (typeof value === 'string') {
- valueStr = `"${value}"`;
- } else if (typeof value === 'boolean') {
- valueStr = String(value);
- } else {
- valueStr = String(value);
- }
-
- // Pad the value to align columns
- const paddedValue = valueStr.padEnd(maxValueWidths[attr]);
- parts.push(`"${attr}": ${paddedValue}`);
- }
- });
-
- return ` { ${parts.join(', ')} }`;
- };
-
- // Format the entire JSON structure
- const formattedBoxes = data.boxes.map(formatBox);
-
- // Preserve inventory-bounds if it exists
- let formattedOutput = '';
- if (data['inventory-bounds']) {
- const bounds = JSON.stringify(data['inventory-bounds'], null, 2);
- // Indent the bounds object properly
- const indentedBounds = bounds.split('\n').map((line, idx) => {
- if (idx === 0) return line; // First line
- return ' ' + line;
- }).join('\n');
-
- formattedOutput = `{
- "inventory-bounds": ${indentedBounds},
- "boxes": [
-${formattedBoxes.join(',\n')}
- ]
-}
-`;
- } else {
- formattedOutput = `{
- "boxes": [
-${formattedBoxes.join(',\n')}
- ]
-}
-`;
- }
-
- return formattedOutput;
-}
-
-// Write to file
-const outputPath = path.join(__dirname, 'public', 'inventory-locations.json');
-
-// First write the raw JSON
-const jsonString = JSON.stringify(output, null, 2);
-fs.writeFileSync(outputPath, jsonString, 'utf8');
-
-// Then format it with aligned columns
-const formattedOutput = formatInventoryData(output);
-fs.writeFileSync(outputPath, formattedOutput, 'utf8');
-
-console.log(`✓ Generated ${outputPath}`);
-console.log(` - Inventory bounds configured`);
-console.log(` - ${allBoxes.length} boxes generated`);
-console.log(` - Formatted with aligned columns`);
diff --git a/milventory/package-lock.json b/milventory/package-lock.json
index 362388e..70cb302 100644
--- a/milventory/package-lock.json
+++ b/milventory/package-lock.json
@@ -10,8 +10,10 @@
"dependencies": {
"d3": "^7.8.5",
"http-proxy-middleware": "^2.0.6",
+ "mysql2": "^3.18.2",
"react": "^18.2.0",
"react-dom": "^18.2.0",
+ "react-router-dom": "^6.8.0",
"react-scripts": "5.0.1"
}
},
@@ -51,6 +53,7 @@
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz",
"integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
+ "peer": true,
"dependencies": {
"@babel/code-frame": "^7.27.1",
"@babel/generator": "^7.28.5",
@@ -654,6 +657,7 @@
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.27.1.tgz",
"integrity": "sha512-p9OkPbZ5G7UT1MofwYFigGebnrzGJacoBSQM0/6bi/PUMVE+qlWDD/OalvQKbwgQzU6dl0xAv6r4X7Jme0RYxA==",
+ "peer": true,
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1"
},
@@ -1479,6 +1483,7 @@
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.27.1.tgz",
"integrity": "sha512-2KH4LWGSrJIkVf5tSiBFYuXDAoWRq2MMwgivCf+93dd0GQi8RXLjKA/0EvRnVV5G0hrHczsquXuD01L8s6dmBw==",
+ "peer": true,
"dependencies": {
"@babel/helper-annotate-as-pure": "^7.27.1",
"@babel/helper-module-imports": "^7.27.1",
@@ -2863,6 +2868,14 @@
}
}
},
+ "node_modules/@remix-run/router": {
+ "version": "1.23.2",
+ "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.23.2.tgz",
+ "integrity": "sha512-Ic6m2U/rMjTkhERIa/0ZtXJP17QUi2CbWE7cqx4J58M8aA3QTfW+2UlQ4psvTX9IO1RfNVhK3pcpdjej7L+t2w==",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
"node_modules/@rollup/plugin-babel": {
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz",
@@ -3395,6 +3408,7 @@
"version": "24.10.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.0.tgz",
"integrity": "sha512-qzQZRBqkFsYyaSWXuEHc2WR9c0a0CXwiE5FWUvn7ZM+vdy1uZLfCunD38UzhuB7YN/J11ndbDBcTmOdxJo9Q7A==",
+ "peer": true,
"dependencies": {
"undici-types": "~7.16.0"
}
@@ -3528,6 +3542,7 @@
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz",
"integrity": "sha512-TiZzBSJja/LbhNPvk6yc0JrX9XqhQ0hdh6M2svYfsHGejaKFIAGd9MQ+ERIMzLGlN/kZoYIgdxFV0PuljTKXag==",
+ "peer": true,
"dependencies": {
"@eslint-community/regexpp": "^4.4.0",
"@typescript-eslint/scope-manager": "5.62.0",
@@ -3579,6 +3594,7 @@
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.62.0.tgz",
"integrity": "sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==",
+ "peer": true,
"dependencies": {
"@typescript-eslint/scope-manager": "5.62.0",
"@typescript-eslint/types": "5.62.0",
@@ -3918,6 +3934,7 @@
"version": "8.15.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
+ "peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -4007,6 +4024,7 @@
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+ "peer": true,
"dependencies": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
@@ -4433,6 +4451,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/aws-ssl-profiles": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/aws-ssl-profiles/-/aws-ssl-profiles-1.1.2.tgz",
+ "integrity": "sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 6.0.0"
+ }
+ },
"node_modules/axe-core": {
"version": "4.11.0",
"resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.11.0.tgz",
@@ -4848,6 +4875,7 @@
"url": "https://github.com/sponsors/ai"
}
],
+ "peer": true,
"dependencies": {
"baseline-browser-mapping": "^2.8.19",
"caniuse-lite": "^1.0.30001751",
@@ -6085,6 +6113,7 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
"integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==",
+ "peer": true,
"engines": {
"node": ">=12"
}
@@ -6335,6 +6364,15 @@
"node": ">=0.4.0"
}
},
+ "node_modules/denque": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
+ "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=0.10"
+ }
+ },
"node_modules/depd": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
@@ -6890,6 +6928,7 @@
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz",
"integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==",
"deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.",
+ "peer": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1",
@@ -8088,6 +8127,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/generate-function": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",
+ "integrity": "sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ==",
+ "license": "MIT",
+ "dependencies": {
+ "is-property": "^1.0.2"
+ }
+ },
"node_modules/generator-function": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz",
@@ -9143,6 +9191,12 @@
"resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
"integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ=="
},
+ "node_modules/is-property": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz",
+ "integrity": "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==",
+ "license": "MIT"
+ },
"node_modules/is-regex": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz",
@@ -9464,6 +9518,7 @@
"version": "27.5.1",
"resolved": "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz",
"integrity": "sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==",
+ "peer": true,
"dependencies": {
"@jest/core": "^27.5.1",
"import-local": "^3.0.2",
@@ -10309,6 +10364,7 @@
"version": "1.21.7",
"resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz",
"integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==",
+ "peer": true,
"bin": {
"jiti": "bin/jiti.js"
}
@@ -10633,6 +10689,12 @@
"resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
"integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ=="
},
+ "node_modules/long": {
+ "version": "5.3.2",
+ "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz",
+ "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==",
+ "license": "Apache-2.0"
+ },
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
@@ -10660,6 +10722,21 @@
"yallist": "^3.0.2"
}
},
+ "node_modules/lru.min": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/lru.min/-/lru.min-1.1.4.tgz",
+ "integrity": "sha512-DqC6n3QQ77zdFpCMASA1a3Jlb64Hv2N2DciFGkO/4L9+q/IpIAuRlKOvCXabtRW6cQf8usbmM6BE/TOPysCdIA==",
+ "license": "MIT",
+ "engines": {
+ "bun": ">=1.0.0",
+ "deno": ">=1.30.0",
+ "node": ">=8.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wellwelwel"
+ }
+ },
"node_modules/magic-string": {
"version": "0.25.9",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz",
@@ -10888,6 +10965,44 @@
"multicast-dns": "cli.js"
}
},
+ "node_modules/mysql2": {
+ "version": "3.18.2",
+ "resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.18.2.tgz",
+ "integrity": "sha512-UfEShBFAZZEAKjySnTUuE7BgqkYT4mx+RjoJ5aqtmwSSvNcJ/QxQPXz/y3jSxNiVRedPfgccmuBtiPCSiEEytw==",
+ "license": "MIT",
+ "dependencies": {
+ "aws-ssl-profiles": "^1.1.2",
+ "denque": "^2.1.0",
+ "generate-function": "^2.3.1",
+ "iconv-lite": "^0.7.2",
+ "long": "^5.3.2",
+ "lru.min": "^1.1.4",
+ "named-placeholders": "^1.1.6",
+ "sql-escaper": "^1.3.3"
+ },
+ "engines": {
+ "node": ">= 8.0"
+ },
+ "peerDependencies": {
+ "@types/node": ">= 8"
+ }
+ },
+ "node_modules/mysql2/node_modules/iconv-lite": {
+ "version": "0.7.2",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz",
+ "integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==",
+ "license": "MIT",
+ "dependencies": {
+ "safer-buffer": ">= 2.1.2 < 3.0.0"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
+ }
+ },
"node_modules/mz": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz",
@@ -10898,6 +11013,18 @@
"thenify-all": "^1.0.0"
}
},
+ "node_modules/named-placeholders": {
+ "version": "1.1.6",
+ "resolved": "https://registry.npmjs.org/named-placeholders/-/named-placeholders-1.1.6.tgz",
+ "integrity": "sha512-Tz09sEL2EEuv5fFowm419c1+a/jSMiBjI9gHxVLrVdbUkkNUUfjsVYs9pVZu5oCon/kmRh9TfLEObFtkVxmY0w==",
+ "license": "MIT",
+ "dependencies": {
+ "lru.min": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=8.0.0"
+ }
+ },
"node_modules/nanoid": {
"version": "3.3.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
@@ -11546,6 +11673,7 @@
"url": "https://github.com/sponsors/ai"
}
],
+ "peer": true,
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
@@ -12612,6 +12740,7 @@
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz",
"integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==",
+ "peer": true,
"dependencies": {
"cssesc": "^3.0.0",
"util-deprecate": "^1.0.2"
@@ -12931,6 +13060,7 @@
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz",
"integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==",
+ "peer": true,
"dependencies": {
"loose-envify": "^1.1.0"
},
@@ -13057,6 +13187,7 @@
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz",
"integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==",
+ "peer": true,
"dependencies": {
"loose-envify": "^1.1.0",
"scheduler": "^0.23.2"
@@ -13079,10 +13210,41 @@
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.11.0.tgz",
"integrity": "sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A==",
+ "peer": true,
"engines": {
"node": ">=0.10.0"
}
},
+ "node_modules/react-router": {
+ "version": "6.30.3",
+ "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.3.tgz",
+ "integrity": "sha512-XRnlbKMTmktBkjCLE8/XcZFlnHvr2Ltdr1eJX4idL55/9BbORzyZEaIkBFDhFGCEWBBItsVrDxwx3gnisMitdw==",
+ "dependencies": {
+ "@remix-run/router": "1.23.2"
+ },
+ "engines": {
+ "node": ">=14.0.0"
+ },
+ "peerDependencies": {
+ "react": ">=16.8"
+ }
+ },
+ "node_modules/react-router-dom": {
+ "version": "6.30.3",
+ "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.3.tgz",
+ "integrity": "sha512-pxPcv1AczD4vso7G4Z3TKcvlxK7g7TNt3/FNGMhfqyntocvYKj+GCatfigGDjbLozC4baguJ0ReCigoDJXb0ag==",
+ "dependencies": {
+ "@remix-run/router": "1.23.2",
+ "react-router": "6.30.3"
+ },
+ "engines": {
+ "node": ">=14.0.0"
+ },
+ "peerDependencies": {
+ "react": ">=16.8",
+ "react-dom": ">=16.8"
+ }
+ },
"node_modules/react-scripts": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/react-scripts/-/react-scripts-5.0.1.tgz",
@@ -13485,6 +13647,7 @@
"version": "2.79.2",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.2.tgz",
"integrity": "sha512-fS6iqSPZDs3dr/y7Od6y5nha8dW1YnbgtsyotCVvoFGKbERG++CVRFv1meyGDE1SNItQA8BrnCw7ScdAhRJ3XQ==",
+ "peer": true,
"bin": {
"rollup": "dist/bin/rollup"
},
@@ -13719,6 +13882,7 @@
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
+ "peer": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
@@ -14181,6 +14345,21 @@
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g=="
},
+ "node_modules/sql-escaper": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/sql-escaper/-/sql-escaper-1.3.3.tgz",
+ "integrity": "sha512-BsTCV265VpTp8tm1wyIm1xqQCS+Q9NHx2Sr+WcnUrgLrQ6yiDIvHYJV5gHxsj1lMBy2zm5twLaZao8Jd+S8JJw==",
+ "license": "MIT",
+ "engines": {
+ "bun": ">=1.0.0",
+ "deno": ">=2.0.0",
+ "node": ">=12.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/mysqljs/sql-escaper?sponsor=1"
+ }
+ },
"node_modules/stable": {
"version": "0.1.8",
"resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz",
@@ -14934,19 +15113,6 @@
}
}
},
- "node_modules/tailwindcss/node_modules/yaml": {
- "version": "2.8.1",
- "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz",
- "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==",
- "optional": true,
- "peer": true,
- "bin": {
- "yaml": "bin.mjs"
- },
- "engines": {
- "node": ">= 14.6"
- }
- },
"node_modules/tapable": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz",
@@ -15256,6 +15422,7 @@
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
"integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
+ "peer": true,
"engines": {
"node": ">=10"
},
@@ -15642,6 +15809,7 @@
"version": "5.102.1",
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.102.1.tgz",
"integrity": "sha512-7h/weGm9d/ywQ6qzJ+Xy+r9n/3qgp/thalBbpOi5i223dPXKi04IBtqPN9nTd+jBc7QKfvDbaBnFipYp4sJAUQ==",
+ "peer": true,
"dependencies": {
"@types/eslint-scope": "^3.7.7",
"@types/estree": "^1.0.8",
@@ -15711,6 +15879,7 @@
"version": "4.15.2",
"resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.15.2.tgz",
"integrity": "sha512-0XavAZbNJ5sDrCbkpWL8mia0o5WPOd2YGtxrEiZkBK9FjLppIUK2TgxK6qGD2P3hUXTJNNPVibrerKcx5WkR1g==",
+ "peer": true,
"dependencies": {
"@types/bonjour": "^3.5.9",
"@types/connect-history-api-fallback": "^1.3.5",
@@ -16098,6 +16267,7 @@
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
+ "peer": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
diff --git a/milventory/package.json b/milventory/package.json
index a362cec..6ebb753 100644
--- a/milventory/package.json
+++ b/milventory/package.json
@@ -3,11 +3,13 @@
"version": "1.0.0",
"private": true,
"dependencies": {
+ "d3": "^7.8.5",
+ "http-proxy-middleware": "^2.0.6",
+ "mysql2": "^3.18.2",
"react": "^18.2.0",
"react-dom": "^18.2.0",
- "react-scripts": "5.0.1",
- "d3": "^7.8.5",
- "http-proxy-middleware": "^2.0.6"
+ "react-router-dom": "^6.8.0",
+ "react-scripts": "5.0.1"
},
"scripts": {
"start": "react-scripts start",
@@ -33,4 +35,3 @@
]
}
}
-
diff --git a/milventory/public/index.html b/milventory/public/index.html
index 0bfce82..7017897 100644
--- a/milventory/public/index.html
+++ b/milventory/public/index.html
@@ -3,7 +3,7 @@
- Robotics Room — Zoomable Map with Randomized Tooltips
+ Milventory
diff --git a/milventory/public/inventory-locations.json b/milventory/public/inventory-locations.json
deleted file mode 100644
index 76c9996..0000000
--- a/milventory/public/inventory-locations.json
+++ /dev/null
@@ -1,74 +0,0 @@
-{
- "inventory-bounds": {
- "viewBox": {
- "x": 0,
- "y": 0,
- "width": 4000,
- "height": 4000
- },
- "room": {
- "x": 80,
- "y": 80,
- "width": 3600,
- "height": 3840,
- "rx": 18,
- "ry": 18
- }
- },
- "boxes": [
- { "title": "Drawer A" , "x": 720 , "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer B" , "x": 875 , "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer C" , "x": 1030, "y": 80 , "width": 300, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer D" , "x": 1335, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer E" , "x": 1490, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer F" , "x": 1645, "y": 80 , "width": 300, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer G" , "x": 1950, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer H" , "x": 2105, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer I" , "x": 2260, "y": 80 , "width": 300, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer J" , "x": 2565, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer K" , "x": 2720, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Cabinet 1" , "x": 720 , "y": 250 , "width": 305, "height": 155, "fill": "var(--table)" },
- { "title": "Cabinet 2" , "x": 1335, "y": 250 , "width": 305, "height": 155, "fill": "var(--table)" },
- { "title": "Cabinet 3" , "x": 1950, "y": 250 , "width": 305, "height": 155, "fill": "var(--table)" },
- { "title": "Cabinet 4" , "x": 2565, "y": 250 , "width": 305, "height": 155, "fill": "var(--table)" },
- { "title": "Drawer L" , "x": 3500, "y": 840 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer M" , "x": 3500, "y": 995 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer N" , "x": 3500, "y": 1150, "width": 150, "height": 305, "fill": "var(--drawer)" },
- { "title": "Drawer O" , "x": 3500, "y": 1460, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer P" , "x": 3500, "y": 1615, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer R" , "x": 3500, "y": 1770, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer S" , "x": 3500, "y": 1925, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer T" , "x": 3500, "y": 2080, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer U" , "x": 3500, "y": 2235, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer V" , "x": 3500, "y": 2390, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer W" , "x": 3500, "y": 2545, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer X" , "x": 3500, "y": 2700, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer Y" , "x": 3500, "y": 2855, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer Z" , "x": 3500, "y": 3010, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer AA" , "x": 3500, "y": 3165, "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Cabinet 5" , "x": 3325, "y": 840 , "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Cabinet 6" , "x": 3325, "y": 1150, "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Cabinet 7" , "x": 3325, "y": 1460, "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Cabinet 8" , "x": 3325, "y": 1770, "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Cabinet 9" , "x": 3325, "y": 2080, "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Cabinet 10" , "x": 3325, "y": 2390, "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Cabinet 11" , "x": 3325, "y": 2700, "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Cabinet 12" , "x": 3325, "y": 3010, "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Workbench" , "x": 140 , "y": 1680, "width": 350, "height": 520, "fill": "#e7ebf3" , "isWorkbench": true },
- { "title": "Tall Cabinet 103", "x": 140 , "y": 2260, "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Tall Cabinet 102", "x": 140 , "y": 2565, "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Tall Cabinet 101", "x": 140 , "y": 2870, "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Tall Cabinet 100", "x": 140 , "y": 3175, "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Tall Cabinet 104", "x": 3410, "y": 520 , "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Table A" , "x": 800 , "y": 1080, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table B" , "x": 800 , "y": 1385, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table C" , "x": 2100, "y": 1080, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table D" , "x": 2100, "y": 1385, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table E" , "x": 800 , "y": 2160, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table F" , "x": 800 , "y": 2465, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table G" , "x": 2100, "y": 2160, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table H" , "x": 2100, "y": 2465, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table I" , "x": 1400, "y": 3600, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table J" , "x": 2170, "y": 3600, "width": 720, "height": 300, "fill": "var(--table)" }
- ]
-}
diff --git a/milventory/public/master-inventory-items.json b/milventory/public/master-inventory-items.json
deleted file mode 100644
index 942e6cf..0000000
--- a/milventory/public/master-inventory-items.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "items": []
-}
-
-
-
-
-
-
diff --git a/milventory/src/App.js b/milventory/src/App.js
index 725adb3..495a624 100644
--- a/milventory/src/App.js
+++ b/milventory/src/App.js
@@ -1,13 +1,20 @@
import React, { useState, useEffect } from 'react';
+import { BrowserRouter, Routes, Route, Navigate, useNavigate } from 'react-router-dom';
import { InventoryProvider, useInventory } from './context/InventoryContext';
-import MapComponent from './components/Map';
-import LeftPanel from './components/LeftPanel';
-import Tooltip from './components/Tooltip';
-import AddModal from './components/AddModal';
-import EditModal from './components/EditForm';
-import AddModePreview from './components/AddModePreview';
-import Login from './components/Login';
-import ErrorToast from './components/ErrorToast';
+import MapComponent from './components/Map/Map';
+import LeftPanel from './components/Layout/LeftPanel';
+import Tooltip from './components/Map/Tooltip';
+import AddModal from './components/Box/AddModal';
+import EditModal from './components/Box/EditForm';
+import AddModePreview from './components/Map/AddModePreview';
+import Login from './components/Auth/Login';
+import SignUp from './components/Auth/SignUp';
+import ErrorToast from './components/Common/ErrorToast';
+import ConflictErrorModal from './components/Common/ConflictErrorModal';
+import { BlockingDialogProvider } from './components/Common/BlockingDialogContext';
+import HistoryModal from './components/History/HistoryModal';
+import UserItemTypesModal from './components/Master/UserItemTypesModal';
+import AdminDashboard from './components/Admin/AdminDashboard';
import { auth } from './api';
function App() {
@@ -49,19 +56,104 @@ function App() {
);
}
+ return (
+
+
+
+
+ ) : (
+
+ )
+ }
+ />
+
+ ) : (
+
+
+
+ )
+ }
+ />
+
+
+
+
+
+ }
+ />
+ } />
+
+
+
+ );
+}
+
+// Protected Route component
+function ProtectedRoute({ children, requireLeader = false }) {
+ const [user, setUser] = useState(null);
+ const [loading, setLoading] = useState(true);
+
+ useEffect(() => {
+ auth.getCurrentUser()
+ .then((userData) => {
+ setUser(userData);
+ setLoading(false);
+ })
+ .catch(() => {
+ setUser(null);
+ setLoading(false);
+ });
+ }, []);
+
+ if (loading) {
+ return (
+
+ );
+ }
+
if (!user) {
- return ;
+ return ;
}
- return (
-
-
-
- );
+ if (requireLeader && !user.is_leader) {
+ return ;
+ }
+
+ return children;
}
function AppContent({ user, onLogout }) {
- const { wrapRef, svgRef, isLoading, error, setError } = useInventory();
+ const {
+ wrapRef,
+ svgRef,
+ isLoading,
+ error,
+ setError,
+ conflictError,
+ setConflictError,
+ dismissMasterWorkbenchUI,
+ } = useInventory();
+ const navigate = useNavigate();
+ const [showHistoryModal, setShowHistoryModal] = useState(false);
+ const [showUserTypesModal, setShowUserTypesModal] = useState(false);
// Handle 401 errors by logging out
useEffect(() => {
@@ -92,16 +184,82 @@ function AppContent({ user, onLogout }) {
- Zoom:
wheel · Pan:
drag · Hover for name · Click for details
{user && (
Logged in as {user.first_name} {user.last_name} ({user.email})
+ {user.is_leader && (
+
+ )}
+
+
@@ -111,7 +112,7 @@ const Login = ({ onLoginSuccess }) => {
backgroundColor: 'var(--room, #1b1f2a)',
color: 'var(--text, #e6ebf4)',
}}
- placeholder="test"
+ placeholder="Password"
/>
@@ -159,6 +160,22 @@ const Login = ({ onLoginSuccess }) => {
+
+ Need an account?{' '}
+
+ Sign up
+
+
+
{
+ const [firstName, setFirstName] = useState('');
+ const [lastName, setLastName] = useState('');
+ const [email, setEmail] = useState('');
+ const [password, setPassword] = useState('');
+ const [confirmPassword, setConfirmPassword] = useState('');
+ const [error, setError] = useState('');
+ const [loading, setLoading] = useState(false);
+
+ const handleSubmit = async (e) => {
+ e.preventDefault();
+ setError('');
+
+ if (password !== confirmPassword) {
+ setError('Passwords do not match.');
+ return;
+ }
+
+ setLoading(true);
+
+ try {
+ const result = await auth.register({
+ firstName,
+ lastName,
+ email,
+ password,
+ confirmPassword,
+ });
+ if (result.success && onSignUpSuccess) {
+ onSignUpSuccess(result.user);
+ }
+ } catch (err) {
+ setError(err.message || 'Could not create account. Please try again.');
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ return (
+
+
+
+ Create account
+
+
+
+
+
+ Already have an account?{' '}
+
+ Log in
+
+
+
+
+ );
+};
+
+export default SignUp;
diff --git a/milventory/src/components/AddModal.js b/milventory/src/components/Box/AddModal.js
similarity index 59%
rename from milventory/src/components/AddModal.js
rename to milventory/src/components/Box/AddModal.js
index e928c7a..3707813 100644
--- a/milventory/src/components/AddModal.js
+++ b/milventory/src/components/Box/AddModal.js
@@ -1,70 +1,92 @@
import React, { useState, useEffect, useRef, useMemo } from 'react';
-import { useInventory } from '../context/InventoryContext';
+import { useInventory } from '../../context/InventoryContext';
+import {
+ hasShelves,
+ getShelfCount,
+ getShelfLabel,
+ getShelfIndicesTopToBottom
+} from '../../utils/shelfLabels';
const AddModal = () => {
const { currentAddingBox, currentAddingIndex, setCurrentAddingBox, setCurrentAddingIndex, inventoryData, updateInventory, masterInventoryItems } = useInventory();
-
- const [selectedItemName, setSelectedItemName] = useState('');
+
+ const [selectedSupplyPublicId, setSelectedSupplyPublicId] = useState('');
const [qty, setQty] = useState(1);
const [selectedShelf, setSelectedShelf] = useState(null);
const [searchQuery, setSearchQuery] = useState('');
const nameInputRef = useRef(null);
- // Check if this is a Tall Cabinet
- const isFileCabinet = currentAddingBox && currentAddingBox.startsWith('Tall Cabinet');
-
- // Shelf definitions for file cabinets
- const SHELF_NAMES = [
- 'Shelf 6 (Top)',
- 'Shelf 5',
- 'Shelf 4',
- 'Shelf 3',
- 'Shelf 2',
- 'Shelf 1 (Bottom)'
- ];
+ const currentBox = currentAddingBox ? inventoryData.get(currentAddingBox) : null;
+ const isShelved = hasShelves(currentBox);
+ const shelfOptions = useMemo(() => {
+ const shelfCount = getShelfCount(currentBox);
+ return getShelfIndicesTopToBottom(shelfCount).map((shelfIdx) => ({
+ value: shelfIdx,
+ label: getShelfLabel(shelfIdx, shelfCount)
+ }));
+ }, [currentBox]);
+
+ const masterRows = useMemo(
+ () =>
+ Array.from(masterInventoryItems.entries()).map(([pid, data]) => ({
+ pid,
+ data,
+ label:
+ data.type_name != null && String(data.type_name).length > 0
+ ? `${data.name} (${data.type_name})`
+ : `${data.name} (#${data.id})`
+ })),
+ [masterInventoryItems]
+ );
const filteredMasterItems = useMemo(() => {
- const itemsArray = Array.from(masterInventoryItems.keys());
if (!searchQuery.trim()) {
- return itemsArray;
+ return masterRows;
}
const query = searchQuery.toLowerCase();
- return itemsArray.filter(name => name.toLowerCase().includes(query));
- }, [masterInventoryItems, searchQuery]);
+ return masterRows.filter(
+ (r) =>
+ r.data.name.toLowerCase().includes(query) ||
+ (r.data.type_name || '').toLowerCase().includes(query)
+ );
+ }, [masterRows, searchQuery]);
useEffect(() => {
if (currentAddingBox) {
- setSelectedItemName('');
+ setSelectedSupplyPublicId('');
setQty(1);
setSearchQuery('');
- // Set default shelf to first one if Tall Cabinet
- if (isFileCabinet) {
- setSelectedShelf(0);
+ if (isShelved) {
+ const shelfCount = getShelfCount(currentBox);
+ setSelectedShelf(Math.max(0, shelfCount - 1));
} else {
setSelectedShelf(null);
}
setTimeout(() => nameInputRef.current?.focus(), 0);
}
- }, [currentAddingBox, isFileCabinet]);
+ }, [currentAddingBox, isShelved, currentBox]);
const handleSave = () => {
- if (currentAddingBox && selectedItemName.trim()) {
+ if (currentAddingBox && selectedSupplyPublicId) {
+ const row = masterInventoryItems.get(selectedSupplyPublicId);
+ if (!row) return;
+
const boxData = inventoryData.get(currentAddingBox);
if (boxData) {
const newItem = {
- name: selectedItemName.trim(),
- qty: parseInt(qty) || 1
+ name: row.name,
+ supplyId: row.id,
+ supplyPublicId: row.public_id || selectedSupplyPublicId,
+ qty: parseInt(qty, 10) || 1
};
-
- // Tag item with shelf number if Tall Cabinet
- if (isFileCabinet && selectedShelf !== null) {
+
+ if (isShelved && selectedShelf !== null) {
newItem.shelf = selectedShelf;
}
-
+
const newInventory = [...boxData.inventory];
-
- // For file cabinets, insert after the last item in the same shelf
- if (isFileCabinet && selectedShelf !== null) {
+
+ if (isShelved && selectedShelf !== null) {
let lastIndexInShelf = -1;
for (let i = newInventory.length - 1; i >= 0; i--) {
if ((newInventory[i].shelf ?? 0) === selectedShelf) {
@@ -78,7 +100,7 @@ const AddModal = () => {
} else {
newInventory.push(newItem);
}
-
+
updateInventory(currentAddingBox, newInventory);
setCurrentAddingBox(null);
setCurrentAddingIndex(null);
@@ -116,15 +138,15 @@ const AddModal = () => {
>
Add Item
- {isFileCabinet && (
+ {isShelved && (
@@ -137,15 +159,15 @@ const AddModal = () => {
value={searchQuery}
onChange={(e) => {
setSearchQuery(e.target.value);
- if (e.target.value && filteredMasterItems.length > 0 && !selectedItemName) {
- setSelectedItemName(filteredMasterItems[0]);
+ if (e.target.value && filteredMasterItems.length > 0 && !selectedSupplyPublicId) {
+ setSelectedSupplyPublicId(filteredMasterItems[0].pid);
}
}}
list="master-items-list"
/>
{filteredMasterItems.length === 0 && searchQuery && (
@@ -155,14 +177,14 @@ const AddModal = () => {
)}
@@ -177,7 +199,7 @@ const AddModal = () => {
-
@@ -187,4 +209,3 @@ const AddModal = () => {
};
export default AddModal;
-
diff --git a/milventory/src/components/EditForm.js b/milventory/src/components/Box/EditForm.js
similarity index 52%
rename from milventory/src/components/EditForm.js
rename to milventory/src/components/Box/EditForm.js
index 0f6d539..67e07ff 100644
--- a/milventory/src/components/EditForm.js
+++ b/milventory/src/components/Box/EditForm.js
@@ -1,38 +1,77 @@
-import React, { useState, useEffect, useRef } from 'react';
-import { useInventory } from '../context/InventoryContext';
+import React, { useState, useEffect, useRef, useMemo } from 'react';
+import { useInventory } from '../../context/InventoryContext';
+
+function rowPublicId(item) {
+ if (!item) return null;
+ return (
+ item.supplyPublicId ||
+ (item.supplyId != null ? `__legacy_id_${item.supplyId}` : null)
+ );
+}
const EditModal = () => {
- const { currentEditingBox, currentEditingIndex, inventoryData, setCurrentEditingBox, setCurrentEditingIndex, setLastSelectedIndex, updateInventory, masterInventoryItems, resolveMasterItem } = useInventory();
-
+ const {
+ currentEditingBox,
+ currentEditingIndex,
+ inventoryData,
+ setCurrentEditingBox,
+ setCurrentEditingIndex,
+ setLastSelectedIndex,
+ updateInventory,
+ masterInventoryItems,
+ resolveMasterItem
+ } = useInventory();
+
const boxData = currentEditingBox ? inventoryData.get(currentEditingBox) : null;
const item = boxData && currentEditingIndex !== null ? boxData.inventory[currentEditingIndex] : null;
- const masterItem = item ? resolveMasterItem(item.name) : null;
-
- const [selectedItemName, setSelectedItemName] = useState('');
+ const itemPid = rowPublicId(item);
+ const masterItem = itemPid ? resolveMasterItem(itemPid) : null;
+
+ const masterRows = useMemo(
+ () =>
+ Array.from(masterInventoryItems.entries()).map(([pid, data]) => ({
+ pid,
+ data,
+ label:
+ data.type_name != null && String(data.type_name).length > 0
+ ? `${data.name} (${data.type_name})`
+ : `${data.name} (#${data.id})`
+ })),
+ [masterInventoryItems]
+ );
+
+ const [selectedSupplyPublicId, setSelectedSupplyPublicId] = useState('');
const [qty, setQty] = useState(1);
const nameInputRef = useRef(null);
useEffect(() => {
if (item) {
- setSelectedItemName(item.name || '');
+ setSelectedSupplyPublicId(itemPid || '');
setQty(item.qty || 1);
setTimeout(() => nameInputRef.current?.focus(), 0);
} else {
- setSelectedItemName('');
+ setSelectedSupplyPublicId('');
setQty(1);
}
- }, [item]);
+ }, [item, itemPid]);
+
+ const previewMaster = selectedSupplyPublicId
+ ? resolveMasterItem(selectedSupplyPublicId)
+ : masterItem;
const handleSave = () => {
- if (currentEditingBox !== null && currentEditingIndex !== null && selectedItemName.trim()) {
- const boxData = inventoryData.get(currentEditingBox);
- if (boxData) {
- const newInventory = [...boxData.inventory];
+ if (currentEditingBox !== null && currentEditingIndex !== null && selectedSupplyPublicId) {
+ const boxDataInner = inventoryData.get(currentEditingBox);
+ const row = masterInventoryItems.get(selectedSupplyPublicId);
+ if (boxDataInner && row) {
+ const newInventory = [...boxDataInner.inventory];
const existingItem = newInventory[currentEditingIndex];
newInventory[currentEditingIndex] = {
- name: selectedItemName.trim(),
- qty: parseInt(qty) || 1,
- shelf: existingItem.shelf // Preserve shelf if it exists
+ name: row.name,
+ supplyId: row.id,
+ supplyPublicId: row.public_id || selectedSupplyPublicId,
+ qty: parseInt(qty, 10) || 1,
+ shelf: existingItem.shelf
};
updateInventory(currentEditingBox, newInventory);
setCurrentEditingBox(null);
@@ -75,26 +114,26 @@ const EditModal = () => {
Edit Item
- {masterItem && (
+ {previewMaster && (
- {masterItem.description &&
{masterItem.description}
}
- {masterItem.image && (
+ {previewMaster.description &&
{previewMaster.description}
}
+ {previewMaster.image && (
-

+
- )}
-
+ )}
+
)}
{
Cancel
-
+
Save
@@ -117,4 +156,3 @@ const EditModal = () => {
};
export default EditModal;
-
diff --git a/milventory/src/components/Common/BlockingDialog.css b/milventory/src/components/Common/BlockingDialog.css
new file mode 100644
index 0000000..c9fa239
--- /dev/null
+++ b/milventory/src/components/Common/BlockingDialog.css
@@ -0,0 +1,39 @@
+.blocking-dialog-overlay {
+ position: fixed;
+ inset: 0;
+ background: rgba(0, 0, 0, 0.65);
+ backdrop-filter: blur(4px);
+ z-index: 11000;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ padding: 1rem;
+ box-sizing: border-box;
+}
+
+.blocking-dialog.modal {
+ max-width: 420px;
+ width: 100%;
+ margin: 0;
+}
+
+.blocking-dialog-message {
+ margin: 0;
+ color: var(--text);
+ font-size: 0.9rem;
+ line-height: 1.5;
+ white-space: pre-wrap;
+}
+
+.blocking-dialog .modal-actions {
+ margin-top: 1rem;
+}
+
+.modal button.blocking-dialog-confirm-danger {
+ background: #c53030;
+ color: #fff;
+}
+
+.modal button.blocking-dialog-confirm-danger:hover {
+ opacity: 0.92;
+}
diff --git a/milventory/src/components/Common/BlockingDialogContext.js b/milventory/src/components/Common/BlockingDialogContext.js
new file mode 100644
index 0000000..cdcae6e
--- /dev/null
+++ b/milventory/src/components/Common/BlockingDialogContext.js
@@ -0,0 +1,136 @@
+import React, {
+ createContext,
+ useContext,
+ useState,
+ useCallback,
+ useEffect,
+} from 'react';
+import './BlockingDialog.css';
+
+const BlockingDialogContext = createContext(null);
+
+/**
+ * App-wide blocking modal replacements for alert() / confirm().
+ * showAlert(message, { title?, confirmLabel? }) -> Promise
+ * showConfirm(message, { title?, confirmLabel?, cancelLabel?, danger? }) -> Promise
+ */
+export function BlockingDialogProvider({ children }) {
+ const [dialog, setDialog] = useState(null);
+
+ const showAlert = useCallback((message, options = {}) => {
+ const text = typeof message === 'string' ? message : String(message);
+ return new Promise((resolve) => {
+ setDialog({
+ kind: 'alert',
+ heading: options.title != null && options.title !== '' ? options.title : null,
+ message: text,
+ confirmLabel: options.confirmLabel || 'OK',
+ onConfirm: () => {
+ setDialog(null);
+ resolve();
+ },
+ });
+ });
+ }, []);
+
+ const showConfirm = useCallback((message, options = {}) => {
+ const text = typeof message === 'string' ? message : String(message);
+ return new Promise((resolve) => {
+ setDialog({
+ kind: 'confirm',
+ heading: options.title != null && options.title !== '' ? options.title : 'Confirm',
+ message: text,
+ confirmLabel: options.confirmLabel || 'OK',
+ cancelLabel: options.cancelLabel || 'Cancel',
+ danger: Boolean(options.danger),
+ onConfirm: () => {
+ setDialog(null);
+ resolve(true);
+ },
+ onCancel: () => {
+ setDialog(null);
+ resolve(false);
+ },
+ });
+ });
+ }, []);
+
+ useEffect(() => {
+ if (!dialog) return undefined;
+ const onKey = (e) => {
+ if (e.key === 'Escape') {
+ e.stopPropagation();
+ if (dialog.kind === 'confirm') dialog.onCancel();
+ else dialog.onConfirm();
+ }
+ };
+ document.addEventListener('keydown', onKey, true);
+ return () => document.removeEventListener('keydown', onKey, true);
+ }, [dialog]);
+
+ const handleOverlayMouseDown = (e) => {
+ if (e.target !== e.currentTarget) return;
+ if (dialog.kind === 'confirm') dialog.onCancel();
+ else dialog.onConfirm();
+ };
+
+ return (
+
+ {children}
+ {dialog && (
+
+
e.stopPropagation()}
+ >
+ {dialog.heading && (
+
{dialog.heading}
+ )}
+
+ {dialog.message}
+
+
+ {dialog.kind === 'confirm' && (
+
+ {dialog.cancelLabel}
+
+ )}
+
+ {dialog.confirmLabel}
+
+
+
+
+ )}
+
+ );
+}
+
+export function useBlockingDialog() {
+ const ctx = useContext(BlockingDialogContext);
+ if (!ctx) {
+ throw new Error('useBlockingDialog must be used within BlockingDialogProvider');
+ }
+ return ctx;
+}
diff --git a/milventory/src/components/Common/ConflictErrorModal.css b/milventory/src/components/Common/ConflictErrorModal.css
new file mode 100644
index 0000000..ef970b2
--- /dev/null
+++ b/milventory/src/components/Common/ConflictErrorModal.css
@@ -0,0 +1,100 @@
+.conflict-error-overlay {
+ position: fixed;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background-color: rgba(0, 0, 0, 0.5);
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ z-index: 10000;
+ /* Non-blocking: allow interaction with background */
+ pointer-events: auto;
+}
+
+.conflict-error-modal {
+ background: var(--bg, #ffffff);
+ border-radius: 8px;
+ padding: 2rem;
+ max-width: 500px;
+ width: 90%;
+ box-shadow: 0 4px 20px rgba(0, 0, 0, 0.3);
+ position: relative;
+ color: var(--text, #333);
+}
+
+.conflict-error-close {
+ position: absolute;
+ top: 1rem;
+ right: 1rem;
+ background: none;
+ border: none;
+ font-size: 2rem;
+ cursor: pointer;
+ color: var(--text, #333);
+ line-height: 1;
+ padding: 0;
+ width: 2rem;
+ height: 2rem;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+}
+
+.conflict-error-close:hover {
+ color: var(--accent, #4a9eff);
+}
+
+.conflict-error-modal h2 {
+ margin: 0 0 1rem 0;
+ color: var(--text, #333);
+ font-size: 1.5rem;
+}
+
+.conflict-error-modal p {
+ margin: 0.5rem 0;
+ color: var(--text, #666);
+}
+
+.conflict-error-item {
+ margin-top: 1rem;
+ padding: 0.75rem;
+ background: var(--table, #f5f5f5);
+ border-radius: 4px;
+}
+
+.conflict-error-actions {
+ display: flex;
+ gap: 1rem;
+ margin-top: 1.5rem;
+ justify-content: flex-end;
+}
+
+.conflict-error-refresh,
+.conflict-error-dismiss {
+ padding: 0.5rem 1rem;
+ border: none;
+ border-radius: 4px;
+ cursor: pointer;
+ font-size: 1rem;
+ transition: background-color 0.2s;
+}
+
+.conflict-error-refresh {
+ background: var(--accent, #4a9eff);
+ color: white;
+}
+
+.conflict-error-refresh:hover {
+ background: var(--accent-hover, #3a8eef);
+}
+
+.conflict-error-dismiss {
+ background: var(--table, #e0e0e0);
+ color: var(--text, #333);
+}
+
+.conflict-error-dismiss:hover {
+ background: var(--table-hover, #d0d0d0);
+}
diff --git a/milventory/src/components/Common/ConflictErrorModal.js b/milventory/src/components/Common/ConflictErrorModal.js
new file mode 100644
index 0000000..6fd4edb
--- /dev/null
+++ b/milventory/src/components/Common/ConflictErrorModal.js
@@ -0,0 +1,52 @@
+import React from 'react';
+import './ConflictErrorModal.css';
+
+const ConflictErrorModal = ({ visible, errorType, message, supplyName, onClose, onRefresh }) => {
+ const handleKeyDown = React.useCallback((e) => {
+ if (e.key === 'Escape') {
+ onClose();
+ }
+ }, [onClose]);
+
+ React.useEffect(() => {
+ if (visible) {
+ document.addEventListener('keydown', handleKeyDown);
+ return () => {
+ document.removeEventListener('keydown', handleKeyDown);
+ };
+ }
+ }, [visible, handleKeyDown]);
+
+ const handleOverlayClick = (e) => {
+ if (e.target === e.currentTarget) {
+ onClose();
+ }
+ };
+
+ if (!visible) return null;
+
+ return (
+
+
e.stopPropagation()}>
+
×
+
⚠️ Action Failed
+
{message}
+ {supplyName && (
+
Item: {supplyName}
+ )}
+
+ {onRefresh && (
+
+ Refresh Page
+
+ )}
+
+ Dismiss
+
+
+
+
+ );
+};
+
+export default ConflictErrorModal;
diff --git a/milventory/src/components/ErrorToast.js b/milventory/src/components/Common/ErrorToast.js
similarity index 90%
rename from milventory/src/components/ErrorToast.js
rename to milventory/src/components/Common/ErrorToast.js
index 0cec537..9808b1e 100644
--- a/milventory/src/components/ErrorToast.js
+++ b/milventory/src/components/Common/ErrorToast.js
@@ -1,4 +1,5 @@
import React, { useEffect } from 'react';
+import { createPortal } from 'react-dom';
const ErrorToast = ({ error, onClose }) => {
useEffect(() => {
@@ -12,7 +13,7 @@ const ErrorToast = ({ error, onClose }) => {
if (!error) return null;
- return (
+ const toast = (
{
padding: '1rem 1.5rem',
borderRadius: '4px',
boxShadow: '0 4px 12px rgba(0,0,0,0.3)',
- zIndex: 10000,
+ zIndex: 12000,
maxWidth: '400px',
display: 'flex',
alignItems: 'center',
@@ -52,6 +53,8 @@ const ErrorToast = ({ error, onClose }) => {
);
+
+ return createPortal(toast, document.body);
};
export default ErrorToast;
diff --git a/milventory/src/components/History/HistoryModal.css b/milventory/src/components/History/HistoryModal.css
new file mode 100644
index 0000000..53a97bb
--- /dev/null
+++ b/milventory/src/components/History/HistoryModal.css
@@ -0,0 +1,200 @@
+.history-modal-overlay {
+ position: fixed;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background-color: rgba(0, 0, 0, 0.7);
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ z-index: 10000;
+}
+
+.history-modal {
+ background: var(--panel, #0e1116);
+ border-radius: 8px;
+ width: 90%;
+ max-width: 1400px;
+ height: 85%;
+ max-height: 90vh;
+ display: flex;
+ flex-direction: column;
+ box-shadow: 0 4px 20px rgba(0, 0, 0, 0.5);
+ color: var(--text, #e6ebf4);
+ border: 1px solid rgba(255,255,255,.1);
+}
+
+.history-modal-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 1.5rem;
+ border-bottom: 1px solid rgba(255,255,255,.1);
+}
+
+.history-modal-header h2 {
+ margin: 0;
+ font-size: 1.5rem;
+ color: var(--text, #e6ebf4);
+}
+
+.history-modal-close {
+ background: none;
+ border: none;
+ font-size: 2rem;
+ cursor: pointer;
+ color: var(--text, #e6ebf4);
+ line-height: 1;
+ padding: 0;
+ width: 2rem;
+ height: 2rem;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+}
+
+.history-modal-close:hover {
+ color: var(--accent, #9bb7ff);
+}
+
+.history-modal-filters {
+ display: flex;
+ gap: 1rem;
+ padding: 1rem 1.5rem;
+ border-bottom: 1px solid rgba(255,255,255,.1);
+}
+
+.history-filter-select {
+ padding: 0.5rem;
+ border: 1px solid rgba(255,255,255,.15);
+ border-radius: 4px;
+ background: rgba(0,0,0,.3);
+ color: var(--text, #e6ebf4);
+ font-size: 0.9rem;
+}
+
+.history-filter-select:focus {
+ outline: none;
+ border-color: var(--accent, #9bb7ff);
+}
+
+.history-filter-search {
+ flex: 1;
+ padding: 0.5rem;
+ border: 1px solid rgba(255,255,255,.15);
+ border-radius: 4px;
+ background: rgba(0,0,0,.3);
+ color: var(--text, #e6ebf4);
+ font-size: 0.9rem;
+}
+
+.history-filter-search:focus {
+ outline: none;
+ border-color: var(--accent, #9bb7ff);
+}
+
+.history-error {
+ padding: 1rem 1.5rem;
+ background: rgba(183, 42, 42, 0.2);
+ color: #ff6b6b;
+ border-bottom: 1px solid rgba(255,255,255,.1);
+}
+
+.history-modal-content {
+ flex: 1;
+ overflow-y: auto;
+ padding: 0;
+}
+
+.history-loading,
+.history-empty {
+ padding: 2rem;
+ text-align: center;
+ color: var(--muted, #9aa8c2);
+}
+
+.history-table {
+ width: 100%;
+ border-collapse: collapse;
+}
+
+.history-table thead {
+ position: sticky;
+ top: 0;
+ background: var(--panel, #0e1116);
+ z-index: 10;
+}
+
+.history-table th {
+ padding: 1rem 0.75rem;
+ text-align: left;
+ font-weight: 600;
+ border-bottom: 2px solid rgba(255,255,255,.1);
+ color: var(--text, #e6ebf4);
+ background: var(--panel, #0e1116);
+}
+
+.history-th-timestamp {
+ width: 150px;
+}
+
+.history-th-action {
+ width: 100px;
+}
+
+.history-th-item {
+ width: 200px;
+}
+
+.history-table .history-item-name {
+ word-wrap: break-word;
+ overflow-wrap: break-word;
+ word-break: break-word;
+ white-space: normal;
+ max-width: 200px;
+}
+
+
+.history-th-user {
+ width: 200px;
+}
+
+.history-th-actions {
+ width: 100px;
+ text-align: center;
+}
+
+.history-modal-pagination {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 1rem 1.5rem;
+ border-top: 1px solid rgba(255,255,255,.1);
+}
+
+.history-pagination-btn {
+ padding: 0.5rem 1rem;
+ background: rgba(0,0,0,.3);
+ color: var(--text, #e6ebf4);
+ border: 1px solid rgba(255,255,255,.15);
+ border-radius: 4px;
+ cursor: pointer;
+ font-size: 0.9rem;
+}
+
+.history-pagination-btn:hover:not(:disabled) {
+ background: rgba(255,255,255,.1);
+}
+
+.history-pagination-btn:disabled {
+ background: rgba(0,0,0,.2);
+ color: var(--muted, #9aa8c2);
+ border-color: rgba(255,255,255,.05);
+ cursor: not-allowed;
+}
+
+.history-pagination-info {
+ color: var(--text, #e6ebf4);
+ font-size: 0.9rem;
+}
diff --git a/milventory/src/components/History/HistoryModal.js b/milventory/src/components/History/HistoryModal.js
new file mode 100644
index 0000000..668c73b
--- /dev/null
+++ b/milventory/src/components/History/HistoryModal.js
@@ -0,0 +1,282 @@
+import React, { useState, useEffect, useCallback } from 'react';
+import { api, locationHistory, historyUndoAllowsDiscard } from '../../api';
+import { useInventory } from '../../context/InventoryContext';
+import { useBlockingDialog } from '../Common/BlockingDialogContext';
+import HistoryTableRow from './HistoryTableRow';
+import './HistoryModal.css';
+
+const HistoryModal = ({ isOpen, onClose, isAdmin = false }) => {
+ const { showConfirm } = useBlockingDialog();
+ const { reloadMasterItems, reloadSupplyLocations } = useInventory();
+ const [history, setHistory] = useState([]);
+ const [loading, setLoading] = useState(false);
+ const [error, setError] = useState(null);
+ const [filters, setFilters] = useState({
+ action_type: '',
+ search: '',
+ limit: 200,
+ offset: 0
+ });
+ const [total, setTotal] = useState(0);
+
+ useEffect(() => {
+ if (isOpen) {
+ loadHistory();
+ }
+ }, [isOpen, filters]);
+
+ const loadHistory = async () => {
+ setLoading(true);
+ setError(null);
+ try {
+ // Fetch both supply history and location history in parallel
+ const [supplyResponse, locationData] = await Promise.all([
+ api.getSupplyHistory({
+ action_type: filters.action_type || undefined,
+ limit: filters.limit,
+ offset: filters.offset
+ }),
+ locationHistory.getAll({
+ limit: filters.limit,
+ offset: filters.offset
+ })
+ ]);
+
+ // Mark supply history entries (guard against missing or non-array response)
+ const supplyHistory = Array.isArray(supplyResponse?.history)
+ ? supplyResponse.history.map(entry => ({
+ ...entry,
+ historyType: 'supply'
+ }))
+ : [];
+
+ // Mark location history entries
+ const locHistory = Array.isArray(locationData) ? locationData.map(entry => ({
+ ...entry,
+ historyType: 'location'
+ })) : [];
+
+ // Combine and sort by timestamp (newest first)
+ let combined = [...supplyHistory, ...locHistory].sort((a, b) => {
+ const dateA = new Date(a.changed_at || 0);
+ const dateB = new Date(b.changed_at || 0);
+ return dateB - dateA;
+ });
+
+ // No need to filter undone entries - undone actions are deleted entirely from the database
+ // See undo_location_history and undo_batch_history endpoints which DELETE entries instead of marking them undone
+
+ // Filter by action type if specified
+ if (filters.action_type) {
+ combined = combined.filter(entry => {
+ if (entry.historyType === 'location') {
+ // Map location history action types
+ if (filters.action_type === 'ADD') return entry.action_type === 'ADD';
+ if (filters.action_type === 'SUBTRACT') return entry.action_type === 'REMOVE';
+ if (filters.action_type === 'UPDATE') return entry.action_type === 'UPDATE';
+ if (filters.action_type === 'MOVE') return entry.action_type === 'MOVE';
+ return false;
+ } else {
+ return entry.action_type === filters.action_type;
+ }
+ });
+ }
+
+ // Client-side search filter
+ if (filters.search) {
+ const searchLower = filters.search.toLowerCase();
+ combined = combined.filter(entry =>
+ entry.supply_name?.toLowerCase().includes(searchLower) ||
+ entry.location_name?.toLowerCase().includes(searchLower)
+ );
+ }
+
+ // Apply pagination
+ const paginated = combined.slice(filters.offset, filters.offset + filters.limit);
+
+ setHistory(paginated);
+ setTotal(combined.length);
+ } catch (err) {
+ setError(err.message || 'Failed to load history');
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ const handleUndo = async (entry) => {
+ const historyId = entry.id;
+ const historyType = entry.historyType;
+
+ if (historyType === 'supply' && entry.undo_removes_log_only) {
+ const ok = await showConfirm(
+ 'The catalog item for this history line is no longer linked (for example, the item was deleted). This will only remove this row from the history log. Inventory and the catalog will not be changed.',
+ {
+ title: 'Remove history entry only',
+ confirmLabel: 'Remove from history',
+ cancelLabel: 'Cancel',
+ danger: true
+ }
+ );
+ if (!ok) return;
+ }
+
+ try {
+ if (historyType === 'location') {
+ await locationHistory.undo(historyId);
+ if (reloadSupplyLocations) {
+ await reloadSupplyLocations();
+ }
+ } else {
+ await api.undoSupplyHistory(historyId);
+ await reloadMasterItems();
+ if (reloadSupplyLocations) {
+ await reloadSupplyLocations();
+ }
+ }
+ await loadHistory();
+ } catch (err) {
+ if (historyUndoAllowsDiscard(err)) {
+ const remove = await showConfirm(
+ `${err.message}\n\nRemove this history entry from the log only? Inventory and catalog will stay as they are now.`,
+ {
+ title: 'Cannot undo',
+ confirmLabel: 'Remove from history',
+ cancelLabel: 'Close',
+ danger: true
+ }
+ );
+ if (remove) {
+ try {
+ if (historyType === 'location') {
+ await locationHistory.discard(historyId);
+ if (reloadSupplyLocations) await reloadSupplyLocations();
+ } else {
+ await api.discardSupplyHistory(historyId);
+ await reloadMasterItems();
+ if (reloadSupplyLocations) await reloadSupplyLocations();
+ }
+ await loadHistory();
+ } catch (e2) {
+ setError(e2.message || 'Failed to remove history entry');
+ }
+ }
+ } else {
+ setError(err.message || 'Failed to undo action');
+ }
+ }
+ };
+
+ const handleKeyDown = useCallback((e) => {
+ if (e.key === 'Escape') {
+ onClose();
+ }
+ }, [onClose]);
+
+ useEffect(() => {
+ if (isOpen) {
+ document.addEventListener('keydown', handleKeyDown);
+ return () => {
+ document.removeEventListener('keydown', handleKeyDown);
+ };
+ }
+ }, [isOpen, handleKeyDown]);
+
+ if (!isOpen) return null;
+
+ return (
+ e.target === e.currentTarget && onClose()}>
+
+
+
Item History
+ ×
+
+
+
+
+
+ setFilters({ ...filters, search: e.target.value, offset: 0 })}
+ className="history-filter-search"
+ />
+
+
+ {error && (
+
+ {error}
+
+ )}
+
+
+ {loading ? (
+
Loading history...
+ ) : history.length === 0 ? (
+
No history entries found
+ ) : (
+
+
+
+ | Timestamp |
+ Action |
+ Item Name |
+ Changes |
+ Changed By |
+ Actions |
+
+
+
+ {history.map((entry, index) => (
+ handleUndo(entry)}
+ />
+ ))}
+
+
+ )}
+
+
+ {total > filters.limit && (
+
+ setFilters({ ...filters, offset: Math.max(0, filters.offset - filters.limit) })}
+ disabled={filters.offset === 0}
+ className="history-pagination-btn"
+ >
+ Previous
+
+
+ Showing {filters.offset + 1} - {Math.min(filters.offset + filters.limit, total)} of {total}
+
+ setFilters({ ...filters, offset: filters.offset + filters.limit })}
+ disabled={filters.offset + filters.limit >= total}
+ className="history-pagination-btn"
+ >
+ Next
+
+
+ )}
+
+
+ );
+};
+
+export default HistoryModal;
diff --git a/milventory/src/components/History/HistoryTab.js b/milventory/src/components/History/HistoryTab.js
new file mode 100644
index 0000000..e7a79f3
--- /dev/null
+++ b/milventory/src/components/History/HistoryTab.js
@@ -0,0 +1,227 @@
+import React, { useState, useEffect } from 'react';
+import { formatEasternDateOnly } from '../../utils/appTimeZone';
+import { locationHistory, historyUndoAllowsDiscard } from '../../api';
+import { useInventory } from '../../context/InventoryContext';
+import { useBlockingDialog } from '../Common/BlockingDialogContext';
+
+const HistoryTab = () => {
+ const { showAlert, showConfirm } = useBlockingDialog();
+ const [history, setHistory] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+ const [undoing, setUndoing] = useState(new Set()); // Track which IDs are being undone
+ const { reloadSupplyLocations } = useInventory();
+
+ useEffect(() => {
+ loadHistory();
+ // Auto-refresh every 30 seconds
+ const interval = setInterval(loadHistory, 30000);
+ return () => clearInterval(interval);
+ }, []);
+
+ const loadHistory = async () => {
+ try {
+ setLoading(true);
+ setError(null);
+ const data = await locationHistory.getAll({ limit: 100 });
+ setHistory(data);
+ } catch (err) {
+ setError(err.message || 'Failed to load history');
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ const handleUndo = async (historyId) => {
+ if (undoing.has(historyId)) return;
+
+ try {
+ setUndoing(prev => new Set(prev).add(historyId));
+ await locationHistory.undo(historyId);
+ // Reload history and inventory
+ await loadHistory();
+ if (reloadSupplyLocations) {
+ await reloadSupplyLocations();
+ }
+ } catch (err) {
+ if (historyUndoAllowsDiscard(err)) {
+ const remove = await showConfirm(
+ `${err.message}\n\nRemove this history entry from the log only? Inventory will stay as it is now.`,
+ {
+ title: 'Cannot undo',
+ confirmLabel: 'Remove from history',
+ cancelLabel: 'Close',
+ danger: true
+ }
+ );
+ if (remove) {
+ try {
+ await locationHistory.discard(historyId);
+ await loadHistory();
+ if (reloadSupplyLocations) await reloadSupplyLocations();
+ } catch (e2) {
+ await showAlert(e2.message || 'Failed to remove history entry', { title: 'Error' });
+ }
+ }
+ } else {
+ await showAlert(err.message || 'Failed to undo action', { title: 'Undo failed' });
+ }
+ } finally {
+ setUndoing(prev => {
+ const next = new Set(prev);
+ next.delete(historyId);
+ return next;
+ });
+ }
+ };
+
+ const formatAction = (entry) => {
+ switch (entry.action_type) {
+ case 'ADD':
+ return 'Added';
+ case 'REMOVE':
+ return 'Removed';
+ case 'UPDATE':
+ return 'Updated';
+ case 'MOVE':
+ return 'Moved';
+ case 'CASCADED_SUBTRACT':
+ return 'Cascaded Subtract';
+ default:
+ return entry.action_type;
+ }
+ };
+
+ const formatAmount = (entry) => {
+ if (entry.action_type === 'ADD') {
+ const added = entry.new_amount - (entry.old_amount || 0);
+ return `+${added}`;
+ } else if (entry.action_type === 'REMOVE') {
+ const removed = entry.old_amount - (entry.new_amount || 0);
+ return `-${removed}`;
+ } else if (entry.action_type === 'UPDATE') {
+ const diff = entry.new_amount - entry.old_amount;
+ return diff >= 0 ? `+${diff}` : `${diff}`;
+ } else if (entry.action_type === 'MOVE') {
+ if (entry.action_type === 'REMOVE') {
+ // This is the REMOVE leg of a MOVE
+ return `→ ${entry.related_location || ''}`;
+ } else {
+ // This is the ADD leg of a MOVE
+ return `← ${entry.related_location || ''}`;
+ }
+ }
+ return '';
+ };
+
+ const formatTime = (timestamp) => {
+ if (!timestamp) return 'Unknown';
+ const date = new Date(timestamp);
+ const now = new Date();
+ const diffMs = now - date;
+ const diffMins = Math.floor(diffMs / 60000);
+ const diffHours = Math.floor(diffMs / 3600000);
+ const diffDays = Math.floor(diffMs / 86400000);
+
+ if (diffMins < 1) return 'Just now';
+ if (diffMins < 60) return `${diffMins} min${diffMins !== 1 ? 's' : ''} ago`;
+ if (diffHours < 24) return `${diffHours} hour${diffHours !== 1 ? 's' : ''} ago`;
+ if (diffDays < 7) return `${diffDays} day${diffDays !== 1 ? 's' : ''} ago`;
+ return formatEasternDateOnly(date);
+ };
+
+ const formatLocation = (entry) => {
+ let loc = entry.location_name;
+ if (entry.shelf !== null && entry.shelf !== undefined) {
+ loc += ` (Shelf ${Number(entry.shelf) + 1})`;
+ }
+ return loc;
+ };
+
+ if (loading && history.length === 0) {
+ return (
+
+ );
+ }
+
+ if (error) {
+ return (
+
+ );
+ }
+
+ return (
+
+
+ {history.length === 0 ? (
+
No history available
+ ) : (
+
+
+
+ | Time |
+ Item |
+ Action |
+ Location |
+ Amount |
+ By |
+ Undo |
+
+
+
+ {history.map((entry) => (
+
+ | {formatTime(entry.changed_at)} |
+ {entry.supply_name} |
+ {formatAction(entry)} |
+ {formatLocation(entry)} |
+ {formatAmount(entry)} |
+ {entry.changed_by_name || entry.changed_by || 'Unknown'} |
+
+ {entry.action_type === 'CASCADED_SUBTRACT' ? (
+
+ Use restore
+
+ ) : (
+ handleUndo(entry.id)}
+ disabled={undoing.has(entry.id)}
+ style={{
+ padding: '0.25rem 0.5rem',
+ fontSize: '0.85rem',
+ background: 'var(--accent)',
+ color: 'white',
+ border: 'none',
+ borderRadius: '4px',
+ cursor: undoing.has(entry.id) ? 'not-allowed' : 'pointer',
+ opacity: undoing.has(entry.id) ? 0.6 : 1
+ }}
+ >
+ {undoing.has(entry.id) ? 'Undoing...' : 'Undo'}
+
+ )}
+ |
+
+ ))}
+
+
+ )}
+
+
+ );
+};
+
+export default HistoryTab;
+
+
diff --git a/milventory/src/components/History/HistoryTableRow.css b/milventory/src/components/History/HistoryTableRow.css
new file mode 100644
index 0000000..16e6fe8
--- /dev/null
+++ b/milventory/src/components/History/HistoryTableRow.css
@@ -0,0 +1,135 @@
+.history-row {
+ border-bottom: 1px solid rgba(255,255,255,.1);
+}
+
+.history-row:hover {
+ background-color: rgba(255,255,255,.05);
+}
+
+.history-timestamp {
+ padding: 0.75rem;
+ font-size: 0.9rem;
+ color: var(--muted, #9aa8c2);
+ white-space: nowrap;
+ width: 150px;
+}
+
+.history-action {
+ padding: 0.75rem;
+ width: 100px;
+}
+
+.history-badge {
+ display: inline-block;
+ padding: 0.25rem 0.5rem;
+ border-radius: 4px;
+ font-size: 0.85rem;
+ font-weight: 600;
+ text-transform: uppercase;
+}
+
+.history-badge-create {
+ background-color: #4caf50;
+ color: white;
+}
+
+.history-badge-update {
+ background-color: #2196f3;
+ color: white;
+}
+
+.history-badge-delete {
+ background-color: #f44336;
+ color: white;
+}
+
+.history-badge-default {
+ background-color: var(--table, #e0e0e0);
+ color: var(--text, #333);
+}
+
+.history-item-name {
+ padding: 0.75rem;
+ font-weight: 500;
+ width: 200px;
+ word-wrap: break-word;
+ overflow-wrap: break-word;
+ word-break: break-word;
+ white-space: normal;
+ max-width: 200px;
+ color: var(--text, #e6ebf4);
+}
+
+.history-changes {
+ padding: 0.75rem;
+ flex: 1;
+ color: var(--muted, #9aa8c2);
+ font-size: 0.9rem;
+}
+
+.history-changed-by {
+ padding: 0.75rem;
+ width: 200px;
+ color: var(--muted, #9aa8c2);
+}
+
+.history-actions {
+ padding: 0.75rem;
+ width: 100px;
+ text-align: center;
+}
+
+.history-undo-btn {
+ padding: 0.4rem 0.8rem;
+ background: rgba(0,0,0,.3);
+ color: var(--text, #e6ebf4);
+ border: 1px solid rgba(255,255,255,.15);
+ border-radius: 4px;
+ cursor: pointer;
+ font-size: 0.85rem;
+ transition: background-color 0.2s;
+}
+
+.history-undo-btn:hover:not(:disabled) {
+ background: rgba(255,255,255,.1);
+}
+
+.history-undo-btn:disabled {
+ background: rgba(0,0,0,.2);
+ color: var(--muted, #9aa8c2);
+ border-color: rgba(255,255,255,.05);
+ cursor: not-allowed;
+}
+
+.history-undo-confirm {
+ display: flex;
+ gap: 0.5rem;
+}
+
+.history-undo-confirm-btn {
+ padding: 0.4rem 0.8rem;
+ background: #1e8a4a;
+ color: white;
+ border: none;
+ border-radius: 4px;
+ cursor: pointer;
+ font-size: 0.85rem;
+}
+
+.history-undo-confirm-btn:hover {
+ background: #1a7a3f;
+}
+
+.history-undo-cancel-btn {
+ padding: 0.4rem 0.8rem;
+ background: rgba(0,0,0,.3);
+ color: var(--text, #e6ebf4);
+ border: 1px solid rgba(255,255,255,.15);
+ border-radius: 4px;
+ cursor: pointer;
+ font-size: 0.85rem;
+}
+
+.history-undo-cancel-btn:hover {
+ background: rgba(255,255,255,.1);
+}
diff --git a/milventory/src/components/History/HistoryTableRow.js b/milventory/src/components/History/HistoryTableRow.js
new file mode 100644
index 0000000..f9a2e7f
--- /dev/null
+++ b/milventory/src/components/History/HistoryTableRow.js
@@ -0,0 +1,207 @@
+import React, { useState } from 'react';
+import { formatEasternDateTime } from '../../utils/appTimeZone';
+import './HistoryTableRow.css';
+
+const HistoryTableRow = ({ entry, onUndo, index = 0, isAdmin = false }) => {
+ const [showConfirm, setShowConfirm] = useState(false);
+
+ const formatDate = (dateString) => {
+ if (!dateString) return 'N/A';
+ const date = new Date(dateString);
+ if (Number.isNaN(date.getTime())) return 'N/A';
+ return formatEasternDateTime(date);
+ };
+
+ const getActionBadgeClass = (actionType) => {
+ switch (actionType) {
+ case 'CREATE':
+ return 'history-badge-create';
+ case 'UPDATE':
+ return 'history-badge-update';
+ case 'DELETE':
+ return 'history-badge-delete';
+ case 'ADD':
+ return 'history-badge-create';
+ case 'REMOVE':
+ return 'history-badge-delete';
+ case 'MOVE':
+ return 'history-badge-update';
+ case 'CASCADED_SUBTRACT':
+ return 'history-badge-default';
+ default:
+ return 'history-badge-default';
+ }
+ };
+
+ const formatActionType = (actionType) => {
+ switch (actionType) {
+ case 'REMOVE':
+ return 'SUBTRACT';
+ default:
+ return actionType;
+ }
+ };
+
+ const formatChangesSummary = () => {
+ // Handle location history entries
+ if (entry.historyType === 'location') {
+ const changes = [];
+
+ if (entry.location_name) {
+ let location = entry.location_name;
+ if (entry.shelf !== null && entry.shelf !== undefined) {
+ location += ` (Shelf ${Number(entry.shelf) + 1})`;
+ }
+ changes.push(`Location: ${location}`);
+ }
+
+ if (entry.action_type === 'ADD') {
+ const added = entry.new_amount - (entry.old_amount || 0);
+ changes.push(`Added: +${added}`);
+ } else if (entry.action_type === 'REMOVE') {
+ const removed = entry.old_amount - (entry.new_amount || 0);
+ changes.push(`Subtracted: -${removed}`);
+ } else if (entry.action_type === 'UPDATE') {
+ const diff = entry.new_amount - entry.old_amount;
+ changes.push(`Amount: ${entry.old_amount || 0} → ${entry.new_amount || 0} (${diff >= 0 ? '+' : ''}${diff})`);
+ } else if (entry.action_type === 'MOVE') {
+ if (entry.related_location) {
+ changes.push(`Moved to: ${entry.related_location}`);
+ }
+ }
+
+ return changes.join('; ') || 'Location change';
+ }
+
+ // Handle supply history entries
+ const changes = [];
+
+ if (entry.old_name !== entry.new_name) {
+ changes.push(`Name: "${entry.old_name || 'N/A'}" → "${entry.new_name || 'N/A'}"`);
+ }
+
+ if (entry.old_description !== entry.new_description) {
+ changes.push('Description changed');
+ }
+
+ if (entry.old_image !== entry.new_image) {
+ changes.push('Image changed');
+ }
+
+ if (entry.old_last_order_date !== entry.new_last_order_date) {
+ changes.push('Last order date changed');
+ }
+
+ if (entry.team_changes && entry.team_changes.length > 0) {
+ const added = entry.team_changes.filter(t => t.action === 'ADDED').map(t => t.team_name);
+ const removed = entry.team_changes.filter(t => t.action === 'REMOVED').map(t => t.team_name);
+ const teamChanges = [];
+ if (added.length > 0) teamChanges.push(`${added.join(', ')} added`);
+ if (removed.length > 0) teamChanges.push(`${removed.join(', ')} removed`);
+ if (teamChanges.length > 0) {
+ changes.push(`Teams: ${teamChanges.join(', ')}`);
+ }
+ }
+
+ if (entry.category_changes && entry.category_changes.length > 0) {
+ const added = entry.category_changes.filter(c => c.action === 'ADDED').map(c => c.category_id);
+ const removed = entry.category_changes.filter(c => c.action === 'REMOVED').map(c => c.category_id);
+ const catChanges = [];
+ if (added.length > 0) catChanges.push(`Categories ${added.join(', ')} added`);
+ if (removed.length > 0) catChanges.push(`Categories ${removed.join(', ')} removed`);
+ if (catChanges.length > 0) {
+ changes.push(catChanges.join(', '));
+ }
+ }
+
+ if (changes.length === 0) {
+ return entry.action_type === 'CREATE' ? 'Item created' : 'No changes detected';
+ }
+
+ return changes.join('; ');
+ };
+
+ const handleUndoClick = () => {
+ if (showConfirm) {
+ onUndo(entry);
+ setShowConfirm(false);
+ } else {
+ setShowConfirm(true);
+ }
+ };
+
+ // Determine if entry can be undone
+ // Note: Undone entries are deleted entirely from the database, so we don't need to check undone status
+ // For users: only the first row (most recent on this page) can be undone
+ // For admins: all items can be undone
+ const baseCanUndo = entry.historyType === 'location'
+ ? entry.action_type !== 'CASCADED_SUBTRACT'
+ : entry.can_undo !== false;
+
+ const canUndo = isAdmin
+ ? baseCanUndo // Admins can undo all items
+ : baseCanUndo && index < 1; // Users can only undo the top row
+
+ const undoButtonTitle = !canUndo
+ ? 'Cannot undo'
+ : entry.historyType === 'supply' && entry.undo_removes_log_only
+ ? 'Removes this log line only — catalog item is not linked'
+ : 'Undo this action';
+
+ return (
+
+ | {formatDate(entry.changed_at)} |
+
+
+ {formatActionType(entry.action_type)}
+
+ |
+
+ {entry.supply_name || 'N/A'}
+ {entry.historyType === 'location' && entry.location_name && (
+
+ @ {entry.location_name}
+
+ )}
+ |
+ {formatChangesSummary()} |
+
+
+ {entry.changed_by_name || entry.changed_by || 'Unknown'}
+
+ |
+
+ {entry.action_type === 'CASCADED_SUBTRACT' ? (
+ Use restore
+ ) : showConfirm ? (
+
+
+ Confirm
+
+ setShowConfirm(false)}
+ >
+ Cancel
+
+
+ ) : (
+
+ Undo
+
+ )}
+ |
+
+ );
+};
+
+export default HistoryTableRow;
+
diff --git a/milventory/src/components/LeftPanel.js b/milventory/src/components/Layout/LeftPanel.js
similarity index 88%
rename from milventory/src/components/LeftPanel.js
rename to milventory/src/components/Layout/LeftPanel.js
index 56dbc34..3115682 100644
--- a/milventory/src/components/LeftPanel.js
+++ b/milventory/src/components/Layout/LeftPanel.js
@@ -1,6 +1,7 @@
import React, { useState } from 'react';
-import { useInventory } from '../context/InventoryContext';
-import MasterInventoryTable from './MasterInventoryTable';
+import { useInventory } from '../../context/InventoryContext';
+import { LEFT_PANE_MIN_WIDTH, LEFT_PANE_MAX_WIDTH } from '../../constants/leftPaneLayout';
+import MasterInventoryTable from '../Master/MasterInventoryTable';
const LeftPanel = () => {
const { leftPaneWidth, setLeftPaneWidth, leftPaneCollapsed, setLeftPaneCollapsed } = useInventory();
@@ -16,7 +17,7 @@ const LeftPanel = () => {
React.useEffect(() => {
const handleMouseMove = (e) => {
if (isResizing) {
- const newWidth = Math.max(200, Math.min(600, e.clientX));
+ const newWidth = Math.max(LEFT_PANE_MIN_WIDTH, Math.min(LEFT_PANE_MAX_WIDTH, e.clientX));
setLeftPaneWidth(newWidth);
}
};
diff --git a/milventory/src/components/Map.js b/milventory/src/components/Map.js
deleted file mode 100644
index a62ea05..0000000
--- a/milventory/src/components/Map.js
+++ /dev/null
@@ -1,193 +0,0 @@
-import React, { forwardRef } from 'react';
-import { useInventory } from '../context/InventoryContext';
-import MasterItemPreview from './MasterItemPreview';
-import ArrowConnections from './ArrowConnections';
-import BoxInventoryOverlay from './BoxInventoryOverlay';
-import AddModeArrow from './AddModeArrow';
-
-const SHELF_NAMES = [
- 'Shelf 6 (Top)',
- 'Shelf 5',
- 'Shelf 4',
- 'Shelf 3',
- 'Shelf 2',
- 'Shelf 1 (Bottom)'
-];
-
-const MapComponent = forwardRef((props, ref) => {
- const { worldRef, inventoryData, inventoryBounds, selectedBox, currentDragOverBox, handleBoxClick, handleBoxHover, handleBoxHoverLeave, handleDrop, setCurrentDragOverBox, addModeItem, addModePending, handleBoxClickAddMode, boxHasAnyPending, selectedMasterItem, getItemLocations } = useInventory();
-
- // Compute highlighted box set from selected Master item (for React-managed className)
- const highlightedBoxes = selectedMasterItem ? new Set(getItemLocations(selectedMasterItem)) : null;
-
- const handleBoxMouseEnter = (e, boxTitle) => {
- const rect = e.currentTarget.getBoundingClientRect();
- const wrap = e.currentTarget.closest('.wrap');
- if (wrap) {
- const wrapRect = wrap.getBoundingClientRect();
- const x = rect.left + rect.width / 2 - wrapRect.left;
- const y = rect.top + rect.height / 2 - wrapRect.top;
- handleBoxHover(boxTitle, x, y);
- }
- };
-
- const handleDragEnter = (e, boxTitle) => {
- e.preventDefault();
- if (boxTitle !== currentDragOverBox) {
- setCurrentDragOverBox(boxTitle);
- }
- };
-
- const handleDragOver = (e, boxTitle) => {
- e.preventDefault();
- e.dataTransfer.dropEffect = 'move';
- if (boxTitle !== currentDragOverBox) {
- setCurrentDragOverBox(boxTitle);
- }
- };
-
- const handleDragLeave = (e, boxTitle) => {
- const target = e.currentTarget;
- const clientX = e.clientX;
- const clientY = e.clientY;
-
- setTimeout(() => {
- if (!target) return;
- const rect = target.getBoundingClientRect();
- if (clientX < rect.left || clientX > rect.right || clientY < rect.top || clientY > rect.bottom) {
- if (currentDragOverBox === boxTitle) {
- setCurrentDragOverBox(null);
- }
- }
- }, 0);
- };
-
- const handleDropBox = (e, boxTitle) => {
- e.preventDefault();
- e.stopPropagation();
- handleDrop(boxTitle);
- setCurrentDragOverBox(null);
- };
-
- const boxes = Array.from(inventoryData.values());
-
- const viewBox = inventoryBounds?.viewBox
- ? `${inventoryBounds.viewBox.x} ${inventoryBounds.viewBox.y} ${inventoryBounds.viewBox.width} ${inventoryBounds.viewBox.height}`
- : "0 0 2000 2200";
-
- const roomBounds = inventoryBounds?.room || {
- x: 80,
- y: 80,
- width: 1840,
- height: 2000,
- rx: 18,
- ry: 18
- };
-
- // All Tall Cabinets get shelf overlays in add mode
- const tallCabinets = addModeItem
- ? boxes.filter(b => b.title.startsWith('Tall Cabinet'))
- : [];
-
- return (
- <>
-
-
- >
- );
-});
-
-MapComponent.displayName = 'Map';
-
-export default MapComponent;
diff --git a/milventory/src/components/AddModeArrow.js b/milventory/src/components/Map/AddModeArrow.js
similarity index 68%
rename from milventory/src/components/AddModeArrow.js
rename to milventory/src/components/Map/AddModeArrow.js
index 126ffbb..fc9f52a 100644
--- a/milventory/src/components/AddModeArrow.js
+++ b/milventory/src/components/Map/AddModeArrow.js
@@ -1,5 +1,5 @@
import React, { useEffect, useRef, useCallback } from 'react';
-import { useInventory } from '../context/InventoryContext';
+import { useInventory } from '../../context/InventoryContext';
import * as d3 from 'd3';
const AddModeArrow = () => {
@@ -51,12 +51,49 @@ const AddModeArrow = () => {
path.moveTo(previewX, previewY);
path.bezierCurveTo(cp1x, cp1y, cp2x, cp2y, mx, my);
- // Arrowhead
- const angle = Math.atan2(dy, dx);
+ // Arrowhead - calculate angle from curve tangent at endpoint
+ // Sample the curve near the endpoint to get the actual curve direction
+ const t1 = 0.95; // Sample point before endpoint
+ const t2 = 1.0; // Endpoint (mouse position)
+
+ // Cubic Bezier evaluation: P(t) = (1-t)³P₀ + 3(1-t)²tP₁ + 3(1-t)t²P₂ + t³P₃
+ const evalBezier = (t, p0, p1, p2, p3) => {
+ const mt = 1 - t;
+ const mt2 = mt * mt;
+ const mt3 = mt2 * mt;
+ const t2 = t * t;
+ const t3 = t2 * t;
+ return mt3 * p0 + 3 * mt2 * t * p1 + 3 * mt * t2 * p2 + t3 * p3;
+ };
+
+ // Sample the curve at t1 and t2
+ const x1 = evalBezier(t1, previewX, cp1x, cp2x, mx);
+ const y1 = evalBezier(t1, previewY, cp1y, cp2y, my);
+ const x2 = mx; // t2 = 1.0, endpoint (mouse position)
+ const y2 = my;
+
+ // Calculate the direction vector from sampled point to endpoint
+ const tangentDx = x2 - x1;
+ const tangentDy = y2 - y1;
+
+ // Calculate angle from this direction vector
+ // If the vector is too small (degenerate case), fall back to derivative formula
+ const tangentLength = Math.sqrt(tangentDx * tangentDx + tangentDy * tangentDy);
+ let angle;
+ if (tangentLength < 0.001) {
+ // Degenerate case: use the derivative formula P'(1) = 3(P₃ - P₂)
+ const derivDx = 3 * (mx - cp2x);
+ const derivDy = 3 * (my - cp2y);
+ angle = Math.atan2(derivDy, derivDx);
+ } else {
+ // Use the sampled direction
+ angle = Math.atan2(tangentDy, tangentDx);
+ }
+
const arrowLength = 12;
const arrowAngle = Math.PI / 6;
- const arrowX = mx - Math.cos(angle) * 20;
- const arrowY = my - Math.sin(angle) * 20;
+ const arrowX = mx;
+ const arrowY = my;
path.moveTo(arrowX, arrowY);
path.lineTo(
diff --git a/milventory/src/components/AddModePreview.js b/milventory/src/components/Map/AddModePreview.js
similarity index 87%
rename from milventory/src/components/AddModePreview.js
rename to milventory/src/components/Map/AddModePreview.js
index 6d10d73..f187828 100644
--- a/milventory/src/components/AddModePreview.js
+++ b/milventory/src/components/Map/AddModePreview.js
@@ -1,5 +1,6 @@
import React from 'react';
-import { useInventory } from '../context/InventoryContext';
+import { useInventory } from '../../context/InventoryContext';
+import { getShelfCount, getShelfLabel } from '../../utils/shelfLabels';
const AddModePreview = () => {
const {
@@ -12,7 +13,8 @@ const AddModePreview = () => {
leftPaneWidth,
leftPaneCollapsed,
resolveMasterItem,
- addModePreviewRef
+ addModePreviewRef,
+ inventoryData
} = useInventory();
const item = addModeItem ? resolveMasterItem(addModeItem) : null;
@@ -36,24 +38,18 @@ const AddModePreview = () => {
if (!addModeItem || !item) return null;
- const SHELF_NAMES = [
- 'Shelf 6 (Top)',
- 'Shelf 5',
- 'Shelf 4',
- 'Shelf 3',
- 'Shelf 2',
- 'Shelf 1 (Bottom)'
- ];
-
const pendingCount = Array.from(addModePending.values()).reduce((sum, qty) => sum + qty, 0);
const pendingEntries = Array.from(addModePending.entries());
- // Format a pending key for display
+ // Format a pending key for display. Shelf labels are derived from the target
+ // box's current shelf_count so the numbering matches the rest of the UI.
const formatPendingKey = (key) => {
const parts = key.split('||');
if (parts.length > 1) {
const shelfIdx = parseInt(parts[1], 10);
- return `${parts[0]} → ${SHELF_NAMES[shelfIdx] || `Shelf ${shelfIdx}`}`;
+ const target = inventoryData?.get(parts[0]);
+ const shelfCount = getShelfCount(target);
+ return `${parts[0]} → ${getShelfLabel(shelfIdx, shelfCount)}`;
}
return parts[0];
};
diff --git a/milventory/src/components/Map/ArrowConnections.js b/milventory/src/components/Map/ArrowConnections.js
new file mode 100644
index 0000000..2a4c737
--- /dev/null
+++ b/milventory/src/components/Map/ArrowConnections.js
@@ -0,0 +1,265 @@
+import React, { useEffect, useRef, useCallback } from 'react';
+import { useInventory, MASTER_ARROWS_REDRAW_EVENT } from '../../context/InventoryContext';
+import * as d3 from 'd3';
+import { hasShelves, getShelfCount } from '../../utils/shelfLabels';
+
+const ArrowConnections = () => {
+ const {
+ selectedMasterItem,
+ getItemLocations,
+ inventoryData,
+ svgRef,
+ worldRef,
+ moveModeItem,
+ subtractModeItem,
+ freePlaceModeItem,
+ freePlacementsBySupplyPublicId,
+ freePlaceVisualDots,
+ subtractModeVisualFreeDots,
+ moveModeVisualFreeDots,
+ moveModeDotDragLiveByIdRef
+ } = useInventory();
+
+ const arrowsRef = useRef(null);
+ const rafRef = useRef(null);
+
+ // Convert screen coords to world-group coords (accounts for viewBox + D3 zoom)
+ const screenToWorld = useCallback((screenX, screenY) => {
+ const svg = svgRef.current;
+ if (!svg) return { x: 0, y: 0 };
+ const world = svg.querySelector('#world');
+ if (!world) return { x: 0, y: 0 };
+ const ctm = world.getScreenCTM();
+ if (!ctm) return { x: 0, y: 0 };
+ const pt = svg.createSVGPoint();
+ pt.x = screenX;
+ pt.y = screenY;
+ const worldPt = pt.matrixTransform(ctm.inverse());
+ return { x: worldPt.x, y: worldPt.y };
+ }, [svgRef]);
+
+ const drawArrows = useCallback(() => {
+ if (!arrowsRef.current || !svgRef.current) return;
+
+ const arrowsGroup = arrowsRef.current;
+
+ // Clear existing arrows
+ arrowsGroup.innerHTML = '';
+
+ const arrowItem =
+ subtractModeItem || moveModeItem || freePlaceModeItem || selectedMasterItem;
+ if (!arrowItem) return;
+
+ const locations = getItemLocations(arrowItem);
+ let freeDots = freePlacementsBySupplyPublicId.get(arrowItem) || [];
+ if (freePlaceModeItem === arrowItem && freePlaceVisualDots != null) {
+ freeDots = freePlaceVisualDots;
+ } else if (subtractModeItem === arrowItem && subtractModeVisualFreeDots != null) {
+ freeDots = subtractModeVisualFreeDots;
+ } else if (moveModeItem === arrowItem && moveModeVisualFreeDots != null) {
+ freeDots = moveModeVisualFreeDots.map((d) => {
+ const live = moveModeDotDragLiveByIdRef.current.get(d.id);
+ return live ? { ...d, x: live.x, y: live.y } : d;
+ });
+ }
+ if (locations.length === 0 && freeDots.length === 0) return;
+
+ // Find preview pane position in screen coordinates
+ const previewPane = document.querySelector('.master-preview-pane-overlay');
+ if (!previewPane) return;
+
+ const previewRect = previewPane.getBoundingClientRect();
+
+ // Arrow starts from right edge of preview pane — convert to world coords
+ const preview = screenToWorld(previewRect.right, previewRect.top + 50);
+ const previewX = preview.x;
+ const previewY = preview.y;
+
+ // Draw arrows to each location box (or move boxes if in move mode)
+ locations.forEach((boxTitle) => {
+ const boxData = inventoryData.get(boxTitle);
+ if (!boxData) return;
+
+ let boxX, boxY;
+
+ if (moveModeItem && moveModeItem === arrowItem) {
+ // In move mode, point to the little red boxes
+ const matchingItems = boxData.inventory.filter(item => item.name === moveModeItem);
+ if (matchingItems.length === 0) return;
+
+ const isShelved = hasShelves(boxData);
+
+ if (isShelved) {
+ // For shelved boxes, point to each shelf's move-handle
+ const shelfCount = getShelfCount(boxData);
+ matchingItems.forEach(item => {
+ const shelfIdx = item.shelf ?? 0;
+ const shelfH = boxData.height / shelfCount;
+ const shelfY = boxData.y + shelfIdx * shelfH;
+
+ const boxSize = Math.min(shelfH * 0.6, boxData.width * 0.4, 60);
+ boxX = boxData.x + (boxData.width - boxSize) / 2 + boxSize / 2;
+ boxY = shelfY + (shelfH - boxSize) / 2 + boxSize / 2;
+
+ drawArrowToPoint(previewX, previewY, boxX, boxY, arrowsGroup);
+ });
+ return; // Shelf arrows already drawn; skip the whole-box arrow.
+ } else {
+ // For regular boxes, point to the center move box
+ const boxSize = Math.min(boxData.height * 0.5, boxData.width * 0.4, 60);
+ boxX = boxData.x + (boxData.width - boxSize) / 2 + boxSize / 2;
+ boxY = boxData.y + (boxData.height - boxSize) / 2 + boxSize / 2;
+ }
+ } else {
+ // Normal mode: point to center of inventory box
+ boxX = boxData.x + boxData.width / 2;
+ boxY = boxData.y + boxData.height / 2;
+ }
+
+ drawArrowToPoint(previewX, previewY, boxX, boxY, arrowsGroup);
+ });
+
+ freeDots.forEach((p) => {
+ drawArrowToPoint(previewX, previewY, p.x, p.y, arrowsGroup);
+ });
+ }, [
+ selectedMasterItem,
+ subtractModeItem,
+ moveModeItem,
+ freePlaceModeItem,
+ freePlaceVisualDots,
+ subtractModeVisualFreeDots,
+ moveModeVisualFreeDots,
+ moveModeDotDragLiveByIdRef,
+ getItemLocations,
+ inventoryData,
+ svgRef,
+ screenToWorld,
+ freePlacementsBySupplyPublicId
+ ]);
+
+ const drawArrowToPoint = (previewX, previewY, boxX, boxY, arrowsGroup) => {
+
+ const path = d3.path();
+ const dx = boxX - previewX;
+ const dy = boxY - previewY;
+
+ const cp1x = previewX + dx * 0.3;
+ const cp1y = previewY;
+ const cp2x = boxX - dx * 0.3;
+ const cp2y = boxY;
+
+ path.moveTo(previewX, previewY);
+ path.bezierCurveTo(cp1x, cp1y, cp2x, cp2y, boxX, boxY);
+
+ // Arrowhead - calculate angle from curve tangent at endpoint
+ // For a cubic Bezier P(t), the derivative at t=1 is: P'(1) = 3(P₃ - P₂)
+ // However, to get the visual direction of the curve as it enters the box,
+ // we sample two points very close to the endpoint to get the actual curve direction
+ const t1 = 0.95; // Sample point before endpoint
+ const t2 = 1.0; // Endpoint
+
+ // Cubic Bezier evaluation: P(t) = (1-t)³P₀ + 3(1-t)²tP₁ + 3(1-t)t²P₂ + t³P₃
+ const evalBezier = (t, p0, p1, p2, p3) => {
+ const mt = 1 - t;
+ const mt2 = mt * mt;
+ const mt3 = mt2 * mt;
+ const t2 = t * t;
+ const t3 = t2 * t;
+ return mt3 * p0 + 3 * mt2 * t * p1 + 3 * mt * t2 * p2 + t3 * p3;
+ };
+
+ // Sample the curve at t1 and t2
+ const x1 = evalBezier(t1, previewX, cp1x, cp2x, boxX);
+ const y1 = evalBezier(t1, previewY, cp1y, cp2y, boxY);
+ const x2 = boxX; // t2 = 1.0, endpoint
+ const y2 = boxY;
+
+ // Calculate the direction vector from sampled point to endpoint
+ const tangentDx = x2 - x1;
+ const tangentDy = y2 - y1;
+
+ // Calculate angle from this direction vector
+ // If the vector is too small (degenerate case), fall back to derivative formula
+ const tangentLength = Math.sqrt(tangentDx * tangentDx + tangentDy * tangentDy);
+ let angle;
+ if (tangentLength < 0.001) {
+ // Degenerate case: use the derivative formula P'(1) = 3(P₃ - P₂)
+ const derivDx = 3 * (boxX - cp2x);
+ const derivDy = 3 * (boxY - cp2y);
+ angle = Math.atan2(derivDy, derivDx);
+ } else {
+ // Use the sampled direction
+ angle = Math.atan2(tangentDy, tangentDx);
+ }
+ const arrowLength = 12;
+ const arrowAngle = Math.PI / 6;
+ const arrowX = boxX;
+ const arrowY = boxY;
+
+ path.moveTo(arrowX, arrowY);
+ path.lineTo(
+ arrowX - arrowLength * Math.cos(angle - arrowAngle),
+ arrowY - arrowLength * Math.sin(angle - arrowAngle)
+ );
+ path.moveTo(arrowX, arrowY);
+ path.lineTo(
+ arrowX - arrowLength * Math.cos(angle + arrowAngle),
+ arrowY - arrowLength * Math.sin(angle + arrowAngle)
+ );
+
+ const pathElement = document.createElementNS('http://www.w3.org/2000/svg', 'path');
+ pathElement.setAttribute('d', path.toString());
+ pathElement.setAttribute('class', 'master-arrow-path');
+ arrowsGroup.appendChild(pathElement);
+ };
+
+ // Draw arrows when selectedMasterItem changes
+ useEffect(() => {
+ drawArrows();
+
+ return () => {
+ if (arrowsRef.current) {
+ arrowsRef.current.innerHTML = '';
+ }
+ };
+ }, [drawArrows]);
+
+ // Floor-dot drag updates a ref only; redraw arrows on custom event
+ useEffect(() => {
+ const onRedraw = () => {
+ if (rafRef.current) cancelAnimationFrame(rafRef.current);
+ rafRef.current = requestAnimationFrame(drawArrows);
+ };
+ window.addEventListener(MASTER_ARROWS_REDRAW_EVENT, onRedraw);
+ return () => window.removeEventListener(MASTER_ARROWS_REDRAW_EVENT, onRedraw);
+ }, [drawArrows]);
+
+ // Update arrows on zoom/pan via MutationObserver — direct DOM, no React state
+ useEffect(() => {
+ const arrowActive =
+ subtractModeItem || moveModeItem || freePlaceModeItem || selectedMasterItem;
+ if (!arrowActive || !worldRef.current) return;
+
+ const world = worldRef.current;
+
+ const scheduleRedraw = () => {
+ if (rafRef.current) cancelAnimationFrame(rafRef.current);
+ rafRef.current = requestAnimationFrame(drawArrows);
+ };
+
+ const observer = new MutationObserver(scheduleRedraw);
+ observer.observe(world, { attributes: true, attributeFilter: ['transform'] });
+
+ return () => {
+ observer.disconnect();
+ if (rafRef.current) cancelAnimationFrame(rafRef.current);
+ };
+ }, [selectedMasterItem, subtractModeItem, moveModeItem, freePlaceModeItem, worldRef, drawArrows]);
+
+ if (!subtractModeItem && !moveModeItem && !freePlaceModeItem && !selectedMasterItem) return null;
+
+ return ;
+};
+
+export default ArrowConnections;
diff --git a/milventory/src/components/BoxInventoryOverlay.js b/milventory/src/components/Map/BoxInventoryOverlay.js
similarity index 60%
rename from milventory/src/components/BoxInventoryOverlay.js
rename to milventory/src/components/Map/BoxInventoryOverlay.js
index ffaa7d7..45fd7cf 100644
--- a/milventory/src/components/BoxInventoryOverlay.js
+++ b/milventory/src/components/Map/BoxInventoryOverlay.js
@@ -1,34 +1,30 @@
import React, { useMemo } from 'react';
-import { useInventory } from '../context/InventoryContext';
-import { escapeHtml } from '../utils';
+import { useInventory } from '../../context/InventoryContext';
+import { escapeHtml } from '../../utils';
+import {
+ hasShelves,
+ getShelfCount,
+ getShelfLabel,
+ getShelfIndicesTopToBottom
+} from '../../utils/shelfLabels';
const BoxInventoryOverlay = () => {
- const { selectedBox, inventoryData, addModeItem } = useInventory();
-
+ const { selectedBox, inventoryData, addModeItem, setMasterFilterLocation } = useInventory();
+
const boxData = selectedBox ? inventoryData.get(selectedBox) : null;
const inventory = boxData ? boxData.inventory : [];
- const isFileCabinet = selectedBox && selectedBox.startsWith('Tall Cabinet');
-
- const SHELF_NAMES = [
- 'Shelf 6 (Top)',
- 'Shelf 5',
- 'Shelf 4',
- 'Shelf 3',
- 'Shelf 2',
- 'Shelf 1 (Bottom)'
- ];
+ const shelved = hasShelves(boxData);
- // Group inventory items by shelf for file cabinets
+ // Group inventory items by shelf index (0 = bottom), rendered top-to-bottom.
const shelves = useMemo(() => {
- if (!isFileCabinet) {
- return null;
- }
-
- return SHELF_NAMES.map((name, shelfNum) => {
- const items = inventory.filter(item => (item.shelf ?? 0) === shelfNum);
- return { name, shelfNum, items };
+ if (!shelved) return null;
+ const shelfCount = getShelfCount(boxData);
+ const orderedShelfIndices = getShelfIndicesTopToBottom(shelfCount);
+ return orderedShelfIndices.map((shelfIdx) => {
+ const items = inventory.filter((item) => (item.shelf ?? 0) === shelfIdx);
+ return { name: getShelfLabel(shelfIdx, shelfCount), shelfNum: shelfIdx, items };
});
- }, [inventory, isFileCabinet]);
+ }, [inventory, shelved, boxData]);
if (!selectedBox || !boxData || addModeItem) return null;
@@ -38,8 +34,8 @@ const BoxInventoryOverlay = () => {
const overlayWidth = 250;
const maxHeight = 400;
- if (isFileCabinet && shelves) {
- // Render shelves for Tall Cabinet
+ if (shelved && shelves) {
+ // Render one section per shelf, top-to-bottom.
return (
@@ -47,6 +43,30 @@ const BoxInventoryOverlay = () => {
{selectedBox}
+ setMasterFilterLocation(selectedBox)}
+ style={{
+ width: '100%',
+ padding: '0.5rem',
+ marginBottom: '0.5rem',
+ fontSize: '0.85rem',
+ background: 'var(--accent)',
+ color: 'white',
+ border: 'none',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ fontWeight: '500',
+ flexShrink: 0
+ }}
+ onMouseEnter={(e) => {
+ e.currentTarget.style.opacity = '0.9';
+ }}
+ onMouseLeave={(e) => {
+ e.currentTarget.style.opacity = '1';
+ }}
+ >
+ Master Filter
+
{shelves.map((shelf, idx) => (
shelf.items.length > 0 && (
@@ -95,6 +115,30 @@ const BoxInventoryOverlay = () => {
{selectedBox}
+
setMasterFilterLocation(selectedBox)}
+ style={{
+ width: '100%',
+ padding: '0.5rem',
+ marginBottom: '0.5rem',
+ fontSize: '0.85rem',
+ background: 'var(--accent)',
+ color: 'white',
+ border: 'none',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ fontWeight: '500',
+ flexShrink: 0
+ }}
+ onMouseEnter={(e) => {
+ e.currentTarget.style.opacity = '0.9';
+ }}
+ onMouseLeave={(e) => {
+ e.currentTarget.style.opacity = '1';
+ }}
+ >
+ Master Filter
+
diff --git a/milventory/src/components/Map/FreePlaceDots.js b/milventory/src/components/Map/FreePlaceDots.js
new file mode 100644
index 0000000..8d45a0f
--- /dev/null
+++ b/milventory/src/components/Map/FreePlaceDots.js
@@ -0,0 +1,176 @@
+import React, { useEffect, useRef, useMemo } from 'react';
+import * as d3 from 'd3';
+import { useInventory, FREE_SUBTRACT_DOT_PREFIX } from '../../context/InventoryContext';
+
+const FreePlaceDots = () => {
+ const gRef = useRef(null);
+ const {
+ selectedMasterItem,
+ subtractModeItem,
+ moveModeItem,
+ freePlaceModeItem,
+ freePlacementsBySupplyPublicId,
+ freePlaceVisualDots,
+ subtractModePending,
+ moveModeFreeCoordById,
+ updateFreePlaceSessionCoord,
+ updateMoveModeFreeDotPosition,
+ updateMoveModeDotDragLiveForArrows,
+ moveModeDotDragLiveByIdRef,
+ requestMasterArrowsRedraw,
+ handleFreePlaceSessionDotDelete,
+ handleSubtractFreeDotClick
+ } = useInventory();
+
+ const dotItemName =
+ subtractModeItem || moveModeItem || freePlaceModeItem || selectedMasterItem || null;
+
+ const dots = useMemo(() => {
+ if (!dotItemName) return [];
+ if (freePlaceModeItem && dotItemName === freePlaceModeItem && freePlaceVisualDots != null) {
+ return freePlaceVisualDots;
+ }
+ return freePlacementsBySupplyPublicId.get(dotItemName) || [];
+ }, [dotItemName, freePlaceModeItem, freePlaceVisualDots, freePlacementsBySupplyPublicId]);
+
+ const displayDots = useMemo(() => {
+ if (!subtractModeItem || dotItemName !== subtractModeItem) return dots;
+ return dots
+ .map((d) => {
+ const key = `${FREE_SUBTRACT_DOT_PREFIX}||${d.id}`;
+ const pending = subtractModePending.get(key) || 0;
+ const displayQty = (d.qty || 0) - pending;
+ if (displayQty <= 0) return null;
+ return { ...d, displayQty, pendingSubtract: pending };
+ })
+ .filter(Boolean);
+ }, [dots, dotItemName, subtractModeItem, subtractModePending]);
+
+ const dotsInteractiveSubtract = Boolean(
+ subtractModeItem && dotItemName === subtractModeItem
+ );
+ const dotsInteractiveMove = Boolean(moveModeItem && dotItemName === moveModeItem);
+ const dotsInteractiveFreePlace = Boolean(
+ freePlaceModeItem && dotItemName === freePlaceModeItem
+ );
+ const dotsInteractive =
+ dotsInteractiveSubtract || dotsInteractiveMove || dotsInteractiveFreePlace;
+ const allowDrag = dotsInteractiveMove || dotsInteractiveFreePlace;
+
+ const renderDots = useMemo(() => {
+ if (!dotsInteractiveMove) return displayDots;
+ return displayDots.map((d) => {
+ const o = moveModeFreeCoordById.get(d.id);
+ if (!o) return d;
+ return { ...d, x: o.x, y: o.y };
+ });
+ }, [displayDots, dotsInteractiveMove, moveModeFreeCoordById]);
+
+ useEffect(() => {
+ if (!gRef.current) return;
+ const g = d3.select(gRef.current);
+ g.selectAll('*').remove();
+ if (!dotItemName || renderDots.length === 0) return;
+
+ const drag = d3
+ .drag()
+ .filter((event) => {
+ if (event.ctrlKey || event.button) return false;
+ const t = event.type;
+ if ((t === 'mousedown' || t === 'pointerdown') && event.detail > 1) return false;
+ return true;
+ })
+ .clickDistance(4)
+ .on('start', (event) => {
+ event.sourceEvent?.stopPropagation?.();
+ })
+ .on('drag', function dragMove(event, d) {
+ if (!allowDrag) return;
+ const node = d3.select(this);
+ const cx = (+node.attr('cx') || 0) + event.dx;
+ const cy = (+node.attr('cy') || 0) + event.dy;
+ node.attr('cx', cx).attr('cy', cy);
+ if (dotsInteractiveMove) {
+ updateMoveModeDotDragLiveForArrows(d.id, cx, cy);
+ }
+ })
+ .on('end', function dragEnd(event, d) {
+ if (!allowDrag) return;
+ const cx = +d3.select(this).attr('cx');
+ const cy = +d3.select(this).attr('cy');
+ const ox = d.x;
+ const oy = d.y;
+ const significant = Math.hypot(cx - ox, cy - oy) >= 0.5;
+ if (dotsInteractiveMove) {
+ moveModeDotDragLiveByIdRef.current.delete(d.id);
+ if (!significant) requestMasterArrowsRedraw();
+ }
+ if (!significant) return;
+ if (dotsInteractiveMove) {
+ updateMoveModeFreeDotPosition(d.id, cx, cy);
+ } else if (dotsInteractiveFreePlace) {
+ updateFreePlaceSessionCoord(d.id, cx, cy);
+ }
+ });
+
+ const cursor = dotsInteractiveSubtract
+ ? 'pointer'
+ : allowDrag
+ ? 'grab'
+ : 'default';
+
+ const circles = g
+ .selectAll('circle')
+ .data(renderDots, (d) => d.id)
+ .join('circle')
+ .attr('cx', (d) => d.x)
+ .attr('cy', (d) => d.y)
+ .attr('r', dotsInteractive ? 12 : 9)
+ .attr('class', 'free-place-dot')
+ .style('cursor', cursor)
+ .style('fill', 'var(--accent)')
+ .style('stroke', '#fff')
+ .style('stroke-width', 2)
+ .style('opacity', (d) =>
+ dotsInteractiveSubtract && d.pendingSubtract > 0 ? 0.62 : 1
+ )
+ .style('pointer-events', dotsInteractive ? 'all' : 'none')
+ .on('click', (e, d) => {
+ if (!dotsInteractiveSubtract) return;
+ e.stopPropagation();
+ e.preventDefault();
+ void handleSubtractFreeDotClick(d.id);
+ })
+ .on('dblclick', (e, d) => {
+ if (!dotsInteractiveFreePlace) return;
+ e.stopPropagation();
+ e.preventDefault();
+ handleFreePlaceSessionDotDelete(d.id);
+ });
+
+ if (allowDrag) {
+ circles.call(drag);
+ }
+ }, [
+ renderDots,
+ dotItemName,
+ dotsInteractive,
+ dotsInteractiveSubtract,
+ dotsInteractiveMove,
+ dotsInteractiveFreePlace,
+ allowDrag,
+ updateFreePlaceSessionCoord,
+ updateMoveModeFreeDotPosition,
+ updateMoveModeDotDragLiveForArrows,
+ moveModeDotDragLiveByIdRef,
+ requestMasterArrowsRedraw,
+ handleFreePlaceSessionDotDelete,
+ handleSubtractFreeDotClick
+ ]);
+
+ if (!dotItemName || renderDots.length === 0) return null;
+
+ return ;
+};
+
+export default FreePlaceDots;
diff --git a/milventory/src/components/Map/Map.js b/milventory/src/components/Map/Map.js
new file mode 100644
index 0000000..b8399a7
--- /dev/null
+++ b/milventory/src/components/Map/Map.js
@@ -0,0 +1,435 @@
+import React, { forwardRef, useRef } from 'react';
+import { useInventory } from '../../context/InventoryContext';
+import MasterItemPreview from '../Master/MasterItemPreview';
+import ArrowConnections from './ArrowConnections';
+import BoxInventoryOverlay from './BoxInventoryOverlay';
+import AddModeArrow from './AddModeArrow';
+import MoveModeBoxes from './MoveModeBoxes';
+import SubtractModePreview from './SubtractModePreview';
+import FreePlaceDots from './FreePlaceDots';
+import { svgMarkupToDataUrl } from '../../utils/svgDataUrl';
+import {
+ hasShelves,
+ getShelfCount,
+ getShelfLabel,
+ getShelfIndicesTopToBottom
+} from '../../utils/shelfLabels';
+
+function inventoryRowMatchesSupplyPublicId(invItem, supplyPublicId) {
+ if (!supplyPublicId) return false;
+ const k =
+ invItem.supplyPublicId ||
+ (invItem.supplyId != null ? `__legacy_id_${invItem.supplyId}` : null);
+ return k === supplyPublicId;
+}
+
+const MapComponent = forwardRef((props, ref) => {
+ const {
+ worldRef,
+ inventoryData,
+ inventoryBounds,
+ selectedBox,
+ currentDragOverBox,
+ handleBoxClick,
+ handleBoxHover,
+ handleBoxHoverLeave,
+ handleDrop,
+ setCurrentDragOverBox,
+ addModeItem,
+ addModePending,
+ handleBoxClickAddMode,
+ boxHasAnyPending,
+ selectedMasterItem,
+ getItemLocations,
+ moveModeItem,
+ moveModeDragging,
+ handleMoveModeDrop,
+ subtractModeItem,
+ subtractModePending,
+ handleBoxClickSubtractMode,
+ boxHasAnySubtractPending,
+ freePlaceModeItem,
+ handleFreePlaceWorldClick
+ } = useInventory();
+
+ const freePlaceRoomPointerRef = useRef(null);
+
+ // Compute highlighted box set from selected Master item (for React-managed className)
+ const highlightedBoxes = selectedMasterItem ? new Set(getItemLocations(selectedMasterItem)) : null;
+
+ // Compute highlighted box set for subtract mode (all boxes containing the item)
+ const subtractModeHighlightedBoxes = subtractModeItem ? new Set(getItemLocations(subtractModeItem)) : null;
+
+ const handleBoxMouseEnter = (e, boxTitle) => {
+ const rect = e.currentTarget.getBoundingClientRect();
+ const wrap = e.currentTarget.closest('.wrap');
+ if (wrap) {
+ const wrapRect = wrap.getBoundingClientRect();
+ const x = rect.left + rect.width / 2 - wrapRect.left;
+ const y = rect.top + rect.height / 2 - wrapRect.top;
+ handleBoxHover(boxTitle, x, y);
+ }
+ };
+
+ const handleDragEnter = (e, boxTitle) => {
+ e.preventDefault();
+ if (moveModeItem && moveModeDragging) {
+ // In move mode, track drag over for move boxes
+ if (boxTitle !== currentDragOverBox) {
+ setCurrentDragOverBox(boxTitle);
+ }
+ } else {
+ if (boxTitle !== currentDragOverBox) {
+ setCurrentDragOverBox(boxTitle);
+ }
+ }
+ };
+
+ const handleDragOver = (e, boxTitle) => {
+ e.preventDefault();
+ e.dataTransfer.dropEffect = 'move';
+ if (moveModeItem && moveModeDragging) {
+ // In move mode, track drag over for move boxes
+ if (boxTitle !== currentDragOverBox) {
+ setCurrentDragOverBox(boxTitle);
+ }
+ } else {
+ if (boxTitle !== currentDragOverBox) {
+ setCurrentDragOverBox(boxTitle);
+ }
+ }
+ };
+
+ const handleDragLeave = (e, boxTitle) => {
+ const target = e.currentTarget;
+ const clientX = e.clientX;
+ const clientY = e.clientY;
+
+ setTimeout(() => {
+ if (!target) return;
+ const rect = target.getBoundingClientRect();
+ if (clientX < rect.left || clientX > rect.right || clientY < rect.top || clientY > rect.bottom) {
+ if (currentDragOverBox === boxTitle) {
+ setCurrentDragOverBox(null);
+ }
+ }
+ }, 0);
+ };
+
+ const handleDropBox = (e, boxTitle) => {
+ e.preventDefault();
+ e.stopPropagation();
+ if (moveModeItem && moveModeDragging) {
+ // In move mode, handle drop for moving items
+ // For Tall Cabinets, we need to determine which shelf was dropped on
+ const boxData = inventoryData.get(boxTitle);
+ let targetShelf = undefined;
+
+ if (boxData && hasShelves(boxData) && worldRef.current) {
+ const shelfCount = getShelfCount(boxData);
+ const svg = e.currentTarget.ownerSVGElement;
+ if (svg) {
+ const pt = svg.createSVGPoint();
+ pt.x = e.clientX;
+ pt.y = e.clientY;
+ const ctm = worldRef.current.getScreenCTM();
+ if (ctm) {
+ const worldPt = pt.matrixTransform(ctm.inverse());
+ const shelfH = boxData.height / shelfCount;
+ const relativeY = worldPt.y - boxData.y;
+ const rowFromTop = Math.max(
+ 0,
+ Math.min(shelfCount - 1, Math.floor(relativeY / shelfH))
+ );
+ targetShelf = shelfCount - 1 - rowFromTop;
+ }
+ }
+ }
+
+ handleMoveModeDrop(boxTitle, targetShelf);
+ setCurrentDragOverBox(null);
+ } else {
+ handleDrop(boxTitle);
+ setCurrentDragOverBox(null);
+ }
+ };
+
+ const boxes = Array.from(inventoryData.values());
+
+ const viewBox = inventoryBounds?.viewBox
+ ? `${inventoryBounds.viewBox.x} ${inventoryBounds.viewBox.y} ${inventoryBounds.viewBox.width} ${inventoryBounds.viewBox.height}`
+ : "0 0 2000 2200";
+
+ const roomBounds = inventoryBounds?.room || {
+ x: 80,
+ y: 80,
+ width: 1840,
+ height: 2000,
+ rx: 18,
+ ry: 18
+ };
+
+ const onFreePlaceRoomPointerDown = (e) => {
+ if (!freePlaceModeItem || e.button !== 0) return;
+ freePlaceRoomPointerRef.current = { x: e.clientX, y: e.clientY };
+ };
+
+ const onFreePlaceRoomClick = (e) => {
+ if (!freePlaceModeItem || !worldRef.current || !handleFreePlaceWorldClick) return;
+ if (e.button !== 0) return;
+ const start = freePlaceRoomPointerRef.current;
+ freePlaceRoomPointerRef.current = null;
+ if (start) {
+ const d = Math.hypot(e.clientX - start.x, e.clientY - start.y);
+ if (d > 12) return;
+ }
+ e.stopPropagation();
+ const svg = e.currentTarget.ownerSVGElement;
+ if (!svg) return;
+ const pt = svg.createSVGPoint();
+ pt.x = e.clientX;
+ pt.y = e.clientY;
+ const ctm = worldRef.current.getScreenCTM();
+ if (!ctm) return;
+ const w = pt.matrixTransform(ctm.inverse());
+ void handleFreePlaceWorldClick(w.x, w.y);
+ };
+
+ const boxPointerBlockFreePlace = freePlaceModeItem ? { pointerEvents: 'none' } : undefined;
+
+ // Boxes with shelves get shelf overlays in add/subtract/move mode
+ const shelvedBoxes = (addModeItem || subtractModeItem || moveModeItem)
+ ? boxes.filter(hasShelves)
+ : [];
+
+ return (
+ <>
+
+
+
+ >
+ );
+});
+
+MapComponent.displayName = 'Map';
+
+export default MapComponent;
diff --git a/milventory/src/components/Map/MoveModeBoxes.js b/milventory/src/components/Map/MoveModeBoxes.js
new file mode 100644
index 0000000..b573dd8
--- /dev/null
+++ b/milventory/src/components/Map/MoveModeBoxes.js
@@ -0,0 +1,275 @@
+import React, { useEffect, useRef } from 'react';
+import { useInventory } from '../../context/InventoryContext';
+import { hasShelves, getShelfCount, normalizeShelfIndex } from '../../utils/shelfLabels';
+
+const MoveModeBoxes = () => {
+ const {
+ moveModeItem,
+ inventoryData,
+ moveModeDragging,
+ handleMoveModeDragStart,
+ handleMoveModeDragMove,
+ handleMoveModeDrop,
+ clearMoveModeDragging,
+ currentDragOverBox,
+ svgRef,
+ worldRef,
+ isDraggingMoveBoxRef
+ } = useInventory();
+
+ const dragStartRef = useRef(null);
+
+ // Track mouse position during drag
+ useEffect(() => {
+ if (!moveModeDragging) {
+ dragStartRef.current = null;
+ return;
+ }
+
+ const handleMouseMove = (e) => {
+ if (!svgRef.current || !worldRef.current || !moveModeDragging) return;
+
+ const svg = svgRef.current;
+ const pt = svg.createSVGPoint();
+ pt.x = e.clientX;
+ pt.y = e.clientY;
+
+ const ctm = worldRef.current.getScreenCTM();
+ if (!ctm) return;
+
+ const worldPt = pt.matrixTransform(ctm.inverse());
+ handleMoveModeDragMove(worldPt.x - 30, worldPt.y - 30); // Offset by half box size
+ };
+
+ const handleMouseUp = (e) => {
+ if (!moveModeDragging || !dragStartRef.current) return;
+
+ // Check if we're over an inventory box
+ const svg = svgRef.current;
+ if (!svg || !worldRef.current) {
+ // Reset to original position
+ clearMoveModeDragging();
+ return;
+ }
+
+ const pt = svg.createSVGPoint();
+ pt.x = e.clientX;
+ pt.y = e.clientY;
+ const ctm = worldRef.current.getScreenCTM();
+ if (!ctm) {
+ clearMoveModeDragging();
+ return;
+ }
+
+ const worldPt = pt.matrixTransform(ctm.inverse());
+
+ // Find which inventory box we're over
+ let targetBox = null;
+ let targetShelf = undefined;
+
+ for (const [boxTitle, boxData] of inventoryData.entries()) {
+ if (worldPt.x >= boxData.x && worldPt.x <= boxData.x + boxData.width &&
+ worldPt.y >= boxData.y && worldPt.y <= boxData.y + boxData.height) {
+ targetBox = boxTitle;
+
+ // If it has shelves, determine which one was dropped on.
+ if (hasShelves(boxData)) {
+ const shelfCount = getShelfCount(boxData);
+ const shelfH = boxData.height / shelfCount;
+ const relativeY = worldPt.y - boxData.y;
+ const rowFromTop = Math.max(
+ 0,
+ Math.min(shelfCount - 1, Math.floor(relativeY / shelfH))
+ );
+ targetShelf = shelfCount - 1 - rowFromTop;
+ }
+ break;
+ }
+ }
+
+ // Check if dropped on a different location (different box or different shelf of same box)
+ const isDifferentLocation =
+ targetBox &&
+ (targetBox !== dragStartRef.current.boxTitle ||
+ (targetBox === dragStartRef.current.boxTitle &&
+ normalizeShelfIndex(targetShelf) !==
+ normalizeShelfIndex(dragStartRef.current.shelf)));
+
+ if (isDifferentLocation) {
+ // Dropped on a different location - move the item
+ handleMoveModeDrop(targetBox, targetShelf);
+ } else {
+ // Dropped outside or on same location - reset to original position
+ clearMoveModeDragging();
+ }
+ };
+
+ window.addEventListener('mousemove', handleMouseMove);
+ window.addEventListener('mouseup', handleMouseUp);
+ return () => {
+ window.removeEventListener('mousemove', handleMouseMove);
+ window.removeEventListener('mouseup', handleMouseUp);
+ };
+ }, [moveModeDragging, svgRef, worldRef, handleMoveModeDragMove, handleMoveModeDrop, clearMoveModeDragging, inventoryData]);
+
+ if (!moveModeItem) return null;
+
+ const boxes = Array.from(inventoryData.values());
+ const moveBoxes = [];
+
+ boxes.forEach(box => {
+ const boxData = inventoryData.get(box.title);
+ if (!boxData) return;
+
+ const matchingItems = boxData.inventory.filter((item) => {
+ const pid =
+ item.supplyPublicId ||
+ (item.supplyId != null ? `__legacy_id_${item.supplyId}` : null);
+ return pid === moveModeItem;
+ });
+ if (matchingItems.length === 0) return;
+
+ const isShelved = hasShelves(box);
+
+ if (isShelved) {
+ // For shelved boxes, show one draggable handle per used shelf.
+ const shelfCount = getShelfCount(box);
+ matchingItems.forEach(item => {
+ const shelfIdx = item.shelf ?? 0;
+ const shelfH = box.height / shelfCount;
+ const shelfY = box.y + (shelfCount - 1 - shelfIdx) * shelfH;
+
+ // Position box in center of shelf
+ const boxSize = Math.min(shelfH * 0.6, box.width * 0.4, 60);
+ const boxX = box.x + (box.width - boxSize) / 2;
+ const boxY = shelfY + (shelfH - boxSize) / 2;
+
+ const isDragging = moveModeDragging &&
+ moveModeDragging.boxTitle === box.title &&
+ moveModeDragging.shelf === shelfIdx;
+ const isDragOver = currentDragOverBox === box.title;
+
+ moveBoxes.push({
+ key: `${box.title}||${shelfIdx}`,
+ x: boxX,
+ y: boxY,
+ width: boxSize,
+ height: boxSize,
+ qty: item.qty,
+ boxTitle: box.title,
+ shelf: shelfIdx,
+ isDragging,
+ isDragOver
+ });
+ });
+ } else {
+ // For regular boxes, show one box with total quantity
+ const totalQty = matchingItems.reduce((sum, item) => sum + (item.qty || 0), 0);
+
+ // Position box in center
+ const boxSize = Math.min(box.height * 0.5, box.width * 0.4, 60);
+ const boxX = box.x + (box.width - boxSize) / 2;
+ const boxY = box.y + (box.height - boxSize) / 2;
+
+ const isDragging = moveModeDragging &&
+ moveModeDragging.boxTitle === box.title &&
+ moveModeDragging.shelf === undefined;
+ const isDragOver = currentDragOverBox === box.title;
+
+ moveBoxes.push({
+ key: box.title,
+ x: boxX,
+ y: boxY,
+ width: boxSize,
+ height: boxSize,
+ qty: totalQty,
+ boxTitle: box.title,
+ shelf: undefined,
+ isDragging,
+ isDragOver
+ });
+ }
+ });
+
+ const handleMouseDown = (e, boxTitle, shelf, qty, x, y) => {
+ // Prevent D3 zoom from starting
+ e.preventDefault();
+ e.stopPropagation();
+ // Set dragging state immediately so D3 filter can see it
+ isDraggingMoveBoxRef.current = true;
+ dragStartRef.current = { boxTitle, shelf, qty, x, y };
+ handleMoveModeDragStart(boxTitle, shelf, qty, x, y);
+ };
+
+ return (
+
+ {moveBoxes.map(moveBox => {
+ if (moveBox.isDragging) return null; // Don't render the dragging box
+
+ return (
+
+ handleMouseDown(e, moveBox.boxTitle, moveBox.shelf, moveBox.qty, moveBox.x, moveBox.y)}
+ />
+
+ {moveBox.qty}
+
+
+ );
+ })}
+ {/* Render dragging box at cursor position */}
+ {moveModeDragging && (
+
+
+
+ {moveModeDragging.qty}
+
+
+ )}
+
+ );
+};
+
+export default MoveModeBoxes;
+
diff --git a/milventory/src/components/Map/SubtractModePreview.js b/milventory/src/components/Map/SubtractModePreview.js
new file mode 100644
index 0000000..cb1d873
--- /dev/null
+++ b/milventory/src/components/Map/SubtractModePreview.js
@@ -0,0 +1,150 @@
+import React from 'react';
+import { useInventory, FREE_SUBTRACT_DOT_PREFIX } from '../../context/InventoryContext';
+import { getShelfCount, getShelfLabel } from '../../utils/shelfLabels';
+
+const SubtractModePreview = () => {
+ const {
+ subtractModeItem,
+ subtractModeQtyPerClick,
+ setSubtractModeQtyPerClick,
+ subtractModePending,
+ finishSubtractMode,
+ cancelSubtractMode,
+ leftPaneWidth,
+ leftPaneCollapsed,
+ resolveMasterItem,
+ subtractModePreviewRef,
+ freePlacementsBySupplyPublicId,
+ inventoryData
+ } = useInventory();
+
+ const item = subtractModeItem ? resolveMasterItem(subtractModeItem) : null;
+
+ // Calculate position to the right of left pane
+ const leftPaneActualWidth = leftPaneCollapsed ? 40 : leftPaneWidth;
+ const positionX = leftPaneActualWidth + 20;
+ const positionY = 20;
+
+ const handleQtyChange = (delta) => {
+ setSubtractModeQtyPerClick(prev => Math.max(1, prev + delta));
+ };
+
+ const handleFinish = () => {
+ finishSubtractMode();
+ };
+
+ const handleCancel = () => {
+ cancelSubtractMode();
+ };
+
+ if (!subtractModeItem || !item) return null;
+
+ const pendingCount = Array.from(subtractModePending.values()).reduce((sum, qty) => sum + qty, 0);
+ const pendingEntries = Array.from(subtractModePending.entries());
+
+ // Format a pending key for display.
+ const formatPendingKey = (key) => {
+ const parts = key.split('||');
+ if (parts[0] === FREE_SUBTRACT_DOT_PREFIX && parts[1] != null && parts[1] !== '') {
+ const id = parseInt(parts[1], 10);
+ const placements = freePlacementsBySupplyPublicId.get(subtractModeItem) || [];
+ const dot = placements.find((p) => p.id === id);
+ if (dot) return `Floor (${Math.round(dot.x)}, ${Math.round(dot.y)})`;
+ return 'Floor placement';
+ }
+ if (parts.length > 1) {
+ const shelfIdx = parseInt(parts[1], 10);
+ const target = inventoryData?.get(parts[0]);
+ const shelfCount = getShelfCount(target);
+ return `${parts[0]} → ${getShelfLabel(shelfIdx, shelfCount)}`;
+ }
+ return parts[0];
+ };
+
+ return (
+
+
+
+
Subtract: {item.name}
+
+ ×
+
+
+
+
+ Click boxes or floor markers to queue subtractions (same qty per click). Use Finish Subtract to apply, or Cancel to discard all pending.
+
+
+
+
+ handleQtyChange(-1)}
+ type="button"
+ >
+ −
+
+ {
+ const val = parseInt(e.target.value) || 1;
+ setSubtractModeQtyPerClick(Math.max(1, val));
+ }}
+ min="1"
+ />
+ handleQtyChange(1)}
+ type="button"
+ >
+ +
+
+
+
+
+ {pendingEntries.length > 0 && (
+
+
Pending subtractions:
+
+ {pendingEntries.map(([key, qty]) => (
+ -
+ {formatPendingKey(key)}: -{qty}
+
+ ))}
+
+
+ Total: {pendingCount} items
+
+
+ )}
+
+
+
+ Cancel
+
+
+ Finish Subtract
+
+
+
+
+ );
+};
+
+export default SubtractModePreview;
+
diff --git a/milventory/src/components/Tooltip.js b/milventory/src/components/Map/Tooltip.js
similarity index 86%
rename from milventory/src/components/Tooltip.js
rename to milventory/src/components/Map/Tooltip.js
index 6dc1499..c77591a 100644
--- a/milventory/src/components/Tooltip.js
+++ b/milventory/src/components/Map/Tooltip.js
@@ -1,5 +1,5 @@
import React from 'react';
-import { useInventory } from '../context/InventoryContext';
+import { useInventory } from '../../context/InventoryContext';
const Tooltip = () => {
const { tooltip, wrapRef } = useInventory();
diff --git a/milventory/src/components/Master/ItemTypeFormFields.js b/milventory/src/components/Master/ItemTypeFormFields.js
new file mode 100644
index 0000000..8093c5d
--- /dev/null
+++ b/milventory/src/components/Master/ItemTypeFormFields.js
@@ -0,0 +1,536 @@
+import React from 'react';
+
+export const areNumberCustomFieldsValid = (customFields, customFieldDefinitions) => {
+ const numberDefs = customFieldDefinitions.filter((d) => d.type === 'number');
+ for (const d of numberDefs) {
+ const value = customFields[d.name];
+ if (value === undefined || value === null || value === '') continue;
+ const n = Number(value);
+ if (Number.isNaN(n) || !Number.isFinite(n)) return false;
+ }
+ return true;
+};
+
+/** Keys listed on the type as required must each have a non-empty value (by field type). */
+export const areLockedCustomFieldsFilled = (customFields, lockedKeys, customFieldDefinitions) => {
+ if (!lockedKeys || lockedKeys.length === 0) return true;
+ for (const key of lockedKeys) {
+ const def = customFieldDefinitions.find((d) => d.name === key);
+ const t = def ? def.type : 'text';
+ const v = customFields[key];
+ if (t === 'number') {
+ if (v === undefined || v === null || v === '') return false;
+ const n = Number(v);
+ if (Number.isNaN(n) || !Number.isFinite(n)) return false;
+ } else if (t === 'date') {
+ if (v === undefined || v === null || String(v).trim() === '') return false;
+ } else {
+ if (v === undefined || v === null) return false;
+ if (String(v).trim() === '') return false;
+ }
+ }
+ return true;
+};
+
+/** Build API payload: every key in typeCustomFields is required; defaults only include nonempty presets. */
+export const buildCustomFieldsPayload = (typeCustomFields, definitions) => {
+ const locked_custom_field_keys = Object.keys(typeCustomFields).sort();
+ const default_custom_fields = {};
+ for (const k of locked_custom_field_keys) {
+ const v = typeCustomFields[k];
+ const def = definitions.find((d) => d.name === k);
+ const t = def ? def.type : 'text';
+ if (t === 'number') {
+ if (v !== '' && v !== null && v !== undefined) {
+ const n = Number(v);
+ if (!Number.isNaN(n) && Number.isFinite(n)) default_custom_fields[k] = n;
+ }
+ } else if (v !== '' && v !== null && v !== undefined) {
+ default_custom_fields[k] = v;
+ }
+ }
+ return { default_custom_fields, locked_custom_field_keys };
+};
+
+export const typeCustomFieldsFromTypeRow = (t) => {
+ const locked = Array.isArray(t.locked_custom_field_keys) ? t.locked_custom_field_keys : [];
+ const defs = t.default_custom_fields || {};
+ const cf = {};
+ for (const k of locked) {
+ const raw = Object.prototype.hasOwnProperty.call(defs, k) ? defs[k] : undefined;
+ if (raw === undefined || raw === null) cf[k] = '';
+ else if (typeof raw === 'number') cf[k] = raw;
+ else cf[k] = String(raw);
+ }
+ return cf;
+};
+
+export const emptyForm = () => ({
+ name: '',
+ template_description: '',
+ item_name_prefix: '',
+ item_description_prefix: '',
+ image: null,
+ typeCustomFields: {},
+ locked_category_ids: [],
+ locked_team_names: [],
+ is_unique: false,
+ prevent_user_edit: false
+});
+
+export const TypeFormBody = ({
+ form,
+ setForm,
+ imageLabel,
+ onImagePick,
+ customFieldDefinitions,
+ addFieldDropdownOpen,
+ setAddFieldDropdownOpen,
+ addFieldDropdownRef,
+ showPreventUserEditCheckbox = false,
+ categoryOptions = [],
+ teamOptions = []
+}) => (
+
+);
diff --git a/milventory/src/components/Master/MasterCreateModal.js b/milventory/src/components/Master/MasterCreateModal.js
new file mode 100644
index 0000000..6c3de60
--- /dev/null
+++ b/milventory/src/components/Master/MasterCreateModal.js
@@ -0,0 +1,1013 @@
+import React, { useState, useEffect, useLayoutEffect, useRef, useMemo } from 'react';
+import { useInventory } from '../../context/InventoryContext';
+import { getCategories, getTeams, api } from '../../api';
+import { useBlockingDialog } from '../Common/BlockingDialogContext';
+import { areLockedCustomFieldsFilled } from './ItemTypeFormFields';
+
+// Levenshtein distance for fuzzy search
+const levenshteinDistance = (str1, str2) => {
+ const m = str1.length;
+ const n = str2.length;
+ const dp = Array(m + 1).fill(null).map(() => Array(n + 1).fill(0));
+
+ for (let i = 0; i <= m; i++) dp[i][0] = i;
+ for (let j = 0; j <= n; j++) dp[0][j] = j;
+
+ for (let i = 1; i <= m; i++) {
+ for (let j = 1; j <= n; j++) {
+ if (str1[i - 1] === str2[j - 1]) {
+ dp[i][j] = dp[i - 1][j - 1];
+ } else {
+ dp[i][j] = Math.min(
+ dp[i - 1][j] + 1,
+ dp[i][j - 1] + 1,
+ dp[i - 1][j - 1] + 1
+ );
+ }
+ }
+ }
+
+ return dp[m][n];
+};
+
+const joinPrefixSuffix = (prefix, suffix) => {
+ const p = (prefix || '').trimEnd();
+ const s = (suffix || '').trim();
+ if (!p) return s;
+ if (!s) return p;
+ return `${p}${p.endsWith(' ') ? '' : ' '}${s}`;
+};
+
+// Validate that all number-type custom fields have a valid number (or are empty)
+const areNumberCustomFieldsValid = (customFields, customFieldDefinitions) => {
+ const numberDefs = customFieldDefinitions.filter(d => d.type === 'number');
+ for (const d of numberDefs) {
+ const value = customFields[d.name];
+ if (value === undefined || value === null || value === '') continue;
+ const n = Number(value);
+ if (Number.isNaN(n) || !Number.isFinite(n)) return false;
+ }
+ return true;
+};
+
+// Reusable tag dropdown with fuzzy search
+const TagDropdown = ({
+ placeholder,
+ selectedItems,
+ availableItems,
+ onSelect,
+ onRemove,
+ maxResults = 5,
+ capitalize = false,
+ onSearchChange,
+ lockedItems = []
+}) => {
+ const [isOpen, setIsOpen] = useState(false);
+ const [searchQuery, setSearchQuery] = useState('');
+ const containerRef = useRef(null);
+ const inputRef = useRef(null);
+
+ // Close dropdown on outside click
+ useEffect(() => {
+ const handler = (e) => {
+ if (containerRef.current && !containerRef.current.contains(e.target)) {
+ setIsOpen(false);
+ }
+ };
+ document.addEventListener('mousedown', handler);
+ return () => document.removeEventListener('mousedown', handler);
+ }, []);
+
+ const unselected = availableItems.filter(item => !selectedItems.includes(item));
+
+ // Compute display items: fuzzy top-N when searching, all when idle
+ let displayItems;
+ if (searchQuery.trim()) {
+ const query = searchQuery.toLowerCase();
+ const scored = unselected.map(item => {
+ const itemLower = item.toLowerCase();
+ const distance = levenshteinDistance(query, itemLower);
+ const isSubstring = itemLower.includes(query);
+ return { item, score: isSubstring ? distance - 10 : distance, distance };
+ });
+ scored.sort((a, b) => a.score !== b.score ? a.score - b.score : a.distance - b.distance);
+ displayItems = scored.slice(0, maxResults).map(s => s.item);
+ } else {
+ displayItems = unselected;
+ }
+
+ const handleItemSelect = (item) => {
+ onSelect(item);
+ setSearchQuery('');
+ onSearchChange?.('');
+ setIsOpen(false);
+ };
+
+ return (
+
+ {/* Selected tags — 120% */}
+
+ {selectedItems.length === 0 && (
+
+ {placeholder}
+
+ )}
+ {selectedItems.map((item) => {
+ const isLocked = lockedItems.includes(item);
+ return (
+
+ {item}
+ {
+ if (!isLocked) onRemove(item);
+ }}
+ style={{
+ background: 'transparent',
+ border: 'none',
+ color: 'white',
+ cursor: isLocked ? 'not-allowed' : 'pointer',
+ opacity: isLocked ? 0.35 : 1,
+ padding: '0',
+ marginLeft: '0.25rem',
+ fontSize: '1rem',
+ lineHeight: '1',
+ display: 'flex',
+ alignItems: 'center',
+ justifyContent: 'center'
+ }}
+ title={isLocked ? 'Required by item type' : `Remove ${item}`}
+ >
+ ×
+
+
+ );
+ })}
+
+
+ {/* Search input / dropdown trigger — 132% (120% + 10%) */}
+
{ setIsOpen(true); inputRef.current?.focus(); }}
+ style={{
+ width: '100%', padding: '0.615rem 0.879rem', minHeight: '2.2rem',
+ background: 'rgba(0,0,0,.3)', border: '1px solid rgba(255,255,255,.1)', borderRadius: '4px',
+ color: 'var(--text)', fontSize: '0.85rem', cursor: 'pointer',
+ display: 'flex', alignItems: 'center', boxSizing: 'border-box'
+ }}
+ >
+ 0 ? 'Search...' : 'All selected'}
+ value={searchQuery}
+ onChange={(e) => { setSearchQuery(e.target.value); setIsOpen(true); onSearchChange?.(e.target.value); }}
+ onFocus={() => setIsOpen(true)}
+ onClick={(e) => e.stopPropagation()}
+ style={{
+ background: 'transparent', border: 'none', color: 'var(--text)',
+ fontSize: '0.85rem', outline: 'none', width: '100%', cursor: 'pointer', lineHeight: 1.35, padding: 0, margin: 0
+ }}
+ />
+ ▼
+
+
+ {/* Dropdown list */}
+ {isOpen && displayItems.length > 0 && (
+
+ {displayItems.map(item => (
+
handleItemSelect(item)}
+ style={{
+ padding: '0.5rem 0.75rem', cursor: 'pointer', fontSize: '0.9rem',
+ color: 'var(--text)', transition: 'background 0.1s',
+ ...(capitalize ? { textTransform: 'capitalize' } : {})
+ }}
+ onMouseEnter={(e) => { e.currentTarget.style.background = 'rgba(255,255,255,.1)'; }}
+ onMouseLeave={(e) => { e.currentTarget.style.background = 'transparent'; }}
+ >
+ {item}
+
+ ))}
+
+ )}
+
+ );
+};
+
+/** Single-select supply type with search. */
+const SupplyTypeSearchSelect = ({ supplyTypes, value, onChange }) => {
+ const [open, setOpen] = useState(false);
+ const [query, setQuery] = useState('');
+ const [menuPos, setMenuPos] = useState({ top: 0, left: 0, width: 0 });
+ const containerRef = useRef(null);
+ const triggerRef = useRef(null);
+ const inputRef = useRef(null);
+
+ const updateMenuPos = () => {
+ const el = triggerRef.current;
+ if (!el) return;
+ const r = el.getBoundingClientRect();
+ setMenuPos({ top: r.bottom + 4, left: r.left, width: r.width });
+ };
+
+ useLayoutEffect(() => {
+ if (!open) return;
+ updateMenuPos();
+ const onScrollOrResize = () => updateMenuPos();
+ window.addEventListener('scroll', onScrollOrResize, true);
+ window.addEventListener('resize', onScrollOrResize);
+ return () => {
+ window.removeEventListener('scroll', onScrollOrResize, true);
+ window.removeEventListener('resize', onScrollOrResize);
+ };
+ }, [open]);
+
+ useEffect(() => {
+ const handler = (e) => {
+ const menu = document.getElementById('supply-type-search-menu');
+ if (menu?.contains(e.target)) return;
+ if (containerRef.current?.contains(e.target)) return;
+ setOpen(false);
+ setQuery('');
+ };
+ document.addEventListener('mousedown', handler);
+ return () => document.removeEventListener('mousedown', handler);
+ }, []);
+
+ useEffect(() => {
+ if (open) {
+ setTimeout(() => inputRef.current?.focus(), 0);
+ }
+ }, [open]);
+
+ const selected = supplyTypes.find((x) => String(x.id) === String(value));
+ const label = selected
+ ? `${selected.name}${selected.is_unique ? ' (max 1 qty per item)' : ''}`
+ : 'None';
+
+ const q = query.trim().toLowerCase();
+ let filteredTypes;
+ if (!q) {
+ filteredTypes = supplyTypes;
+ } else {
+ const scored = supplyTypes.map((t) => {
+ const nameLower = (t.name || '').toLowerCase();
+ const isSubstring = nameLower.includes(q);
+ const distance = levenshteinDistance(q, nameLower);
+ return { t, score: isSubstring ? distance - 10 : distance };
+ });
+ scored.sort((a, b) => a.score !== b.score ? a.score - b.score : (a.t.name || '').localeCompare(b.t.name || ''));
+ filteredTypes = scored.map((s) => s.t);
+ }
+
+ const pick = (id) => {
+ onChange(id === '' || id === null || id === undefined ? '' : String(id));
+ setOpen(false);
+ setQuery('');
+ };
+
+ const rowStyle = (active) => ({
+ padding: '0.5rem 0.75rem',
+ cursor: 'pointer',
+ fontSize: '0.85rem',
+ color: 'var(--text)',
+ background: active ? 'rgba(255,255,255,.08)' : 'transparent',
+ });
+
+ return (
+
+ setOpen((o) => !o)}
+ style={{
+ width: '100%',
+ padding: '0.5rem',
+ display: 'flex',
+ alignItems: 'center',
+ justifyContent: 'space-between',
+ gap: '0.5rem',
+ textAlign: 'left',
+ cursor: 'pointer',
+ boxSizing: 'border-box',
+ appearance: 'none',
+ WebkitAppearance: 'none',
+ }}
+ >
+ {label}
+ {open ? '▲' : '▼'}
+
+ {open && (
+
+ )}
+
+ );
+};
+
+const MasterCreateModal = ({ isOpen, onClose, showTypeSelector = true }) => {
+ const { createMasterItem } = useInventory();
+ const { showAlert } = useBlockingDialog();
+
+ const [name, setName] = useState('');
+ const [description, setDescription] = useState('');
+ /** User upload only; type template is shown separately and never sent as supply image */
+ const [customImage, setCustomImage] = useState(null);
+ const [selectedTeams, setSelectedTeams] = useState([]);
+ const [selectedCategories, setSelectedCategories] = useState([]);
+ const [categorySearchQuery, setCategorySearchQuery] = useState('');
+ const [availableCategories, setAvailableCategories] = useState([]);
+ const [availableTeams, setAvailableTeams] = useState([]);
+ const [categoryNameToId, setCategoryNameToId] = useState(new Map());
+ const [customFields, setCustomFields] = useState({});
+ const [customFieldDefinitions, setCustomFieldDefinitions] = useState([]);
+ const [addFieldDropdownOpen, setAddFieldDropdownOpen] = useState(false);
+ const addFieldDropdownRef = useRef(null);
+ const nameInputRef = useRef(null);
+ const [supplyTypes, setSupplyTypes] = useState([]);
+ const [selectedSupplyTypeId, setSelectedSupplyTypeId] = useState('');
+ const [nameSuffix, setNameSuffix] = useState('');
+ const [descSuffix, setDescSuffix] = useState('');
+ const [lockedFieldKeys, setLockedFieldKeys] = useState([]);
+
+ useEffect(() => {
+ const handler = (e) => {
+ if (addFieldDropdownRef.current && !addFieldDropdownRef.current.contains(e.target)) {
+ setAddFieldDropdownOpen(false);
+ }
+ };
+ document.addEventListener('mousedown', handler);
+ return () => document.removeEventListener('mousedown', handler);
+ }, []);
+
+ // Fetch categories, teams, and custom field definitions when modal opens
+ useEffect(() => {
+ if (isOpen) {
+ api.getCustomFieldDefinitions()
+ .then(setCustomFieldDefinitions)
+ .catch(() => setCustomFieldDefinitions([]));
+ api.getSupplyTypes()
+ .then(setSupplyTypes)
+ .catch(() => setSupplyTypes([]));
+ getCategories()
+ .then(categories => {
+ // Categories now come as objects with {id, name}
+ const categoryList = categories.map(c => typeof c === 'string' ? c : c.name);
+ setAvailableCategories(categoryList);
+
+ // Build name-to-ID mapping
+ const mapping = new Map();
+ categories.forEach(cat => {
+ if (typeof cat === 'object' && cat.id && cat.name) {
+ mapping.set(cat.name, cat.id);
+ }
+ });
+ setCategoryNameToId(mapping);
+ })
+ .catch(err => {
+ console.error('Failed to fetch categories:', err);
+ setAvailableCategories([]);
+ setCategoryNameToId(new Map());
+ });
+ getTeams()
+ .then(teams => {
+ // Normalize to lowercase for frontend consistency
+ const normalized = teams.map(t => t.toLowerCase());
+ setAvailableTeams(normalized);
+ })
+ .catch(err => {
+ console.error('Failed to fetch teams:', err);
+ setAvailableTeams([]);
+ });
+ }
+ }, [isOpen]);
+
+ useEffect(() => {
+ if (isOpen) {
+ setName('');
+ setDescription('');
+ setCustomImage(null);
+ setSelectedTeams([]);
+ setSelectedCategories([]);
+ setCategorySearchQuery('');
+ setCustomFields({});
+ setAddFieldDropdownOpen(false);
+ setSelectedSupplyTypeId('');
+ setNameSuffix('');
+ setDescSuffix('');
+ setLockedFieldKeys([]);
+ setTimeout(() => nameInputRef.current?.focus(), 0);
+ }
+ }, [isOpen, showTypeSelector]);
+
+ useEffect(() => {
+ if (!isOpen) return;
+ if (!showTypeSelector) {
+ setLockedFieldKeys([]);
+ return;
+ }
+ const t = supplyTypes.find((x) => String(x.id) === String(selectedSupplyTypeId));
+ if (!selectedSupplyTypeId || !t) {
+ setLockedFieldKeys([]);
+ if (!selectedSupplyTypeId) {
+ setSelectedCategories([]);
+ setSelectedTeams([]);
+ }
+ return;
+ }
+ const defs = t.default_custom_fields || {};
+ const locked = Array.isArray(t.locked_custom_field_keys) ? [...t.locked_custom_field_keys] : [];
+ const cf = { ...defs };
+ for (const k of locked) {
+ if (!(k in cf)) cf[k] = '';
+ }
+ setCustomFields(cf);
+ setLockedFieldKeys(locked);
+ setNameSuffix('');
+ setDescSuffix('');
+ if (t.image) {
+ setCustomImage(null);
+ }
+
+ const teamsLt = (Array.isArray(t.locked_team_names) ? t.locked_team_names : []).map((x) =>
+ String(x).toLowerCase()
+ );
+ setSelectedTeams(teamsLt);
+ }, [selectedSupplyTypeId, supplyTypes, isOpen, showTypeSelector]);
+
+ useEffect(() => {
+ if (!isOpen || !showTypeSelector || !categoryNameToId.size) return;
+ const t = supplyTypes.find((x) => String(x.id) === String(selectedSupplyTypeId));
+ if (!selectedSupplyTypeId || !t) return;
+ const catIds = Array.isArray(t.locked_category_ids) ? t.locked_category_ids : [];
+ const catNames = catIds
+ .map((id) => {
+ for (const [name, cid] of categoryNameToId.entries()) {
+ if (Number(cid) === Number(id)) return name;
+ }
+ return null;
+ })
+ .filter(Boolean);
+ setSelectedCategories(catNames);
+ }, [categoryNameToId, selectedSupplyTypeId, isOpen, showTypeSelector]);
+
+ const effectiveTypeIdForLocks = showTypeSelector ? selectedSupplyTypeId : '';
+ const selectedTypeForLocks = effectiveTypeIdForLocks
+ ? supplyTypes.find((x) => String(x.id) === String(effectiveTypeIdForLocks))
+ : undefined;
+
+ const typeLockedCategoryNames = useMemo(() => {
+ if (!selectedTypeForLocks || !categoryNameToId.size) return [];
+ const ids = Array.isArray(selectedTypeForLocks.locked_category_ids)
+ ? selectedTypeForLocks.locked_category_ids
+ : [];
+ return ids
+ .map((id) => {
+ for (const [name, cid] of categoryNameToId.entries()) {
+ if (Number(cid) === Number(id)) return name;
+ }
+ return null;
+ })
+ .filter(Boolean);
+ }, [selectedTypeForLocks, categoryNameToId]);
+
+ const typeLockedTeamLower = useMemo(() => {
+ if (!selectedTypeForLocks) return [];
+ return (Array.isArray(selectedTypeForLocks.locked_team_names)
+ ? selectedTypeForLocks.locked_team_names
+ : []
+ ).map((x) => String(x).toLowerCase());
+ }, [selectedTypeForLocks]);
+
+ const handleImageChange = async (e) => {
+ const tid = showTypeSelector ? selectedSupplyTypeId : '';
+ const t = tid ? supplyTypes.find((x) => String(x.id) === String(tid)) : null;
+ if (t?.image) {
+ e.target.value = '';
+ return;
+ }
+ const file = e.target.files[0];
+ if (!file) {
+ return;
+ }
+
+ if (file.size > 10 * 1024 * 1024) {
+ await showAlert('Image file size must be less than 10MB');
+ e.target.value = '';
+ return;
+ }
+
+ if (!file.type.startsWith('image/')) {
+ await showAlert('Please select an image file');
+ e.target.value = '';
+ return;
+ }
+
+ const reader = new FileReader();
+ reader.onload = (event) => {
+ const base64Data = event.target.result;
+ setCustomImage(base64Data);
+ };
+ reader.onerror = () => {
+ showAlert('Error reading image file').then(() => {
+ e.target.value = '';
+ });
+ };
+ reader.readAsDataURL(file);
+ };
+
+ const handleRemoveImage = () => {
+ const tid = showTypeSelector ? selectedSupplyTypeId : '';
+ const t = tid ? supplyTypes.find((x) => String(x.id) === String(tid)) : null;
+ if (t?.image) return;
+ setCustomImage(null);
+ };
+
+ const handleSave = async () => {
+ const effectiveTypeId = showTypeSelector ? selectedSupplyTypeId : '';
+ const selectedType = effectiveTypeId
+ ? supplyTypes.find((x) => String(x.id) === String(effectiveTypeId))
+ : undefined;
+ const fullName = effectiveTypeId && selectedType
+ ? joinPrefixSuffix(selectedType.item_name_prefix, nameSuffix)
+ : name.trim();
+ const fullDescRaw = effectiveTypeId && selectedType
+ ? joinPrefixSuffix(selectedType.item_description_prefix || '', descSuffix)
+ : description.trim();
+ const fullDesc = fullDescRaw.trim() ? fullDescRaw.trim() : null;
+
+ if (fullName.trim()) {
+ if (!areNumberCustomFieldsValid(customFields, customFieldDefinitions)) {
+ await showAlert('Please enter a valid number in all number fields (or leave them empty).');
+ return;
+ }
+ if (
+ lockedFieldKeys.length > 0 &&
+ !areLockedCustomFieldsFilled(customFields, lockedFieldKeys, customFieldDefinitions)
+ ) {
+ await showAlert(
+ 'This item type requires a value in each listed custom field. Fill any empty required fields before creating.'
+ );
+ return;
+ }
+
+ // Convert category names to IDs
+ const categoryIds = selectedCategories
+ .map(catName => categoryNameToId.get(catName))
+ .filter(id => id !== undefined);
+
+ const typeHasTemplateImage = Boolean(selectedType?.image);
+ const newItem = {
+ name: fullName.trim(),
+ description: fullDesc,
+ image: typeHasTemplateImage ? null : customImage || null,
+ teams: selectedTeams.length > 0 ? selectedTeams : undefined,
+ categories: categoryIds.length > 0 ? categoryIds : undefined,
+ custom_fields: Object.keys(customFields).length > 0 ? customFields : undefined,
+ locations: [],
+ supply_type_id: effectiveTypeId ? Number(effectiveTypeId) : undefined
+ };
+
+ createMasterItem(newItem);
+ onClose();
+ }
+ };
+
+ const handleCancel = () => {
+ onClose();
+ };
+
+ const handleOverlayClick = (e) => {
+ if (e.target === e.currentTarget) {
+ handleCancel();
+ }
+ };
+
+ const handleKeyDown = (e) => {
+ if (e.key === 'Escape') {
+ handleCancel();
+ } else if (e.key === 'Enter' && (e.ctrlKey || e.metaKey)) {
+ e.preventDefault();
+ handleSave();
+ }
+ };
+
+ if (!isOpen) return null;
+
+ const effectiveTypeId = showTypeSelector ? selectedSupplyTypeId : '';
+ const selectedType = effectiveTypeId
+ ? supplyTypes.find((x) => String(x.id) === String(effectiveTypeId))
+ : undefined;
+ const typeBlocksOwnImage = Boolean(selectedType?.image);
+ const imagePreviewDisplay = typeBlocksOwnImage ? selectedType?.image ?? null : customImage;
+ const typePresetKeys =
+ selectedType?.default_custom_fields && typeof selectedType.default_custom_fields === 'object'
+ ? new Set(Object.keys(selectedType.default_custom_fields))
+ : new Set();
+
+ return (
+
+
+
Create Item
+ {showTypeSelector && (
+ <>
+
+
+ >
+ )}
+ {effectiveTypeId ? (
+ <>
+
+ {(() => {
+ const t = selectedType;
+ const nameFix = t?.item_name_prefix ?? '';
+ const hasNameFix = String(nameFix).length > 0;
+ return hasNameFix ? (
+
+ {nameFix}
+ setNameSuffix(e.target.value)}
+ />
+
+ ) : (
+
setNameSuffix(e.target.value)}
+ />
+ );
+ })()}
+
+ {(() => {
+ const t = selectedType;
+ const descFix = t?.item_description_prefix ?? '';
+ const hasDescFix = String(descFix).trim().length > 0;
+ return hasDescFix ? (
+
+ ) : (
+
+
+ );
+};
+
+export default MasterCreateModal;
+
diff --git a/milventory/src/components/Master/MasterEditModal.js b/milventory/src/components/Master/MasterEditModal.js
new file mode 100644
index 0000000..41891ba
--- /dev/null
+++ b/milventory/src/components/Master/MasterEditModal.js
@@ -0,0 +1,1120 @@
+import React, { useState, useEffect, useLayoutEffect, useRef, useMemo } from 'react';
+import { useInventory } from '../../context/InventoryContext';
+import { getCategories, getTeams, api } from '../../api';
+import { useBlockingDialog } from '../Common/BlockingDialogContext';
+import { areLockedCustomFieldsFilled } from './ItemTypeFormFields';
+
+// Levenshtein distance for fuzzy search
+const levenshteinDistance = (str1, str2) => {
+ const m = str1.length;
+ const n = str2.length;
+ const dp = Array(m + 1).fill(null).map(() => Array(n + 1).fill(0));
+
+ for (let i = 0; i <= m; i++) dp[i][0] = i;
+ for (let j = 0; j <= n; j++) dp[0][j] = j;
+
+ for (let i = 1; i <= m; i++) {
+ for (let j = 1; j <= n; j++) {
+ if (str1[i - 1] === str2[j - 1]) {
+ dp[i][j] = dp[i - 1][j - 1];
+ } else {
+ dp[i][j] = Math.min(
+ dp[i - 1][j] + 1,
+ dp[i][j - 1] + 1,
+ dp[i - 1][j - 1] + 1
+ );
+ }
+ }
+ }
+
+ return dp[m][n];
+};
+
+const joinPrefixSuffix = (prefix, suffix) => {
+ const p = (prefix || '').trimEnd();
+ const s = (suffix || '').trim();
+ if (!p) return s;
+ if (!s) return p;
+ return `${p}${p.endsWith(' ') ? '' : ' '}${s}`;
+};
+
+// Validate that all number-type custom fields have a valid number (or are empty)
+const areNumberCustomFieldsValid = (customFields, customFieldDefinitions) => {
+ const numberDefs = customFieldDefinitions.filter(d => d.type === 'number');
+ for (const d of numberDefs) {
+ const value = customFields[d.name];
+ if (value === undefined || value === null || value === '') continue;
+ const n = Number(value);
+ if (Number.isNaN(n) || !Number.isFinite(n)) return false;
+ }
+ return true;
+};
+
+// Reusable tag dropdown with fuzzy search
+const TagDropdown = ({
+ placeholder,
+ selectedItems,
+ availableItems,
+ onSelect,
+ onRemove,
+ maxResults = 5,
+ capitalize = false,
+ onSearchChange,
+ lockedItems = []
+}) => {
+ const [isOpen, setIsOpen] = useState(false);
+ const [searchQuery, setSearchQuery] = useState('');
+ const containerRef = useRef(null);
+ const inputRef = useRef(null);
+
+ useEffect(() => {
+ const handler = (e) => {
+ if (containerRef.current && !containerRef.current.contains(e.target)) {
+ setIsOpen(false);
+ }
+ };
+ document.addEventListener('mousedown', handler);
+ return () => document.removeEventListener('mousedown', handler);
+ }, []);
+
+ const unselected = availableItems.filter(item => !selectedItems.includes(item));
+
+ let displayItems;
+ if (searchQuery.trim()) {
+ const query = searchQuery.toLowerCase();
+ const scored = unselected.map(item => {
+ const itemLower = item.toLowerCase();
+ const distance = levenshteinDistance(query, itemLower);
+ const isSubstring = itemLower.includes(query);
+ return { item, score: isSubstring ? distance - 10 : distance, distance };
+ });
+ scored.sort((a, b) => a.score !== b.score ? a.score - b.score : a.distance - b.distance);
+ displayItems = scored.slice(0, maxResults).map(s => s.item);
+ } else {
+ displayItems = unselected;
+ }
+
+ const handleItemSelect = (item) => {
+ onSelect(item);
+ setSearchQuery('');
+ onSearchChange?.('');
+ setIsOpen(false);
+ };
+
+ return (
+
+
+ {selectedItems.length === 0 && (
+
+ {placeholder}
+
+ )}
+ {selectedItems.map((item) => {
+ const isLocked = lockedItems.includes(item);
+ return (
+
+ {item}
+ {
+ if (!isLocked) onRemove(item);
+ }}
+ style={{
+ background: 'transparent',
+ border: 'none',
+ color: 'white',
+ cursor: isLocked ? 'not-allowed' : 'pointer',
+ opacity: isLocked ? 0.35 : 1,
+ padding: '0',
+ marginLeft: '0.25rem',
+ fontSize: '1rem',
+ lineHeight: '1',
+ display: 'flex',
+ alignItems: 'center',
+ justifyContent: 'center'
+ }}
+ title={isLocked ? 'Required by item type' : `Remove ${item}`}
+ >
+ ×
+
+
+ );
+ })}
+
+
+
{ setIsOpen(true); inputRef.current?.focus(); }}
+ style={{
+ width: '100%', padding: '0.615rem 0.879rem', minHeight: '2.2rem',
+ background: 'rgba(0,0,0,.3)', border: '1px solid rgba(255,255,255,.1)', borderRadius: '4px',
+ color: 'var(--text)', fontSize: '0.85rem', cursor: 'pointer',
+ display: 'flex', alignItems: 'center', boxSizing: 'border-box'
+ }}
+ >
+ 0 ? 'Search...' : 'All selected'}
+ value={searchQuery}
+ onChange={(e) => { setSearchQuery(e.target.value); setIsOpen(true); onSearchChange?.(e.target.value); }}
+ onFocus={() => setIsOpen(true)}
+ onClick={(e) => e.stopPropagation()}
+ style={{
+ background: 'transparent', border: 'none', color: 'var(--text)',
+ fontSize: '0.85rem', outline: 'none', width: '100%', cursor: 'pointer', lineHeight: 1.35, padding: 0, margin: 0
+ }}
+ />
+ ▼
+
+
+ {isOpen && displayItems.length > 0 && (
+
+ {displayItems.map(item => (
+
handleItemSelect(item)}
+ style={{
+ padding: '0.5rem 0.75rem', cursor: 'pointer', fontSize: '0.9rem',
+ color: 'var(--text)', transition: 'background 0.1s',
+ ...(capitalize ? { textTransform: 'capitalize' } : {})
+ }}
+ onMouseEnter={(e) => { e.currentTarget.style.background = 'rgba(255,255,255,.1)'; }}
+ onMouseLeave={(e) => { e.currentTarget.style.background = 'transparent'; }}
+ >
+ {item}
+
+ ))}
+
+ )}
+
+ );
+};
+
+/** Single-select supply type with search. */
+const SupplyTypeSearchSelect = ({ supplyTypes, value, onChange }) => {
+ const [open, setOpen] = useState(false);
+ const [query, setQuery] = useState('');
+ const [menuPos, setMenuPos] = useState({ top: 0, left: 0, width: 0 });
+ const containerRef = useRef(null);
+ const triggerRef = useRef(null);
+ const inputRef = useRef(null);
+
+ const updateMenuPos = () => {
+ const el = triggerRef.current;
+ if (!el) return;
+ const r = el.getBoundingClientRect();
+ setMenuPos({ top: r.bottom + 4, left: r.left, width: r.width });
+ };
+
+ useLayoutEffect(() => {
+ if (!open) return;
+ updateMenuPos();
+ const onScrollOrResize = () => updateMenuPos();
+ window.addEventListener('scroll', onScrollOrResize, true);
+ window.addEventListener('resize', onScrollOrResize);
+ return () => {
+ window.removeEventListener('scroll', onScrollOrResize, true);
+ window.removeEventListener('resize', onScrollOrResize);
+ };
+ }, [open]);
+
+ useEffect(() => {
+ const handler = (e) => {
+ const menu = document.getElementById('edit-supply-type-search-menu');
+ if (menu?.contains(e.target)) return;
+ if (containerRef.current?.contains(e.target)) return;
+ setOpen(false);
+ setQuery('');
+ };
+ document.addEventListener('mousedown', handler);
+ return () => document.removeEventListener('mousedown', handler);
+ }, []);
+
+ useEffect(() => {
+ if (open) {
+ setTimeout(() => inputRef.current?.focus(), 0);
+ }
+ }, [open]);
+
+ const selected = supplyTypes.find((x) => String(x.id) === String(value));
+ const label = selected
+ ? `${selected.name}${selected.is_unique ? ' (max 1 qty per item)' : ''}`
+ : 'None';
+
+ const q = query.trim().toLowerCase();
+ const filteredTypes = q
+ ? supplyTypes
+ .map((t) => {
+ const nameLower = (t.name || '').toLowerCase();
+ const isSubstring = nameLower.includes(q);
+ const distance = levenshteinDistance(q, nameLower);
+ return { t, score: isSubstring ? distance - 10 : distance };
+ })
+ .sort((a, b) => a.score !== b.score ? a.score - b.score : (a.t.name || '').localeCompare(b.t.name || ''))
+ .map((s) => s.t)
+ : supplyTypes;
+
+ const pick = (id) => {
+ onChange(id === '' || id === null || id === undefined ? '' : String(id));
+ setOpen(false);
+ setQuery('');
+ };
+
+ const rowStyle = (active) => ({
+ padding: '0.5rem 0.75rem',
+ cursor: 'pointer',
+ fontSize: '0.85rem',
+ color: 'var(--text)',
+ background: active ? 'rgba(255,255,255,.08)' : 'transparent',
+ });
+
+ return (
+
+ setOpen((o) => !o)}
+ style={{
+ width: '100%',
+ padding: '0.5rem',
+ display: 'flex',
+ alignItems: 'center',
+ justifyContent: 'space-between',
+ gap: '0.5rem',
+ textAlign: 'left',
+ cursor: 'pointer',
+ boxSizing: 'border-box',
+ appearance: 'none',
+ WebkitAppearance: 'none',
+ }}
+ >
+ {label}
+ {open ? '▲' : '▼'}
+
+ {open && (
+
+ )}
+
+ );
+};
+
+const MasterEditModal = ({ isOpen, onClose, itemName }) => {
+ const { updateMasterItem, resolveMasterItem } = useInventory();
+ const { showAlert } = useBlockingDialog();
+
+ const [name, setName] = useState('');
+ const [description, setDescription] = useState('');
+ const [image, setImage] = useState(null);
+ const [imagePreview, setImagePreview] = useState(null);
+ const [selectedTeams, setSelectedTeams] = useState([]);
+ const [selectedCategories, setSelectedCategories] = useState([]);
+ const [availableCategories, setAvailableCategories] = useState([]);
+ const [availableTeams, setAvailableTeams] = useState([]);
+ const [categoryNameToId, setCategoryNameToId] = useState(new Map());
+ const [categoryIdToName, setCategoryIdToName] = useState(new Map());
+ const [customFields, setCustomFields] = useState({});
+ const [customFieldDefinitions, setCustomFieldDefinitions] = useState([]);
+ const [addFieldDropdownOpen, setAddFieldDropdownOpen] = useState(false);
+ const addFieldDropdownRef = useRef(null);
+ const nameInputRef = useRef(null);
+ const [supplyTypes, setSupplyTypes] = useState([]);
+ const [selectedSupplyTypeId, setSelectedSupplyTypeId] = useState('');
+ const [linkedType, setLinkedType] = useState(null);
+ const [unlinkFromType, setUnlinkFromType] = useState(false);
+ const [nameSuffix, setNameSuffix] = useState('');
+ const [descSuffix, setDescSuffix] = useState('');
+
+ const originalItem = itemName ? resolveMasterItem(itemName) : null;
+
+ useEffect(() => {
+ const handler = (e) => {
+ if (addFieldDropdownRef.current && !addFieldDropdownRef.current.contains(e.target)) {
+ setAddFieldDropdownOpen(false);
+ }
+ };
+ document.addEventListener('mousedown', handler);
+ return () => document.removeEventListener('mousedown', handler);
+ }, []);
+
+ // Fetch categories, teams, and custom field definitions when modal opens
+ useEffect(() => {
+ if (isOpen) {
+ api.getCustomFieldDefinitions()
+ .then(setCustomFieldDefinitions)
+ .catch(() => setCustomFieldDefinitions([]));
+ api.getSupplyTypes()
+ .then(setSupplyTypes)
+ .catch(() => setSupplyTypes([]));
+ getCategories()
+ .then(categories => {
+ const categoryList = categories.map(c => typeof c === 'string' ? c : c.name);
+ setAvailableCategories(categoryList);
+
+ const nameToId = new Map();
+ const idToName = new Map();
+ categories.forEach(cat => {
+ if (typeof cat === 'object' && cat.id && cat.name) {
+ nameToId.set(cat.name, cat.id);
+ idToName.set(cat.id, cat.name);
+ }
+ });
+ setCategoryNameToId(nameToId);
+ setCategoryIdToName(idToName);
+ })
+ .catch(err => {
+ console.error('Failed to fetch categories:', err);
+ setAvailableCategories([]);
+ });
+
+ getTeams()
+ .then(teams => {
+ const normalized = teams.map(t => t.toLowerCase());
+ setAvailableTeams(normalized);
+ })
+ .catch(err => {
+ console.error('Failed to fetch teams:', err);
+ setAvailableTeams([]);
+ });
+ }
+ }, [isOpen]);
+
+ useEffect(() => {
+ if (!isOpen || !originalItem) return;
+
+ setCustomFields(originalItem.custom_fields || {});
+ setAddFieldDropdownOpen(false);
+ setSelectedTeams(originalItem.teams || []);
+ setUnlinkFromType(false);
+ setLinkedType(null);
+ setSelectedSupplyTypeId(originalItem.supply_type_id ? String(originalItem.supply_type_id) : '');
+ setNameSuffix('');
+ setDescSuffix('');
+
+ if (originalItem.supply_type_id) {
+ setName(originalItem.name || '');
+ setDescription(originalItem.description || '');
+ setImage(null);
+ setImagePreview(originalItem.image || null);
+ api.getSupplyType(originalItem.supply_type_id)
+ .then((t) => {
+ setLinkedType(t);
+ if (t.image) {
+ setImage(null);
+ setImagePreview(originalItem.image || t.image);
+ } else {
+ setImage(originalItem.image || null);
+ setImagePreview(originalItem.image || null);
+ }
+ setCustomFields((prev) => {
+ const cf = { ...prev };
+ const locked = Array.isArray(t.locked_custom_field_keys) ? t.locked_custom_field_keys : [];
+ const defs = t.default_custom_fields || {};
+ for (const k of locked) {
+ if (!(k in cf)) cf[k] = Object.prototype.hasOwnProperty.call(defs, k) ? defs[k] : '';
+ }
+ return cf;
+ });
+ const np = (t.item_name_prefix || '').trimEnd();
+ const rawName = originalItem.name || '';
+ if (np && rawName.startsWith(np)) {
+ setNameSuffix(rawName.slice(np.length).replace(/^\s+/, ''));
+ } else {
+ setNameSuffix(rawName);
+ }
+ setName('');
+ const dp = (t.item_description_prefix || '').trim();
+ const rawD = originalItem.description || '';
+ if (dp && String(rawD).startsWith(dp)) {
+ setDescSuffix(String(rawD).slice(dp.length).replace(/^\s+/, ''));
+ } else {
+ setDescSuffix(rawD || '');
+ }
+ setDescription('');
+ })
+ .catch(() => {
+ setLinkedType(null);
+ setName(originalItem.name || '');
+ setDescription(originalItem.description || '');
+ setImage(originalItem.image || null);
+ setImagePreview(originalItem.image || null);
+ });
+ } else {
+ setName(originalItem.name || '');
+ setDescription(originalItem.description || '');
+ setImage(originalItem.image || null);
+ setImagePreview(originalItem.image || null);
+ }
+
+ setTimeout(() => nameInputRef.current?.focus(), 0);
+ }, [isOpen, originalItem]);
+
+ const lockedFieldKeys =
+ linkedType && !unlinkFromType && Array.isArray(linkedType.locked_custom_field_keys)
+ ? linkedType.locked_custom_field_keys
+ : [];
+
+ const typeLockedCategoryNames = useMemo(() => {
+ if (!linkedType || unlinkFromType) return [];
+ const ids = Array.isArray(linkedType.locked_category_ids) ? linkedType.locked_category_ids : [];
+ return ids.map((id) => categoryIdToName.get(id)).filter(Boolean);
+ }, [linkedType, unlinkFromType, categoryIdToName]);
+
+ const typeLockedTeamLower = useMemo(() => {
+ if (!linkedType || unlinkFromType) return [];
+ return (Array.isArray(linkedType.locked_team_names) ? linkedType.locked_team_names : []).map((x) =>
+ String(x).toLowerCase()
+ );
+ }, [linkedType, unlinkFromType]);
+
+ useEffect(() => {
+ if (!linkedType || unlinkFromType) return;
+ const tns = (Array.isArray(linkedType.locked_team_names) ? linkedType.locked_team_names : []).map((x) =>
+ String(x).toLowerCase()
+ );
+ setSelectedTeams((prev) => {
+ const s = new Set([...tns, ...prev]);
+ return Array.from(s);
+ });
+ }, [linkedType?.id, unlinkFromType]);
+
+ useEffect(() => {
+ if (!linkedType || unlinkFromType || !categoryIdToName.size) return;
+ const ids = Array.isArray(linkedType.locked_category_ids) ? linkedType.locked_category_ids : [];
+ const names = ids.map((id) => categoryIdToName.get(id)).filter(Boolean);
+ setSelectedCategories((prev) => {
+ const s = new Set([...names, ...prev]);
+ return Array.from(s);
+ });
+ }, [linkedType?.id, unlinkFromType, categoryIdToName]);
+
+ const typePresetKeys =
+ linkedType && !unlinkFromType && linkedType.default_custom_fields && typeof linkedType.default_custom_fields === 'object'
+ ? new Set(Object.keys(linkedType.default_custom_fields))
+ : new Set();
+
+ const useTypePrefixUi = Boolean(
+ originalItem?.supply_type_id && linkedType && !unlinkFromType
+ );
+
+ const typeBlocksOwnImage =
+ !unlinkFromType &&
+ Boolean(originalItem?.supply_type_id) &&
+ (Boolean(originalItem.type_has_template_image) || Boolean(linkedType?.image));
+
+ // Load categories when categoryIdToName mapping is ready
+ useEffect(() => {
+ if (isOpen && originalItem && categoryIdToName.size > 0) {
+ // Load categories (convert IDs to names for display)
+ if (originalItem.categories && originalItem.categories.length > 0) {
+ const categoryNames = originalItem.categories
+ .map(catId => categoryIdToName.get(catId))
+ .filter(name => name !== undefined);
+ setSelectedCategories(categoryNames);
+ } else {
+ setSelectedCategories([]);
+ }
+ }
+ }, [isOpen, originalItem, categoryIdToName]);
+
+ const handleImageChange = async (e) => {
+ if (typeBlocksOwnImage) {
+ e.target.value = '';
+ return;
+ }
+ const file = e.target.files[0];
+ if (!file) {
+ // Keep existing image if no new file selected
+ return;
+ }
+
+ // Validate file size (10MB max)
+ if (file.size > 10 * 1024 * 1024) {
+ await showAlert('Image file size must be less than 10MB');
+ e.target.value = '';
+ return;
+ }
+
+ // Validate file type
+ if (!file.type.startsWith('image/')) {
+ await showAlert('Please select an image file');
+ e.target.value = '';
+ return;
+ }
+
+ // Convert to base64
+ const reader = new FileReader();
+ reader.onload = (event) => {
+ const base64Data = event.target.result;
+ setImage(base64Data);
+ setImagePreview(base64Data);
+ };
+ reader.onerror = () => {
+ showAlert('Error reading image file').then(() => {
+ e.target.value = '';
+ });
+ };
+ reader.readAsDataURL(file);
+ };
+
+ const handleRemoveImage = () => {
+ setImage(null);
+ setImagePreview(null);
+ };
+
+ const getCurrentNameDescription = () => {
+ if (!unlinkFromType && linkedType) {
+ return {
+ currentName: joinPrefixSuffix(linkedType.item_name_prefix, nameSuffix).trim(),
+ currentDesc: joinPrefixSuffix(linkedType.item_description_prefix || '', descSuffix).trim()
+ };
+ }
+ return {
+ currentName: name.trim(),
+ currentDesc: description.trim()
+ };
+ };
+
+ const applyTypeToEditableFields = (type, baseName, baseDesc) => {
+ const namePrefix = (type.item_name_prefix || '').trimEnd();
+ if (namePrefix && baseName.startsWith(namePrefix)) {
+ setNameSuffix(baseName.slice(namePrefix.length).replace(/^\s+/, ''));
+ } else {
+ setNameSuffix(baseName);
+ }
+
+ const descPrefix = (type.item_description_prefix || '').trim();
+ if (descPrefix && baseDesc.startsWith(descPrefix)) {
+ setDescSuffix(baseDesc.slice(descPrefix.length).replace(/^\s+/, ''));
+ } else {
+ setDescSuffix(baseDesc);
+ }
+
+ setName('');
+ setDescription('');
+ setCustomFields((prev) => {
+ const cf = { ...prev };
+ const defaults = type.default_custom_fields || {};
+ Object.entries(defaults).forEach(([key, value]) => {
+ cf[key] = value;
+ });
+ const locked = Array.isArray(type.locked_custom_field_keys) ? type.locked_custom_field_keys : [];
+ for (const key of locked) {
+ if (!(key in cf)) cf[key] = Object.prototype.hasOwnProperty.call(defaults, key) ? defaults[key] : '';
+ }
+ return cf;
+ });
+
+ const lockedTeams = (Array.isArray(type.locked_team_names) ? type.locked_team_names : []).map((x) =>
+ String(x).toLowerCase()
+ );
+ setSelectedTeams((prev) => Array.from(new Set([...lockedTeams, ...prev])));
+
+ if (categoryIdToName.size) {
+ const lockedCategoryNames = (Array.isArray(type.locked_category_ids) ? type.locked_category_ids : [])
+ .map((id) => categoryIdToName.get(id))
+ .filter(Boolean);
+ setSelectedCategories((prev) => Array.from(new Set([...lockedCategoryNames, ...prev])));
+ }
+
+ if (type.image) {
+ setImage(null);
+ setImagePreview(type.image);
+ } else if (originalItem?.type_has_template_image) {
+ setImage(null);
+ setImagePreview(null);
+ }
+ };
+
+ const handleSupplyTypeChange = async (nextId) => {
+ if (String(nextId || '') === String(selectedSupplyTypeId || '')) return;
+
+ const { currentName, currentDesc } = getCurrentNameDescription();
+ setSelectedSupplyTypeId(nextId);
+
+ if (!nextId) {
+ setName(currentName);
+ setDescription(currentDesc);
+ setNameSuffix('');
+ setDescSuffix('');
+ setLinkedType(null);
+ setUnlinkFromType(true);
+ if (originalItem?.type_has_template_image) {
+ setImage(null);
+ setImagePreview(null);
+ }
+ return;
+ }
+
+ setUnlinkFromType(false);
+ let type = supplyTypes.find((x) => String(x.id) === String(nextId));
+ if (!type) {
+ try {
+ type = await api.getSupplyType(nextId);
+ } catch {
+ type = null;
+ }
+ }
+ if (!type) return;
+
+ setLinkedType(type);
+ applyTypeToEditableFields(type, currentName, currentDesc);
+ };
+
+ const handleSave = async () => {
+ let finalName;
+ let finalDesc;
+ if (unlinkFromType) {
+ finalName = name.trim();
+ finalDesc = description.trim() || null;
+ } else if (useTypePrefixUi && linkedType) {
+ finalName = joinPrefixSuffix(linkedType.item_name_prefix, nameSuffix).trim();
+ const d = joinPrefixSuffix(linkedType.item_description_prefix || '', descSuffix).trim();
+ finalDesc = d || null;
+ } else {
+ finalName = name.trim();
+ finalDesc = description.trim() || null;
+ }
+
+ if (finalName && itemName) {
+ if (!areNumberCustomFieldsValid(customFields, customFieldDefinitions)) {
+ await showAlert('Please enter a valid number in all number fields (or leave them empty).');
+ return;
+ }
+ if (
+ lockedFieldKeys.length > 0 &&
+ !areLockedCustomFieldsFilled(customFields, lockedFieldKeys, customFieldDefinitions)
+ ) {
+ await showAlert(
+ 'This item type requires a value in each listed custom field. Fill any empty required fields before saving.'
+ );
+ return;
+ }
+
+ // Convert category names to IDs
+ const categoryIds = selectedCategories
+ .map(catName => categoryNameToId.get(catName))
+ .filter(id => id !== undefined);
+
+ const updatedItem = {
+ name: finalName,
+ description: finalDesc,
+ image: typeBlocksOwnImage ? null : image || null,
+ teams: selectedTeams.length > 0 ? selectedTeams : [],
+ categories: categoryIds.length > 0 ? categoryIds : [],
+ custom_fields: customFields,
+ locations: originalItem?.locations || []
+ };
+ if (unlinkFromType) {
+ updatedItem.unlink_from_type = true;
+ }
+ if (String(selectedSupplyTypeId || '') !== String(originalItem?.supply_type_id || '')) {
+ updatedItem.supply_type_id = selectedSupplyTypeId ? Number(selectedSupplyTypeId) : null;
+ }
+
+ updateMasterItem(itemName, updatedItem);
+ onClose();
+ }
+ };
+
+ const handleCancel = () => {
+ onClose();
+ };
+
+ const handleOverlayClick = (e) => {
+ if (e.target === e.currentTarget) {
+ handleCancel();
+ }
+ };
+
+ const handleKeyDown = (e) => {
+ if (e.key === 'Escape') {
+ handleCancel();
+ } else if (e.key === 'Enter' && (e.ctrlKey || e.metaKey)) {
+ e.preventDefault();
+ handleSave();
+ }
+ };
+
+ if (!isOpen || !originalItem) return null;
+
+ return (
+
+
+
Edit Master Item
+
+
+ {useTypePrefixUi ? (
+ <>
+
+ {(() => {
+ const nameFix = linkedType?.item_name_prefix ?? '';
+ const hasNameFix = String(nameFix).length > 0;
+ return hasNameFix ? (
+
+ {nameFix}
+ setNameSuffix(e.target.value)}
+ />
+
+ ) : (
+
setNameSuffix(e.target.value)}
+ />
+ );
+ })()}
+
+ {(() => {
+ const descFix = linkedType?.item_description_prefix ?? '';
+ const hasDescFix = String(descFix).trim().length > 0;
+ return hasDescFix ? (
+
+
{descFix}
+
+ ) : (
+
+
+ );
+};
+
+export default MasterEditModal;
+
diff --git a/milventory/src/components/Master/MasterInventoryTable.js b/milventory/src/components/Master/MasterInventoryTable.js
new file mode 100644
index 0000000..44be2c9
--- /dev/null
+++ b/milventory/src/components/Master/MasterInventoryTable.js
@@ -0,0 +1,1903 @@
+import React, { useState, useMemo, useRef, useEffect, useCallback } from 'react';
+import { useInventory } from '../../context/InventoryContext';
+import MasterTableRow, { formatCustomValue, formatDate } from './MasterTableRow';
+import MasterCreateModal from './MasterCreateModal';
+import { getCategories, api } from '../../api';
+
+/** Sentinel for filter: items with no template type */
+const TYPE_FILTER_NONE = '__NO_TYPE__';
+
+const summarizeList = (values) => {
+ if (!values.length) return '—';
+ if (values.length === 1) return values[0];
+ return `${values[0]}, ... +${values.length - 1}`;
+};
+
+const uniqueSorted = (values) => Array.from(new Set(values.filter(Boolean))).sort();
+
+const MasterInventoryTable = () => {
+ const {
+ masterInventoryItems,
+ computeMasterQuantities,
+ getItemLocations,
+ setSelectedMasterItem,
+ selectedMasterItem,
+ dismissMasterWorkbenchUI,
+ inventoryData,
+ masterFilterLocation,
+ setMasterFilterLocation
+ } = useInventory();
+
+ const [searchQuery, setSearchQuery] = useState('');
+ const [showAddModal, setShowAddModal] = useState(false);
+ const [createFromType, setCreateFromType] = useState(true);
+ const [showFilterMenu, setShowFilterMenu] = useState(false);
+ /** Which facet list is visible; selections in all facets still combine with AND */
+ const [filterType, setFilterType] = useState('location');
+ const [selectedLocations, setSelectedLocations] = useState(new Set());
+ const [selectedCategories, setSelectedCategories] = useState(new Set());
+ const [selectedTypes, setSelectedTypes] = useState(new Set());
+ const [availableCategories, setAvailableCategories] = useState([]);
+ const [categoryIdToName, setCategoryIdToName] = useState(new Map());
+ const [categoryNameToId, setCategoryNameToId] = useState(new Map());
+ const filterButtonRef = React.useRef(null);
+ const columnButtonRef = React.useRef(null);
+ const groupButtonRef = React.useRef(null);
+
+ // Column visibility state - default hide category and team, show others
+ const [showTypeColumn, setShowTypeColumn] = useState(false);
+ const [showQtyColumn, setShowQtyColumn] = useState(true);
+ const [showLocationColumn, setShowLocationColumn] = useState(true);
+ const [showCategoryColumn, setShowCategoryColumn] = useState(false);
+ const [showTeamColumn, setShowTeamColumn] = useState(false);
+ const [showLastModifiedColumn, setShowLastModifiedColumn] = useState(true);
+ const [showColumnMenu, setShowColumnMenu] = useState(false);
+ const [customFieldDefinitions, setCustomFieldDefinitions] = useState([]);
+ const [visibleCustomColumns, setVisibleCustomColumns] = useState(new Set());
+
+ // Grouping state - null (no grouping) or 'type'
+ const [groupBy, setGroupBy] = useState(null);
+ const [showGroupMenu, setShowGroupMenu] = useState(false);
+ const [collapsedGroups, setCollapsedGroups] = useState(new Set());
+
+ // Sorting state - default to lastModified ascending (earliest first)
+ const [sortColumn, setSortColumn] = useState('lastModified');
+ const [sortDirection, setSortDirection] = useState('asc');
+
+ const quantities = computeMasterQuantities();
+
+ const filterHasSelection =
+ selectedLocations.size > 0 ||
+ selectedCategories.size > 0 ||
+ selectedTypes.size > 0;
+
+ const filterSelectionCount =
+ selectedLocations.size + selectedCategories.size + selectedTypes.size;
+
+ const filterStatusFooterText = useMemo(() => {
+ const parts = [];
+ if (selectedLocations.size > 0) {
+ parts.push(`${selectedLocations.size} location${selectedLocations.size === 1 ? '' : 's'}`);
+ }
+ if (selectedCategories.size > 0) {
+ parts.push(`${selectedCategories.size} categor${selectedCategories.size === 1 ? 'y' : 'ies'}`);
+ }
+ if (selectedTypes.size > 0) {
+ parts.push(`${selectedTypes.size} type${selectedTypes.size === 1 ? '' : 's'}`);
+ }
+ if (parts.length === 0) return 'No filters active — all items shown.';
+ return `Active filters (AND): ${parts.join(' · ')}`;
+ }, [selectedLocations, selectedCategories, selectedTypes]);
+
+ // Get all available locations from inventoryData
+ const availableLocations = useMemo(() => {
+ return Array.from(inventoryData.keys()).sort();
+ }, [inventoryData]);
+
+ // Helper function to get item categories (convert IDs to names)
+ const getItemCategories = useCallback((supplyPublicId) => {
+ const itemData = masterInventoryItems.get(supplyPublicId);
+ if (!itemData || !itemData.categories || itemData.categories.length === 0) {
+ return [];
+ }
+ return itemData.categories
+ .map(catId => categoryIdToName.get(catId))
+ .filter(name => name !== undefined)
+ .sort(); // Sort alphabetically for consistency
+ }, [masterInventoryItems, categoryIdToName]);
+
+ // Helper function to get item teams (capitalize first letter)
+ const getItemTeams = useCallback((supplyPublicId) => {
+ const itemData = masterInventoryItems.get(supplyPublicId);
+ if (!itemData || !itemData.teams || itemData.teams.length === 0) {
+ return [];
+ }
+ // Capitalize first letter of each team name
+ return [...itemData.teams]
+ .map(team => team.charAt(0).toUpperCase() + team.slice(1).toLowerCase())
+ .sort(); // Sort alphabetically for consistency
+ }, [masterInventoryItems]);
+
+ const availableTemplateTypes = useMemo(() => {
+ const names = new Set();
+ let hasUntyped = false;
+ for (const [, item] of masterInventoryItems.entries()) {
+ if (item.type_name) names.add(item.type_name);
+ else hasUntyped = true;
+ }
+ const sorted = Array.from(names).sort();
+ if (hasUntyped) sorted.unshift(TYPE_FILTER_NONE);
+ return sorted;
+ }, [masterInventoryItems]);
+
+ const typeFilterLabel = (key) => (key === TYPE_FILTER_NONE ? '(No type)' : key);
+
+ // Sync with context filter location (merge; keeps category/type selections)
+ useEffect(() => {
+ if (masterFilterLocation) {
+ setFilterType('location');
+ setSelectedLocations((prev) => new Set(prev).add(masterFilterLocation));
+ setMasterFilterLocation(null);
+ }
+ }, [masterFilterLocation, setMasterFilterLocation]);
+
+ // Fetch categories on mount (needed for both filtering and display)
+ useEffect(() => {
+ if (availableCategories.length === 0) {
+ getCategories()
+ .then(categories => {
+ const categoryList = categories.map(c => typeof c === 'string' ? c : c.name);
+ setAvailableCategories(categoryList);
+
+ // Build ID-to-name and name-to-ID mappings
+ const idToName = new Map();
+ const nameToId = new Map();
+ categories.forEach(cat => {
+ if (typeof cat === 'object' && cat.id && cat.name) {
+ idToName.set(cat.id, cat.name);
+ nameToId.set(cat.name, cat.id);
+ }
+ });
+ setCategoryIdToName(idToName);
+ setCategoryNameToId(nameToId);
+ })
+ .catch(err => {
+ console.error('Failed to fetch categories:', err);
+ setAvailableCategories([]);
+ setCategoryIdToName(new Map());
+ setCategoryNameToId(new Map());
+ });
+ }
+ }, [availableCategories.length]);
+
+ // Fetch custom field definitions for column options
+ useEffect(() => {
+ api.getCustomFieldDefinitions()
+ .then(setCustomFieldDefinitions)
+ .catch(() => setCustomFieldDefinitions([]));
+ }, []);
+
+ const toggleCustomColumn = useCallback((fieldName) => {
+ setVisibleCustomColumns(prev => {
+ const next = new Set(prev);
+ if (next.has(fieldName)) next.delete(fieldName);
+ else next.add(fieldName);
+ return next;
+ });
+ }, []);
+
+ const filteredItems = useMemo(() => {
+ const itemsArray = Array.from(masterInventoryItems.entries());
+
+ // Filter by search query
+ let filtered = itemsArray;
+ if (searchQuery.trim()) {
+ const query = searchQuery.toLowerCase();
+ filtered = filtered.filter(([, itemData]) => {
+ const nm = (itemData?.name || '').toLowerCase();
+ if (nm.includes(query)) return true;
+ const tn = (itemData?.type_name || '').toLowerCase();
+ return tn.includes(query);
+ });
+ }
+
+ // AND across dimensions: each active facet must match
+ if (selectedLocations.size > 0) {
+ filtered = filtered.filter(([supplyPublicId]) => {
+ const itemLocations = getItemLocations(supplyPublicId);
+ return itemLocations.some(loc => selectedLocations.has(loc));
+ });
+ }
+ if (selectedCategories.size > 0) {
+ filtered = filtered.filter(([, itemData]) => {
+ if (!itemData.categories || itemData.categories.length === 0) {
+ return false;
+ }
+ const itemCategoryNames = itemData.categories
+ .map(catId => categoryIdToName.get(catId))
+ .filter(name => name !== undefined);
+ return itemCategoryNames.some(catName => selectedCategories.has(catName));
+ });
+ }
+ if (selectedTypes.size > 0) {
+ filtered = filtered.filter(([, itemData]) => {
+ const key = itemData.type_name ? itemData.type_name : TYPE_FILTER_NONE;
+ return selectedTypes.has(key);
+ });
+ }
+
+ return filtered;
+ }, [masterInventoryItems, searchQuery, selectedLocations, selectedCategories, selectedTypes, getItemLocations, categoryIdToName]);
+
+ const handleLocationToggle = (location) => {
+ setSelectedLocations(prev => {
+ const next = new Set(prev);
+ if (next.has(location)) {
+ next.delete(location);
+ } else {
+ next.add(location);
+ }
+ return next;
+ });
+ };
+
+ const handleCategoryToggle = (category) => {
+ setSelectedCategories(prev => {
+ const next = new Set(prev);
+ if (next.has(category)) {
+ next.delete(category);
+ } else {
+ next.add(category);
+ }
+ return next;
+ });
+ };
+
+ const handleClearFilters = () => {
+ setSelectedLocations(new Set());
+ setSelectedCategories(new Set());
+ setSelectedTypes(new Set());
+ };
+
+ const handleFilterTypeChange = (type) => {
+ setFilterType(type);
+ };
+
+ const handleTypeToggle = (typeKey) => { // Toggle a type key (may be the no-type sentinel) in the type filter facet.
+ setSelectedTypes(prev => {
+ const next = new Set(prev); // copy-on-write: never mutate React state in place
+ if (next.has(typeKey)) next.delete(typeKey);
+ else next.add(typeKey);
+ return next;
+ });
+ };
+
+ // Close filter menu when clicking outside
+ useEffect(() => {
+ if (!showFilterMenu) return; // no listener while the menu is closed
+
+ const handleClickOutside = (event) => {
+ if (filterButtonRef.current && !filterButtonRef.current.contains(event.target)) { // click landed outside the filter button subtree
+ setShowFilterMenu(false);
+ }
+ };
+
+ // Use a small delay to avoid closing immediately when opening
+ const timeoutId = setTimeout(() => {
+ document.addEventListener('mousedown', handleClickOutside);
+ }, 100);
+
+ return () => {
+ clearTimeout(timeoutId);
+ document.removeEventListener('mousedown', handleClickOutside); // safe even if the listener was never attached
+ };
+ }, [showFilterMenu]);
+
+ // Close column menu when clicking outside
+ useEffect(() => {
+ if (!showColumnMenu) return; // no listener while the menu is closed
+
+ const handleClickOutside = (event) => {
+ if (columnButtonRef.current && !columnButtonRef.current.contains(event.target)) { // click landed outside the column button subtree
+ setShowColumnMenu(false);
+ }
+ };
+
+ // Use a small delay to avoid closing immediately when opening
+ const timeoutId = setTimeout(() => {
+ document.addEventListener('mousedown', handleClickOutside);
+ }, 100);
+
+ return () => {
+ clearTimeout(timeoutId);
+ document.removeEventListener('mousedown', handleClickOutside); // safe even if the listener was never attached
+ };
+ }, [showColumnMenu]);
+
+ // Close group menu when clicking outside
+ useEffect(() => {
+ if (!showGroupMenu) return; // no listener while the menu is closed
+
+ const handleClickOutside = (event) => {
+ if (groupButtonRef.current && !groupButtonRef.current.contains(event.target)) { // click landed outside the group button subtree
+ setShowGroupMenu(false);
+ }
+ };
+
+ const timeoutId = setTimeout(() => { // small delay so the opening click does not immediately close the menu
+ document.addEventListener('mousedown', handleClickOutside);
+ }, 100);
+
+ return () => {
+ clearTimeout(timeoutId);
+ document.removeEventListener('mousedown', handleClickOutside); // safe even if the listener was never attached
+ };
+ }, [showGroupMenu]);
+
+ const sortedItems = useMemo(() => {
+ const items = [...filteredItems];
+
+ if (items.length === 0) return items;
+
+ return items.sort(([pidA, itemDataA], [pidB, itemDataB]) => {
+ let comparison = 0;
+
+ switch (sortColumn) {
+ case 'name': {
+ comparison = (itemDataA?.name || '').localeCompare(itemDataB?.name || '');
+ if (comparison === 0) comparison = pidA.localeCompare(pidB);
+ break;
+ }
+ case 'qty':
+ const qtyA = quantities.get(pidA) || 0;
+ const qtyB = quantities.get(pidB) || 0;
+ comparison = qtyA - qtyB;
+ break;
+ case 'location':
+ const locsA = getItemLocations(pidA);
+ const locsB = getItemLocations(pidB);
+ // Sort by first location name, or by count if no locations
+ if (locsA.length === 0 && locsB.length === 0) {
+ comparison = 0;
+ } else if (locsA.length === 0) {
+ comparison = 1; // Items with no locations go to end
+ } else if (locsB.length === 0) {
+ comparison = -1;
+ } else {
+ comparison = locsA[0].localeCompare(locsB[0]);
+ }
+ break;
+ case 'category':
+ const catsA = getItemCategories(pidA);
+ const catsB = getItemCategories(pidB);
+ // Sort by first category name, or by count if no categories
+ if (catsA.length === 0 && catsB.length === 0) {
+ comparison = 0;
+ } else if (catsA.length === 0) {
+ comparison = 1; // Items with no categories go to end
+ } else if (catsB.length === 0) {
+ comparison = -1;
+ } else {
+ comparison = catsA[0].localeCompare(catsB[0]);
+ }
+ break;
+ case 'team':
+ const teamsA = getItemTeams(pidA);
+ const teamsB = getItemTeams(pidB);
+ // Sort by first team name, or by count if no teams
+ if (teamsA.length === 0 && teamsB.length === 0) {
+ comparison = 0;
+ } else if (teamsA.length === 0) {
+ comparison = 1; // Items with no teams go to end
+ } else if (teamsB.length === 0) {
+ comparison = -1;
+ } else {
+ comparison = teamsA[0].localeCompare(teamsB[0]);
+ }
+ break;
+ case 'lastModified':
+ const dateA = itemDataA.lastModified ? new Date(itemDataA.lastModified).getTime() : 0;
+ const dateB = itemDataB.lastModified ? new Date(itemDataB.lastModified).getTime() : 0;
+ comparison = dateA - dateB;
+ break;
+ default: {
+ const customDef = customFieldDefinitions.find(d => d.name === sortColumn);
+ if (customDef) {
+ const valA = itemDataA.custom_fields?.[sortColumn];
+ const valB = itemDataB.custom_fields?.[sortColumn];
+ if (customDef.type === 'number') {
+ const nA = valA !== undefined && valA !== null && valA !== '' ? Number(valA) : NaN;
+ const nB = valB !== undefined && valB !== null && valB !== '' ? Number(valB) : NaN;
+ comparison = (Number.isNaN(nA) ? 1 : 0) - (Number.isNaN(nB) ? 1 : 0) || nA - nB;
+ } else if (customDef.type === 'date') {
+ const tA = valA ? new Date(valA).getTime() : 0;
+ const tB = valB ? new Date(valB).getTime() : 0;
+ comparison = tA - tB;
+ } else {
+ const sA = valA != null ? String(valA) : '';
+ const sB = valB != null ? String(valB) : '';
+ comparison = sA.localeCompare(sB);
+ }
+ } else {
+ comparison = 0;
+ }
+ break;
+ }
+ }
+
+ return sortDirection === 'asc' ? comparison : -comparison;
+ });
+ }, [filteredItems, sortColumn, sortDirection, quantities, getItemLocations, getItemCategories, getItemTeams, customFieldDefinitions]);
+
+ const handleRowClick = (itemName) => { // Select a master item (opens the preview pane driven by selectedMasterItem).
+ setSelectedMasterItem(itemName);
+ };
+
+ const handleAddItem = () => { // Dismiss any open workbench UI first, then open the create-item modal.
+ void dismissMasterWorkbenchUI().then(() => setShowAddModal(true)); // void: fire-and-forget promise, result intentionally unused
+ };
+
+ const handleSort = (column) => { // Header click: same column toggles direction, a new column sorts ascending.
+ if (sortColumn === column) {
+ // Toggle direction if clicking the same column
+ setSortDirection(prev => prev === 'asc' ? 'desc' : 'asc');
+ } else {
+ // Set new column and default to ascending
+ setSortColumn(column);
+ setSortDirection('asc');
+ }
+ };
+
+ const standardColumnCount = 6;
+
+ const handleShowAllColumns = () => { // Make every standard column and every custom-field column visible.
+ setShowTypeColumn(true);
+ setShowQtyColumn(true);
+ setShowLocationColumn(true);
+ setShowCategoryColumn(true);
+ setShowTeamColumn(true);
+ setShowLastModifiedColumn(true);
+ setVisibleCustomColumns(new Set(customFieldDefinitions.map(d => d.name))); // custom columns are tracked by field name
+ };
+
+ const handleHideAllColumns = () => { // Hide every optional column (standard and custom).
+ setShowTypeColumn(false);
+ setShowQtyColumn(false);
+ setShowLocationColumn(false);
+ setShowCategoryColumn(false);
+ setShowTeamColumn(false);
+ setShowLastModifiedColumn(false);
+ setVisibleCustomColumns(new Set()); // empty set = no custom columns shown
+ };
+
+ // Count visible columns (excluding Name which is always visible)
+ const visibleColumnCount = [
+ showTypeColumn,
+ showQtyColumn,
+ showLocationColumn,
+ showCategoryColumn,
+ showTeamColumn,
+ showLastModifiedColumn
+ ].filter(Boolean).length + visibleCustomColumns.size;
+
+ const TYPE_GROUP_UNTYPED = '__NO_TYPE__';
+
+ const groupedRows = useMemo(() => {
+ if (groupBy !== 'type') return null;
+ const buckets = new Map();
+ for (const entry of sortedItems) {
+ const [, itemData] = entry;
+ const key = itemData.type_name || TYPE_GROUP_UNTYPED;
+ if (!buckets.has(key)) buckets.set(key, []);
+ buckets.get(key).push(entry);
+ }
+ const groups = Array.from(buckets.keys()).map((key) => {
+ const items = buckets.get(key);
+ const lastModifiedDates = items
+ .map(([, itemData]) => itemData.lastModified ? new Date(itemData.lastModified) : null)
+ .filter((date) => date && !Number.isNaN(date.getTime()));
+ const latestLastModified = lastModifiedDates.length
+ ? new Date(Math.max(...lastModifiedDates.map((date) => date.getTime()))).toISOString()
+ : null;
+
+ return {
+ key,
+ label: key === TYPE_GROUP_UNTYPED ? '(No type)' : key,
+ items,
+ totalQty: items.reduce((sum, [itemName]) => sum + (quantities.get(itemName) || 0), 0),
+ locations: uniqueSorted(items.flatMap(([itemName]) => getItemLocations(itemName))),
+ categories: uniqueSorted(items.flatMap(([itemName]) => getItemCategories(itemName))),
+ teams: uniqueSorted(items.flatMap(([itemName]) => getItemTeams(itemName))),
+ latestLastModified
+ };
+ });
+
+ const compareGroupLists = (aList, bList) => {
+ if (aList.length === 0 && bList.length === 0) return 0;
+ if (aList.length === 0) return 1;
+ if (bList.length === 0) return -1;
+ return aList[0].localeCompare(bList[0]) || (aList.length - bList.length);
+ };
+
+ groups.sort((a, b) => {
+ let comparison = 0;
+
+ switch (sortColumn) {
+ case 'typeName':
+ case 'name':
+ comparison = a.label.localeCompare(b.label);
+ break;
+ case 'qty':
+ comparison = a.totalQty - b.totalQty;
+ break;
+ case 'location':
+ comparison = compareGroupLists(a.locations, b.locations);
+ break;
+ case 'category':
+ comparison = compareGroupLists(a.categories, b.categories);
+ break;
+ case 'team':
+ comparison = compareGroupLists(a.teams, b.teams);
+ break;
+ case 'lastModified': {
+ const aTime = a.latestLastModified ? new Date(a.latestLastModified).getTime() : 0;
+ const bTime = b.latestLastModified ? new Date(b.latestLastModified).getTime() : 0;
+ comparison = aTime - bTime;
+ break;
+ }
+ default: {
+ const customDef = customFieldDefinitions.find(d => d.name === sortColumn);
+ if (customDef) {
+ const firstValue = (group) => {
+ const values = uniqueSorted(
+ group.items.map(([, itemData]) => formatCustomValue(itemData.custom_fields?.[customDef.name], customDef.type))
+ .filter((value) => value !== '—')
+ );
+ return values[0] || '';
+ };
+ if (customDef.type === 'number') {
+ const toNumber = (group) => {
+ const n = Number(firstValue(group));
+ return Number.isNaN(n) ? Number.POSITIVE_INFINITY : n;
+ };
+ comparison = toNumber(a) - toNumber(b);
+ } else {
+ comparison = firstValue(a).localeCompare(firstValue(b));
+ }
+ }
+ break;
+ }
+ }
+
+ if (comparison === 0) comparison = a.label.localeCompare(b.label);
+ return sortDirection === 'asc' ? comparison : -comparison;
+ });
+
+ return groups;
+ }, [
+ groupBy,
+ sortedItems,
+ quantities,
+ getItemLocations,
+ getItemCategories,
+ getItemTeams,
+ sortColumn,
+ sortDirection,
+ customFieldDefinitions
+ ]);
+
+ const toggleGroup = (key) => { // Collapse/expand one grouped-row section by its group key.
+ setCollapsedGroups((prev) => {
+ const next = new Set(prev); // copy-on-write: never mutate React state in place
+ if (next.has(key)) next.delete(key);
+ else next.add(key);
+ return next;
+ });
+ };
+
+ const SortIcon = ({ column }) => {
+ if (sortColumn !== column) {
+ // Show neutral sort icon when not active
+ return (
+
+ );
+ }
+
+ // Show active sort icon
+ return (
+
+ );
+ };
+
+ return (
+ <>
+
+
+
Master Inventory
+
+
+
setSearchQuery(e.target.value)}
+ className="master-search-input"
+ />
+
+
+
{
+ e.stopPropagation();
+ setShowFilterMenu(!showFilterMenu);
+ }}
+ style={{
+ padding: '0.4rem',
+ fontSize: '1rem',
+ background: filterHasSelection ? 'var(--accent)' : 'transparent',
+ border: '1px solid var(--stroke)',
+ color: filterHasSelection ? 'white' : 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ display: 'flex',
+ alignItems: 'center',
+ justifyContent: 'center',
+ width: '32px',
+ height: '32px',
+ position: 'relative',
+ transition: 'all 0.2s'
+ }}
+ title="Filter items"
+ onMouseEnter={(e) => {
+ if (!filterHasSelection) {
+ e.currentTarget.style.background = 'rgba(255, 255, 255, 0.1)';
+ }
+ }}
+ onMouseLeave={(e) => {
+ if (!filterHasSelection) {
+ e.currentTarget.style.background = 'transparent';
+ }
+ }}
+ >
+
+ {filterHasSelection && (
+
+ {filterSelectionCount}
+
+ )}
+
+ {filterHasSelection && (
+
+ Clear
+
+ )}
+
+ {/* Filter Dropdown Menu */}
+ {showFilterMenu && (
+
e.stopPropagation()}
+ >
+
+
+
+ Filter
+
+
+
+
+ Switch dimension to edit another facet; selections combine with AND.
+
+
+
+ {filterType === 'location' ? (
+
+ {availableLocations.length === 0 ? (
+
+ No locations available
+
+ ) : (
+ <>
+
+ setSelectedLocations(new Set(availableLocations))}
+ style={{
+ padding: '0.3rem 0.6rem',
+ fontSize: '0.75rem',
+ background: 'transparent',
+ border: '1px solid var(--stroke)',
+ color: 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ flex: 1
+ }}
+ >
+ Select All
+
+ setSelectedLocations(new Set())}
+ style={{
+ padding: '0.3rem 0.6rem',
+ fontSize: '0.75rem',
+ background: 'transparent',
+ border: '1px solid var(--stroke)',
+ color: 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ flex: 1
+ }}
+ >
+ Clear
+
+
+
+ {availableLocations.map(location => (
+
+ ))}
+
+ >
+ )}
+
+ ) : filterType === 'category' ? (
+
+ {availableCategories.length === 0 ? (
+
+ No categories available
+
+ ) : (
+ <>
+
+ setSelectedCategories(new Set(availableCategories))}
+ style={{
+ padding: '0.3rem 0.6rem',
+ fontSize: '0.75rem',
+ background: 'transparent',
+ border: '1px solid var(--stroke)',
+ color: 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ flex: 1
+ }}
+ >
+ Select All
+
+ setSelectedCategories(new Set())}
+ style={{
+ padding: '0.3rem 0.6rem',
+ fontSize: '0.75rem',
+ background: 'transparent',
+ border: '1px solid var(--stroke)',
+ color: 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ flex: 1
+ }}
+ >
+ Clear
+
+
+
+ {availableCategories.map(category => (
+
+ ))}
+
+ >
+ )}
+
+ ) : (
+
+ {availableTemplateTypes.length === 0 ? (
+
+ No types in inventory
+
+ ) : (
+ <>
+
+ setSelectedTypes(new Set(availableTemplateTypes))}
+ style={{
+ padding: '0.3rem 0.6rem',
+ fontSize: '0.75rem',
+ background: 'transparent',
+ border: '1px solid var(--stroke)',
+ color: 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ flex: 1
+ }}
+ >
+ Select All
+
+ setSelectedTypes(new Set())}
+ style={{
+ padding: '0.3rem 0.6rem',
+ fontSize: '0.75rem',
+ background: 'transparent',
+ border: '1px solid var(--stroke)',
+ color: 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ flex: 1
+ }}
+ >
+ Clear
+
+
+
+ {availableTemplateTypes.map((typeKey) => (
+
+ ))}
+
+ >
+ )}
+
+ )}
+
+ {/* Status Footer */}
+
+ {filterStatusFooterText}
+
+
+ )}
+
+ {/* Column Visibility Button */}
+
+
{
+ e.stopPropagation();
+ setShowColumnMenu(!showColumnMenu);
+ }}
+ style={{
+ padding: '0.4rem',
+ fontSize: '1rem',
+ background: visibleColumnCount < (standardColumnCount + customFieldDefinitions.length) ? 'var(--accent)' : 'transparent',
+ border: '1px solid var(--stroke)',
+ color: visibleColumnCount < (standardColumnCount + customFieldDefinitions.length) ? 'white' : 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ display: 'flex',
+ alignItems: 'center',
+ justifyContent: 'center',
+ width: '32px',
+ height: '32px',
+ position: 'relative',
+ transition: 'all 0.2s'
+ }}
+ title="Show/hide columns"
+ onMouseEnter={(e) => {
+ if (visibleColumnCount === standardColumnCount + customFieldDefinitions.length) {
+ e.currentTarget.style.background = 'rgba(255, 255, 255, 0.1)';
+ }
+ }}
+ onMouseLeave={(e) => {
+ if (visibleColumnCount === standardColumnCount + customFieldDefinitions.length) {
+ e.currentTarget.style.background = 'transparent';
+ }
+ }}
+ >
+
+ {visibleColumnCount < standardColumnCount + customFieldDefinitions.length && (
+
+ {standardColumnCount + customFieldDefinitions.length - visibleColumnCount}
+
+ )}
+
+
+ {/* Column Visibility Dropdown Menu */}
+ {showColumnMenu && (
+
e.stopPropagation()}
+ >
+
+ Show Columns
+
+
+ {
+ e.currentTarget.style.background = 'rgba(100, 150, 255, 0.3)';
+ }}
+ onMouseLeave={(e) => {
+ e.currentTarget.style.background = 'rgba(100, 150, 255, 0.2)';
+ }}
+ >
+ Show All
+
+ {
+ e.currentTarget.style.background = 'rgba(255, 255, 255, 0.05)';
+ }}
+ onMouseLeave={(e) => {
+ e.currentTarget.style.background = 'transparent';
+ }}
+ >
+ Hide All
+
+
+
+
+
+
+
+
+
+ {customFieldDefinitions.length > 0 && (
+ <>
+
+ Custom fields
+
+ {customFieldDefinitions.map(d => (
+
+ ))}
+ >
+ )}
+
+
+ )}
+
+ {/* Group Button */}
+
+
{
+ e.stopPropagation();
+ setShowGroupMenu(!showGroupMenu);
+ }}
+ style={{
+ padding: '0.4rem',
+ fontSize: '1rem',
+ background: groupBy ? 'var(--accent)' : 'transparent',
+ border: '1px solid var(--stroke)',
+ color: groupBy ? 'white' : 'var(--text)',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ display: 'flex',
+ alignItems: 'center',
+ justifyContent: 'center',
+ width: '32px',
+ height: '32px',
+ position: 'relative',
+ transition: 'all 0.2s'
+ }}
+ title="Group rows"
+ onMouseEnter={(e) => {
+ if (!groupBy) {
+ e.currentTarget.style.background = 'rgba(255, 255, 255, 0.1)';
+ }
+ }}
+ onMouseLeave={(e) => {
+ if (!groupBy) {
+ e.currentTarget.style.background = 'transparent';
+ }
+ }}
+ >
+
+
+
+ {/* Group Dropdown Menu */}
+ {showGroupMenu && (
+
e.stopPropagation()}
+ >
+
+ Group by
+
+
+
+
+
+
+ )}
+
+
+
+
+ {sortedItems.length === 0 ? (
+
+ {searchQuery || filterHasSelection
+ ? 'No items found matching filters'
+ : 'No Master items. Click "+ Create Item" to create one.'}
+
+ ) : (
+
+
+
+ {showTypeColumn && (
+ | handleSort('typeName')}
+ >
+
+ Type
+
+
+ |
+ )}
+ handleSort('name')}
+ >
+
+ Name
+
+
+ |
+ {showQtyColumn && (
+ handleSort('qty')}
+ >
+
+ Qty
+
+
+ |
+ )}
+ {showLocationColumn && (
+ handleSort('location')}
+ >
+
+ Location
+
+
+ |
+ )}
+ {showCategoryColumn && (
+ handleSort('category')}
+ >
+
+ Category
+
+
+ |
+ )}
+ {showTeamColumn && (
+ handleSort('team')}
+ >
+
+ Team
+
+
+ |
+ )}
+ {showLastModifiedColumn && (
+ handleSort('lastModified')}
+ >
+
+ Last Modified
+
+
+ |
+ )}
+ {customFieldDefinitions.filter(d => visibleCustomColumns.has(d.name)).map(d => (
+ handleSort(d.name)}
+ >
+
+ {d.name}
+
+
+ |
+ ))}
+
+
+
+ {groupBy === 'type' && groupedRows ? (
+ groupedRows.map((group) => {
+ const isCollapsed = collapsedGroups.has(group.key);
+ return (
+
+ toggleGroup(group.key)}
+ style={{
+ background: 'rgba(100, 150, 255, 0.08)',
+ cursor: 'pointer',
+ userSelect: 'none',
+ borderTop: '1px solid var(--stroke)',
+ }}
+ >
+ {showTypeColumn && (
+ |
+ {group.label}
+ |
+ )}
+
+
+
+
+ {group.label}
+
+
+ |
+ {showQtyColumn && (
+ {group.totalQty} |
+ )}
+ {showLocationColumn && (
+
+ {summarizeList(group.locations)}
+ |
+ )}
+ {showCategoryColumn && (
+
+ {summarizeList(group.categories)}
+ |
+ )}
+ {showTeamColumn && (
+
+ {summarizeList(group.teams)}
+ |
+ )}
+ {showLastModifiedColumn && (
+
+ {formatDate(group.latestLastModified)}
+ |
+ )}
+ {customFieldDefinitions.filter(d => visibleCustomColumns.has(d.name)).map(d => {
+ const values = uniqueSorted(
+ group.items.map(([, itemData]) => formatCustomValue(itemData.custom_fields?.[d.name], d.type))
+ .filter((value) => value !== '—')
+ );
+ return (
+
+ {summarizeList(values)}
+ |
+ );
+ })}
+
+ {!isCollapsed && group.items.map(([itemName, itemData]) => (
+ handleRowClick(itemName)}
+ />
+ ))}
+
+ );
+ })
+ ) : (
+ sortedItems.map(([itemName, itemData]) => (
+ handleRowClick(itemName)}
+ />
+ ))
+ )}
+
+
+ )}
+
+
+
+ + Create Item
+
+
+
+
+ setShowAddModal(false)}
+ showTypeSelector={createFromType}
+ />
+ >
+ );
+};
+
+export default MasterInventoryTable;
+
+
+
+
diff --git a/milventory/src/components/Master/MasterItemPreview.js b/milventory/src/components/Master/MasterItemPreview.js
new file mode 100644
index 0000000..7311047
--- /dev/null
+++ b/milventory/src/components/Master/MasterItemPreview.js
@@ -0,0 +1,391 @@
+import React, { useRef, useState, useEffect } from 'react';
+import { useInventory } from '../../context/InventoryContext';
+import MasterEditModal from './MasterEditModal';
+import { getCategories, api } from '../../api';
+import { formatCustomValue } from './MasterTableRow';
+import { formatEasternDateTime } from '../../utils/appTimeZone';
+import { useBlockingDialog } from '../Common/BlockingDialogContext';
+import { hasShelves, getShelfCount, getShelfLabel } from '../../utils/shelfLabels';
+
+const MasterItemPreview = () => {
+ const { showConfirm } = useBlockingDialog();
+ const {
+ selectedMasterItem,
+ resolveMasterItem,
+ getItemLocations,
+ inventoryData,
+ clearSelectedMasterItem,
+ deleteMasterItem,
+ startAddMode,
+ startMoveMode,
+ startSubtractMode,
+ finishMoveMode,
+ cancelMoveMode,
+ moveModeItem,
+ leftPaneWidth,
+ leftPaneCollapsed,
+ freePlaceModeItem,
+ freePlacementsBySupplyPublicId,
+ startFreePlaceMode,
+ cancelFreePlaceMode,
+ finishFreePlaceMode
+ } = useInventory();
+
+ const previewRef = useRef(null);
+ const [editingItem, setEditingItem] = useState(null);
+ const [categoryIdToName, setCategoryIdToName] = useState(new Map());
+ const [customFieldDefinitions, setCustomFieldDefinitions] = useState([]);
+ const [freeCoordsOpen, setFreeCoordsOpen] = useState(true);
+ /** 'landscape' = wider or square → image below text; 'portrait' = taller → image right */
+ const [descImageLayout, setDescImageLayout] = useState('landscape');
+
+ // Fetch category mapping for display
+ useEffect(() => { // one-time load of category id → name mapping for rendering category names
+ getCategories()
+ .then(categories => {
+ const mapping = new Map();
+ categories.forEach(cat => {
+ if (typeof cat === 'object' && cat.id && cat.name) { // only well-formed {id, name} entries are mapped
+ mapping.set(cat.id, cat.name);
+ }
+ });
+ setCategoryIdToName(mapping);
+ })
+ .catch(console.error);
+ }, []);
+
+ useEffect(() => { // one-time load of custom field definitions (used to format custom-field values by type)
+ api.getCustomFieldDefinitions()
+ .then(data => setCustomFieldDefinitions(Array.isArray(data) ? data : [])) // defensive: coerce non-array responses to []
+ .catch(() => setCustomFieldDefinitions([])); // on failure fall back to no custom fields
+ }, []);
+
+ const item = selectedMasterItem ? resolveMasterItem(selectedMasterItem) : null;
+ const locations = selectedMasterItem ? getItemLocations(selectedMasterItem) : [];
+
+ // Convert category IDs to names for display
+ const categoryNames = item?.categories
+ ? item.categories.map(catId => categoryIdToName.get(catId)).filter(name => name !== undefined)
+ : [];
+
+ // Box/shelf locations (with qty); free coordinates listed separately (always one unit per coordinate)
+ const boxLocationDetails = [];
+ const freeCoordinatePairs = [];
+ if (selectedMasterItem) {
+ locations.forEach(boxTitle => {
+ const boxData = inventoryData.get(boxTitle);
+ if (!boxData) return;
+ const matchingItems = boxData.inventory.filter((i) => {
+ const k =
+ i.supplyPublicId ||
+ (i.supplyId != null ? `__legacy_id_${i.supplyId}` : null);
+ return k === selectedMasterItem;
+ });
+ if (hasShelves(boxData)) {
+ const shelfCount = getShelfCount(boxData);
+ matchingItems.forEach(i => {
+ const shelfIdx = i.shelf ?? 0;
+ const shelfName = getShelfLabel(shelfIdx, shelfCount);
+ boxLocationDetails.push({ label: `${boxTitle} → ${shelfName}`, qty: i.qty });
+ });
+ } else {
+ const totalQty = matchingItems.reduce((sum, i) => sum + (i.qty || 0), 0);
+ boxLocationDetails.push({ label: boxTitle, qty: totalQty });
+ }
+ });
+ const freeDots = freePlacementsBySupplyPublicId.get(selectedMasterItem) || [];
+ freeDots.forEach((p) => {
+ freeCoordinatePairs.push({ x: Math.round(p.x), y: Math.round(p.y) });
+ });
+ }
+
+ // Calculate position to the right of left pane
+ const leftPaneActualWidth = leftPaneCollapsed ? 40 : leftPaneWidth;
+ const positionX = leftPaneActualWidth + 20;
+ const positionY = 20;
+
+ const handleDeleteItem = async () => { // Delete the selected master item, confirming first if it is placed anywhere on the map.
+ const freeDots = freePlacementsBySupplyPublicId.get(selectedMasterItem) || [];
+ const placeCount = locations.length + freeDots.length; // box/shelf locations plus free-floor placements
+ if (placeCount > 0) {
+ const confirmed = await showConfirm(
+ `This item appears in ${placeCount} place(s) on the map (boxes and/or floor). Delete from everywhere?`,
+ { title: 'Delete item', danger: true, confirmLabel: 'Delete all', cancelLabel: 'Cancel' }
+ );
+ if (!confirmed) return; // user cancelled — keep the item
+ }
+ deleteMasterItem(selectedMasterItem);
+ };
+
+ const handleAddToBoxes = () => { // Delegate to the inventory context to begin add mode for the selected item.
+ startAddMode(selectedMasterItem);
+ };
+
+ const handleMove = () => { // Delegate to the inventory context to begin move mode for the selected item.
+ startMoveMode(selectedMasterItem);
+ };
+
+ const handleSubtract = () => { // Delegate to the inventory context to begin subtract mode for the selected item.
+ startSubtractMode(selectedMasterItem);
+ };
+
+ const handleEdit = () => { // Open the edit modal for the selected item.
+ setEditingItem(selectedMasterItem);
+ };
+
+ const isInMoveMode = moveModeItem === selectedMasterItem;
+ const isInFreePlaceMode = freePlaceModeItem === selectedMasterItem;
+
+ const hasDescriptionText = Boolean(item?.description);
+ const hasPreviewImage = Boolean(item?.image);
+
+ useEffect(() => { // reset description/image layout to 'landscape' whenever the selection or its image changes
+ setDescImageLayout('landscape');
+ }, [selectedMasterItem, item?.image]);
+
+ useEffect(() => { // close the edit modal when the selection is cleared
+ if (!selectedMasterItem) setEditingItem(null);
+ }, [selectedMasterItem]);
+
+ if (!selectedMasterItem || !item) return null;
+
+ const descriptionLayout =
+ hasPreviewImage && hasDescriptionText ? descImageLayout : hasPreviewImage ? 'landscape' : 'text-only';
+
+ return (
+ <>
+
+
+
+
{item.name}
+ {
+ if (isInMoveMode) cancelMoveMode();
+ if (freePlaceModeItem === selectedMasterItem) cancelFreePlaceMode();
+ clearSelectedMasterItem();
+ }}
+ title="Close preview"
+ >
+ ×
+
+
+ {(hasDescriptionText || hasPreviewImage) && (
+
+
{hasDescriptionText ? 'Description:' : 'Image:'}
+
+ {hasDescriptionText && (
+
+ )}
+ {hasPreviewImage && (
+
+

{
+ const { naturalWidth: w, naturalHeight: h } = e.currentTarget;
+ if (w > 0 && h > 0) {
+ setDescImageLayout(h > w ? 'portrait' : 'landscape');
+ }
+ }}
+ />
+
+ )}
+
+
+ )}
+ {(item.teams && item.teams.length > 0) && (
+
+
Teams:
+
+ {item.teams.map((team, idx) => (
+
+ {team}
+
+ ))}
+
+
+ )}
+ {categoryNames.length > 0 && (
+
+
Categories:
+
+ {categoryNames.map((category, idx) => (
+
+ {category}
+
+ ))}
+
+
+ )}
+ {item.custom_fields && Object.keys(item.custom_fields).length > 0 && (
+
+
Custom fields:
+
+ {Object.entries(item.custom_fields).map(([fieldName, value]) => {
+ const def = customFieldDefinitions.find(d => d.name === fieldName);
+ return (
+
+ {fieldName}:{' '}
+ {formatCustomValue(value, def?.type || 'text')}
+
+ );
+ })}
+
+
+ )}
+
+
Locations:
+ {boxLocationDetails.length === 0 && freeCoordinatePairs.length === 0 ? (
+
No locations
+ ) : (
+ <>
+ {boxLocationDetails.map((loc, idx) => (
+
+ {loc.label}{' '}
+ (Qty: {loc.qty})
+
+ ))}
+ {freeCoordinatePairs.length > 0 && (
+
+
setFreeCoordsOpen((o) => !o)}
+ aria-expanded={freeCoordsOpen}
+ aria-label={
+ freeCoordsOpen
+ ? 'Collapse free coordinates'
+ : 'Expand free coordinates'
+ }
+ >
+ Free Coordinates{' '}
+
+ (Qty: {freeCoordinatePairs.length})
+
+
+ {freeCoordsOpen && (
+
+ {freeCoordinatePairs.map((c, idx) => (
+ -
+ ({c.x}, {c.y})
+
+ ))}
+
+ )}
+
+ )}
+ >
+ )}
+
+ {item.last_modified_by_name && (
+
+ Last modified by: {item.last_modified_by_name}
+ {item.lastModified && (
+
+ ({formatEasternDateTime(item.lastModified)})
+
+ )}
+
+ )}
+
+
Actions:
+ {isInFreePlaceMode && (
+
+ Click the room floor to add an instance of the item. Drag to reposition, double-click to remove. Cancel discards all changes.
+
+ )}
+ {isInMoveMode && (
+
+ You are in Move mode. Number in boxes represents qty.
+
+ )}
+
+ {isInFreePlaceMode ? (
+ <>
+ void finishFreePlaceMode()} title="Save floor changes to the server">
+ Done
+
+
+ Cancel
+
+ >
+ ) : isInMoveMode ? (
+ <>
+
+ Apply Move
+
+
+ Cancel Move
+
+ >
+ ) : (
+ <>
+
+ Add
+
+
+ Subtract
+
+
+ Move
+
+
+ Edit
+
+
+ Delete Master Item
+
+ startFreePlaceMode(selectedMasterItem)} title="Place on room floor by coordinates">
+ Free place
+
+ >
+ )}
+
+
+
+
+ setEditingItem(null)} itemName={editingItem} />
+ >
+ );
+};
+
+export default MasterItemPreview;
diff --git a/milventory/src/components/Master/MasterTableRow.js b/milventory/src/components/Master/MasterTableRow.js
new file mode 100644
index 0000000..f6f17df
--- /dev/null
+++ b/milventory/src/components/Master/MasterTableRow.js
@@ -0,0 +1,104 @@
+import React from 'react';
+import {
+ easternCalendarDayKey,
+ formatEasternDateShort,
+ formatEasternDateOnly,
+ formatEasternTimeShort,
+} from '../../utils/appTimeZone';
+
+export const formatDate = (isoString) => { // Table display format: time only if the date is "today" (per the appTimeZone Eastern helpers), else short date + time.
+ if (!isoString) return '—'; // em-dash placeholder for missing dates
+ const date = new Date(isoString);
+ if (Number.isNaN(date.getTime())) return '—'; // unparseable input also renders as the placeholder
+ const now = new Date();
+ const isToday = easternCalendarDayKey(date) === easternCalendarDayKey(now); // same calendar day per the Eastern-time day key
+
+ const timeStr = formatEasternTimeShort(date);
+
+ if (isToday) {
+ return timeStr;
+ }
+
+ const dateStr = formatEasternDateShort(date);
+ return `${dateStr}, ${timeStr}`;
+};
+
+export const formatCustomValue = (value, type) => { // Render a custom-field value for display: '—' when empty, date-only formatting for 'date' fields, String() otherwise.
+ if (value === undefined || value === null || value === '') return '—';
+ if (type === 'date') {
+ try {
+ const d = new Date(value);
+ return Number.isNaN(d.getTime()) ? String(value) : formatEasternDateOnly(d); // fall back to raw text when not parseable as a date
+ } catch {
+ return String(value);
+ }
+ }
+ return String(value); // all other types: plain string coercion
+};
+
+const MasterTableRow = ({ itemName, itemData, quantity, locations, categories, teams, showType, showQty, showLocation, showCategory, showTeam, showLastModified, visibleCustomColumns, customFieldDefinitions, isSelected, onClick }) => {
+ // Build a truncated location string that fits the cell
+ const locationText = locations.length === 0
+ ? '—'
+ : locations.length === 1
+ ? locations[0]
+ : `${locations[0]}, ...+${locations.length - 1}`;
+
+ // Build a truncated category string that fits the cell (same strategy as locations)
+ const categoryText = categories.length === 0
+ ? '—'
+ : categories.length === 1
+ ? categories[0]
+ : `${categories[0]}, ...+${categories.length - 1}`;
+
+ // Build a truncated team string that fits the cell (same strategy as locations)
+ const teamText = teams.length === 0
+ ? '—'
+ : teams.length === 1
+ ? teams[0]
+ : `${teams[0]}, ...+${teams.length - 1}`;
+
+ return (
+
+ {showType && (
+ |
+ {itemData.type_name || '—'}
+ |
+ )}
+ {itemData?.name ?? itemName} |
+ {showQty && (
+ {quantity} |
+ )}
+ {showLocation && (
+
+ {locationText}
+ |
+ )}
+ {showCategory && (
+
+ {categoryText}
+ |
+ )}
+ {showTeam && (
+
+ {teamText}
+ |
+ )}
+ {showLastModified && (
+
+ {formatDate(itemData.lastModified)}
+ |
+ )}
+ {customFieldDefinitions?.filter(d => visibleCustomColumns?.has(d.name)).map(d => (
+
+ {formatCustomValue(itemData.custom_fields?.[d.name], d.type)}
+ |
+ ))}
+
+ );
+};
+
+export default MasterTableRow;
diff --git a/milventory/src/components/Master/UserItemTypesModal.css b/milventory/src/components/Master/UserItemTypesModal.css
new file mode 100644
index 0000000..47a12a4
--- /dev/null
+++ b/milventory/src/components/Master/UserItemTypesModal.css
@@ -0,0 +1,203 @@
+/* Two-pane body inside .history-modal (same shell as History modal) */
+.user-types-modal-body {
+ display: flex;
+ flex: 1;
+ min-height: 0;
+ overflow: hidden;
+}
+
+.user-types-left {
+ width: 280px;
+ min-width: 240px;
+ max-width: 320px;
+ display: flex;
+ flex-direction: column;
+ border-right: 1px solid rgba(255, 255, 255, 0.1);
+ background: rgba(0, 0, 0, 0.15);
+}
+
+.user-types-search {
+ padding: 0.75rem 1rem;
+ border-bottom: 1px solid rgba(255, 255, 255, 0.08);
+}
+
+.user-types-search input {
+ width: 100%;
+ box-sizing: border-box;
+ padding: 0.5rem 0.65rem;
+ border: 1px solid rgba(255, 255, 255, 0.15);
+ border-radius: 4px;
+ background: rgba(0, 0, 0, 0.3);
+ color: var(--text, #e6ebf4);
+ font-size: 0.9rem;
+}
+
+.user-types-search input:focus {
+ outline: none;
+ border-color: var(--accent, #9bb7ff);
+}
+
+.user-types-list {
+ flex: 1;
+ overflow-y: auto;
+ padding: 0.35rem 0;
+}
+
+.user-types-list-item {
+ display: block;
+ width: 100%;
+ text-align: left;
+ padding: 0.6rem 1rem;
+ border: none;
+ background: transparent;
+ color: var(--text, #e6ebf4);
+ cursor: pointer;
+ font-size: 0.9rem;
+ border-left: 3px solid transparent;
+}
+
+.user-types-list-item:hover {
+ background: rgba(255, 255, 255, 0.06);
+}
+
+.user-types-list-item.selected {
+ background: rgba(155, 183, 255, 0.12);
+ border-left-color: var(--accent, #9bb7ff);
+}
+
+.user-types-list-item .user-types-list-name {
+ font-weight: 600;
+ display: block;
+}
+
+.user-types-list-item .user-types-list-prefix {
+ font-size: 0.78rem;
+ color: var(--muted, #9aa8c2);
+ margin-top: 0.2rem;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+}
+
+.user-types-left-footer {
+ padding: 0.75rem 1rem;
+ border-top: 1px solid rgba(255, 255, 255, 0.1);
+}
+
+.user-types-create-btn {
+ width: 100%;
+ padding: 0.55rem 0.75rem;
+ font-size: 0.9rem;
+ cursor: pointer;
+ background: var(--accent, #4a9eff);
+ color: #fff;
+ border: none;
+ border-radius: 6px;
+ font-weight: 500;
+}
+
+.user-types-create-btn:hover {
+ filter: brightness(1.08);
+}
+
+.user-types-right {
+ flex: 1;
+ min-width: 0;
+ overflow-y: auto;
+ padding: 0;
+}
+
+.user-types-right-inner {
+ padding: 1rem 1.25rem 1.5rem;
+}
+
+/* Flatten nested .modal card so the type editor reads as a pane */
+.user-types-right .modal.user-types-type-form {
+ max-width: none;
+ width: 100%;
+ min-width: 0;
+ box-sizing: border-box;
+ background: transparent;
+ border: none;
+ box-shadow: none;
+ padding: 1rem 1.25rem 1.5rem;
+ gap: 0.75rem;
+}
+
+.user-types-idle,
+.user-types-loading {
+ padding: 2rem 1.5rem;
+ color: var(--muted, #9aa8c2);
+ text-align: center;
+ font-size: 0.95rem;
+}
+
+.user-types-detail h3 {
+ margin: 0 0 1rem 0;
+ font-size: 1.05rem;
+ color: var(--text, #e6ebf4);
+}
+
+.user-types-detail-dl {
+ margin: 0;
+ display: grid;
+ grid-template-columns: 9rem 1fr;
+ gap: 0.5rem 1rem;
+ font-size: 0.88rem;
+}
+
+.user-types-detail-dl dt {
+ margin: 0;
+ color: var(--muted, #9aa8c2);
+ font-weight: 500;
+}
+
+.user-types-detail-dl dd {
+ margin: 0;
+ color: var(--text, #e6ebf4);
+ word-break: break-word;
+}
+
+.user-types-fields-section {
+ margin-top: 1.25rem;
+}
+
+.user-types-fields-section h4 {
+ margin: 0 0 0.5rem 0;
+ font-size: 0.9rem;
+ color: var(--text, #e6ebf4);
+}
+
+.user-types-field-row {
+ padding: 0.45rem 0;
+ border-bottom: 1px solid rgba(255, 255, 255, 0.06);
+ font-size: 0.85rem;
+}
+
+.user-types-field-row:last-child {
+ border-bottom: none;
+}
+
+.user-types-field-name {
+ font-weight: 600;
+ color: var(--text, #e6ebf4);
+}
+
+.user-types-field-meta {
+ color: var(--muted, #9aa8c2);
+ font-size: 0.8rem;
+ margin-left: 0.35rem;
+}
+
+.user-types-field-default {
+ margin-top: 0.25rem;
+ color: var(--muted, #9aa8c2);
+}
+
+.user-types-detail-image {
+ max-width: 200px;
+ max-height: 140px;
+ border-radius: 6px;
+ margin-top: 0.5rem;
+ border: 1px solid rgba(255, 255, 255, 0.1);
+}
diff --git a/milventory/src/components/Master/UserItemTypesModal.js b/milventory/src/components/Master/UserItemTypesModal.js
new file mode 100644
index 0000000..c7a6587
--- /dev/null
+++ b/milventory/src/components/Master/UserItemTypesModal.js
@@ -0,0 +1,511 @@
+import React, { useState, useEffect, useCallback, useRef, useMemo } from 'react';
+import { api, getCategories, getTeams } from '../../api';
+import { useInventory } from '../../context/InventoryContext';
+import {
+ areNumberCustomFieldsValid,
+ buildCustomFieldsPayload,
+ emptyForm,
+ typeCustomFieldsFromTypeRow,
+ TypeFormBody
+} from './ItemTypeFormFields';
+import '../History/HistoryModal.css';
+import './UserItemTypesModal.css';
+
+function formatDefaultValue(v) {
+ if (v === undefined || v === null || v === '') return '— (user fills in)';
+ if (typeof v === 'object') return JSON.stringify(v);
+ return String(v);
+}
+
+function TypeDetailPane({ typeRow, customFieldDefinitions, categoryOptions = [], actions = null }) {
+ const locked = Array.isArray(typeRow.locked_custom_field_keys) ? typeRow.locked_custom_field_keys : [];
+ const defaults = typeRow.default_custom_fields && typeof typeRow.default_custom_fields === 'object'
+ ? typeRow.default_custom_fields
+ : {};
+ const lockedCatIds = Array.isArray(typeRow.locked_category_ids) ? typeRow.locked_category_ids : [];
+ const lockedCatLabels = lockedCatIds.map((id) => {
+ const c = categoryOptions.find((x) => Number(x.id) === Number(id));
+ return c ? c.name : `ID ${id}`;
+ });
+ const lockedTeams = Array.isArray(typeRow.locked_team_names) ? typeRow.locked_team_names : [];
+
+ return (
+
+
+
{typeRow.name}
+ {actions}
+
+
+ - Type name
+ - {typeRow.name}
+ - Template notes
+ - {typeRow.template_description?.trim() ? typeRow.template_description : '—'}
+ - Name prefix
+ - {typeRow.item_name_prefix?.trim() ? typeRow.item_name_prefix : '—'}
+ - Description prefix
+ - {typeRow.item_description_prefix?.trim() ? typeRow.item_description_prefix : '—'}
+ - Unique on map
+ - {typeRow.is_unique ? 'Yes' : 'No'}
+ - Admin-only edits
+ - {typeRow.prevent_user_edit ? 'Yes (only leaders can change this type)' : 'No'}
+ - Required categories
+ - {lockedCatLabels.length ? lockedCatLabels.join(', ') : '—'}
+ - Required teams
+ - {lockedTeams.length ? lockedTeams.join(', ') : '—'}
+
+ {typeRow.image && (
+
+
Default image
+

+
+ )}
+
+
Required custom fields
+ {locked.length === 0 ? (
+
None
+ ) : (
+ locked.map((key) => {
+ const def = customFieldDefinitions.find((d) => d.name === key);
+ const fieldType = def ? def.type : 'unknown';
+ const raw = Object.prototype.hasOwnProperty.call(defaults, key) ? defaults[key] : undefined;
+ return (
+
+
{key}
+
({fieldType})
+
Default: {formatDefaultValue(raw)}
+
+ );
+ })
+ )}
+
+
+ );
+}
+
+const UserItemTypesModal = ({ isOpen, onClose }) => {
+ const { reloadMasterItems, setError: setToastError } = useInventory();
+ const [types, setTypes] = useState([]);
+ const [loadingTypes, setLoadingTypes] = useState(false);
+ const [loadError, setLoadError] = useState(null);
+ const [search, setSearch] = useState('');
+ const [panelMode, setPanelMode] = useState('idle');
+ const [selectedTypeId, setSelectedTypeId] = useState(null);
+ const [customFieldDefinitions, setCustomFieldDefinitions] = useState([]);
+ const [form, setForm] = useState(emptyForm);
+ const [formError, setFormError] = useState(null);
+ const [submitting, setSubmitting] = useState(false);
+ const [addFieldDropdownOpen, setAddFieldDropdownOpen] = useState(false);
+ const addFieldDropdownRef = useRef(null);
+ const [categoryOptions, setCategoryOptions] = useState([]);
+ const [teamOptions, setTeamOptions] = useState([]);
+
+ const resetUi = useCallback(() => {
+ setSearch('');
+ setPanelMode('idle');
+ setSelectedTypeId(null);
+ setForm(emptyForm());
+ setFormError(null);
+ setLoadError(null);
+ setAddFieldDropdownOpen(false);
+ }, []);
+
+ const loadTypes = useCallback(async (options = {}) => {
+ const { silent = false } = options;
+ if (!silent) setLoadingTypes(true);
+ setLoadError(null);
+ try {
+ const data = await api.getSupplyTypes();
+ setTypes(Array.isArray(data) ? data : []);
+ } catch (err) {
+ setLoadError(err.message || 'Failed to load types');
+ setTypes([]);
+ } finally {
+ if (!silent) setLoadingTypes(false);
+ }
+ }, []);
+
+ useEffect(() => {
+ if (!isOpen) {
+ resetUi();
+ return;
+ }
+ loadTypes();
+ api
+ .getCustomFieldDefinitions()
+ .then(setCustomFieldDefinitions)
+ .catch(() => setCustomFieldDefinitions([]));
+ getCategories()
+ .then((cats) => {
+ const opts = (cats || [])
+ .filter((c) => c && typeof c === 'object' && c.id != null && c.name)
+ .map((c) => ({ id: c.id, name: c.name }));
+ setCategoryOptions(opts);
+ })
+ .catch(() => setCategoryOptions([]));
+ getTeams()
+ .then((teams) => setTeamOptions((teams || []).map((t) => String(t).toLowerCase())))
+ .catch(() => setTeamOptions([]));
+ }, [isOpen, loadTypes, resetUi]);
+
+ useEffect(() => {
+ const handler = (e) => {
+ if (addFieldDropdownRef.current && !addFieldDropdownRef.current.contains(e.target)) {
+ setAddFieldDropdownOpen(false);
+ }
+ };
+ document.addEventListener('mousedown', handler);
+ return () => document.removeEventListener('mousedown', handler);
+ }, []);
+
+ const handleKeyDown = useCallback(
+ (e) => {
+ if (e.key === 'Escape') onClose();
+ },
+ [onClose]
+ );
+
+ useEffect(() => {
+ if (isOpen) {
+ document.addEventListener('keydown', handleKeyDown);
+ return () => document.removeEventListener('keydown', handleKeyDown);
+ }
+ }, [isOpen, handleKeyDown]);
+
+ const filteredTypes = useMemo(() => {
+ const q = search.trim().toLowerCase();
+ if (!q) return types;
+ return types.filter((t) => {
+ const name = (t.name || '').toLowerCase();
+ const pref = (t.item_name_prefix || '').toLowerCase();
+ return name.includes(q) || pref.includes(q);
+ });
+ }, [types, search]);
+
+ const selectedType = useMemo(
+ () => (selectedTypeId != null ? types.find((t) => t.id === selectedTypeId) : null),
+ [types, selectedTypeId]
+ );
+
+ const selectType = (id) => {
+ setSelectedTypeId(id);
+ setPanelMode('detail');
+ setFormError(null);
+ setAddFieldDropdownOpen(false);
+ };
+
+ const startCreate = () => {
+ setSelectedTypeId(null);
+ setPanelMode('create');
+ setForm(emptyForm());
+ setFormError(null);
+ setAddFieldDropdownOpen(false);
+ };
+
+ const startEdit = (typeRow) => {
+ if (!typeRow || typeRow.prevent_user_edit) return;
+ setSelectedTypeId(typeRow.id);
+ setPanelMode('edit');
+ setForm({
+ name: typeRow.name || '',
+ template_description: typeRow.template_description || '',
+ item_name_prefix: typeRow.item_name_prefix || '',
+ item_description_prefix: typeRow.item_description_prefix || '',
+ image: typeRow.image || null,
+ typeCustomFields: typeCustomFieldsFromTypeRow(typeRow),
+ locked_category_ids: Array.isArray(typeRow.locked_category_ids) ? [...typeRow.locked_category_ids] : [],
+ locked_team_names: Array.isArray(typeRow.locked_team_names)
+ ? typeRow.locked_team_names.map((x) => String(x).toLowerCase())
+ : [],
+ is_unique: !!typeRow.is_unique,
+ prevent_user_edit: !!typeRow.prevent_user_edit
+ });
+ setFormError(null);
+ setAddFieldDropdownOpen(false);
+ };
+
+ const cancelForm = () => {
+ setPanelMode(selectedTypeId != null ? 'detail' : 'idle');
+ setForm(emptyForm());
+ setFormError(null);
+ setAddFieldDropdownOpen(false);
+ };
+
+ const handleImagePick = (e) => {
+ const file = e.target.files?.[0];
+ if (!file) return;
+ if (file.size > 10 * 1024 * 1024) {
+ setFormError('Image must be under 10MB');
+ return;
+ }
+ if (!file.type.startsWith('image/')) {
+ setFormError('Please choose an image file');
+ return;
+ }
+ const reader = new FileReader();
+ reader.onload = () => {
+ setForm((prev) => ({ ...prev, image: reader.result }));
+ };
+ reader.readAsDataURL(file);
+ e.target.value = '';
+ };
+
+ const handleCreateSubmit = async (e) => {
+ e.preventDefault();
+ if (!form.name.trim()) {
+ setFormError('Type name is required');
+ return;
+ }
+ if (!areNumberCustomFieldsValid(form.typeCustomFields || {}, customFieldDefinitions)) {
+ setFormError('Enter a valid number for number fields (or leave them empty).');
+ return;
+ }
+ const { default_custom_fields, locked_custom_field_keys } = buildCustomFieldsPayload(
+ form.typeCustomFields || {},
+ customFieldDefinitions
+ );
+ try {
+ setSubmitting(true);
+ setFormError(null);
+ const created = await api.createSupplyType({
+ name: form.name.trim(),
+ template_description: form.template_description.trim() || null,
+ item_name_prefix: form.item_name_prefix.trim(),
+ item_description_prefix: form.item_description_prefix.trim() || null,
+ image: form.image || null,
+ default_custom_fields,
+ locked_custom_field_keys,
+ is_unique: form.is_unique,
+ locked_category_ids: [...(form.locked_category_ids || [])].sort((a, b) => Number(a) - Number(b)),
+ locked_team_names: form.locked_team_names || []
+ });
+ await reloadMasterItems();
+ try {
+ localStorage.setItem('milventory-master-catalog-bump', String(Date.now()));
+ } catch (_) {
+ /* ignore */
+ }
+ await loadTypes({ silent: true });
+ const newId = created?.id;
+ if (newId != null) {
+ setSelectedTypeId(newId);
+ setPanelMode('detail');
+ } else {
+ setPanelMode('idle');
+ }
+ setForm(emptyForm());
+ } catch (err) {
+ setFormError(err.message || 'Failed to create type');
+ } finally {
+ setSubmitting(false);
+ }
+ };
+
+ const handleEditSubmit = async (e) => {
+ e.preventDefault();
+ if (!selectedType || selectedType.prevent_user_edit) {
+ setFormError('This item type can only be edited by a leader.');
+ return;
+ }
+ if (!form.name.trim()) {
+ setFormError('Type name is required');
+ return;
+ }
+ if (!areNumberCustomFieldsValid(form.typeCustomFields || {}, customFieldDefinitions)) {
+ setFormError('Enter a valid number for number fields (or leave them empty).');
+ return;
+ }
+ const { default_custom_fields, locked_custom_field_keys } = buildCustomFieldsPayload(
+ form.typeCustomFields || {},
+ customFieldDefinitions
+ );
+ try {
+ setSubmitting(true);
+ setFormError(null);
+ await api.updateSupplyType(selectedType.id, {
+ name: form.name.trim(),
+ template_description: form.template_description.trim() || null,
+ item_name_prefix: form.item_name_prefix.trim(),
+ item_description_prefix: form.item_description_prefix.trim() || null,
+ image: form.image,
+ default_custom_fields,
+ locked_custom_field_keys,
+ is_unique: form.is_unique,
+ locked_category_ids: [...(form.locked_category_ids || [])].sort((a, b) => Number(a) - Number(b)),
+ locked_team_names: form.locked_team_names || []
+ });
+ await reloadMasterItems();
+ try {
+ localStorage.setItem('milventory-master-catalog-bump', String(Date.now()));
+ } catch (_) {
+ /* ignore */
+ }
+ await loadTypes({ silent: true });
+ setPanelMode('detail');
+ setForm(emptyForm());
+ } catch (err) {
+ const message = err.message || 'Failed to update type';
+ if (message.includes('Cannot mark this item type as unique')) {
+ setToastError(message);
+ } else {
+ setFormError(message);
+ }
+ } finally {
+ setSubmitting(false);
+ }
+ };
+
+ if (!isOpen) return null;
+
+ return (
+ e.target === e.currentTarget && onClose()}
+ role="presentation"
+ >
+
e.stopPropagation()} role="dialog" aria-labelledby="user-types-title">
+
+
Item types
+
+ ×
+
+
+
+ {loadError && (
+
+ {loadError}
+
+ )}
+
+
+
+
+ setSearch(e.target.value)}
+ aria-label="Search item types"
+ />
+
+
+ {loadingTypes && types.length === 0 ? (
+
Loading…
+ ) : filteredTypes.length === 0 ? (
+
No matching types.
+ ) : (
+ filteredTypes.map((t) => (
+
selectType(t.id)}
+ >
+ {t.name}
+
+ {t.item_name_prefix?.trim() ? `Prefix: ${t.item_name_prefix}` : 'No prefix'}
+
+
+ ))
+ )}
+
+
+
+ Create type
+
+
+
+
+
+ {panelMode === 'idle' && (
+
+ Select a type from the list, or use Create type to add one.
+
+ )}
+ {panelMode === 'detail' && selectedType && (
+
startEdit(selectedType)}
+ style={{ width: 'auto', flexShrink: 0 }}
+ >
+ Edit type
+
+ ) : null
+ }
+ />
+ )}
+ {panelMode === 'detail' && !selectedType && !loadingTypes && (
+ Type not found. Try refreshing.
+ )}
+ {panelMode === 'create' && (
+
+
New item type
+ {formError &&
{formError}
}
+
+
+ )}
+ {panelMode === 'edit' && selectedType && (
+
+
Edit item type
+ {formError &&
{formError}
}
+
+
+ )}
+
+
+
+
+ );
+};
+
+export default UserItemTypesModal;
diff --git a/milventory/src/components/MasterAddModal.js b/milventory/src/components/MasterAddModal.js
deleted file mode 100644
index 731e443..0000000
--- a/milventory/src/components/MasterAddModal.js
+++ /dev/null
@@ -1,459 +0,0 @@
-import React, { useState, useEffect, useRef } from 'react';
-import { useInventory } from '../context/InventoryContext';
-import { getCategories, getTeams } from '../api';
-
-// Levenshtein distance for fuzzy search
-const levenshteinDistance = (str1, str2) => {
- const m = str1.length;
- const n = str2.length;
- const dp = Array(m + 1).fill(null).map(() => Array(n + 1).fill(0));
-
- for (let i = 0; i <= m; i++) dp[i][0] = i;
- for (let j = 0; j <= n; j++) dp[0][j] = j;
-
- for (let i = 1; i <= m; i++) {
- for (let j = 1; j <= n; j++) {
- if (str1[i - 1] === str2[j - 1]) {
- dp[i][j] = dp[i - 1][j - 1];
- } else {
- dp[i][j] = Math.min(
- dp[i - 1][j] + 1,
- dp[i][j - 1] + 1,
- dp[i - 1][j - 1] + 1
- );
- }
- }
- }
-
- return dp[m][n];
-};
-
-// Reusable tag dropdown with fuzzy search
-const TagDropdown = ({ placeholder, selectedItems, availableItems, onSelect, onRemove, maxResults = 5, capitalize = false, onSearchChange }) => {
- const [isOpen, setIsOpen] = useState(false);
- const [searchQuery, setSearchQuery] = useState('');
- const containerRef = useRef(null);
- const inputRef = useRef(null);
-
- // Close dropdown on outside click
- useEffect(() => {
- const handler = (e) => {
- if (containerRef.current && !containerRef.current.contains(e.target)) {
- setIsOpen(false);
- }
- };
- document.addEventListener('mousedown', handler);
- return () => document.removeEventListener('mousedown', handler);
- }, []);
-
- const unselected = availableItems.filter(item => !selectedItems.includes(item));
-
- // Compute display items: fuzzy top-N when searching, all when idle
- let displayItems;
- if (searchQuery.trim()) {
- const query = searchQuery.toLowerCase();
- const scored = unselected.map(item => {
- const itemLower = item.toLowerCase();
- const distance = levenshteinDistance(query, itemLower);
- const isSubstring = itemLower.includes(query);
- return { item, score: isSubstring ? distance - 10 : distance, distance };
- });
- scored.sort((a, b) => a.score !== b.score ? a.score - b.score : a.distance - b.distance);
- displayItems = scored.slice(0, maxResults).map(s => s.item);
- } else {
- displayItems = unselected;
- }
-
- const handleItemSelect = (item) => {
- onSelect(item);
- setSearchQuery('');
- onSearchChange?.('');
- setIsOpen(false);
- };
-
- return (
-
- {/* Selected tags */}
-
- {selectedItems.length === 0 && (
-
- {placeholder}
-
- )}
- {selectedItems.map(item => (
-
- {item}
- onRemove(item)} style={{
- background: 'transparent', border: 'none', color: 'white', cursor: 'pointer',
- padding: '0', marginLeft: '0.25rem', fontSize: '1rem', lineHeight: '1',
- display: 'flex', alignItems: 'center', justifyContent: 'center'
- }} title={`Remove ${item}`}>×
-
- ))}
-
-
- {/* Search input / dropdown trigger */}
-
{ setIsOpen(true); inputRef.current?.focus(); }}
- style={{
- width: '100%', padding: '0.5rem', background: 'rgba(0,0,0,.3)',
- border: '1px solid rgba(255,255,255,.1)', borderRadius: '4px',
- color: 'var(--text)', fontSize: '0.9rem', cursor: 'pointer',
- display: 'flex', alignItems: 'center', boxSizing: 'border-box'
- }}
- >
- 0 ? 'Search...' : 'All selected'}
- value={searchQuery}
- onChange={(e) => { setSearchQuery(e.target.value); setIsOpen(true); onSearchChange?.(e.target.value); }}
- onFocus={() => setIsOpen(true)}
- onClick={(e) => e.stopPropagation()}
- style={{
- background: 'transparent', border: 'none', color: 'var(--text)',
- fontSize: '0.9rem', outline: 'none', width: '100%', cursor: 'pointer'
- }}
- />
- ▼
-
-
- {/* Dropdown list */}
- {isOpen && displayItems.length > 0 && (
-
- {displayItems.map(item => (
-
handleItemSelect(item)}
- style={{
- padding: '0.5rem 0.75rem', cursor: 'pointer', fontSize: '0.9rem',
- color: 'var(--text)', transition: 'background 0.1s',
- ...(capitalize ? { textTransform: 'capitalize' } : {})
- }}
- onMouseEnter={(e) => { e.currentTarget.style.background = 'rgba(255,255,255,.1)'; }}
- onMouseLeave={(e) => { e.currentTarget.style.background = 'transparent'; }}
- >
- {item}
-
- ))}
-
- )}
-
- );
-};
-
-const MasterAddModal = ({ isOpen, onClose }) => {
- const { addMasterItem, masterInventoryItems } = useInventory();
-
- const [name, setName] = useState('');
- const [description, setDescription] = useState('');
- const [image, setImage] = useState(null);
- const [imagePreview, setImagePreview] = useState(null);
- const [selectedTeams, setSelectedTeams] = useState([]);
- const [selectedCategories, setSelectedCategories] = useState([]);
- const [categorySearchQuery, setCategorySearchQuery] = useState('');
- const [teamSearchQuery, setTeamSearchQuery] = useState('');
- const [availableCategories, setAvailableCategories] = useState([]);
- const [availableTeams, setAvailableTeams] = useState([]);
- const [categoryNameToId, setCategoryNameToId] = useState(new Map());
- const nameInputRef = useRef(null);
-
- // Fetch categories and teams on mount and when modal opens
- useEffect(() => {
- if (isOpen) {
- getCategories()
- .then(categories => {
- // Categories now come as objects with {id, name}
- const categoryList = categories.map(c => typeof c === 'string' ? c : c.name);
- setAvailableCategories(categoryList);
-
- // Build name-to-ID mapping
- const mapping = new Map();
- categories.forEach(cat => {
- if (typeof cat === 'object' && cat.id && cat.name) {
- mapping.set(cat.name, cat.id);
- }
- });
- setCategoryNameToId(mapping);
- })
- .catch(err => {
- console.error('Failed to fetch categories:', err);
- setAvailableCategories([]);
- setCategoryNameToId(new Map());
- });
- getTeams()
- .then(teams => {
- console.log('Fetched teams from API:', teams);
- // Normalize to lowercase for frontend consistency
- const normalized = teams.map(t => t.toLowerCase());
- console.log('Normalized teams:', normalized);
- setAvailableTeams(normalized);
- })
- .catch(err => {
- console.error('Failed to fetch teams:', err);
- setAvailableTeams([]);
- });
- }
- }, [isOpen]);
-
- useEffect(() => {
- if (isOpen) {
- setName('');
- setDescription('');
- setImage(null);
- setImagePreview(null);
- setSelectedTeams([]);
- setSelectedCategories([]);
- setCategorySearchQuery('');
- setTimeout(() => nameInputRef.current?.focus(), 0);
- }
- }, [isOpen]);
-
- const handleImageChange = (e) => {
- const file = e.target.files[0];
- if (!file) {
- setImage(null);
- setImagePreview(null);
- return;
- }
-
- if (file.size > 10 * 1024 * 1024) {
- alert('Image file size must be less than 10MB');
- e.target.value = '';
- return;
- }
-
- if (!file.type.startsWith('image/')) {
- alert('Please select an image file');
- e.target.value = '';
- return;
- }
-
- const reader = new FileReader();
- reader.onload = (event) => {
- const base64Data = event.target.result;
- setImage(base64Data);
- setImagePreview(base64Data);
- };
- reader.onerror = () => {
- alert('Error reading image file');
- e.target.value = '';
- };
- reader.readAsDataURL(file);
- };
-
- const handleRemoveImage = () => {
- setImage(null);
- setImagePreview(null);
- };
-
- const handleSave = () => {
- if (name.trim()) {
- if (masterInventoryItems.has(name.trim())) {
- alert('An item with this name already exists. Please use a different name.');
- return;
- }
-
- // Convert category names to IDs
- const categoryIds = selectedCategories
- .map(catName => categoryNameToId.get(catName))
- .filter(id => id !== undefined);
-
- const newItem = {
- name: name.trim(),
- description: description.trim() || null,
- image: image || null,
- teams: selectedTeams.length > 0 ? selectedTeams : undefined,
- categories: categoryIds.length > 0 ? categoryIds : undefined,
- locations: []
- };
-
- addMasterItem(newItem);
- onClose();
- }
- };
-
- const handleCancel = () => {
- onClose();
- };
-
- const handleOverlayClick = (e) => {
- if (e.target === e.currentTarget) {
- handleCancel();
- }
- };
-
- const handleKeyDown = (e) => {
- if (e.key === 'Escape') {
- handleCancel();
- } else if (e.key === 'Enter' && (e.ctrlKey || e.metaKey)) {
- e.preventDefault();
- handleSave();
- }
- };
-
- if (!isOpen) return null;
-
- return (
-
-
-
Add Master Item
- {teamSearchQuery.trim() && (() => {
- const query = teamSearchQuery.toLowerCase();
- const unselected = availableTeams.filter(t => !selectedTeams.includes(t));
- const scored = unselected.map(item => {
- const itemLower = item.toLowerCase();
- const distance = levenshteinDistance(query, itemLower);
- const isSubstring = itemLower.includes(query);
- return { item, score: isSubstring ? distance - 10 : distance, distance };
- });
- scored.sort((a, b) => a.score !== b.score ? a.score - b.score : a.distance - b.distance);
- const matches = scored.slice(0, 5);
-
- return (
-
-
- Team Search Debug:
-
-
- Query: "{teamSearchQuery}"
-
-
- Selected: {selectedTeams.length > 0 ? selectedTeams.join(', ') : 'None'}
-
-
- Available: {unselected.length} team{unselected.length !== 1 ? 's' : ''} remaining
-
- {matches.length > 0 && (
-
-
Top Matches:
- {matches.map(({ item, distance }, index) => (
-
- {index + 1}. {item} (distance: {distance})
-
- ))}
-
- )}
- {matches.length === 0 && unselected.length > 0 && (
-
No fuzzy matches found.
- )}
-
- );
- })()}
-
setName(e.target.value)}
- />
-
setDescription(e.target.value)}
- rows="3"
- />
-
- setSelectedTeams(prev => [...prev, team])}
- onRemove={(team) => setSelectedTeams(prev => prev.filter(t => t !== team))}
- capitalize
- onSearchChange={setTeamSearchQuery}
- />
-
- setSelectedCategories(prev => [...prev, cat])}
- onRemove={(cat) => setSelectedCategories(prev => prev.filter(c => c !== cat))}
- maxResults={5}
- onSearchChange={setCategorySearchQuery}
- />
-
-
-
-
- {imagePreview && (
-
-

-
- ×
-
-
- )}
-
-
-
- Cancel
-
-
- Add
-
-
-
-
- );
-};
-
-export default MasterAddModal;
diff --git a/milventory/src/components/MasterEditModal.js b/milventory/src/components/MasterEditModal.js
deleted file mode 100644
index baf1ab5..0000000
--- a/milventory/src/components/MasterEditModal.js
+++ /dev/null
@@ -1,421 +0,0 @@
-import React, { useState, useEffect, useRef } from 'react';
-import { useInventory } from '../context/InventoryContext';
-import { getCategories, getTeams } from '../api';
-
-// Levenshtein distance for fuzzy search
-const levenshteinDistance = (str1, str2) => {
- const m = str1.length;
- const n = str2.length;
- const dp = Array(m + 1).fill(null).map(() => Array(n + 1).fill(0));
-
- for (let i = 0; i <= m; i++) dp[i][0] = i;
- for (let j = 0; j <= n; j++) dp[0][j] = j;
-
- for (let i = 1; i <= m; i++) {
- for (let j = 1; j <= n; j++) {
- if (str1[i - 1] === str2[j - 1]) {
- dp[i][j] = dp[i - 1][j - 1];
- } else {
- dp[i][j] = Math.min(
- dp[i - 1][j] + 1,
- dp[i][j - 1] + 1,
- dp[i - 1][j - 1] + 1
- );
- }
- }
- }
-
- return dp[m][n];
-};
-
-// Reusable tag dropdown with fuzzy search
-const TagDropdown = ({ placeholder, selectedItems, availableItems, onSelect, onRemove, maxResults = 5, capitalize = false, onSearchChange }) => {
- const [isOpen, setIsOpen] = useState(false);
- const [searchQuery, setSearchQuery] = useState('');
- const containerRef = useRef(null);
- const inputRef = useRef(null);
-
- useEffect(() => {
- const handler = (e) => {
- if (containerRef.current && !containerRef.current.contains(e.target)) {
- setIsOpen(false);
- }
- };
- document.addEventListener('mousedown', handler);
- return () => document.removeEventListener('mousedown', handler);
- }, []);
-
- const unselected = availableItems.filter(item => !selectedItems.includes(item));
-
- let displayItems;
- if (searchQuery.trim()) {
- const query = searchQuery.toLowerCase();
- const scored = unselected.map(item => {
- const itemLower = item.toLowerCase();
- const distance = levenshteinDistance(query, itemLower);
- const isSubstring = itemLower.includes(query);
- return { item, score: isSubstring ? distance - 10 : distance, distance };
- });
- scored.sort((a, b) => a.score !== b.score ? a.score - b.score : a.distance - b.distance);
- displayItems = scored.slice(0, maxResults).map(s => s.item);
- } else {
- displayItems = unselected;
- }
-
- const handleItemSelect = (item) => {
- onSelect(item);
- setSearchQuery('');
- onSearchChange?.('');
- setIsOpen(false);
- };
-
- return (
-
-
- {selectedItems.length === 0 && (
-
- {placeholder}
-
- )}
- {selectedItems.map(item => (
-
- {item}
- onRemove(item)} style={{
- background: 'transparent', border: 'none', color: 'white', cursor: 'pointer',
- padding: '0', marginLeft: '0.25rem', fontSize: '1rem', lineHeight: '1',
- display: 'flex', alignItems: 'center', justifyContent: 'center'
- }} title={`Remove ${item}`}>×
-
- ))}
-
-
-
{ setIsOpen(true); inputRef.current?.focus(); }}
- style={{
- width: '100%', padding: '0.5rem', background: 'rgba(0,0,0,.3)',
- border: '1px solid rgba(255,255,255,.1)', borderRadius: '4px',
- color: 'var(--text)', fontSize: '0.9rem', cursor: 'pointer',
- display: 'flex', alignItems: 'center', boxSizing: 'border-box'
- }}
- >
- 0 ? 'Search...' : 'All selected'}
- value={searchQuery}
- onChange={(e) => { setSearchQuery(e.target.value); setIsOpen(true); onSearchChange?.(e.target.value); }}
- onFocus={() => setIsOpen(true)}
- onClick={(e) => e.stopPropagation()}
- style={{
- background: 'transparent', border: 'none', color: 'var(--text)',
- fontSize: '0.9rem', outline: 'none', width: '100%', cursor: 'pointer'
- }}
- />
- ▼
-
-
- {isOpen && displayItems.length > 0 && (
-
- {displayItems.map(item => (
-
handleItemSelect(item)}
- style={{
- padding: '0.5rem 0.75rem', cursor: 'pointer', fontSize: '0.9rem',
- color: 'var(--text)', transition: 'background 0.1s',
- ...(capitalize ? { textTransform: 'capitalize' } : {})
- }}
- onMouseEnter={(e) => { e.currentTarget.style.background = 'rgba(255,255,255,.1)'; }}
- onMouseLeave={(e) => { e.currentTarget.style.background = 'transparent'; }}
- >
- {item}
-
- ))}
-
- )}
-
- );
-};
-
-const MasterEditModal = ({ isOpen, onClose, itemName }) => {
- const { updateMasterItem, resolveMasterItem, masterInventoryItems } = useInventory();
-
- const [name, setName] = useState('');
- const [description, setDescription] = useState('');
- const [image, setImage] = useState(null);
- const [imagePreview, setImagePreview] = useState(null);
- const [selectedTeams, setSelectedTeams] = useState([]);
- const [selectedCategories, setSelectedCategories] = useState([]);
- const [availableCategories, setAvailableCategories] = useState([]);
- const [availableTeams, setAvailableTeams] = useState([]);
- const [categoryNameToId, setCategoryNameToId] = useState(new Map());
- const [categoryIdToName, setCategoryIdToName] = useState(new Map());
- const nameInputRef = useRef(null);
-
- const originalItem = itemName ? resolveMasterItem(itemName) : null;
-
- // Fetch categories and teams when modal opens
- useEffect(() => {
- if (isOpen) {
- getCategories()
- .then(categories => {
- const categoryList = categories.map(c => typeof c === 'string' ? c : c.name);
- setAvailableCategories(categoryList);
-
- const nameToId = new Map();
- const idToName = new Map();
- categories.forEach(cat => {
- if (typeof cat === 'object' && cat.id && cat.name) {
- nameToId.set(cat.name, cat.id);
- idToName.set(cat.id, cat.name);
- }
- });
- setCategoryNameToId(nameToId);
- setCategoryIdToName(idToName);
- })
- .catch(err => {
- console.error('Failed to fetch categories:', err);
- setAvailableCategories([]);
- });
-
- getTeams()
- .then(teams => {
- const normalized = teams.map(t => t.toLowerCase());
- setAvailableTeams(normalized);
- })
- .catch(err => {
- console.error('Failed to fetch teams:', err);
- setAvailableTeams([]);
- });
- }
- }, [isOpen]);
-
- useEffect(() => {
- if (isOpen && originalItem) {
- setName(originalItem.name || '');
- setDescription(originalItem.description || '');
- setImage(originalItem.image || null);
- setImagePreview(originalItem.image || null);
-
- // Load teams (already lowercase from API)
- setSelectedTeams(originalItem.teams || []);
-
- setTimeout(() => nameInputRef.current?.focus(), 0);
- }
- }, [isOpen, originalItem]);
-
- // Load categories when categoryIdToName mapping is ready
- useEffect(() => {
- if (isOpen && originalItem && categoryIdToName.size > 0) {
- // Load categories (convert IDs to names for display)
- if (originalItem.categories && originalItem.categories.length > 0) {
- const categoryNames = originalItem.categories
- .map(catId => categoryIdToName.get(catId))
- .filter(name => name !== undefined);
- setSelectedCategories(categoryNames);
- } else {
- setSelectedCategories([]);
- }
- }
- }, [isOpen, originalItem, categoryIdToName]);
-
- const handleImageChange = (e) => {
- const file = e.target.files[0];
- if (!file) {
- // Keep existing image if no new file selected
- return;
- }
-
- // Validate file size (10MB max)
- if (file.size > 10 * 1024 * 1024) {
- alert('Image file size must be less than 10MB');
- e.target.value = '';
- return;
- }
-
- // Validate file type
- if (!file.type.startsWith('image/')) {
- alert('Please select an image file');
- e.target.value = '';
- return;
- }
-
- // Convert to base64
- const reader = new FileReader();
- reader.onload = (event) => {
- const base64Data = event.target.result;
- setImage(base64Data);
- setImagePreview(base64Data);
- };
- reader.onerror = () => {
- alert('Error reading image file');
- e.target.value = '';
- };
- reader.readAsDataURL(file);
- };
-
- const handleRemoveImage = () => {
- setImage(null);
- setImagePreview(null);
- };
-
- const handleSave = () => {
- if (name.trim() && itemName) {
- // Check if name changed and new name already exists
- if (name.trim() !== itemName && masterInventoryItems.has(name.trim())) {
- alert('An item with this name already exists. Please use a different name.');
- return;
- }
-
- // Convert category names to IDs
- const categoryIds = selectedCategories
- .map(catName => categoryNameToId.get(catName))
- .filter(id => id !== undefined);
-
- const updatedItem = {
- name: name.trim(),
- description: description.trim() || null,
- image: image || null,
- teams: selectedTeams.length > 0 ? selectedTeams : [],
- categories: categoryIds.length > 0 ? categoryIds : [],
- locations: originalItem?.locations || []
- };
-
- updateMasterItem(itemName, updatedItem);
- onClose();
- }
- };
-
- const handleCancel = () => {
- onClose();
- };
-
- const handleOverlayClick = (e) => {
- if (e.target === e.currentTarget) {
- handleCancel();
- }
- };
-
- const handleKeyDown = (e) => {
- if (e.key === 'Escape') {
- handleCancel();
- } else if (e.key === 'Enter' && (e.ctrlKey || e.metaKey)) {
- e.preventDefault();
- handleSave();
- }
- };
-
- if (!isOpen || !originalItem) return null;
-
- return (
-
-
-
Edit Master Item
-
setName(e.target.value)}
- />
-
setDescription(e.target.value)}
- rows="3"
- />
-
- setSelectedTeams(prev => [...prev, team])}
- onRemove={(team) => setSelectedTeams(prev => prev.filter(t => t !== team))}
- capitalize
- />
-
- setSelectedCategories(prev => [...prev, cat])}
- onRemove={(cat) => setSelectedCategories(prev => prev.filter(c => c !== cat))}
- maxResults={5}
- />
-
-
-
- {imagePreview && (
-
-

-
- )}
-
- {imagePreview && (
-
- Remove Image
-
- )}
-
-
-
- Cancel
-
-
- Save
-
-
-
-
- );
-};
-
-export default MasterEditModal;
-
diff --git a/milventory/src/components/MasterInventoryTable.js b/milventory/src/components/MasterInventoryTable.js
deleted file mode 100644
index 58ffa4f..0000000
--- a/milventory/src/components/MasterInventoryTable.js
+++ /dev/null
@@ -1,102 +0,0 @@
-import React, { useState, useMemo } from 'react';
-import { useInventory } from '../context/InventoryContext';
-import MasterTableRow from './MasterTableRow';
-import MasterAddModal from './MasterAddModal';
-
-const MasterInventoryTable = () => {
- const {
- masterInventoryItems,
- computeMasterQuantities,
- getItemLocations,
- setSelectedMasterItem,
- selectedMasterItem
- } = useInventory();
-
- const [searchQuery, setSearchQuery] = useState('');
- const [showAddModal, setShowAddModal] = useState(false);
-
- const quantities = computeMasterQuantities();
-
- const filteredItems = useMemo(() => {
- const itemsArray = Array.from(masterInventoryItems.entries());
- if (!searchQuery.trim()) {
- return itemsArray;
- }
- const query = searchQuery.toLowerCase();
- return itemsArray.filter(([name]) => name.toLowerCase().includes(query));
- }, [masterInventoryItems, searchQuery]);
-
- const sortedItems = useMemo(() => {
- return [...filteredItems].sort(([nameA], [nameB]) => nameA.localeCompare(nameB));
- }, [filteredItems]);
-
- const handleRowClick = (itemName) => {
- setSelectedMasterItem(itemName);
- };
-
- const handleAddItem = () => {
- setShowAddModal(true);
- };
-
- return (
- <>
-
-
-
Master Inventory
-
-
- setSearchQuery(e.target.value)}
- className="master-search-input"
- />
-
-
- {sortedItems.length === 0 ? (
-
- {searchQuery ? 'No items found' : 'No Master items. Click "+ Add Item" to create one.'}
-
- ) : (
-
-
-
- | Name |
- Qty |
- Location |
- Last Modified |
-
-
-
- {sortedItems.map(([itemName, itemData]) => (
- handleRowClick(itemName)}
- />
- ))}
-
-
- )}
-
-
-
- + Add Item
-
-
-
- setShowAddModal(false)} />
- >
- );
-};
-
-export default MasterInventoryTable;
-
-
-
-
diff --git a/milventory/src/components/MasterItemPreview.js b/milventory/src/components/MasterItemPreview.js
deleted file mode 100644
index f46f9f9..0000000
--- a/milventory/src/components/MasterItemPreview.js
+++ /dev/null
@@ -1,221 +0,0 @@
-import React, { useRef, useState, useEffect } from 'react';
-import { useInventory } from '../context/InventoryContext';
-import MasterEditModal from './MasterEditModal';
-import { getCategories } from '../api';
-
-const MasterItemPreview = () => {
- const {
- selectedMasterItem,
- resolveMasterItem,
- getItemLocations,
- inventoryData,
- clearSelectedMasterItem,
- deleteMasterItem,
- startAddMode,
- leftPaneWidth,
- leftPaneCollapsed
- } = useInventory();
-
- const SHELF_NAMES = [
- 'Shelf 6 (Top)', 'Shelf 5', 'Shelf 4',
- 'Shelf 3', 'Shelf 2', 'Shelf 1 (Bottom)'
- ];
-
- const previewRef = useRef(null);
- const [editingItem, setEditingItem] = useState(null);
- const [categoryIdToName, setCategoryIdToName] = useState(new Map());
-
- // Fetch category mapping for display
- useEffect(() => {
- getCategories()
- .then(categories => {
- const mapping = new Map();
- categories.forEach(cat => {
- if (typeof cat === 'object' && cat.id && cat.name) {
- mapping.set(cat.id, cat.name);
- }
- });
- setCategoryIdToName(mapping);
- })
- .catch(console.error);
- }, []);
-
- const item = selectedMasterItem ? resolveMasterItem(selectedMasterItem) : null;
- const locations = selectedMasterItem ? getItemLocations(selectedMasterItem) : [];
-
- // Convert category IDs to names for display
- const categoryNames = item?.categories
- ? item.categories.map(catId => categoryIdToName.get(catId)).filter(name => name !== undefined)
- : [];
-
- // Build detailed location entries with qty (breaking Tall Cabinets down by shelf)
- const locationDetails = [];
- if (selectedMasterItem) {
- locations.forEach(boxTitle => {
- const boxData = inventoryData.get(boxTitle);
- if (!boxData) return;
- const matchingItems = boxData.inventory.filter(i => i.name === selectedMasterItem);
- if (boxTitle.startsWith('Tall Cabinet')) {
- matchingItems.forEach(i => {
- const shelfIdx = i.shelf ?? 0;
- const shelfName = SHELF_NAMES[shelfIdx] || `Shelf ${shelfIdx}`;
- locationDetails.push({ label: `${boxTitle} → ${shelfName}`, qty: i.qty });
- });
- } else {
- const totalQty = matchingItems.reduce((sum, i) => sum + (i.qty || 0), 0);
- locationDetails.push({ label: boxTitle, qty: totalQty });
- }
- });
- }
-
- // Calculate position to the right of left pane
- const leftPaneActualWidth = leftPaneCollapsed ? 40 : leftPaneWidth;
- const positionX = leftPaneActualWidth + 20;
- const positionY = 20;
-
- const handleDelete = () => {
- if (locations.length > 0) {
- const confirmed = window.confirm(
- `This item is used in ${locations.length} box(es). Delete from all boxes?`
- );
- if (!confirmed) return;
- }
- deleteMasterItem(selectedMasterItem);
- };
-
- const handleAddToBoxes = () => {
- startAddMode(selectedMasterItem);
- };
-
- const handleEdit = () => {
- setEditingItem(selectedMasterItem);
- };
-
- if (!selectedMasterItem || !item) return null;
-
- return (
- <>
-
-
-
-
{item.name}
-
- ×
-
-
- {item.description && (
-
-
Description:
-
{item.description}
-
- )}
- {(item.teams && item.teams.length > 0) && (
-
-
Teams:
-
- {item.teams.map((team, idx) => (
-
- {team}
-
- ))}
-
-
- )}
- {categoryNames.length > 0 && (
-
-
Categories:
-
- {categoryNames.map((category, idx) => (
-
- {category}
-
- ))}
-
-
- )}
-
-
Locations:
- {locationDetails.length === 0 ? (
-
No locations
- ) : (
- locationDetails.map((loc, idx) => (
-
- {loc.label} (Qty: {loc.qty})
-
- ))
- )}
-
- {item.image && (
-
-

-
- )}
- {item.last_modified_by_name && (
-
- Last modified by: {item.last_modified_by_name}
- {item.lastModified && (
-
- ({new Date(item.lastModified).toLocaleString()})
-
- )}
-
- )}
-
-
Actions:
-
-
- Add to Boxes
-
-
- Edit
-
-
- Delete
-
-
-
-
-
- setEditingItem(null)} itemName={editingItem} />
- >
- );
-};
-
-export default MasterItemPreview;
diff --git a/milventory/src/components/MasterTableRow.js b/milventory/src/components/MasterTableRow.js
deleted file mode 100644
index 3839dae..0000000
--- a/milventory/src/components/MasterTableRow.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import React from 'react';
-
-const formatDate = (isoString) => {
- if (!isoString) return '—';
- const date = new Date(isoString);
- const now = new Date();
- const isToday = date.toDateString() === now.toDateString();
-
- const timeStr = date.toLocaleTimeString([], { hour: 'numeric', minute: '2-digit' });
-
- if (isToday) {
- return timeStr;
- }
-
- const dateStr = date.toLocaleDateString([], { month: 'short', day: 'numeric' });
- return `${dateStr}, ${timeStr}`;
-};
-
-const MasterTableRow = ({ itemName, itemData, quantity, locations, isSelected, onClick }) => {
- // Build a truncated location string that fits the cell
- const locationText = locations.length === 0
- ? '—'
- : locations.length === 1
- ? locations[0]
- : `${locations[0]}, ...+${locations.length - 1}`;
-
- return (
-
- | {itemName} |
- {quantity} |
-
- {locationText}
- |
-
- {formatDate(itemData.lastModified)}
- |
-
- );
-};
-
-export default MasterTableRow;
diff --git a/milventory/src/components/SVGs/delivery_truck.js b/milventory/src/components/SVGs/delivery_truck.js
new file mode 100644
index 0000000..391cc03
--- /dev/null
+++ b/milventory/src/components/SVGs/delivery_truck.js
@@ -0,0 +1,14 @@
+/** Delivery truck icon — represents inventory in transit / to be delivered. */
+export const deliveryTruckSvg = ``;
diff --git a/milventory/src/components/SVGs/lost_items.js b/milventory/src/components/SVGs/lost_items.js
new file mode 100644
index 0000000..0101da9
--- /dev/null
+++ b/milventory/src/components/SVGs/lost_items.js
@@ -0,0 +1,10 @@
+/** Magnifying glass with a question mark — represents lost / misplaced items. */
+export const lostItemsSvg = ``;
diff --git a/milventory/src/components/SVGs/unsorted_items.js b/milventory/src/components/SVGs/unsorted_items.js
new file mode 100644
index 0000000..dcade86
--- /dev/null
+++ b/milventory/src/components/SVGs/unsorted_items.js
@@ -0,0 +1,9 @@
+/** Open cardboard box — represents unsorted / unidentified items. */
+export const unsortedItemsSvg = ``;
diff --git a/milventory/src/constants/leftPaneLayout.js b/milventory/src/constants/leftPaneLayout.js
new file mode 100644
index 0000000..f76fee9
--- /dev/null
+++ b/milventory/src/constants/leftPaneLayout.js
@@ -0,0 +1,4 @@
+/** Drag-resize limits for the master/admin left pane (px). */
+export const LEFT_PANE_MIN_WIDTH = 200;
+/** Prior max was 600; +50% for wider table preview. */
+export const LEFT_PANE_MAX_WIDTH = 900;
diff --git a/milventory/src/constants/locationSvgByName.js b/milventory/src/constants/locationSvgByName.js
new file mode 100644
index 0000000..10342f7
--- /dev/null
+++ b/milventory/src/constants/locationSvgByName.js
@@ -0,0 +1,15 @@
+import { deliveryTruckSvg } from '../components/SVGs/delivery_truck';
+import { lostItemsSvg } from '../components/SVGs/lost_items';
+import { unsortedItemsSvg } from '../components/SVGs/unsorted_items';
+
+/** Map location `name` (DB / JSON title) to raw SVG markup for map rendering. */
+export const LOCATION_SVG_MARKUP_BY_NAME = {
+ 'To Be Delivered': deliveryTruckSvg,
+ 'Lost Items': lostItemsSvg,
+ 'Unsorted Items': unsortedItemsSvg
+};
+
+/** Names that use the SVG map treatment; keep in sync with `SYSTEM_SPECIAL_LOCATION_NAMES` in Python. */
+export const SYSTEM_SPECIAL_LOCATION_NAMES = Object.freeze(
+ Object.keys(LOCATION_SVG_MARKUP_BY_NAME)
+);
diff --git a/milventory/src/constants/mapBounds.js b/milventory/src/constants/mapBounds.js
new file mode 100644
index 0000000..f0701cb
--- /dev/null
+++ b/milventory/src/constants/mapBounds.js
@@ -0,0 +1,21 @@
+/** Room bounds in world/map coordinates — must match src/api/helpers/map_bounds.py */
+export const MAP_ROOM = {
+ x: 80,
+ y: 80,
+ width: 3600,
+ height: 3840
+};
+
+export function clampPointToRoom(x, y) {
+ const maxX = MAP_ROOM.x + MAP_ROOM.width;
+ const maxY = MAP_ROOM.y + MAP_ROOM.height;
+ return {
+ x: Math.round(Math.max(MAP_ROOM.x, Math.min(maxX, x))),
+ y: Math.round(Math.max(MAP_ROOM.y, Math.min(maxY, y)))
+ };
+}
+
+export function pointInRoom(x, y) {
+ const p = clampPointToRoom(x, y);
+ return p.x === Math.round(x) && p.y === Math.round(y);
+}
diff --git a/milventory/src/context/InventoryContext.js b/milventory/src/context/InventoryContext.js
index 426bdb1..928851a 100644
--- a/milventory/src/context/InventoryContext.js
+++ b/milventory/src/context/InventoryContext.js
@@ -1,9 +1,173 @@
-import React, { createContext, useContext, useState, useEffect, useRef, useCallback } from 'react';
+import React, { createContext, useContext, useState, useEffect, useRef, useCallback, useMemo } from 'react';
import * as d3 from 'd3';
-import { api } from '../api';
+import { api, admin, handleApiError } from '../api';
+import { LOCATION_SVG_MARKUP_BY_NAME } from '../constants/locationSvgByName';
+import { clampPointToRoom } from '../constants/mapBounds';
+import { LEFT_PANE_MIN_WIDTH, LEFT_PANE_MAX_WIDTH } from '../constants/leftPaneLayout';
+import { normalizeShelfIndex } from '../utils/shelfLabels';
+
+export const MASTER_ARROWS_REDRAW_EVENT = 'milventory-master-arrows-redraw';
+
+function newTempFreePlaceId() {
+ if (typeof crypto !== 'undefined' && crypto.randomUUID) {
+ return `fp-${crypto.randomUUID()}`;
+ }
+ return `fp-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
+}
+
+/** Pending subtract map keys for floor placements (`${prefix}||supply_location_id`). */
+export const FREE_SUBTRACT_DOT_PREFIX = '__free__';
const InventoryContext = createContext(null);
+/** Build master table location strings from API supplies.locations[] */
+function locationsListFromSupplyLocs(locations) {
+ const out = [];
+ let freeN = 0;
+ (locations || []).forEach((loc) => {
+ if (loc.location === 'Free Coordinate' || (loc.coord_x != null && loc.coord_y != null)) {
+ freeN += 1;
+ return;
+ }
+ if (loc.shelf !== null && loc.shelf !== undefined) {
+ out.push(`${loc.location} (Shelf ${Number(loc.shelf) + 1})`);
+ } else {
+ out.push(loc.location);
+ }
+ });
+ if (freeN > 0) {
+ out.push(freeN === 1 ? 'Free Coordinate' : `Free Coordinates (${freeN})`);
+ }
+ return out;
+}
+
+/** Match shelf indices loosely (undefined/null ↔ null), coercing string/number mismatches from API/UI. */
+function shelvesMatchForMove(invShelf, locShelf) {
+ return normalizeShelfIndex(invShelf) === normalizeShelfIndex(locShelf);
+}
+
+/** Stable row key for a placement, even when public_id is missing (legacy rows). */
+function movePlacementKey(item) {
+ return (
+ item?.supplyPublicId ||
+ (item?.supplyId != null ? `__legacy_id_${item.supplyId}` : null)
+ );
+}
+
+/**
+ * Apply a box-to-box move only in local `inventoryData` (no API).
+ * Identifies the stock line by supplyPublicId (stable); display name may be duplicated.
+ */
+function applyOptimisticMoveBetweenBoxes(
+ inventoryMap,
+ supplyPublicId,
+ itemName,
+ sourceBox,
+ sourceShelf,
+ targetBox,
+ targetShelf,
+ moveQty,
+ numericSupplyId
+) {
+ const src = inventoryMap.get(sourceBox);
+ const dst = inventoryMap.get(targetBox);
+ if (!src || !dst || moveQty <= 0 || !supplyPublicId) return inventoryMap;
+
+ const next = new Map(inventoryMap);
+
+ if (sourceBox === targetBox) {
+ let inv = src.inventory.map((i) => ({ ...i }));
+ let rem = moveQty;
+ const afterRemove = [];
+ for (const it of inv) {
+ if (
+ rem <= 0 ||
+ movePlacementKey(it) !== supplyPublicId ||
+ !shelvesMatchForMove(it.shelf, sourceShelf)
+ ) {
+ afterRemove.push({ ...it });
+ continue;
+ }
+ const take = Math.min(rem, it.qty);
+ if (it.qty > take) afterRemove.push({ ...it, qty: it.qty - take });
+ rem -= take;
+ }
+ inv = afterRemove;
+
+ let merged = false;
+ const nsTarget = normalizeShelfIndex(targetShelf);
+ for (let i = 0; i < inv.length; i++) {
+ const it = inv[i];
+ if (
+ movePlacementKey(it) === supplyPublicId &&
+ normalizeShelfIndex(it.shelf) === nsTarget
+ ) {
+ inv[i] = { ...it, qty: it.qty + moveQty };
+ merged = true;
+ break;
+ }
+ }
+ if (!merged) {
+ const shelfVal =
+ targetShelf === undefined || targetShelf === null ? undefined : targetShelf;
+ inv.push({
+ name: itemName,
+ qty: moveQty,
+ shelf: shelfVal,
+ supplyId: numericSupplyId,
+ supplyPublicId
+ });
+ }
+
+ next.set(sourceBox, { ...src, inventory: inv });
+ return next;
+ }
+
+ let rem = moveQty;
+ const newSrc = [];
+
+ for (const it of src.inventory) {
+ if (
+ rem <= 0 ||
+ movePlacementKey(it) !== supplyPublicId ||
+ !shelvesMatchForMove(it.shelf, sourceShelf)
+ ) {
+ newSrc.push({ ...it });
+ continue;
+ }
+ const take = Math.min(rem, it.qty);
+ if (it.qty > take) {
+ newSrc.push({ ...it, qty: it.qty - take });
+ }
+ rem -= take;
+ }
+
+ const dstInv = dst.inventory.map((i) => ({ ...i }));
+ let merged = false;
+ for (let i = 0; i < dstInv.length; i++) {
+ const it = dstInv[i];
+ if (movePlacementKey(it) === supplyPublicId && shelvesMatchForMove(it.shelf, targetShelf)) {
+ dstInv[i] = { ...it, qty: it.qty + moveQty };
+ merged = true;
+ break;
+ }
+ }
+ if (!merged) {
+ const shelfVal = targetShelf === undefined || targetShelf === null ? undefined : targetShelf;
+ dstInv.push({
+ name: itemName,
+ qty: moveQty,
+ shelf: shelfVal,
+ supplyId: numericSupplyId,
+ supplyPublicId
+ });
+ }
+
+ next.set(sourceBox, { ...src, inventory: newSrc });
+ next.set(targetBox, { ...dst, inventory: dstInv });
+ return next;
+}
+
export const useInventory = () => {
const context = useContext(InventoryContext);
if (!context) {
@@ -28,16 +192,16 @@ export const InventoryProvider = ({ children }) => {
// Master Inventory Table state
const [masterInventoryItems, setMasterInventoryItems] = useState(new Map());
+ /** Selected master row: supplies.public_id (UUID string), not display name */
const [selectedMasterItem, setSelectedMasterItem] = useState(null);
const [leftPaneWidth, setLeftPaneWidth] = useState(300);
const [leftPaneCollapsed, setLeftPaneCollapsed] = useState(false);
-
+ const [masterFilterLocation, setMasterFilterLocation] = useState(null); // Location to filter master table by
+
// Loading and error states
const [isLoading, setIsLoading] = useState(true);
const [error, setError] = useState(null);
-
- // Supply name to ID mapping (for API calls)
- const [supplyNameToId, setSupplyNameToId] = useState(new Map());
+ const [conflictError, setConflictError] = useState(null);
// Add Mode state
const [addModeItem, setAddModeItem] = useState(null);
@@ -48,6 +212,34 @@ export const InventoryProvider = ({ children }) => {
const addModePendingRef = useRef(new Map());
const addModeQtyPerClickRef = useRef(1);
+ // Subtract Mode state (removes items from boxes, not the master entry)
+ const [subtractModeItem, setSubtractModeItem] = useState(null);
+ const [subtractModeQtyPerClick, setSubtractModeQtyPerClick] = useState(1);
+ const [subtractModePending, setSubtractModePending] = useState(new Map()); // Map
+ const subtractModePreviewRef = useRef(null);
+ const subtractModeItemRef = useRef(null);
+ const subtractModePendingRef = useRef(new Map());
+ const subtractModeQtyPerClickRef = useRef(1);
+
+ // Move Mode state
+ const [moveModeItem, setMoveModeItem] = useState(null);
+ const [moveModeDragging, setMoveModeDragging] = useState(null); // { boxTitle, shelf, qty, x, y }
+ const [moveModePending, setMoveModePending] = useState([]); // Box moves queued until Apply Move (no API until then)
+ /** Pending floor-dot positions in move mode (applied on Finish; discarded on Cancel). */
+ const [moveModeFreeCoordById, setMoveModeFreeCoordById] = useState(() => new Map());
+ /** Live world coords during floor-dot drag (React state updates only on drag end so D3 is not torn down). */
+ const moveModeDotDragLiveByIdRef = useRef(new Map());
+ const moveModeItemRef = useRef(null);
+ const moveModePendingRef = useRef([]); // Ref version for callbacks
+ const isDraggingMoveBoxRef = useRef(false); // Synchronous ref for D3 filter
+
+ const [freePlaceModeItem, setFreePlaceModeItem] = useState(null);
+ const freePlaceModeItemRef = useRef(null);
+ const [freePlacementsBySupplyPublicId, setFreePlacementsBySupplyPublicId] = useState(new Map());
+ const [freePlacePendingDeletes, setFreePlacePendingDeletes] = useState(() => new Set());
+ const [freePlacePendingCoordById, setFreePlacePendingCoordById] = useState(() => new Map());
+ const [freePlacePendingAdds, setFreePlacePendingAdds] = useState([]);
+
// Keep refs in sync with state
useEffect(() => {
addModeItemRef.current = addModeItem;
@@ -60,6 +252,26 @@ export const InventoryProvider = ({ children }) => {
useEffect(() => {
addModeQtyPerClickRef.current = addModeQtyPerClick;
}, [addModeQtyPerClick]);
+
+ useEffect(() => {
+ subtractModeItemRef.current = subtractModeItem;
+ }, [subtractModeItem]);
+
+ useEffect(() => {
+ subtractModePendingRef.current = subtractModePending;
+ }, [subtractModePending]);
+
+ useEffect(() => {
+ subtractModeQtyPerClickRef.current = subtractModeQtyPerClick;
+ }, [subtractModeQtyPerClick]);
+
+ useEffect(() => {
+ moveModeItemRef.current = moveModeItem;
+ }, [moveModeItem]);
+
+ useEffect(() => {
+ freePlaceModeItemRef.current = freePlaceModeItem;
+ }, [freePlaceModeItem]);
// Refs
const wrapRef = useRef(null);
@@ -67,80 +279,128 @@ export const InventoryProvider = ({ children }) => {
const worldRef = useRef(null);
const isPanningRef = useRef(false);
- // Initialize inventory data from JSON (layout only) and API (inventory data)
+ // Helper function to get fill color for location type
+ const getFillForType = (type) => {
+ const typeFills = {
+ 'drawer': 'var(--drawer)',
+ 'cabinet': 'var(--table)',
+ 'tall_cabinet': 'var(--files)', // Tall cabinets use files color
+ 'table': 'var(--table)',
+ 'other': '#e7ebf3', // Other category (includes workbench) has special color
+ 'special': '#ff69b4', // System map locations with custom SVG
+ };
+ return typeFills[type] || 'var(--table)';
+ };
+
+ // Function to reload supply locations from API
+ const reloadSupplyLocations = useCallback(async () => {
+ try {
+ const supplyLocations = await api.getAllSupplyLocations();
+
+ const locationMap = new Map();
+ const freeMap = new Map();
+
+ supplyLocations.forEach((sl) => {
+ const pid =
+ sl.supply_public_id ||
+ (sl.supply_id != null ? `__legacy_id_${sl.supply_id}` : '');
+ if (sl.free_place || (sl.coord_x != null && sl.coord_y != null)) {
+ if (!pid) return;
+ if (!freeMap.has(pid)) freeMap.set(pid, []);
+ freeMap.get(pid).push({
+ id: sl.id,
+ x: sl.coord_x,
+ y: sl.coord_y,
+ qty: sl.qty || 0
+ });
+ return;
+ }
+ const key = sl.location;
+ if (key == null || key === '') return;
+ if (!locationMap.has(key)) {
+ locationMap.set(key, []);
+ }
+ locationMap.get(key).push({
+ id: sl.id,
+ name: sl.supply_name || '',
+ supplyId: sl.supply_id,
+ supplyPublicId: sl.supply_public_id || null,
+ qty: sl.qty,
+ shelf: sl.shelf !== null ? sl.shelf : undefined
+ });
+ });
+
+ setFreePlacementsBySupplyPublicId(freeMap);
+
+ setInventoryData((prev) => {
+ const next = new Map(prev);
+ prev.forEach((boxData, locationName) => {
+ const items = locationMap.get(locationName) || [];
+ next.set(locationName, {
+ ...boxData,
+ inventory: items
+ });
+ });
+ return next;
+ });
+ } catch (apiError) {
+ console.error('Error reloading supply locations from API:', apiError);
+ }
+ }, []);
+
+ // Initialize inventory data from database (locations) and API (inventory data)
useEffect(() => {
const loadInventoryData = async () => {
setIsLoading(true);
setError(null);
try {
- // 1. Load layout from JSON (no inventory arrays)
- const response = await fetch('/inventory-locations.json');
- if (!response.ok) {
- throw new Error('Failed to load inventory layout');
- }
- const data = await response.json();
+ // 1. Set default inventory bounds (no longer loading from JSON)
+ setInventoryBounds({
+ viewBox: { x: 0, y: 0, width: 4000, height: 4000 },
+ room: { x: 80, y: 80, width: 3600, height: 3840, rx: 18, ry: 18 }
+ });
- // Store inventory bounds
- if (data['inventory-bounds']) {
- setInventoryBounds(data['inventory-bounds']);
- }
+ // 2. Load locations from database API
+ const locations = await admin.getLocations();
- // Initialize inventoryData with layout only (empty inventory arrays)
+ // Convert API locations to box format expected by map
const newInventoryData = new Map();
- data.boxes.forEach(box => {
- newInventoryData.set(box.title, {
- ...box,
- inventory: [] // Will be populated from API
- });
+ locations.forEach(location => {
+ const svgMarkup = LOCATION_SVG_MARKUP_BY_NAME[location.name];
+ const boxData = {
+ title: location.name,
+ x: location.x,
+ y: location.y,
+ width: location.width,
+ height: location.height,
+ fill: getFillForType(location.type),
+ type: location.type,
+ shelf_count: Math.max(0, parseInt(location.shelf_count, 10) || 0),
+ inventory: [], // Will be populated from supply locations API
+ ...(svgMarkup ? { svgMarkup } : {})
+ };
+
+ newInventoryData.set(location.name, boxData);
});
setInventoryData(newInventoryData);
- // 2. Load supply locations from API and merge into inventoryData
- try {
- const supplyLocations = await api.getAllSupplyLocations();
-
- // Group by location_name and merge into inventoryData
- const locationMap = new Map();
- supplyLocations.forEach(sl => {
- const key = sl.location;
- if (!locationMap.has(key)) {
- locationMap.set(key, []);
- }
- locationMap.get(key).push({
- name: sl.supply_name || '', // From JOIN in API
- qty: sl.qty, // API maps amount to qty
- shelf: sl.shelf !== null ? sl.shelf : undefined
- });
- });
-
- // Merge into inventoryData
- setInventoryData(prev => {
- const next = new Map(prev);
- locationMap.forEach((items, locationName) => {
- const boxData = next.get(locationName);
- if (boxData) {
- next.set(locationName, {
- ...boxData,
- inventory: items
- });
- }
- });
- return next;
- });
- } catch (apiError) {
- console.error('Error loading supply locations from API:', apiError);
- // Continue with empty inventory arrays if API fails
- }
+ // 3. Load supply locations from API and merge into inventoryData
+ await reloadSupplyLocations();
setIsLoading(false);
} catch (error) {
console.error('Error loading inventory data:', error);
setError(error.message || 'Failed to load inventory data');
setIsLoading(false);
- // Fallback to empty data if JSON fails to load
+ // Fallback to empty data if API fails
setInventoryData(new Map());
+ // Set default bounds
+ setInventoryBounds({
+ viewBox: { x: 0, y: 0, width: 4000, height: 4000 },
+ room: { x: 80, y: 80, width: 3600, height: 3840, rx: 18, ry: 18 }
+ });
}
};
@@ -153,6 +413,19 @@ export const InventoryProvider = ({ children }) => {
const zoom = d3.zoom()
.scaleExtent([0.6, 6])
+ .filter((event) => {
+ // Disable zoom/pan when dragging a move box
+ if (isDraggingMoveBoxRef.current) return false;
+ // Check if the event target is a move box
+ if (event.target && event.target.dataset && event.target.dataset.moveBox) return false;
+ // Don't intercept events from interactive HTML elements inside foreignObject
+ // (e.g. buttons, inputs, selects inside BoxInventoryOverlay)
+ if (event.target && event.target.closest && event.target.closest('button, input, select, a, textarea')) return false;
+ // Don't intercept scroll (wheel) or mousedown events inside scrollable containers —
+ // otherwise D3 steals the scroll and the native scrollbar drag never fires
+ if (event.target && event.target.closest && event.target.closest('.box-inventory-content')) return false;
+ return true;
+ })
.on('start', () => {
isPanningRef.current = true;
})
@@ -182,44 +455,40 @@ export const InventoryProvider = ({ children }) => {
const supplies = await api.getSupplies();
const newMasterItems = new Map();
- const nameToIdMap = new Map();
-
- supplies.forEach(supply => {
- // Build name to ID mapping
- nameToIdMap.set(supply.name, supply.id);
-
- // Convert API response to Master item format
- // API returns locations[] with {location, shelf, qty}
- const locations = (supply.locations || []).map(loc => {
- if (loc.shelf !== null && loc.shelf !== undefined) {
- return `${loc.location} (Shelf ${loc.shelf})`;
- }
- return loc.location;
- });
-
- newMasterItems.set(supply.name, {
+
+ supplies.forEach((supply) => {
+ const rowKey =
+ supply.public_id || (supply.id != null ? `__legacy_id_${supply.id}` : null);
+ if (!rowKey) return;
+
+ const locations = locationsListFromSupplyLocs(supply.locations);
+
+ newMasterItems.set(rowKey, {
+ public_id: supply.public_id || null,
name: supply.name,
description: supply.description || '',
image: supply.image || null,
locations: locations,
teams: supply.teams || [],
categories: supply.categories || [],
+ custom_fields: supply.custom_fields || {},
+ supply_type_id: supply.supply_type_id ?? null,
+ type_name: supply.type_name || null,
+ type_has_template_image: Boolean(supply.type_has_template_image),
lastModified: supply.lastModified || null,
last_modified_by: supply.last_modified_by || null,
last_modified_by_name: supply.last_modified_by_name || null,
- id: supply.id // Store ID for API calls
+ id: supply.id
});
});
-
+
setMasterInventoryItems(newMasterItems);
- setSupplyNameToId(nameToIdMap);
} catch (error) {
console.error('Error loading Master inventory items from API:', error);
if (error.message === 'Authentication required') {
setError('Authentication required. Please login.');
}
setMasterInventoryItems(new Map());
- setSupplyNameToId(new Map());
}
};
@@ -232,7 +501,12 @@ export const InventoryProvider = ({ children }) => {
useEffect(() => {
const savedLeftWidth = localStorage.getItem('leftPaneWidth');
const savedLeftCollapsed = localStorage.getItem('leftPaneCollapsed');
- if (savedLeftWidth) setLeftPaneWidth(parseInt(savedLeftWidth, 10));
+ if (savedLeftWidth) {
+ const w = parseInt(savedLeftWidth, 10);
+ if (!Number.isNaN(w)) {
+ setLeftPaneWidth(Math.max(LEFT_PANE_MIN_WIDTH, Math.min(LEFT_PANE_MAX_WIDTH, w)));
+ }
+ }
if (savedLeftCollapsed === 'true') setLeftPaneCollapsed(true);
}, []);
@@ -267,33 +541,30 @@ export const InventoryProvider = ({ children }) => {
}, []);
const updateInventory = useCallback(async (boxTitle, newInventory) => {
- // Update local state immediately (optimistic update)
- setInventoryData(prev => {
- const next = new Map(prev);
- const boxData = next.get(boxTitle);
- if (boxData) {
- next.set(boxTitle, { ...boxData, inventory: newInventory });
- }
- return next;
- });
-
- // Sync to API (fire and forget for now - could add error handling later)
try {
// Get current supply locations for this box
const currentLocations = await api.getLocationSupplies(boxTitle);
// Build maps for comparison
const currentMap = new Map();
- currentLocations.forEach(sl => {
- const key = `${sl.supply_name}||${sl.shelf !== null ? sl.shelf : 'null'}`;
+ currentLocations.forEach((sl) => {
+ const rowPid = sl.supply_public_id || `__legacy_id_${sl.supply_id}`;
+ const key = `${rowPid}||${sl.shelf !== null ? sl.shelf : 'null'}`;
currentMap.set(key, { id: sl.id, qty: sl.qty });
});
-
+
const newMap = new Map();
- newInventory.forEach(item => {
- const key = `${item.name}||${item.shelf !== undefined ? item.shelf : 'null'}`;
- const supplyId = supplyNameToId.get(item.name);
- if (supplyId) {
+ newInventory.forEach((item) => {
+ const rowPid =
+ item.supplyPublicId ||
+ (item.supplyId != null ? `__legacy_id_${item.supplyId}` : null);
+ if (!rowPid) return;
+ const key = `${rowPid}||${item.shelf !== undefined ? item.shelf : 'null'}`;
+ const supplyId =
+ item.supplyId != null
+ ? item.supplyId
+ : masterInventoryItems.get(rowPid)?.id;
+ if (supplyId != null) {
newMap.set(key, { supplyId, qty: item.qty, shelf: item.shelf });
}
});
@@ -337,17 +608,75 @@ export const InventoryProvider = ({ children }) => {
for (const id of toDelete) {
await api.deleteSupplyLocation(id);
}
+
+ // Reload supply locations to ensure UI reflects actual server state
+ await reloadSupplyLocations();
} catch (error) {
console.error('Error syncing inventory to API:', error);
// Only set error if not panning (to avoid breaking pan)
if (!isPanningRef.current) {
- setError(error.message || 'Failed to sync inventory changes');
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) {
+ setConflictError(errorInfo);
+ } else {
+ setError(errorInfo.message || 'Failed to sync inventory changes');
+ }
}
}
- }, [supplyNameToId]);
+ }, [masterInventoryItems, reloadSupplyLocations]);
+
+ // Function to reload master items from API
+ const reloadMasterItems = useCallback(async () => {
+ try {
+ const supplies = await api.getSupplies();
+
+ const newMasterItems = new Map();
+
+ supplies.forEach((supply) => {
+ const rowKey =
+ supply.public_id || (supply.id != null ? `__legacy_id_${supply.id}` : null);
+ if (!rowKey) return;
+
+ const locations = locationsListFromSupplyLocs(supply.locations);
+ newMasterItems.set(rowKey, {
+ public_id: supply.public_id || null,
+ name: supply.name,
+ description: supply.description || '',
+ image: supply.image || null,
+ locations: locations,
+ teams: supply.teams || [],
+ categories: supply.categories || [],
+ custom_fields: supply.custom_fields || {},
+ supply_type_id: supply.supply_type_id ?? null,
+ type_name: supply.type_name || null,
+ type_has_template_image: Boolean(supply.type_has_template_image),
+ lastModified: supply.lastModified || null,
+ last_modified_by: supply.last_modified_by || null,
+ last_modified_by_name: supply.last_modified_by_name || null,
+ id: supply.id
+ });
+ });
+
+ setMasterInventoryItems(newMasterItems);
+ } catch (error) {
+ console.error('Error reloading Master inventory items:', error);
+ }
+ }, []);
+
+ // Other tabs (e.g. /admin) can bump this key so the catalog refetches without a full reload
+ useEffect(() => {
+ const onStorage = (e) => {
+ if (e.key === 'milventory-master-catalog-bump') {
+ reloadMasterItems();
+ }
+ };
+ window.addEventListener('storage', onStorage);
+ return () => window.removeEventListener('storage', onStorage);
+ }, [reloadMasterItems]);
// Add Mode functions
const startAddMode = useCallback((itemName) => {
+ setFreePlaceModeItem(null);
setAddModeItem(itemName);
setAddModeQtyPerClick(1);
setAddModePending(new Map());
@@ -386,8 +715,11 @@ export const InventoryProvider = ({ children }) => {
return;
}
- // Get supply_id for the item
- const supplyId = supplyNameToId.get(currentItem);
+ const restorePreviewItem = () => {
+ if (currentItem) setSelectedMasterItem(currentItem);
+ };
+
+ const supplyId = masterInventoryItems.get(currentItem)?.id;
if (!supplyId) {
console.error(`Supply ID not found for item: ${currentItem}`);
setError(`Supply ID not found for item: ${currentItem}`);
@@ -411,6 +743,7 @@ export const InventoryProvider = ({ children }) => {
setAddModeItem(null);
setAddModeQtyPerClick(1);
setAddModePending(new Map());
+ restorePreviewItem();
return;
}
@@ -421,70 +754,647 @@ export const InventoryProvider = ({ children }) => {
additions: additions
});
- // Update local state optimistically
- const byBox = new Map();
- pending.forEach((qty, key) => {
- const parts = key.split('||');
- const boxTitle = parts[0];
- const shelf = parts.length > 1 ? parseInt(parts[1], 10) : undefined;
- if (!byBox.has(boxTitle)) byBox.set(boxTitle, []);
- byBox.get(boxTitle).push({ shelf, qty });
- });
+ // Reload supply locations to ensure UI reflects actual server state
+ await reloadSupplyLocations();
+
+ // Reload master items to update last_modified timestamp
+ await reloadMasterItems();
+
+ // Clear add mode
+ setAddModeItem(null);
+ setAddModeQtyPerClick(1);
+ setAddModePending(new Map());
+ restorePreviewItem();
+ } catch (error) {
+ console.error('Error finishing add mode:', error);
+ // Only set error if not panning (to avoid breaking pan)
+ if (!isPanningRef.current) {
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) {
+ setConflictError(errorInfo);
+ } else {
+ setError(errorInfo.message);
+ }
+ }
+ }
+ }, [masterInventoryItems, reloadSupplyLocations, reloadMasterItems]);
- byBox.forEach((entries, boxTitle) => {
+ const cancelAddMode = useCallback(() => {
+ const restore = addModeItemRef.current;
+ setAddModeItem(null);
+ setAddModeQtyPerClick(1);
+ setAddModePending(new Map());
+ if (restore) setSelectedMasterItem(restore);
+ }, []);
+
+ // Subtract Mode functions (removes items from boxes, not the master entry)
+ const startSubtractMode = useCallback((itemName) => {
+ setFreePlaceModeItem(null);
+ setSubtractModeItem(itemName);
+ setSubtractModeQtyPerClick(1);
+ setSubtractModePending(new Map());
+ setSelectedBox(null); // Clear box selection when entering subtract mode
+ setSelectedMasterItem(null); // Clear Master preview when entering subtract mode
+ }, []);
+
+ // shelf is optional — undefined for non-shelf boxes, number for Tall Cabinet shelves
+ const handleBoxClickSubtractMode = useCallback((boxTitle, shelf) => {
+ const qty = subtractModeQtyPerClickRef.current;
+ const key = shelf !== undefined ? `${boxTitle}||${shelf}` : boxTitle;
+
+ // Get current quantity in this location
const boxData = inventoryData.get(boxTitle);
if (!boxData) return;
- const newInventory = [...boxData.inventory];
+ const pid = subtractModeItemRef.current;
+ const matchingItems = boxData.inventory.filter((item) => {
+ const itemPid =
+ item.supplyPublicId ||
+ (item.supplyId != null ? `__legacy_id_${item.supplyId}` : null);
+ if (itemPid !== pid) return false;
+ if (shelf !== undefined) return (item.shelf ?? 0) === shelf;
+ return item.shelf === undefined;
+ });
- entries.forEach(({ shelf, qty }) => {
- const existingIndex = newInventory.findIndex(item => {
- if (item.name !== currentItem) return false;
- if (shelf !== undefined) return (item.shelf ?? 0) === shelf;
- return true;
+ const currentQty = matchingItems.reduce((sum, item) => sum + (item.qty || 0), 0);
+ const existingPending = subtractModePendingRef.current.get(key) || 0;
+
+ // Don't allow subtracting more than what's available
+ const maxSubtractable = currentQty - existingPending;
+ const toSubtract = Math.min(qty, maxSubtractable);
+
+ if (toSubtract <= 0) return; // Nothing to subtract
+
+ setSubtractModePending(prev => {
+ const next = new Map(prev);
+ const existing = next.get(key) || 0;
+ next.set(key, existing + toSubtract);
+ return next;
+ });
+ }, [inventoryData]);
+
+ // Check if any pending subtraction belongs to a given box (handles compound keys)
+ const boxHasAnySubtractPending = useCallback((boxTitle) => {
+ for (const key of subtractModePending.keys()) {
+ if (key === boxTitle || key.startsWith(boxTitle + '||')) return true;
+ }
+ return false;
+ }, [subtractModePending]);
+
+ const handleSubtractFreeDotClick = useCallback(
+ (supplyLocationId) => {
+ const itemName = subtractModeItemRef.current;
+ if (!itemName) return;
+ const qty = subtractModeQtyPerClickRef.current;
+ setSubtractModePending((prev) => {
+ const placements = freePlacementsBySupplyPublicId.get(itemName) || [];
+ const dot = placements.find((p) => p.id === supplyLocationId);
+ if (!dot) return prev;
+ const key = `${FREE_SUBTRACT_DOT_PREFIX}||${supplyLocationId}`;
+ const existing = prev.get(key) || 0;
+ const maxSubtractable = (dot.qty || 0) - existing;
+ const toSubtract = Math.min(qty, maxSubtractable);
+ if (toSubtract <= 0) return prev;
+ const next = new Map(prev);
+ next.set(key, existing + toSubtract);
+ return next;
+ });
+ },
+ [freePlacementsBySupplyPublicId]
+ );
+
+ const finishSubtractMode = useCallback(async () => {
+ const currentItem = subtractModeItemRef.current;
+ const pending = subtractModePendingRef.current;
+
+ if (!currentItem) {
+ setSubtractModeItem(null);
+ setSubtractModeQtyPerClick(1);
+ setSubtractModePending(new Map());
+ return;
+ }
+
+ const restorePreviewItem = () => setSelectedMasterItem(currentItem);
+
+ const supplyId = masterInventoryItems.get(currentItem)?.id;
+ if (!supplyId) {
+ console.error(`Supply ID not found for item: ${currentItem}`);
+ setError(`Supply ID not found for item: ${currentItem}`);
+ return;
+ }
+
+ // Convert pending map to subtractions
+ // Use functional update to get latest inventoryData
+ const subtractions = [];
+ let currentInventoryData = inventoryData;
+
+ pending.forEach((pendingQty, key) => {
+ const parts = key.split('||');
+ if (parts[0] === FREE_SUBTRACT_DOT_PREFIX && parts[1] != null && parts[1] !== '') {
+ const locId = parseInt(parts[1], 10);
+ const floorDots = freePlacementsBySupplyPublicId.get(currentItem) || [];
+ const dot = floorDots.find((d) => d.id === locId);
+ if (!dot || pendingQty <= 0) return;
+ subtractions.push({
+ id: locId,
+ kind: 'free',
+ amount: pendingQty,
+ currentQty: dot.qty || 0
+ });
+ return;
+ }
+
+ const boxTitle = parts[0];
+ const shelf = parts.length > 1 ? parseInt(parts[1], 10) : null;
+
+ // Find the supply_location_id for this item at this location
+ const boxData = currentInventoryData.get(boxTitle);
+ if (!boxData) return;
+
+ const matchingItems = boxData.inventory.filter((item) => {
+ const itemPid =
+ item.supplyPublicId ||
+ (item.supplyId != null ? `__legacy_id_${item.supplyId}` : null);
+ if (itemPid !== currentItem) return false;
+ if (shelf !== null && shelf !== undefined) return (item.shelf ?? 0) === shelf;
+ return item.shelf === undefined;
+ });
+
+ // For each matching item, we need to subtract or reduce it
+ let remainingToSubtract = pendingQty;
+ matchingItems.forEach(item => {
+ if (item.id && remainingToSubtract > 0) {
+ const subtractQty = Math.min(remainingToSubtract, item.qty);
+ subtractions.push({
+ id: item.id,
+ kind: 'box',
+ location: boxTitle,
+ shelf: shelf,
+ amount: subtractQty
});
+ remainingToSubtract -= subtractQty;
+ }
+ });
+ });
+
+ if (subtractions.length === 0) {
+ setSubtractModeItem(null);
+ setSubtractModeQtyPerClick(1);
+ setSubtractModePending(new Map());
+ restorePreviewItem();
+ return;
+ }
- if (existingIndex >= 0) {
- newInventory[existingIndex] = {
- ...newInventory[existingIndex],
- qty: newInventory[existingIndex].qty + qty
- };
+ try {
+ // Subtract items via API
+ for (const subtraction of subtractions) {
+ if (subtraction.kind === 'free') {
+ const qty = subtraction.currentQty;
+ if (qty <= subtraction.amount) {
+ await api.deleteSupplyLocation(subtraction.id);
} else {
- const newItem = { name: currentItem, qty };
- if (shelf !== undefined) newItem.shelf = shelf;
- newInventory.push(newItem);
+ await api.updateSupplyLocation(subtraction.id, { amount: qty - subtraction.amount });
}
- });
+ continue;
+ }
- setInventoryData(prev => {
- const next = new Map(prev);
- const box = next.get(boxTitle);
- if (box) {
- next.set(boxTitle, { ...box, inventory: newInventory });
- }
- return next;
+ const boxData = currentInventoryData.get(subtraction.location);
+ const item = boxData?.inventory.find((i) => i.id === subtraction.id);
+
+ if (!item) continue;
+
+ if (item.qty <= subtraction.amount) {
+ await api.deleteSupplyLocation(subtraction.id);
+ } else {
+ await api.updateSupplyLocation(subtraction.id, { amount: item.qty - subtraction.amount });
+ }
+ }
+
+ // Reload supply locations to ensure UI reflects actual server state
+ await reloadSupplyLocations();
+
+ // Reload master items to update last_modified timestamp
+ await reloadMasterItems();
+
+ // Clear subtract mode
+ setSubtractModeItem(null);
+ setSubtractModeQtyPerClick(1);
+ setSubtractModePending(new Map());
+ restorePreviewItem();
+ } catch (error) {
+ console.error('Error finishing subtract mode:', error);
+ // Only set error if not panning (to avoid breaking pan)
+ if (!isPanningRef.current) {
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) {
+ setConflictError(errorInfo);
+ } else {
+ setError(errorInfo.message || 'Failed to subtract items');
+ }
+ }
+ }
+ }, [inventoryData, masterInventoryItems, reloadSupplyLocations, reloadMasterItems, freePlacementsBySupplyPublicId]);
+
+ const cancelSubtractMode = useCallback(() => {
+ const restore = subtractModeItemRef.current;
+ setSubtractModeItem(null);
+ setSubtractModeQtyPerClick(1);
+ setSubtractModePending(new Map());
+ if (restore) setSelectedMasterItem(restore);
+ }, []);
+
+ // Move Mode functions
+ const startMoveMode = useCallback((itemName) => {
+ setFreePlaceModeItem(null);
+ moveModeDotDragLiveByIdRef.current.clear();
+ setMoveModeItem(itemName);
+ setMoveModeDragging(null);
+ setMoveModePending([]);
+ moveModePendingRef.current = [];
+ setMoveModeFreeCoordById(new Map());
+ setSelectedBox(null); // Clear box selection when entering move mode
+ }, []);
+
+ const finishMoveMode = useCallback(async () => {
+ try {
+ const pending = [...moveModePendingRef.current];
+ for (const move of pending) {
+ await api.moveSupplyLocations({
+ from_location: move.from,
+ to_location: move.to,
+ supply_id: move.supplyId,
+ shelf_from: move.shelfFrom,
+ shelf_to: move.shelfTo,
+ amount: move.qty
});
+ }
+ if (moveModeFreeCoordById.size > 0) {
+ for (const [id, { x, y }] of moveModeFreeCoordById) {
+ await api.updateSupplyLocation(id, { coord_x: x, coord_y: y });
+ }
+ }
+ if (pending.length > 0 || moveModeFreeCoordById.size > 0) {
+ await reloadSupplyLocations();
+ }
+ await reloadMasterItems();
+ } catch (error) {
+ console.error('Error finishing move mode:', error);
+ if (!isPanningRef.current) {
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) setConflictError(errorInfo);
+ else setError(errorInfo.message || 'Failed to save moves');
+ }
+ return;
+ }
+
+ moveModeDotDragLiveByIdRef.current.clear();
+ setMoveModeFreeCoordById(new Map());
+ setMoveModeItem(null);
+ setMoveModeDragging(null);
+ setMoveModePending([]);
+ moveModePendingRef.current = [];
+ setCurrentDragOverBox(null);
+ isDraggingMoveBoxRef.current = false;
+ }, [moveModeFreeCoordById, reloadMasterItems, reloadSupplyLocations]);
+
+ const cancelMoveMode = useCallback(async () => {
+ moveModeDotDragLiveByIdRef.current.clear();
+ setMoveModeFreeCoordById(new Map());
+ setMoveModeItem(null);
+ setMoveModeDragging(null);
+ setMoveModePending([]);
+ moveModePendingRef.current = [];
+ setCurrentDragOverBox(null);
+ isDraggingMoveBoxRef.current = false;
+ try {
+ await reloadSupplyLocations();
+ } catch (error) {
+ console.error('Error reloading after cancel move mode:', error);
+ }
+ }, [reloadSupplyLocations]);
+
+ const requestMasterArrowsRedraw = useCallback(() => {
+ window.dispatchEvent(new CustomEvent(MASTER_ARROWS_REDRAW_EVENT));
+ }, []);
+
+ const updateMoveModeDotDragLiveForArrows = useCallback((id, worldX, worldY) => {
+ const { x, y } = clampPointToRoom(worldX, worldY);
+ moveModeDotDragLiveByIdRef.current.set(id, { x, y });
+ window.dispatchEvent(new CustomEvent(MASTER_ARROWS_REDRAW_EVENT));
+ }, []);
+
+ const updateMoveModeFreeDotPosition = useCallback((id, worldX, worldY) => {
+ const { x, y } = clampPointToRoom(worldX, worldY);
+ setMoveModeFreeCoordById((prev) => {
+ const next = new Map(prev);
+ next.set(id, { x, y });
+ return next;
+ });
+ }, []);
+
+ const freePlaceVisualDots = useMemo(() => {
+ if (!freePlaceModeItem) return null;
+ const server = freePlacementsBySupplyPublicId.get(freePlaceModeItem) || [];
+ const visible = server
+ .filter((d) => !freePlacePendingDeletes.has(d.id))
+ .map((d) => {
+ const o = freePlacePendingCoordById.get(d.id);
+ return o ? { ...d, x: o.x, y: o.y } : d;
});
+ const adds = freePlacePendingAdds.map((a) => ({
+ id: a.tempId,
+ x: a.x,
+ y: a.y,
+ qty: a.qty
+ }));
+ return [...visible, ...adds];
+ }, [
+ freePlaceModeItem,
+ freePlacementsBySupplyPublicId,
+ freePlacePendingDeletes,
+ freePlacePendingCoordById,
+ freePlacePendingAdds
+ ]);
- // Clear add mode
- setAddModeItem(null);
- setAddModeQtyPerClick(1);
- setAddModePending(new Map());
+ /** Floor dots for arrows in subtract mode (hides fully pending-removed markers). */
+ const subtractModeVisualFreeDots = useMemo(() => {
+ if (!subtractModeItem) return null;
+ const server = freePlacementsBySupplyPublicId.get(subtractModeItem) || [];
+ return server
+ .map((d) => {
+ const key = `${FREE_SUBTRACT_DOT_PREFIX}||${d.id}`;
+ const pending = subtractModePending.get(key) || 0;
+ if ((d.qty || 0) - pending <= 0) return null;
+ return d;
+ })
+ .filter(Boolean);
+ }, [subtractModeItem, subtractModePending, freePlacementsBySupplyPublicId]);
+
+ /** Floor dots for arrows in move mode (follows pending coord drags). */
+ const moveModeVisualFreeDots = useMemo(() => {
+ if (!moveModeItem) return null;
+ const server = freePlacementsBySupplyPublicId.get(moveModeItem) || [];
+ return server.map((d) => {
+ const o = moveModeFreeCoordById.get(d.id);
+ return o ? { ...d, x: o.x, y: o.y } : d;
+ });
+ }, [moveModeItem, freePlacementsBySupplyPublicId, moveModeFreeCoordById]);
+
+ const clearFreePlaceSession = useCallback(() => {
+ setFreePlacePendingDeletes(new Set());
+ setFreePlacePendingCoordById(new Map());
+ setFreePlacePendingAdds([]);
+ setFreePlaceModeItem(null);
+ }, []);
+
+ const startFreePlaceMode = useCallback(
+ (itemName) => {
+ cancelAddMode();
+ cancelSubtractMode();
+ void cancelMoveMode();
+ setFreePlacePendingDeletes(new Set());
+ setFreePlacePendingCoordById(new Map());
+ setFreePlacePendingAdds([]);
+ setFreePlaceModeItem(itemName);
+ setSelectedBox(null);
+ },
+ [cancelMoveMode, cancelAddMode, cancelSubtractMode]
+ );
+
+ const cancelFreePlaceMode = useCallback(() => {
+ clearFreePlaceSession();
+ }, [clearFreePlaceSession]);
+
+ const finishFreePlaceMode = useCallback(async () => {
+ const name = freePlaceModeItemRef.current;
+ if (!name) {
+ clearFreePlaceSession();
+ return;
+ }
+
+ const hasWork =
+ freePlacePendingDeletes.size > 0 ||
+ freePlacePendingCoordById.size > 0 ||
+ freePlacePendingAdds.length > 0;
+
+ if (!hasWork) {
+ clearFreePlaceSession();
+ return;
+ }
+
+ const supplyId = masterInventoryItems.get(name)?.id;
+ if (!supplyId) {
+ setError(`Supply ID not found for item: ${name}`);
+ return;
+ }
+
+ try {
+ for (const id of freePlacePendingDeletes) {
+ await api.deleteSupplyLocation(id);
+ }
+ for (const [id, { x, y }] of freePlacePendingCoordById) {
+ if (freePlacePendingDeletes.has(id)) continue;
+ await api.updateSupplyLocation(id, { coord_x: x, coord_y: y });
+ }
+ for (const a of freePlacePendingAdds) {
+ await api.addSupplyLocation({
+ supply_id: supplyId,
+ coord_x: a.x,
+ coord_y: a.y,
+ amount: 1
+ });
+ }
+ await reloadSupplyLocations();
+ await reloadMasterItems();
} catch (error) {
- console.error('Error finishing add mode:', error);
- // Only set error if not panning (to avoid breaking pan)
+ console.error('Error finishing free place mode:', error);
if (!isPanningRef.current) {
- setError(error.message || 'Failed to add items');
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) setConflictError(errorInfo);
+ else setError(errorInfo.message || 'Failed to save floor placements');
}
+ return;
}
- }, [inventoryData, supplyNameToId]);
- const cancelAddMode = useCallback(() => {
- setAddModeItem(null);
- setAddModeQtyPerClick(1);
- setAddModePending(new Map());
+ clearFreePlaceSession();
+ }, [
+ masterInventoryItems,
+ reloadSupplyLocations,
+ reloadMasterItems,
+ freePlacePendingDeletes,
+ freePlacePendingCoordById,
+ freePlacePendingAdds,
+ clearFreePlaceSession
+ ]);
+
+ const handleFreePlaceWorldClick = useCallback(
+ (worldX, worldY) => {
+ const name = freePlaceModeItemRef.current;
+ if (!name) return;
+ const sid = masterInventoryItems.get(name)?.id;
+ if (!sid) return;
+ const { x, y } = clampPointToRoom(worldX, worldY);
+ setFreePlacePendingAdds((prev) => {
+ const existing = freePlacementsBySupplyPublicId.get(name) || [];
+ const onServer = existing.some(
+ (p) => Math.round(p.x) === x && Math.round(p.y) === y
+ );
+ const pendingDup = prev.some((a) => a.x === x && a.y === y);
+ if (onServer || pendingDup) {
+ setError('That floor coordinate already has a marker for this item (one unit per coordinate).');
+ return prev;
+ }
+ return [...prev, { tempId: newTempFreePlaceId(), x, y, qty: 1 }];
+ });
+ },
+ [masterInventoryItems, freePlacementsBySupplyPublicId, setError]
+ );
+
+ const updateFreePlaceSessionCoord = useCallback((id, worldX, worldY) => {
+ const { x, y } = clampPointToRoom(worldX, worldY);
+ if (typeof id === 'string' && id.startsWith('fp-')) {
+ setFreePlacePendingAdds((prev) =>
+ prev.map((a) => (a.tempId === id ? { ...a, x, y } : a))
+ );
+ return;
+ }
+ setFreePlacePendingCoordById((prev) => {
+ const next = new Map(prev);
+ next.set(id, { x, y });
+ return next;
+ });
+ }, []);
+
+ const handleFreePlaceSessionDotDelete = useCallback((id) => {
+ if (typeof id === 'string' && id.startsWith('fp-')) {
+ setFreePlacePendingAdds((prev) => prev.filter((a) => a.tempId !== id));
+ return;
+ }
+ setFreePlacePendingDeletes((prev) => new Set([...prev, id]));
+ setFreePlacePendingCoordById((prev) => {
+ const next = new Map(prev);
+ next.delete(id);
+ return next;
+ });
}, []);
+ const updateFreePlacementCoords = useCallback(
+ async (id, worldX, worldY) => {
+ const { x, y } = clampPointToRoom(worldX, worldY);
+ try {
+ await api.updateSupplyLocation(id, { coord_x: x, coord_y: y });
+ await reloadSupplyLocations();
+ await reloadMasterItems();
+ } catch (error) {
+ if (!isPanningRef.current) {
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) setConflictError(errorInfo);
+ else setError(errorInfo.message || 'Failed to move placement');
+ }
+ }
+ },
+ [reloadSupplyLocations, reloadMasterItems]
+ );
+
+ const deleteFreePlacement = useCallback(
+ async (id) => {
+ try {
+ await api.deleteSupplyLocation(id);
+ await reloadSupplyLocations();
+ await reloadMasterItems();
+ } catch (error) {
+ if (!isPanningRef.current) {
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) setConflictError(errorInfo);
+ else setError(errorInfo.message || 'Failed to delete placement');
+ }
+ }
+ },
+ [reloadSupplyLocations, reloadMasterItems]
+ );
+
+ const clearMoveModeDragging = useCallback(() => {
+ setMoveModeDragging(null);
+ setCurrentDragOverBox(null);
+ isDraggingMoveBoxRef.current = false;
+ }, []);
+
+ const handleMoveModeDragStart = useCallback((boxTitle, shelf, qty, x, y) => {
+ isDraggingMoveBoxRef.current = true;
+ const supplyPublicId = moveModeItemRef.current;
+ const meta = supplyPublicId ? masterInventoryItems.get(supplyPublicId) : null;
+ setMoveModeDragging({
+ boxTitle,
+ shelf,
+ qty,
+ x,
+ y,
+ originalX: x,
+ originalY: y,
+ supplyPublicId,
+ itemName: meta?.name || ''
+ });
+ }, [masterInventoryItems]);
+
+ const handleMoveModeDragMove = useCallback((x, y) => {
+ if (moveModeDragging) {
+ setMoveModeDragging(prev => ({ ...prev, x, y }));
+ }
+ }, [moveModeDragging]);
+
+ const handleMoveModeDrop = useCallback((targetBoxTitle, targetShelf) => {
+ if (!moveModeDragging || !moveModeItemRef.current) return;
+
+ const supplyPublicId = moveModeDragging.supplyPublicId || moveModeItemRef.current;
+ const itemName = moveModeDragging.itemName || '';
+ const { boxTitle: sourceBoxTitle, shelf: sourceShelf, qty } = moveModeDragging;
+
+ const sameShelf =
+ normalizeShelfIndex(sourceShelf) === normalizeShelfIndex(targetShelf);
+ if (sourceBoxTitle === targetBoxTitle && sameShelf) {
+ setMoveModeDragging(null);
+ isDraggingMoveBoxRef.current = false;
+ return;
+ }
+
+ const supplyId = masterInventoryItems.get(supplyPublicId)?.id;
+ if (!supplyId) {
+ console.error(`Supply ID not found for item: ${supplyPublicId}`);
+ setError(`Supply ID not found for item: ${supplyPublicId}`);
+ setMoveModeDragging(null);
+ isDraggingMoveBoxRef.current = false;
+ return;
+ }
+
+ setInventoryData((prev) =>
+ applyOptimisticMoveBetweenBoxes(
+ prev,
+ supplyPublicId,
+ itemName,
+ sourceBoxTitle,
+ sourceShelf,
+ targetBoxTitle,
+ targetShelf,
+ qty,
+ supplyId
+ )
+ );
+
+ const moveEntry = {
+ from: sourceBoxTitle,
+ to: targetBoxTitle,
+ shelfFrom: sourceShelf !== undefined ? sourceShelf : null,
+ shelfTo: targetShelf !== undefined ? targetShelf : null,
+ qty,
+ supplyId
+ };
+ setMoveModePending((prev) => [...prev, moveEntry]);
+ moveModePendingRef.current = [...moveModePendingRef.current, moveEntry];
+
+ setMoveModeDragging(null);
+ isDraggingMoveBoxRef.current = false;
+ }, [moveModeDragging, masterInventoryItems]);
+
const handleDragStart = useCallback((boxTitle, index, isMultiple, selectedIndices) => {
const boxData = inventoryData.get(boxTitle);
if (!boxData) return;
@@ -518,7 +1428,12 @@ export const InventoryProvider = ({ children }) => {
// Use move API for each item
if (draggedItemData.isMultiple) {
for (const item of draggedItemData.items) {
- const supplyId = supplyNameToId.get(item.name);
+ const supplyId =
+ item.supplyId ??
+ masterInventoryItems.get(
+ item.supplyPublicId ||
+ (item.supplyId != null ? `__legacy_id_${item.supplyId}` : '')
+ )?.id;
if (!supplyId) {
console.error(`Supply ID not found for item: ${item.name}`);
continue;
@@ -533,9 +1448,15 @@ export const InventoryProvider = ({ children }) => {
});
}
} else {
- const supplyId = supplyNameToId.get(draggedItemData.item.name);
+ const it = draggedItemData.item;
+ const supplyId =
+ it.supplyId ??
+ masterInventoryItems.get(
+ it.supplyPublicId ||
+ (it.supplyId != null ? `__legacy_id_${it.supplyId}` : '')
+ )?.id;
if (!supplyId) {
- throw new Error(`Supply ID not found for item: ${draggedItemData.item.name}`);
+ throw new Error(`Supply ID not found for item: ${it.name}`);
}
await api.moveSupplyLocations({
from_location: draggedItemData.sourceBox,
@@ -547,33 +1468,8 @@ export const InventoryProvider = ({ children }) => {
});
}
- // Update local state optimistically
- let newSourceInventory = [...sourceBoxData.inventory];
- let newTargetInventory = [...targetBoxData.inventory];
-
- if (draggedItemData.isMultiple) {
- const sortedIndices = [...draggedItemData.sourceIndices].sort((a, b) => b - a);
- sortedIndices.forEach(idx => {
- newSourceInventory.splice(idx, 1);
- });
- newTargetInventory.push(...draggedItemData.items);
- } else {
- newSourceInventory.splice(draggedItemData.sourceIndex, 1);
- newTargetInventory.push(draggedItemData.item);
- }
-
- setInventoryData(prev => {
- const next = new Map(prev);
- const sourceBox = next.get(draggedItemData.sourceBox);
- const targetBox = next.get(targetBoxTitle);
- if (sourceBox) {
- next.set(draggedItemData.sourceBox, { ...sourceBox, inventory: newSourceInventory });
- }
- if (targetBox) {
- next.set(targetBoxTitle, { ...targetBox, inventory: newTargetInventory });
- }
- return next;
- });
+ // Reload supply locations to ensure UI reflects actual server state
+ await reloadSupplyLocations();
// Auto-select the target box after successful drop
setSelectedBox(targetBoxTitle);
@@ -587,31 +1483,52 @@ export const InventoryProvider = ({ children }) => {
console.error('Error moving items:', error);
// Only set error if not panning (to avoid breaking pan)
if (!isPanningRef.current) {
- setError(error.message || 'Failed to move items');
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) {
+ setConflictError(errorInfo);
+ } else {
+ setError(errorInfo.message || 'Failed to move items');
+ }
}
}
- }, [draggedItemData, inventoryData, supplyNameToId]);
+ }, [draggedItemData, masterInventoryItems, reloadSupplyLocations]);
- // Master Item helper functions
- const resolveMasterItem = useCallback((itemName) => {
- return masterInventoryItems.get(itemName) || null;
+ // Master Item helper functions (lookup by supplies.public_id)
+ const resolveMasterItem = useCallback((supplyPublicId) => {
+ return masterInventoryItems.get(supplyPublicId) || null;
}, [masterInventoryItems]);
const computeMasterQuantities = useCallback(() => {
const quantities = new Map();
- inventoryData.forEach((boxData, boxTitle) => {
- boxData.inventory.forEach(item => {
- const currentQty = quantities.get(item.name) || 0;
- quantities.set(item.name, currentQty + (item.qty || 0));
+ inventoryData.forEach((boxData) => {
+ boxData.inventory.forEach((item) => {
+ const rowKey =
+ item.supplyPublicId ||
+ (item.supplyId != null ? `__legacy_id_${item.supplyId}` : item.name);
+ const currentQty = quantities.get(rowKey) || 0;
+ quantities.set(rowKey, currentQty + (item.qty || 0));
});
});
+ freePlacementsBySupplyPublicId.forEach((placements, pid) => {
+ let list = placements;
+ if (freePlaceModeItem === pid && freePlaceVisualDots != null) {
+ list = freePlaceVisualDots;
+ }
+ const sum = list.reduce((s, p) => s + (p.qty || 0), 0);
+ quantities.set(pid, (quantities.get(pid) || 0) + sum);
+ });
return quantities;
- }, [inventoryData]);
+ }, [inventoryData, freePlacementsBySupplyPublicId, freePlaceModeItem, freePlaceVisualDots]);
- const getItemLocations = useCallback((itemName) => {
+ const getItemLocations = useCallback((supplyPublicId) => {
const locations = [];
inventoryData.forEach((boxData, boxTitle) => {
- const hasItem = boxData.inventory.some(item => item.name === itemName);
+ const hasItem = boxData.inventory.some((item) => {
+ const k =
+ item.supplyPublicId ||
+ (item.supplyId != null ? `__legacy_id_${item.supplyId}` : null);
+ return k === supplyPublicId;
+ });
if (hasItem) {
locations.push(boxTitle);
}
@@ -619,26 +1536,39 @@ export const InventoryProvider = ({ children }) => {
return locations;
}, [inventoryData]);
- const addMasterItem = useCallback(async (item) => {
+ const createMasterItem = useCallback(async (item) => {
try {
const created = await api.createSupply({
name: item.name,
description: item.description || '',
image: item.image || null,
teams: item.teams || [],
- categories: item.categories || []
+ categories: item.categories || [],
+ custom_fields: item.custom_fields && Object.keys(item.custom_fields).length > 0 ? item.custom_fields : undefined,
+ supply_type_id: item.supply_type_id != null ? item.supply_type_id : undefined
});
- // Update local state
- setMasterInventoryItems(prev => {
+ const rowKey =
+ created.public_id || (created.id != null ? `__legacy_id_${created.id}` : null);
+ if (!rowKey) {
+ throw new Error('Created supply missing public_id');
+ }
+
+ setMasterInventoryItems((prev) => {
const next = new Map(prev);
- next.set(created.name, {
+ const locations = locationsListFromSupplyLocs(created.locations);
+ next.set(rowKey, {
+ public_id: created.public_id || null,
name: created.name,
description: created.description || '',
image: created.image || null,
- locations: created.locations || [],
+ locations,
teams: created.teams || [],
categories: created.categories || [],
+ custom_fields: created.custom_fields || {},
+ supply_type_id: created.supply_type_id ?? null,
+ type_name: created.type_name || null,
+ type_has_template_image: Boolean(created.type_has_template_image),
lastModified: created.lastModified || null,
last_modified_by: created.last_modified_by || null,
last_modified_by_name: created.last_modified_by_name || null,
@@ -646,62 +1576,59 @@ export const InventoryProvider = ({ children }) => {
});
return next;
});
-
- // Update name to ID mapping
- setSupplyNameToId(prev => {
- const next = new Map(prev);
- next.set(created.name, created.id);
- return next;
- });
} catch (error) {
- console.error('Error adding Master item:', error);
+ console.error('Error creating Master item:', error);
// Only set error if not panning (to avoid breaking pan)
if (!isPanningRef.current) {
- setError(error.message || 'Failed to add item');
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) {
+ setConflictError(errorInfo);
+ } else {
+ setError(errorInfo.message);
+ }
}
throw error;
}
}, []);
- const updateMasterItem = useCallback(async (oldName, newItem) => {
+ const updateMasterItem = useCallback(async (supplyPublicId, newItem) => {
try {
- const oldItem = masterInventoryItems.get(oldName);
+ const oldItem = masterInventoryItems.get(supplyPublicId);
if (!oldItem || !oldItem.id) {
- throw new Error(`Item ${oldName} not found or missing ID`);
+ throw new Error(`Item ${supplyPublicId} not found or missing ID`);
}
-
- const updated = await api.updateSupply(oldItem.id, {
+
+ const payload = {
name: newItem.name,
description: newItem.description || '',
image: newItem.image || null,
teams: newItem.teams || [],
- categories: newItem.categories || []
- });
-
- // Update local state
- setMasterInventoryItems(prev => {
+ categories: newItem.categories || [],
+ custom_fields: newItem.custom_fields && Object.keys(newItem.custom_fields).length > 0 ? newItem.custom_fields : {}
+ };
+ if (newItem.unlink_from_type) {
+ payload.unlink_from_type = true;
+ }
+ if (newItem.supply_type_id !== undefined) {
+ payload.supply_type_id = newItem.supply_type_id;
+ }
+ const updated = await api.updateSupply(oldItem.id, payload);
+
+ setMasterInventoryItems((prev) => {
const next = new Map(prev);
- if (oldName !== newItem.name) {
- next.delete(oldName);
- // Update all box references if name changed
- setInventoryData(prevData => {
- const newData = new Map(prevData);
- newData.forEach((boxData, boxTitle) => {
- const updatedInventory = boxData.inventory.map(item =>
- item.name === oldName ? { ...item, name: newItem.name } : item
- );
- newData.set(boxTitle, { ...boxData, inventory: updatedInventory });
- });
- return newData;
- });
- }
- next.set(updated.name, {
+ const locations = locationsListFromSupplyLocs(updated.locations);
+ next.set(supplyPublicId, {
+ public_id: updated.public_id || oldItem.public_id || null,
name: updated.name,
description: updated.description || '',
image: updated.image || null,
- locations: updated.locations || [],
+ locations,
teams: updated.teams || [],
categories: updated.categories || [],
+ custom_fields: updated.custom_fields || {},
+ supply_type_id: updated.supply_type_id ?? null,
+ type_name: updated.type_name || null,
+ type_has_template_image: Boolean(updated.type_has_template_image),
lastModified: updated.lastModified || null,
last_modified_by: updated.last_modified_by || null,
last_modified_by_name: updated.last_modified_by_name || null,
@@ -709,77 +1636,87 @@ export const InventoryProvider = ({ children }) => {
});
return next;
});
-
- // Update name to ID mapping if name changed
- if (oldName !== newItem.name) {
- setSupplyNameToId(prev => {
- const next = new Map(prev);
- next.delete(oldName);
- next.set(updated.name, updated.id);
- return next;
- });
- }
+
+ // Reload supply locations to get updated item names in boxes
+ // (supply locations API JOINs with supplies table, so names will be updated)
+ await reloadSupplyLocations();
} catch (error) {
console.error('Error updating Master item:', error);
// Only set error if not panning (to avoid breaking pan)
if (!isPanningRef.current) {
- setError(error.message || 'Failed to update item');
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) {
+ setConflictError(errorInfo);
+ } else {
+ setError(errorInfo.message);
+ }
}
throw error;
}
- }, [masterInventoryItems]);
+ }, [masterInventoryItems, reloadSupplyLocations]);
- const deleteMasterItem = useCallback(async (itemName) => {
+ const deleteMasterItem = useCallback(async (supplyPublicId) => {
try {
- const item = masterInventoryItems.get(itemName);
+ const item = masterInventoryItems.get(supplyPublicId);
if (!item || !item.id) {
- throw new Error(`Item ${itemName} not found or missing ID`);
+ throw new Error(`Item ${supplyPublicId} not found or missing ID`);
}
-
+
await api.deleteSupply(item.id);
-
- // Update local state
- setMasterInventoryItems(prev => {
- const next = new Map(prev);
- next.delete(itemName);
- return next;
- });
-
- // Remove from name to ID mapping
- setSupplyNameToId(prev => {
+
+ setMasterInventoryItems((prev) => {
const next = new Map(prev);
- next.delete(itemName);
+ next.delete(supplyPublicId);
return next;
});
-
- // Remove from all boxes (CASCADE in DB handles this, but update UI)
- setInventoryData(prev => {
- const newData = new Map(prev);
- newData.forEach((boxData, boxTitle) => {
- const updatedInventory = boxData.inventory.filter(item => item.name !== itemName);
- newData.set(boxTitle, { ...boxData, inventory: updatedInventory });
- });
- return newData;
- });
-
- // Close preview if this item was selected
- if (selectedMasterItem === itemName) {
+
+ await reloadSupplyLocations();
+
+ if (selectedMasterItem === supplyPublicId) {
setSelectedMasterItem(null);
}
} catch (error) {
console.error('Error deleting Master item:', error);
// Only set error if not panning (to avoid breaking pan)
if (!isPanningRef.current) {
- setError(error.message || 'Failed to delete item');
+ const errorInfo = await handleApiError(error);
+ if (errorInfo.isConflict) {
+ setConflictError(errorInfo);
+ } else {
+ setError(errorInfo.message);
+ }
}
throw error;
}
- }, [selectedMasterItem, masterInventoryItems]);
+ }, [selectedMasterItem, masterInventoryItems, reloadSupplyLocations]);
const clearSelectedMasterItem = useCallback(() => {
setSelectedMasterItem(null);
}, []);
+ /** Close master preview (clears selection), add/subtract/move/free-place modes, and box add/edit UI. */
+ const dismissMasterWorkbenchUI = useCallback(async () => {
+ try {
+ await cancelMoveMode();
+ } catch (e) {
+ console.error('dismissMasterWorkbenchUI: cancelMoveMode', e);
+ }
+ clearFreePlaceSession();
+ setAddModeItem(null);
+ setAddModeQtyPerClick(1);
+ setAddModePending(new Map());
+ setSubtractModeItem(null);
+ setSubtractModeQtyPerClick(1);
+ setSubtractModePending(new Map());
+ setSelectedMasterItem(null);
+ setSelectedBox(null);
+ setCurrentEditingBox(null);
+ setCurrentEditingIndex(null);
+ setCurrentAddingBox(null);
+ setCurrentAddingIndex(null);
+ setTooltip({ visible: false, title: '', x: 0, y: 0 });
+ }, [cancelMoveMode, clearFreePlaceSession]);
+
const value = {
// State
inventoryData,
@@ -802,6 +1739,8 @@ export const InventoryProvider = ({ children }) => {
isLoading,
error,
setError,
+ conflictError,
+ setConflictError,
// Setters
setInventoryData,
setSelectedBox,
@@ -832,10 +1771,16 @@ export const InventoryProvider = ({ children }) => {
resolveMasterItem,
computeMasterQuantities,
getItemLocations,
- addMasterItem,
+ createMasterItem,
updateMasterItem,
deleteMasterItem,
clearSelectedMasterItem,
+ dismissMasterWorkbenchUI,
+ reloadMasterItems,
+ reloadSupplyLocations,
+ // Master Table Filter
+ masterFilterLocation,
+ setMasterFilterLocation,
// Add Mode
addModeItem,
addModeQtyPerClick,
@@ -847,6 +1792,47 @@ export const InventoryProvider = ({ children }) => {
cancelAddMode,
handleBoxClickAddMode,
boxHasAnyPending,
+ // Subtract Mode (removes items from boxes, not the master entry)
+ subtractModeItem,
+ subtractModeQtyPerClick,
+ setSubtractModeQtyPerClick,
+ subtractModePending,
+ subtractModePreviewRef,
+ startSubtractMode,
+ finishSubtractMode,
+ cancelSubtractMode,
+ handleBoxClickSubtractMode,
+ handleSubtractFreeDotClick,
+ boxHasAnySubtractPending,
+ // Move Mode
+ moveModeItem,
+ moveModeDragging,
+ startMoveMode,
+ finishMoveMode,
+ cancelMoveMode,
+ clearMoveModeDragging,
+ handleMoveModeDragStart,
+ handleMoveModeDragMove,
+ handleMoveModeDrop,
+ isDraggingMoveBoxRef,
+ moveModeFreeCoordById,
+ moveModeDotDragLiveByIdRef,
+ updateMoveModeFreeDotPosition,
+ updateMoveModeDotDragLiveForArrows,
+ requestMasterArrowsRedraw,
+ freePlaceModeItem,
+ freePlacementsBySupplyPublicId,
+ startFreePlaceMode,
+ cancelFreePlaceMode,
+ finishFreePlaceMode,
+ handleFreePlaceWorldClick,
+ freePlaceVisualDots,
+ subtractModeVisualFreeDots,
+ moveModeVisualFreeDots,
+ updateFreePlaceSessionCoord,
+ handleFreePlaceSessionDotDelete,
+ updateFreePlacementCoords,
+ deleteFreePlacement,
};
return (
diff --git a/milventory/src/index.css b/milventory/src/index.css
index 497c0ac..34df791 100644
--- a/milventory/src/index.css
+++ b/milventory/src/index.css
@@ -16,6 +16,46 @@
--files: #b72a2a; /* red file cabinets */
}
+/* Global scrollbars: transparent track; thumb stays visible */
+* {
+ scrollbar-width: thin;
+ scrollbar-color: rgba(154, 168, 194, 0.45) transparent;
+}
+
+*::-webkit-scrollbar {
+ width: 10px;
+ height: 10px;
+}
+
+*::-webkit-scrollbar-track,
+*::-webkit-scrollbar-track-piece {
+ background: transparent;
+}
+
+*::-webkit-scrollbar-thumb {
+ background-color: rgba(255, 255, 255, 0.22);
+ border-radius: 6px;
+}
+
+*::-webkit-scrollbar-thumb:hover {
+ background-color: rgba(255, 255, 255, 0.34);
+}
+
+*::-webkit-scrollbar-corner {
+ background: transparent;
+}
+
+@keyframes slideDown {
+ from {
+ opacity: 0;
+ transform: translateY(-10px);
+ }
+ to {
+ opacity: 1;
+ transform: translateY(0);
+ }
+}
+
html, body { height: 100%; margin: 0; }
body {
background: radial-gradient(1200px 800px at 50% 0%, #0d0f14, #07080c);
@@ -324,9 +364,10 @@ body {
flex-shrink: 0;
}
+/* Modal / form image preview: full image visible, scaled down — no crop */
.edit-form-image-container {
width: 100%;
- aspect-ratio: 16 / 9;
+ max-height: min(50vh, 26rem);
background: rgba(0,0,0,.3);
border: 1px solid rgba(255,255,255,.1);
border-radius: 6px;
@@ -336,12 +377,18 @@ body {
justify-content: center;
flex-shrink: 0;
position: relative;
+ box-sizing: border-box;
+ padding: 0.35rem;
}
.edit-form-image-container img {
- width: 100%;
- height: 100%;
+ display: block;
+ max-width: 100%;
+ max-height: min(50vh, 26rem);
+ width: auto;
+ height: auto;
object-fit: contain;
+ object-position: center;
}
.edit-form-image-placeholder {
@@ -421,6 +468,29 @@ body {
gap: 0.75rem;
}
+.modal form {
+ min-width: 0;
+ max-width: 100%;
+}
+
+.type-form-fields {
+ display: flex;
+ flex-direction: column;
+ gap: 0.65rem;
+ min-width: 0;
+ max-width: 100%;
+}
+
+/* Master create/edit: cap height (40px inset top + bottom), scroll body */
+.modal.master-item-edit-modal,
+.modal.master-item-create-modal {
+ max-height: calc(100vh - 80px);
+ max-height: calc(100dvh - 80px);
+ overflow-y: auto;
+ overflow-x: hidden;
+ overscroll-behavior: contain;
+}
+
.modal h3 {
margin: 0 0 0.5rem 0;
font-size: 1rem;
@@ -430,6 +500,7 @@ body {
.modal input[type="text"],
.modal input[type="number"],
+.modal input[type="date"],
.modal textarea,
.modal select {
background: rgba(0,0,0,.3);
@@ -444,6 +515,30 @@ body {
font-family: inherit;
}
+/* Hide number input up/down spinners in modals and elsewhere */
+.modal input[type="number"]::-webkit-inner-spin-button,
+.modal input[type="number"]::-webkit-outer-spin-button,
+input[type="number"].no-spinner::-webkit-inner-spin-button,
+input[type="number"].no-spinner::-webkit-outer-spin-button {
+ -webkit-appearance: none;
+ margin: 0;
+}
+.modal input[type="number"],
+input[type="number"].no-spinner {
+ -moz-appearance: textfield;
+ appearance: textfield;
+}
+
+/* Date input: match app style; tone down native calendar icon */
+.modal input[type="date"]::-webkit-calendar-picker-indicator {
+ filter: invert(0.7);
+ cursor: pointer;
+ opacity: 0.8;
+}
+.modal input[type="date"]::-webkit-datetime-edit {
+ color: var(--text);
+}
+
.modal select {
cursor: pointer;
}
@@ -453,44 +548,142 @@ body {
background: rgba(0,0,0,.4);
}
+/* Reusable select styling (modals, admin panel, etc.) */
+.styled-select,
+select.styled-select {
+ background: rgba(0,0,0,.3);
+ border: 1px solid rgba(255,255,255,.1);
+ color: var(--text);
+ padding: 0.5rem;
+ border-radius: 6px;
+ font-size: 0.85rem;
+ outline: none;
+ width: 100%;
+ box-sizing: border-box;
+ font-family: inherit;
+ cursor: pointer;
+}
+.styled-select:focus,
+select.styled-select:focus {
+ border-color: var(--accent);
+ background: rgba(0,0,0,.4);
+}
+
.modal input[type="text"]:focus,
.modal input[type="number"]:focus,
+.modal input[type="date"]:focus,
.modal textarea:focus {
border-color: var(--accent);
background: rgba(0,0,0,.4);
}
+/* Tag/category dropdown: compact trigger (override .modal input padding) */
+.tag-dropdown-trigger input {
+ padding: 0 !important;
+ margin: 0;
+ border: none !important;
+ background: transparent !important;
+ line-height: 1.35;
+ min-height: 0;
+}
+.tag-dropdown-trigger input:focus {
+ border: none !important;
+ background: transparent !important;
+}
+
.modal textarea {
resize: none;
min-height: 60px;
}
-.modal .edit-form-image-container {
- width: 100%;
- aspect-ratio: 16 / 9;
- background: rgba(0,0,0,.3);
- border: 1px solid rgba(255,255,255,.1);
- border-radius: 6px;
- overflow: hidden;
+/* Master create/edit: one bordered field with type-fixed text + editable tail */
+.modal .modal-field-composite {
display: flex;
align-items: center;
- justify-content: center;
+ width: 100%;
+ box-sizing: border-box;
+ background: rgba(0, 0, 0, 0.3);
+ border: 1px solid rgba(255, 255, 255, 0.1);
+ border-radius: 6px;
+ font-size: 0.85rem;
+ margin-bottom: 0.5rem;
+}
+
+.modal .modal-field-composite:focus-within {
+ border-color: var(--accent);
+ background: rgba(0, 0, 0, 0.4);
+}
+
+.modal .modal-field-composite__prefix {
+ padding: 0.5rem 0 0.5rem 0.5rem;
+ color: var(--muted);
+ white-space: nowrap;
flex-shrink: 0;
- position: relative;
+ max-width: 55%;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ user-select: none;
+ pointer-events: none;
}
-.modal .edit-form-image-container img {
- width: 100%;
- height: 100%;
- object-fit: contain;
+.modal .modal-field-composite__input {
+ flex: 1;
+ min-width: 0;
+ width: 0;
+ border: none !important;
+ outline: none !important;
+ background: transparent !important;
+ box-shadow: none !important;
+ padding: 0.5rem;
+ color: var(--text);
+ font-size: inherit;
+ font-family: inherit;
+ border-radius: 0 6px 6px 0;
+}
+
+.modal .modal-field-composite__input:focus {
+ border: none !important;
+ background: transparent !important;
+}
+
+.modal .modal-field-composite--stack {
+ flex-direction: column;
+ align-items: stretch;
}
-.modal .edit-form-image-placeholder {
+.modal .modal-field-composite__prefix-block {
+ padding: 0.5rem;
color: var(--muted);
- font-style: italic;
+ white-space: pre-wrap;
+ word-break: break-word;
+ user-select: none;
+ pointer-events: none;
+ border-bottom: 1px solid rgba(255, 255, 255, 0.08);
font-size: 0.85rem;
}
+.modal .modal-field-composite__textarea {
+ display: block;
+ width: 100%;
+ min-height: 60px;
+ border: none !important;
+ outline: none !important;
+ background: transparent !important;
+ box-shadow: none !important;
+ padding: 0.5rem;
+ color: var(--text);
+ font-size: 0.85rem;
+ font-family: inherit;
+ resize: vertical;
+ border-radius: 0 0 6px 6px;
+ box-sizing: border-box;
+}
+
+.modal .modal-field-composite__textarea:focus {
+ border: none !important;
+ background: transparent !important;
+}
+
.modal-actions {
display: flex;
gap: 0.5rem;
@@ -573,6 +766,12 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
.room { fill: var(--room); stroke: #2c3344; stroke-width: 6; }
+.room-free-place-active {
+ stroke: var(--accent);
+ stroke-width: 8;
+ opacity: 0.98;
+}
+
.box {
vector-effect: non-scaling-stroke;
stroke: var(--stroke); stroke-width: 2; rx: 10; ry: 10; cursor: pointer;
@@ -719,6 +918,8 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
height: 100%;
padding: 1rem;
overflow: hidden;
+ container-type: inline-size;
+ container-name: admin-inv-panel;
}
.master-table-header {
@@ -733,6 +934,34 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
font-weight: 600;
}
+.master-subtabs {
+ display: flex;
+ gap: 0.5rem;
+ margin-bottom: 1rem;
+ border-bottom: 1px solid rgba(255,255,255,.1);
+ flex-shrink: 0;
+}
+
+.master-subtab {
+ background: transparent;
+ border: none;
+ border-bottom: 2px solid transparent;
+ color: var(--muted);
+ padding: 0.5rem 1rem;
+ cursor: pointer;
+ font-size: 0.9rem;
+ transition: color 0.2s, border-color 0.2s;
+}
+
+.master-subtab:hover {
+ color: var(--text);
+}
+
+.master-subtab.active {
+ color: var(--accent);
+ border-bottom-color: var(--accent);
+}
+
.master-table-search {
flex-shrink: 0;
margin-bottom: 1rem;
@@ -758,12 +987,13 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
.master-table-content {
flex: 1;
overflow-y: auto;
- overflow-x: hidden;
+ overflow-x: auto;
min-height: 0;
}
.master-table {
width: 100%;
+ min-width: max-content;
border-collapse: collapse;
font-size: 0.85rem;
}
@@ -799,6 +1029,14 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
background: rgba(155, 183, 255, 0.1);
}
+.master-table .name-cell {
+ word-wrap: break-word;
+ overflow-wrap: break-word;
+ word-break: break-word;
+ white-space: normal;
+ max-width: 200px;
+}
+
.master-table .qty-cell {
width: 60px;
text-align: center;
@@ -813,6 +1051,24 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
font-size: 0.8rem;
}
+.master-table .category-cell {
+ max-width: 120px;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+ color: var(--muted);
+ font-size: 0.8rem;
+}
+
+.master-table .team-cell {
+ max-width: 120px;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+ color: var(--muted);
+ font-size: 0.8rem;
+}
+
.master-table .modified-cell {
white-space: nowrap;
color: var(--muted);
@@ -827,6 +1083,54 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
padding: 2rem 1rem;
}
+/* Admin Subtabs — grid keeps all labels visible; 2×2 when panel is narrow */
+.admin-subtabs {
+ display: grid;
+ grid-template-columns: repeat(4, minmax(0, 1fr));
+ gap: 0.35rem 0.4rem;
+ margin-bottom: 1rem;
+ border-bottom: 1px solid rgba(255,255,255,.1);
+ padding-bottom: 0.5rem;
+ flex-shrink: 0;
+}
+
+@container admin-inv-panel (max-width: 440px) {
+ .admin-subtabs {
+ grid-template-columns: repeat(2, minmax(0, 1fr));
+ }
+}
+
+.admin-subtab {
+ background: none;
+ border: none;
+ color: var(--muted);
+ padding: 0.4rem 0.35rem;
+ cursor: pointer;
+ font-size: 0.78rem;
+ line-height: 1.25;
+ border-bottom: 2px solid transparent;
+ transition: color 0.2s, border-color 0.2s;
+ margin-bottom: -0.5rem;
+ min-width: 0;
+ min-height: 2.35rem;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ text-align: center;
+ hyphens: auto;
+ overflow-wrap: anywhere;
+ word-break: break-word;
+}
+
+.admin-subtab:hover {
+ color: var(--text);
+}
+
+.admin-subtab.active {
+ color: var(--text);
+ border-bottom-color: var(--accent);
+}
+
.master-table-actions {
flex-shrink: 0;
margin-top: 0.5rem;
@@ -842,7 +1146,9 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
.master-preview-pane-overlay .master-preview-pane {
width: 400px;
- max-height: 300px;
+ /* Match overlay top offset (20px) with equal inset from viewport bottom */
+ max-height: calc(100vh - 40px);
+ max-height: calc(100dvh - 40px);
overflow-y: auto;
}
@@ -871,6 +1177,13 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
margin: 0;
font-size: 1rem;
font-weight: 600;
+ word-wrap: break-word;
+ overflow-wrap: break-word;
+ word-break: break-word;
+ white-space: normal;
+ flex: 1;
+ min-width: 0;
+ margin-right: 0.5rem;
}
.master-preview-pane-close {
@@ -907,10 +1220,131 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
line-height: 1.5;
}
+/* Description + image: landscape/square → column (image below, centered); portrait → row (text left, image right) */
+.master-preview-description-body {
+ display: flex;
+ gap: 0.75rem;
+}
+
+.master-preview-description-body--text-only {
+ flex-direction: column;
+}
+
+.master-preview-description-body--landscape {
+ flex-direction: column;
+ align-items: center;
+}
+
+.master-preview-description-body--landscape .master-preview-description-text {
+ align-self: stretch;
+ width: 100%;
+}
+
+.master-preview-description-body--portrait {
+ flex-direction: row;
+ align-items: flex-start;
+}
+
+.master-preview-description-body--portrait .master-preview-description-text {
+ flex: 1;
+ min-width: 0;
+}
+
+.master-preview-description-image {
+ flex-shrink: 0;
+}
+
+/* Portrait: fixed-width strip on the right; center the image in that strip */
+.master-preview-description-body--portrait .master-preview-description-image {
+ flex: 0 0 min(42%, 14rem);
+ width: min(42%, 14rem);
+ min-width: 0;
+ display: flex;
+ justify-content: center;
+ align-items: flex-start;
+}
+
+.master-preview-description-body--landscape .master-preview-description-image {
+ width: 100%;
+ display: flex;
+ justify-content: center;
+}
+
+.master-preview-description-image img {
+ display: block;
+ max-width: 100%;
+ width: auto;
+ height: auto;
+ object-fit: contain;
+ object-position: center;
+ border-radius: 4px;
+}
+
+/* Landscape / stacked: cap height so tall assets do not dominate */
+.master-preview-description-body--landscape .master-preview-description-image img {
+ max-height: min(45vh, 22rem);
+}
+
+/* Portrait / beside text: stay within the image column; cap height */
+.master-preview-description-body--portrait .master-preview-description-image img {
+ max-width: 100%;
+ max-height: min(40vh, 18rem);
+ width: auto;
+ height: auto;
+}
+
+.master-preview-custom-fields strong {
+ display: block;
+ margin-bottom: 0.25rem;
+ color: var(--muted);
+ font-size: 0.8rem;
+ font-family: inherit;
+ text-transform: uppercase;
+ letter-spacing: 0.5px;
+}
+
.master-preview-locations {
margin-top: 0.75rem;
}
+.master-preview-free-coords-group {
+ margin: 0;
+}
+
+/* Strips native button chrome only — do not set font here; specificity would beat
+ .master-preview-location-item and block font-size from applying to this button. */
+button.master-preview-locations-row-button {
+ appearance: none;
+ display: block;
+ width: 100%;
+ background: none;
+ border: none;
+ margin: 0;
+ font-family: inherit;
+ text-align: left;
+ cursor: pointer;
+}
+
+.master-preview-free-coords-list {
+ list-style: none;
+ margin: 0.2rem 0 0;
+ padding: 0;
+ display: flex;
+ flex-wrap: wrap;
+ gap: 0.2rem 0.65rem;
+ align-items: baseline;
+}
+
+.master-preview-free-coord-item {
+ flex: 0 0 auto;
+ margin: 0;
+ padding: 0;
+ font-size: 0.72rem;
+ line-height: 1.35;
+ color: #9aa8c2;
+ white-space: nowrap;
+}
+
.master-preview-locations strong {
display: block;
margin-bottom: 0.25rem;
@@ -931,16 +1365,10 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
font-size: 0.85em;
}
-.master-preview-image {
- margin-top: 0.75rem;
-}
-
-.master-preview-image img {
- max-width: 100%;
- border-radius: 4px;
-}
-
.master-preview-actions {
+ display: flex;
+ flex-direction: column;
+ align-items: stretch;
margin-top: 0.75rem;
padding-top: 0.75rem;
border-top: 1px solid rgba(255,255,255,.1);
@@ -948,19 +1376,35 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
.master-preview-actions strong {
display: block;
- margin-bottom: 0.5rem;
+ margin-bottom: 0;
color: var(--muted);
font-size: 0.8rem;
text-transform: uppercase;
letter-spacing: 0.5px;
}
+/* Tight to the heading; space before buttons lives on the hint’s bottom margin */
+.master-preview-actions-hint {
+ margin: 0.2rem 0 0.75rem;
+ font-size: 0.85rem;
+ color: var(--muted);
+}
+
.master-preview-actions-buttons {
display: flex;
+ flex-direction: row;
+ flex-wrap: wrap;
gap: 0.5rem;
+ align-items: center;
+ margin-top: 0.5rem;
+}
+
+.master-preview-actions:has(.master-preview-actions-hint) .master-preview-actions-buttons {
+ margin-top: 0;
}
.master-action-button {
+ flex: 0 0 auto;
background: rgba(255,255,255,.1);
border: none;
color: var(--text);
@@ -969,6 +1413,7 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
cursor: pointer;
font-size: 0.8rem;
transition: background 0.2s, opacity 0.2s;
+ white-space: nowrap;
}
.master-action-button:hover {
@@ -993,6 +1438,16 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
background: rgba(74, 222, 128, 0.5);
}
+.master-action-button.free-place-button {
+ background: rgba(56, 189, 248, 0.35);
+ color: #fff;
+ font-weight: 400;
+}
+
+.master-action-button.free-place-button:hover {
+ background: rgba(56, 189, 248, 0.5);
+}
+
/* Arrow Connections */
.master-arrow-path {
stroke: var(--accent);
@@ -1067,14 +1522,22 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
pointer-events: none;
}
+/* Re-enable pointer events inside the foreignObject so buttons/scrolling work */
+.box-inventory-overlay foreignObject {
+ pointer-events: all;
+}
+
.box-inventory-table {
background: var(--panel);
border: 1px solid rgba(255,255,255,.15);
border-radius: 8px;
box-shadow: 0 4px 20px rgba(0,0,0,.4);
padding: 0.75rem;
- max-height: 400px;
- overflow-y: auto;
+ height: 100%;
+ box-sizing: border-box;
+ display: flex;
+ flex-direction: column;
+ overflow: hidden;
color: var(--text);
font-size: 0.85rem;
}
@@ -1083,6 +1546,7 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
margin-bottom: 0.5rem;
padding-bottom: 0.5rem;
border-bottom: 1px solid rgba(255,255,255,.1);
+ flex-shrink: 0;
}
.box-inventory-header h4 {
@@ -1093,8 +1557,10 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
}
.box-inventory-content {
- max-height: 350px;
+ flex: 1;
overflow-y: auto;
+ overflow-x: hidden;
+ min-height: 0;
}
.box-inventory-shelf {
@@ -1197,6 +1663,7 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
text-align: center;
width: 80px;
-moz-appearance: textfield;
+ appearance: textfield;
}
.number-input::-webkit-inner-spin-button,
@@ -1300,12 +1767,296 @@ svg.overlay { position: absolute; inset: 0; width: 100%; height: 100%; pointer-e
padding: 0.25rem 0.75rem;
border-radius: 4px;
cursor: pointer;
+}
+
+.add-button:hover {
+ opacity: 0.9;
+}
+
+/* Admin Actions Panel */
+.admin-actions-panel {
+ position: absolute;
+ bottom: 20px;
+ left: 20px;
+ background: var(--panel);
+ border: 1px solid rgba(255,255,255,.15);
+ border-radius: 12px;
+ box-shadow: 0 4px 20px rgba(0,0,0,.4);
+ padding: 1rem;
+ min-width: 200px;
+ z-index: 100;
+}
+
+.admin-actions-header {
+ margin-bottom: 0.75rem;
+ padding-bottom: 0.5rem;
+ border-bottom: 1px solid rgba(255,255,255,.1);
+}
+
+.admin-actions-header h3 {
+ margin: 0;
+ font-size: 1rem;
+ font-weight: 600;
+ color: var(--text);
+}
+
+.admin-actions-content {
+ display: flex;
+ flex-direction: column;
+ gap: 0.5rem;
+}
+
+.admin-action-button {
+ background: var(--accent);
+ color: var(--panel);
+ border: none;
+ padding: 0.75rem 1rem;
+ border-radius: 6px;
+ cursor: pointer;
+ font-weight: 600;
+ font-size: 0.9rem;
+ transition: opacity 0.2s;
+ width: 100%;
+}
+
+.admin-action-button:hover {
+ opacity: 0.9;
+}
+
+.admin-action-button:active {
+ opacity: 0.8;
+}
+
+/* Move Locations Modal */
+.move-locations-modal {
+ position: absolute;
+ top: 20px;
+ right: 20px;
+ background: var(--panel);
+ border: 1px solid rgba(255, 193, 7, 0.3);
+ border-radius: 12px;
+ box-shadow: 0 4px 24px rgba(0,0,0,.5);
+ padding: 1rem;
+ min-width: 280px;
+ max-width: 340px;
+ z-index: 200;
+}
+
+.move-modal-header {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ margin-bottom: 0.75rem;
+ padding-bottom: 0.5rem;
+ border-bottom: 1px solid rgba(255, 193, 7, 0.2);
+}
+
+.move-modal-header h3 {
+ margin: 0;
+ font-size: 1rem;
font-weight: 600;
+ color: #ffc107;
+}
+
+.move-modal-close {
+ background: none;
+ border: none;
+ color: var(--muted);
+ font-size: 1.1rem;
+ cursor: pointer;
+ padding: 0.2rem 0.4rem;
+ border-radius: 4px;
+ line-height: 1;
+}
+
+.move-modal-close:hover {
+ color: var(--text);
+ background: rgba(255,255,255,.1);
+}
+
+.move-modal-body {
+ display: flex;
+ flex-direction: column;
+ gap: 0.75rem;
+}
+
+.move-modal-hint {
+ color: var(--muted);
font-size: 0.85rem;
- margin-right: 0.5rem;
+ margin: 0;
+ line-height: 1.4;
}
-.add-button:hover {
+.move-modal-select-btn {
+ background: #ffc107;
+ color: #1a1a2e;
+ border: none;
+ padding: 0.75rem 1rem;
+ border-radius: 6px;
+ cursor: pointer;
+ font-weight: 600;
+ font-size: 0.9rem;
+ transition: opacity 0.2s;
+ width: 100%;
+}
+
+.move-modal-select-btn:hover {
opacity: 0.9;
}
+.move-modal-selected {
+ display: flex;
+ flex-direction: column;
+ gap: 0.35rem;
+}
+
+.move-modal-label {
+ font-size: 0.75rem;
+ font-weight: 600;
+ color: var(--muted);
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+}
+
+.move-modal-box-list {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 0.3rem;
+ max-height: 80px;
+ overflow-y: auto;
+}
+
+.move-modal-box-tag {
+ background: rgba(255, 193, 7, 0.15);
+ color: #ffc107;
+ padding: 0.2rem 0.5rem;
+ border-radius: 4px;
+ font-size: 0.75rem;
+ font-weight: 500;
+ border: 1px solid rgba(255, 193, 7, 0.25);
+}
+
+.move-modal-transform {
+ display: flex;
+ flex-direction: column;
+ gap: 0.5rem;
+}
+
+.move-modal-transform-fields {
+ display: flex;
+ gap: 0.5rem;
+}
+
+.move-modal-field {
+ flex: 1;
+ display: flex;
+ align-items: center;
+ gap: 0.4rem;
+}
+
+.move-modal-field-label {
+ font-size: 0.8rem;
+ font-weight: 600;
+ color: var(--muted);
+ min-width: 14px;
+}
+
+.move-modal-input {
+ flex: 1;
+ background: rgba(255,255,255,.06);
+ border: 1px solid rgba(255,255,255,.15);
+ border-radius: 6px;
+ color: var(--text);
+ padding: 0.4rem 0.6rem;
+ font-size: 0.85rem;
+ font-family: 'SF Mono', 'Fira Code', monospace;
+ outline: none;
+ width: 0;
+}
+
+.move-modal-input:focus {
+ border-color: #ffc107;
+ box-shadow: 0 0 0 2px rgba(255, 193, 7, 0.15);
+}
+
+.move-modal-preview-coords {
+ display: flex;
+ flex-direction: column;
+ gap: 0.2rem;
+ padding: 0.4rem 0.5rem;
+ background: rgba(0,0,0,.2);
+ border-radius: 6px;
+ max-height: 65px;
+ overflow-y: auto;
+}
+
+.move-modal-coord-row {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ gap: 0.5rem;
+}
+
+.move-modal-coord-name {
+ font-size: 0.75rem;
+ color: var(--muted);
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ max-width: 90px;
+}
+
+.move-modal-coord-val {
+ font-size: 0.72rem;
+ color: var(--text);
+ font-family: 'SF Mono', 'Fira Code', monospace;
+ white-space: nowrap;
+}
+
+.move-modal-actions {
+ display: flex;
+ gap: 0.5rem;
+}
+
+.move-modal-apply-btn {
+ flex: 1;
+ background: #ffc107;
+ color: #1a1a2e;
+ border: none;
+ padding: 0.6rem 1rem;
+ border-radius: 6px;
+ cursor: pointer;
+ font-weight: 600;
+ font-size: 0.85rem;
+ transition: opacity 0.2s;
+}
+
+.move-modal-apply-btn:hover:not(:disabled) {
+ opacity: 0.9;
+}
+
+.move-modal-apply-btn:disabled {
+ opacity: 0.5;
+ cursor: not-allowed;
+}
+
+.move-modal-cancel-btn {
+ padding: 0.6rem 1rem;
+ background: rgba(255,255,255,.1);
+ color: var(--text);
+ border: none;
+ border-radius: 6px;
+ cursor: pointer;
+ font-size: 0.85rem;
+ transition: all 0.2s;
+}
+
+.move-modal-cancel-btn:hover:not(:disabled) {
+ background: rgba(255,255,255,.15);
+}
+
+.move-modal-cancel-btn:disabled {
+ opacity: 0.5;
+ cursor: not-allowed;
+}
+
diff --git a/milventory/src/utils/appTimeZone.js b/milventory/src/utils/appTimeZone.js
new file mode 100644
index 0000000..2026efc
--- /dev/null
+++ b/milventory/src/utils/appTimeZone.js
@@ -0,0 +1,72 @@
+/** US Eastern — EST in winter, EDT in summer (IANA). */
+export const APP_TIME_ZONE = 'America/New_York';
+
+const locale = 'en-US';
+
+/**
+ * @param {string|number|Date} isoOrDate
+ * @returns {boolean}
+ */
+function isValidDate(isoOrDate) {
+ const d = isoOrDate instanceof Date ? isoOrDate : new Date(isoOrDate);
+ return !Number.isNaN(d.getTime());
+}
+
+/**
+ * Calendar day in Eastern as YYYY-MM-DD (for same-day checks).
+ * @param {string|number|Date} isoOrDate
+ */
+export function easternCalendarDayKey(isoOrDate) {
+ if (!isValidDate(isoOrDate)) return '';
+ const d = isoOrDate instanceof Date ? isoOrDate : new Date(isoOrDate);
+ return d.toLocaleDateString('en-CA', { timeZone: APP_TIME_ZONE });
+}
+
+/**
+ * @param {string|number|Date} isoOrDate
+ */
+export function formatEasternTimeShort(isoOrDate) {
+ if (!isValidDate(isoOrDate)) return '';
+ const d = isoOrDate instanceof Date ? isoOrDate : new Date(isoOrDate);
+ return d.toLocaleTimeString(locale, {
+ timeZone: APP_TIME_ZONE,
+ hour: 'numeric',
+ minute: '2-digit',
+ });
+}
+
+/**
+ * @param {string|number|Date} isoOrDate
+ */
+export function formatEasternDateShort(isoOrDate) {
+ if (!isValidDate(isoOrDate)) return '';
+ const d = isoOrDate instanceof Date ? isoOrDate : new Date(isoOrDate);
+ return d.toLocaleDateString(locale, {
+ timeZone: APP_TIME_ZONE,
+ month: 'short',
+ day: 'numeric',
+ });
+}
+
+/**
+ * @param {string|number|Date} isoOrDate
+ */
+export function formatEasternDateTime(isoOrDate) {
+ if (!isValidDate(isoOrDate)) return '';
+ const d = isoOrDate instanceof Date ? isoOrDate : new Date(isoOrDate);
+ return d.toLocaleString(locale, {
+ timeZone: APP_TIME_ZONE,
+ dateStyle: 'short',
+ timeStyle: 'short',
+ });
+}
+
+/**
+ * Date only (custom field values, etc.).
+ * @param {string|number|Date} isoOrDate
+ */
+export function formatEasternDateOnly(isoOrDate) {
+ if (!isValidDate(isoOrDate)) return '';
+ const d = isoOrDate instanceof Date ? isoOrDate : new Date(isoOrDate);
+ return d.toLocaleDateString(locale, { timeZone: APP_TIME_ZONE });
+}
diff --git a/milventory/src/utils/shelfLabels.js b/milventory/src/utils/shelfLabels.js
new file mode 100644
index 0000000..e7927a5
--- /dev/null
+++ b/milventory/src/utils/shelfLabels.js
@@ -0,0 +1,58 @@
+/**
+ * Shelf helpers.
+ *
+ * The DB column `locations.shelf_count` is the single source of truth for
+ * whether a location has shelves and how many. Shelf indices are 0-based and
+ * oriented bottom-to-top (index 0 = bottom-most shelf).
+ *
+ * User-facing labels number shelves 1..N bottom-to-top:
+ * - `"Shelf 1 (bottom)"` for index 0 (when shelfCount > 1)
+ * - `"Shelf N (top)"` for index N-1 (when shelfCount > 1)
+ * - `"Shelf K"` otherwise
+ * - A lone shelf (shelfCount === 1) is just `"Shelf 1"`.
+ */
+
+/** Normalized shelf count for a box, treating missing/null as 0. */
+export function getShelfCount(box) {
+ if (!box) return 0;
+ const n = box.shelf_count;
+ if (typeof n !== 'number' || !Number.isFinite(n)) return 0;
+ return Math.max(0, Math.trunc(n));
+}
+
+/** True iff the box has one or more shelves. */
+export function hasShelves(box) {
+ return getShelfCount(box) > 0;
+}
+
+/** Coerce API/UI shelf to a comparable integer or null (box-level / no shelf). */
+export function normalizeShelfIndex(shelf) {
+ if (shelf === undefined || shelf === null) return null;
+ const n = Number(shelf);
+ return Number.isFinite(n) ? n : null;
+}
+
+/**
+ * Label for a single 0-based shelf index, given the total shelf count at that
+ * location. Falls back to `"Shelf {idx+1}"` if shelfCount is unknown/invalid.
+ */
+export function getShelfLabel(shelfIdx, shelfCount) {
+ const idx = Math.max(0, Math.trunc(shelfIdx || 0));
+ const n = Math.max(0, Math.trunc(shelfCount || 0));
+ if (n <= 0) return `Shelf ${idx + 1}`;
+ if (n === 1) return 'Shelf 1';
+ if (idx === 0) return 'Shelf 1 (bottom)';
+ if (idx === n - 1) return `Shelf ${n} (top)`;
+ return `Shelf ${idx + 1}`;
+}
+
+/**
+ * Return shelf indices in visual order (top row first, bottom row last).
+ * Returns an empty array when shelfCount <= 0.
+ */
+export function getShelfIndicesTopToBottom(shelfCount) {
+ const n = Math.max(0, Math.trunc(shelfCount || 0));
+ const out = [];
+ for (let row = 0; row < n; row += 1) out.push(n - 1 - row);
+ return out;
+}
diff --git a/milventory/src/utils/svgDataUrl.js b/milventory/src/utils/svgDataUrl.js
new file mode 100644
index 0000000..805682a
--- /dev/null
+++ b/milventory/src/utils/svgDataUrl.js
@@ -0,0 +1,4 @@
+/** Encode a full SVG document string as a `data:image/svg+xml` URL (usable as an image `src` or CSS `url()`). */
+export function svgMarkupToDataUrl(svgMarkup) {
+ return `data:image/svg+xml;charset=utf-8,${encodeURIComponent(svgMarkup)}`;
+}
diff --git a/src/api/app.py b/src/api/app.py
index 3f792ac..43eadb9 100644
--- a/src/api/app.py
+++ b/src/api/app.py
@@ -13,9 +13,12 @@
from src.api.routes.locations import locations_bp
from src.api.routes.supplies import supplies_bp
from src.api.routes.supplies_location import supplies_location_bp
+from src.api.routes.supplies_location_history import supplies_location_history_bp
from src.api.routes.auth import auth_bp
from src.api.routes.categories import categories_bp
from src.api.routes.teams import teams_bp
+from src.api.routes.custom_field_definitions import custom_field_definitions_bp
+from src.api.routes.supply_types import supply_types_bp
# Import helpers for schema initialization
from src.scripts.helpers import (
@@ -31,23 +34,42 @@
# Set secret key for sessions
app.secret_key = os.getenv('FLASK_SECRET_KEY', 'dev-secret-key-change-in-production')
# Configure CORS to allow credentials (cookies)
-CORS(app, supports_credentials=True, origins=['http://localhost:3000', 'http://localhost:5000'])
+CORS(app, supports_credentials=True, origins=['http://localhost:3000', 'http://localhost:5000', 'http://localhost:6001'])
# Register blueprints
app.register_blueprint(locations_bp, url_prefix='/api/locations')
app.register_blueprint(supplies_bp, url_prefix='/api/supplies')
app.register_blueprint(supplies_location_bp, url_prefix='/api/supplies-location')
+app.register_blueprint(supplies_location_history_bp, url_prefix='/api/supplies-location-history')
app.register_blueprint(auth_bp, url_prefix='/api/auth')
app.register_blueprint(categories_bp, url_prefix='/api')
app.register_blueprint(teams_bp, url_prefix='/api')
+app.register_blueprint(custom_field_definitions_bp, url_prefix='/api/custom-field-definitions')
+app.register_blueprint(supply_types_bp, url_prefix='/api/supply-types')
def initialize_schema():
"""Initialize database schema if tables are missing."""
+ max_retries = 5
+ retry_delay = 2
+
+ for attempt in range(max_retries):
+ try:
+ print("🔍 Checking database schema...")
+ conn = get_db()
+ cur = conn.cursor()
+ break
+ except Exception as e:
+ if attempt < max_retries - 1:
+ print(f"⚠ Database connection attempt {attempt + 1}/{max_retries} failed: {e}")
+ print(f" Retrying in {retry_delay} seconds...")
+ import time
+ time.sleep(retry_delay)
+ else:
+ print(f"❌ Failed to connect to database after {max_retries} attempts: {e}")
+ raise
+
try:
- print("🔍 Checking database schema...")
- conn = get_db()
- cur = conn.cursor()
# Get SQL base path
SQL_BASE_PATH = get_sql_base_path(__file__)
@@ -97,24 +119,58 @@ def initialize_schema():
conn.commit()
if failed_tables:
- print(f"⚠ Schema initialization incomplete: {success_count}/{len(missing_tables)} tables created")
- print(f" Failed tables: {', '.join(failed_tables)}")
+ print(f"\n❌ SCHEMA INITIALIZATION FAILED: {success_count}/{len(missing_tables)} tables created successfully")
+ print(f"❌ FAILED TABLES ({len(failed_tables)}): {', '.join(failed_tables)}")
+ print("⚠️ The API will continue, but some endpoints may not work until these tables are created")
else:
print(f"✓ Schema initialization complete ({success_count}/{len(missing_tables)} tables created)")
cur.close()
conn.close()
-
+
except Exception as e:
print(f"⚠ Schema initialization warning: {e}")
import traceback
traceback.print_exc()
print(" API will continue, but some endpoints may not work until tables are created")
+ # Don't raise - allow the API to start even if schema init fails
# Initialize schema on startup
initialize_schema()
+# Run migrations for existing tables
+try:
+ from src.scripts.migrate_locations_schema import migrate_locations_schema
+ migrate_locations_schema()
+except Exception as e:
+ print(f"⚠ Warning: Could not run migrations: {e}")
+try:
+ from src.scripts.migrate_location_types_cleanup import migrate_location_types_cleanup
+ migrate_location_types_cleanup()
+except Exception as e:
+ print(f"⚠ Warning: Could not run location types cleanup migration: {e}")
+try:
+ from src.scripts.migrate_supplies_custom_fields import migrate_supplies_custom_fields
+ migrate_supplies_custom_fields()
+except Exception as e:
+ print(f"⚠ Warning: Could not run supplies custom_fields migration: {e}")
+try:
+ from src.scripts.migrate_supplies_public_id import migrate_supplies_public_id
+ migrate_supplies_public_id()
+except Exception as e:
+ print(f"⚠ Warning: Could not run supplies public_id migration: {e}")
+try:
+ from src.scripts.migrate_supply_types import migrate_supply_types
+ migrate_supply_types()
+except Exception as e:
+ print(f"⚠ Warning: Could not run supply_types migration: {e}")
+try:
+ from src.scripts.migrate_supplies_location_free_place import migrate_supplies_location_free_place
+ migrate_supplies_location_free_place()
+except Exception as e:
+ print(f"⚠ Warning: Could not run supplies_location free_place migration: {e}")
+
# Seed test user, teams, categories, and locations
try:
from src.scripts.seed_data import seed_test_user, seed_teams, seed_categories, seed_locations
@@ -132,6 +188,16 @@ def health_check():
return {'status': 'healthy'}, 200
+# Error handler to ensure CORS headers are always sent
+@app.errorhandler(500)
+def handle_500_error(e):
+ """Handle 500 errors and ensure CORS headers are sent."""
+ from flask import jsonify
+ response = jsonify({'error': str(e) if hasattr(e, 'description') and e.description else 'Internal server error'})
+ response.status_code = 500
+ return response
+
+
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
app.run(host='0.0.0.0', port=port, debug=True)
diff --git a/src/api/db.py b/src/api/db.py
index deb998d..2b7a6a8 100644
--- a/src/api/db.py
+++ b/src/api/db.py
@@ -4,6 +4,7 @@
import mysql.connector
from mysql.connector import pooling
import os
+import time
# Database configuration from environment variables
config = {
@@ -17,12 +18,24 @@
'pool_reset_session': True
}
-# Create connection pool
-try:
- connection_pool = pooling.MySQLConnectionPool(**config)
-except Exception as e:
- print(f"Error creating connection pool: {e}")
- connection_pool = None
+# Create connection pool with retry logic
+connection_pool = None
+max_retries = 10
+retry_delay = 2 # seconds
+
+for attempt in range(max_retries):
+ try:
+ connection_pool = pooling.MySQLConnectionPool(**config)
+ print(f"✓ Database connection pool initialized successfully")
+ break
+ except Exception as e:
+ if attempt < max_retries - 1:
+ print(f"⚠ Database connection attempt {attempt + 1}/{max_retries} failed: {e}")
+ print(f" Retrying in {retry_delay} seconds...")
+ time.sleep(retry_delay)
+ else:
+ print(f"❌ Error creating connection pool after {max_retries} attempts: {e}")
+ connection_pool = None
def get_db():
diff --git a/src/api/helpers/__init__.py b/src/api/helpers/__init__.py
new file mode 100644
index 0000000..4e9b250
--- /dev/null
+++ b/src/api/helpers/__init__.py
@@ -0,0 +1,3 @@
+"""
+Helper modules for the API.
+"""
diff --git a/src/api/helpers/datetime_json.py b/src/api/helpers/datetime_json.py
new file mode 100644
index 0000000..2461936
--- /dev/null
+++ b/src/api/helpers/datetime_json.py
@@ -0,0 +1,33 @@
+"""
+JSON serialization for MySQL date/time columns.
+
+DATETIME and TIMESTAMP values from mysql-connector are naive ``datetime`` (or
+``date`` for DATE columns). Our DB runs in UTC, so wall-clock values are UTC.
+JavaScript parses ISO strings *without* a timezone offset as **local** time
+(ECMA-262), which shifts Eastern users by several hours. Append ``Z`` so
+``new Date()`` gets the correct instant; the frontend can still format in
+America/New_York.
+"""
+from __future__ import annotations
+
+from datetime import date, datetime, timezone
+
+
+def db_datetime_to_utc_iso(value):
+ """
+ Serialize a MySQL date/datetime for JSON.
+
+ - ``datetime`` (naive): treated as UTC, returns ``...Z``.
+ - ``datetime`` (aware): converted to UTC, then ``...Z``.
+ - ``date`` (not datetime): calendar date only ``YYYY-MM-DD`` (no Z).
+ """
+ if value is None:
+ return None
+ if isinstance(value, datetime):
+ if value.tzinfo is not None:
+ value = value.astimezone(timezone.utc).replace(tzinfo=None)
+ text = value.isoformat(sep="T")
+ return f"{text}Z"
+ if isinstance(value, date):
+ return value.isoformat()
+ return None
diff --git a/src/api/helpers/history.py b/src/api/helpers/history.py
new file mode 100644
index 0000000..ff567c0
--- /dev/null
+++ b/src/api/helpers/history.py
@@ -0,0 +1,289 @@
+"""
+History tracking helper functions for supplies.
+"""
+import sys
+import uuid
+from pathlib import Path
+
+# Add src to path for imports
+sys.path.insert(0, str(Path(__file__).parent.parent.parent))
+
+from src.api.db import get_db
+
+
+def log_supply_history(conn, supply_id, action_type, old_values, new_values, changed_by):
+ """
+ Log a history entry for a supply change.
+
+ Args:
+ conn: Database connection
+ supply_id: Supply ID (can be None for CREATE before insert)
+ action_type: 'CREATE', 'UPDATE', or 'DELETE'
+ old_values: Dict with old_name, old_description, old_image, old_last_order_date
+ new_values: Dict with new_name, new_description, new_image, new_last_order_date
+ changed_by: UF ID of user making the change
+
+ Returns:
+ History entry ID
+ """
+ cur = conn.cursor()
+
+ try:
+ cur.execute("""
+ INSERT INTO supplies_history (
+ supply_id, action_type,
+ old_name, new_name,
+ old_description, new_description,
+ old_image, new_image,
+ old_last_order_date, new_last_order_date,
+ changed_by
+ ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+ """, (
+ supply_id,
+ action_type,
+ old_values.get('name'),
+ new_values.get('name'),
+ old_values.get('description'),
+ new_values.get('description'),
+ old_values.get('image'),
+ new_values.get('image'),
+ old_values.get('last_order_date'),
+ new_values.get('last_order_date'),
+ changed_by
+ ))
+
+ history_id = cur.lastrowid
+ return history_id
+ finally:
+ cur.close()
+
+
+def log_team_changes(conn, history_id, old_teams, new_teams):
+ """
+ Log team changes for a history entry.
+
+ Args:
+ conn: Database connection
+ history_id: History entry ID
+ old_teams: List of old team names
+ new_teams: List of new team names
+ """
+ cur = conn.cursor()
+
+ try:
+ old_set = set(old_teams or [])
+ new_set = set(new_teams or [])
+
+ # Teams that were removed
+ removed = old_set - new_set
+ for team_name in removed:
+ cur.execute("""
+ INSERT INTO supplies_history_teams (history_id, team_name, action)
+ VALUES (%s, %s, 'REMOVED')
+ """, (history_id, team_name))
+
+ # Teams that were added
+ added = new_set - old_set
+ for team_name in added:
+ cur.execute("""
+ INSERT INTO supplies_history_teams (history_id, team_name, action)
+ VALUES (%s, %s, 'ADDED')
+ """, (history_id, team_name))
+ finally:
+ cur.close()
+
+
+def log_category_changes(conn, history_id, old_categories, new_categories):
+ """
+ Log category changes for a history entry.
+
+ Args:
+ conn: Database connection
+ history_id: History entry ID
+ old_categories: List of old category IDs
+ new_categories: List of new category IDs
+ """
+ cur = conn.cursor()
+
+ try:
+ old_set = set(old_categories or [])
+ new_set = set(new_categories or [])
+
+ # Categories that were removed
+ removed = old_set - new_set
+ for category_id in removed:
+ cur.execute("""
+ INSERT INTO supplies_history_categories (history_id, category_id, action)
+ VALUES (%s, %s, 'REMOVED')
+ """, (history_id, category_id))
+
+ # Categories that were added
+ added = new_set - old_set
+ for category_id in added:
+ cur.execute("""
+ INSERT INTO supplies_history_categories (history_id, category_id, action)
+ VALUES (%s, %s, 'ADDED')
+ """, (history_id, category_id))
+ finally:
+ cur.close()
+
+
+def get_supply_current_state(conn, supply_id):
+ """
+ Get current state of a supply including teams and categories.
+
+ Args:
+ conn: Database connection
+ supply_id: Supply ID
+
+ Returns:
+ Dict with supply data, teams, and categories
+ """
+ cur = conn.cursor(dictionary=True)
+
+ try:
+ # Get supply
+ cur.execute("""
+ SELECT id, name, description, image, last_order_date
+ FROM supplies WHERE id = %s
+ """, (supply_id,))
+ supply = cur.fetchone()
+
+ if not supply:
+ return None
+
+ # Get teams
+ cur.execute("""
+ SELECT team_name FROM supplies_teams WHERE supply_id = %s
+ """, (supply_id,))
+ teams = [row['team_name'] for row in cur.fetchall()]
+
+ # Get categories
+ cur.execute("""
+ SELECT category_id FROM supplies_categories WHERE supply_id = %s
+ """, (supply_id,))
+ categories = [row['category_id'] for row in cur.fetchall()]
+
+ return {
+ 'name': supply['name'],
+ 'description': supply['description'],
+ 'image': supply['image'],
+ 'last_order_date': supply['last_order_date'],
+ 'teams': teams,
+ 'categories': categories
+ }
+ finally:
+ cur.close()
+
+
+def log_location_history(conn, action_type, supply_id, supply_name,
+ location_name, shelf,
+ old_amount, new_amount,
+ changed_by,
+ related_location=None, related_shelf=None,
+ batch_id=None):
+ """
+ Insert one row into supplies_location_history.
+ Pass batch_id from the caller to group related rows.
+ Returns the inserted row id.
+
+ Args:
+ conn: Database connection
+ action_type: 'ADD', 'REMOVE', 'UPDATE', 'MOVE', or 'CASCADED_SUBTRACT'
+ supply_id: Supply ID (can be None)
+ supply_name: Supply name (denormalized, required)
+ location_name: Location name
+ shelf: Shelf number (can be None)
+ old_amount: Amount before change (None for ADD)
+ new_amount: Amount after change (None for REMOVE/SNAPSHOT)
+ changed_by: UF ID of user making the change
+ related_location: For MOVE actions, the other location
+ related_shelf: For MOVE actions, the other shelf
+ batch_id: UUID string to group related operations
+
+ Returns:
+ History entry ID
+ """
+ cur = conn.cursor()
+ try:
+ cur.execute("""
+ INSERT INTO supplies_location_history
+ (supply_id, supply_name, location_name, shelf,
+ action_type, old_amount, new_amount,
+ related_location, related_shelf,
+ batch_id, changed_by)
+ VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+ """, (
+ supply_id, supply_name, location_name, shelf,
+ action_type, old_amount, new_amount,
+ related_location, related_shelf,
+ batch_id, changed_by
+ ))
+ return cur.lastrowid
+ finally:
+ cur.close()
+
+
+def snapshot_supply_locations_before_delete(conn, supply_id, supply_name, changed_by):
+ """
+ Called BEFORE deleting a supply. Reads all current supplies_location rows
+ for this supply and writes CASCADED_SUBTRACT history entries.
+ These are later used to restore the supply's full location state.
+ batch_id ties all snapshots from the same delete together.
+
+ Args:
+ conn: Database connection
+ supply_id: Supply ID to snapshot
+ supply_name: Supply name (denormalized)
+ changed_by: UF ID of user making the change
+
+ Returns:
+ batch_id (UUID string) that groups all snapshot rows
+ """
+ cur = conn.cursor(dictionary=True)
+ batch_id = str(uuid.uuid4())
+ try:
+ cur.execute("""
+ SELECT location_name, shelf, amount, coord_x, coord_y
+ FROM supplies_location
+ WHERE supply_id = %s
+ """, (supply_id,))
+ rows = cur.fetchall()
+ for row in rows:
+ is_free = row['location_name'] is None and row.get('coord_x') is not None
+ log_location_history(
+ conn,
+ action_type='CASCADED_SUBTRACT',
+ supply_id=supply_id,
+ supply_name=supply_name,
+ location_name='Free Coordinate' if is_free else row['location_name'],
+ shelf=row['shelf'],
+ old_amount=row['amount'],
+ new_amount=None,
+ changed_by=changed_by,
+ batch_id=batch_id,
+ related_location=f"{row['coord_x']},{row['coord_y']}" if is_free else None,
+ )
+ return batch_id # return so caller can attach to the supplies_history row too
+ finally:
+ cur.close()
+
+
+def is_latest_global_history_timestamp(cur, changed_at):
+ """
+ True if changed_at equals the latest timestamp across location + supply history.
+ Non-leaders may only undo that row; leaders skip this check in routes.
+ """
+ if changed_at is None:
+ return False
+ cur.execute("""
+ SELECT GREATEST(
+ COALESCE((SELECT MAX(changed_at) FROM supplies_location_history), '1970-01-01 00:00:00'),
+ COALESCE((SELECT MAX(changed_at) FROM supplies_history), '1970-01-01 00:00:00')
+ ) AS latest
+ """)
+ row = cur.fetchone()
+ latest = row['latest'] if row else None
+ if latest is None:
+ return False
+ return changed_at == latest
diff --git a/src/api/helpers/map_bounds.py b/src/api/helpers/map_bounds.py
new file mode 100644
index 0000000..bf107fb
--- /dev/null
+++ b/src/api/helpers/map_bounds.py
@@ -0,0 +1,35 @@
+"""
+Map placement bounds (world coordinates). Must match frontend inventoryBounds.room defaults.
+"""
+# Same as milventory InventoryContext default room rect
+ROOM_X = 80
+ROOM_Y = 80
+ROOM_WIDTH = 3600
+ROOM_HEIGHT = 3840
+
+
+def coords_in_room(x, y):
+ """True if integer coords lie inside the closed room rectangle."""
+ try:
+ xi = int(round(float(x)))
+ yi = int(round(float(y)))
+ except (TypeError, ValueError):
+ return False
+ if xi < ROOM_X or yi < ROOM_Y:
+ return False
+ if xi > ROOM_X + ROOM_WIDTH or yi > ROOM_Y + ROOM_HEIGHT:
+ return False
+ return True
+
+
+def clamp_coords_to_room(x, y):
+ """Return (int, int) clamped to room bounds."""
+ try:
+ xf = float(x)
+ yf = float(y)
+ except (TypeError, ValueError):
+ xf = float(ROOM_X)
+ yf = float(ROOM_Y)
+ xi = int(round(max(ROOM_X, min(ROOM_X + ROOM_WIDTH, xf))))
+ yi = int(round(max(ROOM_Y, min(ROOM_Y + ROOM_HEIGHT, yf))))
+ return xi, yi
diff --git a/src/api/helpers/unique_type_qty.py b/src/api/helpers/unique_type_qty.py
new file mode 100644
index 0000000..e5f9ae7
--- /dev/null
+++ b/src/api/helpers/unique_type_qty.py
@@ -0,0 +1,71 @@
+"""
+is_unique on supply_types: multiple catalog rows may share a type, but for each supply
+linked to such a type, total quantity across supplies_location must be at most 1.
+"""
+
+
+def map_total_qty_for_supply(cur, supply_id):
+ cur.execute(
+ """
+ SELECT COALESCE(SUM(amount), 0) AS total
+ FROM supplies_location
+ WHERE supply_id = %s
+ """,
+ (int(supply_id),),
+ )
+ row = cur.fetchone()
+ if not row:
+ return 0
+ if isinstance(row, dict):
+ return int(row["total"] or 0)
+ return int(row[0] or 0)
+
+
+def supply_has_unique_type(cur, supply_id):
+ cur.execute(
+ """
+ SELECT COALESCE(st.is_unique, 0) AS u
+ FROM supplies s
+ LEFT JOIN supply_types st ON s.supply_type_id = st.id
+ WHERE s.id = %s
+ """,
+ (int(supply_id),),
+ )
+ row = cur.fetchone()
+ if not row:
+ return False
+ if isinstance(row, dict):
+ return bool(row.get("u"))
+ return bool(row[0])
+
+
+def check_unique_type_map_qty(cur, supply_id, proposed_total):
+ """
+ If supply is linked to a type with is_unique, proposed_total must be <= 1.
+ Returns (True, None) or (False, error_message).
+ """
+ if not supply_has_unique_type(cur, supply_id):
+ return True, None
+ if int(proposed_total) > 1:
+ return (
+ False,
+ "This item uses a type that allows at most 1 total quantity on the map.",
+ )
+ return True, None
+
+
+def type_has_supply_with_map_qty_over_one(cur, type_id):
+ """True if any supply with this type has total map quantity > 1."""
+ cur.execute(
+ """
+ SELECT s.id
+ FROM supplies s
+ LEFT JOIN supplies_location sl ON sl.supply_id = s.id
+ WHERE s.supply_type_id = %s
+ GROUP BY s.id
+ HAVING COALESCE(SUM(sl.amount), 0) > 1
+ LIMIT 1
+ """,
+ (int(type_id),),
+ )
+ return cur.fetchone() is not None
diff --git a/src/api/middleware/auth.py b/src/api/middleware/auth.py
index 89a7284..063fb9d 100644
--- a/src/api/middleware/auth.py
+++ b/src/api/middleware/auth.py
@@ -22,3 +22,23 @@ def decorated_function(*args, **kwargs):
return decorated_function
+
+def require_leader(f):
+ """
+ Decorator to require leader/admin status for a route.
+
+ Returns 401 if user is not authenticated, 403 if not a leader.
+ """
+ @wraps(f)
+ def decorated_function(*args, **kwargs):
+ if 'user_id' not in session:
+ return jsonify({'error': 'Authentication required'}), 401
+
+ if not session.get('is_leader', False):
+ return jsonify({'error': 'Leader access required'}), 403
+
+ # Add current user info to kwargs for convenience
+ kwargs['current_user_id'] = session.get('user_id')
+ return f(*args, **kwargs)
+
+ return decorated_function
diff --git a/src/api/models/category.py b/src/api/models/category.py
index 5912c3d..1cd4122 100644
--- a/src/api/models/category.py
+++ b/src/api/models/category.py
@@ -1,5 +1,7 @@
"""Category model for database operations."""
+from src.api.helpers.datetime_json import db_datetime_to_utc_iso
+
class Category:
def __init__(self, id, name, created_at):
@@ -17,6 +19,8 @@ def to_dict(self):
return {
'id': self.id,
'name': self.name,
- 'created_at': self.created_at.isoformat() if self.created_at else None
+ 'created_at': db_datetime_to_utc_iso(self.created_at)
}
+
+
diff --git a/src/api/models/custom_field_definition.py b/src/api/models/custom_field_definition.py
new file mode 100644
index 0000000..8699ef6
--- /dev/null
+++ b/src/api/models/custom_field_definition.py
@@ -0,0 +1,30 @@
+"""
+Custom field definition model (admin-defined field types).
+"""
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass
+class CustomFieldDefinition:
+ """Custom field definition: name (display and key), type (text|number|date)."""
+ id: Optional[int] = None
+ name: str = ""
+ type: str = "text" # text, number, date
+
+ @classmethod
+ def from_db_row(cls, row):
+ """Create from database row (id, name, type)."""
+ return cls(
+ id=row[0],
+ name=row[1],
+ type=row[2]
+ )
+
+ def to_dict(self):
+ """Convert to dictionary for JSON response."""
+ return {
+ 'id': self.id,
+ 'name': self.name,
+ 'type': self.type
+ }
diff --git a/src/api/models/location.py b/src/api/models/location.py
index bb37a6f..8ebd0b1 100644
--- a/src/api/models/location.py
+++ b/src/api/models/location.py
@@ -14,60 +14,64 @@ class Location:
width: int
height: int
type: str
-
+ shelf_count: int = 0
+ protected: bool = False
+
@classmethod
def from_db_row(cls, row: tuple) -> 'Location':
"""
- Create Location from database row.
-
- Args:
- row: Tuple from database query (name, x, y, width, height, type)
-
- Returns:
- Location instance
+ Create Location from a database row.
+
+ Expected column order:
+ (name, x, y, width, height, type, shelf_count, protected)
+
+ For backward compatibility we also accept the legacy 7-column row
+ (without shelf_count) by defaulting shelf_count to 0.
"""
+ if len(row) >= 8:
+ return cls(
+ name=row[0],
+ x=row[1],
+ y=row[2],
+ width=row[3],
+ height=row[4],
+ type=row[5],
+ shelf_count=int(row[6] or 0),
+ protected=bool(row[7]),
+ )
+ # legacy 7-column: (name, x, y, width, height, type, protected)
return cls(
name=row[0],
x=row[1],
y=row[2],
width=row[3],
height=row[4],
- type=row[5]
+ type=row[5],
+ shelf_count=0,
+ protected=bool(row[6]) if len(row) > 6 else False,
)
-
+
def to_dict(self) -> Dict[str, Any]:
- """
- Convert Location to dictionary for JSON serialization.
-
- Returns:
- Dictionary representation of the location
- """
return {
'name': self.name,
'x': self.x,
'y': self.y,
'width': self.width,
'height': self.height,
- 'type': self.type
+ 'type': self.type,
+ 'shelf_count': self.shelf_count,
+ 'protected': self.protected,
}
-
+
@classmethod
def from_dict(cls, data: Dict[str, Any]) -> 'Location':
- """
- Create Location from dictionary.
-
- Args:
- data: Dictionary with location data
-
- Returns:
- Location instance
- """
return cls(
name=data['name'],
x=int(data['x']),
y=int(data['y']),
width=int(data['width']),
height=int(data['height']),
- type=data['type']
+ type=data['type'],
+ shelf_count=int(data.get('shelf_count', 0) or 0),
+ protected=bool(data.get('protected', False)),
)
-
diff --git a/src/api/models/supply.py b/src/api/models/supply.py
index 0a1af8e..ca937ee 100644
--- a/src/api/models/supply.py
+++ b/src/api/models/supply.py
@@ -5,11 +5,14 @@
from typing import Optional
from datetime import date, datetime
+from src.api.helpers.datetime_json import db_datetime_to_utc_iso
+
@dataclass
class Supply:
"""Supply catalog/reference model."""
id: Optional[int] = None
+ public_id: Optional[str] = None
name: str = ""
description: Optional[str] = None
image: Optional[str] = None # Base64 data URI (LONGTEXT)
@@ -33,7 +36,7 @@ def from_db_row(cls, row):
last_order_date=row[4],
last_modified=row[5],
last_modified_by=row[6],
- created_at=row[7]
+ created_at=row[7],
)
@classmethod
@@ -45,13 +48,14 @@ def from_dict(cls, data: dict):
"""
return cls(
id=data.get('id'),
+ public_id=data.get('public_id'),
name=data.get('name', ''),
description=data.get('description'),
image=data.get('image'),
last_order_date=data.get('last_order_date'),
last_modified=data.get('last_modified'),
last_modified_by=data.get('last_modified_by'),
- created_at=data.get('created_at')
+ created_at=data.get('created_at'),
)
def to_dict(self):
@@ -62,22 +66,18 @@ def to_dict(self):
'description': self.description,
'image': self.image,
}
+ if self.public_id:
+ result['public_id'] = self.public_id
if self.last_order_date:
- if isinstance(self.last_order_date, date):
- result['last_order_date'] = self.last_order_date.isoformat()
- else:
- result['last_order_date'] = str(self.last_order_date)
+ serialized = db_datetime_to_utc_iso(self.last_order_date)
+ result['last_order_date'] = serialized if serialized is not None else str(self.last_order_date)
if self.last_modified:
- if isinstance(self.last_modified, datetime):
- result['lastModified'] = self.last_modified.isoformat()
- else:
- result['lastModified'] = str(self.last_modified)
+ serialized = db_datetime_to_utc_iso(self.last_modified)
+ result['lastModified'] = serialized if serialized is not None else str(self.last_modified)
if self.last_modified_by:
result['last_modified_by'] = self.last_modified_by
if self.created_at:
- if isinstance(self.created_at, datetime):
- result['created_at'] = self.created_at.isoformat()
- else:
- result['created_at'] = str(self.created_at)
+ serialized = db_datetime_to_utc_iso(self.created_at)
+ result['created_at'] = serialized if serialized is not None else str(self.created_at)
return result
diff --git a/src/api/models/supply_location.py b/src/api/models/supply_location.py
index e9b60d6..d338d2a 100644
--- a/src/api/models/supply_location.py
+++ b/src/api/models/supply_location.py
@@ -5,15 +5,19 @@
from typing import Optional
from datetime import datetime
+from src.api.helpers.datetime_json import db_datetime_to_utc_iso
+
@dataclass
class SupplyLocation:
"""Supply location model (inventory entries)."""
id: Optional[int] = None
supply_id: int = 0
- location_name: str = ""
+ location_name: Optional[str] = ""
shelf: Optional[int] = None # NULL for non-shelf locations; 0-5 for Tall Cabinet shelves
amount: int = 0 # Backend uses "amount", frontend uses "qty"
+ coord_x: Optional[int] = None
+ coord_y: Optional[int] = None
last_modified: Optional[datetime] = None
last_modified_by: Optional[str] = None # UF ID
created_at: Optional[datetime] = None
@@ -22,18 +26,34 @@ class SupplyLocation:
def from_db_row(cls, row):
"""Create SupplyLocation from database row tuple.
- Args:
- row: Tuple from database query (id, supply_id, location_name, shelf, amount, last_modified, last_modified_by, created_at)
+ Legacy 8-tuple: id, supply_id, location_name, shelf, amount, last_modified, last_modified_by, created_at
+ With free place: id, supply_id, location_name, coord_x, coord_y, shelf, amount, last_modified, last_modified_by, created_at
"""
+ n = len(row)
+ if n >= 10:
+ loc = row[2] if row[2] is not None else ""
+ return cls(
+ id=row[0],
+ supply_id=row[1],
+ location_name=loc,
+ coord_x=row[3],
+ coord_y=row[4],
+ shelf=row[5],
+ amount=row[6],
+ last_modified=row[7],
+ last_modified_by=row[8],
+ created_at=row[9],
+ )
+ loc = row[2] if row[2] is not None else ""
return cls(
id=row[0],
supply_id=row[1],
- location_name=row[2],
+ location_name=loc,
shelf=row[3],
amount=row[4],
last_modified=row[5],
last_modified_by=row[6],
- created_at=row[7]
+ created_at=row[7],
)
@classmethod
@@ -49,6 +69,8 @@ def from_dict(cls, data: dict):
location_name=data.get('location_name', data.get('location', '')),
shelf=data.get('shelf'),
amount=data.get('amount', data.get('qty', 0)), # Accept both "amount" and "qty"
+ coord_x=data.get('coord_x'),
+ coord_y=data.get('coord_y'),
last_modified=data.get('last_modified', data.get('lastModified')),
last_modified_by=data.get('last_modified_by'),
created_at=data.get('created_at', data.get('createdAt'))
@@ -61,21 +83,21 @@ def to_dict(self):
result = {
'id': self.id,
'supply_id': self.supply_id,
- 'location': self.location_name,
+ 'location': self.location_name if self.location_name else None,
'shelf': self.shelf,
'qty': self.amount, # Frontend uses "qty"
}
+ if self.coord_x is not None and self.coord_y is not None:
+ result['coord_x'] = self.coord_x
+ result['coord_y'] = self.coord_y
+ result['free_place'] = True
if self.last_modified:
- if isinstance(self.last_modified, datetime):
- result['lastModified'] = self.last_modified.isoformat()
- else:
- result['lastModified'] = str(self.last_modified)
+ serialized = db_datetime_to_utc_iso(self.last_modified)
+ result['lastModified'] = serialized if serialized is not None else str(self.last_modified)
if self.last_modified_by:
result['last_modified_by'] = self.last_modified_by
if self.created_at:
- if isinstance(self.created_at, datetime):
- result['created_at'] = self.created_at.isoformat()
- else:
- result['created_at'] = str(self.created_at)
+ serialized = db_datetime_to_utc_iso(self.created_at)
+ result['created_at'] = serialized if serialized is not None else str(self.created_at)
return result
diff --git a/src/api/models/team.py b/src/api/models/team.py
index f95f787..d775f6f 100644
--- a/src/api/models/team.py
+++ b/src/api/models/team.py
@@ -16,3 +16,5 @@ def to_dict(self):
'name': self.name
}
+
+
diff --git a/src/api/repositories/__init__.py b/src/api/repositories/__init__.py
new file mode 100644
index 0000000..805099f
--- /dev/null
+++ b/src/api/repositories/__init__.py
@@ -0,0 +1 @@
+"""Data access layer (SQL)."""
diff --git a/src/api/repositories/categories_repository.py b/src/api/repositories/categories_repository.py
new file mode 100644
index 0000000..595267f
--- /dev/null
+++ b/src/api/repositories/categories_repository.py
@@ -0,0 +1,21 @@
+"""SQL for categories."""
+from typing import Optional, Tuple
+
+
+def list_id_name_ordered(cur) -> list:
+ cur.execute("SELECT id, name FROM categories ORDER BY name")
+ return cur.fetchall()
+
+
+def fetch_by_id(cur, category_id: int) -> Optional[Tuple]:
+ cur.execute("SELECT id, name, created_at FROM categories WHERE id = %s", (category_id,))
+ return cur.fetchone()
+
+
+def insert_name(cur, name: str) -> None:
+ cur.execute("INSERT INTO categories (name) VALUES (%s)", (name,))
+
+
+def fetch_last_insert_row(cur) -> Tuple:
+ cur.execute("SELECT id, name, created_at FROM categories WHERE id = LAST_INSERT_ID()")
+ return cur.fetchone()
diff --git a/src/api/repositories/custom_field_definitions_repository.py b/src/api/repositories/custom_field_definitions_repository.py
new file mode 100644
index 0000000..612b838
--- /dev/null
+++ b/src/api/repositories/custom_field_definitions_repository.py
@@ -0,0 +1,62 @@
+"""SQL for custom_field_definitions and supplies.custom_fields cleanup."""
+from typing import List, Optional, Tuple
+
+
+def list_id_name_type_ordered(cur) -> List[Tuple]:
+ cur.execute(
+ """
+ SELECT id, name, type
+ FROM custom_field_definitions
+ ORDER BY name
+ """
+ )
+ return cur.fetchall()
+
+
+def insert_name_type(cur, name: str, type_val: str) -> int:
+ cur.execute(
+ """
+ INSERT INTO custom_field_definitions (name, type)
+ VALUES (%s, %s)
+ """,
+ (name, type_val),
+ )
+ return cur.lastrowid
+
+
+def fetch_by_id_tuple(cur, definition_id: int) -> Optional[Tuple]:
+ cur.execute(
+ "SELECT id, name, type FROM custom_field_definitions WHERE id = %s",
+ (definition_id,),
+ )
+ return cur.fetchone()
+
+
+def exists_id_tuple(cur, definition_id: int) -> bool:
+ cur.execute("SELECT id FROM custom_field_definitions WHERE id = %s", (definition_id,))
+ return cur.fetchone() is not None
+
+
+def update_columns(cur, set_fragments: List[str], values: List) -> None:
+ cur.execute(
+ "UPDATE custom_field_definitions SET " + ", ".join(set_fragments) + " WHERE id = %s",
+ values,
+ )
+
+
+def fetch_name_by_id_dict(cur, definition_id: int) -> Optional[dict]:
+ cur.execute("SELECT name FROM custom_field_definitions WHERE id = %s", (definition_id,))
+ return cur.fetchone()
+
+
+def delete_by_id(cur, definition_id: int) -> None:
+ cur.execute("DELETE FROM custom_field_definitions WHERE id = %s", (definition_id,))
+
+
+def iter_supplies_custom_fields_rows(cur):
+ cur.execute("SELECT id, custom_fields FROM supplies WHERE custom_fields IS NOT NULL")
+ return cur.fetchall()
+
+
+def update_supply_custom_fields_json(cur, supply_id: int, new_json) -> None:
+ cur.execute("UPDATE supplies SET custom_fields = %s WHERE id = %s", (new_json, supply_id))
diff --git a/src/api/repositories/locations_repository.py b/src/api/repositories/locations_repository.py
new file mode 100644
index 0000000..08ad71c
--- /dev/null
+++ b/src/api/repositories/locations_repository.py
@@ -0,0 +1,87 @@
+"""SQL for locations (map boxes)."""
+from typing import List, Optional, Tuple
+
+
# Column list shared by the tuple-returning SELECT helpers below.
_SELECT_COLS = "name, x, y, width, height, type, shelf_count, protected"


def list_all_tuple_ordered(cur) -> List[Tuple]:
    """Return every location row as a tuple (see _SELECT_COLS), sorted by name."""
    cur.execute(f"SELECT {_SELECT_COLS} FROM locations ORDER BY name")
    return cur.fetchall()


def fetch_by_name_tuple(cur, name: str) -> Optional[Tuple]:
    """Return one location row as a tuple, or None if the name is unknown."""
    cur.execute(
        f"SELECT {_SELECT_COLS} FROM locations WHERE name = %s",
        (name,),
    )
    return cur.fetchone()


def insert_location(
    cur, name: str, x, y, width, height, location_type: str, shelf_count: int, protected: bool
) -> None:
    """Insert a new map box; `name` is the lookup key used by the other helpers."""
    cur.execute(
        """
        INSERT INTO locations (name, x, y, width, height, type, shelf_count, protected)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """,
        (name, x, y, width, height, location_type, shelf_count, protected),
    )


def name_exists(cur, name: str) -> bool:
    """True when a location with this exact name already exists."""
    cur.execute("SELECT name FROM locations WHERE name = %s", (name,))
    return cur.fetchone() is not None
+
+
def update_by_name(cur, set_clauses: List[str], values: List) -> None:
    """Apply a dynamic UPDATE to one location addressed by name.

    set_clauses: code-built fragments such as 'x = %s'; values are the bind
    params in the same order, ending with the current name for WHERE name = %s.
    No-op when set_clauses is empty — mirrors
    supplies_repository.update_supply_columns and avoids the malformed
    statement 'UPDATE locations SET  WHERE name = %s'.
    """
    if not set_clauses:
        return
    query = f"UPDATE locations SET {', '.join(set_clauses)} WHERE name = %s"
    cur.execute(query, tuple(values))
+
+
def rename(cur, new_name: str, old_name: str) -> None:
    """Rename a location in place. NOTE: argument order is new first, then old."""
    cur.execute("UPDATE locations SET name = %s WHERE name = %s", (new_name, old_name))


def delete_by_name(cur, name: str) -> None:
    """Delete one location row; silently does nothing for an unknown name."""
    cur.execute("DELETE FROM locations WHERE name = %s", (name,))
+
+
def max_used_shelf(cur, name: str) -> Optional[int]:
    """
    Highest `shelf` value currently in use by placements at this location,
    or None when no placement has a non-NULL shelf.
    Used to validate shelf_count reductions.
    """
    cur.execute(
        "SELECT MAX(shelf) FROM supplies_location WHERE location_name = %s AND shelf IS NOT NULL",
        (name,),
    )
    result = cur.fetchone()
    # MAX over zero rows yields a (None,) row; both that and a missing row map to None.
    return result[0] if result else None
+
+
def count_orphans_if_shelf_count(cur, name: str, new_shelf_count: int) -> int:
    """
    Number of supplies_location rows that would become orphaned if this
    location's shelf_count were set to `new_shelf_count`.

    - new_shelf_count <= 0: every row with a non-NULL shelf is an orphan.
    - otherwise: rows whose shelf is >= new_shelf_count are orphans.
    """
    if new_shelf_count <= 0:
        query = "SELECT COUNT(*) FROM supplies_location WHERE location_name = %s AND shelf IS NOT NULL"
        bind = (name,)
    else:
        query = "SELECT COUNT(*) FROM supplies_location WHERE location_name = %s AND shelf >= %s"
        bind = (name, new_shelf_count)
    cur.execute(query, bind)
    result = cur.fetchone()
    return int(result[0]) if result and result[0] is not None else 0
diff --git a/src/api/repositories/members_repository.py b/src/api/repositories/members_repository.py
new file mode 100644
index 0000000..bc81cb3
--- /dev/null
+++ b/src/api/repositories/members_repository.py
@@ -0,0 +1,55 @@
+"""SQL for members (auth)."""
+import secrets
+from typing import Optional
+
+
def allocate_uf_id(cur) -> str:
    """Generate an unused 8-digit UF ID (matches members.uf_id CHECK).

    Tries up to 100 random candidates before giving up.
    """
    attempts = 0
    while attempts < 100:
        candidate = f"{secrets.randbelow(100000000):08d}"
        cur.execute("SELECT 1 FROM members WHERE uf_id = %s LIMIT 1", (candidate,))
        if cur.fetchone() is None:
            return candidate
        attempts += 1
    raise RuntimeError("Could not allocate uf_id")
+
+
def insert_signup_member(
    cur,
    *,
    first_name: str,
    last_name: str,
    uf_id: str,
    uf_email: str,
    password_hash: str,
    discord: str,
    github: str,
) -> None:
    """Insert a self-signup member row; is_leader is always stored as False."""
    sql = """
        INSERT INTO members (first_name, last_name, uf_id, uf_email, password_hash, is_leader, discord, github)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """
    bind = (first_name, last_name, uf_id, uf_email, password_hash, False, discord, github)
    cur.execute(sql, bind)
+
+
def fetch_by_email_dict(cur, email: str) -> Optional[dict]:
    """Member row keyed by uf_email — includes password_hash for login checks.

    Returns None for an unknown email. Requires a dictionary cursor.
    """
    cur.execute(
        """
        SELECT uf_id, uf_email, first_name, last_name, password_hash, is_leader
        FROM members WHERE uf_email = %s
        """,
        (email,),
    )
    return cur.fetchone()


def fetch_by_uf_id_dict(cur, uf_id) -> Optional[dict]:
    """Member row by uf_id — deliberately omits password_hash. None if unknown."""
    cur.execute(
        """
        SELECT uf_id, uf_email, first_name, last_name, is_leader
        FROM members WHERE uf_id = %s
        """,
        (uf_id,),
    )
    return cur.fetchone()
diff --git a/src/api/repositories/supplies_location_history_repository.py b/src/api/repositories/supplies_location_history_repository.py
new file mode 100644
index 0000000..29f7102
--- /dev/null
+++ b/src/api/repositories/supplies_location_history_repository.py
@@ -0,0 +1,176 @@
+"""SQL for supplies_location_history listing and undo/discard/batch."""
+from typing import Any, List, Optional
+
+
def fetch_history_page(
    cur,
    supply_id: Optional[int],
    supply_name: Optional[str],
    location_name: Optional[str],
    limit: int,
    offset: int,
) -> List[dict]:
    """One page of location-history rows, newest first.

    Optional filters: exact supply_id, substring match on supply_name
    (LIKE with %...% wrapping), exact location_name. LEFT JOIN to members
    exposes the editor's first/last name while keeping rows whose
    changed_by no longer matches a member. Requires a dictionary cursor.
    """
    query = """
        SELECT
            slh.id,
            slh.supply_id,
            slh.supply_name,
            slh.location_name,
            slh.shelf,
            slh.action_type,
            slh.old_amount,
            slh.new_amount,
            slh.related_location,
            slh.related_shelf,
            slh.batch_id,
            slh.undone,
            slh.undone_at,
            slh.undone_by,
            slh.changed_by,
            slh.changed_at,
            m.first_name,
            m.last_name
        FROM supplies_location_history slh
        LEFT JOIN members m ON slh.changed_by = m.uf_id
        WHERE 1=1
    """
    params: List[Any] = []
    if supply_id is not None:
        query += " AND slh.supply_id = %s"
        params.append(supply_id)
    if supply_name:
        query += " AND slh.supply_name LIKE %s"
        params.append(f"%{supply_name}%")
    if location_name:
        query += " AND slh.location_name = %s"
        params.append(location_name)
    query += " ORDER BY slh.changed_at DESC LIMIT %s OFFSET %s"
    params.extend([limit, offset])
    cur.execute(query, params)
    return cur.fetchall()
+
+
def fetch_history_entry_for_undo_dict(cur, history_id: int) -> Optional[dict]:
    """Full history row needed to reverse an action, or None for an unknown id."""
    cur.execute(
        """
        SELECT id, supply_id, supply_name, location_name, shelf,
               action_type, old_amount, new_amount,
               related_location, related_shelf, batch_id, undone, changed_at
        FROM supplies_location_history
        WHERE id = %s
        """,
        (history_id,),
    )
    return cur.fetchone()


def supply_exists_tuple(cur, supply_id: int) -> bool:
    """True when the referenced supply row still exists."""
    cur.execute("SELECT id FROM supplies WHERE id = %s", (supply_id,))
    return cur.fetchone() is not None


def select_location_entry_tuple(cur, supply_id, location_name, shelf) -> Optional[tuple]:
    """(id, amount) of one placement; shelf match is NULL-safe
    (shelf = %s OR both NULL). None when no such placement exists."""
    cur.execute(
        """
        SELECT id, amount FROM supplies_location
        WHERE supply_id = %s AND location_name = %s
        AND (shelf = %s OR (shelf IS NULL AND %s IS NULL))
        """,
        (supply_id, location_name, shelf, shelf),
    )
    return cur.fetchone()


def delete_supplies_location_by_id(cur, row_id: int) -> None:
    """Remove one placement row by primary key."""
    cur.execute("DELETE FROM supplies_location WHERE id = %s", (row_id,))
+
+
def update_supplies_location_amount_tuple(cur, new_amount: int, user_id: str, row_id: int) -> None:
    """Set a placement's amount by row id and stamp last_modified_by."""
    cur.execute(
        """
        UPDATE supplies_location
        SET amount = %s, last_modified_by = %s
        WHERE id = %s
        """,
        (new_amount, user_id, row_id),
    )


def insert_supplies_location_box_tuple(
    cur, supply_id, location_name, shelf, amount, user_id: str
) -> None:
    """Insert a placement of a supply inside a named box/shelf."""
    cur.execute(
        """
        INSERT INTO supplies_location (supply_id, location_name, shelf, amount, last_modified_by)
        VALUES (%s, %s, %s, %s, %s)
        """,
        (supply_id, location_name, shelf, amount, user_id),
    )


def update_amount_by_supply_location_shelf_tuple(
    cur, new_amount: int, user_id: str, supply_id, location_name, shelf
) -> None:
    """Set a placement's amount addressed by (supply, location, shelf);
    the shelf comparison is NULL-safe (matches when both sides are NULL)."""
    cur.execute(
        """
        UPDATE supplies_location
        SET amount = %s, last_modified_by = %s
        WHERE supply_id = %s AND location_name = %s
        AND (shelf = %s OR (shelf IS NULL AND %s IS NULL))
        """,
        (new_amount, user_id, supply_id, location_name, shelf, shelf),
    )
+
+
def fetch_paired_move_row_dict(cur, batch_id, exclude_history_id: int) -> Optional[dict]:
    """The sibling row of a batch (same batch_id, different id), or None.

    NOTE(review): fetchone() assumes a batch contains at most one other row —
    confirm against the code that writes batches.
    """
    cur.execute(
        """
        SELECT id, location_name, shelf, old_amount, new_amount
        FROM supplies_location_history
        WHERE batch_id = %s AND id != %s
        """,
        (batch_id, exclude_history_id),
    )
    return cur.fetchone()


def delete_history_by_id(cur, history_id: int) -> None:
    """Remove a single location-history row."""
    cur.execute("DELETE FROM supplies_location_history WHERE id = %s", (history_id,))


def fetch_history_meta_for_discard_dict(cur, history_id: int) -> Optional[dict]:
    """Minimal metadata used to validate a discard, or None for an unknown id."""
    cur.execute(
        """
        SELECT id, action_type, batch_id, changed_at
        FROM supplies_location_history
        WHERE id = %s
        """,
        (history_id,),
    )
    return cur.fetchone()


def fetch_paired_id_tuple(cur, batch_id, exclude_history_id: int) -> Optional[tuple]:
    """Id of the sibling row in the same batch, excluding the given id."""
    cur.execute(
        """
        SELECT id FROM supplies_location_history
        WHERE batch_id = %s AND id != %s
        """,
        (batch_id, exclude_history_id),
    )
    return cur.fetchone()


def fetch_batch_entries_ordered_dict(cur, batch_id: str) -> List[dict]:
    """All history rows belonging to one batch, in insertion (id) order."""
    cur.execute(
        """
        SELECT id, supply_id, supply_name, location_name, shelf,
               action_type, old_amount, new_amount,
               related_location, related_shelf
        FROM supplies_location_history
        WHERE batch_id = %s
        ORDER BY id
        """,
        (batch_id,),
    )
    return cur.fetchall()
diff --git a/src/api/repositories/supplies_location_repository.py b/src/api/repositories/supplies_location_repository.py
new file mode 100644
index 0000000..e0a1ebc
--- /dev/null
+++ b/src/api/repositories/supplies_location_repository.py
@@ -0,0 +1,221 @@
+"""SQL for supplies_location and related supply name/timestamp touches."""
+from typing import Any, List, Optional, Sequence, Tuple
+
+
def fetch_joined_filtered(
    cur, location_filter: Optional[str], supply_id_filter: Optional[int]
) -> List[Tuple]:
    """Placement rows joined with the supply's name and public_id.

    Optional exact-match filters on location name and supply id; ordered by
    location (NULLs sort first via COALESCE ''), then shelf, then supply name.
    """
    base = """
    SELECT sl.id, sl.supply_id, sl.location_name, sl.coord_x, sl.coord_y, sl.shelf, sl.amount,
           sl.last_modified, sl.last_modified_by, sl.created_at,
           s.name AS supply_name,
           s.public_id AS supply_public_id
    FROM supplies_location sl
    JOIN supplies s ON sl.supply_id = s.id
    WHERE 1=1
    """
    fragments = [base]
    bind: List[Any] = []
    if location_filter:
        fragments.append(" AND sl.location_name = %s")
        bind.append(location_filter)
    if supply_id_filter is not None:
        fragments.append(" AND sl.supply_id = %s")
        bind.append(int(supply_id_filter))
    fragments.append(" ORDER BY COALESCE(sl.location_name, ''), sl.shelf, s.name")
    cur.execute("".join(fragments), bind)
    return cur.fetchall()
+
+
def fetch_by_id_tuple(cur, location_id: int) -> Optional[Tuple]:
    """One placement row (tuple form) by primary key, or None."""
    cur.execute(
        """
        SELECT id, supply_id, location_name, coord_x, coord_y, shelf, amount,
               last_modified, last_modified_by, created_at
        FROM supplies_location
        WHERE id = %s
        """,
        (location_id,),
    )
    return cur.fetchone()


def fetch_by_location_name_joined(cur, name: str) -> List[Tuple]:
    """All placements inside one named box, joined with supply name/public_id,
    ordered by shelf then supply name."""
    cur.execute(
        """
        SELECT sl.id, sl.supply_id, sl.location_name, sl.coord_x, sl.coord_y, sl.shelf, sl.amount,
               sl.last_modified, sl.last_modified_by, sl.created_at,
               s.name AS supply_name,
               s.public_id AS supply_public_id
        FROM supplies_location sl
        JOIN supplies s ON sl.supply_id = s.id
        WHERE sl.location_name = %s
        ORDER BY sl.shelf, s.name
        """,
        (name,),
    )
    return cur.fetchall()


def fetch_supply_id_name_dict(cur, supply_id: int) -> Optional[dict]:
    """{'id', 'name'} of one supply (dictionary cursor), or None."""
    cur.execute("SELECT id, name FROM supplies WHERE id = %s", (supply_id,))
    return cur.fetchone()


def fetch_supply_name_only_dict(cur, supply_id: int) -> Optional[dict]:
    """{'name'} of one supply (dictionary cursor), or None."""
    cur.execute("SELECT name FROM supplies WHERE id = %s", (supply_id,))
    return cur.fetchone()
+
+
def select_free_coord_row(cur, supply_id: int, cx: int, cy: int) -> Optional[Tuple]:
    """(id, amount) of a free-floating placement (no box: location_name IS NULL)
    at the given map coordinates, or None."""
    cur.execute(
        """
        SELECT id, amount FROM supplies_location
        WHERE supply_id = %s AND location_name IS NULL
        AND coord_x = %s AND coord_y = %s
        """,
        (supply_id, cx, cy),
    )
    return cur.fetchone()


def update_free_coord_amount_and_user(cur, location_id: int, user_id: str) -> None:
    """Reset a free-floating placement's amount to exactly 1 and stamp the editor."""
    cur.execute(
        """
        UPDATE supplies_location
        SET amount = 1, last_modified_by = %s
        WHERE id = %s
        """,
        (user_id, location_id),
    )


def touch_supply_last_modified(cur, supply_id: int, user_id: str) -> None:
    """Bump the parent supply's last_modified timestamp and record the editor."""
    cur.execute(
        """
        UPDATE supplies
        SET last_modified = CURRENT_TIMESTAMP, last_modified_by = %s
        WHERE id = %s
        """,
        (user_id, supply_id),
    )


def insert_free_coordinate_row(cur, supply_id: int, cx: int, cy: int, user_id: str) -> int:
    """Create a free-floating placement (amount fixed at 1, no shelf);
    returns the new row id."""
    cur.execute(
        """
        INSERT INTO supplies_location
        (supply_id, location_name, coord_x, coord_y, shelf, amount, last_modified_by)
        VALUES (%s, NULL, %s, %s, NULL, 1, %s)
        """,
        (supply_id, cx, cy, user_id),
    )
    return cur.lastrowid
+
+
def select_box_row(cur, supply_id: int, location_name: str, shelf) -> Optional[Tuple]:
    """(id, amount) of a placement in a named box; NULL-safe shelf match."""
    cur.execute(
        """
        SELECT id, amount FROM supplies_location
        WHERE supply_id = %s AND location_name = %s AND (shelf = %s OR (shelf IS NULL AND %s IS NULL))
        """,
        (supply_id, location_name, shelf, shelf),
    )
    return cur.fetchone()


def update_location_amount(cur, new_amount: int, user_id: str, location_id: int) -> None:
    """Set a placement's amount by row id and stamp the editor."""
    cur.execute(
        """
        UPDATE supplies_location
        SET amount = %s, last_modified_by = %s
        WHERE id = %s
        """,
        (new_amount, user_id, location_id),
    )


def insert_box_row(cur, supply_id: int, location_name: str, shelf, amount: int, user_id: str) -> int:
    """Insert a placement into a named box/shelf; returns the new row id."""
    cur.execute(
        """
        INSERT INTO supplies_location (supply_id, location_name, shelf, amount, last_modified_by)
        VALUES (%s, %s, %s, %s, %s)
        """,
        (supply_id, location_name, shelf, amount, user_id),
    )
    return cur.lastrowid
+
+
def fetch_location_for_update_join_dict(cur, location_id: int) -> Optional[dict]:
    """Placement row plus its supply's name (dict), or None — read before an update."""
    cur.execute(
        """
        SELECT sl.id, sl.supply_id, sl.location_name, sl.coord_x, sl.coord_y, sl.shelf, sl.amount,
               s.name as supply_name
        FROM supplies_location sl
        JOIN supplies s ON sl.supply_id = s.id
        WHERE sl.id = %s
        """,
        (location_id,),
    )
    return cur.fetchone()


def select_free_coord_conflict(
    cur, supply_id: int, cx: int, cy: int, exclude_location_id: int
) -> Optional[Tuple]:
    """Id of a DIFFERENT free-floating placement of the same supply at the same
    coordinates (excludes the row being edited), or None when the spot is free."""
    cur.execute(
        """
        SELECT id FROM supplies_location
        WHERE supply_id = %s AND location_name IS NULL
        AND coord_x = %s AND coord_y = %s AND id <> %s
        LIMIT 1
        """,
        (supply_id, cx, cy, exclude_location_id),
    )
    return cur.fetchone()
+
+
def update_supplies_location_dynamic(cur, set_clauses: Sequence[str], values: List[Any]) -> None:
    """Dynamic UPDATE of one placement row.

    set_clauses: code-built fragments like 'amount = %s'; values are the bind
    params in order, ending with location_id for the WHERE id = %s.
    No-op on an empty clause list — consistent with
    supplies_repository.update_supply_columns and avoids emitting the
    malformed statement 'UPDATE supplies_location SET  WHERE id = %s'.
    """
    if not set_clauses:
        return
    query = f"UPDATE supplies_location SET {', '.join(set_clauses)} WHERE id = %s"
    cur.execute(query, tuple(values))
+
+
def fetch_location_row_tuple(cur, location_id: int) -> Optional[Tuple]:
    """Full placement row (tuple form) by id, or None."""
    cur.execute(
        """
        SELECT id, supply_id, location_name, coord_x, coord_y, shelf, amount,
               last_modified, last_modified_by, created_at
        FROM supplies_location WHERE id = %s
        """,
        (location_id,),
    )
    return cur.fetchone()


def fetch_location_with_join_for_delete_dict(cur, location_id: int) -> Optional[dict]:
    """Placement plus its supply's name (dict), read before deleting the row."""
    cur.execute(
        """
        SELECT sl.id, sl.supply_id, sl.location_name, sl.coord_x, sl.coord_y, sl.shelf, sl.amount,
               s.name as supply_name
        FROM supplies_location sl
        JOIN supplies s ON sl.supply_id = s.id
        WHERE sl.id = %s
        """,
        (location_id,),
    )
    return cur.fetchone()


def delete_by_id(cur, location_id: int) -> None:
    """Remove one placement row by primary key."""
    cur.execute("DELETE FROM supplies_location WHERE id = %s", (location_id,))


def select_box_row_dict(cur, supply_id: int, location_name: str, shelf) -> Optional[dict]:
    """Dict twin of select_box_row: {'id', 'amount'} with NULL-safe shelf match."""
    cur.execute(
        """
        SELECT id, amount FROM supplies_location
        WHERE supply_id = %s AND location_name = %s AND (shelf = %s OR (shelf IS NULL AND %s IS NULL))
        """,
        (supply_id, location_name, shelf, shelf),
    )
    return cur.fetchone()
diff --git a/src/api/repositories/supplies_repository.py b/src/api/repositories/supplies_repository.py
new file mode 100644
index 0000000..04251dd
--- /dev/null
+++ b/src/api/repositories/supplies_repository.py
@@ -0,0 +1,516 @@
+"""SQL access for supplies catalog, junction tables, and related history rows."""
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional, Sequence, Tuple
+
+
+def _in_clause(ids: Sequence[int]) -> str:
+ return ",".join(["%s"] * len(ids))
+
+
def list_supplies_aggregate_rows(cur) -> List[dict]:
    """Catalog listing: every supply with type info and summed on-hand quantity.

    LEFT JOINs keep supplies with no type and no placements (totalQty then
    COALESCEs to 0). Ordered by supply name. Requires a dictionary cursor.
    """
    cur.execute(
        """
        SELECT
            s.id,
            s.public_id,
            s.name,
            s.description,
            s.image,
            s.custom_fields,
            s.supply_type_id,
            st.name AS type_name,
            st.image AS type_image,
            s.last_order_date,
            s.last_modified,
            s.last_modified_by,
            s.created_at,
            COALESCE(SUM(sl.amount), 0) AS totalQty
        FROM supplies s
        LEFT JOIN supply_types st ON s.supply_type_id = st.id
        LEFT JOIN supplies_location sl ON s.id = sl.supply_id
        GROUP BY s.id, s.public_id, s.name, s.description, s.image, s.custom_fields, s.supply_type_id,
                 st.name, st.image, s.last_order_date, s.last_modified, s.last_modified_by, s.created_at
        ORDER BY s.name
        """
    )
    return cur.fetchall()


def fetch_locations_by_supply_ids(cur, supply_ids: List[int]) -> Dict[int, List[dict]]:
    """Placements grouped by supply id for a batch of supplies.

    Returns an entry (possibly an empty list) for every requested id; empty
    input short-circuits to {} so the IN () clause is never built.
    """
    if not supply_ids:
        return {}
    ph = _in_clause(supply_ids)
    cur.execute(
        f"""
        SELECT supply_id, id, location_name, coord_x, coord_y, shelf, amount
        FROM supplies_location
        WHERE supply_id IN ({ph})
        ORDER BY supply_id, COALESCE(location_name, ''), shelf
        """,
        tuple(supply_ids),
    )
    out: Dict[int, List[dict]] = {sid: [] for sid in supply_ids}
    for row in cur.fetchall():
        sid = row["supply_id"]
        if sid in out:
            out[sid].append(row)
    return out
+
+
def fetch_teams_by_supply_ids(cur, supply_ids: List[int]) -> Dict[int, List[str]]:
    """Lower-cased team names grouped by supply id (entry for every requested id)."""
    if not supply_ids:
        return {}
    ph = _in_clause(supply_ids)
    cur.execute(
        f"""
        SELECT supply_id, team_name
        FROM supplies_teams
        WHERE supply_id IN ({ph})
        ORDER BY supply_id, team_name
        """,
        tuple(supply_ids),
    )
    out: Dict[int, List[str]] = {sid: [] for sid in supply_ids}
    for row in cur.fetchall():
        sid = row["supply_id"]
        if sid in out:
            out[sid].append(row["team_name"].lower())
    return out


def fetch_categories_by_supply_ids(cur, supply_ids: List[int]) -> Dict[int, List[int]]:
    """Category ids grouped by supply id (entry for every requested id)."""
    if not supply_ids:
        return {}
    ph = _in_clause(supply_ids)
    cur.execute(
        f"""
        SELECT supply_id, category_id
        FROM supplies_categories
        WHERE supply_id IN ({ph})
        ORDER BY supply_id, category_id
        """,
        tuple(supply_ids),
    )
    out: Dict[int, List[int]] = {sid: [] for sid in supply_ids}
    for row in cur.fetchall():
        sid = row["supply_id"]
        if sid in out:
            out[sid].append(row["category_id"])
    return out


def fetch_members_by_uf_ids(cur, uf_ids: List[str]) -> Dict[str, dict]:
    """Member rows keyed by stringified uf_id for a batch of ids."""
    if not uf_ids:
        return {}
    # _in_clause is annotated for ints, but it only counts elements —
    # %s placeholders bind string uf_ids just as well.
    ph = _in_clause(uf_ids)
    cur.execute(
        f"""
        SELECT uf_id, first_name, last_name, uf_email
        FROM members
        WHERE uf_id IN ({ph})
        """,
        tuple(uf_ids),
    )
    return {str(r["uf_id"]): r for r in cur.fetchall()}
+
+
def fetch_supply_detail_aggregate_row(cur, supply_id: int) -> Optional[dict]:
    """Single-supply version of the catalog aggregate: type info plus summed
    on-hand quantity (COALESCEd to 0). None when the id is unknown."""
    cur.execute(
        """
        SELECT
            s.id,
            s.public_id,
            s.name,
            s.description,
            s.image,
            s.custom_fields,
            s.supply_type_id,
            st.name AS type_name,
            st.image AS type_image,
            s.last_order_date,
            s.last_modified,
            s.last_modified_by,
            s.created_at,
            COALESCE(SUM(sl.amount), 0) AS totalQty
        FROM supplies s
        LEFT JOIN supply_types st ON s.supply_type_id = st.id
        LEFT JOIN supplies_location sl ON s.id = sl.supply_id
        WHERE s.id = %s
        GROUP BY s.id, s.public_id, s.name, s.description, s.image, s.custom_fields, s.supply_type_id,
                 st.name, st.image, s.last_order_date, s.last_modified, s.last_modified_by, s.created_at
        """,
        (supply_id,),
    )
    return cur.fetchone()


def fetch_allowed_custom_field_names(cur) -> set:
    """Names of all defined custom fields.

    Deliberately best-effort: any failure (e.g. the custom_field_definitions
    table not existing yet — confirm intent) yields an empty set instead of
    propagating the exception.
    """
    try:
        cur.execute("SELECT name FROM custom_field_definitions")
        return {r["name"] for r in cur.fetchall()}
    except Exception:
        return set()


def fetch_supply_type_row(cur, type_id: int) -> Optional[dict]:
    """Subset of one supply_types row (note: omits template_description and
    prevent_user_edit — see _SUPPLY_TYPE_SELECT in supply_types_repository),
    or None."""
    cur.execute(
        """
        SELECT id, name, item_name_prefix, item_description_prefix, image,
               default_custom_fields, locked_custom_field_keys,
               locked_category_ids, locked_team_names, is_unique
        FROM supply_types WHERE id = %s
        """,
        (int(type_id),),
    )
    return cur.fetchone()
+
+
def select_supply_id_by_name(cur, name: str) -> Optional[int]:
    """First matching id when multiple supplies share a name (avoid for new code).

    Handles both dict and tuple cursor rows; returns None when no match.
    """
    cur.execute("SELECT id FROM supplies WHERE name = %s ORDER BY id LIMIT 1", (name,))
    row = cur.fetchone()
    if not row:
        return None
    return row["id"] if isinstance(row, dict) else row[0]
+
+
def select_supply_ids_by_name(cur, name: str) -> List[int]:
    """All supply ids sharing `name`, ascending; accepts dict or tuple cursor rows."""
    cur.execute("SELECT id FROM supplies WHERE name = %s ORDER BY id", (name,))
    return [row["id"] if isinstance(row, dict) else row[0] for row in cur.fetchall()]
+
+
def name_exists_excluding(cur, name: str, exclude_id: Optional[int]) -> bool:
    """True when a supply already uses `name`; exclude_id (if given) ignores
    the supply currently being edited."""
    if exclude_id is None:
        cur.execute("SELECT id FROM supplies WHERE name = %s", (name,))
    else:
        cur.execute("SELECT id FROM supplies WHERE name = %s AND id != %s", (name, exclude_id))
    return cur.fetchone() is not None


def fetch_supply_id_type(cur, supply_id: int) -> Optional[dict]:
    """{'id', 'name', 'supply_type_id'} for one supply, or None."""
    cur.execute(
        "SELECT id, name, supply_type_id FROM supplies WHERE id = %s",
        (supply_id,),
    )
    return cur.fetchone()
+
+
def insert_supply(
    cur,
    public_id: str,
    name: str,
    description: Optional[str],
    image: Optional[str],
    custom_fields_json: Optional[str],
    last_order_date: Any,
    last_modified_by: str,
    supply_type_id: Optional[int],
) -> int:
    """Insert a full supply row; returns the new auto-increment id.

    custom_fields_json is stored as-is (caller serializes to JSON text or None).
    """
    cur.execute(
        """
        INSERT INTO supplies (public_id, name, description, image, custom_fields, last_order_date, last_modified_by, supply_type_id)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """,
        (public_id, name, description, image, custom_fields_json, last_order_date, last_modified_by, supply_type_id),
    )
    return cur.lastrowid


def insert_supply_team_ignore(cur, supply_id: int, team_name: str) -> None:
    """Link a supply to a team; INSERT IGNORE makes duplicates a no-op."""
    cur.execute(
        "INSERT IGNORE INTO supplies_teams (supply_id, team_name) VALUES (%s, %s)",
        (supply_id, team_name),
    )


def insert_supply_category_ignore(cur, supply_id: int, category_id: int) -> None:
    """Link a supply to a category; INSERT IGNORE makes duplicates a no-op."""
    cur.execute(
        "INSERT IGNORE INTO supplies_categories (supply_id, category_id) VALUES (%s, %s)",
        (supply_id, category_id),
    )
+
+
def fetch_supply_with_type_join(cur, supply_id: int) -> Optional[dict]:
    """Supply row plus its type's name/image (LEFT JOIN keeps typeless supplies)."""
    cur.execute(
        """
        SELECT s.id, s.public_id, s.name, s.description, s.image, s.custom_fields, s.last_order_date,
               s.last_modified, s.last_modified_by, s.created_at,
               s.supply_type_id, st.name AS type_name, st.image AS type_image
        FROM supplies s
        LEFT JOIN supply_types st ON s.supply_type_id = st.id
        WHERE s.id = %s
        """,
        (supply_id,),
    )
    return cur.fetchone()


def fetch_teams_for_supply_ordered(cur, supply_id: int) -> List[str]:
    """Lower-cased team names linked to one supply, alphabetical."""
    cur.execute(
        "SELECT team_name FROM supplies_teams WHERE supply_id = %s ORDER BY team_name",
        (supply_id,),
    )
    return [r["team_name"].lower() for r in cur.fetchall()]


def fetch_categories_for_supply_ordered(cur, supply_id: int) -> List[int]:
    """Category ids linked to one supply, ascending."""
    cur.execute(
        "SELECT category_id FROM supplies_categories WHERE supply_id = %s ORDER BY category_id",
        (supply_id,),
    )
    return [r["category_id"] for r in cur.fetchall()]
+
+
def update_supply_columns(cur, set_clauses: List[str], values: List[Any]) -> None:
    """set_clauses e.g. ['name = %s', 'description = %s']; values end with supply_id for WHERE id = %s.

    No-op when set_clauses is empty (prevents a malformed SET clause).
    """
    if not set_clauses:
        return
    query = f"UPDATE supplies SET {', '.join(set_clauses)} WHERE id = %s"
    cur.execute(query, tuple(values))


def delete_teams_for_supply(cur, supply_id: int) -> None:
    """Clear all team links for one supply."""
    cur.execute("DELETE FROM supplies_teams WHERE supply_id = %s", (supply_id,))


def insert_supply_team(cur, supply_id: int, team_name: str) -> None:
    """Link a supply to a team (plain INSERT — contrast insert_supply_team_ignore)."""
    cur.execute(
        "INSERT INTO supplies_teams (supply_id, team_name) VALUES (%s, %s)",
        (supply_id, team_name),
    )


def delete_categories_for_supply(cur, supply_id: int) -> None:
    """Clear all category links for one supply."""
    cur.execute("DELETE FROM supplies_categories WHERE supply_id = %s", (supply_id,))


def insert_supply_category(cur, supply_id: int, category_id: int) -> None:
    """Link a supply to a category (plain INSERT — contrast insert_supply_category_ignore)."""
    cur.execute(
        "INSERT INTO supplies_categories (supply_id, category_id) VALUES (%s, %s)",
        (supply_id, category_id),
    )
+
+
def sum_location_qty(cur, supply_id: int) -> int:
    """Total on-hand amount across all placements of one supply (0 when none)."""
    cur.execute(
        "SELECT COALESCE(SUM(amount), 0) AS totalQty FROM supplies_location WHERE supply_id = %s",
        (supply_id,),
    )
    row = cur.fetchone()
    return int(row["totalQty"]) if row else 0


def fetch_locations_ordered_for_supply(cur, supply_id: int) -> List[dict]:
    """All placements of one supply, ordered by location (NULLs first) then shelf."""
    cur.execute(
        """
        SELECT id, location_name, coord_x, coord_y, shelf, amount
        FROM supplies_location
        WHERE supply_id = %s
        ORDER BY COALESCE(location_name, ''), shelf
        """,
        (supply_id,),
    )
    return cur.fetchall()


def fetch_supply_id_name(cur, supply_id: int) -> Optional[dict]:
    """{'id', 'name'} of one supply, or None."""
    cur.execute("SELECT id, name FROM supplies WHERE id = %s", (supply_id,))
    return cur.fetchone()


def delete_supply_by_id(cur, supply_id: int) -> None:
    """Delete one supply row by id. NOTE(review): junction-table cleanup is
    presumably handled by callers or FK rules — confirm against the schema."""
    cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,))


def fetch_member_by_uf_id(cur, uf_id: str) -> Optional[dict]:
    """Display fields (name + email) for one member, or None."""
    cur.execute(
        "SELECT first_name, last_name, uf_email FROM members WHERE uf_id = %s",
        (uf_id,),
    )
    return cur.fetchone()
+
+
def fetch_supply_history_count_and_rows(
    cur,
    supply_id_filter: Optional[int],
    action_type_filter: Optional[str],
    limit: int,
    offset: int,
) -> Tuple[int, List[dict]]:
    """(total matching rows, one page of supply history), newest first.

    The COUNT runs over the same filtered query wrapped as a derived table,
    so the total always matches the filters applied to the page. LEFT JOIN
    to supplies lets deleted supplies still display a name via
    old_name/new_name. Requires a dictionary cursor.
    """
    query = """
        SELECT
            h.id,
            h.supply_id,
            h.action_type,
            h.old_name,
            h.new_name,
            h.old_description,
            h.new_description,
            h.old_image,
            h.new_image,
            h.old_last_order_date,
            h.new_last_order_date,
            h.changed_by,
            h.changed_at,
            COALESCE(s.name, h.old_name, h.new_name) AS supply_name
        FROM supplies_history h
        LEFT JOIN supplies s ON h.supply_id = s.id
        WHERE 1=1
    """
    params: List[Any] = []
    if supply_id_filter is not None:
        query += " AND h.supply_id = %s"
        params.append(supply_id_filter)
    if action_type_filter:
        query += " AND h.action_type = %s"
        params.append(action_type_filter)
    # Count first, before ORDER BY / LIMIT are appended to the query text.
    count_query = f"SELECT COUNT(*) AS total FROM ({query}) AS filtered"
    cur.execute(count_query, tuple(params))
    total = cur.fetchone()["total"]
    query += " ORDER BY h.changed_at DESC LIMIT %s OFFSET %s"
    params.extend([limit, offset])
    cur.execute(query, tuple(params))
    return total, cur.fetchall()
+
+
def fetch_history_teams(cur, history_id: int) -> List[dict]:
    """(team_name, action) rows attached to one supplies_history entry."""
    cur.execute(
        """
        SELECT team_name, action
        FROM supplies_history_teams
        WHERE history_id = %s
        """,
        (history_id,),
    )
    return cur.fetchall()


def fetch_history_categories(cur, history_id: int) -> List[dict]:
    """(category_id, action) rows attached to one supplies_history entry."""
    cur.execute(
        """
        SELECT category_id, action
        FROM supplies_history_categories
        WHERE history_id = %s
        """,
        (history_id,),
    )
    return cur.fetchall()


def fetch_history_row(cur, history_id: int) -> Optional[dict]:
    """Entire supplies_history row (SELECT *), or None."""
    cur.execute("SELECT * FROM supplies_history WHERE id = %s", (history_id,))
    return cur.fetchone()


def fetch_history_id_and_changed_at(cur, history_id: int) -> Optional[dict]:
    """Just {'id', 'changed_at'} of one history row, or None."""
    cur.execute(
        "SELECT id, changed_at FROM supplies_history WHERE id = %s",
        (history_id,),
    )
    return cur.fetchone()


def supply_exists_by_id(cur, supply_id: int) -> bool:
    """True when the supply row exists."""
    cur.execute("SELECT id FROM supplies WHERE id = %s", (supply_id,))
    return cur.fetchone() is not None


def delete_history_by_id(cur, history_id: int) -> None:
    """Remove one supplies_history row."""
    cur.execute("DELETE FROM supplies_history WHERE id = %s", (history_id,))
+
+
def insert_supply_with_id(
    cur,
    supply_id: int,
    public_id: str,
    name: Any,
    description: Any,
    image: Any,
    last_order_date: Any,
    last_modified_by: str,
) -> None:
    """Insert a supply under an explicit primary key (e.g. when restoring a
    deleted row — confirm with callers)."""
    cur.execute(
        """
        INSERT INTO supplies (id, public_id, name, description, image, last_order_date, last_modified_by)
        VALUES (%s, %s, %s, %s, %s, %s, %s)
        """,
        (supply_id, public_id, name, description, image, last_order_date, last_modified_by),
    )


def insert_supply_without_id(
    cur,
    public_id: str,
    name: Any,
    description: Any,
    image: Any,
    last_order_date: Any,
    last_modified_by: str,
) -> int:
    """Insert a supply letting the database assign the id; returns the new id."""
    cur.execute(
        """
        INSERT INTO supplies (public_id, name, description, image, last_order_date, last_modified_by)
        VALUES (%s, %s, %s, %s, %s, %s)
        """,
        (public_id, name, description, image, last_order_date, last_modified_by),
    )
    return cur.lastrowid
+
+
def fetch_undo_snapshot_batch(cur, old_name: str, changed_at: Any) -> Optional[dict]:
    """Most recent CASCADED_SUBTRACT batch for a supply name within ±10 seconds
    of `changed_at`, as {'batch_id', 'max_changed_at'}, or None.

    NOTE(review): the 10-second window is a heuristic that pairs a
    supply-level change with the location-history batch written alongside it;
    it relies on both being written in the same request.
    """
    cur.execute(
        """
        SELECT batch_id, MAX(changed_at) AS max_changed_at
        FROM supplies_location_history
        WHERE supply_name = %s
        AND action_type = 'CASCADED_SUBTRACT'
        AND changed_at >= DATE_SUB(%s, INTERVAL 10 SECOND)
        AND changed_at <= DATE_ADD(%s, INTERVAL 10 SECOND)
        GROUP BY batch_id
        ORDER BY max_changed_at DESC
        LIMIT 1
        """,
        (old_name, changed_at, changed_at),
    )
    return cur.fetchone()


def fetch_undo_snapshot_entries(cur, batch_id: Any) -> List[dict]:
    """Pre-change placement snapshot (location, shelf, old_amount) of one
    CASCADED_SUBTRACT batch."""
    cur.execute(
        """
        SELECT location_name, shelf, old_amount
        FROM supplies_location_history
        WHERE batch_id = %s
        AND action_type = 'CASCADED_SUBTRACT'
        """,
        (batch_id,),
    )
    return cur.fetchall()


def insert_supplies_location_row(
    cur, supply_id: int, location_name: Any, shelf: Any, amount: Any, last_modified_by: str
) -> None:
    """Insert one placement row (used when replaying an undo snapshot)."""
    cur.execute(
        """
        INSERT INTO supplies_location (supply_id, location_name, shelf, amount, last_modified_by)
        VALUES (%s, %s, %s, %s, %s)
        """,
        (supply_id, location_name, shelf, amount, last_modified_by),
    )


def delete_location_history_cascaded_batch(cur, batch_id: Any) -> None:
    """Drop all CASCADED_SUBTRACT rows belonging to one batch."""
    cur.execute(
        """
        DELETE FROM supplies_location_history
        WHERE batch_id = %s
        AND action_type = 'CASCADED_SUBTRACT'
        """,
        (batch_id,),
    )
diff --git a/src/api/repositories/supply_types_repository.py b/src/api/repositories/supply_types_repository.py
new file mode 100644
index 0000000..4003119
--- /dev/null
+++ b/src/api/repositories/supply_types_repository.py
@@ -0,0 +1,109 @@
+"""SQL for supply_types and linked supplies rows during prefix cascade."""
+from typing import List, Optional
+
# Shared column list for supply_types SELECTs; surrounding whitespace is
# stripped at the call sites.
_SUPPLY_TYPE_SELECT = """
    id, name, template_description, item_name_prefix, item_description_prefix,
    image, default_custom_fields, locked_custom_field_keys,
    locked_category_ids, locked_team_names,
    is_unique, prevent_user_edit,
    created_at, updated_at
"""


def list_all_dict(cur) -> List[dict]:
    """Every supply type as a dict, alphabetical by name (dictionary cursor)."""
    cur.execute(
        f"SELECT {_SUPPLY_TYPE_SELECT.strip()} FROM supply_types ORDER BY name"
    )
    return cur.fetchall()


def fetch_by_id_dict(cur, type_id: int) -> Optional[dict]:
    """One supply type as a dict, or None when the id is unknown."""
    cur.execute(
        f"SELECT {_SUPPLY_TYPE_SELECT.strip()} FROM supply_types WHERE id = %s",
        (type_id,),
    )
    return cur.fetchone()
+
+
def insert_supply_type(
    cur,
    name: str,
    template_description,
    item_name_prefix: str,
    item_description_prefix,
    image,
    default_custom_fields_json,
    locked_keys_json,
    locked_category_ids_json,
    locked_team_names_json,
    is_unique: int,
    prevent_user_edit: int,
) -> int:
    """Insert one supply type; returns the new auto-increment id.

    The *_json args are stored as-is (caller serializes to JSON text or None).
    """
    cur.execute(
        """
        INSERT INTO supply_types (
            name, template_description, item_name_prefix, item_description_prefix,
            image, default_custom_fields, locked_custom_field_keys,
            locked_category_ids, locked_team_names,
            is_unique, prevent_user_edit
        ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """,
        (
            name,
            template_description,
            item_name_prefix,
            item_description_prefix,
            image,
            default_custom_fields_json,
            locked_keys_json,
            locked_category_ids_json,
            locked_team_names_json,
            is_unique,
            prevent_user_edit,
        ),
    )
    return cur.lastrowid


def fetch_prefixes_row(cur, type_id: int) -> Optional[dict]:
    """Just the name/description prefixes of one type (input to the prefix
    cascade over linked supplies), or None."""
    cur.execute(
        """
        SELECT id, item_name_prefix, item_description_prefix
        FROM supply_types WHERE id = %s
        """,
        (type_id,),
    )
    return cur.fetchone()
+
+
def update_supply_type_columns(cur, set_fields: List[str], values: List) -> None:
    """Dynamic UPDATE of one supply_types row.

    set_fields: code-built fragments like 'name = %s'; values end with type_id
    for the WHERE id = %s. No-op when set_fields is empty — consistent with
    supplies_repository.update_supply_columns and avoids the malformed
    statement 'UPDATE supply_types SET  WHERE id = %s'.
    """
    if not set_fields:
        return
    cur.execute(f"UPDATE supply_types SET {', '.join(set_fields)} WHERE id = %s", tuple(values))
+
+
def select_supplies_id_name_desc_for_type(cur, type_id: int) -> List[dict]:
    """All supplies of one type (id/name/description) — input to the prefix cascade."""
    cur.execute(
        "SELECT id, name, description FROM supplies WHERE supply_type_id = %s",
        (type_id,),
    )
    return cur.fetchall()


def select_supply_id_by_name_excluding(cur, name: str, exclude_id: int) -> Optional[int]:
    """Id of another supply already using `name` (collision check), or None.

    Handles both dict and tuple cursor rows.
    """
    cur.execute("SELECT id FROM supplies WHERE name = %s AND id != %s", (name, exclude_id))
    row = cur.fetchone()
    if not row:
        return None
    return row["id"] if isinstance(row, dict) else row[0]


def update_supply_name_description(cur, supply_id: int, name: str, description) -> None:
    """Rewrite one supply's name and description (used by the prefix cascade)."""
    cur.execute(
        "UPDATE supplies SET name = %s, description = %s WHERE id = %s",
        (name, description, supply_id),
    )


def delete_supply_type_by_id(cur, type_id: int) -> int:
    """Delete one supply type; returns the affected-row count (0 when id unknown)."""
    cur.execute("DELETE FROM supply_types WHERE id = %s", (type_id,))
    return cur.rowcount
diff --git a/src/api/repositories/teams_repository.py b/src/api/repositories/teams_repository.py
new file mode 100644
index 0000000..330c076
--- /dev/null
+++ b/src/api/repositories/teams_repository.py
@@ -0,0 +1,13 @@
+"""SQL for teams reference table."""
+
+
+def list_team_names_ordered(cur) -> list:
+ cur.execute("SELECT name FROM teams ORDER BY name")
+ rows = cur.fetchall()
+ out = []
+ for row in rows:
+ if isinstance(row, dict):
+ out.append(row["name"])
+ else:
+ out.append(row[0])
+ return out
diff --git a/src/api/routes/auth.py b/src/api/routes/auth.py
index 9490519..9dbb24d 100644
--- a/src/api/routes/auth.py
+++ b/src/api/routes/auth.py
@@ -4,138 +4,184 @@
import sys
from pathlib import Path
-# Add src to path for imports (must be before other imports)
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
-from flask import Blueprint, request, jsonify, session
import bcrypt
+import mysql.connector
+import secrets
+from flask import Blueprint, jsonify, request, session
+
from src.api.db import get_db
+from src.api.repositories import members_repository as repo
+
+auth_bp = Blueprint("auth", __name__)
-auth_bp = Blueprint('auth', __name__)
+_MIN_PASSWORD_LEN = 8
-@auth_bp.route('/login', methods=['POST'])
+@auth_bp.route("/login", methods=["POST"])
def login():
"""
POST /api/auth/login
Login with email and password.
-
- Request body:
- {
- "email": "test@ufl.edu",
- "password": "test"
- }
-
- Returns:
- JSON with user info and success status
"""
try:
data = request.get_json()
- email = data.get('email')
- password = data.get('password')
-
+ email = data.get("email")
+ password = data.get("password")
+
if not email or not password:
- return jsonify({'error': 'Email and password are required'}), 400
-
+ return jsonify({"error": "Email and password are required"}), 400
+
conn = get_db()
cur = conn.cursor(dictionary=True)
-
- # Find user by email
- cur.execute(
- "SELECT uf_id, uf_email, first_name, last_name, password_hash, is_leader FROM members WHERE uf_email = %s",
- (email,)
- )
- user = cur.fetchone()
+ user = repo.fetch_by_email_dict(cur, email)
cur.close()
conn.close()
-
+
if not user:
- return jsonify({'error': 'Invalid email or password'}), 401
-
- if not user['password_hash']:
- return jsonify({'error': 'Invalid email or password'}), 401
-
- if not user['is_leader']:
- return jsonify({'error': 'Access denied. Leader status required.'}), 403
-
- # Verify password
- if not bcrypt.checkpw(password.encode('utf-8'), user['password_hash'].encode('utf-8')):
- return jsonify({'error': 'Invalid email or password'}), 401
-
- # Create session
- session['user_id'] = user['uf_id']
- session['user_email'] = user['uf_email']
- session['is_leader'] = user['is_leader']
-
- return jsonify({
- 'success': True,
- 'user': {
- 'uf_id': user['uf_id'],
- 'email': user['uf_email'],
- 'first_name': user['first_name'],
- 'last_name': user['last_name'],
- 'is_leader': user['is_leader']
+ return jsonify({"error": "Invalid email or password"}), 401
+
+ if not user["password_hash"]:
+ return jsonify({"error": "Invalid email or password"}), 401
+
+ if not bcrypt.checkpw(password.encode("utf-8"), user["password_hash"].encode("utf-8")):
+ return jsonify({"error": "Invalid email or password"}), 401
+
+ session["user_id"] = user["uf_id"]
+ session["user_email"] = user["uf_email"]
+ session["is_leader"] = user["is_leader"]
+
+ return jsonify(
+ {
+ "success": True,
+ "user": {
+ "uf_id": user["uf_id"],
+ "email": user["uf_email"],
+ "first_name": user["first_name"],
+ "last_name": user["last_name"],
+ "is_leader": user["is_leader"],
+ },
}
- }), 200
-
+ ), 200
+
except Exception as e:
- return jsonify({'error': str(e)}), 500
+ return jsonify({"error": str(e)}), 500
-@auth_bp.route('/logout', methods=['POST'])
-def logout():
+@auth_bp.route("/register", methods=["POST"])
+def register():
"""
- POST /api/auth/logout
- Logout (destroy session).
-
- Returns:
- JSON with success status
+ POST /api/auth/register
+ Create an account (non-leader). Logs the user in on success.
"""
+ try:
+ data = request.get_json() or {}
+ email = (data.get("email") or "").strip().lower()
+ password = data.get("password") or ""
+ confirm_password = data.get("confirm_password") or ""
+ first_name = (data.get("first_name") or "").strip()
+ last_name = (data.get("last_name") or "").strip()
+
+ if not email or not password or not first_name or not last_name:
+ return jsonify({"error": "First name, last name, email, and password are required"}), 400
+ if password != confirm_password:
+ return jsonify({"error": "Passwords do not match"}), 400
+ if len(password) < _MIN_PASSWORD_LEN:
+ return jsonify({"error": f"Password must be at least {_MIN_PASSWORD_LEN} characters"}), 400
+
+ token = secrets.token_hex(12)
+ discord_tag = f"{token[:24]}#0000"
+ github_user = f"signup-{secrets.token_hex(16)}"
+
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ try:
+ uf_id = repo.allocate_uf_id(cur)
+ password_hash = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")
+ repo.insert_signup_member(
+ cur,
+ first_name=first_name,
+ last_name=last_name,
+ uf_id=uf_id,
+ uf_email=email,
+ password_hash=password_hash,
+ discord=discord_tag,
+ github=github_user,
+ )
+ conn.commit()
+
+ session["user_id"] = uf_id
+ session["user_email"] = email
+ session["is_leader"] = False
+
+ return (
+ jsonify(
+ {
+ "success": True,
+ "user": {
+ "uf_id": uf_id,
+ "email": email,
+ "first_name": first_name,
+ "last_name": last_name,
+ "is_leader": False,
+ },
+ }
+ ),
+ 201,
+ )
+ except mysql.connector.IntegrityError:
+ conn.rollback()
+ return jsonify({"error": "An account with this email already exists"}), 409
+ except Exception:
+ conn.rollback()
+ raise
+ finally:
+ cur.close()
+ conn.close()
+
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@auth_bp.route("/logout", methods=["POST"])
+def logout():
+ """POST /api/auth/logout"""
try:
session.clear()
- return jsonify({'success': True}), 200
+ return jsonify({"success": True}), 200
except Exception as e:
- return jsonify({'error': str(e)}), 500
+ return jsonify({"error": str(e)}), 500
-@auth_bp.route('/me', methods=['GET'])
+@auth_bp.route("/me", methods=["GET"])
def get_current_user():
- """
- GET /api/auth/me
- Get current user info from session.
-
- Returns:
- JSON with user info if authenticated, 401 if not
- """
+ """GET /api/auth/me"""
try:
- if 'user_id' not in session:
- return jsonify({'error': 'Not authenticated'}), 401
-
+ if "user_id" not in session:
+ return jsonify({"error": "Not authenticated"}), 401
+
conn = get_db()
cur = conn.cursor(dictionary=True)
-
- cur.execute(
- "SELECT uf_id, uf_email, first_name, last_name, is_leader FROM members WHERE uf_id = %s",
- (session['user_id'],)
- )
- user = cur.fetchone()
+ user = repo.fetch_by_uf_id_dict(cur, session["user_id"])
cur.close()
conn.close()
-
+
if not user:
session.clear()
- return jsonify({'error': 'User not found'}), 401
-
- return jsonify({
- 'user': {
- 'uf_id': user['uf_id'],
- 'email': user['uf_email'],
- 'first_name': user['first_name'],
- 'last_name': user['last_name'],
- 'is_leader': user['is_leader']
+ return jsonify({"error": "User not found"}), 401
+
+ return jsonify(
+ {
+ "user": {
+ "uf_id": user["uf_id"],
+ "email": user["uf_email"],
+ "first_name": user["first_name"],
+ "last_name": user["last_name"],
+ "is_leader": user["is_leader"],
+ }
}
- }), 200
-
- except Exception as e:
- return jsonify({'error': str(e)}), 500
+ ), 200
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
diff --git a/src/api/routes/categories.py b/src/api/routes/categories.py
index 80ec9f1..1f23a03 100644
--- a/src/api/routes/categories.py
+++ b/src/api/routes/categories.py
@@ -1,60 +1,81 @@
"""Categories API routes."""
-from flask import Blueprint, jsonify
+from flask import Blueprint, request, jsonify
import mysql.connector
-import os
-from src.scripts.helpers import parse_database_url
-categories_bp = Blueprint('categories', __name__)
+from src.api.db import get_db
+from src.api.middleware.auth import require_leader
+from src.api.models.category import Category
+from src.api.repositories import categories_repository as repo
+categories_bp = Blueprint("categories", __name__)
-def get_db_connection():
- """Get database connection."""
- database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
- db_params = parse_database_url(database_url)
- return mysql.connector.connect(**db_params)
-
-@categories_bp.route('/categories', methods=['GET'])
+@categories_bp.route("/categories", methods=["GET"])
def get_categories():
"""Get all categories with IDs."""
try:
- conn = get_db_connection()
+ conn = get_db()
cur = conn.cursor()
-
- cur.execute("SELECT id, name FROM categories ORDER BY name")
- categories = [{'id': row[0], 'name': row[1]} for row in cur.fetchall()]
-
+ rows = repo.list_id_name_ordered(cur)
+ categories = [{"id": row[0], "name": row[1]} for row in rows]
cur.close()
conn.close()
-
- return jsonify({'categories': categories}), 200
+ return jsonify({"categories": categories}), 200
except mysql.connector.Error as e:
- return jsonify({'error': f'Database error: {str(e)}'}), 500
+ return jsonify({"error": f"Database error: {str(e)}"}), 500
except Exception as e:
- return jsonify({'error': f'Unexpected error: {str(e)}'}), 500
+ return jsonify({"error": f"Unexpected error: {str(e)}"}), 500
-@categories_bp.route('/categories/', methods=['GET'])
+@categories_bp.route("/categories/", methods=["GET"])
def get_category(category_id):
"""Get a single category by ID."""
try:
- conn = get_db_connection()
+ conn = get_db()
cur = conn.cursor()
-
- cur.execute("SELECT id, name, created_at FROM categories WHERE id = %s", (category_id,))
- row = cur.fetchone()
-
+ row = repo.fetch_by_id(cur, category_id)
cur.close()
conn.close()
-
if not row:
- return jsonify({'error': 'Category not found'}), 404
-
- from src.api.models.category import Category
+ return jsonify({"error": "Category not found"}), 404
category = Category.from_db_row(row)
return jsonify(category.to_dict()), 200
except mysql.connector.Error as e:
- return jsonify({'error': f'Database error: {str(e)}'}), 500
+ return jsonify({"error": f"Database error: {str(e)}"}), 500
except Exception as e:
- return jsonify({'error': f'Unexpected error: {str(e)}'}), 500
+ return jsonify({"error": f"Unexpected error: {str(e)}"}), 500
+
+
+@categories_bp.route("/categories", methods=["POST"])
+@require_leader
+def create_category(current_user_id=None):
+ """Create a new category. Requires leader/admin access."""
+ try:
+ data = request.json
+ if not data or "name" not in data:
+ return jsonify({"error": "Category name is required"}), 400
+ name = data["name"].strip()
+ if not name:
+ return jsonify({"error": "Category name cannot be empty"}), 400
+ conn = get_db()
+ cur = conn.cursor()
+ try:
+ repo.insert_name(cur, name)
+ conn.commit()
+ row = repo.fetch_last_insert_row(cur)
+ category = Category.from_db_row(row)
+ cur.close()
+ conn.close()
+ return jsonify(category.to_dict()), 201
+ except mysql.connector.IntegrityError as e:
+ conn.rollback()
+ cur.close()
+ conn.close()
+ if "Duplicate entry" in str(e) or "UNIQUE constraint" in str(e):
+ return jsonify({"error": "Category with this name already exists"}), 409
+ return jsonify({"error": f"Database error: {str(e)}"}), 400
+ except mysql.connector.Error as e:
+ return jsonify({"error": f"Database error: {str(e)}"}), 500
+ except Exception as e:
+ return jsonify({"error": f"Unexpected error: {str(e)}"}), 500
diff --git a/src/api/routes/custom_field_definitions.py b/src/api/routes/custom_field_definitions.py
new file mode 100644
index 0000000..5f35f9f
--- /dev/null
+++ b/src/api/routes/custom_field_definitions.py
@@ -0,0 +1,146 @@
+"""
+Custom field definitions API. Public GET for dropdown; admin CRUD for create/update/delete.
+"""
+import json
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent.parent.parent))
+
+import mysql.connector
+from flask import Blueprint, jsonify, request
+
+from src.api.db import get_db
+from src.api.middleware.auth import require_auth, require_leader
+from src.api.models.custom_field_definition import CustomFieldDefinition
+from src.api.repositories import custom_field_definitions_repository as repo
+
+custom_field_definitions_bp = Blueprint("custom_field_definitions", __name__)
+
+VALID_TYPES = ("text", "number", "date")
+
+
+@custom_field_definitions_bp.route("", methods=["GET"])
+@require_auth
+def get_custom_field_definitions(current_user_id=None):
+ """GET /api/custom-field-definitions"""
+ try:
+ conn = get_db()
+ cur = conn.cursor()
+ rows = repo.list_id_name_type_ordered(cur)
+ definitions = [CustomFieldDefinition.from_db_row(row).to_dict() for row in rows]
+ cur.close()
+ conn.close()
+ return jsonify(definitions), 200
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@custom_field_definitions_bp.route("", methods=["POST"])
+@require_leader
+def create_custom_field_definition(current_user_id=None):
+ """POST /api/custom-field-definitions"""
+ try:
+ data = request.json
+ if not data:
+ return jsonify({"error": "Request body is required"}), 400
+ name = (data.get("name") or "").strip()
+ type_val = (data.get("type") or "text").strip().lower()
+ if not name:
+ return jsonify({"error": "Name is required"}), 400
+ if type_val not in VALID_TYPES:
+ return jsonify({"error": f'Type must be one of: {", ".join(VALID_TYPES)}'}), 400
+
+ conn = get_db()
+ cur = conn.cursor()
+ repo.insert_name_type(cur, name, type_val)
+ conn.commit()
+ definition_id = cur.lastrowid
+ row = repo.fetch_by_id_tuple(cur, definition_id)
+ cur.close()
+ conn.close()
+ return jsonify(CustomFieldDefinition.from_db_row(row).to_dict()), 201
+ except mysql.connector.IntegrityError as e:
+ if "Duplicate entry" in str(e) or "unique" in str(e).lower():
+ return jsonify({"error": "A custom field with this name already exists"}), 400
+ return jsonify({"error": str(e)}), 400
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@custom_field_definitions_bp.route("/", methods=["PUT"])
+@require_leader
+def update_custom_field_definition(definition_id, current_user_id=None):
+ """PUT /api/custom-field-definitions/"""
+ try:
+ data = request.json
+ if not data:
+ return jsonify({"error": "Request body is required"}), 400
+ name = (data.get("name") or "").strip() if "name" in data else None
+ type_val = (data.get("type") or "").strip().lower() if "type" in data else None
+ if type_val is not None and type_val not in VALID_TYPES:
+ return jsonify({"error": f'Type must be one of: {", ".join(VALID_TYPES)}'}), 400
+
+ conn = get_db()
+ cur = conn.cursor()
+ if not repo.exists_id_tuple(cur, definition_id):
+ cur.close()
+ conn.close()
+ return jsonify({"error": "Custom field definition not found"}), 404
+
+ updates = []
+ values = []
+ if "name" in data and name:
+ updates.append("name = %s")
+ values.append(name)
+ if "type" in data and type_val:
+ updates.append("type = %s")
+ values.append(type_val)
+ if updates:
+ values.append(definition_id)
+ repo.update_columns(cur, updates, values)
+ conn.commit()
+
+ row = repo.fetch_by_id_tuple(cur, definition_id)
+ cur.close()
+ conn.close()
+ return jsonify(CustomFieldDefinition.from_db_row(row).to_dict()), 200
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@custom_field_definitions_bp.route("/", methods=["DELETE"])
+@require_leader
+def delete_custom_field_definition(definition_id, current_user_id=None):
+ """DELETE /api/custom-field-definitions/"""
+ try:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ row = repo.fetch_name_by_id_dict(cur, definition_id)
+ if not row:
+ cur.close()
+ conn.close()
+ return jsonify({"error": "Custom field definition not found"}), 404
+ field_name = row["name"]
+
+ repo.delete_by_id(cur, definition_id)
+
+ for srow in repo.iter_supplies_custom_fields_rows(cur):
+ cf = srow["custom_fields"]
+ if isinstance(cf, str):
+ try:
+ cf = json.loads(cf)
+ except (TypeError, ValueError):
+ continue
+ if not isinstance(cf, dict) or field_name not in cf:
+ continue
+ del cf[field_name]
+ new_json = json.dumps(cf) if cf else None
+ repo.update_supply_custom_fields_json(cur, srow["id"], new_json)
+
+ conn.commit()
+ cur.close()
+ conn.close()
+ return "", 204
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
diff --git a/src/api/routes/locations.py b/src/api/routes/locations.py
index aba9424..d25daec 100644
--- a/src/api/routes/locations.py
+++ b/src/api/routes/locations.py
@@ -2,6 +2,7 @@
Location API routes.
"""
import sys
+import json
from pathlib import Path
# Add src to path for imports (must be before other imports)
@@ -11,10 +12,109 @@
import mysql.connector
from src.api.db import get_db
from src.api.models.location import Location
+from src.api.middleware.auth import require_leader
+from src.api.repositories import locations_repository as repo
+from src.scripts.location_type_constants import LEADER_ASSIGNABLE_LOCATION_TYPES
locations_bp = Blueprint('locations', __name__)
+def get_fill_for_type(location_type):
+ """Get CSS fill color variable for location type."""
+ type_fills = {
+ 'drawer': 'var(--drawer)',
+ 'cabinet': 'var(--table)',
+ 'tall_cabinet': 'var(--table)',
+ 'table': 'var(--table)',
+ 'other': 'var(--table)',
+ 'special': '#ff69b4', # System map locations with custom SVG
+ }
+ return type_fills.get(location_type, 'var(--table)')
+
+
+def sync_locations_json():
+ """
+ Sync inventory-locations.json with database.
+ Updates the JSON file to match current database state.
+ NOTE: This function is deprecated and no longer called. JSON file is now seed data only.
+ """
+ try:
+ # Get project root (go up from src/api/routes to project root)
+ script_dir = Path(__file__).parent.parent.parent.parent
+ # JSON file is now in seed_data directory
+ json_path = script_dir / "src" / "seed_data" / "inventory-locations.json"
+
+ if not json_path.exists():
+ # Try legacy path for backwards compatibility
+ json_path = script_dir / "milventory" / "public" / "inventory-locations.json"
+ if not json_path.exists():
+ print(f"⚠ Warning: inventory-locations.json not found at {json_path}")
+ return False
+
+ # Fetch all locations from DB
+ conn = get_db()
+ cur = conn.cursor()
+ rows = repo.list_all_tuple_ordered(cur)
+ db_locations = {}
+ for row in rows:
+ db_locations[row[0]] = {
+ 'name': row[0],
+ 'x': row[1],
+ 'y': row[2],
+ 'width': row[3],
+ 'height': row[4],
+ 'type': row[5],
+ 'protected': bool(row[6]) if len(row) > 6 else False
+ }
+ cur.close()
+ conn.close()
+
+ # Load existing JSON to preserve inventory-bounds
+ try:
+ with open(json_path, 'r', encoding='utf-8') as f:
+ data = json.load(f)
+ except (FileNotFoundError, json.JSONDecodeError):
+ # Create default structure if file doesn't exist or is invalid
+ data = {
+ "inventory-bounds": {
+ "viewBox": {"x": 0, "y": 0, "width": 4000, "height": 4000},
+ "room": {"x": 80, "y": 80, "width": 3600, "height": 3840, "rx": 18, "ry": 18}
+ },
+ "boxes": []
+ }
+
+ # Convert DB locations to JSON boxes format
+ boxes = []
+ for name, loc_data in db_locations.items():
+ boxes.append({
+ 'title': loc_data['name'],
+ 'x': loc_data['x'],
+ 'y': loc_data['y'],
+ 'width': loc_data['width'],
+ 'height': loc_data['height'],
+ 'fill': get_fill_for_type(loc_data['type'])
+ })
+
+ data['boxes'] = boxes
+
+ # Write back to JSON
+ with open(json_path, 'w', encoding='utf-8') as f:
+ json.dump(data, f, indent=2)
+
+ # Also update the alternative path if it exists
+ alt_path = script_dir / "milventory" / "public" / "inventory-locations.json"
+ if alt_path.exists() and alt_path != json_path:
+ with open(alt_path, 'w', encoding='utf-8') as f:
+ json.dump(data, f, indent=2)
+
+ return True
+ except Exception as e:
+ print(f"⚠ Warning: Failed to sync locations JSON: {e}")
+ import traceback
+ traceback.print_exc()
+ return False
+
+
@locations_bp.route('', methods=['GET'])
def get_locations():
"""
@@ -27,8 +127,7 @@ def get_locations():
try:
conn = get_db()
cur = conn.cursor()
- cur.execute("SELECT name, x, y, width, height, type FROM locations ORDER BY name")
- rows = cur.fetchall()
+ rows = repo.list_all_tuple_ordered(cur)
locations = [Location.from_db_row(row).to_dict() for row in rows]
cur.close()
conn.close()
@@ -52,8 +151,7 @@ def get_location(name):
try:
conn = get_db()
cur = conn.cursor()
- cur.execute("SELECT name, x, y, width, height, type FROM locations WHERE name = %s", (name,))
- row = cur.fetchone()
+ row = repo.fetch_by_name_tuple(cur, name)
cur.close()
conn.close()
@@ -67,7 +165,8 @@ def get_location(name):
@locations_bp.route('', methods=['POST'])
-def create_location():
+@require_leader
+def create_location(current_user_id=None):
"""
POST /api/locations
Create a new location.
@@ -97,23 +196,47 @@ def create_location():
return jsonify({'error': f'Missing required field: {field}'}), 400
location = Location.from_dict(data)
-
+
+ if location.type not in LEADER_ASSIGNABLE_LOCATION_TYPES:
+ return jsonify({'error': 'Invalid location type'}), 400
+
+ # shelf_count is authoritative from the request (defaults to 0 via the model).
+ # Leaders explicitly control whether a location has shelves and how many.
+ # Capped at 15 for sanity — no real storage unit has more shelves than that,
+ # and the map rendering would become unreadable.
+ shelf_count = max(0, int(location.shelf_count or 0))
+ if shelf_count > 15:
+ return jsonify({'error': 'shelf_count cannot exceed 15'}), 400
+
conn = get_db()
cur = conn.cursor()
- cur.execute(
- "INSERT INTO locations (name, x, y, width, height, type) VALUES (%s, %s, %s, %s, %s, %s)",
- (location.name, location.x, location.y, location.width, location.height, location.type)
+ repo.insert_location(
+ cur,
+ location.name,
+ location.x,
+ location.y,
+ location.width,
+ location.height,
+ location.type,
+ shelf_count,
+ False,
)
conn.commit()
cur.close()
conn.close()
+ # Note: New locations are stored only in the database with protected=FALSE by default.
+ # Protected status is managed via the database column, not the JSON file.
+
return jsonify(location.to_dict()), 201
except mysql.connector.IntegrityError as e:
if 'Duplicate entry' in str(e):
return jsonify({'error': 'Location with this name already exists'}), 409
return jsonify({'error': str(e)}), 400
except Exception as e:
+ print(f"Error creating location: {e}")
+ import traceback
+ traceback.print_exc()
return jsonify({'error': str(e)}), 500
@@ -144,38 +267,101 @@ def update_location(name):
if not data:
return jsonify({'error': 'Request body is required'}), 400
- # Validate fields (name is not updatable via PUT)
- updatable_fields = ['x', 'y', 'width', 'height', 'type']
+ # Validate fields
+ updatable_fields = ['x', 'y', 'width', 'height', 'type', 'name', 'shelf_count']
update_data = {k: v for k, v in data.items() if k in updatable_fields}
-
+
if not update_data:
return jsonify({'error': 'No valid fields to update'}), 400
-
+
conn = get_db()
cur = conn.cursor()
-
- # Check if location exists
- cur.execute("SELECT name FROM locations WHERE name = %s", (name,))
- if not cur.fetchone():
+
+ if not repo.name_exists(cur, name):
cur.close()
conn.close()
return jsonify({'error': 'Location not found'}), 404
+
+ existing_row = repo.fetch_by_name_tuple(cur, name)
+ existing = Location.from_db_row(existing_row)
+
+ if 'type' in update_data:
+ if existing.type == 'special':
+ update_data.pop('type')
+ elif update_data['type'] not in LEADER_ASSIGNABLE_LOCATION_TYPES:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Invalid location type'}), 400
+
+ new_name = update_data.pop('name', None)
+
+ # Normalize + validate shelf_count reductions BEFORE committing any change.
+ # A reduction that would orphan existing placements is rejected with 409 so
+ # the admin must move/delete those supplies first.
+ if 'shelf_count' in update_data:
+ try:
+ new_shelf_count = max(0, int(update_data['shelf_count'] or 0))
+ except (TypeError, ValueError):
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'shelf_count must be an integer'}), 400
+ if new_shelf_count > 15:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'shelf_count cannot exceed 15'}), 400
+ update_data['shelf_count'] = new_shelf_count
+
+ if new_shelf_count == 0:
+ # Fully unchecking "has shelves" demotes every placement at this
+ # location to a box-level placement (shelf = NULL). The admin
+ # confirmed this loss-of-shelf-info in the UI before we got
+ # here; partial reductions (0 < new < old) still hit the 409
+ # orphan-block path below.
+ cur.execute(
+ "UPDATE supplies_location SET shelf = NULL "
+ "WHERE location_name = %s AND shelf IS NOT NULL",
+ (name,),
+ )
+ else:
+ orphan_count = repo.count_orphans_if_shelf_count(cur, name, new_shelf_count)
+ if orphan_count > 0:
+ max_shelf = repo.max_used_shelf(cur, name)
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': (
+ f'Cannot reduce shelf_count to {new_shelf_count}: '
+ f'{orphan_count} placement(s) would be orphaned '
+ f'(highest shelf in use is {max_shelf}). '
+ f'Move or delete those supplies first.'
+ ),
+ 'orphaned_count': orphan_count,
+ 'max_used_shelf': max_shelf,
+ 'requested_shelf_count': new_shelf_count,
+ }), 409
+
+ if update_data:
+ set_clauses = []
+ values = []
+ for field, value in update_data.items():
+ set_clauses.append(f"{field} = %s")
+ values.append(value)
+ values.append(name)
+ repo.update_by_name(cur, set_clauses, values)
+
+ final_name = name
+ if new_name and new_name != name:
+ if repo.name_exists(cur, new_name):
+ cur.close()
+ conn.close()
+ return jsonify({'error': f'Location "{new_name}" already exists'}), 409
+ repo.rename(cur, new_name, name)
+ final_name = new_name
- # Build update query dynamically
- set_clauses = []
- values = []
- for field, value in update_data.items():
- set_clauses.append(f"{field} = %s")
- values.append(value)
- values.append(name)
-
- query = f"UPDATE locations SET {', '.join(set_clauses)} WHERE name = %s"
- cur.execute(query, values)
conn.commit()
- # Fetch updated location
- cur.execute("SELECT name, x, y, width, height, type FROM locations WHERE name = %s", (name,))
- row = cur.fetchone()
+ # Fetch updated location using final name
+ row = repo.fetch_by_name_tuple(cur, final_name)
location = Location.from_db_row(row).to_dict()
cur.close()
@@ -187,7 +373,8 @@ def update_location(name):
@locations_bp.route('/', methods=['DELETE'])
-def delete_location(name):
+@require_leader
+def delete_location(name, current_user_id=None):
"""
DELETE /api/locations/
Delete a location.
@@ -202,18 +389,19 @@ def delete_location(name):
conn = get_db()
cur = conn.cursor()
- # Check if location exists
- cur.execute("SELECT name FROM locations WHERE name = %s", (name,))
- if not cur.fetchone():
+ if not repo.name_exists(cur, name):
cur.close()
conn.close()
return jsonify({'error': 'Location not found'}), 404
-
- cur.execute("DELETE FROM locations WHERE name = %s", (name,))
+
+ repo.delete_by_name(cur, name)
conn.commit()
cur.close()
conn.close()
+ # Note: Deletions only affect the database.
+ # Protected locations cannot be deleted (enforced by frontend based on protected column).
+
return '', 204
except Exception as e:
return jsonify({'error': str(e)}), 500
diff --git a/src/api/routes/migrate.py b/src/api/routes/migrate.py
new file mode 100644
index 0000000..34cc744
--- /dev/null
+++ b/src/api/routes/migrate.py
@@ -0,0 +1,48 @@
+"""
+Migration API route to run database migrations.
+This can be called to update the database schema.
+"""
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent.parent))
+
+from flask import Blueprint, jsonify
+from src.api.middleware.auth import require_leader
+from src.scripts.migrate_locations_schema import migrate_locations_schema
+
+migrate_bp = Blueprint('migrate', __name__)
+
+
+@migrate_bp.route('/migrate/locations', methods=['POST'])
+@require_leader
+def migrate_locations():
+ """
+ POST /api/migrate/locations
+ Run migration to add coordinate columns to locations table.
+ Requires leader/admin access.
+ """
+ try:
+ # Capture print output
+ import io
+ import contextlib
+
+ f = io.StringIO()
+ with contextlib.redirect_stdout(f), contextlib.redirect_stderr(f):
+ migrate_locations_schema()
+
+ output = f.getvalue()
+
+ return jsonify({
+ 'success': True,
+ 'message': 'Migration completed',
+ 'output': output
+ }), 200
+ except Exception as e:
+ return jsonify({
+ 'success': False,
+ 'error': str(e)
+ }), 500
+
+
+
diff --git a/src/api/routes/supplies.py b/src/api/routes/supplies.py
index 5a296e6..c29a339 100644
--- a/src/api/routes/supplies.py
+++ b/src/api/routes/supplies.py
@@ -4,586 +4,109 @@
import sys
from pathlib import Path
-# Add src to path for imports (must be before other imports)
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
-from flask import Blueprint, request, jsonify, session
import mysql.connector
-from src.api.db import get_db
-from src.api.models.supply import Supply
+from flask import Blueprint, jsonify, request
+
from src.api.middleware.auth import require_auth
+from src.api.services import supply_catalog_service as svc
-supplies_bp = Blueprint('supplies', __name__)
+supplies_bp = Blueprint("supplies", __name__)
-@supplies_bp.route('', methods=['GET'])
+@supplies_bp.route("", methods=["GET"])
@require_auth
def get_supplies(current_user_id=None):
- """
- GET /api/supplies
- Get all supplies (catalog) with computed quantities and locations.
-
- Returns:
- JSON array of supplies with totalQty and locations[] computed from supplies_location
- """
try:
- conn = get_db()
- cur = conn.cursor(dictionary=True)
-
- # Get all supplies with computed quantities
- cur.execute("""
- SELECT
- s.id,
- s.name,
- s.description,
- s.image,
- s.last_order_date,
- s.last_modified,
- s.last_modified_by,
- s.created_at,
- COALESCE(SUM(sl.amount), 0) as totalQty
- FROM supplies s
- LEFT JOIN supplies_location sl ON s.id = sl.supply_id
- GROUP BY s.id, s.name, s.description, s.image, s.last_order_date, s.last_modified, s.last_modified_by, s.created_at
- ORDER BY s.name
- """)
-
- supplies = []
- for row in cur.fetchall():
- # Get locations for this supply
- cur.execute("""
- SELECT location_name, shelf, amount
- FROM supplies_location
- WHERE supply_id = %s
- ORDER BY location_name, shelf
- """, (row['id'],))
-
- locations = []
- for loc_row in cur.fetchall():
- locations.append({
- 'location': loc_row['location_name'],
- 'shelf': loc_row['shelf'],
- 'qty': loc_row['amount']
- })
-
- # Get teams for this supply
- cur.execute("""
- SELECT team_name
- FROM supplies_teams
- WHERE supply_id = %s
- ORDER BY team_name
- """, (row['id'],))
- teams = [team_row['team_name'].lower() for team_row in cur.fetchall()]
-
- # Get categories for this supply
- cur.execute("""
- SELECT category_id
- FROM supplies_categories
- WHERE supply_id = %s
- ORDER BY category_id
- """, (row['id'],))
- category_ids = [cat_row['category_id'] for cat_row in cur.fetchall()]
-
- supply_dict = {
- 'id': row['id'],
- 'name': row['name'],
- 'description': row['description'],
- 'image': row['image'],
- 'lastModified': row['last_modified'].isoformat() if row['last_modified'] else None,
- 'last_modified_by': row['last_modified_by'],
- 'totalQty': int(row['totalQty']),
- 'locations': locations,
- 'teams': teams,
- 'categories': category_ids
- }
- if row['last_order_date']:
- supply_dict['last_order_date'] = row['last_order_date'].isoformat() if hasattr(row['last_order_date'], 'isoformat') else str(row['last_order_date'])
-
- # Get member name for last_modified_by if available
- if row['last_modified_by']:
- cur.execute("""
- SELECT first_name, last_name, uf_email
- FROM members
- WHERE uf_id = %s
- """, (row['last_modified_by'],))
- member = cur.fetchone()
- if member:
- supply_dict['last_modified_by_name'] = f"{member['first_name']} {member['last_name']}"
- supply_dict['last_modified_by_email'] = member['uf_email']
-
- supplies.append(supply_dict)
-
- cur.close()
- conn.close()
- return jsonify(supplies), 200
+ return jsonify(svc.list_supplies()), 200
except Exception as e:
- return jsonify({'error': str(e)}), 500
+ return jsonify({"error": str(e)}), 500
-@supplies_bp.route('/', methods=['GET'])
+@supplies_bp.route("/", methods=["GET"])
@require_auth
def get_supply(supply_id, current_user_id=None):
- """
- GET /api/supplies/
- Get a specific supply by ID with computed quantities and locations.
-
- Args:
- supply_id: Supply ID
-
- Returns:
- JSON object of the supply or 404 if not found
- """
try:
- conn = get_db()
- cur = conn.cursor(dictionary=True)
-
- cur.execute("""
- SELECT
- s.id,
- s.name,
- s.description,
- s.image,
- s.last_order_date,
- s.last_modified,
- s.last_modified_by,
- s.created_at,
- COALESCE(SUM(sl.amount), 0) as totalQty
- FROM supplies s
- LEFT JOIN supplies_location sl ON s.id = sl.supply_id
- WHERE s.id = %s
- GROUP BY s.id, s.name, s.description, s.image, s.last_order_date, s.last_modified, s.last_modified_by, s.created_at
- """, (supply_id,))
-
- row = cur.fetchone()
- if not row:
- cur.close()
- conn.close()
- return jsonify({'error': 'Supply not found'}), 404
-
- # Get locations for this supply
- cur.execute("""
- SELECT location_name, shelf, amount
- FROM supplies_location
- WHERE supply_id = %s
- ORDER BY location_name, shelf
- """, (supply_id,))
-
- locations = []
- for loc_row in cur.fetchall():
- locations.append({
- 'location': loc_row['location_name'],
- 'shelf': loc_row['shelf'],
- 'qty': loc_row['amount']
- })
-
- # Get teams for this supply
- cur.execute("""
- SELECT team_name
- FROM supplies_teams
- WHERE supply_id = %s
- ORDER BY team_name
- """, (supply_id,))
- teams = [team_row['team_name'].lower() for team_row in cur.fetchall()]
-
- # Get categories for this supply
- cur.execute("""
- SELECT category_id
- FROM supplies_categories
- WHERE supply_id = %s
- ORDER BY category_id
- """, (supply_id,))
- category_ids = [cat_row['category_id'] for cat_row in cur.fetchall()]
-
- supply_dict = {
- 'id': row['id'],
- 'name': row['name'],
- 'description': row['description'],
- 'image': row['image'],
- 'lastModified': row['last_modified'].isoformat() if row['last_modified'] else None,
- 'last_modified_by': row['last_modified_by'],
- 'totalQty': int(row['totalQty']),
- 'locations': locations,
- 'teams': teams,
- 'categories': category_ids
- }
- if row['last_order_date']:
- supply_dict['last_order_date'] = row['last_order_date'].isoformat() if hasattr(row['last_order_date'], 'isoformat') else str(row['last_order_date'])
-
- # Get member name for last_modified_by if available
- if row['last_modified_by']:
- cur.execute("""
- SELECT first_name, last_name, uf_email
- FROM members
- WHERE uf_id = %s
- """, (row['last_modified_by'],))
- member = cur.fetchone()
- if member:
- supply_dict['last_modified_by_name'] = f"{member['first_name']} {member['last_name']}"
- supply_dict['last_modified_by_email'] = member['uf_email']
-
- cur.close()
- conn.close()
- return jsonify(supply_dict), 200
+ return jsonify(svc.get_supply(supply_id)), 200
+ except svc.CatalogError as e:
+ return jsonify(e.body), e.status
except Exception as e:
- return jsonify({'error': str(e)}), 500
+ return jsonify({"error": str(e)}), 500
-@supplies_bp.route('', methods=['POST'])
+@supplies_bp.route("", methods=["POST"])
@require_auth
def create_supply(current_user_id=None):
- """
- POST /api/supplies
- Create a new supply (catalog entry).
-
- Request body:
- {
- "name": "string" (required),
- "description": "string" (optional),
- "image": "data:image/...;base64,..." (optional, max 10MB file),
- "last_order_date": "YYYY-MM-DD" (optional)
- }
-
- Returns:
- JSON object of the created supply
- """
try:
- data = request.json
- if not data:
- return jsonify({'error': 'Request body is required'}), 400
-
- if 'name' not in data or not data['name'].strip():
- return jsonify({'error': 'Name is required'}), 400
-
- # Validate image size (max 10MB file = ~13.3MB base64)
- if 'image' in data and data['image']:
- # Base64 data URI format: data:image/...;base64,
- if data['image'].startswith('data:image'):
- base64_part = data['image'].split(',', 1)[1] if ',' in data['image'] else ''
- # Approximate: base64 is ~33% larger than original
- if len(base64_part) > 13_300_000: # ~10MB file
- return jsonify({'error': 'Image file size exceeds 10MB limit'}), 400
-
- conn = get_db()
- cur = conn.cursor(dictionary=True)
-
- # Check if supply with this name already exists
- cur.execute("SELECT id FROM supplies WHERE name = %s", (data['name'].strip(),))
- if cur.fetchone():
- cur.close()
- conn.close()
- return jsonify({'error': 'Supply with this name already exists'}), 400
-
- # Insert new supply
- cur.execute("""
- INSERT INTO supplies (name, description, image, last_order_date, last_modified_by)
- VALUES (%s, %s, %s, %s, %s)
- """, (
- data['name'].strip(),
- data.get('description', '').strip() or None,
- data.get('image') or None,
- data.get('last_order_date') or None,
- current_user_id
- ))
-
- supply_id = cur.lastrowid
-
- # Insert teams
- if data.get('teams'):
- for team_name in data['teams']:
- # Normalize team name (capitalize to match database: Software, Electrical, Mechanical)
- team_name_capitalized = team_name.capitalize()
- # Handle special case: "Software" not "Software" (already capitalized)
- if team_name_capitalized not in ['Software', 'Electrical', 'Mechanical']:
- # Try to match case-insensitively
- if team_name.lower() == 'software':
- team_name_capitalized = 'Software'
- elif team_name.lower() == 'electrical':
- team_name_capitalized = 'Electrical'
- elif team_name.lower() == 'mechanical':
- team_name_capitalized = 'Mechanical'
-
- cur.execute(
- "INSERT IGNORE INTO supplies_teams (supply_id, team_name) VALUES (%s, %s)",
- (supply_id, team_name_capitalized)
- )
-
- # Insert categories
- if data.get('categories'):
- for category_id in data['categories']:
- # Ensure category_id is an integer
- try:
- cat_id = int(category_id)
- cur.execute(
- "INSERT IGNORE INTO supplies_categories (supply_id, category_id) VALUES (%s, %s)",
- (supply_id, cat_id)
- )
- except (ValueError, TypeError):
- # Skip invalid category IDs
- continue
-
- conn.commit()
-
- # Fetch the created supply with teams and categories
- cur.execute("""
- SELECT id, name, description, image, last_order_date, last_modified, last_modified_by, created_at
- FROM supplies WHERE id = %s
- """, (supply_id,))
-
- row = cur.fetchone()
-
- # Get teams
- cur.execute("""
- SELECT team_name FROM supplies_teams WHERE supply_id = %s ORDER BY team_name
- """, (supply_id,))
- teams = [t['team_name'].lower() for t in cur.fetchall()]
-
- # Get categories
- cur.execute("""
- SELECT category_id FROM supplies_categories WHERE supply_id = %s ORDER BY category_id
- """, (supply_id,))
- category_ids = [c['category_id'] for c in cur.fetchall()]
-
- # Convert row dict to Supply object
- supply = Supply.from_dict(row).to_dict()
- supply['totalQty'] = 0
- supply['locations'] = []
- supply['teams'] = teams
- supply['categories'] = category_ids
-
- cur.close()
- conn.close()
-
- return jsonify(supply), 201
+ return jsonify(svc.create_supply(request.json, current_user_id)), 201
+ except svc.CatalogError as e:
+ return jsonify(e.body), e.status
except mysql.connector.IntegrityError as e:
- if 'Duplicate entry' in str(e) or 'unique' in str(e).lower():
- return jsonify({'error': 'Supply with this name already exists'}), 400
- return jsonify({'error': str(e)}), 400
+ if "Duplicate entry" in str(e) or "unique" in str(e).lower():
+ return jsonify({"error": "Supply with this name already exists"}), 400
+ return jsonify({"error": str(e)}), 400
except Exception as e:
- return jsonify({'error': str(e)}), 500
+ return jsonify({"error": str(e)}), 500
-@supplies_bp.route('/<int:supply_id>', methods=['PUT'])
+@supplies_bp.route("/<int:supply_id>", methods=["PUT"])
@require_auth
def update_supply(supply_id, current_user_id=None):
- """
- PUT /api/supplies/
- Update an existing supply (catalog entry).
-
- Args:
- supply_id: Supply ID to update
-
- Request body:
- {
- "name": "string" (optional),
- "description": "string" (optional),
- "image": "data:image/...;base64,..." (optional, max 10MB file),
- "last_order_date": "YYYY-MM-DD" (optional)
- }
-
- Returns:
- JSON object of the updated supply
- """
try:
- data = request.json
- if not data:
- return jsonify({'error': 'Request body is required'}), 400
-
- # Validate image size if provided
- if 'image' in data and data['image']:
- if data['image'].startswith('data:image'):
- base64_part = data['image'].split(',', 1)[1] if ',' in data['image'] else ''
- if len(base64_part) > 13_300_000:
- return jsonify({'error': 'Image file size exceeds 10MB limit'}), 400
-
- conn = get_db()
- cur = conn.cursor(dictionary=True)
-
- # Check if supply exists
- cur.execute("SELECT id FROM supplies WHERE id = %s", (supply_id,))
- if not cur.fetchone():
- cur.close()
- conn.close()
- return jsonify({'error': 'Supply not found'}), 404
-
- # Check if name is being changed and new name already exists
- if 'name' in data and data['name']:
- cur.execute("SELECT id FROM supplies WHERE name = %s AND id != %s", (data['name'].strip(), supply_id))
- if cur.fetchone():
- cur.close()
- conn.close()
- return jsonify({'error': 'Supply with this name already exists'}), 400
-
- # Build update query
- updates = []
- values = []
-
- if 'name' in data:
- updates.append("name = %s")
- values.append(data['name'].strip())
- if 'description' in data:
- updates.append("description = %s")
- values.append(data['description'].strip() or None)
- if 'image' in data:
- updates.append("image = %s")
- values.append(data['image'] or None)
- if 'last_order_date' in data:
- updates.append("last_order_date = %s")
- values.append(data['last_order_date'] or None)
-
- # Always update last_modified_by
- updates.append("last_modified_by = %s")
- values.append(current_user_id)
-
- values.append(supply_id)
-
- if updates:
- query = f"UPDATE supplies SET {', '.join(updates)} WHERE id = %s"
- cur.execute(query, values)
-
- # Update teams if provided
- if 'teams' in data:
- # Delete existing teams
- cur.execute("DELETE FROM supplies_teams WHERE supply_id = %s", (supply_id,))
- # Insert new teams
- if data.get('teams'):
- for team_name in data['teams']:
- # Normalize team name (capitalize to match database)
- team_name_capitalized = team_name.capitalize()
- # Handle special cases
- if team_name.lower() == 'software':
- team_name_capitalized = 'Software'
- elif team_name.lower() == 'electrical':
- team_name_capitalized = 'Electrical'
- elif team_name.lower() == 'mechanical':
- team_name_capitalized = 'Mechanical'
-
- cur.execute(
- "INSERT INTO supplies_teams (supply_id, team_name) VALUES (%s, %s)",
- (supply_id, team_name_capitalized)
- )
-
- # Update categories if provided
- if 'categories' in data:
- # Delete existing categories
- cur.execute("DELETE FROM supplies_categories WHERE supply_id = %s", (supply_id,))
- # Insert new categories
- if data.get('categories'):
- for category_id in data['categories']:
- try:
- cat_id = int(category_id)
- cur.execute(
- "INSERT INTO supplies_categories (supply_id, category_id) VALUES (%s, %s)",
- (supply_id, cat_id)
- )
- except (ValueError, TypeError):
- continue
-
- conn.commit()
-
- # Fetch updated supply
- cur.execute("""
- SELECT id, name, description, image, last_order_date, last_modified, last_modified_by, created_at
- FROM supplies WHERE id = %s
- """, (supply_id,))
-
- row = cur.fetchone()
- supply = Supply.from_dict(row).to_dict()
-
- # Get computed quantities
- cur.execute("""
- SELECT COALESCE(SUM(amount), 0) as totalQty
- FROM supplies_location
- WHERE supply_id = %s
- """, (supply_id,))
- total_qty_row = cur.fetchone()
- total_qty = total_qty_row['totalQty'] if total_qty_row else 0
-
- cur.execute("""
- SELECT location_name, shelf, amount
- FROM supplies_location
- WHERE supply_id = %s
- ORDER BY location_name, shelf
- """, (supply_id,))
-
- locations = []
- for loc_row in cur.fetchall():
- locations.append({
- 'location': loc_row['location_name'],
- 'shelf': loc_row['shelf'],
- 'qty': loc_row['amount']
- })
-
- # Get teams
- cur.execute("""
- SELECT team_name FROM supplies_teams WHERE supply_id = %s ORDER BY team_name
- """, (supply_id,))
- teams = [t['team_name'].lower() for t in cur.fetchall()]
-
- # Get categories
- cur.execute("""
- SELECT category_id FROM supplies_categories WHERE supply_id = %s ORDER BY category_id
- """, (supply_id,))
- category_ids = [c['category_id'] for c in cur.fetchall()]
-
- supply['totalQty'] = int(total_qty)
- supply['locations'] = locations
- supply['teams'] = teams
- supply['categories'] = category_ids
-
- # Get member name for last_modified_by if available
- if row['last_modified_by']:
- cur.execute("""
- SELECT first_name, last_name, uf_email
- FROM members
- WHERE uf_id = %s
- """, (row['last_modified_by'],))
- member = cur.fetchone()
- if member:
- supply['last_modified_by_name'] = f"{member['first_name']} {member['last_name']}"
- supply['last_modified_by_email'] = member['uf_email']
-
- cur.close()
- conn.close()
-
- return jsonify(supply), 200
+ return jsonify(svc.update_supply(supply_id, request.json, current_user_id)), 200
+ except svc.CatalogError as e:
+ return jsonify(e.body), e.status
except mysql.connector.IntegrityError as e:
- if 'Duplicate entry' in str(e) or 'unique' in str(e).lower():
- return jsonify({'error': 'Supply with this name already exists'}), 400
- return jsonify({'error': str(e)}), 400
+ if "Duplicate entry" in str(e) or "unique" in str(e).lower():
+ return jsonify({"error": "Supply with this name already exists"}), 400
+ return jsonify({"error": str(e)}), 400
except Exception as e:
- return jsonify({'error': str(e)}), 500
+ return jsonify({"error": str(e)}), 500
-@supplies_bp.route('/<int:supply_id>', methods=['DELETE'])
+@supplies_bp.route("/<int:supply_id>", methods=["DELETE"])
@require_auth
def delete_supply(supply_id, current_user_id=None):
- """
- DELETE /api/supplies/
- Delete a supply (catalog entry).
- CASCADE will automatically delete all supplies_location entries.
-
- Args:
- supply_id: Supply ID to delete
-
- Returns:
- 204 No Content on success, 404 if not found
- """
try:
- conn = get_db()
- cur = conn.cursor()
-
- # Check if supply exists
- cur.execute("SELECT id FROM supplies WHERE id = %s", (supply_id,))
- if not cur.fetchone():
- cur.close()
- conn.close()
- return jsonify({'error': 'Supply not found'}), 404
-
- cur.execute("DELETE FROM supplies WHERE id = %s", (supply_id,))
- conn.commit()
- cur.close()
- conn.close()
-
- return '', 204
+ svc.delete_supply(supply_id, current_user_id)
+ return "", 204
+ except svc.CatalogError as e:
+ return jsonify(e.body), e.status
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@supplies_bp.route("/history", methods=["GET"])
+@require_auth
+def get_supply_history(current_user_id=None):
+ try:
+ supply_id_filter = request.args.get("supply_id", type=int)
+ action_type_filter = request.args.get("action_type")
+ limit = request.args.get("limit", 100, type=int)
+ offset = request.args.get("offset", 0, type=int)
+ return jsonify(svc.get_supply_history(supply_id_filter, action_type_filter, limit, offset)), 200
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@supplies_bp.route("/history/<int:history_id>/undo", methods=["POST"])
+@require_auth
+def undo_supply_history(history_id, current_user_id=None):
+ try:
+ return jsonify(svc.undo_supply_history(history_id, current_user_id)), 200
+ except svc.CatalogError as e:
+ return jsonify(e.body), e.status
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@supplies_bp.route("/history/<int:history_id>/discard", methods=["POST"])
+@require_auth
+def discard_supply_history(history_id, current_user_id=None):
+ try:
+ return jsonify(svc.discard_supply_history(history_id)), 200
+ except svc.CatalogError as e:
+ return jsonify(e.body), e.status
except Exception as e:
- return jsonify({'error': str(e)}), 500
+ return jsonify({"error": str(e)}), 500
diff --git a/src/api/routes/supplies_location.py b/src/api/routes/supplies_location.py
index adbd19c..403103b 100644
--- a/src/api/routes/supplies_location.py
+++ b/src/api/routes/supplies_location.py
@@ -9,12 +9,22 @@
from flask import Blueprint, request, jsonify
import mysql.connector
+import uuid
from src.api.db import get_db
from src.api.models.supply_location import SupplyLocation
from src.api.middleware.auth import require_auth
+from src.api.helpers.history import log_location_history
+from src.api.helpers.unique_type_qty import (
+ map_total_qty_for_supply,
+ check_unique_type_map_qty,
+)
+from src.api.helpers.map_bounds import coords_in_room, clamp_coords_to_room
+from src.api.repositories import supplies_location_repository as sl_repo
supplies_location_bp = Blueprint('supplies_location', __name__)
+FREE_COORD_HISTORY_LABEL = 'Free Coordinate'
+
@supplies_location_bp.route('', methods=['GET'])
@require_auth
@@ -33,49 +43,19 @@ def get_all_supply_locations(current_user_id=None):
try:
location_filter = request.args.get('location')
supply_id_filter = request.args.get('supply_id')
-
+ sid = int(supply_id_filter) if supply_id_filter else None
+
conn = get_db()
cur = conn.cursor()
+
+ rows = sl_repo.fetch_joined_filtered(cur, location_filter, sid)
- query = """
- SELECT sl.id, sl.supply_id, sl.location_name, sl.shelf, sl.amount,
- sl.last_modified, sl.last_modified_by, sl.created_at,
- s.name as supply_name
- FROM supplies_location sl
- JOIN supplies s ON sl.supply_id = s.id
- WHERE 1=1
- """
- params = []
-
- if location_filter:
- query += " AND sl.location_name = %s"
- params.append(location_filter)
-
- if supply_id_filter:
- query += " AND sl.supply_id = %s"
- params.append(int(supply_id_filter))
-
- query += " ORDER BY sl.location_name, sl.shelf, s.name"
-
- cur.execute(query, params)
- rows = cur.fetchall()
-
- # Convert to SupplyLocation objects (need to adjust for JOIN)
locations = []
for row in rows:
- # row: (id, supply_id, location_name, shelf, amount, last_modified, last_modified_by, created_at, supply_name)
- loc = SupplyLocation(
- id=row[0],
- supply_id=row[1],
- location_name=row[2],
- shelf=row[3],
- amount=row[4],
- last_modified=row[5],
- last_modified_by=row[6],
- created_at=row[7]
- )
+ loc = SupplyLocation.from_db_row(row[:10])
loc_dict = loc.to_dict()
- loc_dict['supply_name'] = row[8] # Add supply name from JOIN
+ loc_dict['supply_name'] = row[10]
+ loc_dict['supply_public_id'] = row[11]
locations.append(loc_dict)
cur.close()
@@ -101,17 +81,11 @@ def get_supply_location(location_id, current_user_id=None):
try:
conn = get_db()
cur = conn.cursor()
-
- cur.execute("""
- SELECT id, supply_id, location_name, shelf, amount, last_modified, last_modified_by, created_at
- FROM supplies_location
- WHERE id = %s
- """, (location_id,))
-
- row = cur.fetchone()
+
+ row = sl_repo.fetch_by_id_tuple(cur, location_id)
cur.close()
conn.close()
-
+
if row:
location = SupplyLocation.from_db_row(row)
return jsonify(location.to_dict()), 200
@@ -137,33 +111,15 @@ def get_location_supplies(name, current_user_id=None):
try:
conn = get_db()
cur = conn.cursor()
-
- cur.execute("""
- SELECT sl.id, sl.supply_id, sl.location_name, sl.shelf, sl.amount,
- sl.last_modified, sl.last_modified_by, sl.created_at,
- s.name as supply_name
- FROM supplies_location sl
- JOIN supplies s ON sl.supply_id = s.id
- WHERE sl.location_name = %s
- ORDER BY sl.shelf, s.name
- """, (name,))
-
- rows = cur.fetchall()
+
+ rows = sl_repo.fetch_by_location_name_joined(cur, name)
locations = []
for row in rows:
- loc = SupplyLocation(
- id=row[0],
- supply_id=row[1],
- location_name=row[2],
- shelf=row[3],
- amount=row[4],
- last_modified=row[5],
- last_modified_by=row[6],
- created_at=row[7]
- )
+ loc = SupplyLocation.from_db_row(row[:10])
loc_dict = loc.to_dict()
- loc_dict['supply_name'] = row[8]
+ loc_dict['supply_name'] = row[10]
+ loc_dict['supply_public_id'] = row[11]
locations.append(loc_dict)
cur.close()
@@ -196,67 +152,162 @@ def add_supply_location(current_user_id=None):
if not data:
return jsonify({'error': 'Request body is required'}), 400
- required_fields = ['supply_id', 'location', 'amount']
- for field in required_fields:
- if field not in data:
- return jsonify({'error': f'Missing required field: {field}'}), 400
+ if 'supply_id' not in data:
+ return jsonify({'error': 'Missing required field: supply_id'}), 400
- if data['amount'] <= 0:
- return jsonify({'error': 'Amount must be positive'}), 400
+ supply_id = data['supply_id']
+ cx_raw, cy_raw = data.get('coord_x'), data.get('coord_y')
+ has_free = cx_raw is not None and cy_raw is not None
+ has_box = bool(data.get('location'))
+
+ if has_free and has_box:
+ return jsonify({'error': 'Send either location (box) or coord_x/coord_y (free place), not both'}), 400
+
+ if not has_free:
+ required_fields = ['location', 'amount']
+ for field in required_fields:
+ if field not in data:
+ return jsonify({'error': f'Missing required field: {field}'}), 400
+ if data['amount'] <= 0:
+ return jsonify({'error': 'Amount must be positive'}), 400
+ else:
+ amount = int(data.get('amount', 1))
+ if amount != 1:
+ return jsonify({
+ 'error': 'Free coordinate placements must use amount 1 (one unit per coordinate)',
+ 'error_type': 'FREE_COORD_AMOUNT',
+ }), 400
conn = get_db()
+ cur = conn.cursor(dictionary=True)
+
+ supply = sl_repo.fetch_supply_id_name_dict(cur, supply_id)
+ if not supply:
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Supply not found',
+ 'error_type': 'SUPPLY_DELETED',
+ 'supply_id': supply_id,
+ 'supply_name': data.get('supply_name', 'Unknown'),
+ 'message': 'This item was deleted by another user. Please refresh the page to see the latest data.'
+ }), 404
+
cur = conn.cursor()
- shelf = data.get('shelf')
- location_name = data['location']
- supply_id = data['supply_id']
- amount = data['amount']
+ batch_id = str(uuid.uuid4())
- # Check if entry already exists
- cur.execute("""
- SELECT id, amount FROM supplies_location
- WHERE supply_id = %s AND location_name = %s AND (shelf = %s OR (shelf IS NULL AND %s IS NULL))
- """, (supply_id, location_name, shelf, shelf))
-
- existing = cur.fetchone()
-
- if existing:
- # Update existing entry (increment amount)
- new_amount = existing[1] + amount
- cur.execute("""
- UPDATE supplies_location
- SET amount = %s, last_modified_by = %s
- WHERE id = %s
- """, (new_amount, current_user_id, existing[0]))
- location_id = existing[0]
+ if has_free:
+ cx, cy = clamp_coords_to_room(cx_raw, cy_raw)
+ if not coords_in_room(cx, cy):
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Coordinates must be inside the map room bounds'}), 400
+
+ existing = sl_repo.select_free_coord_row(cur, supply_id, cx, cy)
+
+ if existing:
+ location_id = existing[0]
+ old_amount = existing[1]
+ if old_amount != 1:
+ sl_repo.update_free_coord_amount_and_user(cur, location_id, current_user_id)
+ sl_repo.touch_supply_last_modified(cur, supply_id, current_user_id)
+ conn.commit()
+ row = sl_repo.fetch_location_row_tuple(cur, location_id)
+ location = SupplyLocation.from_db_row(row)
+ cur.close()
+ conn.close()
+ return jsonify(location.to_dict()), 200
+
+ total_now = map_total_qty_for_supply(cur, supply_id)
+ ok_qty, err_qty = check_unique_type_map_qty(cur, supply_id, total_now + 1)
+ if not ok_qty:
+ cur.close()
+ conn.close()
+ return jsonify({'error': err_qty, 'error_type': 'UNIQUE_TYPE_QTY'}), 400
+
+ location_id = sl_repo.insert_free_coordinate_row(
+ cur, supply_id, cx, cy, current_user_id
+ )
+ log_location_history(
+ conn, 'ADD',
+ supply_id=supply_id,
+ supply_name=supply['name'],
+ location_name=FREE_COORD_HISTORY_LABEL,
+ shelf=None,
+ old_amount=None,
+ new_amount=1,
+ changed_by=current_user_id,
+ batch_id=batch_id,
+ related_location=f'{cx},{cy}',
+ )
else:
- # Insert new entry
- cur.execute("""
- INSERT INTO supplies_location (supply_id, location_name, shelf, amount, last_modified_by)
- VALUES (%s, %s, %s, %s, %s)
- """, (supply_id, location_name, shelf, amount, current_user_id))
- location_id = cur.lastrowid
-
+ shelf = data.get('shelf')
+ location_name = data['location']
+ amount = data['amount']
+
+ total_now = map_total_qty_for_supply(cur, supply_id)
+ ok_qty, err_qty = check_unique_type_map_qty(cur, supply_id, total_now + amount)
+ if not ok_qty:
+ cur.close()
+ conn.close()
+ return jsonify({'error': err_qty, 'error_type': 'UNIQUE_TYPE_QTY'}), 400
+
+ existing = sl_repo.select_box_row(cur, supply_id, location_name, shelf)
+
+ if existing:
+ old_amount = existing[1]
+ new_amount = old_amount + amount
+ sl_repo.update_location_amount(cur, new_amount, current_user_id, existing[0])
+ location_id = existing[0]
+ log_location_history(
+ conn, 'ADD',
+ supply_id=supply_id,
+ supply_name=supply['name'],
+ location_name=location_name,
+ shelf=shelf,
+ old_amount=old_amount,
+ new_amount=new_amount,
+ changed_by=current_user_id,
+ batch_id=batch_id
+ )
+ else:
+ location_id = sl_repo.insert_box_row(
+ cur, supply_id, location_name, shelf, amount, current_user_id
+ )
+ log_location_history(
+ conn, 'ADD',
+ supply_id=supply_id,
+ supply_name=supply['name'],
+ location_name=location_name,
+ shelf=shelf,
+ old_amount=None,
+ new_amount=amount,
+ changed_by=current_user_id,
+ batch_id=batch_id
+ )
+
+ sl_repo.touch_supply_last_modified(cur, supply_id, current_user_id)
+
conn.commit()
-
- # Fetch the created/updated location
- cur.execute("""
- SELECT id, supply_id, location_name, shelf, amount, last_modified, last_modified_by, created_at
- FROM supplies_location WHERE id = %s
- """, (location_id,))
-
- row = cur.fetchone()
+
+ row = sl_repo.fetch_location_row_tuple(cur, location_id)
location = SupplyLocation.from_db_row(row)
-
+
cur.close()
conn.close()
-
+
return jsonify(location.to_dict()), 201
except mysql.connector.IntegrityError as e:
if 'foreign key constraint' in str(e).lower():
return jsonify({'error': 'Supply or location does not exist'}), 400
if 'unique_supply_location_shelf' in str(e).lower():
return jsonify({'error': 'Supply location already exists'}), 400
+ if 'uniq_free_coord_uid' in str(e).lower():
+ return jsonify({
+ 'error': 'A floor marker already exists at these coordinates for this item',
+ 'error_type': 'FREE_COORD_DUPLICATE',
+ }), 409
return jsonify({'error': str(e)}), 400
except Exception as e:
return jsonify({'error': str(e)}), 500
@@ -288,53 +339,128 @@ def update_supply_location(location_id, current_user_id=None):
return jsonify({'error': 'Request body is required'}), 400
conn = get_db()
- cur = conn.cursor()
-
- # Check if location exists
- cur.execute("SELECT id FROM supplies_location WHERE id = %s", (location_id,))
- if not cur.fetchone():
+ cur = conn.cursor(dictionary=True)
+
+ old_location = sl_repo.fetch_location_for_update_join_dict(cur, location_id)
+ if not old_location:
cur.close()
conn.close()
return jsonify({'error': 'Supply location not found'}), 404
- # Build update query
+ cur = conn.cursor()
+
updates = []
values = []
+ old_amount = old_location['amount']
+ old_location_name = old_location['location_name']
+ old_shelf = old_location['shelf']
+ is_free = old_location_name is None and old_location.get('coord_x') is not None
+
+ hist_location = FREE_COORD_HISTORY_LABEL if is_free else old_location_name
+
if 'amount' in data:
if data['amount'] < 0:
cur.close()
conn.close()
return jsonify({'error': 'Amount cannot be negative'}), 400
+ if is_free and int(data['amount']) != 1:
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Free coordinate rows must have amount 1',
+ 'error_type': 'FREE_COORD_AMOUNT',
+ }), 400
updates.append("amount = %s")
values.append(data['amount'])
- if 'shelf' in data:
+ if 'shelf' in data and not is_free:
updates.append("shelf = %s")
values.append(data['shelf'])
if 'location' in data:
+ if is_free:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Cannot change box location on a free-coordinate row'}), 400
updates.append("location_name = %s")
values.append(data['location'])
- # Always update last_modified_by
+ if 'coord_x' in data and 'coord_y' in data:
+ if not is_free:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'coord_x/coord_y only for free-coordinate placements'}), 400
+ cx, cy = clamp_coords_to_room(data['coord_x'], data['coord_y'])
+ if not coords_in_room(cx, cy):
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Coordinates must be inside the map room bounds'}), 400
+ if sl_repo.select_free_coord_conflict(
+ cur, old_location["supply_id"], cx, cy, location_id
+ ):
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Another floor marker already exists at these coordinates for this item',
+ 'error_type': 'FREE_COORD_DUPLICATE',
+ }), 409
+ updates.append("coord_x = %s")
+ updates.append("coord_y = %s")
+ values.extend([cx, cy])
+
updates.append("last_modified_by = %s")
values.append(current_user_id)
values.append(location_id)
- if updates:
- query = f"UPDATE supplies_location SET {', '.join(updates)} WHERE id = %s"
- cur.execute(query, values)
+ if len(updates) > 1:
+ if 'amount' in data:
+ total_now = map_total_qty_for_supply(cur, old_location['supply_id'])
+ proposed = total_now - old_amount + int(data['amount'])
+ ok_qty, err_qty = check_unique_type_map_qty(cur, old_location['supply_id'], proposed)
+ if not ok_qty:
+ cur.close()
+ conn.close()
+ return jsonify({'error': err_qty, 'error_type': 'UNIQUE_TYPE_QTY'}), 400
+
+ sl_repo.update_supplies_location_dynamic(cur, updates, values)
+
+ if 'amount' in data:
+ log_location_history(
+ conn, 'UPDATE',
+ supply_id=old_location['supply_id'],
+ supply_name=old_location['supply_name'],
+ location_name=hist_location,
+ shelf=old_shelf,
+ old_amount=old_amount,
+ new_amount=data['amount'],
+ changed_by=current_user_id,
+ batch_id=str(uuid.uuid4()),
+ related_location=(
+ f"{old_location['coord_x']},{old_location['coord_y']}"
+ if is_free else None
+ ),
+ )
+ elif 'coord_x' in data:
+ log_location_history(
+ conn, 'UPDATE',
+ supply_id=old_location['supply_id'],
+ supply_name=old_location['supply_name'],
+ location_name=hist_location,
+ shelf=old_shelf,
+ old_amount=old_amount,
+ new_amount=old_amount,
+ changed_by=current_user_id,
+ batch_id=str(uuid.uuid4()),
+ related_location=f"{old_location['coord_x']},{old_location['coord_y']}",
+ )
+
+ sl_repo.touch_supply_last_modified(cur, old_location["supply_id"], current_user_id)
+
conn.commit()
-
- # Fetch updated location
- cur.execute("""
- SELECT id, supply_id, location_name, shelf, amount, last_modified, last_modified_by, created_at
- FROM supplies_location WHERE id = %s
- """, (location_id,))
-
- row = cur.fetchone()
+
+ row = sl_repo.fetch_location_row_tuple(cur, location_id)
location = SupplyLocation.from_db_row(row)
cur.close()
@@ -346,6 +472,11 @@ def update_supply_location(location_id, current_user_id=None):
return jsonify({'error': 'Location does not exist'}), 400
if 'unique_supply_location_shelf' in str(e).lower():
return jsonify({'error': 'Supply location already exists at this location/shelf'}), 400
+ if 'uniq_free_coord_uid' in str(e).lower():
+ return jsonify({
+ 'error': 'A floor marker already exists at these coordinates for this item',
+ 'error_type': 'FREE_COORD_DUPLICATE',
+ }), 409
return jsonify({'error': str(e)}), 400
except Exception as e:
return jsonify({'error': str(e)}), 500
@@ -366,16 +497,35 @@ def delete_supply_location(location_id, current_user_id=None):
"""
try:
conn = get_db()
- cur = conn.cursor()
-
- # Check if location exists
- cur.execute("SELECT id FROM supplies_location WHERE id = %s", (location_id,))
- if not cur.fetchone():
+ cur = conn.cursor(dictionary=True)
+
+ location_data = sl_repo.fetch_location_with_join_for_delete_dict(cur, location_id)
+ if not location_data:
cur.close()
conn.close()
return jsonify({'error': 'Supply location not found'}), 404
- cur.execute("DELETE FROM supplies_location WHERE id = %s", (location_id,))
+ del_free = location_data['location_name'] is None and location_data.get('coord_x') is not None
+ log_location_history(
+ conn, 'REMOVE',
+ supply_id=location_data['supply_id'],
+ supply_name=location_data['supply_name'],
+ location_name=FREE_COORD_HISTORY_LABEL if del_free else location_data['location_name'],
+ shelf=location_data['shelf'],
+ old_amount=location_data['amount'],
+ new_amount=None,
+ changed_by=current_user_id,
+ batch_id=str(uuid.uuid4()),
+ related_location=(
+ f"{location_data['coord_x']},{location_data['coord_y']}" if del_free else None
+ ),
+ )
+
+ cur = conn.cursor()
+ sl_repo.delete_by_id(cur, location_id)
+
+ sl_repo.touch_supply_last_modified(cur, location_data["supply_id"], current_user_id)
+
conn.commit()
cur.close()
conn.close()
@@ -422,74 +572,103 @@ def move_supply_locations(current_user_id=None):
return jsonify({'error': 'Amount must be positive'}), 400
conn = get_db()
- cur = conn.cursor()
+ cur = conn.cursor(dictionary=True)
supply_id = data['supply_id']
shelf_from = data.get('shelf_from')
shelf_to = data.get('shelf_to')
amount = data['amount']
- # Get source location
- cur.execute("""
- SELECT id, amount FROM supplies_location
- WHERE supply_id = %s AND location_name = %s AND (shelf = %s OR (shelf IS NULL AND %s IS NULL))
- """, (supply_id, data['from_location'], shelf_from, shelf_from))
+ # Get supply name for history
+ supply = sl_repo.fetch_supply_name_only_dict(cur, supply_id)
+ if not supply:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Supply not found'}), 404
- source = cur.fetchone()
+ supply_name = supply['name']
+
+ source = sl_repo.select_box_row_dict(
+ cur, supply_id, data["from_location"], shelf_from
+ )
if not source:
cur.close()
conn.close()
return jsonify({'error': 'Source supply location not found'}), 404
- source_id, source_amount = source
+ source_id = source['id']
+ source_amount = source['amount']
if amount > source_amount:
cur.close()
conn.close()
return jsonify({'error': f'Cannot move {amount} units. Only {source_amount} available'}), 400
- # Get or create destination location
- cur.execute("""
- SELECT id, amount FROM supplies_location
- WHERE supply_id = %s AND location_name = %s AND (shelf = %s OR (shelf IS NULL AND %s IS NULL))
- """, (supply_id, data['to_location'], shelf_to, shelf_to))
-
- dest = cur.fetchone()
+ dest = sl_repo.select_box_row_dict(
+ cur, supply_id, data["to_location"], shelf_to
+ )
# Calculate new amounts
new_source_amount = source_amount - amount
+ batch_id = str(uuid.uuid4())
+
+ cur = conn.cursor() # Switch to regular cursor for updates
+
if dest:
- dest_id, dest_amount = dest
+ dest_id = dest["id"]
+ dest_amount = dest["amount"]
new_dest_amount = dest_amount + amount
- cur.execute("""
- UPDATE supplies_location
- SET amount = %s, last_modified_by = %s
- WHERE id = %s
- """, (new_dest_amount, current_user_id, dest_id))
+ sl_repo.update_location_amount(cur, new_dest_amount, current_user_id, dest_id)
else:
- cur.execute("""
- INSERT INTO supplies_location (supply_id, location_name, shelf, amount, last_modified_by)
- VALUES (%s, %s, %s, %s, %s)
- """, (supply_id, data['to_location'], shelf_to, amount, current_user_id))
+ sl_repo.insert_box_row(
+ cur, supply_id, data["to_location"], shelf_to, amount, current_user_id
+ )
new_dest_amount = amount
-
- # Update or delete source
+
if new_source_amount > 0:
- cur.execute("""
- UPDATE supplies_location
- SET amount = %s, last_modified_by = %s
- WHERE id = %s
- """, (new_source_amount, current_user_id, source_id))
+ sl_repo.update_location_amount(
+ cur, new_source_amount, current_user_id, source_id
+ )
else:
- cur.execute("DELETE FROM supplies_location WHERE id = %s", (source_id,))
-
+ sl_repo.delete_by_id(cur, source_id)
+
+ # Log history: MOVE action (two rows: REMOVE from source, ADD to dest)
+ log_location_history(
+ conn, 'REMOVE',
+ supply_id=supply_id,
+ supply_name=supply_name,
+ location_name=data['from_location'],
+ shelf=shelf_from,
+ old_amount=source_amount,
+ new_amount=new_source_amount if new_source_amount > 0 else None,
+ changed_by=current_user_id,
+ related_location=data['to_location'],
+ related_shelf=shelf_to,
+ batch_id=batch_id
+ )
+ log_location_history(
+ conn, 'ADD',
+ supply_id=supply_id,
+ supply_name=supply_name,
+ location_name=data['to_location'],
+ shelf=shelf_to,
+ old_amount=dest['amount'] if dest else None,
+ new_amount=new_dest_amount,
+ changed_by=current_user_id,
+ related_location=data['from_location'],
+ related_shelf=shelf_from,
+ batch_id=batch_id
+ )
+
+ sl_repo.touch_supply_last_modified(cur, supply_id, current_user_id)
+
conn.commit()
-
+
result = {
- 'moved': amount,
- 'from_remaining': new_source_amount,
- 'to_total': new_dest_amount
+ "moved": amount,
+ "from_remaining": new_source_amount,
+ "to_total": new_dest_amount,
}
cur.close()
@@ -539,11 +718,20 @@ def bulk_add_supply_locations(current_user_id=None):
return jsonify({'error': 'additions must be a non-empty array'}), 400
conn = get_db()
- cur = conn.cursor()
+ cur = conn.cursor(dictionary=True)
supply_id = data['supply_id']
additions = data['additions']
+ # Get supply name for history
+ supply = sl_repo.fetch_supply_name_only_dict(cur, supply_id)
+ if not supply:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Supply not found'}), 404
+
+ supply_name = supply['name']
+
# Validate all additions
for addition in additions:
if 'location' not in addition or 'amount' not in addition:
@@ -555,58 +743,87 @@ def bulk_add_supply_locations(current_user_id=None):
conn.close()
return jsonify({'error': 'Amount must be positive'}), 400
+ # Generate batch_id for all additions
+ batch_id = str(uuid.uuid4())
+
+ cur = conn.cursor() # Switch to regular cursor for updates
+
# Process all additions in a transaction
results = []
for addition in additions:
location_name = addition['location']
shelf = addition.get('shelf')
amount = addition['amount']
+
+ total_now = map_total_qty_for_supply(cur, supply_id)
+ ok_qty, err_qty = check_unique_type_map_qty(cur, supply_id, total_now + amount)
+ if not ok_qty:
+ conn.rollback()
+ cur.close()
+ conn.close()
+ return jsonify({'error': err_qty, 'error_type': 'UNIQUE_TYPE_QTY'}), 400
- # Check if entry exists
- cur.execute("""
- SELECT id, amount FROM supplies_location
- WHERE supply_id = %s AND location_name = %s AND (shelf = %s OR (shelf IS NULL AND %s IS NULL))
- """, (supply_id, location_name, shelf, shelf))
-
- existing = cur.fetchone()
-
+ existing = sl_repo.select_box_row(cur, supply_id, location_name, shelf)
+
if existing:
- # Increment existing
- new_amount = existing[1] + amount
- cur.execute("""
- UPDATE supplies_location
- SET amount = %s, last_modified_by = %s
- WHERE id = %s
- """, (new_amount, current_user_id, existing[0]))
- results.append({
- 'location': location_name,
- 'shelf': shelf,
- 'action': 'updated',
- 'new_amount': new_amount
- })
+ old_amount = existing[1]
+ new_amount = old_amount + amount
+ sl_repo.update_location_amount(
+ cur, new_amount, current_user_id, existing[0]
+ )
+ results.append(
+ {
+ "location": location_name,
+ "shelf": shelf,
+ "action": "updated",
+ "new_amount": new_amount,
+ }
+ )
+ log_location_history(
+ conn,
+ "ADD",
+ supply_id=supply_id,
+ supply_name=supply_name,
+ location_name=location_name,
+ shelf=shelf,
+ old_amount=old_amount,
+ new_amount=new_amount,
+ changed_by=current_user_id,
+ batch_id=batch_id,
+ )
else:
- # Insert new
- cur.execute("""
- INSERT INTO supplies_location (supply_id, location_name, shelf, amount, last_modified_by)
- VALUES (%s, %s, %s, %s, %s)
- """, (supply_id, location_name, shelf, amount, current_user_id))
+ sl_repo.insert_box_row(
+ cur, supply_id, location_name, shelf, amount, current_user_id
+ )
results.append({
'location': location_name,
'shelf': shelf,
'action': 'created',
'new_amount': amount
})
-
+ # Log history: ADD action (new entry)
+ log_location_history(
+ conn, 'ADD',
+ supply_id=supply_id,
+ supply_name=supply_name,
+ location_name=location_name,
+ shelf=shelf,
+ old_amount=None,
+ new_amount=amount,
+ changed_by=current_user_id,
+ batch_id=batch_id
+ )
+
+ sl_repo.touch_supply_last_modified(cur, supply_id, current_user_id)
+
conn.commit()
-
+
cur.close()
conn.close()
-
- return jsonify({
- 'success': True,
- 'supply_id': supply_id,
- 'results': results
- }), 201
+
+ return jsonify(
+ {"success": True, "supply_id": supply_id, "results": results}
+ ), 201
except mysql.connector.IntegrityError as e:
conn.rollback()
if 'foreign key constraint' in str(e).lower():
diff --git a/src/api/routes/supplies_location_history.py b/src/api/routes/supplies_location_history.py
new file mode 100644
index 0000000..32aa61e
--- /dev/null
+++ b/src/api/routes/supplies_location_history.py
@@ -0,0 +1,416 @@
+"""
+Supply Location History API routes.
+"""
+import sys
+from pathlib import Path
+
+# Add src to path for imports (must be before other imports)
+sys.path.insert(0, str(Path(__file__).parent.parent.parent))
+
+from flask import Blueprint, request, jsonify, session
+import mysql.connector
+from src.api.db import get_db
+from src.api.helpers.datetime_json import db_datetime_to_utc_iso
+from src.api.middleware.auth import require_auth
+from src.api.helpers.history import is_latest_global_history_timestamp
+from src.api.helpers.unique_type_qty import map_total_qty_for_supply, check_unique_type_map_qty
+from src.api.repositories import supplies_location_history_repository as lh_repo
+
+supplies_location_history_bp = Blueprint('supplies_location_history', __name__)
+
+
+@supplies_location_history_bp.route('', methods=['GET'])
+@require_auth
+def get_location_history(current_user_id=None):
+ """
+ GET /api/supplies-location-history
+ Get paginated location history.
+
+ Query parameters:
+ supply_id: Filter by supply ID
+ supply_name: Filter by supply name (for deleted supplies)
+ location_name: Filter by location name
+ limit: Number of results (default 50)
+ offset: Offset for pagination (default 0)
+
+ Returns:
+ JSON array of history entries
+
+ NOTE: Undone entries are DELETED entirely from the database (not just marked as undone).
+ See undo_location_history and undo_batch_history endpoints which DELETE entries.
+ """
+ try:
+ supply_id = request.args.get('supply_id', type=int)
+ supply_name = request.args.get('supply_name')
+ location_name = request.args.get('location_name')
+ limit = request.args.get('limit', default=50, type=int)
+ offset = request.args.get('offset', default=0, type=int)
+
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+
+ rows = lh_repo.fetch_history_page(
+ cur, supply_id, supply_name, location_name, limit, offset
+ )
+
+ # Format results
+ results = []
+ for row in rows:
+ result = {
+ 'id': row['id'],
+ 'supply_id': row['supply_id'],
+ 'supply_name': row['supply_name'],
+ 'location_name': row['location_name'],
+ 'shelf': row['shelf'],
+ 'action_type': row['action_type'],
+ 'old_amount': row['old_amount'],
+ 'new_amount': row['new_amount'],
+ 'related_location': row['related_location'],
+ 'related_shelf': row['related_shelf'],
+ 'batch_id': row['batch_id'],
+ 'undone': bool(row['undone']),
+ 'undone_at': db_datetime_to_utc_iso(row['undone_at']),
+ 'undone_by': row['undone_by'],
+ 'changed_by': row['changed_by'],
+ 'changed_by_name': f"{row['first_name']} {row['last_name']}" if row['first_name'] and row['last_name'] else None,
+ 'changed_at': db_datetime_to_utc_iso(row['changed_at'])
+ }
+ results.append(result)
+
+ cur.close()
+ conn.close()
+
+ return jsonify(results), 200
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
+
+@supplies_location_history_bp.route('/<int:history_id>/undo', methods=['POST'])
+@require_auth
+def undo_location_history(history_id, current_user_id=None):
+ """
+    POST /api/supplies-location-history/<history_id>/undo
+ Undo a single history entry.
+
+ Args:
+ history_id: History entry ID to undo
+
+ Returns:
+ JSON object of the updated history entry
+ """
+ try:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+
+ history = lh_repo.fetch_history_entry_for_undo_dict(cur, history_id)
+ if not history:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'History entry not found'}), 404
+
+ if not session.get('is_leader', False):
+ if not is_latest_global_history_timestamp(cur, history['changed_at']):
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Only the most recent action can be undone.',
+ 'error_type': 'UNDO_NOT_LATEST',
+ }), 403
+
+ # No need to check undone status - if entry exists, it can be undone
+
+ action_type = history['action_type']
+
+ # Handle CASCADED_SUBTRACT separately (cannot undo individual cascaded subtractions)
+ if action_type == 'CASCADED_SUBTRACT':
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Cannot undo individual cascaded subtract entries. Use batch restore endpoint instead.',
+ 'error_type': 'CASCADED_SUBTRACT_ENTRY'
+ }), 400
+
+ cur = conn.cursor() # Switch to regular cursor for updates
+
+ # Undo based on action type
+ if action_type == 'ADD':
+ # Decrement amount by (new_amount - old_amount)
+ # If old_amount was None, decrement by new_amount
+ amount_to_remove = history['new_amount'] - (history['old_amount'] or 0)
+
+ if history["supply_id"]:
+ if not lh_repo.supply_exists_tuple(cur, history["supply_id"]):
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Supply no longer exists',
+ 'error_type': 'SUPPLY_DELETED',
+ 'supply_name': history['supply_name']
+ }), 409
+
+ current = lh_repo.select_location_entry_tuple(
+ cur,
+ history["supply_id"],
+ history["location_name"],
+ history["shelf"],
+ )
+ if current:
+ new_amount = current[1] - amount_to_remove
+ if new_amount <= 0:
+ lh_repo.delete_supplies_location_by_id(cur, current[0])
+ else:
+ lh_repo.update_supplies_location_amount_tuple(
+ cur, new_amount, current_user_id, current[0]
+ )
+ else:
+ # Location entry doesn't exist (already deleted), can't undo
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Location entry no longer exists, cannot undo',
+ 'error_type': 'LOCATION_DELETED'
+ }), 409
+
+ elif action_type == 'REMOVE':
+ # Re-insert or increment location entry with old_amount
+ if not history['supply_id']:
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Supply no longer exists',
+ 'error_type': 'SUPPLY_DELETED',
+ 'supply_name': history['supply_name']
+ }), 409
+
+ if not lh_repo.supply_exists_tuple(cur, history["supply_id"]):
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Supply no longer exists',
+ 'error_type': 'SUPPLY_DELETED',
+ 'supply_name': history['supply_name']
+ }), 409
+
+ existing = lh_repo.select_location_entry_tuple(
+ cur,
+ history["supply_id"],
+ history["location_name"],
+ history["shelf"],
+ )
+ if existing:
+ new_amount = existing[1] + (history["old_amount"] or 0)
+ lh_repo.update_supplies_location_amount_tuple(
+ cur, new_amount, current_user_id, existing[0]
+ )
+ else:
+ lh_repo.insert_supplies_location_box_tuple(
+ cur,
+ history["supply_id"],
+ history["location_name"],
+ history["shelf"],
+ history["old_amount"],
+ current_user_id,
+ )
+
+ elif action_type == 'UPDATE':
+ # Restore old_amount
+ if not history['supply_id']:
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Supply no longer exists',
+ 'error_type': 'SUPPLY_DELETED',
+ 'supply_name': history['supply_name']
+ }), 409
+
+ lh_repo.update_amount_by_supply_location_shelf_tuple(
+ cur,
+ history["old_amount"],
+ current_user_id,
+ history["supply_id"],
+ history["location_name"],
+ history["shelf"],
+ )
+
+ elif action_type == 'MOVE':
+ pcur = conn.cursor(dictionary=True)
+ paired = lh_repo.fetch_paired_move_row_dict(
+ pcur, history["batch_id"], history_id
+ )
+ pcur.close()
+ if not paired:
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Paired MOVE entry not found',
+ 'error_type': 'MOVE_PAIR_MISSING',
+ }), 400
+
+ if history["action_type"] == "REMOVE":
+ existing = lh_repo.select_location_entry_tuple(
+ cur,
+ history["supply_id"],
+ history["location_name"],
+ history["shelf"],
+ )
+ amount_to_restore = history["old_amount"] - (history["new_amount"] or 0)
+ if existing:
+ new_amount = existing[1] + amount_to_restore
+ lh_repo.update_supplies_location_amount_tuple(
+ cur, new_amount, current_user_id, existing[0]
+ )
+ else:
+ lh_repo.insert_supplies_location_box_tuple(
+ cur,
+ history["supply_id"],
+ history["location_name"],
+ history["shelf"],
+ amount_to_restore,
+ current_user_id,
+ )
+
+ dest_existing = lh_repo.select_location_entry_tuple(
+ cur,
+ history["supply_id"],
+ paired["location_name"],
+ paired["shelf"],
+ )
+ if dest_existing:
+ amount_to_remove = paired["new_amount"] - (paired["old_amount"] or 0)
+ new_dest_amount = dest_existing[1] - amount_to_remove
+ if new_dest_amount <= 0:
+ lh_repo.delete_supplies_location_by_id(cur, dest_existing[0])
+ else:
+ lh_repo.update_supplies_location_amount_tuple(
+ cur, new_dest_amount, current_user_id, dest_existing[0]
+ )
+
+ lh_repo.delete_history_by_id(cur, paired["id"])
+
+ sid = history.get('supply_id')
+ if sid:
+ cur_v = conn.cursor(dictionary=True)
+ total = map_total_qty_for_supply(cur_v, sid)
+ ok_qty, err_qty = check_unique_type_map_qty(cur_v, sid, total)
+ cur_v.close()
+ if not ok_qty:
+ conn.rollback()
+ cur.close()
+ conn.close()
+ return jsonify({'error': err_qty, 'error_type': 'UNIQUE_TYPE_QTY'}), 400
+
+ lh_repo.delete_history_by_id(cur, history_id)
+
+ conn.commit()
+ cur.close()
+ conn.close()
+
+ return jsonify({'success': True, 'deleted_id': history_id}), 200
+ except mysql.connector.IntegrityError as e:
+ if 'foreign key constraint' in str(e).lower():
+ return jsonify({
+ 'error': 'Supply or location does not exist',
+ 'error_type': 'UNDO_IMPOSSIBLE',
+ }), 400
+ return jsonify({'error': str(e), 'error_type': 'UNDO_IMPOSSIBLE'}), 400
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
+
+@supplies_location_history_bp.route('/<int:history_id>/discard', methods=['POST'])
+@require_auth
+def discard_location_history(history_id, current_user_id=None):
+ """
+    POST /api/supplies-location-history/<history_id>/discard
+ Delete a location history row (and paired MOVE leg) without changing inventory.
+ Same permission rules as undo (non-leaders: only the latest global history timestamp).
+ """
+ try:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+
+ history = lh_repo.fetch_history_meta_for_discard_dict(cur, history_id)
+ if not history:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'History entry not found'}), 404
+
+ if not session.get('is_leader', False):
+ if not is_latest_global_history_timestamp(cur, history['changed_at']):
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Only the most recent action can be undone.',
+ 'error_type': 'UNDO_NOT_LATEST',
+ }), 403
+
+ if history['action_type'] == 'CASCADED_SUBTRACT':
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Cannot discard cascaded subtract entries individually.',
+ 'error_type': 'CASCADED_SUBTRACT_ENTRY',
+ }), 400
+
+ cur = conn.cursor()
+ paired_id = None
+ if history["action_type"] == "MOVE" and history.get("batch_id"):
+ prow = lh_repo.fetch_paired_id_tuple(
+ cur, history["batch_id"], history_id
+ )
+ if prow:
+ paired_id = prow[0]
+
+ if paired_id is not None:
+ lh_repo.delete_history_by_id(cur, paired_id)
+ lh_repo.delete_history_by_id(cur, history_id)
+ conn.commit()
+ cur.close()
+ conn.close()
+
+ return jsonify({'success': True, 'discarded_id': history_id}), 200
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
+
+@supplies_location_history_bp.route('/batch/<batch_id>/undo', methods=['POST'])
+@require_auth
+def undo_batch_history(batch_id, current_user_id=None):
+ """
+    POST /api/supplies-location-history/batch/<batch_id>/undo
+ Undo all history entries sharing a batch_id atomically by deleting them.
+
+ Args:
+ batch_id: Batch ID (UUID string)
+
+ Returns:
+ JSON object with count of deleted entries
+ """
+ try:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+
+ entries = lh_repo.fetch_batch_entries_ordered_dict(cur, batch_id)
+ if not entries:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'No entries found for this batch'}), 404
+
+ # Delete all entries in the batch
+ deleted_count = 0
+ for entry in entries:
+ lh_repo.delete_history_by_id(cur, entry["id"])
+ deleted_count += 1
+
+ conn.commit()
+ cur.close()
+ conn.close()
+
+ return jsonify({
+ 'success': True,
+ 'batch_id': batch_id,
+ 'deleted_count': deleted_count
+ }), 200
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
diff --git a/src/api/routes/supply_types.py b/src/api/routes/supply_types.py
new file mode 100644
index 0000000..9e64b53
--- /dev/null
+++ b/src/api/routes/supply_types.py
@@ -0,0 +1,463 @@
+"""
+Supply types (item templates): authenticated read; any member may create/update; delete is leader-only.
+"""
+import sys
+import json
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent.parent.parent))
+
+from flask import Blueprint, request, jsonify, session
+
+from src.api.helpers.datetime_json import db_datetime_to_utc_iso
+import mysql.connector
+from src.api.db import get_db
+from src.api.middleware.auth import require_auth, require_leader
+from src.api.helpers.unique_type_qty import type_has_supply_with_map_qty_over_one
+from src.api.repositories import supply_types_repository as repo
+from src.api.repositories import categories_repository as cat_repo
+from src.api.repositories import teams_repository as team_repo
+from src.api.services.supply_catalog_service import normalize_teams
+
+supply_types_bp = Blueprint('supply_types', __name__)
+
+
+def _parse_validate_locked_category_ids(cur, raw):
+ if raw is None:
+ return []
+ if not isinstance(raw, list):
+ raise ValueError('locked_category_ids must be an array')
+ out = []
+ for x in raw:
+ try:
+ out.append(int(x))
+ except (TypeError, ValueError) as exc:
+ raise ValueError('locked_category_ids must contain integers') from exc
+ out = sorted(set(out))
+ if not out:
+ return []
+ rows = cat_repo.list_id_name_ordered(cur)
+ valid = {r['id'] for r in rows}
+ for i in out:
+ if i not in valid:
+ raise ValueError(f'Unknown category id: {i}')
+ return out
+
+
+def _parse_validate_locked_team_names(cur, raw):
+ if raw is None:
+ return []
+ if not isinstance(raw, list):
+ raise ValueError('locked_team_names must be an array')
+ if not all(isinstance(x, str) for x in raw):
+ raise ValueError('locked_team_names must be an array of strings')
+ normalized = normalize_teams(raw)
+ db_names = team_repo.list_team_names_ordered(cur)
+ allowed = {n.lower(): n for n in db_names}
+ out = []
+ for t in normalized:
+ tl = t.lower()
+ if tl not in allowed:
+ raise ValueError(f'Unknown team: {t}')
+ out.append(allowed[tl])
+ return out
+
+
+def _row_to_dict(row):
+ if not row:
+ return None
+ dcf = row.get('default_custom_fields')
+ if isinstance(dcf, str) and dcf:
+ try:
+ dcf = json.loads(dcf)
+ except (TypeError, ValueError):
+ dcf = {}
+ elif dcf is None:
+ dcf = {}
+ lck = row.get('locked_custom_field_keys')
+ if isinstance(lck, str) and lck:
+ try:
+ lck = json.loads(lck)
+ except (TypeError, ValueError):
+ lck = []
+ elif lck is None:
+ lck = []
+ if not isinstance(lck, list):
+ lck = []
+ lci = row.get('locked_category_ids')
+ if isinstance(lci, str) and lci.strip():
+ try:
+ lci = json.loads(lci)
+ except (TypeError, ValueError):
+ lci = []
+ elif lci is None:
+ lci = []
+ if not isinstance(lci, list):
+ lci = []
+ lci_out = []
+ for x in lci:
+ try:
+ lci_out.append(int(x))
+ except (TypeError, ValueError):
+ continue
+ lci_out = sorted(set(lci_out))
+
+ ltn = row.get('locked_team_names')
+ if isinstance(ltn, str) and ltn.strip():
+ try:
+ ltn = json.loads(ltn)
+ except (TypeError, ValueError):
+ ltn = []
+ elif ltn is None:
+ ltn = []
+ if not isinstance(ltn, list):
+ ltn = []
+ ltn_out = normalize_teams([str(x) for x in ltn if x is not None])
+
+ return {
+ 'id': row['id'],
+ 'name': row['name'],
+ 'template_description': row.get('template_description'),
+ 'item_name_prefix': row.get('item_name_prefix') or '',
+ 'item_description_prefix': row.get('item_description_prefix'),
+ 'image': row.get('image'),
+ 'default_custom_fields': dcf,
+ 'locked_custom_field_keys': lck,
+ 'locked_category_ids': lci_out,
+ 'locked_team_names': ltn_out,
+ 'is_unique': bool(row.get('is_unique')),
+ 'prevent_user_edit': bool(row.get('prevent_user_edit')),
+ 'created_at': db_datetime_to_utc_iso(row.get('created_at')),
+ 'updated_at': db_datetime_to_utc_iso(row.get('updated_at')),
+ }
+
+
+def _join_prefix_suffix(prefix, suffix):
+ """Match milventory joinPrefixSuffix: rstrip(prefix) + optional space + trim(suffix)."""
+ p = (prefix or '').rstrip()
+ s = (suffix or '').strip()
+ if not p:
+ return s
+ if not s:
+ return p
+ sep = '' if p.endswith(' ') else ' '
+ return f"{p}{sep}{s}"
+
+
+def _suffix_after_prefix(full, prefix):
+ op = (prefix or '').strip()
+ if not op:
+ return None
+ fn = (full or '').strip()
+ if fn.startswith(op):
+ return fn[len(op):].lstrip()
+ return None
+
+
+def _recompute_linked_supply_name(old_name, old_np, new_np):
+ """Rebuild supply.name after type item_name_prefix change; None = leave unchanged."""
+ old_np = (old_np or '').strip()
+ new_np = (new_np or '').strip()
+ old_name = (old_name or '').strip()
+ if not old_np:
+ if not new_np:
+ return None
+ return _join_prefix_suffix(new_np, old_name)
+ su = _suffix_after_prefix(old_name, old_np)
+ if su is None:
+ return None
+ return _join_prefix_suffix(new_np, su)
+
+
+def _recompute_linked_supply_description(old_desc, old_dp, new_dp):
+ """Rebuild supply.description after type item_description_prefix change; None = leave unchanged."""
+ old_dp = (old_dp or '').strip() if old_dp else ''
+ new_dp = (new_dp or '').strip() if new_dp else ''
+ old_d = (old_desc or '').strip() if old_desc else ''
+ if not old_dp:
+ if not new_dp:
+ return None
+ return _join_prefix_suffix(new_dp, old_d) if old_d else new_dp
+ if not old_d:
+ return new_dp if new_dp else None
+ su = _suffix_after_prefix(old_d, old_dp)
+ if su is None:
+ return None
+ out = _join_prefix_suffix(new_dp, su)
+ return out if out else (su or None)
+
+
+def _desc_norm(d):
+ if d is None:
+ return ''
+ return str(d).strip()
+
+
+@supply_types_bp.route('', methods=['GET'])
+@require_auth
+def list_supply_types(current_user_id=None):
+ try:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ rows = repo.list_all_dict(cur)
+ cur.close()
+ conn.close()
+ return jsonify([_row_to_dict(r) for r in rows]), 200
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
+
+@supply_types_bp.route('/<int:type_id>', methods=['GET'])
+@require_auth
+def get_supply_type(type_id, current_user_id=None):
+ try:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ row = repo.fetch_by_id_dict(cur, type_id)
+ cur.close()
+ conn.close()
+ if not row:
+ return jsonify({'error': 'Type not found'}), 404
+ return jsonify(_row_to_dict(row)), 200
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
+
+@supply_types_bp.route('', methods=['POST'])
+@require_leader
+def create_supply_type(current_user_id=None):
+ try:
+ data = request.json or {}
+ name = (data.get('name') or '').strip()
+ if not name:
+ return jsonify({'error': 'Name is required'}), 400
+
+ template_description = (data.get('template_description') or '').strip() or None
+ item_name_prefix = (data.get('item_name_prefix') or '').strip()
+ item_description_prefix = (data.get('item_description_prefix') or '').strip() or None
+ image = data.get('image') or None
+ dcf = data.get('default_custom_fields')
+ if dcf is not None and not isinstance(dcf, dict):
+ return jsonify({'error': 'default_custom_fields must be an object'}), 400
+ lck = data.get('locked_custom_field_keys')
+ if lck is not None and not isinstance(lck, list):
+ return jsonify({'error': 'locked_custom_field_keys must be an array'}), 400
+ is_unique = 1 if data.get('is_unique') else 0
+ prevent_user_edit = 1 if data.get('prevent_user_edit') else 0
+
+ if image and str(image).startswith('data:image'):
+ b64 = str(image).split(',', 1)[1] if ',' in str(image) else ''
+ if len(b64) > 13_300_000:
+ return jsonify({'error': 'Image file size exceeds 10MB limit'}), 400
+
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ try:
+ lci = _parse_validate_locked_category_ids(cur, data.get('locked_category_ids'))
+ ltn = _parse_validate_locked_team_names(cur, data.get('locked_team_names'))
+ except ValueError as ve:
+ cur.close()
+ conn.close()
+ return jsonify({'error': str(ve)}), 400
+
+ tid = repo.insert_supply_type(
+ cur,
+ name,
+ template_description,
+ item_name_prefix,
+ item_description_prefix,
+ image,
+ json.dumps(dcf) if dcf else None,
+ json.dumps(lck) if lck else None,
+ json.dumps(lci) if lci else None,
+ json.dumps(ltn) if ltn else None,
+ is_unique,
+ prevent_user_edit,
+ )
+ conn.commit()
+ row = repo.fetch_by_id_dict(cur, tid)
+ cur.close()
+ conn.close()
+ return jsonify(_row_to_dict(row)), 201
+ except mysql.connector.IntegrityError as e:
+ if 'Duplicate' in str(e) or 'unique' in str(e).lower():
+ return jsonify({'error': 'A type with this name already exists'}), 400
+ return jsonify({'error': str(e)}), 400
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
+
+@supply_types_bp.route('/<int:type_id>', methods=['PUT'])
+@require_auth
+def update_supply_type(type_id, current_user_id=None):
+ try:
+ data = request.json or {}
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ before = repo.fetch_by_id_dict(cur, type_id)
+ if not before:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Type not found'}), 404
+
+ is_leader = session.get('is_leader', False)
+ if not is_leader and bool(before.get('prevent_user_edit')):
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'This item type can only be edited by a leader.'}), 403
+ if not is_leader and 'prevent_user_edit' in data:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Leader access required'}), 403
+
+ old_np = (before.get('item_name_prefix') or '').strip()
+ odp = before.get('item_description_prefix')
+ old_dp = (odp or '').strip() if odp else ''
+ new_np = old_np
+ new_dp = old_dp
+ if 'item_name_prefix' in data:
+ new_np = (data.get('item_name_prefix') or '').strip()
+ if 'item_description_prefix' in data:
+ r = data.get('item_description_prefix')
+ if r is None:
+ new_dp = ''
+ else:
+ new_dp = (str(r) or '').strip()
+
+ if 'is_unique' in data and data.get('is_unique'):
+ if type_has_supply_with_map_qty_over_one(cur, type_id):
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': (
+ 'Cannot mark this item type as unique: one item using this type already has more than '
+ '1 quantity on the map. Reduce that item to 1 quantity before enabling unique.'
+ )
+ }), 400
+
+ fields = []
+ vals = []
+ if 'name' in data:
+ fields.append('name = %s')
+ vals.append((data.get('name') or '').strip())
+ if 'template_description' in data:
+ fields.append('template_description = %s')
+ v = (data.get('template_description') or '').strip() or None
+ vals.append(v)
+ if 'item_name_prefix' in data:
+ fields.append('item_name_prefix = %s')
+ vals.append((data.get('item_name_prefix') or '').strip())
+ if 'item_description_prefix' in data:
+ fields.append('item_description_prefix = %s')
+ v = (data.get('item_description_prefix') or '').strip() or None
+ vals.append(v)
+ if 'image' in data:
+ fields.append('image = %s')
+ vals.append(data.get('image') or None)
+ if 'default_custom_fields' in data:
+ dcf = data.get('default_custom_fields')
+ if dcf is not None and not isinstance(dcf, dict):
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'default_custom_fields must be an object'}), 400
+ fields.append('default_custom_fields = %s')
+ vals.append(json.dumps(dcf) if dcf else None)
+ if 'locked_custom_field_keys' in data:
+ lck = data.get('locked_custom_field_keys')
+ if lck is not None and not isinstance(lck, list):
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'locked_custom_field_keys must be an array'}), 400
+ fields.append('locked_custom_field_keys = %s')
+ vals.append(json.dumps(lck) if lck else None)
+ if 'is_unique' in data:
+ fields.append('is_unique = %s')
+ vals.append(1 if data.get('is_unique') else 0)
+ if 'prevent_user_edit' in data:
+ fields.append('prevent_user_edit = %s')
+ vals.append(1 if data.get('prevent_user_edit') else 0)
+ if 'locked_category_ids' in data:
+ try:
+ lci = _parse_validate_locked_category_ids(cur, data.get('locked_category_ids'))
+ except ValueError as ve:
+ cur.close()
+ conn.close()
+ return jsonify({'error': str(ve)}), 400
+ fields.append('locked_category_ids = %s')
+ vals.append(json.dumps(lci) if lci else None)
+ if 'locked_team_names' in data:
+ try:
+ ltn = _parse_validate_locked_team_names(cur, data.get('locked_team_names'))
+ except ValueError as ve:
+ cur.close()
+ conn.close()
+ return jsonify({'error': str(ve)}), 400
+ fields.append('locked_team_names = %s')
+ vals.append(json.dumps(ltn) if ltn else None)
+
+ if fields:
+ vals.append(type_id)
+ repo.update_supply_type_columns(cur, fields, vals)
+
+ cascade_name = 'item_name_prefix' in data
+ cascade_desc = 'item_description_prefix' in data
+ if cascade_name or cascade_desc:
+ sup_rows = repo.select_supplies_id_name_desc_for_type(cur, type_id)
+ updates = []
+ for s in sup_rows:
+ nm = s['name']
+ dc = s['description']
+ if cascade_name:
+ nn = _recompute_linked_supply_name(s['name'], old_np, new_np)
+ if nn is not None:
+ nm = nn
+ if cascade_desc:
+ nd = _recompute_linked_supply_description(s['description'], old_dp, new_dp)
+ if nd is not None:
+ dc = nd
+ if nm != s['name'] or _desc_norm(dc) != _desc_norm(s['description']):
+ updates.append((s['id'], nm, dc))
+ proposed = [u[1] for u in updates]
+ if len(proposed) != len(set(proposed)):
+ conn.rollback()
+ cur.close()
+ conn.close()
+ return jsonify({
+ 'error': 'Updating prefixes would create duplicate item names for this type.',
+ }), 400
+ for sid, nm, dc in updates:
+ repo.update_supply_name_description(cur, sid, nm, dc)
+
+ conn.commit()
+
+ row = repo.fetch_by_id_dict(cur, type_id)
+ cur.close()
+ conn.close()
+ return jsonify(_row_to_dict(row)), 200
+ except mysql.connector.IntegrityError as e:
+ if 'Duplicate' in str(e) or 'unique' in str(e).lower():
+ return jsonify({'error': 'A type with this name already exists'}), 400
+ return jsonify({'error': str(e)}), 400
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
+
+@supply_types_bp.route('/<int:type_id>', methods=['DELETE'])
+@require_leader
+def delete_supply_type(type_id, current_user_id=None):
+ try:
+ conn = get_db()
+ cur = conn.cursor()
+ deleted = repo.delete_supply_type_by_id(cur, type_id)
+ if deleted == 0:
+ cur.close()
+ conn.close()
+ return jsonify({'error': 'Type not found'}), 404
+ conn.commit()
+ cur.close()
+ conn.close()
+ return '', 204
+ except mysql.connector.IntegrityError as e:
+ return jsonify({'error': str(e)}), 400
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
diff --git a/src/api/routes/teams.py b/src/api/routes/teams.py
index 331036f..d5bf17e 100644
--- a/src/api/routes/teams.py
+++ b/src/api/routes/teams.py
@@ -1,35 +1,24 @@
"""Teams API routes."""
from flask import Blueprint, jsonify
import mysql.connector
-import os
-from src.scripts.helpers import parse_database_url
-teams_bp = Blueprint('teams', __name__)
+from src.api.db import get_db
+from src.api.repositories import teams_repository as repo
+teams_bp = Blueprint("teams", __name__)
-def get_db_connection():
- """Get database connection."""
- database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
- db_params = parse_database_url(database_url)
- return mysql.connector.connect(**db_params)
-
-@teams_bp.route('/teams', methods=['GET'])
+@teams_bp.route("/teams", methods=["GET"])
def get_teams():
"""Get all teams."""
try:
- conn = get_db_connection()
+ conn = get_db()
cur = conn.cursor()
-
- cur.execute("SELECT name FROM teams ORDER BY name")
- teams = [row[0] for row in cur.fetchall()]
-
+ teams = repo.list_team_names_ordered(cur)
cur.close()
conn.close()
-
- return jsonify({'teams': teams}), 200
+ return jsonify({"teams": teams}), 200
except mysql.connector.Error as e:
- return jsonify({'error': f'Database error: {str(e)}'}), 500
+ return jsonify({"error": f"Database error: {str(e)}"}), 500
except Exception as e:
- return jsonify({'error': f'Unexpected error: {str(e)}'}), 500
-
+ return jsonify({"error": f"Unexpected error: {str(e)}"}), 500
diff --git a/src/api/services/__init__.py b/src/api/services/__init__.py
new file mode 100644
index 0000000..e17fc9c
--- /dev/null
+++ b/src/api/services/__init__.py
@@ -0,0 +1 @@
+"""Application services (orchestration, validation)."""
diff --git a/src/api/services/supply_catalog_service.py b/src/api/services/supply_catalog_service.py
new file mode 100644
index 0000000..3926253
--- /dev/null
+++ b/src/api/services/supply_catalog_service.py
@@ -0,0 +1,1002 @@
+"""Supplies catalog: validation, history, and API-shaped responses."""
+from __future__ import annotations
+
+import json
+import math
+import uuid
+from typing import Any, Dict, List, Optional, Tuple
+
+import mysql.connector
+from flask import session
+
+from src.api.db import get_db
+from src.api.helpers.datetime_json import db_datetime_to_utc_iso
+from src.api.helpers.history import (
+ get_supply_current_state,
+ is_latest_global_history_timestamp,
+ log_category_changes,
+ log_supply_history,
+ log_team_changes,
+ snapshot_supply_locations_before_delete,
+)
+from src.api.models.supply import Supply
+from src.api.repositories import custom_field_definitions_repository as cf_repo
+from src.api.repositories import supplies_repository as repo
+
+
+class CatalogError(Exception):  # Service-layer error carrying an HTTP status + JSON body for the route to return.
+    __slots__ = ("status", "body")  # no instance __dict__; only these two attributes exist
+
+    def __init__(self, status: int, body: dict):
+        self.status = status  # HTTP status code (e.g. 400, 404) for the response
+        self.body = body  # dict serialized as the JSON error payload
+        super().__init__(str(body))  # str(body) becomes the exception message in logs/tracebacks
+
+
+def effective_supply_image(supply_image, type_image):  # The type's template image always wins over the item's own image.
+    if type_image:
+        return type_image  # truthy type image overrides any per-supply image
+    return supply_image if supply_image else None  # normalize falsy values ("" etc.) to None
+
+
+def type_has_template_image(type_row) -> bool:  # True iff the (possibly None) type row has a truthy "image" value.
+    return bool(type_row and type_row.get("image"))
+
+
+def json_load_maybe(val, default=None):  # Best-effort parse of a JSON-object cell; returns `default` ({} when None) otherwise.
+    if default is None:
+        default = {}  # fresh dict per call — avoids the shared-mutable-default pitfall
+    if val is None:
+        return default
+    if isinstance(val, dict):
+        return val  # already parsed (e.g. by a JSON-aware DB driver)
+    if isinstance(val, str) and val.strip():
+        try:
+            parsed = json.loads(val)
+            return parsed if isinstance(parsed, dict) else default  # reject non-object JSON (lists, scalars)
+        except (TypeError, ValueError):
+            return default  # malformed JSON degrades to the default, never raises
+    return default
+
+
+def merge_custom_fields_from_type(type_row, user_cf):  # Overlay user custom fields on the type's defaults; locked keys are forced back to the type's value.
+    defaults = json_load_maybe(type_row.get("default_custom_fields"), {})
+    if not isinstance(defaults, dict):
+        defaults = {}
+    locked = type_row.get("locked_custom_field_keys")  # may arrive as a JSON string, list, or None
+    if isinstance(locked, str) and locked.strip():
+        try:
+            locked = json.loads(locked)
+        except (TypeError, ValueError):
+            locked = []
+    elif not isinstance(locked, list):
+        locked = []
+    merged = dict(defaults)
+    if isinstance(user_cf, dict):
+        merged.update(user_cf)  # user values override defaults for unlocked keys
+    for k in locked:
+        if k in defaults:
+            merged[k] = defaults[k]  # locked key: the type's default wins over the user value
+        elif k not in merged:
+            merged[k] = ""  # locked key without a default: ensure the key at least exists
+    return merged
+
+
+def json_load_int_list(val) -> List[int]:  # Parse a JSON int-list cell into a sorted, de-duplicated List[int]; [] on any malformed input.
+    if val is None:
+        return []
+    if isinstance(val, str) and val.strip():
+        try:
+            val = json.loads(val)
+        except (TypeError, ValueError):
+            return []
+    if not isinstance(val, list):
+        return []
+    out: List[int] = []
+    for x in val:
+        try:
+            out.append(int(x))
+        except (TypeError, ValueError):
+            continue  # silently skip entries that are not int-coercible
+    return sorted(set(out))  # dedupe + stable ascending order
+
+
+def locked_team_names_from_type_row(type_row) -> List[str]:
+ if not type_row:
+ return []
+ raw = type_row.get("locked_team_names")
+ if isinstance(raw, str) and raw.strip():
+ try:
+ raw = json.loads(raw)
+ except (TypeError, ValueError):
+ return []
+ if not isinstance(raw, list):
+ return []
+ return normalize_teams([str(x) for x in raw if x is not None])
+
+
+def locked_category_ids_from_type_row(type_row) -> List[int]:
+ if not type_row:
+ return []
+ raw = type_row.get("locked_category_ids")
+ if isinstance(raw, str) and raw.strip():
+ try:
+ raw = json.loads(raw)
+ except (TypeError, ValueError):
+ return []
+ return json_load_int_list(raw)
+
+
+def merge_categories_with_type_locks(type_row, user_category_ids) -> List[int]:  # Union of type-locked category ids and int-coercible user ids, sorted + deduped.
+    locked = locked_category_ids_from_type_row(type_row)
+    user: List[int] = []
+    for x in user_category_ids or []:
+        try:
+            user.append(int(x))
+        except (TypeError, ValueError):
+            continue  # ignore non-numeric ids from the client
+    return sorted(set(locked + user))
+
+
+def merge_teams_with_type_locks(type_row, user_teams_raw) -> List[str]:  # Locked teams first, then user teams; case-insensitive dedupe keeps the first spelling seen.
+    locked = locked_team_names_from_type_row(type_row)
+    user = normalize_teams(user_teams_raw or [])
+    seen = set()
+    out: List[str] = []
+    for t in locked + user:
+        tl = t.lower()
+        if tl not in seen:
+            seen.add(tl)
+            out.append(t)
+    return out
+
+
+def validate_custom_fields(custom_fields, allowed_names) -> Tuple[bool, Optional[str]]:  # Returns (ok, error-message); empty/None input is considered valid.
+    if not custom_fields:
+        return True, None
+    if not isinstance(custom_fields, dict):
+        return False, "custom_fields must be an object"
+    for key in custom_fields:
+        if key not in allowed_names:
+            return False, f"Unknown custom field: {key}"
+    return True, None
+
+
+def _locked_custom_field_key_list(type_row) -> List[str]:
+ if not type_row:
+ return []
+ locked = type_row.get("locked_custom_field_keys")
+ if isinstance(locked, str) and locked.strip():
+ try:
+ locked = json.loads(locked)
+ except (TypeError, ValueError):
+ return []
+ if not isinstance(locked, list):
+ return []
+ return [str(k) for k in locked if k is not None]
+
+
+def _merged_cf_value_is_present_for_type(field_type: str, val: Any) -> bool:  # "Non-empty" check interpreted per field type (number/date/text-like).
+    ft = (field_type or "text").strip().lower()
+    if ft == "number":
+        if val is None:
+            return False
+        if isinstance(val, bool):
+            return False  # bool is an int subclass; reject it explicitly for number fields
+        if isinstance(val, (int, float)):
+            return math.isfinite(float(val))  # NaN / inf do not count as present
+        if isinstance(val, str):
+            s = val.strip()
+            if not s:
+                return False
+            try:
+                x = float(s)
+            except (TypeError, ValueError):
+                return False
+            return math.isfinite(x)
+        return False
+    if ft == "date":
+        if val is None:
+            return False
+        return bool(str(val).strip())  # any non-blank string representation counts
+    if val is None:
+        return False
+    if isinstance(val, (int, float)) and not isinstance(val, bool):
+        return True  # any real numeric value counts for text-like fields
+    return bool(str(val).strip())
+
+
+def validate_locked_custom_fields_filled(type_row, merged_cf, cur) -> Tuple[bool, Optional[str]]:
+ """Every key in the type's locked_custom_field_keys must have a non-empty merged value."""
+ locked = _locked_custom_field_key_list(type_row)
+ if not locked:
+ return True, None
+ rows = cf_repo.list_id_name_type_ordered(cur)
+ name_to_type = {str(r[1]): str(r[2]) for r in rows}
+ cf: Dict[str, Any] = merged_cf if isinstance(merged_cf, dict) else {}
+ for key in locked:
+ ft = name_to_type.get(key, "text")
+ if not _merged_cf_value_is_present_for_type(ft, cf.get(key)):
+ return False, f'Custom field "{key}" is required for this item type.'
+ return True, None
+
+
+def validate_name_desc_prefixes(type_row, name, description) -> Tuple[bool, Optional[str]]:  # Enforce the type's required name/description prefixes; returns (ok, error).
+    np = (type_row.get("item_name_prefix") or "").strip()
+    if np:
+        nm = (name or "").strip()
+        if not nm.startswith(np):
+            return False, "Name must begin with the type prefix."
+    ndp_raw = type_row.get("item_description_prefix")
+    ndp = (ndp_raw or "").strip() if ndp_raw else ""
+    if ndp:
+        desc_str = (description or "").strip() if description is not None else ""
+        if desc_str and not desc_str.startswith(ndp):  # an empty description is allowed even when a prefix exists
+            return False, "Description must begin with the type prefix."
+    return True, None
+
+
+def normalize_teams(teams_raw: Optional[List]) -> List[str]:
+ out = []
+ for team in teams_raw or []:
+ tl = team.lower()
+ if tl == "software":
+ out.append("Software")
+ elif tl == "electrical":
+ out.append("Electrical")
+ elif tl == "mechanical":
+ out.append("Mechanical")
+ else:
+ out.append(team.capitalize())
+ return out
+
+
+def parse_custom_fields_cell(cf) -> dict:  # Normalize a DB custom_fields cell (JSON str | dict | None) to a dict; {} on bad data.
+    if isinstance(cf, str) and cf:
+        try:
+            cf = json.loads(cf)
+        except (TypeError, ValueError):
+            cf = {}  # malformed JSON is treated as "no custom fields"
+    elif cf is None:
+        cf = {}
+    return cf if isinstance(cf, dict) else {}  # non-object JSON (list/scalar) also collapses to {}
+
+
+def location_db_rows_to_api(loc_rows: List[dict]) -> List[dict]:
+ locations = []
+ for loc_row in loc_rows:
+ if loc_row["location_name"] is None and loc_row["coord_x"] is not None:
+ locations.append(
+ {
+ "location": "Free Coordinate",
+ "shelf": loc_row["shelf"],
+ "qty": loc_row["amount"],
+ "coord_x": int(loc_row["coord_x"]),
+ "coord_y": int(loc_row["coord_y"]),
+ "supply_location_id": loc_row["id"],
+ }
+ )
+ else:
+ locations.append(
+ {
+ "location": loc_row["location_name"],
+ "shelf": loc_row["shelf"],
+ "qty": loc_row["amount"],
+ "supply_location_id": loc_row["id"],
+ }
+ )
+ return locations
+
+
+def _attach_last_modified_names(supply_dict: dict, member: Optional[dict]) -> None:  # Mutates supply_dict in place; no-op when the member lookup returned nothing.
+    if member:
+        supply_dict["last_modified_by_name"] = f"{member['first_name']} {member['last_name']}"
+        supply_dict["last_modified_by_email"] = member["uf_email"]
+
+
+def list_supplies() -> List[dict]:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ try:
+ rows = repo.list_supplies_aggregate_rows(cur)
+ if not rows:
+ return []
+ ids = [r["id"] for r in rows]
+ loc_map = repo.fetch_locations_by_supply_ids(cur, ids)
+ team_map = repo.fetch_teams_by_supply_ids(cur, ids)
+ cat_map = repo.fetch_categories_by_supply_ids(cur, ids)
+ uf_ids = list({str(r["last_modified_by"]) for r in rows if r.get("last_modified_by")})
+ mem_map = repo.fetch_members_by_uf_ids(cur, uf_ids)
+
+ supplies = []
+ for row in rows:
+ sid = row["id"]
+ cf = parse_custom_fields_cell(row.get("custom_fields"))
+ supply_dict = {
+ "id": sid,
+ "public_id": row["public_id"],
+ "name": row["name"],
+ "description": row["description"],
+ "image": effective_supply_image(row.get("image"), row.get("type_image")),
+ "type_has_template_image": bool(row.get("type_image")),
+ "custom_fields": cf,
+ "supply_type_id": row.get("supply_type_id"),
+ "type_name": row.get("type_name"),
+ "lastModified": db_datetime_to_utc_iso(row["last_modified"]),
+ "last_modified_by": row["last_modified_by"],
+ "totalQty": int(row["totalQty"]),
+ "locations": location_db_rows_to_api(loc_map.get(sid, [])),
+ "teams": team_map.get(sid, []),
+ "categories": cat_map.get(sid, []),
+ }
+ if row["last_order_date"]:
+ supply_dict["last_order_date"] = db_datetime_to_utc_iso(row["last_order_date"])
+ if row["last_modified_by"]:
+ m = mem_map.get(str(row["last_modified_by"]))
+ _attach_last_modified_names(supply_dict, m)
+ supplies.append(supply_dict)
+ return supplies
+ finally:
+ cur.close()
+ conn.close()
+
+
+def get_supply(supply_id: int) -> dict:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ try:
+ row = repo.fetch_supply_detail_aggregate_row(cur, supply_id)
+ if not row:
+ raise CatalogError(404, {"error": "Supply not found"})
+ cf = parse_custom_fields_cell(row.get("custom_fields"))
+ loc_rows = repo.fetch_locations_ordered_for_supply(cur, supply_id)
+ teams = repo.fetch_teams_for_supply_ordered(cur, supply_id)
+ category_ids = repo.fetch_categories_for_supply_ordered(cur, supply_id)
+ supply_dict = {
+ "id": row["id"],
+ "public_id": row["public_id"],
+ "name": row["name"],
+ "description": row["description"],
+ "image": effective_supply_image(row.get("image"), row.get("type_image")),
+ "type_has_template_image": bool(row.get("type_image")),
+ "custom_fields": cf,
+ "supply_type_id": row.get("supply_type_id"),
+ "type_name": row.get("type_name"),
+ "lastModified": db_datetime_to_utc_iso(row["last_modified"]),
+ "last_modified_by": row["last_modified_by"],
+ "totalQty": int(row["totalQty"]),
+ "locations": location_db_rows_to_api(loc_rows),
+ "teams": teams,
+ "categories": category_ids,
+ }
+ if row["last_order_date"]:
+ supply_dict["last_order_date"] = db_datetime_to_utc_iso(row["last_order_date"])
+ if row["last_modified_by"]:
+ m = repo.fetch_member_by_uf_id(cur, row["last_modified_by"])
+ _attach_last_modified_names(supply_dict, m)
+ return supply_dict
+ finally:
+ cur.close()
+ conn.close()
+
+
+def create_supply(data: dict, current_user_id: str) -> dict:
+ if not data:
+ raise CatalogError(400, {"error": "Request body is required"})
+ if "name" not in data or not data["name"].strip():
+ raise CatalogError(400, {"error": "Name is required"})
+
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ try:
+ custom_fields = data.get("custom_fields")
+ supply_type_id_raw = data.get("supply_type_id")
+ type_row = None
+ tid_insert = None
+ if supply_type_id_raw is not None and supply_type_id_raw != "":
+ try:
+ tid_insert = int(supply_type_id_raw)
+ except (TypeError, ValueError):
+ raise CatalogError(400, {"error": "Invalid supply_type_id"})
+ type_row = repo.fetch_supply_type_row(cur, tid_insert)
+ if not type_row:
+ raise CatalogError(400, {"error": "Supply type not found"})
+ custom_fields = merge_custom_fields_from_type(type_row, custom_fields)
+
+ allowed = repo.fetch_allowed_custom_field_names(cur)
+ ok, err = validate_custom_fields(custom_fields, allowed)
+ if not ok:
+ raise CatalogError(400, {"error": err})
+ if type_row:
+ okl, errl = validate_locked_custom_fields_filled(type_row, custom_fields, cur)
+ if not okl:
+ raise CatalogError(400, {"error": errl})
+
+ name_final = data["name"].strip()
+ desc_final = data.get("description", "").strip() or None
+
+ if type_row and type_has_template_image(type_row):
+ if data.get("image"):
+ raise CatalogError(
+ 400,
+ {
+ "error": "Items linked to a type that has a template image cannot use a separate item image.",
+ },
+ )
+ image_final = None
+ else:
+ image_final = data.get("image") or None
+ if image_final and str(image_final).startswith("data:image"):
+ base64_part = image_final.split(",", 1)[1] if "," in image_final else ""
+ if len(base64_part) > 13_300_000:
+ raise CatalogError(400, {"error": "Image file size exceeds 10MB limit"})
+
+ if type_row:
+ okp, errp = validate_name_desc_prefixes(type_row, name_final, desc_final)
+ if not okp:
+ raise CatalogError(400, {"error": errp})
+
+ cf_json = json.dumps(custom_fields) if custom_fields else None
+ new_public_id = str(uuid.uuid4())
+ supply_id = repo.insert_supply(
+ cur,
+ new_public_id,
+ name_final,
+ desc_final,
+ image_final,
+ cf_json,
+ data.get("last_order_date") or None,
+ current_user_id,
+ tid_insert,
+ )
+
+ cats_final = merge_categories_with_type_locks(type_row, data.get("categories"))
+ teams_final = merge_teams_with_type_locks(type_row, data.get("teams"))
+
+ for team_name in teams_final:
+ repo.insert_supply_team_ignore(cur, supply_id, team_name)
+
+ for cat_id in cats_final:
+ repo.insert_supply_category_ignore(cur, supply_id, cat_id)
+
+ new_values = {
+ "name": name_final,
+ "description": desc_final,
+ "image": image_final,
+ "last_order_date": data.get("last_order_date") or None,
+ }
+ history_id = log_supply_history(conn, supply_id, "CREATE", {}, new_values, current_user_id)
+ log_team_changes(conn, history_id, [], teams_final)
+ log_category_changes(conn, history_id, [], cats_final)
+
+ conn.commit()
+
+ row = repo.fetch_supply_with_type_join(cur, supply_id)
+ cf = parse_custom_fields_cell(row.get("custom_fields"))
+ teams = repo.fetch_teams_for_supply_ordered(cur, supply_id)
+ category_ids = repo.fetch_categories_for_supply_ordered(cur, supply_id)
+ supply = Supply.from_dict(row).to_dict()
+ supply["public_id"] = row.get("public_id")
+ supply["image"] = effective_supply_image(row.get("image"), row.get("type_image"))
+ supply["type_has_template_image"] = bool(row.get("type_image"))
+ supply["custom_fields"] = cf
+ supply["supply_type_id"] = row.get("supply_type_id")
+ supply["type_name"] = row.get("type_name")
+ supply["totalQty"] = 0
+ supply["locations"] = []
+ supply["teams"] = teams
+ supply["categories"] = category_ids
+ return supply
+ except mysql.connector.IntegrityError:
+ conn.rollback()
+ raise
+ except CatalogError:
+ conn.rollback()
+ raise
+ finally:
+ cur.close()
+ conn.close()
+
+
+def update_supply(supply_id: int, data: dict, current_user_id: str) -> dict:
+ if not data:
+ raise CatalogError(400, {"error": "Request body is required"})
+
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ try:
+ supply_check = repo.fetch_supply_id_type(cur, supply_id)
+ if not supply_check:
+ raise CatalogError(
+ 404,
+ {
+ "error": "Supply not found",
+ "error_type": "SUPPLY_DELETED",
+ "supply_id": supply_id,
+ "message": "This item was deleted by another user. Please refresh the page to see the latest data.",
+ },
+ )
+
+ current_state = get_supply_current_state(conn, supply_id)
+ old_values = {
+ "name": current_state["name"],
+ "description": current_state["description"],
+ "image": current_state["image"],
+ "last_order_date": current_state["last_order_date"],
+ }
+ old_teams = current_state["teams"]
+ old_categories = current_state["categories"]
+
+ requested_type_present = "supply_type_id" in data
+ unlink_from_type = bool(data.get("unlink_from_type")) or (
+ requested_type_present and (data.get("supply_type_id") is None or data.get("supply_type_id") == "")
+ )
+ old_type_id = supply_check.get("supply_type_id")
+ effective_type_id = None if unlink_from_type else old_type_id
+ type_row_update = None
+ if requested_type_present and not unlink_from_type:
+ try:
+ effective_type_id = int(data.get("supply_type_id"))
+ except (TypeError, ValueError):
+ raise CatalogError(400, {"error": "Invalid supply_type_id"})
+ type_row_update = repo.fetch_supply_type_row(cur, effective_type_id)
+ if not type_row_update:
+ raise CatalogError(400, {"error": "Supply type not found"})
+ if effective_type_id:
+ if not type_row_update:
+ type_row_update = repo.fetch_supply_type_row(cur, effective_type_id)
+ if not type_row_update:
+ effective_type_id = None
+
+ has_type_template_image = type_has_template_image(type_row_update)
+
+ if "image" in data and data["image"] and has_type_template_image:
+ raise CatalogError(
+ 400,
+ {
+ "error": "Items linked to a type that has a template image cannot use a separate item image.",
+ },
+ )
+
+ if "image" in data and data["image"] and not has_type_template_image:
+ img = data["image"]
+ if str(img).startswith("data:image"):
+ base64_part = img.split(",", 1)[1] if "," in img else ""
+ if len(base64_part) > 13_300_000:
+ raise CatalogError(400, {"error": "Image file size exceeds 10MB limit"})
+
+ merged_cf_for_update = None
+ if "custom_fields" in data:
+ allowed = repo.fetch_allowed_custom_field_names(cur)
+ cf_work = data.get("custom_fields")
+ if effective_type_id and type_row_update:
+ cf_work = merge_custom_fields_from_type(type_row_update, cf_work)
+ ok, err = validate_custom_fields(cf_work, allowed)
+ if not ok:
+ raise CatalogError(400, {"error": err})
+ if type_row_update:
+ okl, errl = validate_locked_custom_fields_filled(type_row_update, cf_work, cur)
+ if not okl:
+ raise CatalogError(400, {"error": errl})
+ merged_cf_for_update = cf_work
+
+ if type_row_update and ("name" in data or "description" in data):
+ prop_name = (
+ data["name"].strip()
+ if ("name" in data and data.get("name") is not None and str(data.get("name", "")).strip())
+ else current_state["name"]
+ )
+ prop_desc = (
+ (data["description"].strip() or None)
+ if "description" in data
+ else current_state["description"]
+ )
+ okp, errp = validate_name_desc_prefixes(type_row_update, prop_name, prop_desc)
+ if not okp:
+ raise CatalogError(400, {"error": errp})
+
+ updates = []
+ values = []
+ if "name" in data:
+ updates.append("name = %s")
+ values.append(data["name"].strip())
+ if "description" in data:
+ updates.append("description = %s")
+ values.append(data["description"].strip() or None)
+ if has_type_template_image:
+ updates.append("image = %s")
+ values.append(None)
+ elif "image" in data:
+ updates.append("image = %s")
+ values.append(data["image"] or None)
+ if "last_order_date" in data:
+ updates.append("last_order_date = %s")
+ values.append(data["last_order_date"] or None)
+ if "custom_fields" in data:
+ updates.append("custom_fields = %s")
+ values.append(json.dumps(merged_cf_for_update) if merged_cf_for_update else None)
+
+ if unlink_from_type or requested_type_present:
+ updates.append("supply_type_id = %s")
+ values.append(None if unlink_from_type else effective_type_id)
+
+ updates.append("last_modified_by = %s")
+ values.append(current_user_id)
+ values.append(supply_id)
+
+ if updates:
+ repo.update_supply_columns(cur, updates, values)
+
+ type_for_team_cat_merge = None if unlink_from_type else type_row_update
+
+ if "teams" in data:
+ repo.delete_teams_for_supply(cur, supply_id)
+ merged_teams = merge_teams_with_type_locks(type_for_team_cat_merge, data.get("teams"))
+ for team_name in merged_teams:
+ repo.insert_supply_team(cur, supply_id, team_name)
+
+ if "categories" in data:
+ repo.delete_categories_for_supply(cur, supply_id)
+ merged_cats = merge_categories_with_type_locks(type_for_team_cat_merge, data.get("categories"))
+ for cat_id in merged_cats:
+ repo.insert_supply_category(cur, supply_id, cat_id)
+
+ if has_type_template_image:
+ image_for_history = None
+ elif "image" in data:
+ image_for_history = data["image"] or None
+ else:
+ image_for_history = old_values["image"]
+
+ new_values = {
+ "name": data.get("name", old_values["name"]).strip()
+ if "name" in data
+ else old_values["name"],
+ "description": (data.get("description", "").strip() or None)
+ if "description" in data
+ else old_values["description"],
+ "image": image_for_history,
+ "last_order_date": data["last_order_date"]
+ if "last_order_date" in data
+ else old_values["last_order_date"],
+ }
+ history_id = log_supply_history(conn, supply_id, "UPDATE", old_values, new_values, current_user_id)
+
+ if "teams" in data:
+ new_teams = merge_teams_with_type_locks(type_for_team_cat_merge, data.get("teams"))
+ else:
+ new_teams = old_teams
+
+ if "categories" in data:
+ new_categories = merge_categories_with_type_locks(type_for_team_cat_merge, data.get("categories"))
+ else:
+ new_categories = old_categories
+ log_team_changes(conn, history_id, old_teams, new_teams)
+ log_category_changes(conn, history_id, old_categories, new_categories)
+
+ conn.commit()
+
+ row = repo.fetch_supply_with_type_join(cur, supply_id)
+ cf = parse_custom_fields_cell(row.get("custom_fields"))
+ supply = Supply.from_dict(row).to_dict()
+ supply["public_id"] = row.get("public_id")
+ supply["image"] = effective_supply_image(row.get("image"), row.get("type_image"))
+ supply["type_has_template_image"] = bool(row.get("type_image"))
+ supply["custom_fields"] = cf
+ supply["supply_type_id"] = row.get("supply_type_id")
+ supply["type_name"] = row.get("type_name")
+
+ total_qty = repo.sum_location_qty(cur, supply_id)
+ loc_rows = repo.fetch_locations_ordered_for_supply(cur, supply_id)
+ teams = repo.fetch_teams_for_supply_ordered(cur, supply_id)
+ category_ids = repo.fetch_categories_for_supply_ordered(cur, supply_id)
+
+ supply["totalQty"] = int(total_qty)
+ supply["locations"] = location_db_rows_to_api(loc_rows)
+ supply["teams"] = teams
+ supply["categories"] = category_ids
+
+ if row["last_modified_by"]:
+ m = repo.fetch_member_by_uf_id(cur, row["last_modified_by"])
+ _attach_last_modified_names(supply, m)
+ return supply
+ except mysql.connector.IntegrityError:
+ conn.rollback()
+ raise
+ except CatalogError:
+ conn.rollback()
+ raise
+ finally:
+ cur.close()
+ conn.close()
+
+
+def delete_supply(supply_id: int, current_user_id: str) -> None:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ try:
+ supply_check = repo.fetch_supply_id_name(cur, supply_id)
+ if not supply_check:
+ raise CatalogError(
+ 404,
+ {
+ "error": "Supply not found",
+ "error_type": "SUPPLY_DELETED",
+ "supply_id": supply_id,
+ "message": "This item was already deleted by another user. Please refresh the page to see the latest data.",
+ },
+ )
+
+ current_state = get_supply_current_state(conn, supply_id)
+ old_values = {
+ "name": current_state["name"],
+ "description": current_state["description"],
+ "image": current_state["image"],
+ "last_order_date": current_state["last_order_date"],
+ }
+ old_teams = current_state["teams"]
+ old_categories = current_state["categories"]
+
+ history_id = log_supply_history(conn, supply_id, "DELETE", old_values, {}, current_user_id)
+ log_team_changes(conn, history_id, old_teams, [])
+ log_category_changes(conn, history_id, old_categories, [])
+ snapshot_supply_locations_before_delete(conn, supply_id, supply_check["name"], current_user_id)
+ repo.delete_supply_by_id(cur, supply_id)
+ conn.commit()
+ except CatalogError:
+ conn.rollback()
+ raise
+ finally:
+ cur.close()
+ conn.close()
+
+
+def get_supply_history(
+ supply_id_filter: Optional[int],
+ action_type_filter: Optional[str],
+ limit: int,
+ offset: int,
+) -> dict:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ try:
+ total, rows = repo.fetch_supply_history_count_and_rows(
+ cur, supply_id_filter, action_type_filter, limit, offset
+ )
+ history_entries = []
+ for row in rows:
+ user = repo.fetch_member_by_uf_id(cur, row["changed_by"])
+ team_changes = [
+ {"team_name": t["team_name"], "action": t["action"]}
+ for t in repo.fetch_history_teams(cur, row["id"])
+ ]
+ category_changes = [
+ {"category_id": c["category_id"], "action": c["action"]}
+ for c in repo.fetch_history_categories(cur, row["id"])
+ ]
+
+ can_undo = False
+ undo_removes_log_only = False
+ if row["action_type"] == "DELETE":
+ can_undo = True
+ elif row["action_type"] == "CREATE":
+ can_undo = True
+ undo_removes_log_only = row["supply_id"] is None
+ elif row["action_type"] == "UPDATE":
+ can_undo = True
+ undo_removes_log_only = row["supply_id"] is None
+
+ history_entries.append(
+ {
+ "id": row["id"],
+ "supply_id": row["supply_id"],
+ "supply_name": row["supply_name"],
+ "action_type": row["action_type"],
+ "old_name": row["old_name"],
+ "new_name": row["new_name"],
+ "old_description": row["old_description"],
+ "new_description": row["new_description"],
+ "old_image": row["old_image"],
+ "new_image": row["new_image"],
+ "old_last_order_date": db_datetime_to_utc_iso(row["old_last_order_date"]),
+ "new_last_order_date": db_datetime_to_utc_iso(row["new_last_order_date"]),
+ "changed_by": row["changed_by"],
+ "changed_by_name": f"{user['first_name']} {user['last_name']}" if user else None,
+ "changed_by_email": user["uf_email"] if user else None,
+ "changed_at": db_datetime_to_utc_iso(row["changed_at"]),
+ "can_undo": can_undo,
+ "undo_removes_log_only": undo_removes_log_only,
+ "team_changes": team_changes,
+ "category_changes": category_changes,
+ }
+ )
+ return {"history": history_entries, "total": total}
+ finally:
+ cur.close()
+ conn.close()
+
+
+def undo_supply_history(history_id: int, current_user_id: str) -> dict:
+ conn = get_db()
+ cur = conn.cursor(dictionary=True)
+ restored_supply_id = None
+ try:
+ history = repo.fetch_history_row(cur, history_id)
+ if not history:
+ raise CatalogError(404, {"error": "History entry not found"})
+
+ if not session.get("is_leader", False):
+ if not is_latest_global_history_timestamp(cur, history["changed_at"]):
+ raise CatalogError(
+ 403,
+ {"error": "Only the most recent action can be undone.", "error_type": "UNDO_NOT_LATEST"},
+ )
+
+ original_supply_id = history["supply_id"]
+ if not original_supply_id and history["action_type"] == "DELETE":
+ if history["old_name"]:
+ name_matches = repo.select_supply_ids_by_name(cur, history["old_name"])
+ if len(name_matches) > 1:
+ raise CatalogError(
+ 400,
+ {
+ "error": "Cannot undo: multiple supplies share this name; disambiguation is not available for this history entry.",
+ "error_type": "UNDO_AMBIGUOUS_NAME",
+ },
+ )
+ if len(name_matches) == 1:
+ original_supply_id = name_matches[0]
+
+ team_changes = repo.fetch_history_teams(cur, history_id)
+ category_changes = repo.fetch_history_categories(cur, history_id)
+
+ if history["action_type"] == "CREATE":
+ if history["supply_id"]:
+ repo.delete_supply_by_id(cur, history["supply_id"])
+
+ elif history["action_type"] == "UPDATE":
+ if history["supply_id"]:
+ if not repo.supply_exists_by_id(cur, history["supply_id"]):
+ raise CatalogError(
+ 400,
+ {"error": "Cannot undo: supply no longer exists", "error_type": "UNDO_IMPOSSIBLE"},
+ )
+ upd = []
+ vals = []
+ if history["old_name"]:
+ upd.append("name = %s")
+ vals.append(history["old_name"])
+ if history["old_description"] is not None:
+ upd.append("description = %s")
+ vals.append(history["old_description"])
+ if history["old_image"] is not None:
+ upd.append("image = %s")
+ vals.append(history["old_image"])
+ if history["old_last_order_date"] is not None:
+ upd.append("last_order_date = %s")
+ vals.append(history["old_last_order_date"])
+ upd.append("last_modified_by = %s")
+ vals.append(current_user_id)
+ vals.append(history["supply_id"])
+ repo.update_supply_columns(cur, upd, vals)
+
+ repo.delete_teams_for_supply(cur, history["supply_id"])
+ for team_change in team_changes:
+ if team_change["action"] == "REMOVED":
+ repo.insert_supply_team_ignore(
+ cur, history["supply_id"], team_change["team_name"]
+ )
+
+ repo.delete_categories_for_supply(cur, history["supply_id"])
+ for cat_change in category_changes:
+ if cat_change["action"] == "REMOVED":
+ repo.insert_supply_category_ignore(
+ cur, history["supply_id"], cat_change["category_id"]
+ )
+
+ elif history["action_type"] == "DELETE":
+ restore_public_id = str(uuid.uuid4())
+ if original_supply_id:
+ repo.insert_supply_with_id(
+ cur,
+ original_supply_id,
+ restore_public_id,
+ history["old_name"],
+ history["old_description"],
+ history["old_image"],
+ history["old_last_order_date"],
+ current_user_id,
+ )
+ restored_supply_id = original_supply_id
+ else:
+ restored_supply_id = repo.insert_supply_without_id(
+ cur,
+ restore_public_id,
+ history["old_name"],
+ history["old_description"],
+ history["old_image"],
+ history["old_last_order_date"],
+ current_user_id,
+ )
+
+ for team_change in team_changes:
+ if team_change["action"] == "REMOVED":
+ repo.insert_supply_team_ignore(cur, restored_supply_id, team_change["team_name"])
+
+ for cat_change in category_changes:
+ if cat_change["action"] == "REMOVED":
+ repo.insert_supply_category_ignore(
+ cur, restored_supply_id, cat_change["category_id"]
+ )
+
+ snapshot_batch = repo.fetch_undo_snapshot_batch(
+ cur, history["old_name"], history["changed_at"]
+ )
+ if snapshot_batch and snapshot_batch["batch_id"]:
+ batch_id = snapshot_batch["batch_id"]
+ for entry in repo.fetch_undo_snapshot_entries(cur, batch_id):
+ repo.insert_supplies_location_row(
+ cur,
+ restored_supply_id,
+ entry["location_name"],
+ entry["shelf"],
+ entry["old_amount"],
+ current_user_id,
+ )
+ repo.delete_location_history_cascaded_batch(cur, batch_id)
+
+ repo.delete_history_by_id(cur, history_id)
+ conn.commit()
+
+ response_data = {
+ "success": True,
+ "message": f'Successfully undid {history["action_type"]} action',
+ }
+ if history["action_type"] == "DELETE" and restored_supply_id is not None:
+ response_data["restored_supply_id"] = restored_supply_id
+ return response_data
+ except CatalogError:
+ conn.rollback()
+ raise
+ except mysql.connector.IntegrityError as e:
+ conn.rollback()
+ raise CatalogError(400, {"error": str(e), "error_type": "UNDO_IMPOSSIBLE"})
+ except Exception:
+ conn.rollback()
+ raise
+ finally:
+ cur.close()
+ conn.close()
+
+
+def discard_supply_history(history_id: int) -> dict:
+ conn = get_db()
+ dcur = conn.cursor(dictionary=True)
+ try:
+ row = repo.fetch_history_id_and_changed_at(dcur, history_id)
+ if not row:
+ raise CatalogError(404, {"error": "History entry not found"})
+
+ if not session.get("is_leader", False):
+ if not is_latest_global_history_timestamp(dcur, row["changed_at"]):
+ raise CatalogError(
+ 403,
+ {"error": "Only the most recent action can be undone.", "error_type": "UNDO_NOT_LATEST"},
+ )
+ except CatalogError:
+ dcur.close()
+ conn.close()
+ raise
+
+ dcur.close()
+ xcur = conn.cursor()
+ try:
+ xcur.execute("DELETE FROM supplies_history WHERE id = %s", (history_id,))
+ conn.commit()
+ return {"success": True, "discarded_id": history_id}
+ except Exception:
+ conn.rollback()
+ raise
+ finally:
+ xcur.close()
+ conn.close()
diff --git a/src/scripts/helpers.py b/src/scripts/helpers.py
index d458147..b53f109 100644
--- a/src/scripts/helpers.py
+++ b/src/scripts/helpers.py
@@ -27,19 +27,28 @@
def get_sql_base_path(script_file):
"""
- Get the SQL base path, works in both Docker and local environments.
-
+ Root directory that contains nested `table_*.sql` DDL files.
+
+ Canonical layout: ``src/tables/<table_dir>/table_<name>.sql``.
+ Legacy ``src/sql/`` is still used if present and ``src/tables`` does not exist.
+
Args:
- script_file: The __file__ from the calling script
-
+ script_file: The ``__file__`` of the caller (under ``src/api`` or ``src/scripts``).
+
Returns:
- Path object pointing to the SQL directory
+ Path to ``src/tables`` (or legacy ``src/sql``).
"""
- if Path("/app/src/sql").exists():
- return Path("/app/src/sql") # Docker path
- else:
- # Local development path (relative to script location)
- return Path(script_file).parent.parent / "sql"
+ src_root = Path(script_file).resolve().parent.parent
+ candidates = [
+ Path("/app/src/tables"),
+ src_root / "tables",
+ Path("/app/src/sql"),
+ src_root / "sql",
+ ]
+ for p in candidates:
+ if p.is_dir():
+ return p
+ return src_root / "tables"
def discover_table_files(sql_base_path):
diff --git a/src/scripts/location_type_constants.py b/src/scripts/location_type_constants.py
new file mode 100644
index 0000000..daf4765
--- /dev/null
+++ b/src/scripts/location_type_constants.py
@@ -0,0 +1,15 @@
+"""Location type constants for seed scripts and API validation.
+
+Keep SYSTEM_SPECIAL_LOCATION_NAMES in sync with keys in
+milventory/src/constants/locationSvgByName.js (LOCATION_SVG_MARKUP_BY_NAME).
+"""
+
+# Seeded map locations that use custom SVGs — always type "special", not user-assignable.
+SYSTEM_SPECIAL_LOCATION_NAMES = frozenset(
+ ("To Be Delivered", "Lost Items", "Unsorted Items")
+)
+
+# Types leaders may set when creating or editing a location.
+LEADER_ASSIGNABLE_LOCATION_TYPES = frozenset(
+ ("drawer", "cabinet", "tall_cabinet", "table", "other")
+)
diff --git a/src/scripts/migrate_free_coord_one_per.py b/src/scripts/migrate_free_coord_one_per.py
new file mode 100644
index 0000000..8cf623c
--- /dev/null
+++ b/src/scripts/migrate_free_coord_one_per.py
@@ -0,0 +1,141 @@
+"""
+Migration: one unit per free coordinate; unique (supply_id + coords); CHECK amount = 1.
+
+Run after migrate_supplies_location_free_place (coord_x/coord_y exist).
+"""
+import os
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent))
+
+import mysql.connector
+from helpers import parse_database_url
+
+
+def check_column_exists(cur, table_name, column_name):
+ cur.execute(
+ """
+ SELECT COUNT(*)
+ FROM information_schema.COLUMNS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = %s
+ AND COLUMN_NAME = %s
+ """,
+ (table_name, column_name),
+ )
+ return cur.fetchone()[0] > 0
+
+
+def check_constraint_exists(cur, name):
+ cur.execute(
+ """
+ SELECT COUNT(*)
+ FROM information_schema.TABLE_CONSTRAINTS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = 'supplies_location'
+ AND CONSTRAINT_NAME = %s
+ """,
+ (name,),
+ )
+ return cur.fetchone()[0] > 0
+
+
+def migrate_free_coord_one_per():
+ database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
+ db_params = parse_database_url(database_url)
+
+ print("🔄 Migrating supplies_location: 1 qty per free coordinate + unique coords...")
+
+ conn = mysql.connector.connect(**db_params)
+ cur = conn.cursor()
+
+ if not check_column_exists(cur, "supplies_location", "coord_x"):
+ print(" ⚠ coord_x missing — run migrate_supplies_location_free_place first")
+ cur.close()
+ conn.close()
+ return
+
+ if check_column_exists(cur, "supplies_location", "free_coord_uid"):
+ print(" ✓ free_coord_uid already exists — skipping")
+ cur.close()
+ conn.close()
+ return
+
+ # Remove duplicate free-coordinate rows (keep lowest id)
+ cur.execute(
+ """
+ DELETE sl1 FROM supplies_location sl1
+ INNER JOIN supplies_location sl2
+ ON sl1.supply_id = sl2.supply_id
+ AND sl1.coord_x = sl2.coord_x
+ AND sl1.coord_y = sl2.coord_y
+ AND sl1.location_name IS NULL
+ AND sl2.location_name IS NULL
+ AND sl1.id > sl2.id
+ """
+ )
+ print(f" ✓ Removed duplicate free-coordinate rows ({cur.rowcount} deleted)")
+
+ cur.execute(
+ """
+ UPDATE supplies_location
+ SET amount = 1
+ WHERE location_name IS NULL
+ AND coord_x IS NOT NULL
+ AND coord_y IS NOT NULL
+ AND amount <> 1
+ """
+ )
+ if cur.rowcount:
+ print(f" ✓ Normalized free-coordinate amount to 1 ({cur.rowcount} rows)")
+
+ if check_constraint_exists(cur, "chk_supply_location_placement"):
+ cur.execute(
+ "ALTER TABLE supplies_location DROP CHECK chk_supply_location_placement"
+ )
+ print(" ✓ Dropped chk_supply_location_placement")
+
+ cur.execute(
+ """
+ ALTER TABLE supplies_location
+ ADD CONSTRAINT chk_supply_location_placement
+ CHECK (
+ (location_name IS NOT NULL AND coord_x IS NULL AND coord_y IS NULL)
+ OR
+ (location_name IS NULL AND coord_x IS NOT NULL AND coord_y IS NOT NULL AND amount = 1)
+ )
+ """
+ )
+ print(" ✓ Added CHECK (free rows must have amount = 1)")
+
+ cur.execute(
+ """
+ ALTER TABLE supplies_location
+ ADD COLUMN free_coord_uid VARCHAR(160)
+ GENERATED ALWAYS AS (
+ CASE
+ WHEN location_name IS NULL
+ AND coord_x IS NOT NULL
+ AND coord_y IS NOT NULL
+ THEN CONCAT('F', supply_id, ':', coord_x, ':', coord_y)
+ ELSE NULL
+ END
+ ) STORED,
+ ADD UNIQUE KEY uniq_free_coord_uid (free_coord_uid)
+ """
+ )
+ print(" ✓ Added free_coord_uid + UNIQUE uniq_free_coord_uid")
+
+ conn.commit()
+ cur.close()
+ conn.close()
+ print("✅ migrate_free_coord_one_per complete")
+
+
+if __name__ == "__main__":
+ try:
+ migrate_free_coord_one_per()
+ except mysql.connector.Error as e:
+ print(f"❌ migrate_free_coord_one_per: {e}")
+ raise
diff --git a/src/scripts/migrate_location_types_cleanup.py b/src/scripts/migrate_location_types_cleanup.py
new file mode 100644
index 0000000..847ab7c
--- /dev/null
+++ b/src/scripts/migrate_location_types_cleanup.py
@@ -0,0 +1,50 @@
+"""
+One-time cleanup: remove external type, fix unknown, set system SVG locations to special.
+"""
+import os
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent))
+
+import mysql.connector
+from helpers import parse_database_url
+
+
+def migrate_location_types_cleanup():
+ try:
+ database_url = os.getenv(
+ "DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb"
+ )
+ db_params = parse_database_url(database_url)
+
+ conn = mysql.connector.connect(**db_params)
+ cur = conn.cursor()
+
+ cur.execute("SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = DATABASE() AND TABLE_NAME = 'locations'")
+ if cur.fetchone()[0] == 0:
+ cur.close()
+ conn.close()
+ return
+
+ print("🔄 Migrating location types (external → other, unknown → other, SVG names → special)...")
+
+ cur.execute("UPDATE locations SET type = %s WHERE type = %s", ("other", "external"))
+ ext_n = cur.rowcount
+ cur.execute("UPDATE locations SET type = %s WHERE type = %s", ("other", "unknown"))
+ unk_n = cur.rowcount
+
+ names = ("To Be Delivered", "Lost Items", "Unsorted Items")
+ placeholders = ",".join(["%s"] * len(names))
+ cur.execute(
+ f"UPDATE locations SET type = %s WHERE name IN ({placeholders})",
+ ("special",) + names,
+ )
+ spec_n = cur.rowcount
+
+ conn.commit()
+ cur.close()
+ conn.close()
+ print(f"✓ Location types cleanup: external→other rows={ext_n}, unknown→other rows={unk_n}, SVG names→special rows={spec_n}")
+ except Exception as e:
+ print(f"⚠ Warning: location types cleanup migration: {e}")
diff --git a/src/scripts/migrate_locations_schema.py b/src/scripts/migrate_locations_schema.py
new file mode 100644
index 0000000..b847717
--- /dev/null
+++ b/src/scripts/migrate_locations_schema.py
@@ -0,0 +1,116 @@
+"""
+Migration script to add x, y, width, height columns to locations table.
+Run this once to update existing database schema.
+"""
+import os
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent))
+
+import mysql.connector
+from helpers import parse_database_url, get_sql_base_path, execute_sql_file
+
+
+def check_column_exists(cur, table_name, column_name):
+ """Check if a column exists in a table."""
+ cur.execute("""
+ SELECT COUNT(*)
+ FROM information_schema.COLUMNS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = %s
+ AND COLUMN_NAME = %s
+ """, (table_name, column_name))
+ return cur.fetchone()[0] > 0
+
+
+def migrate_locations_schema():
+ """Add coordinate columns to locations table if they don't exist."""
+ try:
+ database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
+ db_params = parse_database_url(database_url)
+
+ print("🔄 Migrating locations table schema...")
+
+ conn = mysql.connector.connect(**db_params)
+ cur = conn.cursor()
+
+ # Check if table exists
+ cur.execute("""
+ SELECT COUNT(*)
+ FROM information_schema.TABLES
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = 'locations'
+ """)
+ if cur.fetchone()[0] == 0:
+ print("⚠ Locations table does not exist. Creating it...")
+ sql_base_path = get_sql_base_path(__file__)
+ locations_file = sql_base_path / "location" / "table_locations.sql"
+ if locations_file.exists():
+ execute_sql_file(cur, locations_file, "locations table")
+ conn.commit()
+ print("✓ Locations table created with new schema")
+ cur.close()
+ conn.close()
+ return
+ else:
+ print(f"✗ Locations table SQL file not found at {locations_file}")
+ cur.close()
+ conn.close()
+ return
+
+ # Check which columns need to be added
+ columns_to_add = []
+ if not check_column_exists(cur, 'locations', 'x'):
+ columns_to_add.append(('x', 'INT NOT NULL DEFAULT 0', 'type'))
+ if not check_column_exists(cur, 'locations', 'y'):
+ columns_to_add.append(('y', 'INT NOT NULL DEFAULT 0', 'x'))
+ if not check_column_exists(cur, 'locations', 'width'):
+ columns_to_add.append(('width', 'INT NOT NULL DEFAULT 150', 'y'))
+ if not check_column_exists(cur, 'locations', 'height'):
+ columns_to_add.append(('height', 'INT NOT NULL DEFAULT 150', 'width'))
+
+ if not columns_to_add:
+ print("✓ All columns already exist, migration not needed")
+ cur.close()
+ conn.close()
+ return
+
+ # Add missing columns
+ print(f"📋 Adding {len(columns_to_add)} column(s)...")
+ for col_name, col_def, after_col in columns_to_add:
+ try:
+ # Check if column exists first (in case it was added between checks)
+ if check_column_exists(cur, 'locations', col_name):
+ print(f" ⊘ Column {col_name} already exists, skipping")
+ continue
+
+ query = f"ALTER TABLE locations ADD COLUMN {col_name} {col_def} AFTER {after_col}"
+ cur.execute(query)
+ print(f" ✓ Added column: {col_name}")
+ except mysql.connector.Error as e:
+ # If error is about duplicate column, that's okay
+ if 'Duplicate column name' in str(e) or '1060' in str(e):
+ print(f" ⊘ Column {col_name} already exists, skipping")
+ else:
+ print(f" ✗ Failed to add column {col_name}: {e}")
+
+ conn.commit()
+ print("✓ Migration complete")
+
+ cur.close()
+ conn.close()
+
+ except mysql.connector.Error as e:
+ print(f"✗ Database error: {e}")
+ sys.exit(1)
+ except Exception as e:
+ print(f"✗ Unexpected error: {e}")
+ import traceback
+ traceback.print_exc()
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ migrate_locations_schema()
+
diff --git a/src/scripts/migrate_supplies_custom_fields.py b/src/scripts/migrate_supplies_custom_fields.py
new file mode 100644
index 0000000..02a1084
--- /dev/null
+++ b/src/scripts/migrate_supplies_custom_fields.py
@@ -0,0 +1,65 @@
+"""
+Migration script to add custom_fields JSON column to supplies table.
+Run once to update existing database schema.
+"""
+import os
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent))
+
+import mysql.connector
+from helpers import parse_database_url
+
+
+def check_column_exists(cur, table_name, column_name):
+ """Check if a column exists in a table."""
+ cur.execute("""
+ SELECT COUNT(*)
+ FROM information_schema.COLUMNS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = %s
+ AND COLUMN_NAME = %s
+ """, (table_name, column_name))
+ return cur.fetchone()[0] > 0
+
+
+def migrate_supplies_custom_fields():
+ """Add custom_fields JSON column to supplies if it doesn't exist."""
+ try:
+ database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
+ db_params = parse_database_url(database_url)
+
+ print("🔄 Migrating supplies table (custom_fields)...")
+
+ conn = mysql.connector.connect(**db_params)
+ cur = conn.cursor()
+
+ if not check_column_exists(cur, 'supplies', 'custom_fields'):
+ cur.execute("ALTER TABLE supplies ADD COLUMN custom_fields JSON DEFAULT NULL AFTER image")
+ conn.commit()
+ print("✓ Added supplies.custom_fields column")
+ else:
+ print("✓ supplies.custom_fields already exists")
+
+ # Drop label from custom_field_definitions if present (use name only)
+ cur.execute("""
+ SELECT COUNT(*) FROM information_schema.TABLES
+ WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = 'custom_field_definitions'
+ """)
+ if cur.fetchone()[0] > 0 and check_column_exists(cur, 'custom_field_definitions', 'label'):
+ cur.execute("ALTER TABLE custom_field_definitions DROP COLUMN label")
+ conn.commit()
+ print("✓ Dropped custom_field_definitions.label column")
+
+ cur.close()
+ conn.close()
+
+ except mysql.connector.Error as e:
+ print(f"⚠ Supplies custom_fields migration warning: {e}")
+ except Exception as e:
+ print(f"⚠ Supplies custom_fields migration warning: {e}")
+
+
+if __name__ == "__main__":
+ migrate_supplies_custom_fields()
diff --git a/src/scripts/migrate_supplies_location_free_place.py b/src/scripts/migrate_supplies_location_free_place.py
new file mode 100644
index 0000000..f350551
--- /dev/null
+++ b/src/scripts/migrate_supplies_location_free_place.py
@@ -0,0 +1,103 @@
+"""
+Migration: supplies_location free-coordinate rows (nullable location_name, coord_x/coord_y, CHECK).
+"""
+import os
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent))
+
+import mysql.connector
+from helpers import parse_database_url
+
+
+def check_column_exists(cur, table_name, column_name):
+ cur.execute(
+ """
+ SELECT COUNT(*)
+ FROM information_schema.COLUMNS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = %s
+ AND COLUMN_NAME = %s
+ """,
+ (table_name, column_name),
+ )
+ return cur.fetchone()[0] > 0
+
+
+def get_location_fk_name(cur):
+ cur.execute(
+ """
+ SELECT CONSTRAINT_NAME
+ FROM information_schema.KEY_COLUMN_USAGE
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = 'supplies_location'
+ AND REFERENCED_TABLE_NAME = 'locations'
+ LIMIT 1
+ """
+ )
+ row = cur.fetchone()
+ return row[0] if row else None
+
+
+def migrate_supplies_location_free_place():
+ try:
+ database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
+ db_params = parse_database_url(database_url)
+
+ print("🔄 Migrating supplies_location for free place (coords)...")
+
+ conn = mysql.connector.connect(**db_params)
+ cur = conn.cursor()
+
+ if not check_column_exists(cur, "supplies_location", "coord_x"):
+ fk = get_location_fk_name(cur)
+ if fk:
+ cur.execute(f"ALTER TABLE supplies_location DROP FOREIGN KEY `{fk}`")
+ print(f" ✓ Dropped FK {fk}")
+
+ cur.execute(
+ """
+ ALTER TABLE supplies_location
+ MODIFY COLUMN location_name VARCHAR(100) NULL,
+ ADD COLUMN coord_x INT NULL AFTER location_name,
+ ADD COLUMN coord_y INT NULL AFTER coord_x
+ """
+ )
+ print(" ✓ Nullable location_name; added coord_x, coord_y")
+
+ cur.execute(
+ """
+ ALTER TABLE supplies_location
+ ADD CONSTRAINT fk_supply_location_location
+ FOREIGN KEY (location_name) REFERENCES locations(name)
+ ON UPDATE CASCADE ON DELETE CASCADE
+ """
+ )
+ print(" ✓ Re-added FK location_name -> locations(name)")
+
+ cur.execute(
+ """
+ ALTER TABLE supplies_location
+ ADD CONSTRAINT chk_supply_location_placement
+ CHECK (
+ (location_name IS NOT NULL AND coord_x IS NULL AND coord_y IS NULL)
+ OR
+ (location_name IS NULL AND coord_x IS NOT NULL AND coord_y IS NOT NULL)
+ )
+ """
+ )
+ print(" ✓ Added CHECK chk_supply_location_placement")
+ else:
+ print(" ✓ supplies_location.coord_x already exists")
+
+ conn.commit()
+ cur.close()
+ conn.close()
+ except mysql.connector.Error as e:
+ print(f"❌ migrate_supplies_location_free_place: {e}")
+ raise
+
+
+if __name__ == "__main__":
+ migrate_supplies_location_free_place()
diff --git a/src/scripts/migrate_supplies_location_history.py b/src/scripts/migrate_supplies_location_history.py
new file mode 100644
index 0000000..1500f91
--- /dev/null
+++ b/src/scripts/migrate_supplies_location_history.py
@@ -0,0 +1,81 @@
+"""
+Migration script to create the supplies_location_history table.
+"""
+import sys
+import os
+from pathlib import Path
+
+# Add project root to path for imports
+project_root = Path(__file__).parent.parent.parent
+sys.path.insert(0, str(project_root))
+
+import mysql.connector
+from src.scripts.helpers import parse_database_url, get_sql_base_path, execute_sql_file, table_exists
+
+def migrate_supplies_location_history():
+ """Create the supplies_location_history table if it doesn't exist."""
+ try:
+ # Use environment variables or defaults (same as app.py)
+ database_url = os.getenv("DATABASE_URL")
+ if not database_url:
+ # Build from individual env vars (same pattern as src/api/db.py)
+ db_host = os.getenv('DB_HOST', 'localhost')
+ db_port = int(os.getenv('DB_PORT', 3306))
+ db_user = os.getenv('DB_USER', 'mysqluser')
+ db_password = os.getenv('DB_PASSWORD', 'mysqlpassword')
+ db_name = os.getenv('DB_NAME', 'mydb')
+ database_url = f"mysql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
+
+ db_params = parse_database_url(database_url)
+ # Use Pure Python connector to avoid auth plugin issues
+ db_params['use_pure'] = True
+ db_params['auth_plugin'] = 'mysql_native_password'
+
+ print("Migrating supplies_location_history table...")
+
+ conn = mysql.connector.connect(**db_params)
+ cur = conn.cursor()
+
+ # Check if table already exists
+ if table_exists(cur, 'supplies_location_history'):
+ print("[OK] supplies_location_history table already exists, skipping migration")
+ cur.close()
+ conn.close()
+ return True
+
+ # Get the SQL file path
+ sql_base_path = get_sql_base_path(__file__)
+ sql_file = sql_base_path / 'supplies_location' / 'table_supplies_location_history.sql'
+
+ if not sql_file.exists():
+ print(f"[ERROR] SQL file not found: {sql_file}")
+ cur.close()
+ conn.close()
+ return False
+
+ print(f"Creating supplies_location_history table from {sql_file.name}...")
+
+ # Execute the SQL file
+ if execute_sql_file(cur, sql_file, "supplies_location_history table"):
+ conn.commit()
+ print("[OK] Migration completed successfully")
+ cur.close()
+ conn.close()
+ return True
+ else:
+ conn.rollback()
+ print("[ERROR] Migration failed")
+ cur.close()
+ conn.close()
+ return False
+
+ except Exception as e:
+ print(f"[ERROR] Migration error: {e}")
+ import traceback
+ traceback.print_exc()
+ return False
+
+if __name__ == '__main__':
+ success = migrate_supplies_location_history()
+ sys.exit(0 if success else 1)
+
diff --git a/src/scripts/migrate_supplies_public_id.py b/src/scripts/migrate_supplies_public_id.py
new file mode 100644
index 0000000..5972dd9
--- /dev/null
+++ b/src/scripts/migrate_supplies_public_id.py
@@ -0,0 +1,143 @@
+"""
+Add supplies.public_id (UUID per row), unique index, and drop legacy UNIQUE on name.
+Idempotent: safe to run on every API startup.
+
+Requires MySQL 8+ for UUID() in UPDATE (per-row values).
+"""
+import os
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent))
+
+import mysql.connector
+from helpers import parse_database_url
+
+
+def _db_params():
+ url = os.getenv("DATABASE_URL")
+ if url:
+ return parse_database_url(url)
+ return {
+ "host": os.getenv("DB_HOST", "localhost"),
+ "port": int(os.getenv("DB_PORT", 3306)),
+ "user": os.getenv("DB_USER", "mysqluser"),
+ "password": os.getenv("DB_PASSWORD", "mysqlpassword"),
+ "database": os.getenv("DB_NAME", "mydb"),
+ }
+
+
+def check_column_exists(cur, table_name, column_name):
+ cur.execute(
+ """
+ SELECT COUNT(*)
+ FROM information_schema.COLUMNS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = %s
+ AND COLUMN_NAME = %s
+ """,
+ (table_name, column_name),
+ )
+ return cur.fetchone()[0] > 0
+
+
+def index_named_exists(cur, table_name, index_name):
+ cur.execute(
+ """
+ SELECT COUNT(*)
+ FROM information_schema.STATISTICS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = %s
+ AND INDEX_NAME = %s
+ """,
+ (table_name, index_name),
+ )
+ return cur.fetchone()[0] > 0
+
+
+def unique_single_column_indexes(cur, table_name):
+ """Return (index_name, column_name) pairs for unique indexes that cover exactly one column."""
+ cur.execute(
+ """
+ SELECT INDEX_NAME, GROUP_CONCAT(COLUMN_NAME ORDER BY SEQ_IN_INDEX) AS cols
+ FROM information_schema.STATISTICS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = %s
+ AND NON_UNIQUE = 0
+ GROUP BY INDEX_NAME
+ """,
+ (table_name,),
+ )
+ out = []
+ for row in cur.fetchall():
+ idx = row[0]
+ cols = row[1]
+ if idx == "PRIMARY":
+ continue
+ if cols and "," not in cols:
+ out.append((idx, cols))
+ return out
+
+
+def migrate_supplies_public_id():
+ print("Migrating supplies.public_id (duplicate names allowed)...")
+ params = _db_params()
+ conn = mysql.connector.connect(**params)
+ cur = conn.cursor()
+
+ try:
+ cur.execute(
+ """
+ SELECT COUNT(*) FROM information_schema.TABLES
+ WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = 'supplies'
+ """
+ )
+ if cur.fetchone()[0] == 0:
+ print(" (no supplies table - skip)")
+ return
+
+ if not check_column_exists(cur, "supplies", "public_id"):
+ cur.execute("ALTER TABLE supplies ADD COLUMN public_id CHAR(36) NULL")
+ conn.commit()
+ print(" ✓ Added supplies.public_id")
+
+ cur.execute(
+ "UPDATE supplies SET public_id = UUID() WHERE public_id IS NULL OR public_id = ''"
+ )
+ n_backfill = cur.rowcount
+ conn.commit()
+ if n_backfill:
+ print(f" OK Backfilled public_id on {n_backfill} row(s)")
+
+ cur.execute("ALTER TABLE supplies MODIFY COLUMN public_id CHAR(36) NOT NULL")
+ conn.commit()
+ print(" OK supplies.public_id NOT NULL")
+
+ if not index_named_exists(cur, "supplies", "uq_supplies_public_id"):
+ cur.execute(
+ "ALTER TABLE supplies ADD UNIQUE INDEX uq_supplies_public_id (public_id)"
+ )
+ conn.commit()
+ print(" OK Added UNIQUE uq_supplies_public_id")
+
+ for idx_name, col in unique_single_column_indexes(cur, "supplies"):
+ if col == "name":
+ try:
+ cur.execute(f"ALTER TABLE supplies DROP INDEX `{idx_name}`")
+ conn.commit()
+ print(f" OK Dropped legacy unique index on name ({idx_name})")
+ except mysql.connector.Error as e:
+ print(f" WARN Could not DROP INDEX {idx_name}: {e}")
+
+ print("DONE migrate_supplies_public_id complete")
+ finally:
+ cur.close()
+ conn.close()
+
+
+if __name__ == "__main__":
+ try:
+ migrate_supplies_public_id()
+ except mysql.connector.Error as e:
+ print(f"ERROR migrate_supplies_public_id: {e}")
+ sys.exit(1)
diff --git a/src/scripts/migrate_supply_types.py b/src/scripts/migrate_supply_types.py
new file mode 100644
index 0000000..912c02a
--- /dev/null
+++ b/src/scripts/migrate_supply_types.py
@@ -0,0 +1,126 @@
+"""
+Migration: supply_types table and supplies.supply_type_id FK.
+"""
+import os
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).parent))
+
+import mysql.connector
+from helpers import parse_database_url
+
+
+def check_table_exists(cur, table_name):
+ cur.execute("""
+ SELECT COUNT(*) FROM information_schema.TABLES
+ WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = %s
+ """, (table_name,))
+ return cur.fetchone()[0] > 0
+
+
+def check_column_exists(cur, table_name, column_name):
+ cur.execute("""
+ SELECT COUNT(*)
+ FROM information_schema.COLUMNS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = %s
+ AND COLUMN_NAME = %s
+ """, (table_name, column_name))
+ return cur.fetchone()[0] > 0
+
+
+def migrate_supply_types():
+ try:
+ database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
+ db_params = parse_database_url(database_url)
+
+ print("🔄 Migrating supply_types / supplies.supply_type_id...")
+
+ conn = mysql.connector.connect(**db_params)
+ cur = conn.cursor()
+
+ if not check_table_exists(cur, 'supply_types'):
+ cur.execute("""
+ CREATE TABLE supply_types (
+ id BIGINT AUTO_INCREMENT PRIMARY KEY,
+ name VARCHAR(200) NOT NULL,
+ template_description TEXT NULL,
+ item_name_prefix VARCHAR(200) NOT NULL DEFAULT '',
+ item_description_prefix TEXT NULL,
+ image LONGTEXT NULL,
+ default_custom_fields JSON NULL,
+ locked_custom_field_keys JSON NULL,
+ locked_category_ids JSON NULL,
+ locked_team_names JSON NULL,
+ is_unique TINYINT(1) NOT NULL DEFAULT 0,
+ prevent_user_edit TINYINT(1) NOT NULL DEFAULT 0,
+ created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ UNIQUE KEY uq_supply_types_name (name)
+ )
+ """)
+ conn.commit()
+ print("✓ Created supply_types table")
+ else:
+ print("✓ supply_types already exists")
+
+ if not check_column_exists(cur, 'supplies', 'supply_type_id'):
+ cur.execute("""
+ ALTER TABLE supplies
+ ADD COLUMN supply_type_id BIGINT NULL AFTER custom_fields,
+ ADD CONSTRAINT fk_supplies_supply_type
+ FOREIGN KEY (supply_type_id) REFERENCES supply_types(id)
+ ON DELETE SET NULL ON UPDATE CASCADE
+ """)
+ conn.commit()
+ print("✓ Added supplies.supply_type_id")
+ else:
+ print("✓ supplies.supply_type_id already exists")
+
+ if check_table_exists(cur, 'supply_types') and not check_column_exists(
+ cur, 'supply_types', 'prevent_user_edit'
+ ):
+ cur.execute("""
+ ALTER TABLE supply_types
+ ADD COLUMN prevent_user_edit TINYINT(1) NOT NULL DEFAULT 0
+ AFTER is_unique
+ """)
+ conn.commit()
+ print("✓ Added supply_types.prevent_user_edit")
+ elif check_table_exists(cur, 'supply_types'):
+ print("✓ supply_types.prevent_user_edit already exists")
+
+ if check_table_exists(cur, 'supply_types') and not check_column_exists(
+ cur, 'supply_types', 'locked_category_ids'
+ ):
+ cur.execute(
+ "ALTER TABLE supply_types ADD COLUMN locked_category_ids JSON NULL AFTER locked_custom_field_keys"
+ )
+ conn.commit()
+ print("✓ Added supply_types.locked_category_ids")
+ elif check_table_exists(cur, 'supply_types'):
+ print("✓ supply_types.locked_category_ids already exists")
+
+ if check_table_exists(cur, 'supply_types') and not check_column_exists(
+ cur, 'supply_types', 'locked_team_names'
+ ):
+ cur.execute(
+ "ALTER TABLE supply_types ADD COLUMN locked_team_names JSON NULL AFTER locked_category_ids"
+ )
+ conn.commit()
+ print("✓ Added supply_types.locked_team_names")
+ elif check_table_exists(cur, 'supply_types'):
+ print("✓ supply_types.locked_team_names already exists")
+
+ cur.close()
+ conn.close()
+
+ except mysql.connector.Error as e:
+ print(f"⚠ supply_types migration warning: {e}")
+ except Exception as e:
+ print(f"⚠ supply_types migration warning: {e}")
+
+
+if __name__ == "__main__":
+ migrate_supply_types()
diff --git a/src/scripts/seed_data.py b/src/scripts/seed_data.py
index 06116e6..2fcfc5b 100644
--- a/src/scripts/seed_data.py
+++ b/src/scripts/seed_data.py
@@ -17,6 +17,7 @@
parse_database_url, get_sql_base_path, execute_sql_file, table_exists,
discover_table_files, topological_sort_tables
)
+from location_type_constants import SYSTEM_SPECIAL_LOCATION_NAMES
def get_seed_data_path(filename):
@@ -68,30 +69,36 @@ def load_locations_from_json():
def derive_location_type(title):
"""Derive location type from box title."""
+ if title and title.strip() in SYSTEM_SPECIAL_LOCATION_NAMES:
+ return 'special'
title_lower = title.lower()
if title_lower.startswith('drawer'):
return 'drawer'
- elif title_lower.startswith('cabinet') and not title_lower.startswith('tall cabinet'):
+ if title_lower.startswith('cabinet') and not title_lower.startswith('tall cabinet'):
return 'cabinet'
- elif title_lower.startswith('tall cabinet'):
+ if title_lower.startswith('tall cabinet'):
return 'tall_cabinet'
- elif title_lower.startswith('table'):
+ if title_lower.startswith('table'):
return 'table'
- elif 'workbench' in title_lower or title_lower == 'workbench':
- return 'workbench'
- else:
- return 'unknown'
+ if 'workbench' in title_lower or title_lower == 'workbench':
+ return 'other'
+ return 'other'
def ensure_all_tables_exist(conn, cur):
- """Ensure all tables exist by creating missing ones."""
+ """Ensure all tables exist by creating missing ones.
+
+ Returns:
+ tuple: (success_count, failed_tables) where failed_tables lists the names of tables whose creation failed
+ """
+ failed_tables = []
try:
sql_base_path = get_sql_base_path(__file__)
table_files = discover_table_files(sql_base_path)
if not table_files:
print("⚠ No table_*.sql files found")
- return
+ return 0, []
sorted_tables = topological_sort_tables(table_files)
missing_tables = []
@@ -102,18 +109,32 @@ def ensure_all_tables_exist(conn, cur):
if missing_tables:
print(f"📋 Creating {len(missing_tables)} missing table(s)...")
+ success_count = 0
for table_name, sql_file in missing_tables:
description = f"{table_name} table"
print(f" 🔨 Creating {table_name} from {sql_file.name}...")
if execute_sql_file(cur, sql_file, description):
print(f" ✓ {table_name} created")
+ success_count += 1
else:
print(f" ✗ Failed to create {table_name}")
+ failed_tables.append(table_name)
conn.commit()
+
+ if failed_tables:
+ print(f"\n❌ TABLE CREATION FAILED: {success_count}/{len(missing_tables)} tables created successfully")
+ print(f"❌ FAILED TABLES ({len(failed_tables)}): {', '.join(failed_tables)}")
+ else:
+ print(f"✓ All {success_count} missing table(s) created successfully")
+
+ return success_count, failed_tables
+ else:
+ return 0, []
except Exception as e:
print(f"⚠ Warning while ensuring tables exist: {e}")
import traceback
traceback.print_exc()
+ return 0, failed_tables
def get_db_connection():
@@ -168,11 +189,14 @@ def seed_categories():
cur = conn.cursor()
# Ensure all tables exist (including categories)
- ensure_all_tables_exist(conn, cur)
+ success_count, failed_tables = ensure_all_tables_exist(conn, cur)
# Verify categories table exists
if not table_exists(cur, 'categories'):
- print("✗ Categories table still does not exist after creation attempt")
+ if 'categories' in failed_tables:
+ print("✗ Categories table failed to be created (see errors above)")
+ else:
+ print("✗ Categories table still does not exist after creation attempt")
cur.close()
conn.close()
return
@@ -227,11 +251,14 @@ def seed_locations():
cur = conn.cursor()
# Ensure all tables exist (including locations)
- ensure_all_tables_exist(conn, cur)
+ success_count, failed_tables = ensure_all_tables_exist(conn, cur)
# Verify locations table exists
if not table_exists(cur, 'locations'):
- print("✗ Locations table still does not exist after creation attempt")
+ if 'locations' in failed_tables:
+ print("✗ Locations table failed to be created (see errors above)")
+ else:
+ print("✗ Locations table still does not exist after creation attempt")
cur.close()
conn.close()
return
@@ -261,23 +288,79 @@ def seed_locations():
json_names.add(name)
location_type = derive_location_type(name)
- shelf_count = 6 if location_type == 'tall_cabinet' else 0
+ # shelf_count comes from JSON and is authoritative; entries that omit it default to 0 (no shelves).
+ try:
+ shelf_count = max(0, int(box.get('shelf_count', 0) or 0))
+ except (TypeError, ValueError):
+ shelf_count = 0
+
+ # Get coordinates from JSON box
+ x = box.get('x', 0)
+ y = box.get('y', 0)
+ width = box.get('width', 150)
+ height = box.get('height', 150)
if name in existing_names:
- # Update existing location
- cur.execute(
- "UPDATE locations SET type = %s, shelf_count = %s WHERE name = %s",
- (location_type, shelf_count, name)
- )
- if cur.rowcount > 0:
- update_count += 1
- else:
- # Insert new location
+ # Update existing location (update all fields including coordinates and protected status)
+ # Check if columns exist first
try:
cur.execute(
- "INSERT INTO locations (name, type, shelf_count) VALUES (%s, %s, %s)",
- (name, location_type, shelf_count)
+ "UPDATE locations SET type = %s, shelf_count = %s, x = %s, y = %s, width = %s, height = %s, protected = %s WHERE name = %s",
+ (location_type, shelf_count, x, y, width, height, True, name)
)
+ if cur.rowcount > 0:
+ update_count += 1
+ except mysql.connector.Error as e:
+ # If columns don't exist, try without them
+ if 'Unknown column' in str(e):
+ # Try without protected column
+ try:
+ cur.execute(
+ "UPDATE locations SET type = %s, shelf_count = %s, x = %s, y = %s, width = %s, height = %s WHERE name = %s",
+ (location_type, shelf_count, x, y, width, height, name)
+ )
+ if cur.rowcount > 0:
+ update_count += 1
+ except mysql.connector.Error as e2:
+ if 'Unknown column' in str(e2):
+ cur.execute(
+ "UPDATE locations SET type = %s, shelf_count = %s WHERE name = %s",
+ (location_type, shelf_count, name)
+ )
+ if cur.rowcount > 0:
+ update_count += 1
+ else:
+ raise
+ else:
+ raise
+ else:
+ # Insert new location with coordinates - set protected=True for locations from JSON
+ try:
+ # Try with coordinates and protected first
+ try:
+ cur.execute(
+ "INSERT INTO locations (name, type, shelf_count, x, y, width, height, protected) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)",
+ (name, location_type, shelf_count, x, y, width, height, True)
+ )
+ except mysql.connector.Error as e:
+ # If protected column doesn't exist, try without it
+ if 'Unknown column' in str(e) and 'protected' in str(e):
+ try:
+ cur.execute(
+ "INSERT INTO locations (name, type, shelf_count, x, y, width, height) VALUES (%s, %s, %s, %s, %s, %s, %s)",
+ (name, location_type, shelf_count, x, y, width, height)
+ )
+ except mysql.connector.Error as e2:
+ # If coordinate columns don't exist, insert without them
+ if 'Unknown column' in str(e2):
+ cur.execute(
+ "INSERT INTO locations (name, type, shelf_count) VALUES (%s, %s, %s)",
+ (name, location_type, shelf_count)
+ )
+ else:
+ raise
+ else:
+ raise
insert_count += 1
except mysql.connector.IntegrityError:
# Skip if already exists (race condition)
@@ -314,7 +397,7 @@ def seed_teams():
cur = conn.cursor()
# Ensure all tables exist (including teams)
- ensure_all_tables_exist(conn, cur)
+ success_count, failed_tables = ensure_all_tables_exist(conn, cur)
# Check if teams exist
cur.execute("SELECT COUNT(*) FROM teams")
@@ -360,7 +443,7 @@ def seed_test_user():
cur = conn.cursor(dictionary=True)
# Ensure all tables exist (including members)
- ensure_all_tables_exist(conn, cur)
+ success_count, failed_tables = ensure_all_tables_exist(conn, cur)
# Check if test user exists
cur.execute("SELECT uf_id FROM members WHERE uf_email = %s", ("test@ufl.edu",))
diff --git a/src/scripts/seed_locations.py b/src/scripts/seed_locations.py
index 7b2054e..c3349ae 100644
--- a/src/scripts/seed_locations.py
+++ b/src/scripts/seed_locations.py
@@ -14,14 +14,15 @@
import json
import bcrypt
from helpers import parse_database_url, get_sql_base_path, execute_sql_file, table_exists
+from location_type_constants import SYSTEM_SPECIAL_LOCATION_NAMES
def load_locations_from_json():
- """Load locations from milventory/public/inventory-locations.json."""
+ """Load locations from src/seed_data/inventory-locations.json."""
# Get project root (go up from src/scripts to project root)
script_dir = Path(__file__).parent
project_root = script_dir.parent.parent
- json_path = project_root / "milventory" / "public" / "inventory-locations.json"
+ json_path = project_root / "src" / "seed_data" / "inventory-locations.json"
if not json_path.exists():
print(f"⚠ Warning: {json_path} not found, using empty locations list")
@@ -41,23 +42,24 @@ def load_locations_from_json():
def derive_location_type(title):
"""Derive location type from box title."""
+ if title and title.strip() in SYSTEM_SPECIAL_LOCATION_NAMES:
+ return 'special'
title_lower = title.lower()
if title_lower.startswith('drawer'):
return 'drawer'
- elif title_lower.startswith('cabinet') and not title_lower.startswith('tall cabinet'):
+ if title_lower.startswith('cabinet') and not title_lower.startswith('tall cabinet'):
return 'cabinet'
- elif title_lower.startswith('tall cabinet'):
+ if title_lower.startswith('tall cabinet'):
return 'tall_cabinet'
- elif title_lower.startswith('table'):
+ if title_lower.startswith('table'):
return 'table'
- elif 'workbench' in title_lower or title_lower == 'workbench':
- return 'workbench'
- else:
- return 'unknown'
+ if 'workbench' in title_lower or title_lower == 'workbench':
+ return 'other'
+ return 'other'
def seed_locations():
- """Sync locations from milventory/public/inventory-locations.json with database."""
+ """Sync locations from src/seed_data/inventory-locations.json with database."""
try:
# Get database connection parameters
database_url = os.getenv("DATABASE_URL", "mysql://mysqluser:mysqlpassword@db:3306/mydb")
@@ -174,22 +176,34 @@ def seed_locations():
json_names.add(name)
location_type = derive_location_type(name)
- shelf_count = 6 if location_type == 'tall_cabinet' else 0
+ # shelf_count is authoritative from JSON. Legacy JSON entries that
+ # omit it fall back to 0 (no shelves) — add `"shelf_count": N` in
+ # inventory-locations.json to turn shelves on for a location.
+ try:
+ shelf_count = max(0, int(box.get('shelf_count', 0) or 0))
+ except (TypeError, ValueError):
+ shelf_count = 0
+
+ # Get coordinates from JSON box
+ x = box.get('x', 0)
+ y = box.get('y', 0)
+ width = box.get('width', 150)
+ height = box.get('height', 150)
if name in existing_names:
- # Update existing location (preserve if exists, but update type/shelf_count if changed)
+ # Update existing location (update all fields including coordinates and protected status)
cur.execute(
- "UPDATE locations SET type = %s, shelf_count = %s WHERE name = %s",
- (location_type, shelf_count, name)
+ "UPDATE locations SET type = %s, shelf_count = %s, x = %s, y = %s, width = %s, height = %s, protected = %s WHERE name = %s",
+ (location_type, shelf_count, x, y, width, height, True, name)
)
if cur.rowcount > 0:
update_count += 1
else:
- # Insert new location
+ # Insert new location with coordinates - set protected=True for locations from JSON
try:
cur.execute(
- "INSERT INTO locations (name, type, shelf_count) VALUES (%s, %s, %s)",
- (name, location_type, shelf_count)
+ "INSERT INTO locations (name, type, shelf_count, x, y, width, height, protected) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)",
+ (name, location_type, shelf_count, x, y, width, height, True)
)
insert_count += 1
except mysql.connector.IntegrityError:
diff --git a/src/scripts/test_tables.py b/src/scripts/test_tables.py
index b713d7a..9ac5223 100644
--- a/src/scripts/test_tables.py
+++ b/src/scripts/test_tables.py
@@ -113,7 +113,16 @@ def verify_specific_table_schemas(cur, database_name):
column_names = [col[0] for col in columns]
# Required columns for new schema
- required_columns = ['id', 'name', 'description', 'image', 'last_modified', 'last_modified_by', 'created_at']
+ required_columns = [
+ 'id',
+ 'public_id',
+ 'name',
+ 'description',
+ 'image',
+ 'last_modified',
+ 'last_modified_by',
+ 'created_at',
+ ]
# Optional but should exist
optional_columns = ['last_order_date']
# Old columns that should NOT exist
@@ -133,10 +142,10 @@ def verify_specific_table_schemas(cur, database_name):
image_col = next((col for col in columns if col[0] == 'image'), None)
if image_col and 'longtext' not in str(image_col[1]).lower():
print(f"⚠ supplies: image column should be LONGTEXT, found: {image_col[1]}")
- # Check that name is UNIQUE (col[3] is COLUMN_KEY, should be 'UNI' or 'PRI')
- name_col = next((col for col in columns if col[0] == 'name'), None)
- if name_col and 'uni' not in str(name_col[3]).lower() and 'pri' not in str(name_col[3]).lower():
- print(f"⚠ supplies: name column should be UNIQUE")
+ # public_id should be unique (stable external id)
+ pid_col = next((col for col in columns if col[0] == 'public_id'), None)
+ if pid_col and 'uni' not in str(pid_col[3]).lower() and 'pri' not in str(pid_col[3]).lower():
+ print(f"⚠ supplies: public_id column should be UNIQUE")
print(f"✓ supplies: Schema validated ({len(columns)} columns)")
else:
print(f"✗ supplies: Table not found")
diff --git a/src/seed_data/generate-locations.js b/src/seed_data/generate-locations.js
index 5013ebd..9061343 100644
--- a/src/seed_data/generate-locations.js
+++ b/src/seed_data/generate-locations.js
@@ -155,6 +155,7 @@ for (let i = 0; i < 4; i++) {
width: tallCabinetWidth,
height: tallCabinetHeight,
fill: 'var(--files)',
+ shelf_count: 6,
inventory: []
});
}
@@ -171,6 +172,7 @@ const tallCabinet104 = {
width: tallCabinetWidth,
height: tallCabinetHeight,
fill: 'var(--files)',
+ shelf_count: 6,
inventory: []
};
@@ -234,6 +236,37 @@ const bottomTables = [
}
];
+/** Outside the room rect, just below the lab (room bottom ~3920). */
+const statusLocations = [
+ {
+ title: 'To Be Delivered',
+ x: 400,
+ y: 4000,
+ width: 800,
+ height: 800,
+ fill: 'var(--table)',
+ inventory: []
+ },
+ {
+ title: 'Lost Items',
+ x: 1250,
+ y: 4000,
+ width: 800,
+ height: 800,
+ fill: 'var(--table)',
+ inventory: []
+ },
+ {
+ title: 'Unsorted Items',
+ x: 2100,
+ y: 4000,
+ width: 800,
+ height: 800,
+ fill: 'var(--table)',
+ inventory: []
+ }
+];
+
// Combine all boxes in the desired order
const allBoxes = [
...topDrawers,
@@ -244,7 +277,8 @@ const allBoxes = [
...tallCabinets,
tallCabinet104,
...tables,
- ...bottomTables
+ ...bottomTables,
+ ...statusLocations
];
// Create the full JSON structure
@@ -256,7 +290,7 @@ const output = {
// Format function to align columns
function formatInventoryData(data) {
// Define attribute order
- const attributeOrder = ['title', 'x', 'y', 'width', 'height', 'fill', 'isWorkbench', 'inventory'];
+ const attributeOrder = ['title', 'x', 'y', 'width', 'height', 'fill', 'shelf_count', 'isWorkbench', 'inventory'];
// Find the maximum width for each attribute's value across all boxes
const maxValueWidths = {};
diff --git a/src/seed_data/inventory-locations.json b/src/seed_data/inventory-locations.json
index 76c9996..cdcd7bd 100644
--- a/src/seed_data/inventory-locations.json
+++ b/src/seed_data/inventory-locations.json
@@ -16,21 +16,21 @@
}
},
"boxes": [
- { "title": "Drawer A" , "x": 720 , "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer B" , "x": 875 , "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer C" , "x": 1030, "y": 80 , "width": 300, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer D" , "x": 1335, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer E" , "x": 1490, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer F" , "x": 1645, "y": 80 , "width": 300, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer G" , "x": 1950, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer H" , "x": 2105, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer I" , "x": 2260, "y": 80 , "width": 300, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer J" , "x": 2565, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Drawer K" , "x": 2720, "y": 80 , "width": 150, "height": 150, "fill": "var(--drawer)" },
- { "title": "Cabinet 1" , "x": 720 , "y": 250 , "width": 305, "height": 155, "fill": "var(--table)" },
- { "title": "Cabinet 2" , "x": 1335, "y": 250 , "width": 305, "height": 155, "fill": "var(--table)" },
- { "title": "Cabinet 3" , "x": 1950, "y": 250 , "width": 305, "height": 155, "fill": "var(--table)" },
- { "title": "Cabinet 4" , "x": 2565, "y": 250 , "width": 305, "height": 155, "fill": "var(--table)" },
+ { "title": "Drawer A" , "x": 720 , "y": 120 , "width": 150, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer B" , "x": 875 , "y": 120 , "width": 150, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer C" , "x": 1030, "y": 120 , "width": 300, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer D" , "x": 1335, "y": 120 , "width": 150, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer E" , "x": 1490, "y": 120 , "width": 150, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer F" , "x": 1645, "y": 120 , "width": 300, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer G" , "x": 1950, "y": 120 , "width": 150, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer H" , "x": 2105, "y": 120 , "width": 150, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer I" , "x": 2260, "y": 120 , "width": 300, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer J" , "x": 2565, "y": 120 , "width": 150, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Drawer K" , "x": 2720, "y": 120 , "width": 150, "height": 150, "fill": "var(--drawer)" },
+ { "title": "Cabinet 1" , "x": 720 , "y": 290 , "width": 305, "height": 155, "fill": "var(--table)" },
+ { "title": "Cabinet 2" , "x": 1335, "y": 290 , "width": 305, "height": 155, "fill": "var(--table)" },
+ { "title": "Cabinet 3" , "x": 1950, "y": 290 , "width": 305, "height": 155, "fill": "var(--table)" },
+ { "title": "Cabinet 4" , "x": 2565, "y": 290 , "width": 305, "height": 155, "fill": "var(--table)" },
{ "title": "Drawer L" , "x": 3500, "y": 840 , "width": 150, "height": 150, "fill": "var(--drawer)" },
{ "title": "Drawer M" , "x": 3500, "y": 995 , "width": 150, "height": 150, "fill": "var(--drawer)" },
{ "title": "Drawer N" , "x": 3500, "y": 1150, "width": 150, "height": 305, "fill": "var(--drawer)" },
@@ -54,12 +54,12 @@
{ "title": "Cabinet 10" , "x": 3325, "y": 2390, "width": 155, "height": 305, "fill": "var(--table)" },
{ "title": "Cabinet 11" , "x": 3325, "y": 2700, "width": 155, "height": 305, "fill": "var(--table)" },
{ "title": "Cabinet 12" , "x": 3325, "y": 3010, "width": 155, "height": 305, "fill": "var(--table)" },
- { "title": "Workbench" , "x": 140 , "y": 1680, "width": 350, "height": 520, "fill": "#e7ebf3" , "isWorkbench": true },
- { "title": "Tall Cabinet 103", "x": 140 , "y": 2260, "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Tall Cabinet 102", "x": 140 , "y": 2565, "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Tall Cabinet 101", "x": 140 , "y": 2870, "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Tall Cabinet 100", "x": 140 , "y": 3175, "width": 240, "height": 300, "fill": "var(--files)" },
- { "title": "Tall Cabinet 104", "x": 3410, "y": 520 , "width": 240, "height": 300, "fill": "var(--files)" },
+ { "title": "Workbench" , "x": 140 , "y": 1680, "width": 350, "height": 520, "fill": "#e7ebf3" },
+ { "title": "Tall Cabinet 103", "x": 140 , "y": 2260, "width": 240, "height": 300, "fill": "var(--files)", "shelf_count": 6 },
+ { "title": "Tall Cabinet 102", "x": 140 , "y": 2565, "width": 240, "height": 300, "fill": "var(--files)", "shelf_count": 6 },
+ { "title": "Tall Cabinet 101", "x": 140 , "y": 2870, "width": 240, "height": 300, "fill": "var(--files)", "shelf_count": 6 },
+ { "title": "Tall Cabinet 100", "x": 140 , "y": 3175, "width": 240, "height": 300, "fill": "var(--files)", "shelf_count": 6 },
+ { "title": "Tall Cabinet 104", "x": 3410, "y": 520 , "width": 240, "height": 300, "fill": "var(--files)", "shelf_count": 6 },
{ "title": "Table A" , "x": 800 , "y": 1080, "width": 720, "height": 300, "fill": "var(--table)" },
{ "title": "Table B" , "x": 800 , "y": 1385, "width": 720, "height": 300, "fill": "var(--table)" },
{ "title": "Table C" , "x": 2100, "y": 1080, "width": 720, "height": 300, "fill": "var(--table)" },
@@ -69,6 +69,9 @@
{ "title": "Table G" , "x": 2100, "y": 2160, "width": 720, "height": 300, "fill": "var(--table)" },
{ "title": "Table H" , "x": 2100, "y": 2465, "width": 720, "height": 300, "fill": "var(--table)" },
{ "title": "Table I" , "x": 1400, "y": 3600, "width": 720, "height": 300, "fill": "var(--table)" },
- { "title": "Table J" , "x": 2170, "y": 3600, "width": 720, "height": 300, "fill": "var(--table)" }
+ { "title": "Table J" , "x": 2170, "y": 3600, "width": 720, "height": 300, "fill": "var(--table)" },
+ { "title": "To Be Delivered", "x": 400 , "y": 4000, "width": 800, "height": 800, "fill": "var(--table)" },
+ { "title": "Lost Items" , "x": 1250, "y": 4000, "width": 800, "height": 800, "fill": "var(--table)" },
+ { "title": "Unsorted Items" , "x": 2100, "y": 4000, "width": 800, "height": 800, "fill": "var(--table)" }
]
}
diff --git a/src/sql/location/table_locations.sql b/src/sql/location/table_locations.sql
deleted file mode 100644
index fe8c1d8..0000000
--- a/src/sql/location/table_locations.sql
+++ /dev/null
@@ -1,8 +0,0 @@
--- Location metadata (layout coordinates are in inventory-locations.json, not stored in DB)
-CREATE TABLE locations (
- name VARCHAR(100) PRIMARY KEY, -- e.g. "Drawer A", "Tall Cabinet 103"
- type VARCHAR(50) NOT NULL, -- "drawer", "cabinet", "table", "workbench", "tall_cabinet"
- shelf_count INT NOT NULL DEFAULT 0, -- 6 for Tall Cabinets, 0 for everything else
- created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
-);
-
diff --git a/src/sql/members/insert_member.sql b/src/sql/members/insert_member.sql
deleted file mode 100644
index ad7b841..0000000
--- a/src/sql/members/insert_member.sql
+++ /dev/null
@@ -1,3 +0,0 @@
--- Inserts into members table and returns the uf_id of the new member
-INSERT INTO members(first_name,last_name,uf_id,uf_email,phone_number,team,discord,github,grad_date,join_date,is_leader)
-VALUES (%(first)s,%(last)s,%(ufid)s,%(email)s,%(phone)s,%(team)s,%(discord)s,%(github)s,%(grad)s,%(join)s,%(leader)s)
\ No newline at end of file
diff --git a/src/sql/weekly_reports/insert_weekly_report.sql b/src/sql/weekly_reports/insert_weekly_report.sql
deleted file mode 100644
index 129f2d9..0000000
--- a/src/sql/weekly_reports/insert_weekly_report.sql
+++ /dev/null
@@ -1,4 +0,0 @@
--- Inserts into members table and returns the uf_id of the new member
-INSERT INTO weekly_reports(uf_id, report_date, progress_rating)
-VALUES (%(uf_id)s,%(report_date)s,%(progress_rating)s)
-RETURNING uf_id;
\ No newline at end of file
diff --git a/src/sql/applicants/table_applicants.sql b/src/tables/applicants/table_applicants.sql
similarity index 100%
rename from src/sql/applicants/table_applicants.sql
rename to src/tables/applicants/table_applicants.sql
diff --git a/src/sql/categories/table_categories.sql b/src/tables/categories/table_categories.sql
similarity index 99%
rename from src/sql/categories/table_categories.sql
rename to src/tables/categories/table_categories.sql
index 9470217..037be6a 100644
--- a/src/sql/categories/table_categories.sql
+++ b/src/tables/categories/table_categories.sql
@@ -5,3 +5,5 @@ CREATE TABLE IF NOT EXISTS categories (
INDEX idx_name (name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
+
+
diff --git a/src/tables/custom_field_definitions/table_custom_field_definitions.sql b/src/tables/custom_field_definitions/table_custom_field_definitions.sql
new file mode 100644
index 0000000..1e8ca09
--- /dev/null
+++ b/src/tables/custom_field_definitions/table_custom_field_definitions.sql
@@ -0,0 +1,6 @@
+CREATE TABLE custom_field_definitions (
+ id INT AUTO_INCREMENT PRIMARY KEY,
+ name VARCHAR(200) NOT NULL UNIQUE COMMENT 'display name and key',
+ type VARCHAR(20) NOT NULL COMMENT 'text, number, or date',
+ CONSTRAINT chk_type CHECK (type IN ('text', 'number', 'date'))
+);
diff --git a/src/tables/location/table_locations.sql b/src/tables/location/table_locations.sql
new file mode 100644
index 0000000..1c5dcbe
--- /dev/null
+++ b/src/tables/location/table_locations.sql
@@ -0,0 +1,13 @@
+-- Location metadata and layout coordinates (all stored in DB)
+CREATE TABLE locations (
+ name VARCHAR(100) PRIMARY KEY, -- e.g. "Drawer A", "Tall Cabinet 103"
+ type VARCHAR(50) NOT NULL, -- leader: drawer|cabinet|tall_cabinet|table|other; system: special
+ x INT NOT NULL DEFAULT 0, -- X coordinate for layout
+ y INT NOT NULL DEFAULT 0, -- Y coordinate for layout
+ width INT NOT NULL DEFAULT 150, -- Width in pixels
+ height INT NOT NULL DEFAULT 150, -- Height in pixels
+ shelf_count INT NOT NULL DEFAULT 0, -- seeded from JSON "shelf_count" (Tall Cabinets use 6); 0 = no shelves
+ protected BOOLEAN NOT NULL DEFAULT FALSE, -- TRUE for permanent locations from JSON
+ created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
+);
+
diff --git a/src/sql/members/table_members.sql b/src/tables/members/table_members.sql
similarity index 100%
rename from src/sql/members/table_members.sql
rename to src/tables/members/table_members.sql
diff --git a/src/sql/orders/table_orders.sql b/src/tables/orders/table_orders.sql
similarity index 100%
rename from src/sql/orders/table_orders.sql
rename to src/tables/orders/table_orders.sql
diff --git a/src/sql/supplies/table_supplies.sql b/src/tables/supplies/table_supplies.sql
similarity index 83%
rename from src/sql/supplies/table_supplies.sql
rename to src/tables/supplies/table_supplies.sql
index b207775..89e27ab 100644
--- a/src/sql/supplies/table_supplies.sql
+++ b/src/tables/supplies/table_supplies.sql
@@ -1,12 +1,15 @@
CREATE TABLE supplies (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
- name VARCHAR(200) NOT NULL UNIQUE,
+ public_id CHAR(36) NOT NULL,
+ name VARCHAR(200) NOT NULL,
description TEXT,
image LONGTEXT, -- Base64-encoded image data (nullable)
last_order_date DATE, -- Keep for backend tracking, not shown in frontend
last_modified DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
last_modified_by CHAR(8), -- UF ID of user who last modified (nullable, FK to members.uf_id)
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+ UNIQUE KEY uq_supplies_public_id (public_id),
CONSTRAINT fk_supplies_modified_by FOREIGN KEY (last_modified_by) REFERENCES members(uf_id)
ON UPDATE CASCADE ON DELETE SET NULL
diff --git a/src/sql/supplies_categories/table_supplies_categories.sql b/src/tables/supplies_categories/table_supplies_categories.sql
similarity index 99%
rename from src/sql/supplies_categories/table_supplies_categories.sql
rename to src/tables/supplies_categories/table_supplies_categories.sql
index 4e7191a..275ac75 100644
--- a/src/sql/supplies_categories/table_supplies_categories.sql
+++ b/src/tables/supplies_categories/table_supplies_categories.sql
@@ -12,3 +12,5 @@ CREATE TABLE IF NOT EXISTS supplies_categories (
INDEX idx_category_id (category_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
+
+
diff --git a/src/tables/supplies_history/table_supplies_history.sql b/src/tables/supplies_history/table_supplies_history.sql
new file mode 100644
index 0000000..a225a70
--- /dev/null
+++ b/src/tables/supplies_history/table_supplies_history.sql
@@ -0,0 +1,33 @@
+-- History of all supply (master item) operations (CREATE, UPDATE, DELETE)
+-- NOTE: This table does not track "undone" status. Instead, the API determines
+-- if an entry can be undone based on whether the supply still exists.
+-- See supplies_location_history table for undone tracking.
+CREATE TABLE supplies_history (
+ id BIGINT AUTO_INCREMENT PRIMARY KEY,
+ supply_id BIGINT NULL, -- Nullable to allow history entries for deleted supplies
+ action_type ENUM('CREATE', 'UPDATE', 'DELETE') NOT NULL,
+
+ -- Snapshot of data before/after change
+ old_name VARCHAR(200),
+ new_name VARCHAR(200),
+ old_description TEXT,
+ new_description TEXT,
+ old_image LONGTEXT,
+ new_image LONGTEXT,
+ old_last_order_date DATE,
+ new_last_order_date DATE,
+
+ -- Metadata
+ changed_by CHAR(8), -- UF ID (nullable to allow member deletion)
+ changed_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+ -- Foreign keys
+ CONSTRAINT fk_history_supply FOREIGN KEY (supply_id) REFERENCES supplies(id)
+ ON UPDATE CASCADE ON DELETE SET NULL,
+ CONSTRAINT fk_history_changed_by FOREIGN KEY (changed_by) REFERENCES members(uf_id)
+ ON UPDATE CASCADE ON DELETE SET NULL,
+
+ INDEX idx_supply_id (supply_id),
+ INDEX idx_changed_at (changed_at DESC),
+ INDEX idx_action_type (action_type)
+);
diff --git a/src/tables/supplies_history/table_supplies_history_categories.sql b/src/tables/supplies_history/table_supplies_history_categories.sql
new file mode 100644
index 0000000..e5ce6ae
--- /dev/null
+++ b/src/tables/supplies_history/table_supplies_history_categories.sql
@@ -0,0 +1,9 @@
+CREATE TABLE supplies_history_categories (
+ id BIGINT AUTO_INCREMENT PRIMARY KEY,
+ history_id BIGINT NOT NULL,
+ category_id INT NOT NULL,
+ action ENUM('ADDED', 'REMOVED') NOT NULL,
+
+ CONSTRAINT fk_history_categories_history FOREIGN KEY (history_id) REFERENCES supplies_history(id)
+ ON UPDATE CASCADE ON DELETE CASCADE
+);
diff --git a/src/tables/supplies_history/table_supplies_history_teams.sql b/src/tables/supplies_history/table_supplies_history_teams.sql
new file mode 100644
index 0000000..7da585b
--- /dev/null
+++ b/src/tables/supplies_history/table_supplies_history_teams.sql
@@ -0,0 +1,9 @@
+CREATE TABLE supplies_history_teams (
+ id BIGINT AUTO_INCREMENT PRIMARY KEY,
+ history_id BIGINT NOT NULL,
+ team_name VARCHAR(50) NOT NULL,
+ action ENUM('ADDED', 'REMOVED') NOT NULL,
+
+ CONSTRAINT fk_history_teams_history FOREIGN KEY (history_id) REFERENCES supplies_history(id)
+ ON UPDATE CASCADE ON DELETE CASCADE
+);
diff --git a/src/sql/supplies_location/table_supplies_location.sql b/src/tables/supplies_location/table_supplies_location.sql
similarity index 100%
rename from src/sql/supplies_location/table_supplies_location.sql
rename to src/tables/supplies_location/table_supplies_location.sql
diff --git a/src/tables/supplies_location/table_supplies_location_history.sql b/src/tables/supplies_location/table_supplies_location_history.sql
new file mode 100644
index 0000000..33c1121
--- /dev/null
+++ b/src/tables/supplies_location/table_supplies_location_history.sql
@@ -0,0 +1,64 @@
+-- History of all location operations (ADD, REMOVE, UPDATE, MOVE, CASCADED_SUBTRACT)
+CREATE TABLE supplies_location_history (
+ id BIGINT AUTO_INCREMENT PRIMARY KEY,
+
+ -- Supply reference (nullable: SET NULL when supply is hard-deleted)
+ supply_id BIGINT NULL,
+ -- Denormalized supply name: REQUIRED for restoration after supply deletion
+ supply_name VARCHAR(200) NOT NULL,
+
+ -- Location info (denormalized: location rows can be renamed/deleted)
+ location_name VARCHAR(100) NOT NULL,
+ shelf INT DEFAULT NULL,
+
+ -- Action
+ action_type ENUM('ADD', 'REMOVE', 'UPDATE', 'MOVE', 'CASCADED_SUBTRACT') NOT NULL,
+ -- ADD = units placed into this location
+ -- REMOVE = units removed from this location
+ -- UPDATE = amount changed directly (e.g. edit qty field)
+ -- MOVE = units moved between locations (generates two rows: one REMOVE, one ADD)
+ -- CASCADED_SUBTRACT = items subtracted from location when master item is deleted (CASCADE)
+
+ -- Amounts (NULL means "not applicable" for that side)
+ old_amount INT DEFAULT NULL, -- amount before change (NULL for ADD)
+ new_amount INT DEFAULT NULL, -- amount after change (NULL for REMOVE/CASCADED_SUBTRACT)
+
+ -- For MOVE actions: where the units came from / went to
+ related_location VARCHAR(100) DEFAULT NULL, -- the other location in a MOVE
+ related_shelf INT DEFAULT NULL,
+
+ -- For grouping a single logical operation (e.g. bulk-add touches multiple rows)
+ batch_id CHAR(36) DEFAULT NULL, -- UUID generated per API call
+
+ -- Undo bookkeeping (DEPRECATED: Entries are now DELETED entirely when undone)
+ -- NOTE: The undone fields are kept for backward compatibility but are no longer used.
+ -- When an action is undone, the history entry is DELETED entirely from the database.
+ -- See undo_location_history and undo_batch_history endpoints which DELETE entries.
+ undone BOOLEAN NOT NULL DEFAULT FALSE,
+ undone_at DATETIME DEFAULT NULL,
+ undone_by CHAR(8) DEFAULT NULL,
+
+ -- Audit
+ changed_by CHAR(8) DEFAULT NULL,
+ changed_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+ -- Indexes
+ INDEX idx_supply_id (supply_id),
+ INDEX idx_supply_name (supply_name),
+ INDEX idx_location (location_name),
+ INDEX idx_changed_at (changed_at DESC),
+ INDEX idx_batch (batch_id),
+ INDEX idx_undone (undone, changed_at DESC),
+
+ -- FKs
+ CONSTRAINT fk_slh_supply
+ FOREIGN KEY (supply_id) REFERENCES supplies(id)
+ ON UPDATE CASCADE ON DELETE SET NULL,
+ CONSTRAINT fk_slh_changed_by
+ FOREIGN KEY (changed_by) REFERENCES members(uf_id)
+ ON UPDATE CASCADE ON DELETE SET NULL,
+ CONSTRAINT fk_slh_undone_by
+ FOREIGN KEY (undone_by) REFERENCES members(uf_id)
+ ON UPDATE CASCADE ON DELETE SET NULL
+);
+
diff --git a/src/sql/supplies_teams/table_supplies_teams.sql b/src/tables/supplies_teams/table_supplies_teams.sql
similarity index 100%
rename from src/sql/supplies_teams/table_supplies_teams.sql
rename to src/tables/supplies_teams/table_supplies_teams.sql
diff --git a/src/tables/supply_types/table_supply_types.sql b/src/tables/supply_types/table_supply_types.sql
new file mode 100644
index 0000000..f71afe7
--- /dev/null
+++ b/src/tables/supply_types/table_supply_types.sql
@@ -0,0 +1,18 @@
+-- Template types for master supplies (item types)
+CREATE TABLE supply_types (
+ id BIGINT AUTO_INCREMENT PRIMARY KEY,
+ name VARCHAR(200) NOT NULL,
+ template_description TEXT NULL,
+ item_name_prefix VARCHAR(200) NOT NULL DEFAULT '',
+ item_description_prefix TEXT NULL,
+ image LONGTEXT NULL,
+ default_custom_fields JSON NULL,
+ locked_custom_field_keys JSON NULL,
+ locked_category_ids JSON NULL,
+ locked_team_names JSON NULL,
+ is_unique TINYINT(1) NOT NULL DEFAULT 0,
+ prevent_user_edit TINYINT(1) NOT NULL DEFAULT 0,
+ created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ UNIQUE KEY uq_supply_types_name (name)
+);
diff --git a/src/sql/teams/table_teams.sql b/src/tables/teams/table_teams.sql
similarity index 100%
rename from src/sql/teams/table_teams.sql
rename to src/tables/teams/table_teams.sql
diff --git a/src/sql/weekly_reports/table_weekly_reports.sql b/src/tables/weekly_reports/table_weekly_reports.sql
similarity index 100%
rename from src/sql/weekly_reports/table_weekly_reports.sql
rename to src/tables/weekly_reports/table_weekly_reports.sql