From 7700696f8b184de44ce808d88585b75af8a2a34d Mon Sep 17 00:00:00 2001 From: Eric Moss <66630775+emoss08@users.noreply.github.com> Date: Sat, 1 Feb 2025 18:43:12 -0500 Subject: [PATCH] =?UTF-8?q?=E2=AD=95(wip):=20shipment=20api=20and=20page?= =?UTF-8?q?=20(#303)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✅(add): createdAt column helper * 🟡(change): column visibility * 🪛(fix): trailer form * ✅(add): new validation context function * 🟡(change): add support for user defined time format * 🍪(chore): bump * 🍪(chore): fix readme * 🟡(change): fleet details in trailer api * Update fixtures.yml * 🟡(change): include fleet details in tractor api * 🟡(change): add query params to api logging * bump * ✅(add): shipment validation * 🪛(fix): tests * ✅(add): new test for shipment * 🍪(chore): bump * 🪛(fix): shipment migration fields * ✅(add): shipment api * ✅(add): shipment billing validation * 🪛(fix): migrations * 🟡(change): shipment api query params * 🟡(change): shipment repo * 🍪(chore): bump fixtures * ⭕(wip): shipment page * 🪛(fix): broken test * 🟡(change): add support for column id * ✅(add): data-table header with tooltip * 🟡(change): date utils to default to military time * 🪛(fix): shipment status enums * Update 20241211015858_shipment_move.tx.up.sql * ✅(add): include customer details in shipment api * Update fixtures.yml * 🟡(wip): pc miler routing * ⭕(wip): shipment page * 🍪(chore): run go imports * ✅(add): location single search * Update page.tsx * ⭕(wip): shipment page * ⭕(wip): shipment map view * 🟡(change): loading skeleton * bump * 🟡(change): add discord badge --- README.md | 5 +- go.mod | 14 + go.sum | 32 + internal/api/handlers/routing/handler.go | 61 ++ internal/api/handlers/shipment/handler.go | 187 ++++++ internal/api/handlers/tractor/handler.go | 2 + internal/api/handlers/trailer/handler.go | 2 + internal/api/middleware/logging.go | 2 + internal/api/routes/router.go | 10 + internal/bootstrap/app.go | 2 + internal/bootstrap/modules/api/handlers.go | 4 + internal/bootstrap/modules/external/module.go | 10 + internal/bootstrap/modules/services/module.go | 4 + .../bootstrap/modules/validators/module.go | 4 + internal/core/domain/README.md | 10 +- internal/core/domain/google/enums.go | 27 + internal/core/domain/location/location.go | 2 +- .../pcmilerconfiguration/pcmilerconfig.go | 49 ++ internal/core/domain/shipment/enums.go | 26 +- internal/core/domain/shipment/shipment.go | 164 ++++- internal/core/domain/shipment/shipmentmove.go | 72 ++- internal/core/domain/shipment/stop.go | 102 ++- internal/core/domain/trailer/trailer.go | 2 +- internal/core/domain/types.go | 7 + internal/core/domain/user/enums.go | 7 +- internal/core/domain/worker/workerpto.go | 2 +- .../repositories/pcmilerconfiguration.go | 17 + internal/core/ports/repositories/shipment.go | 33 + internal/core/ports/repositories/tractor.go | 2 + internal/core/ports/repositories/trailer.go | 2 + internal/core/ports/repositories/usstate.go | 1 + internal/core/services/routing/service.go | 67 ++ internal/core/services/shipment/events.go | 1 - internal/core/services/shipment/service.go | 286 +++++++++ internal/core/services/tracking/realtime.go | 1 - internal/core/services/tracking/service.go | 1 - .../20241211015840_shipment.tx.up.sql | 25 +- .../20241211015858_shipment_move.tx.up.sql | 7 +- .../migrations/20241211015927_stop.tx.up.sql | 23 +- .../20241211020004_worker.tx.up.sql | 3 +- .../20250116025406_service_type.tx.up.sql | 2 +- .../20250122212814_location.tx.down.sql | 4 +-
.../20250122212814_location.tx.up.sql | 7 + .../20250125144620_tractor.tx.up.sql | 7 + .../20250126210710_trailer.tx.up.sql | 7 + .../20250201004605_pcmiler.tx.down.sql | 3 + .../20250201004605_pcmiler.tx.up.sql | 20 + .../postgres/repositories/hazmatexpiration.go | 4 - .../postgres/repositories/location_test.go | 2 +- .../postgres/repositories/locationindex.go | 14 + .../database/postgres/repositories/module.go | 2 + .../repositories/pcmilerconfiguration.go | 52 ++ .../postgres/repositories/shipment.go | 224 +++++++ .../database/postgres/repositories/tractor.go | 7 + .../postgres/repositories/tractor_test.go | 33 + .../database/postgres/repositories/trailer.go | 8 + .../postgres/repositories/trailer_test.go | 33 + .../database/postgres/repositories/usstate.go | 48 ++ internal/infrastructure/external/maps/bing.go | 1 - .../infrastructure/external/maps/google.go | 1 - internal/infrastructure/external/maps/here.go | 1 - .../infrastructure/external/maps/mapbox.go | 1 - .../infrastructure/external/maps/pcmiler.go | 1 - .../external/maps/pcmiler/client.go | 98 +++ .../external/maps/pcmiler/errors.go | 46 ++ .../external/maps/pcmiler/types.go | 168 +++++ internal/pkg/errors/errors.go | 101 ++- internal/pkg/registry/domain.go | 3 + .../pkg/validator/shipmentvalidator/move.go | 158 +++++ .../validator/shipmentvalidator/move_test.go | 280 ++++++++ .../validator/shipmentvalidator/shipment.go | 283 +++++++++ .../shipmentvalidator/shipment_test.go | 505 +++++++++++++++ .../pkg/validator/shipmentvalidator/stop.go | 63 ++ .../validator/shipmentvalidator/stop_test.go | 142 +++++ internal/pkg/validator/types.go | 9 + test/fixtures/fixtures.yml | 597 +++++++++++++++++- ui/package-lock.json | 92 ++- ui/package.json | 10 +- ui/src/app/auth/_components/auth-form.tsx | 2 +- .../_components/commodity-columns.tsx | 1 + .../_components/customer-columns.tsx | 2 + .../_components/customer-edit-modal.tsx | 4 +- .../equip-manufacturer-columns.tsx | 1 + .../equip-manufacturer-create-modal.tsx | 2 +- .../_components/equip-type-columns.tsx | 1 + .../_components/fleet-code-columns.tsx | 1 + .../hazardous-material-columns.tsx | 1 + .../_components/location-category-columns.tsx | 1 + .../_components/location-columns.tsx | 15 +- .../_components/service-type-columns.tsx | 1 + .../_components/shipment-type-columns.tsx | 1 + .../shipment/_components/shipment-columns.tsx | 158 +++++ .../shipment/_components/shipment-table.tsx | 25 + .../_components/sidebar/shipment-card.tsx | 132 ++++ .../sidebar/shipment-filter-options.tsx | 30 + .../_components/sidebar/shipment-sidebar.tsx | 188 ++++++ .../_components/sidebar/shipment-timeline.tsx | 72 +++ ui/src/app/shipment/page.tsx | 137 ++++ .../tractor/_components/tractor-columns.tsx | 20 + .../app/tractor/_components/tractor-form.tsx | 7 +- .../app/tractor/_components/tractor-table.tsx | 1 + .../trailers/_components/trailer-columns.tsx | 10 + .../app/trailers/_components/trailer-form.tsx | 3 + .../trailers/_components/trailer-table.tsx | 1 + .../_components/workers-table-columns.tsx | 1 + .../_components/data-table-column-header.tsx | 123 ++++ .../_components/data-table-column-helpers.tsx | 226 +++++-- .../_components/data-table-filter-dialog.tsx | 1 - .../_components/data-table-view-options.tsx | 15 +- ui/src/components/data-table/data-table.tsx | 11 +- .../components/fields/select-components.tsx | 2 +- ui/src/components/fields/select-field.tsx | 5 +- ui/src/components/providers.tsx | 4 +- ui/src/components/status-badge.tsx | 36 ++ ui/src/components/ui/form-edit-model.tsx | 5 +- 
ui/src/hooks/use-resize-observer.ts | 17 + ui/src/lib/date.ts | 206 ++++-- ui/src/lib/nav-links.ts | 11 +- ui/src/lib/queries.ts | 3 +- ui/src/lib/schemas/move-schema.ts | 35 + ui/src/lib/schemas/shipment-filter-schema.ts | 11 + ui/src/lib/schemas/shipment-schema.ts | 153 +++++ ui/src/lib/schemas/stop-schema.ts | 86 +++ ui/src/lib/shipment/utils.ts | 146 +++++ ui/src/lib/utils.ts | 94 ++- ui/src/lib/variants/badge.ts | 17 +- ui/src/routing/router.tsx | 11 + ui/src/types/fields.ts | 12 +- ui/src/types/move.ts | 13 + ui/src/types/shipment.ts | 39 ++ ui/src/types/stop.ts | 20 + ui/vite.config.ts | 3 + 132 files changed, 6166 insertions(+), 272 deletions(-) create mode 100644 internal/api/handlers/routing/handler.go create mode 100644 internal/api/handlers/shipment/handler.go create mode 100644 internal/bootstrap/modules/external/module.go create mode 100644 internal/core/domain/google/enums.go create mode 100644 internal/core/domain/pcmilerconfiguration/pcmilerconfig.go create mode 100644 internal/core/ports/repositories/pcmilerconfiguration.go create mode 100644 internal/core/ports/repositories/shipment.go delete mode 100644 internal/core/services/shipment/events.go delete mode 100644 internal/core/services/tracking/realtime.go delete mode 100644 internal/core/services/tracking/service.go create mode 100644 internal/infrastructure/database/postgres/migrations/20250201004605_pcmiler.tx.down.sql create mode 100644 internal/infrastructure/database/postgres/migrations/20250201004605_pcmiler.tx.up.sql create mode 100644 internal/infrastructure/database/postgres/repositories/locationindex.go create mode 100644 internal/infrastructure/database/postgres/repositories/pcmilerconfiguration.go create mode 100644 internal/infrastructure/database/postgres/repositories/shipment.go delete mode 100644 internal/infrastructure/external/maps/bing.go delete mode 100644 internal/infrastructure/external/maps/google.go delete mode 100644 internal/infrastructure/external/maps/here.go delete mode 100644 internal/infrastructure/external/maps/mapbox.go delete mode 100644 internal/infrastructure/external/maps/pcmiler.go create mode 100644 internal/infrastructure/external/maps/pcmiler/client.go create mode 100644 internal/infrastructure/external/maps/pcmiler/errors.go create mode 100644 internal/infrastructure/external/maps/pcmiler/types.go create mode 100644 internal/pkg/validator/shipmentvalidator/move.go create mode 100644 internal/pkg/validator/shipmentvalidator/move_test.go create mode 100644 internal/pkg/validator/shipmentvalidator/shipment.go create mode 100644 internal/pkg/validator/shipmentvalidator/shipment_test.go create mode 100644 internal/pkg/validator/shipmentvalidator/stop.go create mode 100644 internal/pkg/validator/shipmentvalidator/stop_test.go create mode 100644 ui/src/app/shipment/_components/shipment-columns.tsx create mode 100644 ui/src/app/shipment/_components/shipment-table.tsx create mode 100644 ui/src/app/shipment/_components/sidebar/shipment-card.tsx create mode 100644 ui/src/app/shipment/_components/sidebar/shipment-filter-options.tsx create mode 100644 ui/src/app/shipment/_components/sidebar/shipment-sidebar.tsx create mode 100644 ui/src/app/shipment/_components/sidebar/shipment-timeline.tsx create mode 100644 ui/src/app/shipment/page.tsx create mode 100644 ui/src/hooks/use-resize-observer.ts create mode 100644 ui/src/lib/schemas/move-schema.ts create mode 100644 ui/src/lib/schemas/shipment-filter-schema.ts create mode 100644 ui/src/lib/schemas/shipment-schema.ts create mode 100644 
ui/src/lib/schemas/stop-schema.ts create mode 100644 ui/src/lib/shipment/utils.ts create mode 100644 ui/src/types/move.ts create mode 100644 ui/src/types/shipment.ts create mode 100644 ui/src/types/stop.ts diff --git a/README.md b/README.md index a4d22d3f2..bc5746798 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@ [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) [![Go Report Card](https://goreportcard.com/badge/github.com/emoss08/trenova)](https://goreportcard.com/report/github.com/emoss08/trenova) + [![Discord](https://dcbadge.limes.pink/api/server/https://discord.gg/XDBqyvrryq?style=flat-square&theme=default-inverted)](https://discord.gg/XDBqyvrryq) > [!IMPORTANT] @@ -181,8 +182,8 @@ Comprehensive documentation is available: We believe in building a strong, supportive community around Trenova: -- [GitHub Issues](https://github.com/username/trenova/issues) - Bug reports and feature requests -- [GitHub Discussions](https://github.com/username/trenova/discussions) - Community discussions and support +- [GitHub Issues](https://github.com/emoss08/trenova/issues) - Bug reports and feature requests +- [GitHub Discussions](https://github.com/emoss08/trenova/discussions) - Community discussions and support - [Documentation](https://docs.trenova.io) - Comprehensive guides and references ## Commercial Support diff --git a/go.mod b/go.mod index 4f8ccad70..d19fb2fdc 100644 --- a/go.mod +++ b/go.mod @@ -8,6 +8,8 @@ require ( github.com/fatih/color v1.18.0 github.com/go-ozzo/ozzo-validation/v4 v4.3.0 github.com/gofiber/fiber/v2 v2.52.6 + github.com/google/go-querystring v1.1.0 + github.com/imroc/req/v3 v3.49.1 github.com/jackc/pgx/v5 v5.7.2 github.com/jaswdr/faker/v2 v2.3.3 github.com/lib/pq v1.10.9 @@ -47,6 +49,7 @@ require ( github.com/bytedance/sonic/loader v0.2.3 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cloudflare/circl v1.5.0 // indirect github.com/cloudwego/base64x v0.1.5 // indirect github.com/containerd/log v0.1.0 // indirect github.com/containerd/platforms v0.2.1 // indirect @@ -63,10 +66,14 @@ require ( github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ole/go-ole v1.3.0 // indirect + github.com/go-task/slim-sprig/v3 v3.0.0 // indirect github.com/goccy/go-json v0.10.4 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v4 v4.5.1 // indirect + github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad // indirect github.com/google/uuid v1.6.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect @@ -90,6 +97,7 @@ require ( github.com/moby/sys/userns v0.1.0 // indirect github.com/moby/term v0.5.2 // indirect github.com/morikuni/aec v1.0.0 // indirect + github.com/onsi/ginkgo/v2 v2.22.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0 // indirect github.com/pelletier/go-toml/v2 v2.2.3 // indirect @@ -98,6 +106,9 @@ require ( github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect github.com/puzpuzpuz/xsync/v3 v3.5.0 // indirect + github.com/quic-go/qpack v0.5.1 // indirect + github.com/quic-go/quic-go 
v0.48.2 // indirect + github.com/refraction-networking/utls v1.6.7 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/rs/xid v1.6.0 // indirect github.com/sagikazarmark/locafero v0.7.0 // indirect @@ -127,13 +138,16 @@ require ( go.opentelemetry.io/otel/metric v1.34.0 // indirect go.opentelemetry.io/otel/trace v1.34.0 // indirect go.uber.org/dig v1.18.0 // indirect + go.uber.org/mock v0.5.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect golang.org/x/arch v0.13.0 // indirect golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 // indirect + golang.org/x/mod v0.22.0 // indirect golang.org/x/sync v0.10.0 // indirect golang.org/x/sys v0.29.0 // indirect golang.org/x/text v0.21.0 // indirect + golang.org/x/tools v0.29.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect mellium.im/sasl v0.3.2 // indirect ) diff --git a/go.sum b/go.sum index bc929d150..e85ce7e73 100644 --- a/go.sum +++ b/go.sum @@ -24,6 +24,8 @@ github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK3 github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= +github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4= github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= @@ -73,6 +75,8 @@ github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= github.com/go-ozzo/ozzo-validation/v4 v4.3.0 h1:byhDUpfEwjsVQb1vBunvIjh2BHQ9ead57VkAEY4V+Es= github.com/go-ozzo/ozzo-validation/v4 v4.3.0/go.mod h1:2NKgrcHl3z6cJs+3Oo940FPRiTzuqKbvfrL2RxCj6Ew= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM= github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= @@ -82,14 +86,26 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= +github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad h1:a6HEuzUHeKH6hwfN/ZoQgRgVIWFJljSWa/zetS2WTvg= +github.com/google/pprof 
v0.0.0-20241210010833-40e02aabc2ad/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/imroc/req/v3 v3.49.1 h1:Nvwo02riiPEzh74ozFHeEJrtjakFxnoWNR3YZYuQm9U= +github.com/imroc/req/v3 v3.49.1/go.mod h1:tsOk8K7zI6cU4xu/VWCZVtq9Djw9IWm4MslKzme5woU= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= @@ -161,6 +177,10 @@ github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/oklog/ulid/v2 v2.1.0 h1:+9lhoxAP56we25tyYETBBY1YLA2SaoLvUFgrP2miPJU= github.com/oklog/ulid/v2 v2.1.0/go.mod h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ= +github.com/onsi/ginkgo/v2 v2.22.0 h1:Yed107/8DjTr0lKCNt7Dn8yQ6ybuDRQoMGrNFKzMfHg= +github.com/onsi/ginkgo/v2 v2.22.0/go.mod h1:7Du3c42kxCUegi0IImZ1wUQzMBVecgIHjR1C+NkhLQo= +github.com/onsi/gomega v1.34.2 h1:pNCwDkzrsv7MS9kpaQvVb1aVLahQXyJ/Tv5oAZMI3i8= +github.com/onsi/gomega v1.34.2/go.mod h1:v1xfxRgk0KIsG+QOdm7p8UosrOzPYRo60fd3B/1Dukc= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= @@ -179,8 +199,14 @@ github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/puzpuzpuz/xsync/v3 v3.5.0 h1:i+cMcpEDY1BkNm7lPDkCtE4oElsYLn+EKF8kAu2vXT4= github.com/puzpuzpuz/xsync/v3 v3.5.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA= +github.com/quic-go/qpack v0.5.1 h1:giqksBPnT/HDtZ6VhtFKgoLOWmlyo9Ei6u9PqzIMbhI= +github.com/quic-go/qpack v0.5.1/go.mod h1:+PC4XFrEskIVkcLzpEkbLqq1uCoxPhQuvK5rH1ZgaEg= +github.com/quic-go/quic-go v0.48.2 h1:wsKXZPeGWpMpCGSWqOcqpW2wZYic/8T3aqiOID0/KWE= +github.com/quic-go/quic-go v0.48.2/go.mod h1:yBgs3rWBOADpga7F+jJsb6Ybg1LSYiQvwWlLX+/6HMs= github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= +github.com/refraction-networking/utls v1.6.7 h1:zVJ7sP1dJx/WtVuITug3qYUq034cDq9B2MR1K67ULZM= +github.com/refraction-networking/utls 
v1.6.7/go.mod h1:BC3O4vQzye5hqpmDTWUqi4P5DDhzJfkV1tdqtawQIH0= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= @@ -302,6 +328,8 @@ go.uber.org/fx v1.23.0 h1:lIr/gYWQGfTwGcSXWXu4vP5Ws6iqnNEIY+F/aFzCKTg= go.uber.org/fx v1.23.0/go.mod h1:o/D9n+2mLP6v1EG+qsdT1O8wKopYAsqZasju97SDFCU= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU= +go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= @@ -317,6 +345,8 @@ golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0 golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -353,6 +383,8 @@ golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE= +golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/internal/api/handlers/routing/handler.go b/internal/api/handlers/routing/handler.go new file mode 100644 index 000000000..92169bbc1 --- /dev/null +++ b/internal/api/handlers/routing/handler.go @@ -0,0 +1,61 @@ +package routing + +import ( + "github.com/emoss08/trenova/internal/api/middleware" + "github.com/emoss08/trenova/internal/core/ports/repositories" + "github.com/emoss08/trenova/internal/core/services/routing" + "github.com/emoss08/trenova/internal/pkg/ctx" + "github.com/emoss08/trenova/internal/pkg/validator" + "github.com/gofiber/fiber/v2" + "go.uber.org/fx" +) + +type HandlerParams struct { + fx.In + + RoutingService *routing.Service + ErrorHandler *validator.ErrorHandler +} + +type Handler struct { + rs *routing.Service + eh 
*validator.ErrorHandler +} + +func NewHandler(p HandlerParams) *Handler { + return &Handler{ + rs: p.RoutingService, + eh: p.ErrorHandler, + } +} + +func (h Handler) RegisterRoutes(r fiber.Router, rl *middleware.RateLimiter) { + api := r.Group("/routing") + + api.Get("/single-search", rl.WithRateLimit( + []fiber.Handler{h.singleSearch}, + middleware.PerMinute(120), + )...) +} + +func (h Handler) singleSearch(c *fiber.Ctx) error { + reqCtx, err := ctx.WithRequestContext(c) + if err != nil { + return h.eh.HandleError(c, err) + } + + opts := routing.SingleSearchParams{ + Query: c.Query("query"), + ConfigOpts: repositories.GetPCMilerConfigurationOptions{ + OrgID: reqCtx.OrgID, + BuID: reqCtx.BuID, + }, + } + + resp, err := h.rs.SingleSearch(c.UserContext(), opts) + if err != nil { + return h.eh.HandleError(c, err) + } + + return c.JSON(resp) +} diff --git a/internal/api/handlers/shipment/handler.go b/internal/api/handlers/shipment/handler.go new file mode 100644 index 000000000..6a91c5067 --- /dev/null +++ b/internal/api/handlers/shipment/handler.go @@ -0,0 +1,187 @@ +package shipment + +import ( + "github.com/emoss08/trenova/internal/api/middleware" + shipmentdomain "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/core/ports" + "github.com/emoss08/trenova/internal/core/ports/repositories" + "github.com/emoss08/trenova/internal/core/services/shipment" + "github.com/emoss08/trenova/internal/pkg/ctx" + "github.com/emoss08/trenova/internal/pkg/utils/paginationutils/limitoffsetpagination" + "github.com/emoss08/trenova/internal/pkg/validator" + "github.com/emoss08/trenova/pkg/types" + "github.com/emoss08/trenova/pkg/types/pulid" + "github.com/gofiber/fiber/v2" + "go.uber.org/fx" +) + +type Handler struct { + ss *shipment.Service + eh *validator.ErrorHandler +} + +type HandlerParams struct { + fx.In + + ShipmentService *shipment.Service + ErrorHandler *validator.ErrorHandler +} + +func NewHandler(p HandlerParams) *Handler { + return &Handler{ss: p.ShipmentService, eh: p.ErrorHandler} +} + +func (h Handler) RegisterRoutes(r fiber.Router, rl *middleware.RateLimiter) { + api := r.Group("/shipments") + + api.Get("/select-options", rl.WithRateLimit( + []fiber.Handler{h.selectOptions}, + middleware.PerMinute(120), // 120 reads per minute + )...) + + api.Get("/", rl.WithRateLimit( + []fiber.Handler{h.list}, + middleware.PerSecond(5), // 5 reads per second + )...) + + api.Post("/", rl.WithRateLimit( + []fiber.Handler{h.create}, + middleware.PerMinute(60), // 60 writes per minute + )...) + + api.Get("/:shipmentID", rl.WithRateLimit( + []fiber.Handler{h.get}, + middleware.PerMinute(60), // 60 reads per minute + )...) + + api.Put("/:shipmentID", rl.WithRateLimit( + []fiber.Handler{h.update}, + middleware.PerMinute(60), // 60 writes per minute + )...)
+} + +func (h Handler) selectOptions(c *fiber.Ctx) error { + reqCtx, err := ctx.WithRequestContext(c) + if err != nil { + return h.eh.HandleError(c, err) + } + + opts := &repositories.ListShipmentOptions{ + Filter: &ports.LimitOffsetQueryOptions{ + TenantOpts: &ports.TenantOptions{ + OrgID: reqCtx.OrgID, + BuID: reqCtx.BuID, + UserID: reqCtx.UserID, + }, + Limit: 100, + Offset: 0, + }, + } + + options, err := h.ss.SelectOptions(c.UserContext(), opts) + if err != nil { + return h.eh.HandleError(c, err) + } + + return c.Status(fiber.StatusOK).JSON(ports.Response[[]*types.SelectOption]{ + Results: options, + Count: len(options), + Next: "", + Prev: "", + }) +} + +func (h Handler) list(c *fiber.Ctx) error { + reqCtx, err := ctx.WithRequestContext(c) + if err != nil { + return h.eh.HandleError(c, err) + } + + handler := func(fc *fiber.Ctx, filter *ports.LimitOffsetQueryOptions) (*ports.ListResult[*shipmentdomain.Shipment], error) { + return h.ss.List(fc.UserContext(), &repositories.ListShipmentOptions{ + ShipmentOptions: repositories.ShipmentOptions{ + ExpandShipmentDetails: c.QueryBool("expandShipmentDetails"), + }, + Filter: filter, + }) + } + + return limitoffsetpagination.HandlePaginatedRequest(c, h.eh, reqCtx, handler) +} + +func (h Handler) get(c *fiber.Ctx) error { + reqCtx, err := ctx.WithRequestContext(c) + if err != nil { + return h.eh.HandleError(c, err) + } + + shipmentID, err := pulid.MustParse(c.Params("shipmentID")) + if err != nil { + return h.eh.HandleError(c, err) + } + + shp, err := h.ss.Get(c.UserContext(), repositories.GetShipmentByIDOptions{ + ID: shipmentID, + BuID: reqCtx.BuID, + OrgID: reqCtx.OrgID, + UserID: reqCtx.UserID, + ShipmentOptions: repositories.ShipmentOptions{ + ExpandShipmentDetails: c.QueryBool("expandShipmentDetails"), + }, + }) + if err != nil { + return h.eh.HandleError(c, err) + } + + return c.Status(fiber.StatusOK).JSON(shp) +} + +func (h Handler) create(c *fiber.Ctx) error { + reqCtx, err := ctx.WithRequestContext(c) + if err != nil { + return h.eh.HandleError(c, err) + } + + shp := new(shipmentdomain.Shipment) + shp.OrganizationID = reqCtx.OrgID + shp.BusinessUnitID = reqCtx.BuID + + if err = c.BodyParser(shp); err != nil { + return h.eh.HandleError(c, err) + } + + entity, err := h.ss.Create(c.UserContext(), shp, reqCtx.UserID) + if err != nil { + return h.eh.HandleError(c, err) + } + + return c.Status(fiber.StatusOK).JSON(entity) +} + +func (h Handler) update(c *fiber.Ctx) error { + reqCtx, err := ctx.WithRequestContext(c) + if err != nil { + return h.eh.HandleError(c, err) + } + + shpID, err := pulid.MustParse(c.Params("shipmentID")) + if err != nil { + return h.eh.HandleError(c, err) + } + + shp := new(shipmentdomain.Shipment) + shp.ID = shpID + shp.OrganizationID = reqCtx.OrgID + shp.BusinessUnitID = reqCtx.BuID + + if err = c.BodyParser(shp); err != nil { + return h.eh.HandleError(c, err) + } + + entity, err := h.ss.Update(c.UserContext(), shp, reqCtx.UserID) + if err != nil { + return h.eh.HandleError(c, err) + } + + return c.Status(fiber.StatusOK).JSON(entity) +} diff --git a/internal/api/handlers/tractor/handler.go b/internal/api/handlers/tractor/handler.go index bbd24a0d0..fba23255d 100644 --- a/internal/api/handlers/tractor/handler.go +++ b/internal/api/handlers/tractor/handler.go @@ -102,6 +102,7 @@ func (h Handler) list(c *fiber.Ctx) error { Filter: filter, IncludeWorkerDetails: c.QueryBool("includeWorkerDetails"), IncludeEquipmentDetails: c.QueryBool("includeEquipmentDetails"), + IncludeFleetDetails: c.QueryBool("includeFleetDetails"), 
}) } @@ -126,6 +127,7 @@ func (h Handler) get(c *fiber.Ctx) error { UserID: reqCtx.UserID, IncludeWorkerDetails: c.QueryBool("includeWorkerDetails"), IncludeEquipmentDetails: c.QueryBool("includeEquipmentDetails"), + IncludeFleetDetails: c.QueryBool("includeFleetDetails"), }) if err != nil { return h.eh.HandleError(c, err) diff --git a/internal/api/handlers/trailer/handler.go b/internal/api/handlers/trailer/handler.go index 4ef9e8bd4..d1fb34d68 100644 --- a/internal/api/handlers/trailer/handler.go +++ b/internal/api/handlers/trailer/handler.go @@ -101,6 +101,7 @@ func (h Handler) list(c *fiber.Ctx) error { return h.ts.List(fc.UserContext(), &repositories.ListTrailerOptions{ Filter: filter, IncludeEquipmentDetails: c.QueryBool("includeEquipmentDetails"), + IncludeFleetDetails: c.QueryBool("includeFleetDetails"), }) } @@ -124,6 +125,7 @@ func (h Handler) get(c *fiber.Ctx) error { OrgID: reqCtx.OrgID, UserID: reqCtx.UserID, IncludeEquipmentDetails: c.QueryBool("includeEquipmentDetails"), + IncludeFleetDetails: c.QueryBool("includeFleetDetails"), }) if err != nil { return h.eh.HandleError(c, err) diff --git a/internal/api/middleware/logging.go b/internal/api/middleware/logging.go index 534f90d07..4e7efcfce 100644 --- a/internal/api/middleware/logging.go +++ b/internal/api/middleware/logging.go @@ -120,6 +120,7 @@ func NewLogger(l *logger.Logger, config ...LogConfig) fiber.Handler { start := time.Now() path := c.Path() method := c.Method() + queryParams := c.Queries() // Extract request body if configured var reqBody string @@ -139,6 +140,7 @@ func NewLogger(l *logger.Logger, config ...LogConfig) fiber.Handler { Str("requestId", requestID). Str("method", method). Str("path", path). + Interface("queryParams", queryParams). Int("status", c.Response().StatusCode()). Str("ip", c.IP()). Str("latency", formattedDuration). 
diff --git a/internal/api/routes/router.go b/internal/api/routes/router.go index 0745feb73..6d8f01c1c 100644 --- a/internal/api/routes/router.go +++ b/internal/api/routes/router.go @@ -15,9 +15,11 @@ import ( "github.com/emoss08/trenova/internal/api/handlers/locationcategory" organizationHandler "github.com/emoss08/trenova/internal/api/handlers/organization" "github.com/emoss08/trenova/internal/api/handlers/reporting" + "github.com/emoss08/trenova/internal/api/handlers/routing" "github.com/emoss08/trenova/internal/api/handlers/search" "github.com/emoss08/trenova/internal/api/handlers/servicetype" "github.com/emoss08/trenova/internal/api/handlers/session" + "github.com/emoss08/trenova/internal/api/handlers/shipment" "github.com/emoss08/trenova/internal/api/handlers/shipmenttype" "github.com/emoss08/trenova/internal/api/handlers/tableconfiguration" "github.com/emoss08/trenova/internal/api/handlers/tractor" @@ -87,6 +89,8 @@ type RouterParams struct { TractorHandler *tractor.Handler TrailerHandler *trailer.Handler CustomerHandler *customer.Handler + ShipmentHandler *shipment.Handler + RoutingHandler *routing.Handler } type Router struct { @@ -230,4 +234,10 @@ func (r *Router) setupProtectedRoutes(router fiber.Router, rl *middleware.RateLi // Customers r.p.CustomerHandler.RegisterRoutes(router, rl) + + // Shipments + r.p.ShipmentHandler.RegisterRoutes(router, rl) + + // Routing + r.p.RoutingHandler.RegisterRoutes(router, rl) } diff --git a/internal/bootstrap/app.go b/internal/bootstrap/app.go index 0df160c96..a29311bbd 100644 --- a/internal/bootstrap/app.go +++ b/internal/bootstrap/app.go @@ -8,6 +8,7 @@ import ( "time" "github.com/emoss08/trenova/internal/bootstrap/modules/api" + "github.com/emoss08/trenova/internal/bootstrap/modules/external" "github.com/emoss08/trenova/internal/bootstrap/modules/infrastructure" "github.com/emoss08/trenova/internal/bootstrap/modules/services" "github.com/emoss08/trenova/internal/bootstrap/modules/validators" @@ -23,6 +24,7 @@ func Bootstrap() error { infrastructure.Module, redisRepos.Module, postgresRepos.Module, + external.Module, validators.Module, services.Module, api.Module, diff --git a/internal/bootstrap/modules/api/handlers.go b/internal/bootstrap/modules/api/handlers.go index 0e27ea0e5..e7c7b35f9 100644 --- a/internal/bootstrap/modules/api/handlers.go +++ b/internal/bootstrap/modules/api/handlers.go @@ -13,9 +13,11 @@ import ( "github.com/emoss08/trenova/internal/api/handlers/locationcategory" "github.com/emoss08/trenova/internal/api/handlers/organization" "github.com/emoss08/trenova/internal/api/handlers/reporting" + "github.com/emoss08/trenova/internal/api/handlers/routing" "github.com/emoss08/trenova/internal/api/handlers/search" "github.com/emoss08/trenova/internal/api/handlers/servicetype" "github.com/emoss08/trenova/internal/api/handlers/session" + "github.com/emoss08/trenova/internal/api/handlers/shipment" "github.com/emoss08/trenova/internal/api/handlers/shipmenttype" "github.com/emoss08/trenova/internal/api/handlers/tableconfiguration" "github.com/emoss08/trenova/internal/api/handlers/tractor" @@ -51,4 +53,6 @@ var HandlersModule = fx.Module("api.Handlers", fx.Provide( tractor.NewHandler, trailer.NewHandler, customer.NewHandler, + shipment.NewHandler, + routing.NewHandler, )) diff --git a/internal/bootstrap/modules/external/module.go b/internal/bootstrap/modules/external/module.go new file mode 100644 index 000000000..11998fd81 --- /dev/null +++ b/internal/bootstrap/modules/external/module.go @@ -0,0 +1,10 @@ +package external + +import ( + 
"github.com/emoss08/trenova/internal/infrastructure/external/maps/pcmiler" + "go.uber.org/fx" +) + +var Module = fx.Module("external", fx.Provide( + pcmiler.NewClient, +)) diff --git a/internal/bootstrap/modules/services/module.go b/internal/bootstrap/modules/services/module.go index 2c480c4da..01ae854f6 100644 --- a/internal/bootstrap/modules/services/module.go +++ b/internal/bootstrap/modules/services/module.go @@ -16,9 +16,11 @@ import ( "github.com/emoss08/trenova/internal/core/services/organization" "github.com/emoss08/trenova/internal/core/services/permission" "github.com/emoss08/trenova/internal/core/services/reporting" + "github.com/emoss08/trenova/internal/core/services/routing" "github.com/emoss08/trenova/internal/core/services/search" "github.com/emoss08/trenova/internal/core/services/servicetype" "github.com/emoss08/trenova/internal/core/services/session" + "github.com/emoss08/trenova/internal/core/services/shipment" "github.com/emoss08/trenova/internal/core/services/shipmenttype" "github.com/emoss08/trenova/internal/core/services/tableconfiguration" "github.com/emoss08/trenova/internal/core/services/tractor" @@ -55,4 +57,6 @@ var Module = fx.Module("services", fx.Provide( tractor.NewService, trailer.NewService, customer.NewService, + shipment.NewService, + routing.NewService, )) diff --git a/internal/bootstrap/modules/validators/module.go b/internal/bootstrap/modules/validators/module.go index d88bddd2b..5ce554a38 100644 --- a/internal/bootstrap/modules/validators/module.go +++ b/internal/bootstrap/modules/validators/module.go @@ -11,6 +11,7 @@ import ( "github.com/emoss08/trenova/internal/pkg/validator/locationvalidator" "github.com/emoss08/trenova/internal/pkg/validator/servicetypevalidator" "github.com/emoss08/trenova/internal/pkg/validator/shipmenttypevalidator" + "github.com/emoss08/trenova/internal/pkg/validator/shipmentvalidator" "github.com/emoss08/trenova/internal/pkg/validator/tractorvalidator" "github.com/emoss08/trenova/internal/pkg/validator/trailervalidator" "github.com/emoss08/trenova/internal/pkg/validator/workervalidator" @@ -34,4 +35,7 @@ var Module = fx.Module("validators", fx.Provide( tractorvalidator.NewValidator, trailervalidator.NewValidator, customervalidator.NewValidator, + shipmentvalidator.NewStopValidator, + shipmentvalidator.NewMoveValidator, + shipmentvalidator.NewValidator, )) diff --git a/internal/core/domain/README.md b/internal/core/domain/README.md index 86575cbd0..f89a45796 100644 --- a/internal/core/domain/README.md +++ b/internal/core/domain/README.md @@ -40,8 +40,8 @@ The `domain` directory contains the core business objects, rules, and logic of t Origin Address Destination Address Weight Weight - CreatedAt time.Time - UpdatedAt time.Time + CreatedAt int64 + UpdatedAt int64 } ``` @@ -64,7 +64,7 @@ The `domain` directory contains the core business objects, rules, and logic of t ShipmentID string PreviousStatus ShipmentStatus NewStatus ShipmentStatus - ChangedAt time.Time + ChangedAt int64 } ``` @@ -147,8 +147,8 @@ type Shipment struct { Status Status Weight Weight Route Route - createdAt time.Time - updatedAt time.Time + createdAt int64 + updatedAt int64 } // NewShipment creates a new shipment with validation diff --git a/internal/core/domain/google/enums.go b/internal/core/domain/google/enums.go new file mode 100644 index 000000000..a6a291588 --- /dev/null +++ b/internal/core/domain/google/enums.go @@ -0,0 +1,27 @@ +package google + +type RouteAvoidance string + +// TODO(Wolfred): We'll need to map these to the actual values used by 
Google Maps +// and then add them to the API +const ( + // AvoidTolls avoids tolls + RouteAvoidanceAvoidTolls = RouteAvoidance("AvoidTolls") // `tolls` in google maps + + // AvoidHighways avoids highways + RouteAvoidanceAvoidHighways = RouteAvoidance("AvoidHighways") // `highways` in google maps + + // AvoidFerries avoids ferries + RouteAvoidanceAvoidFerries = RouteAvoidance("AvoidFerries") // `ferries` in google maps + + // Indoor avoids indoor routes + RouteAvoidanceAvoidIndoor = RouteAvoidance("AvoidIndoor") // `indoor` in google maps +) + +type RouteModel string + +const ( + RouteModelBestGuess = RouteModel("BestGuess") // `best_guess` in google maps (Default) + RouteModelOptimistic = RouteModel("Optimistic") // `optimistic` in google maps + RouteModelPessimistic = RouteModel("Pessimistic") // `pessimistic` in google maps +) diff --git a/internal/core/domain/location/location.go b/internal/core/domain/location/location.go index d22f5daa0..d53d9e7da 100644 --- a/internal/core/domain/location/location.go +++ b/internal/core/domain/location/location.go @@ -65,7 +65,7 @@ func (l *Location) Validate(ctx context.Context, multiErr *errors.MultiError) { if err != nil { var validationErrs validation.Errors if eris.As(err, &validationErrs) { - errors.FromValidationErrors(validationErrs, multiErr, "") + errors.FromOzzoErrors(validationErrs, multiErr) } } } diff --git a/internal/core/domain/pcmilerconfiguration/pcmilerconfig.go b/internal/core/domain/pcmilerconfiguration/pcmilerconfig.go new file mode 100644 index 000000000..575842395 --- /dev/null +++ b/internal/core/domain/pcmilerconfiguration/pcmilerconfig.go @@ -0,0 +1,49 @@ +package pcmilerconfiguration + +import ( + "context" + + "github.com/emoss08/trenova/internal/core/domain/businessunit" + "github.com/emoss08/trenova/internal/core/domain/organization" + "github.com/emoss08/trenova/internal/pkg/utils/timeutils" + "github.com/emoss08/trenova/pkg/types/pulid" + "github.com/uptrace/bun" +) + +type PCMilerConfiguration struct { + bun.BaseModel `bun:"table:pcmiler_configurations,alias:pcm" json:"-"` + + // Primary identifiers + ID pulid.ID `json:"id" bun:",pk,type:VARCHAR(100),notnull"` + BusinessUnitID pulid.ID `json:"businessUnitId" bun:"business_unit_id,pk,notnull,type:VARCHAR(100)"` + OrganizationID pulid.ID `json:"organizationId" bun:"organization_id,pk,notnull,type:VARCHAR(100)"` + + // Core Fields + APIKey string `json:"apiKey" bun:"api_key,type:VARCHAR(255)"` + + // Metadata + Version int64 `bun:"version,type:BIGINT" json:"version"` + CreatedAt int64 `bun:"created_at,type:BIGINT,nullzero,notnull,default:extract(epoch from current_timestamp)::bigint" json:"createdAt"` + UpdatedAt int64 `bun:"updated_at,type:BIGINT,nullzero,notnull,default:extract(epoch from current_timestamp)::bigint" json:"updatedAt"` + + // Relationships + BusinessUnit *businessunit.BusinessUnit `bun:"rel:belongs-to,join:business_unit_id=id" json:"-"` + Organization *organization.Organization `bun:"rel:has-one,join:organization_id=id" json:"-"` +} + +func (p *PCMilerConfiguration) BeforeAppendModel(_ context.Context, query bun.Query) error { + now := timeutils.NowUnix() + + switch query.(type) { + case *bun.InsertQuery: + if p.ID.IsNil() { + p.ID = pulid.MustNew("pcm_") + } + + p.CreatedAt = now + case *bun.UpdateQuery: + p.UpdatedAt = now + } + + return nil +} diff --git a/internal/core/domain/shipment/enums.go b/internal/core/domain/shipment/enums.go index aed2cbd18..c241170d6 100644 --- a/internal/core/domain/shipment/enums.go +++ 
b/internal/core/domain/shipment/enums.go @@ -3,40 +3,40 @@ package shipment type Status string const ( - New = Status("New") - InTransit = Status("InTransit") - Delayed = Status("Delayed") - Completed = Status("Completed") - Billed = Status("Billed") - Canceled = Status("Canceled") + StatusNew = Status("New") + StatusInTransit = Status("InTransit") + StatusDelayed = Status("Delayed") + StatusCompleted = Status("Completed") + StatusBilled = Status("Billed") + StatusCanceled = Status("Canceled") ) type RatingMethod string const ( // FlatRate is the cost per shipment - FlatRate = RatingMethod("FlatRate") + RatingMethodFlatRate = RatingMethod("FlatRate") // PerMile is the cost per mile of the shipment - PerMile = RatingMethod("PerMile") + RatingMethodPerMile = RatingMethod("PerMile") // PerStop is the cost per stop of the shipment - PerStop = RatingMethod("PerStop") + RatingMethodPerStop = RatingMethod("PerStop") // PerPound is the cost per pound of the shipment - PerPound = RatingMethod("PerPound") + RatingMethodPerPound = RatingMethod("PerPound") // PerPallet is the cost per pallet position used - PerPallet = RatingMethod("PerPallet") + RatingMethodPerPallet = RatingMethod("PerPallet") // PerLinearFoot is the cost based on the linear feet of trailer space used. // This is commonly used for LTL shipments, Flatbed haulers, and specific // commodities that are measured in linear feet. - PerLinearFoot = RatingMethod("PerLinearFoot") + RatingMethodPerLinearFoot = RatingMethod("PerLinearFoot") // Other takes the rating units and the rate and does multiplication // of the two to get the total cost - Other = RatingMethod("Other") + RatingMethodOther = RatingMethod("Other") ) type EntryMethod string diff --git a/internal/core/domain/shipment/shipment.go b/internal/core/domain/shipment/shipment.go index ed12c9ac5..3c06d590c 100644 --- a/internal/core/domain/shipment/shipment.go +++ b/internal/core/domain/shipment/shipment.go @@ -1,16 +1,31 @@ package shipment import ( + "context" + "strings" + + "github.com/emoss08/trenova/internal/core/domain" "github.com/emoss08/trenova/internal/core/domain/businessunit" "github.com/emoss08/trenova/internal/core/domain/customer" + "github.com/emoss08/trenova/internal/core/domain/equipmenttype" "github.com/emoss08/trenova/internal/core/domain/organization" "github.com/emoss08/trenova/internal/core/domain/servicetype" "github.com/emoss08/trenova/internal/core/domain/shipmenttype" + "github.com/emoss08/trenova/internal/core/ports/infra" + "github.com/emoss08/trenova/internal/pkg/errors" + "github.com/emoss08/trenova/internal/pkg/utils/timeutils" "github.com/emoss08/trenova/pkg/types/pulid" + validation "github.com/go-ozzo/ozzo-validation/v4" + "github.com/rotisserie/eris" "github.com/shopspring/decimal" "github.com/uptrace/bun" ) +var ( + _ bun.BeforeAppendModelHook = (*Shipment)(nil) + _ domain.Validatable = (*Shipment)(nil) +) + type Shipment struct { bun.BaseModel `bun:"table:shipments,alias:sp" json:"-"` @@ -20,9 +35,11 @@ type Shipment struct { OrganizationID pulid.ID `bun:"organization_id,type:VARCHAR(100),pk,notnull" json:"organizationId"` // Relationship identifiers (Non-Primary-Keys) - ServiceTypeID pulid.ID `bun:"service_type_id,type:VARCHAR(100),notnull" json:"serviceTypeId"` - ShipmentTypeID pulid.ID `bun:"shipment_type_id,type:VARCHAR(100),notnull" json:"shipmentTypeId"` - CustomerID pulid.ID `bun:"customer_id,type:VARCHAR(100),notnull" json:"customerId"` + ServiceTypeID pulid.ID `bun:"service_type_id,type:VARCHAR(100),notnull" json:"serviceTypeId"` + 
ShipmentTypeID pulid.ID `bun:"shipment_type_id,type:VARCHAR(100),notnull" json:"shipmentTypeId"` + CustomerID pulid.ID `bun:"customer_id,type:VARCHAR(100),notnull" json:"customerId"` + TractorTypeID *pulid.ID `bun:"tractor_type_id,type:VARCHAR(100),nullzero" json:"tractorTypeId"` + TrailerTypeID *pulid.ID `bun:"trailer_type_id,type:VARCHAR(100),nullzero" json:"trailerTypeId"` // Core fields Status Status `json:"status" bun:"status,type:status_enum,notnull,default:'New'"` @@ -56,10 +73,139 @@ type Shipment struct { UpdatedAt int64 `json:"updatedAt" bun:"updated_at,notnull,default:extract(epoch from current_timestamp)::bigint"` // Relationships - BusinessUnit *businessunit.BusinessUnit `json:"businessUnit,omitempty" bun:"rel:belongs-to,join:business_unit_id=id"` - Organization *organization.Organization `json:"organization,omitempty" bun:"rel:belongs-to,join:organization_id=id"` - ShipmentType *shipmenttype.ShipmentType `json:"shipmentType,omitempty" bun:"rel:belongs-to,join:shipment_type_id=id"` - ServiceType *servicetype.ServiceType `json:"serviceType,omitempty" bun:"rel:belongs-to,join:service_type_id=id"` - Customer *customer.Customer `json:"customer,omitempty" bun:"rel:belongs-to,join:customer_id=id"` - Commodities []*ShipmentCommodity `json:"commodities,omitempty" bun:"rel:has-many,join:id=shipment_id"` + BusinessUnit *businessunit.BusinessUnit `json:"businessUnit,omitempty" bun:"rel:belongs-to,join:business_unit_id=id"` + Organization *organization.Organization `json:"organization,omitempty" bun:"rel:belongs-to,join:organization_id=id"` + ShipmentType *shipmenttype.ShipmentType `json:"shipmentType,omitempty" bun:"rel:belongs-to,join:shipment_type_id=id"` + ServiceType *servicetype.ServiceType `json:"serviceType,omitempty" bun:"rel:belongs-to,join:service_type_id=id"` + Customer *customer.Customer `json:"customer,omitempty" bun:"rel:belongs-to,join:customer_id=id"` + TractorType *equipmenttype.EquipmentType `json:"tractorType,omitempty" bun:"rel:belongs-to,join:tractor_type_id=id"` + TrailerType *equipmenttype.EquipmentType `json:"trailerType,omitempty" bun:"rel:belongs-to,join:trailer_type_id=id"` + Moves []*ShipmentMove `json:"moves,omitempty" bun:"rel:has-many,join:id=shipment_id"` + Commodities []*ShipmentCommodity `json:"commodities,omitempty" bun:"rel:has-many,join:id=shipment_id"` +} + +func (st *Shipment) Validate(ctx context.Context, multiErr *errors.MultiError) { + err := validation.ValidateStructWithContext(ctx, st, + // Status is required and must be a valid status + validation.Field(&st.Status, + validation.Required.Error("Status is required"), + validation.In( + StatusNew, + StatusInTransit, + StatusDelayed, + StatusCompleted, + StatusBilled, + StatusCanceled, + ).Error("Status must be a valid status"), + ), + + // ShipmentTypeID is required + validation.Field(&st.ShipmentTypeID, + validation.Required.Error("Shipment Type is required"), + ), + + // CustomerID is required + validation.Field(&st.CustomerID, + validation.Required.Error("Customer is required"), + ), + + // BOL is required and must be between 1 and 100 characters + validation.Field(&st.BOL, + validation.Required.Error("BOL is required"), + validation.Length(1, 100).Error("BOL must be between 1 and 100 characters"), + ), + + // Rating method is required and must be a valid rating method + validation.Field(&st.RatingMethod, + validation.Required.Error("Rating Method is required"), + validation.In( + RatingMethodFlatRate, + RatingMethodPerMile, + RatingMethodPerStop, + RatingMethodPerPound, + 
RatingMethodPerPallet, + RatingMethodPerLinearFoot, + RatingMethodOther, + ).Error("Rating Method must be a valid rating method"), + ), + + // Freight Charge Amount is required when rating method is flat + validation.Field(&st.FreightChargeAmount, + validation.When(st.RatingMethod == RatingMethodFlatRate, + validation.Required.Error("Freight Charge Amount is required when rating method is Flat"), + ), + ), + + // Weight is required when rating method is Per Pound + validation.Field(&st.Weight, + validation.When(st.RatingMethod == RatingMethodPerPound, + validation.Required.Error("Weight is required when rating method is Per Pound"), + ), + ), + + // Ensure rating unit is greater than 0 and required when rating method is Per Mile + validation.Field(&st.RatingUnit, + validation.When(st.RatingMethod == RatingMethodPerMile, + validation.Required.Error("Rating Unit is required when rating method is Per Mile"), + validation.Min(1).Error("Rating Unit must be greater than 0"), + ), + ), + ) + if err != nil { + var validationErrs validation.Errors + if eris.As(err, &validationErrs) { + errors.FromOzzoErrors(validationErrs, multiErr) + } + + } +} + +// Pagination Configuration +func (st *Shipment) GetID() string { + return st.ID.String() +} + +func (st *Shipment) GetTableName() string { + return "shipments" +} + +// Search Configuration +func (st *Shipment) GetSearchType() string { + return "shipment" +} + +func (st *Shipment) ToDocument() infra.SearchDocument { + searchableText := []string{ + st.ProNumber, + st.BOL, + } + + return infra.SearchDocument{ + ID: st.ID.String(), + Type: "shipment", + BusinessUnitID: st.BusinessUnitID.String(), + OrganizationID: st.OrganizationID.String(), + CreatedAt: st.CreatedAt, + UpdatedAt: st.UpdatedAt, + Title: st.ProNumber, + Description: st.ProNumber, + SearchableText: strings.Join(searchableText, " "), + } +} + +func (st *Shipment) BeforeAppendModel(_ context.Context, query bun.Query) error { + now := timeutils.NowUnix() + + switch query.(type) { + case *bun.InsertQuery: + if st.ID.IsNil() { + st.ID = pulid.MustNew("shp_") + } + + st.CreatedAt = now + case *bun.UpdateQuery: + st.UpdatedAt = now + } + + return nil +} diff --git a/internal/core/domain/shipment/shipmentmove.go b/internal/core/domain/shipment/shipmentmove.go index 0a6fae06d..048ebafb7 100644 --- a/internal/core/domain/shipment/shipmentmove.go +++ b/internal/core/domain/shipment/shipmentmove.go @@ -5,9 +5,14 @@ import ( "github.com/emoss08/trenova/internal/core/domain/businessunit" "github.com/emoss08/trenova/internal/core/domain/organization" + "github.com/emoss08/trenova/internal/core/domain/tractor" + "github.com/emoss08/trenova/internal/core/domain/trailer" "github.com/emoss08/trenova/internal/core/domain/worker" + "github.com/emoss08/trenova/internal/pkg/errors" "github.com/emoss08/trenova/internal/pkg/utils/timeutils" "github.com/emoss08/trenova/pkg/types/pulid" + validation "github.com/go-ozzo/ozzo-validation/v4" + "github.com/rotisserie/eris" "github.com/uptrace/bun" ) @@ -20,16 +25,17 @@ type ShipmentMove struct { OrganizationID pulid.ID `bun:"organization_id,pk,notnull,type:VARCHAR(100)" json:"organizationId"` // Relationship identifiers (Non-Primary-Keys) - ShipmentID pulid.ID `bun:"shipment_id,type:VARCHAR(100),notnull" json:"shipmentId"` - PrimaryWorkerID pulid.ID `bun:"primary_worker_id,type:VARCHAR(100),nullzero" json:"primaryWorkerId"` - SecondaryWorkerID pulid.ID `bun:"secondary_worker_id,type:VARCHAR(100),nullzero" json:"secondaryWorkerId"` - // TODO(Wolfred): Add trailer and tractor ID +
ShipmentID pulid.ID `bun:"shipment_id,type:VARCHAR(100),notnull" json:"shipmentId"` + PrimaryWorkerID pulid.ID `bun:"primary_worker_id,type:VARCHAR(100),notnull" json:"primaryWorkerId"` + SecondaryWorkerID *pulid.ID `bun:"secondary_worker_id,type:VARCHAR(100),nullzero" json:"secondaryWorkerId"` + TrailerID pulid.ID `bun:"trailer_id,type:VARCHAR(100),nullzero" json:"trailerId"` + TractorID pulid.ID `bun:"tractor_id,type:VARCHAR(100),nullzero" json:"tractorId"` // Core Fields - Status StopStatus `json:"status" bun:"status,type:stop_status_enum,notnull,default:'New'"` - Loaded bool `json:"loaded" bun:"loaded,type:BOOLEAN,notnull,default:true"` - SequenceNumber int `json:"sequenceNumber" bun:"sequence_number,type:INTEGER,notnull,default:0"` - Distance *float64 `json:"distance" bun:"distance,type:FLOAT,nullzero"` + Status StopStatus `json:"status" bun:"status,type:stop_status_enum,notnull,default:'New'"` + Loaded bool `json:"loaded" bun:"loaded,type:BOOLEAN,notnull,default:true"` + Sequence int `json:"sequence" bun:"sequence,type:INTEGER,notnull,default:0"` + Distance *float64 `json:"distance" bun:"distance,type:FLOAT,nullzero"` // Metadata Version int64 `bun:"version,type:BIGINT" json:"version"` @@ -37,14 +43,48 @@ type ShipmentMove struct { UpdatedAt int64 `bun:"updated_at,type:BIGINT,nullzero,notnull,default:extract(epoch from current_timestamp)::bigint" json:"updatedAt"` // Relationships - BusinessUnit *businessunit.BusinessUnit `bun:"rel:belongs-to,join:business_unit_id=id" json:"-"` - Organization *organization.Organization `bun:"rel:belongs-to,join:organization_id=id" json:"-"` - Shipment *Shipment `bun:"rel:belongs-to,join:shipment_id=id" json:"shipment,omitempty"` - // Tractor *Tractor `bun:"rel:belongs-to,join:tractor_id=id" json:"tractor,omitempty"` - // Trailer *Trailer `bun:"rel:belongs-to,join:trailer_id=id" json:"trailer,omitempty"` - PrimaryWorker *worker.Worker `bun:"rel:belongs-to,join:primary_worker_id=id" json:"primaryWorker,omitempty"` - SecondaryWorker *worker.Worker `bun:"rel:belongs-to,join:secondary_worker_id=id" json:"secondaryWorker,omitempty"` - Stops []*Stop `bun:"rel:has-many,join:id=shipment_move_id" json:"stops,omitempty"` + BusinessUnit *businessunit.BusinessUnit `bun:"rel:belongs-to,join:business_unit_id=id" json:"-"` + Organization *organization.Organization `bun:"rel:belongs-to,join:organization_id=id" json:"-"` + Shipment *Shipment `bun:"rel:belongs-to,join:shipment_id=id" json:"shipment,omitempty"` + Tractor *tractor.Tractor `bun:"rel:belongs-to,join:tractor_id=id" json:"tractor,omitempty"` + Trailer *trailer.Trailer `bun:"rel:belongs-to,join:trailer_id=id" json:"trailer,omitempty"` + PrimaryWorker *worker.Worker `bun:"rel:belongs-to,join:primary_worker_id=id" json:"primaryWorker,omitempty"` + SecondaryWorker *worker.Worker `bun:"rel:belongs-to,join:secondary_worker_id=id" json:"secondaryWorker,omitempty"` + Stops []*Stop `bun:"rel:has-many,join:id=shipment_move_id" json:"stops,omitempty"` +} + +func (sm *ShipmentMove) Validate(ctx context.Context, multiErr *errors.MultiError) { + err := validation.ValidateStructWithContext(ctx, sm, + // Status is required and must be a valid stop status + validation.Field(&sm.Status, + validation.Required.Error("Status is required"), + validation.In( + StopStatusNew, + StopStatusInTransit, + StopStatusCompleted, + StopStatusCanceled, + ).Error("Status must be a valid stop status"), + ), + + // Tractor ID is required + validation.Field(&sm.TractorID, + validation.Required.Error("Tractor is required"), + ), + // Trailer ID is
required + validation.Field(&sm.TrailerID, + validation.Required.Error("Trailer is required"), + ), + // Primary Worker ID is required + validation.Field(&sm.PrimaryWorkerID, + validation.Required.Error("Primary Worker is required"), + ), + ) + if err != nil { + var validationErrs validation.Errors + if eris.As(err, &validationErrs) { + errors.FromOzzoErrors(validationErrs, multiErr) + } + } } // Pagination Configuration diff --git a/internal/core/domain/shipment/stop.go b/internal/core/domain/shipment/stop.go index b2cda9b68..7d4d6f15d 100644 --- a/internal/core/domain/shipment/stop.go +++ b/internal/core/domain/shipment/stop.go @@ -1,18 +1,112 @@ package shipment import ( + "context" + "time" + + "github.com/emoss08/trenova/internal/core/domain/businessunit" + "github.com/emoss08/trenova/internal/core/domain/location" + "github.com/emoss08/trenova/internal/core/domain/organization" + "github.com/emoss08/trenova/internal/pkg/errors" "github.com/emoss08/trenova/pkg/types/pulid" + validation "github.com/go-ozzo/ozzo-validation/v4" + "github.com/rotisserie/eris" "github.com/uptrace/bun" ) +var _ bun.BeforeAppendModelHook = (*Stop)(nil) + type Stop struct { bun.BaseModel `bun:"table:stops,alias:stp" json:"-"` // Primary identifiers - ID pulid.ID `bun:",pk,type:VARCHAR(100),notnull" json:"id"` - BusinessUnitID pulid.ID `bun:"business_unit_id,pk,notnull,type:VARCHAR(100)" json:"businessUnitId"` - OrganizationID pulid.ID `bun:"organization_id,pk,notnull,type:VARCHAR(100)" json:"organizationId"` + ID pulid.ID `json:"id" bun:",pk,type:VARCHAR(100),notnull"` + BusinessUnitID pulid.ID `json:"businessUnitId" bun:"business_unit_id,pk,notnull,type:VARCHAR(100)"` + OrganizationID pulid.ID `json:"organizationId" bun:"organization_id,pk,notnull,type:VARCHAR(100)"` // Relationship identifiers (Non-Primary-Keys) - ShipmentMoveID pulid.ID `bun:"shipment_move_id,notnull,type:VARCHAR(100)" json:"shipmentMoveId"` + ShipmentMoveID pulid.ID `json:"shipmentMoveId" bun:"shipment_move_id,notnull,type:VARCHAR(100)"` + LocationID pulid.ID `json:"locationId" bun:"location_id,notnull,type:VARCHAR(100)"` + + // Core Fields + Status StopStatus `json:"status" bun:"status,notnull,type:VARCHAR(100)"` + Type StopType `json:"type" bun:"type,notnull,type:VARCHAR(100)"` + Sequence int `json:"sequence" bun:"sequence,notnull,type:INT"` + Pieces *int `json:"pieces" bun:"pieces,type:INTEGER,nullzero"` + Weight *int `json:"weight" bun:"weight,type:INTEGER,nullzero"` + PlannedArrival int64 `json:"plannedArrival" bun:"planned_arrival,type:BIGINT,nullzero"` + PlannedDeparture int64 `json:"plannedDeparture" bun:"planned_departure,type:BIGINT,nullzero"` + ActualArrival *int64 `json:"actualArrival" bun:"actual_arrival,type:BIGINT,nullzero"` + ActualDeparture *int64 `json:"actualDeparture" bun:"actual_departure,type:BIGINT,nullzero"` + AddressLine string `json:"addressLine" bun:"address_line,type:VARCHAR(255),nullzero"` + + // Metadata + Version int64 `json:"version" bun:"version,type:BIGINT"` + CreatedAt int64 `json:"createdAt" bun:"created_at,notnull,default:extract(epoch from current_timestamp)::bigint"` + UpdatedAt int64 `json:"updatedAt" bun:"updated_at,notnull,default:extract(epoch from current_timestamp)::bigint"` + + // Relationships + BusinessUnit *businessunit.BusinessUnit `json:"businessUnit,omitempty" bun:"rel:belongs-to,join:business_unit_id=id"` + Organization *organization.Organization `json:"organization,omitempty" bun:"rel:belongs-to,join:organization_id=id"` + ShipmentMove *ShipmentMove `json:"shipmentMove,omitempty"
bun:"rel:belongs-to,join:shipment_move_id=id"` + Location *location.Location `json:"location,omitempty" bun:"rel:belongs-to,join:location_id=id"` +} + +func (s *Stop) Validate(ctx context.Context, multiErr *errors.MultiError) { + err := validation.ValidateStructWithContext(ctx, s, + // Type is required and must be a valid stop type + validation.Field(&s.Type, + validation.Required.Error("Type is required"), + validation.In( + StopTypePickup, + StopTypeDelivery, + StopTypeSplitPickup, + StopTypeSplitDelivery, + ).Error("Type must be a valid stop type"), + ), + + // Status is required and must be a valid stop status + validation.Field(&s.Status, + validation.Required.Error("Status is required"), + validation.In( + StopStatusNew, + StopStatusInTransit, + StopStatusCompleted, + StopStatusCanceled, + ).Error("Status must be a valid stop status"), + ), + + // Planned arrival is required + validation.Field(&s.PlannedArrival, + validation.Required.Error("Planned arrival is required"), + ), + + // Planned departure is required + validation.Field(&s.PlannedDeparture, + validation.Required.Error("Planned departure is required"), + ), + ) + if err != nil { + var validationErrs validation.Errors + if eris.As(err, &validationErrs) { + errors.FromOzzoErrors(validationErrs, multiErr) + } + } +} + +func (s *Stop) BeforeAppendModel(_ context.Context, query bun.Query) error { + now := time.Now().Unix() + + switch query.(type) { + case *bun.InsertQuery: + if s.ID == "" { + s.ID = pulid.MustNew("stp_") + } + + s.CreatedAt = now + case *bun.UpdateQuery: + s.UpdatedAt = now + } + + return nil } diff --git a/internal/core/domain/trailer/trailer.go b/internal/core/domain/trailer/trailer.go index 0d48cd5c3..3cc935326 100644 --- a/internal/core/domain/trailer/trailer.go +++ b/internal/core/domain/trailer/trailer.go @@ -88,7 +88,7 @@ func (t *Trailer) Validate(ctx context.Context, multiErr *errors.MultiError) { if err != nil { var validationErrs validation.Errors if eris.As(err, &validationErrs) { - errors.FromValidationErrors(validationErrs, multiErr, "") + errors.FromOzzoErrors(validationErrs, multiErr) } } } diff --git a/internal/core/domain/types.go b/internal/core/domain/types.go index cfeb14671..19fc77f91 100644 --- a/internal/core/domain/types.go +++ b/internal/core/domain/types.go @@ -39,6 +39,13 @@ const ( EquipmentStatusSold = EquipmentStatus("Sold") ) +type RoutingProvider string + +const ( + // PCMiler is the provider for PCMiler + RoutingProviderPCMiler = RoutingProvider("PCMiler") +) + type Validatable interface { Validate(ctx context.Context, multiErr *errors.MultiError) GetTableName() string diff --git a/internal/core/domain/user/enums.go b/internal/core/domain/user/enums.go index 4cb3ef194..3da38417a 100644 --- a/internal/core/domain/user/enums.go +++ b/internal/core/domain/user/enums.go @@ -3,6 +3,9 @@ package user type TimeFormat string const ( - TimeFormat12Hour TimeFormat = "12-hour" - TimeFormat24Hour TimeFormat = "24-hour" + // TimeFormat12Hour is the 12-hour time format + TimeFormat12Hour = TimeFormat("12-hour") + + // TimeFormat24Hour is the 24-hour time format (commonly known as military time) + TimeFormat24Hour = TimeFormat("24-hour") ) diff --git a/internal/core/domain/worker/workerpto.go b/internal/core/domain/worker/workerpto.go index 9ec403e4e..a597f8d84 100644 --- a/internal/core/domain/worker/workerpto.go +++ b/internal/core/domain/worker/workerpto.go @@ -15,7 +15,7 @@ import ( "github.com/uptrace/bun" ) -var _ bun.BeforeAppendModelHook = (*Worker)(nil) +var _ 
bun.BeforeAppendModelHook = (*WorkerPTO)(nil) type WorkerPTO struct { bun.BaseModel `bun:"table:worker_pto,alias:wpto" json:"-"` diff --git a/internal/core/ports/repositories/pcmilerconfiguration.go b/internal/core/ports/repositories/pcmilerconfiguration.go new file mode 100644 index 000000000..a8fe90857 --- /dev/null +++ b/internal/core/ports/repositories/pcmilerconfiguration.go @@ -0,0 +1,17 @@ +package repositories + +import ( + "context" + + "github.com/emoss08/trenova/internal/core/domain/pcmilerconfiguration" + "github.com/emoss08/trenova/pkg/types/pulid" +) + +type GetPCMilerConfigurationOptions struct { + OrgID pulid.ID + BuID pulid.ID +} + +type PCMilerConfigurationRepository interface { + GetPCMilerConfiguration(ctx context.Context, opts GetPCMilerConfigurationOptions) (*pcmilerconfiguration.PCMilerConfiguration, error) +} diff --git a/internal/core/ports/repositories/shipment.go b/internal/core/ports/repositories/shipment.go new file mode 100644 index 000000000..8059b1194 --- /dev/null +++ b/internal/core/ports/repositories/shipment.go @@ -0,0 +1,33 @@ +package repositories + +import ( + "context" + + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/core/ports" + "github.com/emoss08/trenova/pkg/types/pulid" +) + +type ShipmentOptions struct { + ExpandShipmentDetails bool `query:"expandShipmentDetails"` +} + +type ListShipmentOptions struct { + Filter *ports.LimitOffsetQueryOptions + ShipmentOptions ShipmentOptions +} + +type GetShipmentByIDOptions struct { + ID pulid.ID + OrgID pulid.ID + BuID pulid.ID + UserID pulid.ID + ShipmentOptions ShipmentOptions +} + +type ShipmentRepository interface { + List(ctx context.Context, opts *ListShipmentOptions) (*ports.ListResult[*shipment.Shipment], error) + GetByID(ctx context.Context, opts GetShipmentByIDOptions) (*shipment.Shipment, error) + Create(ctx context.Context, t *shipment.Shipment) (*shipment.Shipment, error) + Update(ctx context.Context, t *shipment.Shipment) (*shipment.Shipment, error) +} diff --git a/internal/core/ports/repositories/tractor.go b/internal/core/ports/repositories/tractor.go index 355dec166..9b298ab1d 100644 --- a/internal/core/ports/repositories/tractor.go +++ b/internal/core/ports/repositories/tractor.go @@ -12,6 +12,7 @@ type ListTractorOptions struct { Filter *ports.LimitOffsetQueryOptions IncludeWorkerDetails bool `query:"includeWorkerDetails"` IncludeEquipmentDetails bool `query:"includeEquipmentDetails"` + IncludeFleetDetails bool `query:"includeFleetDetails"` } type GetTractorByIDOptions struct { @@ -21,6 +22,7 @@ type GetTractorByIDOptions struct { UserID pulid.ID IncludeWorkerDetails bool `query:"includeWorkerDetails"` IncludeEquipmentDetails bool `query:"includeEquipmentDetails"` + IncludeFleetDetails bool `query:"includeFleetDetails"` } type TractorRepository interface { diff --git a/internal/core/ports/repositories/trailer.go b/internal/core/ports/repositories/trailer.go index b3ac0b0e0..4477f93f1 100644 --- a/internal/core/ports/repositories/trailer.go +++ b/internal/core/ports/repositories/trailer.go @@ -11,6 +11,7 @@ import ( type ListTrailerOptions struct { Filter *ports.LimitOffsetQueryOptions IncludeEquipmentDetails bool `query:"includeEquipmentDetails"` + IncludeFleetDetails bool `query:"includeFleetDetails"` } type GetTrailerByIDOptions struct { @@ -19,6 +20,7 @@ type GetTrailerByIDOptions struct { BuID pulid.ID UserID pulid.ID IncludeEquipmentDetails bool `query:"includeEquipmentDetails"` + IncludeFleetDetails bool 
`query:"includeFleetDetails"` } type TrailerRepository interface { diff --git a/internal/core/ports/repositories/usstate.go b/internal/core/ports/repositories/usstate.go index 99e0adc79..8950874ae 100644 --- a/internal/core/ports/repositories/usstate.go +++ b/internal/core/ports/repositories/usstate.go @@ -9,6 +9,7 @@ import ( type UsStateRepository interface { List(ctx context.Context) (*ports.ListResult[*usstate.UsState], error) + GetByAbbreviation(ctx context.Context, abbreviation string) (*usstate.UsState, error) } type UsStateCacheRepository interface { diff --git a/internal/core/services/routing/service.go b/internal/core/services/routing/service.go index 4fb0f0cb4..a991b3503 100644 --- a/internal/core/services/routing/service.go +++ b/internal/core/services/routing/service.go @@ -1 +1,68 @@ package routing + +import ( + "github.com/emoss08/trenova/internal/core/ports" + "github.com/emoss08/trenova/internal/core/ports/repositories" + "github.com/emoss08/trenova/internal/infrastructure/external/maps/pcmiler" + "github.com/emoss08/trenova/internal/pkg/logger" + "github.com/rs/zerolog" + "go.uber.org/fx" + "golang.org/x/net/context" +) + +type ServiceParams struct { + fx.In + + Logger *logger.Logger + Repo repositories.PCMilerConfigurationRepository + Client pcmiler.Client +} + +type Service struct { + l *zerolog.Logger + repo repositories.PCMilerConfigurationRepository + client pcmiler.Client +} + +func NewService(p ServiceParams) *Service { + log := p.Logger.With().Str("service", "routing").Logger() + + return &Service{ + l: &log, + repo: p.Repo, + client: p.Client, + } +} + +type SingleSearchParams struct { + // The query to search for + Query string `json:"query" query:"query"` + + // The options for the PCMiler configuration + ConfigOpts repositories.GetPCMilerConfigurationOptions +} + +func (s *Service) SingleSearch(ctx context.Context, opts SingleSearchParams) (*ports.ListResult[*pcmiler.Location], error) { + config, err := s.repo.GetPCMilerConfiguration(ctx, opts.ConfigOpts) + if err != nil { + s.l.Error().Err(err).Msg("failed to get PCMiler configuration") + return nil, err + } + + params := &pcmiler.SingleSearchParams{ + AuthToken: config.APIKey, + Query: opts.Query, + Countries: "US", + } + + resp, err := s.client.SingleSearch(ctx, params) + if err != nil { + s.l.Error().Err(err).Msg("failed to make single search request") + return nil, err + } + + return &ports.ListResult[*pcmiler.Location]{ + Items: resp.Locations, + Total: len(resp.Locations), + }, nil +} diff --git a/internal/core/services/shipment/events.go b/internal/core/services/shipment/events.go deleted file mode 100644 index 861ffcfd8..000000000 --- a/internal/core/services/shipment/events.go +++ /dev/null @@ -1 +0,0 @@ -package shipment diff --git a/internal/core/services/shipment/service.go b/internal/core/services/shipment/service.go index 861ffcfd8..8ca21398b 100644 --- a/internal/core/services/shipment/service.go +++ b/internal/core/services/shipment/service.go @@ -1 +1,287 @@ package shipment + +import ( + "context" + + "github.com/emoss08/trenova/internal/core/domain/permission" + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/core/ports" + "github.com/emoss08/trenova/internal/core/ports/repositories" + "github.com/emoss08/trenova/internal/core/ports/services" + "github.com/emoss08/trenova/internal/core/services/audit" + "github.com/emoss08/trenova/internal/core/services/search" + "github.com/emoss08/trenova/internal/pkg/errors" + 
"github.com/emoss08/trenova/internal/pkg/logger" + "github.com/emoss08/trenova/internal/pkg/utils/jsonutils" + "github.com/emoss08/trenova/internal/pkg/validator" + "github.com/emoss08/trenova/internal/pkg/validator/shipmentvalidator" + "github.com/emoss08/trenova/pkg/types" + "github.com/emoss08/trenova/pkg/types/pulid" + "github.com/rotisserie/eris" + "github.com/rs/zerolog" + "go.uber.org/fx" +) + +type ServiceParams struct { + fx.In + + Logger *logger.Logger + Repo repositories.ShipmentRepository + PermService services.PermissionService + AuditService services.AuditService + SearchService *search.Service + Validator *shipmentvalidator.Validator +} + +type Service struct { + l *zerolog.Logger + repo repositories.ShipmentRepository + ps services.PermissionService + as services.AuditService + ss *search.Service + v *shipmentvalidator.Validator +} + +func NewService(p ServiceParams) *Service { + log := p.Logger.With(). + Str("service", "shipment"). + Logger() + + return &Service{ + l: &log, + repo: p.Repo, + ps: p.PermService, + as: p.AuditService, + ss: p.SearchService, + v: p.Validator, + } +} + +func (s *Service) SelectOptions(ctx context.Context, opts *repositories.ListShipmentOptions) ([]*types.SelectOption, error) { + result, err := s.repo.List(ctx, opts) + if err != nil { + return nil, err + } + + options := make([]*types.SelectOption, len(result.Items)) + for i, t := range result.Items { + options[i] = &types.SelectOption{ + Value: t.GetID(), + Label: t.ProNumber, + } + } + + return options, nil +} + +func (s *Service) List(ctx context.Context, opts *repositories.ListShipmentOptions) (*ports.ListResult[*shipment.Shipment], error) { + log := s.l.With().Str("operation", "List").Logger() + + result, err := s.ps.HasAnyPermissions(ctx, + []*services.PermissionCheck{ + { + UserID: opts.Filter.TenantOpts.UserID, + Resource: permission.ResourceShipment, + Action: permission.ActionRead, + BusinessUnitID: opts.Filter.TenantOpts.BuID, + OrganizationID: opts.Filter.TenantOpts.OrgID, + }, + }, + ) + if err != nil { + s.l.Error().Err(err).Msg("failed to check permissions") + return nil, eris.Wrap(err, "check permissions") + } + + if !result.Allowed { + return nil, errors.NewAuthorizationError("You do not have permission to read shipments") + } + + entities, err := s.repo.List(ctx, opts) + if err != nil { + log.Error().Err(err).Msg("failed to list shipments") + return nil, err + } + + return &ports.ListResult[*shipment.Shipment]{ + Items: entities.Items, + Total: entities.Total, + }, nil +} + +func (s *Service) Get(ctx context.Context, opts repositories.GetShipmentByIDOptions) (*shipment.Shipment, error) { + log := s.l.With(). + Str("operation", "GetByID"). + Str("shipmentID", opts.ID.String()). + Logger() + + result, err := s.ps.HasAnyPermissions(ctx, + []*services.PermissionCheck{ + { + UserID: opts.UserID, + Resource: permission.ResourceShipment, + Action: permission.ActionRead, + BusinessUnitID: opts.BuID, + OrganizationID: opts.OrgID, + }, + }, + ) + if err != nil { + log.Error().Err(err).Msg("failed to check permissions") + return nil, err + } + + if !result.Allowed { + return nil, errors.NewAuthorizationError("You do not have permission to read this shipment") + } + + entity, err := s.repo.GetByID(ctx, opts) + if err != nil { + log.Error().Err(err).Msg("failed to get shipment") + return nil, err + } + + return entity, nil +} + +func (s *Service) Create(ctx context.Context, shp *shipment.Shipment, userID pulid.ID) (*shipment.Shipment, error) { + log := s.l.With(). 
+ Str("operation", "Create"). + Str("code", shp.ProNumber). + Logger() + + result, err := s.ps.HasAnyPermissions(ctx, + []*services.PermissionCheck{ + { + UserID: userID, + Resource: permission.ResourceShipment, + Action: permission.ActionCreate, + BusinessUnitID: shp.BusinessUnitID, + OrganizationID: shp.OrganizationID, + }, + }, + ) + if err != nil { + log.Error().Err(err).Msg("failed to check permissions") + return nil, err + } + + if !result.Allowed { + return nil, errors.NewAuthorizationError("You do not have permission to create a shipment") + } + + valCtx := &validator.ValidationContext{ + IsCreate: true, + IsUpdate: false, + } + + if err := s.v.Validate(ctx, valCtx, shp); err != nil { + return nil, err + } + + createdEntity, err := s.repo.Create(ctx, shp) + if err != nil { + return nil, err + } + + if err = s.ss.Index(ctx, createdEntity); err != nil { + log.Error().Err(err).Msg("failed to update search index") + } + + err = s.as.LogAction( + &services.LogActionParams{ + Resource: permission.ResourceShipment, + ResourceID: createdEntity.GetID(), + Action: permission.ActionCreate, + UserID: userID, + CurrentState: jsonutils.MustToJSON(createdEntity), + OrganizationID: createdEntity.OrganizationID, + BusinessUnitID: createdEntity.BusinessUnitID, + }, + audit.WithComment("Shipment created"), + ) + if err != nil { + log.Error().Err(err).Msg("failed to log shipment creation") + } + + return createdEntity, nil +} + +func (s *Service) Update(ctx context.Context, shp *shipment.Shipment, userID pulid.ID) (*shipment.Shipment, error) { + log := s.l.With(). + Str("operation", "Update"). + Str("code", shp.ProNumber). + Logger() + + result, err := s.ps.HasAnyPermissions(ctx, + []*services.PermissionCheck{ + { + UserID: userID, + Resource: permission.ResourceShipment, + Action: permission.ActionUpdate, + BusinessUnitID: shp.BusinessUnitID, + OrganizationID: shp.OrganizationID, + }, + }, + ) + if err != nil { + log.Error().Err(err).Msg("failed to check permissions") + return nil, err + } + + if !result.Allowed { + return nil, errors.NewAuthorizationError("You do not have permission to update this shipment") + } + + valCtx := &validator.ValidationContext{ + IsUpdate: true, + IsCreate: false, + } + + if err := s.v.Validate(ctx, valCtx, shp); err != nil { + return nil, err + } + + original, err := s.repo.GetByID(ctx, repositories.GetShipmentByIDOptions{ + ID: shp.ID, + OrgID: shp.OrganizationID, + BuID: shp.BusinessUnitID, + }) + if err != nil { + return nil, err + } + + updatedEntity, err := s.repo.Update(ctx, shp) + if err != nil { + log.Error().Err(err).Msg("failed to update shipment") + return nil, err + } + + if err = s.ss.Index(ctx, updatedEntity); err != nil { + log.Error(). + Err(err). + Interface("shipment", updatedEntity). 
+ Msg("failed to update search index") + } + + // Log the update if the insert was successful + err = s.as.LogAction( + &services.LogActionParams{ + Resource: permission.ResourceShipment, + ResourceID: updatedEntity.GetID(), + Action: permission.ActionUpdate, + UserID: userID, + CurrentState: jsonutils.MustToJSON(updatedEntity), + PreviousState: jsonutils.MustToJSON(original), + OrganizationID: updatedEntity.OrganizationID, + BusinessUnitID: updatedEntity.BusinessUnitID, + }, + audit.WithComment("Shipment updated"), + audit.WithDiff(original, updatedEntity), + ) + if err != nil { + log.Error().Err(err).Msg("failed to log shipment update") + } + + return updatedEntity, nil +} diff --git a/internal/core/services/tracking/realtime.go b/internal/core/services/tracking/realtime.go deleted file mode 100644 index dc2fc9212..000000000 --- a/internal/core/services/tracking/realtime.go +++ /dev/null @@ -1 +0,0 @@ -package tracking diff --git a/internal/core/services/tracking/service.go b/internal/core/services/tracking/service.go deleted file mode 100644 index dc2fc9212..000000000 --- a/internal/core/services/tracking/service.go +++ /dev/null @@ -1 +0,0 @@ -package tracking diff --git a/internal/infrastructure/database/postgres/migrations/20241211015840_shipment.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20241211015840_shipment.tx.up.sql index 94e5286e1..ed5339538 100644 --- a/internal/infrastructure/database/postgres/migrations/20241211015840_shipment.tx.up.sql +++ b/internal/infrastructure/database/postgres/migrations/20241211015840_shipment.tx.up.sql @@ -2,13 +2,14 @@ CREATE TYPE "shipment_status_enum" AS ENUM( 'New', -- Initial state when shipment is created 'InTransit', -- Shipment is currently being transported + 'Delayed', -- Shipment is currently delayed 'Completed', -- Shipment has been delivered successfully 'Billed', -- Shipment has been billed to the customer 'Canceled' -- Shipment has been Canceled ); CREATE TYPE "rating_method_enum" AS ENUM( - 'Flat', -- Fixed rate for entire shipment + 'FlatRate', -- Fixed rate for entire shipment 'PerMile', -- Rate calculated per mile traveled 'PerStop', -- Rate calculated per stop made 'PerPound', -- Rate calculated by weight @@ -23,22 +24,29 @@ CREATE TABLE IF NOT EXISTS "shipments"( "pro_number" varchar(100) NOT NULL, "organization_id" varchar(100) NOT NULL, "business_unit_id" varchar(100) NOT NULL, + -- Core Fields "status" shipment_status_enum NOT NULL DEFAULT 'New', "bol" varchar(100) NOT NULL, - "rating_method" rating_method_enum NOT NULL DEFAULT 'Flat', - "rating_unit" integer NOT NULL DEFAULT 1 CHECK ("rating_unit" > 0), - "freight_charge_amount" numeric(19, 4) NOT NULL DEFAULT 0 CHECK ("freight_charge_amount" >= 0), - "other_charge_amount" numeric(19, 4) NOT NULL DEFAULT 0 CHECK ("other_charge_amount" >= 0), - "total_charge_amount" numeric(19, 4) NOT NULL DEFAULT 0 CHECK ("total_charge_amount" >= 0), - "pieces" integer CHECK ("pieces" > 0), - "weight" integer CHECK ("weight" > 0), + -- Misc. 
Shipment Related Fields + "actual_ship_date" bigint, + "actual_delivery_date" bigint, "temperature_min" numeric(10, 2), "temperature_max" numeric(10, 2), + -- Billing Related Fields "bill_date" bigint, "ready_to_bill" boolean NOT NULL DEFAULT FALSE, "ready_to_bill_date" bigint, "sent_to_billing" boolean NOT NULL DEFAULT FALSE, + "rating_unit" integer NOT NULL DEFAULT 1 CHECK ("rating_unit" > 0), + "rating_method" rating_method_enum NOT NULL DEFAULT 'FlatRate', + "freight_charge_amount" numeric(19, 4) NOT NULL DEFAULT 0 CHECK ("freight_charge_amount" >= 0), + "other_charge_amount" numeric(19, 4) NOT NULL DEFAULT 0 CHECK ("other_charge_amount" >= 0), + "total_charge_amount" numeric(19, 4) NOT NULL DEFAULT 0 CHECK ("total_charge_amount" >= 0), + "pieces" integer CHECK ("pieces" > 0), + "weight" integer CHECK ("weight" > 0), "sent_to_billing_date" bigint, + "billed" boolean NOT NULL DEFAULT FALSE, + -- Metadata "version" bigint NOT NULL DEFAULT 0, "created_at" bigint NOT NULL DEFAULT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) ::bigint, "updated_at" bigint NOT NULL DEFAULT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) ::bigint, @@ -59,6 +67,5 @@ CREATE INDEX IF NOT EXISTS "idx_shipments_business_unit" ON "shipments"("busines CREATE INDEX IF NOT EXISTS "idx_shipments_billing_status" ON "shipments"("ready_to_bill", "sent_to_billing"); --- Add helpful comments COMMENT ON TABLE shipments IS 'Stores information about shipments and their billing status'; diff --git a/internal/infrastructure/database/postgres/migrations/20241211015858_shipment_move.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20241211015858_shipment_move.tx.up.sql index 69c25eba2..47631da3b 100644 --- a/internal/infrastructure/database/postgres/migrations/20241211015858_shipment_move.tx.up.sql +++ b/internal/infrastructure/database/postgres/migrations/20241211015858_shipment_move.tx.up.sql @@ -3,7 +3,7 @@ CREATE TYPE "stop_status_enum" AS ENUM( 'New', -- Initial state when move is created 'InTransit', -- Move is currently being executed 'Completed', -- Move has been completed successfully - 'Cancelled' -- Move has been cancelled and won't be completed + 'Canceled' -- Move has been cancelled and won't be completed ); CREATE TABLE IF NOT EXISTS "shipment_moves"( @@ -16,10 +16,11 @@ CREATE TABLE IF NOT EXISTS "shipment_moves"( -- Core Fields "status" stop_status_enum NOT NULL DEFAULT 'New', "loaded" boolean NOT NULL DEFAULT TRUE, - "sequence_number" integer NOT NULL DEFAULT 0 CHECK ("sequence_number" >= 0), + "sequence" integer NOT NULL DEFAULT 0 CHECK ("sequence" >= 0), "distance" float, -- Metadata "version" bigint NOT NULL DEFAULT 0, + "created_at" bigint NOT NULL DEFAULT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) ::bigint, "updated_at" bigint NOT NULL DEFAULT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) ::bigint, -- Constraints @@ -37,7 +38,7 @@ CREATE INDEX IF NOT EXISTS "idx_shipment_moves_business_unit" ON "shipment_moves CREATE INDEX IF NOT EXISTS "idx_shipment_moves_shipment" ON "shipment_moves"("shipment_id", "organization_id"); -CREATE INDEX IF NOT EXISTS "idx_shipment_moves_sequence_number" ON "shipment_moves"("sequence_number"); +CREATE INDEX IF NOT EXISTS "idx_shipment_moves_sequence" ON "shipment_moves"("sequence"); COMMENT ON TABLE shipment_moves IS 'Stores information about individual moves within a shipment journey'; diff --git a/internal/infrastructure/database/postgres/migrations/20241211015927_stop.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20241211015927_stop.tx.up.sql index b1e3efa93..1602e630d 
100644 --- a/internal/infrastructure/database/postgres/migrations/20241211015927_stop.tx.up.sql +++ b/internal/infrastructure/database/postgres/migrations/20241211015927_stop.tx.up.sql @@ -2,7 +2,7 @@ CREATE TYPE stop_type_enum AS ENUM( 'Pickup', -- Regular pickup stop 'Delivery', -- Regular delivery stop - 'SplitDrop', -- Partial delivery of shipment + 'SplitDelivery', -- Partial delivery of shipment 'SplitPickup' -- Partial pickup of shipment ); @@ -16,22 +16,21 @@ CREATE TABLE IF NOT EXISTS stops( "status" stop_status_enum NOT NULL DEFAULT 'New', "type" stop_type_enum NOT NULL DEFAULT 'Pickup', "sequence" integer NOT NULL DEFAULT 0, - "pieces" integer CHECK ("pieces" > 0), - "weight" integer CHECK ("weight" > 0), - "scheduled_arrival_date" bigint NOT NULL, - "scheduled_departure_date" bigint NOT NULL, - "actual_arrival_date" bigint, - "actual_departure_date" bigint, - "address_line" varchar(100) NOT NULL, + "pieces" integer, + "weight" integer, + "planned_arrival" bigint NOT NULL, + "planned_departure" bigint NOT NULL, + "actual_arrival" bigint, + "actual_departure" bigint, + "address_line" varchar(255), + -- Metadata "version" bigint NOT NULL DEFAULT 0, "created_at" bigint NOT NULL DEFAULT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) ::bigint, "updated_at" bigint NOT NULL DEFAULT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) ::bigint, CONSTRAINT "pk_stops" PRIMARY KEY ("id", "organization_id", "business_unit_id"), CONSTRAINT "fk_stops_business_unit" FOREIGN KEY ("business_unit_id") REFERENCES "business_units"("id") ON UPDATE NO ACTION ON DELETE CASCADE, CONSTRAINT "fk_stops_organization" FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE NO ACTION ON DELETE CASCADE, - CONSTRAINT "fk_stops_shipment_move" FOREIGN KEY ("shipment_move_id", "organization_id", "business_unit_id") REFERENCES "shipment_moves"("id", "organization_id", "business_unit_id") ON UPDATE NO ACTION ON DELETE CASCADE, - CONSTRAINT "check_scheduled_dates" CHECK ("scheduled_departure_date" >= "scheduled_arrival_date"), - CONSTRAINT "check_actual_dates" CHECK ("actual_departure_date" >= "actual_arrival_date") + CONSTRAINT "fk_stops_shipment_move" FOREIGN KEY ("shipment_move_id", "organization_id", "business_unit_id") REFERENCES "shipment_moves"("id", "organization_id", "business_unit_id") ON UPDATE NO ACTION ON DELETE CASCADE ); CREATE INDEX IF NOT EXISTS "idx_stops_created_at" ON "stops"("created_at", "updated_at"); @@ -42,7 +41,7 @@ CREATE INDEX IF NOT EXISTS "idx_stops_type" ON "stops"("type"); CREATE INDEX IF NOT EXISTS "idx_stops_business_unit" ON "stops"("business_unit_id", "organization_id"); -CREATE INDEX IF NOT EXISTS "idx_stops_shipment_move" ON "stops"("shipment_move_id", "organization_id"); +CREATE INDEX IF NOT EXISTS "idx_stops_shipment_move" ON "stops"("shipment_move_id", "organization_id", "business_unit_id"); COMMENT ON TABLE stops IS 'Stores information about pickup and delivery stops for shipments'; diff --git a/internal/infrastructure/database/postgres/migrations/20241211020004_worker.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20241211020004_worker.tx.up.sql index ed87805c3..4c5d55b66 100644 --- a/internal/infrastructure/database/postgres/migrations/20241211020004_worker.tx.up.sql +++ b/internal/infrastructure/database/postgres/migrations/20241211020004_worker.tx.up.sql @@ -134,7 +134,8 @@ ALTER TABLE "shipment_moves" ADD CONSTRAINT "fk_shipments_primary_worker" FOREIGN KEY ("primary_worker_id", "organization_id", "business_unit_id") REFERENCES "workers"("id", 
"organization_id", "business_unit_id") ON UPDATE NO ACTION ON DELETE SET NULL; ALTER TABLE "shipment_moves" - ADD COLUMN "secondary_worker_id" varchar(100) NOT NULL; + ADD COLUMN "secondary_worker_id" varchar(100); + ALTER TABLE "shipment_moves" ADD CONSTRAINT "fk_shipments_secondary_worker" FOREIGN KEY ("secondary_worker_id", "organization_id", "business_unit_id") REFERENCES "workers"("id", "organization_id", "business_unit_id") ON UPDATE NO ACTION ON DELETE SET NULL; diff --git a/internal/infrastructure/database/postgres/migrations/20250116025406_service_type.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20250116025406_service_type.tx.up.sql index 038fecadd..7dc466c6f 100644 --- a/internal/infrastructure/database/postgres/migrations/20250116025406_service_type.tx.up.sql +++ b/internal/infrastructure/database/postgres/migrations/20250116025406_service_type.tx.up.sql @@ -32,7 +32,7 @@ COMMENT ON TABLE "service_types" IS 'Stores information about service types'; --bun:split ALTER TABLE "shipments" - ADD COLUMN "service_type_id" varchar(100); + ADD COLUMN "service_type_id" varchar(100) NOT NULL; ALTER TABLE "shipments" ADD CONSTRAINT "fk_shipments_service_type" FOREIGN KEY ("service_type_id", "business_unit_id", "organization_id") REFERENCES "service_types"("id", "business_unit_id", "organization_id") ON UPDATE NO ACTION ON DELETE SET NULL; diff --git a/internal/infrastructure/database/postgres/migrations/20250122212814_location.tx.down.sql b/internal/infrastructure/database/postgres/migrations/20250122212814_location.tx.down.sql index 9fb708b77..4e1487625 100644 --- a/internal/infrastructure/database/postgres/migrations/20250122212814_location.tx.down.sql +++ b/internal/infrastructure/database/postgres/migrations/20250122212814_location.tx.down.sql @@ -1,3 +1 @@ -SET statement_timeout = 0; - -SELECT 1; +DROP TABLE IF EXISTS "locations"; \ No newline at end of file diff --git a/internal/infrastructure/database/postgres/migrations/20250122212814_location.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20250122212814_location.tx.up.sql index b627365af..155443203 100644 --- a/internal/infrastructure/database/postgres/migrations/20250122212814_location.tx.up.sql +++ b/internal/infrastructure/database/postgres/migrations/20250122212814_location.tx.up.sql @@ -44,3 +44,10 @@ CREATE INDEX "idx_locations_created_updated" ON "locations"("created_at", "updat COMMENT ON TABLE "locations" IS 'Stores information about locations'; +--bun:split +ALTER TABLE "stops" + ADD COLUMN "location_id" varchar(100) NOT NULL; + +ALTER TABLE "stops" + ADD CONSTRAINT "fk_stops_location" FOREIGN KEY ("location_id", "business_unit_id", "organization_id") REFERENCES "locations"("id", "business_unit_id", "organization_id") ON UPDATE NO ACTION ON DELETE SET NULL; + diff --git a/internal/infrastructure/database/postgres/migrations/20250125144620_tractor.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20250125144620_tractor.tx.up.sql index 9a89c6421..f57cf1404 100644 --- a/internal/infrastructure/database/postgres/migrations/20250125144620_tractor.tx.up.sql +++ b/internal/infrastructure/database/postgres/migrations/20250125144620_tractor.tx.up.sql @@ -60,3 +60,10 @@ CREATE INDEX "idx_tractors_created_updated" ON "tractors"("created_at", "updated COMMENT ON TABLE "tractors" IS 'Stores information about tractors'; +--bun:split +ALTER TABLE "shipment_moves" + ADD COLUMN "tractor_id" varchar(100) NOT NULL; + +ALTER TABLE "shipment_moves" + ADD CONSTRAINT 
"fk_shipment_moves_tractor" FOREIGN KEY ("tractor_id", "business_unit_id", "organization_id") REFERENCES "tractors"("id", "business_unit_id", "organization_id") ON UPDATE NO ACTION ON DELETE SET NULL; + diff --git a/internal/infrastructure/database/postgres/migrations/20250126210710_trailer.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20250126210710_trailer.tx.up.sql index dcbf2000b..b4fef982a 100644 --- a/internal/infrastructure/database/postgres/migrations/20250126210710_trailer.tx.up.sql +++ b/internal/infrastructure/database/postgres/migrations/20250126210710_trailer.tx.up.sql @@ -47,3 +47,10 @@ CREATE INDEX "idx_trailers_created_updated" ON "trailers"("created_at", "updated COMMENT ON TABLE "trailers" IS 'Stores information about trailers'; +--bun:split +ALTER TABLE "shipment_moves" + ADD COLUMN "trailer_id" varchar(100) NOT NULL; + +ALTER TABLE "shipment_moves" + ADD CONSTRAINT "fk_shipment_moves_trailer" FOREIGN KEY ("trailer_id", "business_unit_id", "organization_id") REFERENCES "trailers"("id", "business_unit_id", "organization_id") ON UPDATE NO ACTION ON DELETE SET NULL; + diff --git a/internal/infrastructure/database/postgres/migrations/20250201004605_pcmiler.tx.down.sql b/internal/infrastructure/database/postgres/migrations/20250201004605_pcmiler.tx.down.sql new file mode 100644 index 000000000..9fb708b77 --- /dev/null +++ b/internal/infrastructure/database/postgres/migrations/20250201004605_pcmiler.tx.down.sql @@ -0,0 +1,3 @@ +SET statement_timeout = 0; + +SELECT 1; diff --git a/internal/infrastructure/database/postgres/migrations/20250201004605_pcmiler.tx.up.sql b/internal/infrastructure/database/postgres/migrations/20250201004605_pcmiler.tx.up.sql new file mode 100644 index 000000000..ee1a216fe --- /dev/null +++ b/internal/infrastructure/database/postgres/migrations/20250201004605_pcmiler.tx.up.sql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS "pcmiler_configurations"( + -- Primary identifiers + "id" varchar(100) NOT NULL, + "business_unit_id" varchar(100) NOT NULL, + "organization_id" varchar(100) NOT NULL, + -- Core fields + "api_key" varchar(255) NOT NULL, + -- Metadata + "version" bigint NOT NULL DEFAULT 0, + "created_at" bigint NOT NULL DEFAULT EXTRACT(EPOCH FROM current_timestamp) ::bigint, + "updated_at" bigint NOT NULL DEFAULT EXTRACT(EPOCH FROM current_timestamp) ::bigint, + -- Constraints + CONSTRAINT "pk_pcmiler_configurations" PRIMARY KEY ("id", "business_unit_id", "organization_id"), + CONSTRAINT "fk_pcmiler_configurations_business_unit" FOREIGN KEY ("business_unit_id") REFERENCES "business_units"("id") ON UPDATE NO ACTION ON DELETE CASCADE, + CONSTRAINT "fk_pcmiler_configurations_organization" FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE NO ACTION ON DELETE CASCADE +); + +--bun:split +-- Ensure the organization has one PCMiler configuration +CREATE UNIQUE INDEX "idx_pcmiler_configurations_organization" ON "pcmiler_configurations"("organization_id"); diff --git a/internal/infrastructure/database/postgres/repositories/hazmatexpiration.go b/internal/infrastructure/database/postgres/repositories/hazmatexpiration.go index 125bcaebb..40eabc7ba 100644 --- a/internal/infrastructure/database/postgres/repositories/hazmatexpiration.go +++ b/internal/infrastructure/database/postgres/repositories/hazmatexpiration.go @@ -43,8 +43,6 @@ func (r *hazmatExpirationRepository) GetHazmatExpirationByStateID(ctx context.Co return nil, eris.Wrap(err, "get database connection") } - r.logger.Debug().Str("state_id", 
stateID.String()).Msg("getting hazmat expiration") - expiration := new(compliance.HazmatExpiration) err = dba.NewSelect().Model(expiration). Where("state_id = ?", stateID). @@ -53,7 +51,5 @@ func (r *hazmatExpirationRepository) GetHazmatExpirationByStateID(ctx context.Co return nil, eris.Wrap(err, "get hazmat expiration by state id") } - r.logger.Debug().Interface("expiration", expiration).Msg("hazmat expiration") - return expiration, nil } diff --git a/internal/infrastructure/database/postgres/repositories/location_test.go b/internal/infrastructure/database/postgres/repositories/location_test.go index b1d8de463..71ef976f6 100644 --- a/internal/infrastructure/database/postgres/repositories/location_test.go +++ b/internal/infrastructure/database/postgres/repositories/location_test.go @@ -53,7 +53,7 @@ func TestLocationRepository(t *testing.T) { OrgID: org.ID, BuID: bu.ID, }, - Query: "Test Location", + Query: "Ralphs", }, } diff --git a/internal/infrastructure/database/postgres/repositories/locationindex.go b/internal/infrastructure/database/postgres/repositories/locationindex.go new file mode 100644 index 000000000..d17a906a5 --- /dev/null +++ b/internal/infrastructure/database/postgres/repositories/locationindex.go @@ -0,0 +1,14 @@ +package repositories + +import ( + "github.com/emoss08/trenova/internal/core/ports/db" + "github.com/emoss08/trenova/internal/pkg/logger" + "go.uber.org/fx" +) + +type LocationIndexRepositoryParams struct { + fx.In + + DB db.Connection + Logger *logger.Logger +} diff --git a/internal/infrastructure/database/postgres/repositories/module.go b/internal/infrastructure/database/postgres/repositories/module.go index 7fc462746..209853113 100644 --- a/internal/infrastructure/database/postgres/repositories/module.go +++ b/internal/infrastructure/database/postgres/repositories/module.go @@ -24,4 +24,6 @@ var Module = fx.Module("postgres-repositories", fx.Provide( NewTractorRepository, NewTrailerRepository, NewCustomerRepository, + NewShipmentRepository, + NewPCMilerConfigurationRepository, )) diff --git a/internal/infrastructure/database/postgres/repositories/pcmilerconfiguration.go b/internal/infrastructure/database/postgres/repositories/pcmilerconfiguration.go new file mode 100644 index 000000000..587d944ea --- /dev/null +++ b/internal/infrastructure/database/postgres/repositories/pcmilerconfiguration.go @@ -0,0 +1,52 @@ +package repositories + +import ( + "context" + + "github.com/emoss08/trenova/internal/core/domain/pcmilerconfiguration" + "github.com/emoss08/trenova/internal/core/ports/db" + "github.com/emoss08/trenova/internal/core/ports/repositories" + "github.com/emoss08/trenova/internal/pkg/logger" + "github.com/rotisserie/eris" + "github.com/rs/zerolog" + "go.uber.org/fx" +) + +type PCMilerConfigurationRepositoryParams struct { + fx.In + + DB db.Connection + Logger *logger.Logger +} + +type pcmilerConfigurationRepository struct { + db db.Connection + logger *zerolog.Logger +} + +func NewPCMilerConfigurationRepository(p PCMilerConfigurationRepositoryParams) repositories.PCMilerConfigurationRepository { + log := p.Logger.With().Str("repository", "pcmiler").Logger() + + return &pcmilerConfigurationRepository{ + db: p.DB, + logger: &log, + } +} + +func (r *pcmilerConfigurationRepository) GetPCMilerConfiguration(ctx context.Context, opts repositories.GetPCMilerConfigurationOptions) (*pcmilerconfiguration.PCMilerConfiguration, error) { + dba, err := r.db.DB(ctx) + if err != nil { + return nil, eris.Wrap(err, "get database connection") + } + + config := 
new(pcmilerconfiguration.PCMilerConfiguration) + err = dba.NewSelect().Model(config). + Where("pcm.organization_id = ?", opts.OrgID). + Where("pcm.business_unit_id = ?", opts.BuID). + Scan(ctx) + if err != nil { + return nil, err + } + + return config, nil +} diff --git a/internal/infrastructure/database/postgres/repositories/shipment.go b/internal/infrastructure/database/postgres/repositories/shipment.go new file mode 100644 index 000000000..e1e7f2f9f --- /dev/null +++ b/internal/infrastructure/database/postgres/repositories/shipment.go @@ -0,0 +1,224 @@ +package repositories + +import ( + "context" + "database/sql" + "fmt" + + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/core/ports" + "github.com/emoss08/trenova/internal/core/ports/db" + "github.com/emoss08/trenova/internal/core/ports/repositories" + "github.com/emoss08/trenova/internal/pkg/errors" + "github.com/emoss08/trenova/internal/pkg/logger" + "github.com/emoss08/trenova/internal/pkg/utils/queryutils/queryfilters" + "github.com/rotisserie/eris" + "github.com/rs/zerolog" + "github.com/uptrace/bun" + "go.uber.org/fx" +) + +type ShipmentRepositoryParams struct { + fx.In + + DB db.Connection + Logger *logger.Logger +} + +type shipmentRepository struct { + db db.Connection + l *zerolog.Logger +} + +func NewShipmentRepository(p ShipmentRepositoryParams) repositories.ShipmentRepository { + log := p.Logger.With(). + Str("repository", "shipment"). + Logger() + + return &shipmentRepository{ + db: p.DB, + l: &log, + } +} + +func (sr *shipmentRepository) addOptions(q *bun.SelectQuery, opts repositories.ShipmentOptions) *bun.SelectQuery { + if opts.ExpandShipmentDetails { + q = q.Relation("Customer") + q = q.Relation("Moves") + + q = q.RelationWithOpts("Moves.Stops", bun.RelationOpts{ + Apply: func(sq *bun.SelectQuery) *bun.SelectQuery { + return sq.Relation("Location").Relation("Location.State") + }, + }) + + q = q.Relation("ServiceType") + q = q.Relation("Commodities") + } + + return q +} + +func (sr *shipmentRepository) filterQuery(q *bun.SelectQuery, opts *repositories.ListShipmentOptions) *bun.SelectQuery { + q = queryfilters.TenantFilterQuery(&queryfilters.TenantFilterQueryOptions{ + Query: q, + TableAlias: "sp", + Filter: opts.Filter, + }) + + if opts.Filter.Query != "" { + q = q.Where("sp.pro_number ILIKE ?", "%"+opts.Filter.Query+"%") + } + + q = sr.addOptions(q, opts.ShipmentOptions) + + return q.Limit(opts.Filter.Limit).Offset(opts.Filter.Offset) +} + +func (sr *shipmentRepository) List(ctx context.Context, opts *repositories.ListShipmentOptions) (*ports.ListResult[*shipment.Shipment], error) { + dba, err := sr.db.DB(ctx) + if err != nil { + return nil, eris.Wrap(err, "get database connection") + } + + log := sr.l.With(). + Str("operation", "List"). + Str("buID", opts.Filter.TenantOpts.BuID.String()). + Str("userID", opts.Filter.TenantOpts.UserID.String()). + Logger() + + entities := make([]*shipment.Shipment, 0) + + q := dba.NewSelect().Model(&entities) + q = sr.filterQuery(q, opts) + + total, err := q.ScanAndCount(ctx) + if err != nil { + log.Error().Err(err).Msg("failed to scan shipments") + return nil, err + } + + return &ports.ListResult[*shipment.Shipment]{ + Items: entities, + Total: total, + }, nil +} + +func (sr *shipmentRepository) GetByID(ctx context.Context, opts repositories.GetShipmentByIDOptions) (*shipment.Shipment, error) { + dba, err := sr.db.DB(ctx) + if err != nil { + return nil, eris.Wrap(err, "get database connection") + } + + log := sr.l.With(). 
+ Str("operation", "GetByID"). + Str("shipmentID", opts.ID.String()). + Logger() + + entity := new(shipment.Shipment) + + q := dba.NewSelect().Model(entity). + Where("sp.id = ? AND sp.organization_id = ? AND sp.business_unit_id = ?", opts.ID, opts.OrgID, opts.BuID) + + q = sr.addOptions(q, opts.ShipmentOptions) + + if err = q.Scan(ctx); err != nil { + if eris.Is(err, sql.ErrNoRows) { + return nil, errors.NewNotFoundError("Shipment not found within your organization") + } + + log.Error().Err(err).Msg("failed to get shipment") + return nil, err + } + + return entity, nil +} + +func (sr *shipmentRepository) Create(ctx context.Context, shp *shipment.Shipment) (*shipment.Shipment, error) { + dba, err := sr.db.DB(ctx) + if err != nil { + return nil, eris.Wrap(err, "get database connection") + } + + log := sr.l.With(). + Str("operation", "Create"). + Str("orgID", shp.OrganizationID.String()). + Str("buID", shp.BusinessUnitID.String()). + Logger() + + err = dba.RunInTx(ctx, nil, func(c context.Context, tx bun.Tx) error { + if _, iErr := tx.NewInsert().Model(shp).Exec(c); iErr != nil { + log.Error(). + Err(iErr). + Interface("shipment", shp). + Msg("failed to insert shipment") + return err + } + + return nil + }) + if err != nil { + log.Error().Err(err).Msg("failed to create shipment") + return nil, err + } + + return shp, nil +} + +func (sr *shipmentRepository) Update(ctx context.Context, shp *shipment.Shipment) (*shipment.Shipment, error) { + dba, err := sr.db.DB(ctx) + if err != nil { + return nil, eris.Wrap(err, "get database connection") + } + + log := sr.l.With(). + Str("operation", "Update"). + Str("id", shp.GetID()). + Int64("version", shp.Version). + Logger() + + err = dba.RunInTx(ctx, nil, func(c context.Context, tx bun.Tx) error { + ov := shp.Version + + shp.Version++ + + results, rErr := tx.NewUpdate(). + Model(shp). + WherePK(). + Where("sp.version = ?", ov). + Returning("*"). + Exec(c) + if rErr != nil { + log.Error(). + Err(rErr). + Interface("shipment", shp). + Msg("failed to update shipment") + return err + } + + rows, roErr := results.RowsAffected() + if roErr != nil { + log.Error(). + Err(roErr). + Interface("shipment", shp). + Msg("failed to get rows affected") + return err + } + + if rows == 0 { + return errors.NewValidationError( + "version", + errors.ErrVersionMismatch, + fmt.Sprintf("Version mismatch. The Shipment (%s) has either been updated or deleted since the last request.", shp.GetID()), + ) + } + + return nil + }) + if err != nil { + log.Error().Err(err).Msg("failed to update shipment") + return nil, err + } + + return shp, nil +} diff --git a/internal/infrastructure/database/postgres/repositories/tractor.go b/internal/infrastructure/database/postgres/repositories/tractor.go index 946cf927f..4cef3a7ed 100644 --- a/internal/infrastructure/database/postgres/repositories/tractor.go +++ b/internal/infrastructure/database/postgres/repositories/tractor.go @@ -55,7 +55,10 @@ func (tr *tractorRepository) filterQuery(q *bun.SelectQuery, opts *repositories. 
if opts.IncludeWorkerDetails { q = q.Relation("PrimaryWorker").Relation("PrimaryWorker.Profile") q = q.Relation("SecondaryWorker").Relation("SecondaryWorker.Profile") + } + if opts.IncludeFleetDetails { + q = q.Relation("FleetCode") } if opts.Filter.Query != "" { @@ -121,6 +124,10 @@ func (tr *tractorRepository) GetByID(ctx context.Context, opts repositories.GetT }) } + if opts.IncludeFleetDetails { + query = query.Relation("FleetCode") + } + // Include the equipment details if requested if opts.IncludeEquipmentDetails { query = query.Relation("EquipmentType").Relation("EquipmentManufacturer") diff --git a/internal/infrastructure/database/postgres/repositories/tractor_test.go b/internal/infrastructure/database/postgres/repositories/tractor_test.go index 7ff262b69..80d3b30a3 100644 --- a/internal/infrastructure/database/postgres/repositories/tractor_test.go +++ b/internal/infrastructure/database/postgres/repositories/tractor_test.go @@ -105,6 +105,26 @@ func TestTractorRepository(t *testing.T) { require.NotEmpty(t, result.Items[0].EquipmentManufacturer) }) + t.Run("list tractors with fleet details", func(t *testing.T) { + opts := &repoports.ListTractorOptions{ + IncludeFleetDetails: true, + Filter: &ports.LimitOffsetQueryOptions{ + Limit: 10, + Offset: 0, + TenantOpts: &ports.TenantOptions{ + OrgID: org.ID, + BuID: bu.ID, + }, + }, + } + + result, err := repo.List(ctx, opts) + require.NoError(t, err) + require.NotNil(t, result) + require.NotEmpty(t, result.Items) + require.NotEmpty(t, result.Items[0].FleetCode) + }) + t.Run("get tractor by id", func(t *testing.T) { testutils.TestRepoGetByID(ctx, t, repo, repoports.GetTractorByIDOptions{ ID: trk.ID, @@ -152,6 +172,19 @@ func TestTractorRepository(t *testing.T) { require.NotEmpty(t, entity.EquipmentType) }) + t.Run("get tractor by id with fleet details", func(t *testing.T) { + entity, err := repo.GetByID(ctx, repoports.GetTractorByIDOptions{ + ID: trk.ID, + OrgID: org.ID, + BuID: bu.ID, + IncludeFleetDetails: true, + }) + + require.NoError(t, err) + require.NotNil(t, entity) + require.NotEmpty(t, entity.FleetCode) + }) + t.Run("get tractor by id failure", func(t *testing.T) { result, err := repo.GetByID(ctx, repoports.GetTractorByIDOptions{ ID: "invalid-id", diff --git a/internal/infrastructure/database/postgres/repositories/trailer.go b/internal/infrastructure/database/postgres/repositories/trailer.go index b6ee55186..da817c603 100644 --- a/internal/infrastructure/database/postgres/repositories/trailer.go +++ b/internal/infrastructure/database/postgres/repositories/trailer.go @@ -52,6 +52,10 @@ func (tr *trailerRepository) filterQuery(q *bun.SelectQuery, opts *repositories. 
q = q.Relation("EquipmentType").Relation("EquipmentManufacturer") } + if opts.IncludeFleetDetails { + q = q.Relation("FleetCode") + } + if opts.Filter.Query != "" { q = q.Where("tr.code ILIKE ?", "%"+opts.Filter.Query+"%") } @@ -108,6 +112,10 @@ func (tr *trailerRepository) GetByID(ctx context.Context, opts repositories.GetT query = query.Relation("EquipmentType").Relation("EquipmentManufacturer") } + if opts.IncludeFleetDetails { + query = query.Relation("FleetCode") + } + if err = query.Scan(ctx); err != nil { if eris.Is(err, sql.ErrNoRows) { return nil, errors.NewNotFoundError("Trailer not found within your organization") diff --git a/internal/infrastructure/database/postgres/repositories/trailer_test.go b/internal/infrastructure/database/postgres/repositories/trailer_test.go index 9365b371c..90d052f87 100644 --- a/internal/infrastructure/database/postgres/repositories/trailer_test.go +++ b/internal/infrastructure/database/postgres/repositories/trailer_test.go @@ -81,6 +81,26 @@ func TestTrailerRepository(t *testing.T) { require.NotEmpty(t, result.Items[0].EquipmentManufacturer) }) + t.Run("list trailers with fleet details", func(t *testing.T) { + opts := &repoports.ListTrailerOptions{ + IncludeFleetDetails: true, + Filter: &ports.LimitOffsetQueryOptions{ + Limit: 10, + Offset: 0, + TenantOpts: &ports.TenantOptions{ + OrgID: org.ID, + BuID: bu.ID, + }, + }, + } + + result, err := repo.List(ctx, opts) + require.NoError(t, err) + require.NotNil(t, result) + require.NotEmpty(t, result.Items) + require.NotEmpty(t, result.Items[0].FleetCode) + }) + t.Run("get trailer by id", func(t *testing.T) { testutils.TestRepoGetByID(ctx, t, repo, repoports.GetTrailerByIDOptions{ ID: trail.ID, @@ -114,6 +134,19 @@ func TestTrailerRepository(t *testing.T) { require.NotEmpty(t, entity.EquipmentManufacturer) }) + t.Run("get trailer by id with fleet details", func(t *testing.T) { + entity, err := repo.GetByID(ctx, repoports.GetTrailerByIDOptions{ + ID: trail.ID, + OrgID: org.ID, + BuID: bu.ID, + IncludeFleetDetails: true, + }) + + require.NoError(t, err) + require.NotNil(t, entity) + require.NotEmpty(t, entity.FleetCode) + }) + t.Run("get trailer by id failure", func(t *testing.T) { result, err := repo.GetByID(ctx, repoports.GetTrailerByIDOptions{ ID: "invalid-id", diff --git a/internal/infrastructure/database/postgres/repositories/usstate.go b/internal/infrastructure/database/postgres/repositories/usstate.go index d8014ec21..da250ecf6 100644 --- a/internal/infrastructure/database/postgres/repositories/usstate.go +++ b/internal/infrastructure/database/postgres/repositories/usstate.go @@ -10,6 +10,7 @@ import ( "github.com/emoss08/trenova/internal/pkg/logger" "github.com/rotisserie/eris" "github.com/rs/zerolog" + "github.com/uptrace/bun" "go.uber.org/fx" ) @@ -76,3 +77,50 @@ func (r *usStateRepository) List(ctx context.Context) (*ports.ListResult[*usstat Total: count, }, nil } + +func (r *usStateRepository) GetByAbbreviation(ctx context.Context, abbreviation string) (*usstate.UsState, error) { + dba, err := r.db.DB(ctx) + if err != nil { + return nil, eris.Wrap(err, "get database connection") + } + + log := r.l.With(). + Str("operation", "GetByAbbreviation"). + Str("abbreviation", abbreviation). + Logger() + + state := new(usstate.UsState) + + if err = dba.NewSelect(). + Model(state). + Where("abbreviation = ?", abbreviation). 
+ Scan(ctx); err != nil { + log.Error().Err(err).Msg("failed to get us state by abbreviation") + return nil, eris.Wrap(err, "failed to get us state by abbreviation") + } + + return state, nil +} + +func (r *usStateRepository) GetBulkStatesByAbbreviation(ctx context.Context, abbreviations []string) ([]*usstate.UsState, error) { + dba, err := r.db.DB(ctx) + if err != nil { + return nil, eris.Wrap(err, "get database connection") + } + + log := r.l.With(). + Str("operation", "GetBulkStatesByAbbreviation"). + Logger() + + states := make([]*usstate.UsState, 0) + + if err = dba.NewSelect(). + Model(&states). + Where("abbreviation IN (?)", bun.In(abbreviations)). + Scan(ctx); err != nil { + log.Error().Err(err).Msg("failed to get us states by abbreviation") + return nil, eris.Wrap(err, "failed to get us states by abbreviation") + } + + return states, nil +} diff --git a/internal/infrastructure/external/maps/bing.go b/internal/infrastructure/external/maps/bing.go deleted file mode 100644 index 3470f7b5c..000000000 --- a/internal/infrastructure/external/maps/bing.go +++ /dev/null @@ -1 +0,0 @@ -package maps diff --git a/internal/infrastructure/external/maps/google.go b/internal/infrastructure/external/maps/google.go deleted file mode 100644 index 3470f7b5c..000000000 --- a/internal/infrastructure/external/maps/google.go +++ /dev/null @@ -1 +0,0 @@ -package maps diff --git a/internal/infrastructure/external/maps/here.go b/internal/infrastructure/external/maps/here.go deleted file mode 100644 index 3470f7b5c..000000000 --- a/internal/infrastructure/external/maps/here.go +++ /dev/null @@ -1 +0,0 @@ -package maps diff --git a/internal/infrastructure/external/maps/mapbox.go b/internal/infrastructure/external/maps/mapbox.go deleted file mode 100644 index 3470f7b5c..000000000 --- a/internal/infrastructure/external/maps/mapbox.go +++ /dev/null @@ -1 +0,0 @@ -package maps diff --git a/internal/infrastructure/external/maps/pcmiler.go b/internal/infrastructure/external/maps/pcmiler.go deleted file mode 100644 index 3470f7b5c..000000000 --- a/internal/infrastructure/external/maps/pcmiler.go +++ /dev/null @@ -1 +0,0 @@ -package maps diff --git a/internal/infrastructure/external/maps/pcmiler/client.go b/internal/infrastructure/external/maps/pcmiler/client.go new file mode 100644 index 000000000..e27898001 --- /dev/null +++ b/internal/infrastructure/external/maps/pcmiler/client.go @@ -0,0 +1,98 @@ +package pcmiler + +import ( + "context" + "fmt" + "time" + + "github.com/emoss08/trenova/internal/pkg/logger" + "github.com/google/go-querystring/query" + "github.com/imroc/req/v3" + "github.com/rs/zerolog" + "go.uber.org/fx" +) + +type Client interface { + SingleSearch(ctx context.Context, req *SingleSearchParams) (*LocationResponse, error) +} + +type ClientParams struct { + fx.In + + Logger *logger.Logger +} + +type client struct { + l *zerolog.Logger + rc *req.Client +} + +func NewClient(p ClientParams) Client { + log := p.Logger.With().Str("client", "pcmiler").Logger() + + reqClient := req.C(). + SetTimeout(10 * time.Second). + EnableDumpEachRequest(). 
+ EnableCompression() + + c := &client{ + l: &log, + rc: reqClient, + } + + return c +} + +func (c *client) SingleSearch(ctx context.Context, params *SingleSearchParams) (*LocationResponse, error) { + v, err := query.Values(params) + if err != nil { + c.l.Error().Err(err).Msg("failed to parse single search params") + return nil, err + } + + url := fmt.Sprintf("%s?%s", SingleSearchURL, v.Encode()) + c.l.Trace().Msgf("Making single search request to %s", url) + + var locationResp LocationResponse + resp, err := c.rc.R(). + SetContext(ctx). + SetSuccessResult(&locationResp). + Get(url) + if err != nil { + c.l.Error().Err(err).Msg("failed to make single search request") + return nil, err + } + + if resp.IsErrorState() { + c.l.Error().Interface("error", resp.Err).Msg("single search request failed") + return nil, resp.Err + } + + return &locationResp, nil +} + +type RouteReportParams struct { + AuthToken string `url:"authToken"` + + // Comma separated list of lat/long pairs separated by semi colons + // Example: -76.123456,42.123456;-76.123126,42.123126 + Stops string `url:"stops"` + + // The asset ID to use for the route report + AssetID string `url:"assetId"` + + // The place ID to use for the route report + PlaceID string `url:"placeId"` + + // The region to use for the route report + // Default is 4 (North America) + Region Region `url:"region"` + + // Comma separated list of reports to generate + // Example: "Mileage,Detail,CalcMiles,Directions,Geotunnel,LeastCost,Road,State,WeatherAlerts" + Reports string `url:"reports"` +} + +// func (c *client) RouteReport(ctx context.Context, params *RouteReportParams) (interface{}, error) { + +// } diff --git a/internal/infrastructure/external/maps/pcmiler/errors.go b/internal/infrastructure/external/maps/pcmiler/errors.go new file mode 100644 index 000000000..5761b9390 --- /dev/null +++ b/internal/infrastructure/external/maps/pcmiler/errors.go @@ -0,0 +1,46 @@ +package pcmiler + +type ErrorCodes int + +const ( + ErrorCodeOK = ErrorCodes(0) // OK + ErrorCodeNoGlobals = ErrorCodes(1) // NoGlobals + ErrorCodeGeoGlobals = ErrorCodes(2) // GeoGlobals + ErrorCodeGridGlobals = ErrorCodes(3) // GridGlobals + ErrorCodePOIGlobals = ErrorCodes(4) // POIGlobals + ErrorCodeInvalidID = ErrorCodes(5) // InvalidID + ErrorCodeNotImplemented = ErrorCodes(6) // NotImplemented + ErrorCodeNoQuery = ErrorCodes(7) // NoQuery + ErrorCodeNoQueryAfterFormatting = ErrorCodes(8) // NoQueryAfterFormatting + ErrorCodeNoAllowedInterps = ErrorCodes(9) // NoAllowedInterps + ErrorCodeInvalidNumResultsRequested = ErrorCodes(10) // InvalidNumResultsRequested + ErrorCodeNoDataLoaded = ErrorCodes(11) // NoDataLoaded + ErrorCodeDataLoad = ErrorCodes(12) // DataLoad + ErrorCodeBadData = ErrorCodes(13) // BadData + ErrorCodeFileMissing = ErrorCodes(14) // FileMissing + ErrorCodeFolderIndexOOB = ErrorCodes(15) // FolderIndexOOB + ErrorCodeCityFileClientOOB = ErrorCodes(16) // CityFileClientOOB + ErrorCodeOOB = ErrorCodes(17) // OOB + ErrorCodeFileIO = ErrorCodes(18) // FileIO + ErrorCodeMemory = ErrorCodes(19) // Memory + ErrorCodeNoPreviousSearch = ErrorCodes(20) // NoPreviousSearch + ErrorCodeThreadStart = ErrorCodes(21) // ThreadStart + ErrorCodeThreadEnqueue = ErrorCodes(22) // ThreadEnqueue + ErrorCodeObjectNotInitialized = ErrorCodes(23) // ObjectNotInitialized + ErrorCodePOIData = ErrorCodes(24) // POIData + ErrorCodeInternal = ErrorCodes(25) // Internal + ErrorCodeQueryIsNotUTF8 = ErrorCodes(26) // QueryIsNotUTF8 + ErrorCodeInvalidQuery = ErrorCodes(27) // InvalidQuery + 
ErrorCodeInvalidInterpRankingSettings = ErrorCodes(28) // InvalidInterpRankingSettings + ErrorCodeInvalidParameters = ErrorCodes(29) // InvalidParameters + ErrorCodeSynonymVersionMismatch = ErrorCodes(30) // SynonymVersionMismatch + ErrorCodeSynonymAmbiguityMismatch = ErrorCodes(31) // SynonymAmbiguityMismatch + ErrorCodeFrequenciesDisabled = ErrorCodes(32) // FrequenciesDisabled + ErrorCodeTimeOut = ErrorCodes(33) // TimeOut + ErrorCodeInvalidInputCountry = ErrorCodes(34) // InvalidInputCountry + ErrorCodeInvalidInputState = ErrorCodes(35) // InvalidInputState + ErrorCodeStateIsNotPartOfCountry = ErrorCodes(36) // StateIsNotPartOfCountry + ErrorCodeIndexVersionMismatch = ErrorCodes(37) // IndexVersionMismatch + ErrorCodeInvalidLatLonForRegion = ErrorCodes(38) // InvalidLatLonForRegion + ErrorCodeUnknown = ErrorCodes(39) // Unknown +) diff --git a/internal/infrastructure/external/maps/pcmiler/types.go b/internal/infrastructure/external/maps/pcmiler/types.go new file mode 100644 index 000000000..3be992b11 --- /dev/null +++ b/internal/infrastructure/external/maps/pcmiler/types.go @@ -0,0 +1,168 @@ +package pcmiler + +var ( + // SingleSearchURL is the URL for the single search API + SingleSearchURL = "https://singlesearch.alk.com/NA/api/search" + + // RouteReportURL is the URL for the route report API + RouteReportURL = "https://pcmiler.alk.com/apis/rest/v1.0/Service.svc/route/routeReports" +) + +// Address struct represents address details +type Address struct { + StreetAddress string `json:"streetAddress"` + LocalArea string `json:"localArea"` + City string `json:"city"` + State string `json:"state"` + StateName string `json:"stateName"` + Zip string `json:"zip"` + County string `json:"county"` + Country string `json:"country"` + CountryFullName string `json:"countryFullName"` + SPLC *string `json:"splc"` +} + +// Coords struct represents latitude and longitude +type Coords struct { + Lat string `json:"lat"` + Lon string `json:"lon"` +} + +// Region indicates the region of the location. +type Region int + +const ( + RegionUnknown = Region(0) // Unknown + RegionAF = Region(1) // Africa + RegionAS = Region(2) // Asia + RegionEU = Region(3) // Europe + RegionNA = Region(4) // North America (Default) + RegionOC = Region(5) // Oceania + RegionSA = Region(6) // South America + RegionME = Region(7) // Middle East + RegionDeprecated = Region(8) // Deprecated + RegionMX = Region(9) // Mexico +) + +// ResultType indicates the type of match with the search string. 
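+// The values largely mirror the InterpTypes accepted by the single search
+// includeOnly filter (Country, State, County, City, Zip, SPLC, Street, POI, and so on).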
+type ResultType int + +const ( + ResultTypeCountry = ResultType(0) // Country + ResultTypeState = ResultType(1) // State + ResultTypeCounty = ResultType(2) // County + ResultTypeCity = ResultType(3) // City + ResultTypeZip = ResultType(4) // Zip + ResultTypeSPLC = ResultType(5) // SPLC + ResultTypeStreet = ResultType(6) // Street + ResultTypeRouteNumber = ResultType(7) // RouteNumber + ResultTypeRouteAlpha = ResultType(8) // RouteAlpha + ResultTypePOI = ResultType(9) // POI + ResultTypePOIStreet = ResultType(10) // POIStreet + ResultTypeFullPostCode = ResultType(11) // FullPostCode + ResultTypePOIType = ResultType(12) // POIType + ResultTypeCrossStreet = ResultType(13) // CrossStreet + ResultTypeLatLon = ResultType(14) // LatLon + ResultTypeCustomPlace = ResultType(15) // CustomPlace + ResultTypeNone = ResultType(16) // None + ResultTypeTrimblePlaces = ResultType(17) // TrimblePlaces +) + +// Location struct represents a single location with address, coordinates, and other metadata +type Location struct { + Region int `json:"region"` + POITypeID int `json:"poiTypeId"` + PersistentPOIID int `json:"persistentPoiId"` + SiteID int `json:"siteId"` + ResultType int `json:"resultType"` + ShortString string `json:"shortString"` + TimeZone string `json:"timeZone"` + Coords Coords `json:"coords"` + Address Address `json:"address"` +} + +// LocationResponse represents the entire response containing multiple locations +type LocationResponse struct { + Err int `json:"err"` + Locations []*Location `json:"locations"` +} + +type SingleSearchParams struct { + // The API key for the PCMiler API (Required) + AuthToken string `json:"authToken" url:"authToken"` + + // String indicating the text to search for (Required) + Query string `json:"query" url:"query"` + + // Limits search results by the specified number. Must be a value between 1 and 100. (Not Required) + MaxResults int `json:"maxResults,omitempty" url:"maxResults,omitempty"` + + // The current longitude and latitude, where longitude and latitude are either decimal or + // integer coordinates. (Not Required) + CurrentLonLat string `json:"currentLonLat,omitempty" url:"currentLonLat,omitempty"` + + // A comma-separated list of InterpTypes to include in the search response. + // Allowed filters: Country, State, County, City, POBox, Zip, SPLC, Street, RouteNumber, + // RouteAlpha, POI, POIStreet, FullPostCode, POIType, CrossStreet, LatLon, CustomPlace, and None. + // (Example: includeOnly=CustomPlace,Street. This will return only custom places and streets that match + // with the search string.) + // To include results from ZIP codes that only apply to post office boxes, the values POBox and Zip must be + // included in your request. (Not Required) + IncludeOnly string `json:"includeOnly,omitempty" url:"includeOnly,omitempty"` + + // A comma-separated list of Points of Interest (POI) category names by which you want to filter all POI results. A GET + // call to /search/poiCategories can be used to retrieve the current list of + // categories available for filtering. (Not Required) + PoiCategories string `json:"poiCategories,omitempty" url:"poiCategories,omitempty"` + + // A comma-separated list of country codes by which you want to filter all results. It defaults to ISO format. + Countries string `json:"countries,omitempty" url:"countries,omitempty"` + + // The standard for country abbreviations: ISO, FIPS, GENC2, and GENC3. 
(Not Required) + CountryType string `json:"countryType,omitempty" url:"countryType,omitempty"` + + // A comma-separated list of state abbreviations by which you want to filter all results. (Not Required) + States string `json:"states,omitempty" url:"states,omitempty"` + + // Set to include=Meta to include additional metadata in your results, such as road grid and link + // information as well as the confidence level of the search results. + // (See QueryConfidence in response parameters below.) (Not Required) + Include string `json:"include,omitempty" url:"include,omitempty"` + + // If set to true, this option includes custom places in the search results where the location’s PlaceId + // or PlaceName starts with the query string. Note that custom places can only be searched by name and ID, + // not by address, city, state, or ZIP code. Custom place results will appear before any other search results. + // This setting is a convenience feature that integrates a call to the Places API’s places/v1/place/search + // endpoint into the single search call. However, it comes with a performance cost, as single search must + // wait for the places call to complete before returning results. For optimal performance, consider leaving + // this option off (false) and making a separate parallel call to the places endpoint. + // Default is false. (Not Required) + UseCustomPlaces bool `json:"useCustomPlaces,omitempty" url:"useCustomPlaces,omitempty"` + + // Sets whether the house number should be returned as a separate field from the rest of the street address. + // Default is false. (Not Required) + SeparateHN bool `json:"separateHN,omitempty" url:"separateHN,omitempty"` + + // If set to true, all potential house number ranges will be returned for a particular street match. + // Default is false. (Not Required) + GetAllHNRanges bool `json:"getAllHNRanges,omitempty" url:"getAllHNRanges,omitempty"` + + // Set to true to return the TrimblePlaceId, PlaceName and SiteName for a location, if they exist. + // Default is false. (Not Required) + IncludeTrimblePlaceIds bool `json:"includeTrimblePlaceIds,omitempty" url:"includeTrimblePlaceIds,omitempty"` + + // The language to use in results. U.S. English is the default. + Lang string `json:"lang,omitempty" url:"lang,omitempty"` + + // Used with partial postal code queries. When provided, the center coordinates of multiple postal code + // points will be calculated and returned. For example, a search for &includeOnly=zip&query=840 will return + // a long list of ZIP codes starting with “840.” Adding &includeCenter instead returns a single point that is + // roughly central to all of those ZIP codes. + IncludeCenter bool `json:"includeCenter,omitempty" url:"includeCenter,omitempty"` + + // Limits the amount of difference that is allowed between the input query and the match results. This is generally better left to the default of false, but in some cases where automation is applied to results, it may limit false positives. + StrictMatch bool `json:"strictMatch,omitempty" url:"strictMatch,omitempty"` + + // Limits search results to within a specified distance in miles from the current location, as identified by currentLonLat. A valid currentLonLat must be sent in the request for this parameter to take effect. In some cases, a short radiusFromCurrentLonLat may produce no matches. 
+ RadiusFromCurrentLonLat float32 `json:"radiusFromCurrentLonLat,omitempty" url:"radiusFromCurrentLonLat,omitempty"` +} diff --git a/internal/pkg/errors/errors.go b/internal/pkg/errors/errors.go index 1514ef2d4..6902a059b 100644 --- a/internal/pkg/errors/errors.go +++ b/internal/pkg/errors/errors.go @@ -5,6 +5,7 @@ import ( "fmt" "strings" + "github.com/bytedance/sonic" val "github.com/go-ozzo/ozzo-validation/v4" "github.com/rotisserie/eris" ) @@ -37,6 +38,8 @@ func IsError(err error) bool { } type MultiError struct { + prefix string + parent *MultiError Errors []*Error `json:"errors"` } @@ -46,19 +49,72 @@ func NewMultiError() *MultiError { } } +// getFullPrefix builds the complete prefix by walking up the parent chain +func (m *MultiError) getFullPrefix() string { + var prefixes []string + current := m + + // Walk up the parent chain collecting prefixes + for current != nil && current.prefix != "" { + prefixes = append([]string{current.prefix}, prefixes...) + current = current.parent + } + + if len(prefixes) == 0 { + return "" + } + + return strings.Join(prefixes, ".") +} + +func (m *MultiError) WithPrefix(prefix string) *MultiError { + return &MultiError{ + prefix: prefix, + parent: m, + Errors: m.Errors, // Share the same error slice with parent + } +} + +func (m *MultiError) WithIndex(prefix string, idx int) *MultiError { + return m.WithPrefix(fmt.Sprintf("%s[%d]", prefix, idx)) +} + // Add adds a new validation error to the collection func (m *MultiError) Add(field string, code ErrorCode, message string) { - m.Errors = append(m.Errors, &Error{ - Field: field, + fieldPath := field + fullPrefix := m.getFullPrefix() + + if fullPrefix != "" { + if field != "" { + fieldPath = fmt.Sprintf("%s.%s", fullPrefix, field) + } else { + fieldPath = fullPrefix + } + } + + err := &Error{ + Field: fieldPath, Code: code, Message: message, - }) + } + + // Always add to the root parent + root := m + for root.parent != nil { + root = root.parent + } + root.Errors = append(root.Errors, err) } // AddError adds an existing Error to the collection func (m *MultiError) AddError(err *Error) { if err != nil { - m.Errors = append(m.Errors, err) + // If this is a child MultiError, propagate to parent + if m.parent != nil { + m.parent.Errors = append(m.parent.Errors, err) + } else { + m.Errors = append(m.Errors, err) + } } } @@ -92,6 +148,14 @@ func (m *MultiError) MarshalJSON() ([]byte, error) { }) } +func (m *MultiError) ToJSON() string { + output, err := sonic.Marshal(m) + if err != nil { + return "" + } + return string(output) +} + func IsMultiError(err error) bool { var multiErr *MultiError return eris.As(err, &multiErr) @@ -287,10 +351,20 @@ func inferErrorCode(err error) ErrorCode { } } +// Deprecated: Use FromOzzoErrors instead. 
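+//
+// FromOzzoErrors derives the field path from the MultiError's own prefix chain
+// (built with WithPrefix/WithIndex) rather than from an explicit prefix argument.
+// For example, a nested validator can write:
+//
+//	moveErr := multiErr.WithIndex("moves", 0)
+//	stopErr := moveErr.WithIndex("stops", 1)
+//	stopErr.Add("type", ErrInvalid, "Stop type must be pickup or delivery")
+//
+// and the error is recorded on the root MultiError under "moves[0].stops[1].type".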
func FromValidationErrors(valErrors val.Errors, multiErr *MultiError, prefix string) { for field, err := range valErrors { fieldName := field - if prefix != "" { + fullPrefix := multiErr.getFullPrefix() // Get the full prefix from MultiError + + // Combine prefixes if both exist + if fullPrefix != "" { + if prefix != "" { + fieldName = fmt.Sprintf("%s.%s.%s", fullPrefix, prefix, field) + } else { + fieldName = fmt.Sprintf("%s.%s", fullPrefix, field) + } + } else if prefix != "" { fieldName = fmt.Sprintf("%s.%s", prefix, field) } @@ -301,3 +375,20 @@ func FromValidationErrors(valErrors val.Errors, multiErr *MultiError, prefix str }) } } + +func FromOzzoErrors(valErrors val.Errors, multiErr *MultiError) { + for field, err := range valErrors { + fieldName := field + fullPrefix := multiErr.getFullPrefix() // Get the full prefix from MultiError + + if fullPrefix != "" { + fieldName = fmt.Sprintf("%s.%s", fullPrefix, field) + } + + multiErr.AddError(&Error{ + Field: fieldName, + Code: inferErrorCode(err), + Message: err.Error(), + }) + } +} diff --git a/internal/pkg/registry/domain.go b/internal/pkg/registry/domain.go index 8b8966b7d..cf33db9a7 100644 --- a/internal/pkg/registry/domain.go +++ b/internal/pkg/registry/domain.go @@ -13,6 +13,7 @@ import ( "github.com/emoss08/trenova/internal/core/domain/hazardousmaterial" "github.com/emoss08/trenova/internal/core/domain/location" "github.com/emoss08/trenova/internal/core/domain/organization" + "github.com/emoss08/trenova/internal/core/domain/pcmilerconfiguration" "github.com/emoss08/trenova/internal/core/domain/permission" "github.com/emoss08/trenova/internal/core/domain/pretrainedmodels" "github.com/emoss08/trenova/internal/core/domain/resource" @@ -64,8 +65,10 @@ func RegisterEntities() []any { &trailer.Trailer{}, &location.LocationCategory{}, &location.Location{}, + &shipment.Stop{}, &shipment.ShipmentCommodity{}, &shipment.ShipmentMove{}, &shipment.Shipment{}, + &pcmilerconfiguration.PCMilerConfiguration{}, } } diff --git a/internal/pkg/validator/shipmentvalidator/move.go b/internal/pkg/validator/shipmentvalidator/move.go new file mode 100644 index 000000000..2a0673677 --- /dev/null +++ b/internal/pkg/validator/shipmentvalidator/move.go @@ -0,0 +1,158 @@ +package shipmentvalidator + +import ( + "context" + "fmt" + + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/core/ports/db" + "github.com/emoss08/trenova/internal/pkg/errors" + "github.com/emoss08/trenova/internal/pkg/validator" + "go.uber.org/fx" +) + +type MoveValidatorParams struct { + fx.In + + DB db.Connection + StopValidator *StopValidator +} + +type MoveValidator struct { + db db.Connection + sv *StopValidator +} + +func NewMoveValidator(p MoveValidatorParams) *MoveValidator { + return &MoveValidator{ + db: p.DB, + sv: p.StopValidator, + } +} + +func (v *MoveValidator) Validate(ctx context.Context, valCtx *validator.ValidationContext, m *shipment.ShipmentMove, multiErr *errors.MultiError, idx int) { + moveMultiErr := multiErr.WithIndex("moves", idx) + + m.Validate(ctx, moveMultiErr) + v.validateStops(ctx, valCtx, m, moveMultiErr) + + if valCtx.IsCreate { + v.validateID(m, moveMultiErr) + } +} + +func (v *MoveValidator) validateID(m *shipment.ShipmentMove, multiErr *errors.MultiError) { + if m.ID.IsNotNil() { + multiErr.Add("id", errors.ErrInvalid, "ID cannot be set on create") + } +} + +func (v *MoveValidator) validateStops(ctx context.Context, valCtx *validator.ValidationContext, m *shipment.ShipmentMove, multiErr *errors.MultiError) { 
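+	// Stop validation runs in three passes over the move: stop count, planned and
+	// actual time ordering between consecutive stops, and pickup/delivery sequencing.
+	// Each stop is then validated individually under an indexed prefix, so errors
+	// surface with field paths such as "moves[0].stops[1].type".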
+ v.validateStopLength(m, multiErr) + v.validateStopTimes(m, multiErr) + v.validateStopSequence(m, multiErr) + + for idx, stop := range m.Stops { + v.sv.Validate(ctx, valCtx, stop, multiErr, idx) + } +} + +// validateStopLength validates that atleast two stops are in a movement. +func (v *MoveValidator) validateStopLength(m *shipment.ShipmentMove, multiErr *errors.MultiError) { + if len(m.Stops) < 2 { + multiErr.Add("stops", errors.ErrInvalid, "At least two stops is required in a move") + return + } +} + +func (v *MoveValidator) validateStopTimes(m *shipment.ShipmentMove, multiErr *errors.MultiError) { + if len(m.Stops) <= 1 { + return + } + + for i := 0; i < len(m.Stops)-1; i++ { + currStop := m.Stops[i] + nextStop := m.Stops[i+1] + + // Validate sequential stop times + if currStop.PlannedDeparture >= nextStop.PlannedArrival { + multiErr.Add( + fmt.Sprintf("stops[%d].plannedDeparture", i), + errors.ErrInvalid, + "Planned departure must be before next stop's planned arrival", + ) + } + + if currStop.ActualDeparture != nil && nextStop.ActualArrival != nil { + if *currStop.ActualDeparture >= *nextStop.ActualArrival { + multiErr.Add( + fmt.Sprintf("stops[%d].actualDeparture", i), + errors.ErrInvalid, + "Actual departure must be before next stop's actual arrival", + ) + } + } + } +} + +func (v *MoveValidator) validateStopSequence(m *shipment.ShipmentMove, multiErr *errors.MultiError) { + // Quick lookup maps for stop types + pickupTypes := map[shipment.StopType]bool{ //nolint: exhaustive // We only need to check for pickup and split pickup + shipment.StopTypePickup: true, + shipment.StopTypeSplitPickup: true, + } + + deliveryTypes := map[shipment.StopType]bool{ //nolint: exhaustive // We only need to check for delivery and split delivery + shipment.StopTypeDelivery: true, + shipment.StopTypeSplitDelivery: true, + } + + // Guard clause for empty stops + if len(m.Stops) == 0 { + multiErr.Add("stops", errors.ErrInvalid, "Movement must have at least one stop") + return + } + + // Validate first stop is a pickup type + if !pickupTypes[m.Stops[0].Type] { + multiErr.Add( + "stops[0].type", + errors.ErrInvalid, + "First stop must be a pickup or split pickup", + ) + } + + // Validate last stop is a delivery type + if !deliveryTypes[m.Stops[len(m.Stops)-1].Type] { + multiErr.Add( + fmt.Sprintf("stops[%d].type", len(m.Stops)-1), + errors.ErrInvalid, + "Last stop must be a delivery or split delivery", + ) + } + + // Keep track of all pickups before current stop + hasPickup := false + for i, stop := range m.Stops { + // Validate stop type is allowed + if !pickupTypes[stop.Type] && !deliveryTypes[stop.Type] { + multiErr.Add( + fmt.Sprintf("stops[%d].type", i), + errors.ErrInvalid, + "Stop type must be pickup or delivery", + ) + continue + } + + // Track pickup status and validate delivery sequence + if pickupTypes[stop.Type] { + hasPickup = true + } else if deliveryTypes[stop.Type] && !hasPickup { + multiErr.Add( + fmt.Sprintf("stops[%d].type", i), + errors.ErrInvalid, + "Delivery stop must be preceded by a pickup or split pickup", + ) + } + } +} diff --git a/internal/pkg/validator/shipmentvalidator/move_test.go b/internal/pkg/validator/shipmentvalidator/move_test.go new file mode 100644 index 000000000..77c4bea7e --- /dev/null +++ b/internal/pkg/validator/shipmentvalidator/move_test.go @@ -0,0 +1,280 @@ +package shipmentvalidator_test + +import ( + "testing" + + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/pkg/errors" + 
"github.com/emoss08/trenova/internal/pkg/validator" + spValidator "github.com/emoss08/trenova/internal/pkg/validator/shipmentvalidator" + "github.com/emoss08/trenova/pkg/types/pulid" + "github.com/emoss08/trenova/test/testutils" +) + +func newMovement() *shipment.ShipmentMove { + return &shipment.ShipmentMove{ + Status: shipment.StopStatusNew, + PrimaryWorkerID: pulid.MustNew("wrk_"), + TractorID: pulid.MustNew("trk_"), + TrailerID: pulid.MustNew("trl_"), + Stops: []*shipment.Stop{ + { + Type: shipment.StopTypePickup, + Sequence: 0, + Status: shipment.StopStatusNew, + PlannedArrival: 100, + PlannedDeparture: 200, + }, + { + Type: shipment.StopTypePickup, + Sequence: 1, + Status: shipment.StopStatusNew, + PlannedArrival: 300, + PlannedDeparture: 400, + }, + { + Type: shipment.StopTypeDelivery, + Sequence: 2, + Status: shipment.StopStatusNew, + PlannedArrival: 500, + PlannedDeparture: 600, + }, + { + Type: shipment.StopTypeDelivery, + Sequence: 3, + Status: shipment.StopStatusNew, + PlannedArrival: 700, + PlannedDeparture: 800, + }, + }, + } +} + +func TestMoveValidator(t *testing.T) { + val := spValidator.NewMoveValidator(spValidator.MoveValidatorParams{ + DB: ts.DB, + }) + + scenarios := []struct { + name string + modifyMove func(*shipment.ShipmentMove) + expectedErrors []struct { + Field string + Code errors.ErrorCode + Message string + } + }{ + { + name: "status is required", + modifyMove: func(s *shipment.ShipmentMove) { + s.Status = "" + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].status", Code: errors.ErrRequired, Message: "Status is required"}, + }, + }, + { + name: "tractor is required", + modifyMove: func(s *shipment.ShipmentMove) { + s.TractorID = "" + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].tractorId", Code: errors.ErrRequired, Message: "Tractor is required"}, + }, + }, + { + name: "trailer is required", + modifyMove: func(s *shipment.ShipmentMove) { + s.TrailerID = "" + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].trailerId", Code: errors.ErrRequired, Message: "Trailer is required"}, + }, + }, + { + name: "primary worker is required", + modifyMove: func(s *shipment.ShipmentMove) { + s.PrimaryWorkerID = "" + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].primaryWorkerId", Code: errors.ErrRequired, Message: "Primary Worker is required"}, + }, + }, + { + name: "validate stop planned times", + modifyMove: func(s *shipment.ShipmentMove) { + s.Stops[0].PlannedDeparture = 300 + s.Stops[1].PlannedArrival = 200 + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops[0].plannedDeparture", Code: errors.ErrInvalid, Message: "Planned departure must be before next stop's planned arrival"}, + }, + }, + { + name: "validate actual departure before next arrival", + modifyMove: func(s *shipment.ShipmentMove) { + s.Stops[0].ActualDeparture = &[]int64{300}[0] + s.Stops[1].ActualArrival = &[]int64{200}[0] + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops[0].actualDeparture", Code: errors.ErrInvalid, Message: "Actual departure must be before next stop's actual arrival"}, + }, + }, + { + name: "validate planned departure before next planned arrival", + modifyMove: func(s *shipment.ShipmentMove) { + 
s.Stops[0].PlannedDeparture = 300 + s.Stops[1].PlannedArrival = 200 + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops[0].plannedDeparture", Code: errors.ErrInvalid, Message: "Planned departure must be before next stop's planned arrival"}, + }, + }, + { + name: "first stop must be pickup", + modifyMove: func(s *shipment.ShipmentMove) { + s.Stops[0].Type = shipment.StopTypeDelivery + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops[0].type", Code: errors.ErrInvalid, Message: "First stop must be a pickup or split pickup"}, + {Field: "moves[0].stops[0].type", Code: errors.ErrInvalid, Message: "Delivery stop must be preceded by a pickup or split pickup"}, + }, + }, + { + name: "last stop must be delivery", + modifyMove: func(s *shipment.ShipmentMove) { + s.Stops[len(s.Stops)-1].Type = shipment.StopTypePickup + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops[3].type", Code: errors.ErrInvalid, Message: "Last stop must be a delivery or split delivery"}, + }, + }, + { + name: "invalid stop type in sequence", + modifyMove: func(s *shipment.ShipmentMove) { + s.Stops[1].Type = "INVALID_TYPE" + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops[1].type", Code: errors.ErrInvalid, Message: "Stop type must be pickup or delivery"}, + }, + }, + { + name: "delivery before pickup", + modifyMove: func(s *shipment.ShipmentMove) { + s.Stops[0].Type = shipment.StopTypeDelivery + s.Stops[1].Type = shipment.StopTypeSplitPickup + s.Stops[2].Type = shipment.StopTypeDelivery + s.Stops[3].Type = shipment.StopTypeDelivery + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops[0].type", Code: errors.ErrInvalid, Message: "First stop must be a pickup or split pickup"}, + {Field: "moves[0].stops[0].type", Code: errors.ErrInvalid, Message: "Delivery stop must be preceded by a pickup or split pickup"}, + }, + }, + { + name: "split pickup and split delivery sequence", + modifyMove: func(s *shipment.ShipmentMove) { + s.Stops[0].Type = shipment.StopTypeDelivery + s.Stops[1].Type = shipment.StopTypeSplitPickup + s.Stops[2].Type = shipment.StopTypeDelivery + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops[0].type", Code: errors.ErrInvalid, Message: "First stop must be a pickup or split pickup"}, + {Field: "moves[0].stops[0].type", Code: errors.ErrInvalid, Message: "Delivery stop must be preceded by a pickup or split pickup"}, + }, + }, + { + name: "atleast two stops is required", + modifyMove: func(s *shipment.ShipmentMove) { + s.Stops = nil + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].stops", Code: errors.ErrInvalid, Message: "At least two stops is required in a move"}, + {Field: "moves[0].stops", Code: errors.ErrInvalid, Message: "Movement must have at least one stop"}, + }, + }, + { + name: "no id on create", + modifyMove: func(s *shipment.ShipmentMove) { + s.ID = pulid.MustNew("sm_") + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "moves[0].id", Code: errors.ErrInvalid, Message: "ID cannot be set on create"}, + }, + }, + } + + for _, scenario := range scenarios { + t.Run(scenario.name, func(t 
*testing.T) { + move := newMovement() + scenario.modifyMove(move) + + vCtx := validator.NewValidationContext(ctx, &validator.ValidationContext{ + IsCreate: true, + }) + + me := errors.NewMultiError() + + val.Validate(ctx, vCtx, move, me, 0) + + matcher := testutils.NewErrorMatcher(t, me) + matcher.HasExactErrors(scenario.expectedErrors) + }) + } +} diff --git a/internal/pkg/validator/shipmentvalidator/shipment.go b/internal/pkg/validator/shipmentvalidator/shipment.go new file mode 100644 index 000000000..7b10fc33a --- /dev/null +++ b/internal/pkg/validator/shipmentvalidator/shipment.go @@ -0,0 +1,283 @@ +package shipmentvalidator + +import ( + "context" + + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/core/ports/db" + "github.com/emoss08/trenova/internal/pkg/errors" + "github.com/emoss08/trenova/internal/pkg/utils/queryutils" + "github.com/emoss08/trenova/internal/pkg/validator" + "github.com/rotisserie/eris" + "go.uber.org/fx" +) + +type ValidatorParams struct { + fx.In + + DB db.Connection + MoveValidator *MoveValidator +} + +type Validator struct { + db db.Connection + mv *MoveValidator +} + +func NewValidator(p ValidatorParams) *Validator { + return &Validator{ + db: p.DB, + mv: p.MoveValidator, + } +} + +func (v *Validator) Validate(ctx context.Context, valCtx *validator.ValidationContext, shp *shipment.Shipment) *errors.MultiError { + multiErr := errors.NewMultiError() + + shp.Validate(ctx, multiErr) + + // Validate uniqueness + if err := v.ValidateUniqueness(ctx, valCtx, shp, multiErr); err != nil { + multiErr.Add("uniqueness", errors.ErrSystemError, err.Error()) + } + + // Validate ID + v.validateID(shp, valCtx, multiErr) + + // Validate Ready To Bill + v.validateReadyToBill(shp, multiErr) + + // Validate Billing Flags + v.validateBillingFlags(shp, multiErr) + + // Validate Temperature + v.validateTemperature(shp, multiErr) + + // Validate Moves + v.ValidateMoves(ctx, valCtx, shp, multiErr) + + if multiErr.HasErrors() { + return multiErr + } + + return nil +} + +func (v *Validator) ValidateMoves(ctx context.Context, valCtx *validator.ValidationContext, shp *shipment.Shipment, multiErr *errors.MultiError) { + if len(shp.Moves) == 0 { + multiErr.Add("moves", errors.ErrInvalid, "Shipment must have at least one move") + return + } + + for idx, move := range shp.Moves { + v.mv.Validate(ctx, valCtx, move, multiErr, idx) + } +} + +func (v *Validator) ValidateUniqueness(ctx context.Context, valCtx *validator.ValidationContext, shp *shipment.Shipment, multiErr *errors.MultiError) error { + dba, err := v.db.DB(ctx) + if err != nil { + return eris.Wrap(err, "get database connection") + } + + vb := queryutils.NewUniquenessValidator(shp.GetTableName()). + WithTenant(shp.OrganizationID, shp.BusinessUnitID). + WithModelName("Shipment"). + WithFieldAndTemplate("pro_number", shp.ProNumber, + "Shipment with Pro Number ':value' already exists in the organization.", + map[string]string{ + "value": shp.ProNumber, + }) + + if valCtx.IsCreate { + vb.WithOperation(queryutils.OperationCreate) + } else { + vb.WithOperation(queryutils.OperationUpdate). 
+ WithPrimaryKey("id", shp.GetID()) + } + + queryutils.CheckFieldUniqueness(ctx, dba, vb.Build(), multiErr) + + return nil +} + +func (v *Validator) validateID(shp *shipment.Shipment, valCtx *validator.ValidationContext, multiErr *errors.MultiError) { + if valCtx.IsCreate && shp.ID.IsNotNil() { + multiErr.Add("id", errors.ErrInvalid, "ID cannot be set on create") + } +} + +func (v *Validator) validateReadyToBill(shp *shipment.Shipment, multiErr *errors.MultiError) { + // If the shipment is ready to bill, then the status must be "Completed" + // ! This will change when we have shipment controls + // ! That will determine if the organization allows ready to bill to be set + // ! Whether the shipment is completed or not. + if shp.ReadyToBill && shp.Status != shipment.StatusCompleted { + multiErr.Add("readyToBill", errors.ErrInvalid, "Shipment must be completed to be marked as ready to bill") + } +} + +func (v *Validator) validateTemperature(shp *shipment.Shipment, multiErr *errors.MultiError) { + if shp.TemperatureMin.Valid && shp.TemperatureMax.Valid && shp.TemperatureMin.Decimal.GreaterThan(shp.TemperatureMax.Decimal) { + multiErr.Add("temperatureMin", errors.ErrInvalid, "Temperature Min must be less than Temperature Max") + } +} + +// validateBillingFlags performs comprehensive validation of billing-related fields and flags +// to ensure proper billing state transitions and data consistency. +func (v *Validator) validateBillingFlags(shp *shipment.Shipment, multiErr *errors.MultiError) { //nolint: gocognit // validation + // -------------------------------------- + // 1. Ready to Bill State Validation + // Ensures that if a shipment is not marked as ready to bill, + // no subsequent billing states or dates can be set. + // This enforces the proper progression of the billing workflow. + // -------------------------------------- + if !shp.ReadyToBill { //nolint: nestif // It is what it is + if shp.ReadyToBillDate != nil { + multiErr.Add( + "readyToBillDate", + errors.ErrInvalid, + "Ready to bill date cannot be set when shipment is not ready to bill", + ) + } + if shp.SentToBilling { + multiErr.Add( + "sentToBilling", + errors.ErrInvalid, + "Cannot be sent to billing when shipment is not ready to bill", + ) + } + if shp.SentToBillingDate != nil { + multiErr.Add( + "sentToBillingDate", + errors.ErrInvalid, + "Sent to billing date cannot be set when shipment is not ready to bill", + ) + } + if shp.Billed { + multiErr.Add( + "billed", + errors.ErrInvalid, + "Cannot be marked as billed when shipment is not ready to bill", + ) + } + if shp.BillDate != nil { + multiErr.Add( + "billDate", + errors.ErrInvalid, + "Bill date cannot be set when shipment is not ready to bill", + ) + } + } + + // -------------------------------------- + // 2. Sent to Billing State Validation + // Validates that if a shipment is not marked as sent to billing, + // no billing completion states or dates can be set. + // This prevents skipping steps in the billing process. 
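+	// For example, a shipment with SentToBilling set to false is rejected when
+	// SentToBillingDate, Billed, or BillDate is already populated.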
+ // -------------------------------------- + if !shp.SentToBilling { + if shp.SentToBillingDate != nil { + multiErr.Add( + "sentToBillingDate", + errors.ErrInvalid, + "Sent to billing date cannot be set when not sent to billing", + ) + } + if shp.Billed { + multiErr.Add( + "billed", + errors.ErrInvalid, + "Cannot be marked as billed when not sent to billing", + ) + } + if shp.BillDate != nil { + multiErr.Add( + "billDate", + errors.ErrInvalid, + "Bill date cannot be set when not sent to billing", + ) + } + } + + // -------------------------------------- + // 3. Billed State Validation + // Ensures that the bill date can only be set when the shipment + // is marked as billed, preventing inconsistent billing states. + // -------------------------------------- + if !shp.Billed && shp.BillDate != nil { + multiErr.Add( + "billDate", + errors.ErrInvalid, + "Bill date cannot be set when not billed", + ) + } + + // -------------------------------------- + // 4. Date Sequence Validation + // Validates that all billing-related dates follow the correct + // chronological order. This ensures a logical progression of + // the billing process and prevents date inconsistencies. + // -------------------------------------- + if shp.ReadyToBillDate != nil && shp.SentToBillingDate != nil { + if *shp.SentToBillingDate < *shp.ReadyToBillDate { + multiErr.Add( + "sentToBillingDate", + errors.ErrInvalid, + "Sent to billing date cannot be before ready to bill date", + ) + } + } + + if shp.SentToBillingDate != nil && shp.BillDate != nil { + if *shp.BillDate < *shp.SentToBillingDate { + multiErr.Add( + "billDate", + errors.ErrInvalid, + "Bill date cannot be before sent to billing date", + ) + } + } + + // -------------------------------------- + // 5. Charge Amount Validation + // Validates billing amounts when a shipment is marked as billed. + // Ensures all required charges are present and properly calculated, + // maintaining financial accuracy in the system. + // -------------------------------------- + if shp.Billed { + if !shp.FreightChargeAmount.Valid || shp.FreightChargeAmount.Decimal.IsZero() { + multiErr.Add( + "freightChargeAmount", + errors.ErrRequired, + "Freight charge amount is required when shipment is billed", + ) + } + + // Validate that total charge equals the sum of freight and other charges + if shp.FreightChargeAmount.Valid && shp.OtherChargeAmount.Valid { + expectedTotal := shp.FreightChargeAmount.Decimal.Add(shp.OtherChargeAmount.Decimal) + if !shp.TotalChargeAmount.Decimal.Equal(expectedTotal) { + multiErr.Add( + "totalChargeAmount", + errors.ErrInvalid, + "Total charge amount must equal freight charge plus other charges", + ) + } + } + } + + // -------------------------------------- + // 7. Delivery Verification + // Ensures that a shipment cannot be marked as ready for billing + // until it has been delivered. This prevents premature billing + // and ensures service completion before billing processes begin. 
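+	// Concretely, ReadyToBill may only be set once ActualDeliveryDate is present.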
+ // -------------------------------------- + if shp.ReadyToBill && shp.ActualDeliveryDate == nil { + multiErr.Add( + "actualDeliveryDate", + errors.ErrInvalid, + "Actual delivery date is required to mark shipment as ready to bill", + ) + } +} diff --git a/internal/pkg/validator/shipmentvalidator/shipment_test.go b/internal/pkg/validator/shipmentvalidator/shipment_test.go new file mode 100644 index 000000000..6690cce89 --- /dev/null +++ b/internal/pkg/validator/shipmentvalidator/shipment_test.go @@ -0,0 +1,505 @@ +package shipmentvalidator_test + +import ( + "context" + "os" + "testing" + + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/pkg/errors" + "github.com/emoss08/trenova/internal/pkg/validator" + spValidator "github.com/emoss08/trenova/internal/pkg/validator/shipmentvalidator" + "github.com/emoss08/trenova/pkg/types/pulid" + "github.com/emoss08/trenova/test/testutils" + "github.com/shopspring/decimal" +) + +var ( + ts *testutils.TestSetup + ctx = context.Background() +) + +func TestMain(m *testing.M) { + setup, err := testutils.NewTestSetup(ctx) + if err != nil { + panic(err) + } + + ts = setup + + os.Exit(m.Run()) +} + +func newShipment() *shipment.Shipment { + return &shipment.Shipment{ + ProNumber: "123456", + Status: shipment.StatusNew, + ShipmentTypeID: pulid.MustNew("st_"), + CustomerID: pulid.MustNew("cust_"), + BOL: "1234567890", + RatingMethod: shipment.RatingMethodFlatRate, + FreightChargeAmount: decimal.NewNullDecimal(decimal.NewFromInt(1000)), + ReadyToBill: false, + Moves: []*shipment.ShipmentMove{ + { + Status: shipment.StopStatusNew, + PrimaryWorkerID: pulid.MustNew("wrk_"), + TractorID: pulid.MustNew("trk_"), + TrailerID: pulid.MustNew("trl_"), + Stops: []*shipment.Stop{ + { + Type: shipment.StopTypePickup, + Sequence: 0, + Status: shipment.StopStatusNew, + PlannedArrival: 100, + PlannedDeparture: 200, + }, + { + Type: shipment.StopTypePickup, + Sequence: 1, + Status: shipment.StopStatusNew, + PlannedArrival: 300, + PlannedDeparture: 400, + }, + { + Type: shipment.StopTypeDelivery, + Sequence: 2, + Status: shipment.StopStatusNew, + PlannedArrival: 500, + PlannedDeparture: 600, + }, + { + Type: shipment.StopTypeDelivery, + Sequence: 3, + Status: shipment.StopStatusNew, + PlannedArrival: 700, + PlannedDeparture: 800, + }, + }, + }, + }, + } +} + +func TestShipmentValidator(t *testing.T) { //nolint: funlen // Tests + sv := spValidator.NewStopValidator(spValidator.StopValidatorParams{ + DB: ts.DB, + }) + + mv := spValidator.NewMoveValidator(spValidator.MoveValidatorParams{ + DB: ts.DB, + StopValidator: sv, + }) + + val := spValidator.NewValidator(spValidator.ValidatorParams{ + DB: ts.DB, + MoveValidator: mv, + }) + + scenarios := []struct { + name string + modifyShipment func(*shipment.Shipment) + expectedErrors []struct { + Field string + Code errors.ErrorCode + Message string + } + }{ + { + name: "cannot mark ready to bill when status is not completed", + modifyShipment: func(shp *shipment.Shipment) { + shp.Status = shipment.StatusNew + shp.ReadyToBill = true + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "readyToBill", + Code: errors.ErrInvalid, + Message: "Shipment must be completed to be marked as ready to bill", + }, + { + Field: "actualDeliveryDate", + Code: errors.ErrInvalid, + Message: "Actual delivery date is required to mark shipment as ready to bill", + }, + }, + }, + { + name: "customer is required", + modifyShipment: func(shp *shipment.Shipment) { 
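+				// Clearing the customer reference should surface the required-field
+				// error on customerId.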
+ shp.CustomerID = pulid.Nil + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "customerId", + Code: errors.ErrRequired, + Message: "Customer is required", + }, + }, + }, + { + name: "shipment type is required", + modifyShipment: func(shp *shipment.Shipment) { + shp.ShipmentTypeID = pulid.Nil + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "shipmentTypeId", + Code: errors.ErrRequired, + Message: "Shipment Type is required", + }, + }, + }, + { + name: "bol is required", + modifyShipment: func(shp *shipment.Shipment) { + shp.BOL = "" + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "bol", + Code: errors.ErrRequired, + Message: "BOL is required", + }, + }, + }, + { + name: "freight charge amount is required when rating method is flat", + modifyShipment: func(shp *shipment.Shipment) { + shp.RatingMethod = shipment.RatingMethodFlatRate + shp.FreightChargeAmount = decimal.NullDecimal{} + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "freightChargeAmount", + Code: errors.ErrRequired, + Message: "Freight Charge Amount is required when rating method is Flat", + }, + }, + }, + { + name: "weight is required when rating method is per pound", + modifyShipment: func(shp *shipment.Shipment) { + shp.RatingMethod = shipment.RatingMethodPerPound + shp.Weight = nil + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "weight", + Code: errors.ErrRequired, + Message: "Weight is required when rating method is Per Pound", + }, + }, + }, + { + name: "rating unit is required when rating method is per mile", + modifyShipment: func(shp *shipment.Shipment) { + shp.RatingMethod = shipment.RatingMethodPerMile + shp.RatingUnit = 0 + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "ratingUnit", + Code: errors.ErrRequired, + Message: "Rating Unit is required when rating method is Per Mile", + }, + }, + }, + { + name: "shipment must have at last one move", + modifyShipment: func(shp *shipment.Shipment) { + shp.Moves = []*shipment.ShipmentMove{} + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "moves", + Code: errors.ErrInvalid, + Message: "Shipment must have at least one move", + }, + }, + }, + { + name: "shipment ready to bill state validation", + modifyShipment: func(shp *shipment.Shipment) { + shp.ReadyToBill = false + + // Failure case + shp.ReadyToBillDate = &[]int64{100}[0] + shp.SentToBilling = true + shp.SentToBillingDate = &[]int64{100}[0] + shp.Billed = true + shp.BillDate = &[]int64{100}[0] + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "readyToBillDate", + Code: errors.ErrInvalid, + Message: "Ready to bill date cannot be set when shipment is not ready to bill", + }, + { + Field: "sentToBilling", + Code: errors.ErrInvalid, + Message: "Cannot be sent to billing when shipment is not ready to bill", + }, + { + Field: "sentToBillingDate", + Code: errors.ErrInvalid, + Message: "Sent to billing date cannot be set when shipment is not ready to bill", + }, + { + Field: "billDate", + Code: errors.ErrInvalid, + Message: "Bill date cannot be set when shipment is not ready to bill", + }, + { + Field: "billed", + Code: errors.ErrInvalid, + Message: "Cannot be 
marked as billed when shipment is not ready to bill", + }, + }, + }, + { + name: "shipment sent to billing state validation", + modifyShipment: func(shp *shipment.Shipment) { + shp.SentToBilling = false + shp.Status = shipment.StatusCompleted + shp.ReadyToBill = true + shp.ReadyToBillDate = &[]int64{100}[0] + shp.ActualDeliveryDate = &[]int64{100}[0] + + // Failure case + shp.SentToBillingDate = &[]int64{100}[0] + shp.Billed = true + shp.BillDate = &[]int64{100}[0] + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "sentToBillingDate", + Code: errors.ErrInvalid, + Message: "Sent to billing date cannot be set when not sent to billing", + }, + { + Field: "billed", + Code: errors.ErrInvalid, + Message: "Cannot be marked as billed when not sent to billing", + }, + { + Field: "billDate", + Code: errors.ErrInvalid, + Message: "Bill date cannot be set when not sent to billing", + }, + }, + }, + { + name: "shipment billed state validation", + modifyShipment: func(shp *shipment.Shipment) { + shp.SentToBilling = false + shp.Status = shipment.StatusCompleted + shp.ReadyToBill = true + shp.ReadyToBillDate = &[]int64{100}[0] + shp.ActualDeliveryDate = &[]int64{100}[0] + shp.SentToBilling = true + shp.SentToBillingDate = &[]int64{100}[0] + + // Failure case + shp.Billed = false + shp.BillDate = &[]int64{100}[0] + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "billDate", + Code: errors.ErrInvalid, + Message: "Bill date cannot be set when not billed", + }, + }, + }, + { + name: "shipment date sequence validation", + modifyShipment: func(shp *shipment.Shipment) { + shp.Status = shipment.StatusCompleted + shp.SentToBilling = true + shp.ReadyToBill = true + shp.ActualDeliveryDate = &[]int64{100}[0] + shp.Billed = true + + // Failure case + shp.ReadyToBillDate = &[]int64{500}[0] + shp.BillDate = &[]int64{100}[0] + shp.SentToBillingDate = &[]int64{400}[0] + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "sentToBillingDate", + Code: errors.ErrInvalid, + Message: "Sent to billing date cannot be before ready to bill date", + }, + { + Field: "billDate", + Code: errors.ErrInvalid, + Message: "Bill date cannot be before sent to billing date", + }, + }, + }, + { + name: "shipment charge amount validation", + modifyShipment: func(shp *shipment.Shipment) { + shp.Status = shipment.StatusCompleted + shp.SentToBilling = true + shp.ReadyToBill = true + shp.ActualDeliveryDate = &[]int64{100}[0] + shp.Billed = true + + // Failure case + shp.FreightChargeAmount = decimal.NullDecimal{} + shp.OtherChargeAmount = decimal.NullDecimal{} + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "freightChargeAmount", + Code: errors.ErrRequired, + Message: "Freight charge amount is required when shipment is billed", + }, + { + Field: "freightChargeAmount", + Code: errors.ErrRequired, + Message: "Freight Charge Amount is required when rating method is Flat", + }, + }, + }, + { + name: "validate total charge amount", + modifyShipment: func(shp *shipment.Shipment) { + shp.Status = shipment.StatusCompleted + shp.SentToBilling = true + shp.ReadyToBill = true + shp.ActualDeliveryDate = &[]int64{100}[0] + shp.Billed = true + + // Failure case + shp.FreightChargeAmount = decimal.NewNullDecimal(decimal.NewFromInt(1000)) + shp.OtherChargeAmount = decimal.NewNullDecimal(decimal.NewFromInt(1000)) + shp.TotalChargeAmount = 
decimal.NewNullDecimal(decimal.NewFromInt(1000)) + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "totalChargeAmount", + Code: errors.ErrInvalid, + Message: "Total charge amount must equal freight charge plus other charges", + }, + }, + }, + { + name: "shipment must be delivered before ready to bill", + modifyShipment: func(shp *shipment.Shipment) { + shp.Status = shipment.StatusCompleted + shp.SentToBilling = true + + // Failure case + shp.ReadyToBill = true + shp.ActualDeliveryDate = nil + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "actualDeliveryDate", + Code: errors.ErrInvalid, + Message: "Actual delivery date is required to mark shipment as ready to bill", + }, + }, + }, + { + name: "temperature min must be less than temperature max", + modifyShipment: func(shp *shipment.Shipment) { + shp.TemperatureMin = decimal.NewNullDecimal(decimal.NewFromInt(100)) + shp.TemperatureMax = decimal.NewNullDecimal(decimal.NewFromInt(99)) + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + { + Field: "temperatureMin", + Code: errors.ErrInvalid, + Message: "Temperature Min must be less than Temperature Max", + }, + }, + }, + } + + for _, scenario := range scenarios { + t.Run(scenario.name, func(t *testing.T) { + vCtx := validator.NewValidationContext(ctx, &validator.ValidationContext{ + IsCreate: true, + }) + + shp := newShipment() + + scenario.modifyShipment(shp) + + me := val.Validate(ctx, vCtx, shp) + + matcher := testutils.NewErrorMatcher(t, me) + matcher.HasExactErrors(scenario.expectedErrors) + }) + } +} diff --git a/internal/pkg/validator/shipmentvalidator/stop.go b/internal/pkg/validator/shipmentvalidator/stop.go new file mode 100644 index 000000000..0ff3c9c6b --- /dev/null +++ b/internal/pkg/validator/shipmentvalidator/stop.go @@ -0,0 +1,63 @@ +package shipmentvalidator + +import ( + "context" + + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/core/ports/db" + "github.com/emoss08/trenova/internal/pkg/errors" + "github.com/emoss08/trenova/internal/pkg/validator" + "go.uber.org/fx" +) + +type StopValidatorParams struct { + fx.In + + DB db.Connection +} + +type StopValidator struct { + db db.Connection +} + +func NewStopValidator(p StopValidatorParams) *StopValidator { + return &StopValidator{ + db: p.DB, + } +} + +func (v *StopValidator) Validate(ctx context.Context, valCtx *validator.ValidationContext, s *shipment.Stop, multiErr *errors.MultiError, idx int) { + stopMultiErr := multiErr.WithIndex("stops", idx) + + s.Validate(ctx, stopMultiErr) + + if valCtx.IsCreate { + v.validateID(s, stopMultiErr) + } + + v.validateTimes(stopMultiErr, s) +} + +func (v *StopValidator) validateID(s *shipment.Stop, multiErr *errors.MultiError) { + if s.ID.IsNotNil() { + multiErr.Add("id", errors.ErrInvalid, "ID cannot be set on create") + } +} + +func (v *StopValidator) validateTimes(multiErr *errors.MultiError, s *shipment.Stop) { + if s.PlannedArrival > s.PlannedDeparture { + multiErr.Add("plannedArrival", errors.ErrInvalid, "Planned arrival must be before planned departure") + } + + if s.PlannedDeparture < s.PlannedArrival { + multiErr.Add("plannedDeparture", errors.ErrInvalid, "Planned departure must be after planned arrival") + } + + if s.ActualArrival != nil && s.ActualDeparture != nil && *s.ActualArrival > *s.ActualDeparture { + multiErr.Add("actualArrival", errors.ErrInvalid, "Actual arrival 
must be before actual departure") + } + + if s.ActualArrival != nil && s.ActualDeparture != nil && *s.ActualDeparture < *s.ActualArrival { + multiErr.Add("actualDeparture", errors.ErrInvalid, "Actual departure must be after actual arrival") + } +} diff --git a/internal/pkg/validator/shipmentvalidator/stop_test.go b/internal/pkg/validator/shipmentvalidator/stop_test.go new file mode 100644 index 000000000..2ea86ce78 --- /dev/null +++ b/internal/pkg/validator/shipmentvalidator/stop_test.go @@ -0,0 +1,142 @@ +package shipmentvalidator_test + +import ( + "testing" + + "github.com/emoss08/trenova/internal/core/domain/shipment" + "github.com/emoss08/trenova/internal/pkg/errors" + "github.com/emoss08/trenova/internal/pkg/validator" + spValidator "github.com/emoss08/trenova/internal/pkg/validator/shipmentvalidator" + "github.com/emoss08/trenova/pkg/types/pulid" + "github.com/emoss08/trenova/test/testutils" +) + +func newStop() *shipment.Stop { + return &shipment.Stop{ + Type: shipment.StopTypePickup, + Sequence: 1, + Status: shipment.StopStatusNew, + PlannedArrival: 100, + PlannedDeparture: 200, + } +} + +func TestStopValidator(t *testing.T) { + val := spValidator.NewStopValidator(spValidator.StopValidatorParams{ + DB: ts.DB, + }) + + scenarios := []struct { + name string + modifyStop func(*shipment.Stop) + expectedErrors []struct { + Field string + Code errors.ErrorCode + Message string + } + }{ + { + name: "type is required", + modifyStop: func(s *shipment.Stop) { + s.Type = "" + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "stops[0].type", Code: errors.ErrRequired, Message: "Type is required"}, + }, + }, + { + name: "status is required", + modifyStop: func(s *shipment.Stop) { + s.Status = "" + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "stops[0].status", Code: errors.ErrRequired, Message: "Status is required"}, + }, + }, + { + name: "planned arrival is required", + modifyStop: func(s *shipment.Stop) { + s.PlannedArrival = 0 + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "stops[0].plannedArrival", Code: errors.ErrRequired, Message: "Planned arrival is required"}, + }, + }, + { + name: "planned times are invalid", + modifyStop: func(s *shipment.Stop) { + s.PlannedDeparture = 0 + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "stops[0].plannedDeparture", Code: errors.ErrRequired, Message: "Planned departure is required"}, + {Field: "stops[0].plannedArrival", Code: errors.ErrInvalid, Message: "Planned arrival must be before planned departure"}, + {Field: "stops[0].plannedDeparture", Code: errors.ErrInvalid, Message: "Planned departure must be after planned arrival"}, + }, + }, + { + name: "arrival times are invalid", + modifyStop: func(s *shipment.Stop) { + arrival := int64(200) + departure := int64(100) + + s.ActualArrival = &arrival + s.ActualDeparture = &departure + }, + expectedErrors: []struct { + Field string + Code errors.ErrorCode + Message string + }{ + {Field: "stops[0].actualArrival", Code: errors.ErrInvalid, Message: "Actual arrival must be before actual departure"}, + {Field: "stops[0].actualDeparture", Code: errors.ErrInvalid, Message: "Actual departure must be after actual arrival"}, + }, + }, + { + name: "no id on create", + modifyStop: func(s *shipment.Stop) { + s.ID = pulid.MustNew("stp_") + }, + expectedErrors: []struct { + Field string + Code 
errors.ErrorCode + Message string + }{ + {Field: "stops[0].id", Code: errors.ErrInvalid, Message: "ID cannot be set on create"}, + }, + }, + } + + for _, scenario := range scenarios { + t.Run(scenario.name, func(t *testing.T) { + stop := newStop() + scenario.modifyStop(stop) + + vCtx := validator.NewValidationContext(ctx, &validator.ValidationContext{ + IsCreate: true, + }) + + me := errors.NewMultiError() + + val.Validate(ctx, vCtx, stop, me, 0) + + matcher := testutils.NewErrorMatcher(t, me) + matcher.HasExactErrors(scenario.expectedErrors) + }) + } +} diff --git a/internal/pkg/validator/types.go b/internal/pkg/validator/types.go index bd4836bd8..f8871b6b5 100644 --- a/internal/pkg/validator/types.go +++ b/internal/pkg/validator/types.go @@ -1,6 +1,15 @@ package validator +import "context" + type ValidationContext struct { IsCreate bool IsUpdate bool } + +func NewValidationContext(ctx context.Context, valCtx *ValidationContext) *ValidationContext { + return &ValidationContext{ + IsCreate: valCtx.IsCreate, + IsUpdate: valCtx.IsUpdate, + } +} diff --git a/test/fixtures/fixtures.yml b/test/fixtures/fixtures.yml index a79d9620c..2f4679e02 100644 --- a/test/fixtures/fixtures.yml +++ b/test/fixtures/fixtures.yml @@ -343,6 +343,21 @@ org_type: BrokerageCarrier created_at: "{{ timestamp }}" +- model: PCMilerConfiguration + rows: + - _id: pcmiler_1 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + api_key: "1234567890" + created_at: "{{ timestamp }}" + updated_at: "{{ timestamp }}" + - _id: pcmiler_2 + organization_id: "{{ $.Organization.trenova_2.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + api_key: "1234567890" + created_at: "{{ timestamp }}" + updated_at: "{{ timestamp }}" + - model: User rows: - _id: test_user @@ -606,13 +621,49 @@ organization_id: "{{ $.Organization.trenova.ID }}" business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" created_at: "{{ now.Unix }}" - name: "Test Location" - address_line_1: "1234 Main St" - code: "TEST000000" - city: "Los Angeles" - postal_code: "90001" + name: "Ralphs" + address_line_1: "8626 Firestone Blvd" + code: "RAL001" + city: "Downey" + postal_code: "90241" state_id: "{{ $.UsState.ca.ID }}" location_category_id: "{{ $.LocationCategory.location_category_1.ID }}" + latitude: 33.93638237907036 + longitude: -118.12713490285472 + place_id: "ChIJd2xkV5zNwoAROL47nNV-eKw" + is_geocoded: true + + - _id: test_location_2 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + name: "Mcdonalds" + address_line_1: "1101 Summit Ave" + code: "MCD001" + city: "Greensboro" + postal_code: "27410" + state_id: "{{ $.UsState.nc.ID }}" + location_category_id: "{{ $.LocationCategory.location_category_1.ID }}" + latitude: 36.08947731626447 + longitude: -79.77463246046224 + place_id: "ChIJqQCYky0fU4gRGsIU6EhmoPA" + is_geocoded: true + + - _id: test_location_3 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + name: "Walmart" + address_line_1: "6185 Retail Rd" + code: "WAL001" + city: "Dallas" + postal_code: "75231" + state_id: "{{ $.UsState.tx.ID }}" + longitude: -96.75433187219494 + latitude: 32.86299364462629 + location_category_id: "{{ $.LocationCategory.location_category_1.ID }}" + place_id: "ChIJBRfH_4WfToYRFFH_9IqgCPY" + is_geocoded: true - model: ServiceType rows: @@ -853,6 +904,7 @@ equipment_type_id: "{{ 
$.EquipmentType.tractor_equip_type.ID }}" equipment_manufacturer_id: "{{ $.EquipmentManufacturer.freightliner_manufacturer.ID }}" code: TRN-001 + fleet_code_id: "{{ $.FleetCode.fc_1.ID }}" primary_worker_id: "{{ $.Worker.worker_1.ID }}" secondary_worker_id: "{{ $.Worker.worker_2.ID }}" created_at: "{{ now.Unix }}" @@ -868,6 +920,7 @@ model: "Test Trailer" make: "Test Trailer" year: 2024 + fleet_code_id: "{{ $.FleetCode.fc_1.ID }}" license_plate_number: "TEST000000" vin: "TEST000000" last_inspection_date: "{{ now.Unix }}" @@ -889,3 +942,537 @@ city: "Los Angeles" postal_code: "90001" state_id: "{{ $.UsState.ca.ID }}" + +- model: Shipment + rows: + - _id: test_shipment + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + pro_number: "S121094129213012" + bol: "TEST000000" + shipment_type_id: "{{ $.ShipmentType.ftl_shipment_type.ID }}" + service_type_id: "{{ $.ServiceType.std_service_type.ID }}" + customer_id: "{{ $.Customer.test_customer.ID }}" + rating_unit: 1 + rating_method: "PerMile" + other_charge_amount: 100.00 + freight_charge_amount: 100.00 + total_charge_amount: 200.00 + pieces: 1 + weight: 1000.00 + ready_to_bill: false + billed: false + sent_to_billing: false + temperature_min: 10.00 + temperature_max: 20.00 + + # In Transit Shipment + - _id: in_transit_shipment + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "InTransit" + pro_number: "S121094159213013" + bol: "TEST000000" + shipment_type_id: "{{ $.ShipmentType.ftl_shipment_type.ID }}" + service_type_id: "{{ $.ServiceType.std_service_type.ID }}" + customer_id: "{{ $.Customer.test_customer.ID }}" + rating_unit: 1 + rating_method: "PerMile" + other_charge_amount: 100.00 + freight_charge_amount: 400.00 + total_charge_amount: 500.00 + pieces: 1 + weight: 1000.00 + ready_to_bill: false + billed: false + sent_to_billing: false + temperature_min: 10.00 + temperature_max: 20.00 + - _id: in_transit_shipment_2 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "InTransit" + pro_number: "S121054159213013" + bol: "TEST000000" + shipment_type_id: "{{ $.ShipmentType.ftl_shipment_type.ID }}" + service_type_id: "{{ $.ServiceType.std_service_type.ID }}" + customer_id: "{{ $.Customer.test_customer.ID }}" + rating_unit: 1 + rating_method: "PerMile" + other_charge_amount: 100.00 + freight_charge_amount: 400.00 + total_charge_amount: 500.00 + pieces: 1 + weight: 1000.00 + ready_to_bill: false + billed: false + sent_to_billing: false + temperature_min: 10.00 + temperature_max: 20.00 + + # Delayed Shipment + - _id: delayed_shipment + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Delayed" + pro_number: "S121094159213014" + bol: "TEST000000" + shipment_type_id: "{{ $.ShipmentType.ftl_shipment_type.ID }}" + service_type_id: "{{ $.ServiceType.std_service_type.ID }}" + customer_id: "{{ $.Customer.test_customer.ID }}" + rating_unit: 1 + rating_method: "PerMile" + other_charge_amount: 100.00 + freight_charge_amount: 400.00 + total_charge_amount: 500.00 + pieces: 1 + weight: 1000.00 + ready_to_bill: false + billed: false + sent_to_billing: false + temperature_min: 10.00 + temperature_max: 20.00 + + # Completed Shipment + - _id: completed_shipment + organization_id: "{{ 
$.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Completed" + pro_number: "S121092159223015" + bol: "TEST000000" + shipment_type_id: "{{ $.ShipmentType.ftl_shipment_type.ID }}" + service_type_id: "{{ $.ServiceType.std_service_type.ID }}" + customer_id: "{{ $.Customer.test_customer.ID }}" + rating_unit: 1 + rating_method: "PerMile" + other_charge_amount: 100.00 + freight_charge_amount: 400.00 + total_charge_amount: 500.00 + pieces: 1 + weight: 1000.00 + ready_to_bill: false + billed: false + sent_to_billing: false + temperature_min: 10.00 + temperature_max: 20.00 + + # Billed Shipment + - _id: billed_shipment + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Billed" + pro_number: "S121094155223016" + bol: "TEST000000" + shipment_type_id: "{{ $.ShipmentType.ftl_shipment_type.ID }}" + service_type_id: "{{ $.ServiceType.std_service_type.ID }}" + customer_id: "{{ $.Customer.test_customer.ID }}" + rating_unit: 1 + rating_method: "FlatRate" + other_charge_amount: 100.00 + freight_charge_amount: 400.00 + total_charge_amount: 500.00 + pieces: 1 + weight: 1000.00 + billed: true + ready_to_bill: true + ready_to_bill_date: "{{ now.Unix }}" + sent_to_billing: true + sent_to_billing_date: "{{ now.Unix }}" + temperature_min: 10.00 + temperature_max: 20.00 + + # Canceled Shipment + - _id: canceled_shipment + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Canceled" + pro_number: "S121094151223026" + bol: "TEST000000" + shipment_type_id: "{{ $.ShipmentType.ftl_shipment_type.ID }}" + service_type_id: "{{ $.ServiceType.std_service_type.ID }}" + customer_id: "{{ $.Customer.test_customer.ID }}" + rating_unit: 1 + rating_method: "FlatRate" + other_charge_amount: 100.00 + freight_charge_amount: 400.00 + total_charge_amount: 500.00 + pieces: 1 + weight: 1000.00 + billed: true + ready_to_bill: true + ready_to_bill_date: "{{ now.Unix }}" + sent_to_billing: true + sent_to_billing_date: "{{ now.Unix }}" + temperature_min: 10.00 + temperature_max: 20.00 + +- model: ShipmentMove + rows: + # Shipment 1 Moves + - _id: test_shipment_move + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + sequence: 0 + shipment_id: "{{ $.Shipment.test_shipment.ID }}" + tractor_id: "{{ $.Tractor.tractor_1.ID }}" + trailer_id: "{{ $.Trailer.test_trailer.ID }}" + primary_worker_id: "{{ $.Worker.worker_1.ID }}" + distance: 1523.00 + - _id: test_shipment_move_2 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + sequence: 1 + shipment_id: "{{ $.Shipment.test_shipment.ID }}" + tractor_id: "{{ $.Tractor.tractor_1.ID }}" + trailer_id: "{{ $.Trailer.test_trailer.ID }}" + primary_worker_id: "{{ $.Worker.worker_1.ID }}" + distance: 100.00 + + # In Transit Shipment Move + - _id: in_transit_shipment_move + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "InTransit" + sequence: 1 + shipment_id: "{{ $.Shipment.in_transit_shipment.ID }}" + tractor_id: "{{ $.Tractor.tractor_1.ID }}" + trailer_id: "{{ $.Trailer.test_trailer.ID }}" + primary_worker_id: "{{ $.Worker.worker_1.ID }}" + 
distance: 5223.00 + - _id: in_transit_shipment_move_2 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "InTransit" + sequence: 1 + shipment_id: "{{ $.Shipment.in_transit_shipment_2.ID }}" + tractor_id: "{{ $.Tractor.tractor_1.ID }}" + trailer_id: "{{ $.Trailer.test_trailer.ID }}" + primary_worker_id: "{{ $.Worker.worker_1.ID }}" + distance: 5223.00 + + # Delayed Shipment Move + - _id: test_shipment_move_4 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "InTransit" + sequence: 1 + shipment_id: "{{ $.Shipment.delayed_shipment.ID }}" + tractor_id: "{{ $.Tractor.tractor_1.ID }}" + trailer_id: "{{ $.Trailer.test_trailer.ID }}" + primary_worker_id: "{{ $.Worker.worker_1.ID }}" + distance: 2312.00 + + # Completed Shipment Move + - _id: test_shipment_move_5 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Completed" + sequence: 1 + shipment_id: "{{ $.Shipment.completed_shipment.ID }}" + tractor_id: "{{ $.Tractor.tractor_1.ID }}" + trailer_id: "{{ $.Trailer.test_trailer.ID }}" + primary_worker_id: "{{ $.Worker.worker_1.ID }}" + distance: 2312.00 + + # Billed Shipment Move + - _id: test_shipment_move_6 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Completed" + sequence: 1 + shipment_id: "{{ $.Shipment.billed_shipment.ID }}" + tractor_id: "{{ $.Tractor.tractor_1.ID }}" + trailer_id: "{{ $.Trailer.test_trailer.ID }}" + primary_worker_id: "{{ $.Worker.worker_1.ID }}" + distance: 2312.00 + + # Cancelled Shipment Move + - _id: test_shipment_move_7 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Canceled" + sequence: 1 + shipment_id: "{{ $.Shipment.canceled_shipment.ID }}" + tractor_id: "{{ $.Tractor.tractor_1.ID }}" + trailer_id: "{{ $.Trailer.test_trailer.ID }}" + primary_worker_id: "{{ $.Worker.worker_1.ID }}" + distance: 2312.00 + +- model: Stop + rows: + # Shipment 1 Move Stops + - _id: test_stop + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Pickup" + sequence: 0 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move.ID }}" + location_id: "{{ $.Location.test_location.ID }}" + address_line: "8626 Firestone Blvd, Downey, CA 90242" + pieces: 1 + weight: 1000.00 + planned_arrival: 1737685184 + planned_departure: 1737692384 + + - _id: test_stop_2 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Delivery" + sequence: 1 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move.ID }}" + location_id: "{{ $.Location.test_location_2.ID }}" + address_line: "1101 Summit Ave, Greensboro, NC 27410" + pieces: 1 + weight: 1000.00 + planned_arrival: 1737746081 + planned_departure: 1737836081 + + - _id: test_stop_3 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Pickup" + sequence: 0 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_2.ID }}" + location_id: "{{ 
$.Location.test_location_2.ID }}" + address_line: "1101 Summit Ave, Greensboro, NC 27410" + pieces: 1 + weight: 1000.00 + planned_arrival: 1737918881 + planned_departure: 1738019681 + + - _id: test_stop_4 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Delivery" + sequence: 2 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move.ID }}" + location_id: "{{ $.Location.test_location_3.ID }}" + address_line: "6185 Retail Rd, Dallas, TX 75231" + pieces: 1 + weight: 1000.00 + planned_arrival: 1738073681 + planned_departure: 1738185281 + + # In Transit Shipment Move Stops + - _id: in_transit_stop_1 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Pickup" + sequence: 0 + shipment_move_id: "{{ $.ShipmentMove.in_transit_shipment_move.ID }}" + location_id: "{{ $.Location.test_location.ID }}" + address_line: "8626 Firestone Blvd, Downey, CA 90242" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + + - _id: in_transit_stop_2 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Delivery" + sequence: 1 + shipment_move_id: "{{ $.ShipmentMove.in_transit_shipment_move.ID }}" + location_id: "{{ $.Location.test_location_2.ID }}" + address_line: "1101 Summit Ave, Greensboro, NC 27410" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + + # In Transit Shipment Move 2 Stops + - _id: in_transit_stop_3 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Pickup" + sequence: 0 + shipment_move_id: "{{ $.ShipmentMove.in_transit_shipment_move_2.ID }}" + location_id: "{{ $.Location.test_location_3.ID }}" + address_line: "6185 Retail Rd, Dallas, TX 75231" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + + - _id: in_transit_stop_4 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Delivery" + sequence: 1 + shipment_move_id: "{{ $.ShipmentMove.in_transit_shipment_move_2.ID }}" + location_id: "{{ $.Location.test_location_2.ID }}" + address_line: "1101 Summit Ave, Greensboro, NC 27410" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + + # Delayed Shipment Move Stops + - _id: test_stop_7 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Pickup" + sequence: 0 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_4.ID }}" + location_id: "{{ $.Location.test_location_2.ID }}" + address_line: "1101 Summit Ave, Greensboro, NC 27410" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ monthsAgo 1 }}" + planned_departure: "{{ monthsAgo 1 }}" + + - _id: test_stop_8 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "New" + type: "Delivery" + sequence: 1 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_4.ID }}" + location_id: "{{ 
$.Location.test_location_3.ID }}" + address_line: "6185 Retail Rd, Dallas, TX 75231" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ monthsAgo 1 }}" + planned_departure: "{{ monthsAgo 1 }}" + + # Completed Shipment Move Stops + - _id: test_stop_9 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Completed" + type: "Pickup" + sequence: 0 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_5.ID }}" + location_id: "{{ $.Location.test_location_2.ID }}" + address_line: "1101 Summit Ave, Greensboro, NC 27410" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + actual_arrival: "{{ now.Unix }}" + actual_departure: "{{ now.Unix }}" + + - _id: test_stop_10 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Completed" + type: "Delivery" + sequence: 1 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_5.ID }}" + location_id: "{{ $.Location.test_location_3.ID }}" + address_line: "6185 Retail Rd, Dallas, TX 75231" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + actual_arrival: "{{ now.Unix }}" + actual_departure: "{{ now.Unix }}" + + # Billed Shipment Move Stops + - _id: test_stop_11 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Completed" + type: "Pickup" + sequence: 0 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_6.ID }}" + location_id: "{{ $.Location.test_location_3.ID }}" + address_line: "6185 Retail Rd, Dallas, TX 75231" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + actual_arrival: "{{ now.Unix }}" + actual_departure: "{{ now.Unix }}" + + - _id: test_stop_11 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Completed" + type: "Delivery" + sequence: 1 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_6.ID }}" + location_id: "{{ $.Location.test_location_2.ID }}" + address_line: "1101 Summit Ave, Greensboro, NC 27410" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + actual_arrival: "{{ now.Unix }}" + actual_departure: "{{ now.Unix }}" + + # Canceled Shipment Move Stops + - _id: test_stop_12 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Canceled" + type: "Pickup" + sequence: 0 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_7.ID }}" + location_id: "{{ $.Location.test_location.ID }}" + address_line: "8626 Firestone Blvd, Downey, CA 90242" + pieces: 1 + weight: 1000.00 + planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + actual_arrival: "{{ now.Unix }}" + actual_departure: "{{ now.Unix }}" + + - _id: test_stop_13 + organization_id: "{{ $.Organization.trenova.ID }}" + business_unit_id: "{{ $.BusinessUnit.trenova.ID }}" + created_at: "{{ now.Unix }}" + status: "Canceled" + type: "Delivery" + sequence: 1 + shipment_move_id: "{{ $.ShipmentMove.test_shipment_move_7.ID }}" + location_id: "{{ $.Location.test_location_2.ID }}" + address_line: "1101 Summit Ave, Greensboro, NC 27410" + pieces: 1 + weight: 1000.00 + 
planned_arrival: "{{ now.Unix }}" + planned_departure: "{{ now.Unix }}" + actual_arrival: "{{ now.Unix }}" + actual_departure: "{{ now.Unix }}" diff --git a/ui/package-lock.json b/ui/package-lock.json index 3d8ba5ad0..3e38b22a7 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -15,6 +15,7 @@ "@fontsource-variable/inter": "^5.1.1", "@fortawesome/pro-regular-svg-icons": "^6.7.1", "@fortawesome/pro-solid-svg-icons": "^6.7.2", + "@googlemaps/markerclusterer": "^2.5.1", "@hookform/resolvers": "^3.10.0", "@lukemorales/query-key-factory": "^1.3.4", "@radix-ui/react-alert-dialog": "^1.1.4", @@ -33,9 +34,11 @@ "@radix-ui/react-switch": "^1.1.2", "@radix-ui/react-tooltip": "^1.1.6", "@radix-ui/react-visually-hidden": "^1.1.1", + "@tailwindcss/vite": "^4.0.0", "@tanstack/react-query": "^5.64.2", "@tanstack/react-table": "^8.20.6", "@uidotdev/usehooks": "^2.4.1", + "@vis.gl/react-google-maps": "^1.5.1", "@vitejs/plugin-react": "^4.3.4", "chrono-node": "^2.7.7", "class-variance-authority": "^0.7.1", @@ -57,6 +60,7 @@ "react-router-dom": "^7.1.3", "react-select": "^5.9.0", "sonner": "^1.7.2", + "supercluster": "^8.0.1", "tailwind-merge": "^2.6.0", "uuid": "^11.0.5", "vaul": "^1.1.2", @@ -67,12 +71,14 @@ "devDependencies": { "@eslint/js": "^9.18.0", "@rollup/plugin-node-resolve": "^16.0.0", - "@tailwindcss/vite": "^4.0.0", "@tanstack/eslint-plugin-query": "^5.64.2", "@tanstack/react-query-devtools": "^5.64.2", + "@types/geojson": "^7946.0.14", + "@types/google.maps": "^3.58.1", "@types/node": "^22.10.7", "@types/react": "^18.3.16", "@types/react-dom": "^18.3.5", + "@types/supercluster": "^7.1.3", "@vite-pwa/assets-generator": "^0.2.6", "@vitejs/plugin-react-swc": "^3.7.2", "babel-plugin-react-compiler": "^19.0.0-beta-decd7b8-20250118", @@ -2580,6 +2586,16 @@ "node": ">=6" } }, + "node_modules/@googlemaps/markerclusterer": { + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/@googlemaps/markerclusterer/-/markerclusterer-2.5.3.tgz", + "integrity": "sha512-x7lX0R5yYOoiNectr10wLgCBasNcXFHiADIBdmn7jQllF2B5ENQw5XtZK+hIw4xnV0Df0xhN4LN98XqA5jaiOw==", + "license": "Apache-2.0", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "supercluster": "^8.0.1" + } + }, "node_modules/@hookform/resolvers": { "version": "3.10.0", "resolved": "https://registry.npmjs.org/@hookform/resolvers/-/resolvers-3.10.0.tgz", @@ -4353,7 +4369,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.0.0.tgz", "integrity": "sha512-tfG2uBvo6j6kDIPmntxwXggCOZAt7SkpAXJ6pTIYirNdk5FBqh/CZZ9BZPpgcl/tNFLs6zc4yghM76sqiELG9g==", - "dev": true, "license": "MIT", "dependencies": { "enhanced-resolve": "^5.18.0", @@ -4365,7 +4380,6 @@ "version": "2.4.2", "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", - "dev": true, "license": "MIT", "bin": { "jiti": "lib/jiti-cli.mjs" @@ -4375,7 +4389,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.0.0.tgz", "integrity": "sha512-W3FjpJgy4VV1JiL7iBYDf2n/WkeDg1Il+0Q7eWnqPyvkPPCo/Mbwc5BiaT7dfBNV6tQKAhVE34rU5xl8pSl50w==", - "dev": true, "license": "MIT", "engines": { "node": ">= 10" @@ -4401,7 +4414,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4418,7 +4430,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4435,7 +4446,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ 
-4452,7 +4462,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4469,7 +4478,6 @@ "cpu": [ "arm" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4486,7 +4494,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4503,7 +4510,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4520,7 +4526,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4537,7 +4542,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4554,7 +4558,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4571,7 +4574,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "MIT", "optional": true, "os": [ @@ -4585,7 +4587,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.0.0.tgz", "integrity": "sha512-4uukMiU9gHui8KMPMdWic5SP1O/tmQ1NFSRNrQWmcop5evAVl/LZ6/LuWL3quEiecp2RBcRWwqJrG+mFXlRlew==", - "dev": true, "license": "MIT", "dependencies": { "@tailwindcss/node": "^4.0.0", @@ -4765,6 +4766,19 @@ "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", "license": "MIT" }, + "node_modules/@types/geojson": { + "version": "7946.0.16", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", + "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/google.maps": { + "version": "3.58.1", + "resolved": "https://registry.npmjs.org/@types/google.maps/-/google.maps-3.58.1.tgz", + "integrity": "sha512-X9QTSvGJ0nCfMzYOnaVs/k6/4L+7F5uCS+4iUmkLEls6J9S/Phv+m/i3mDeyc49ZBgwab3EFO1HEoBY7k98EGQ==", + "license": "MIT" + }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -4830,6 +4844,16 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/supercluster": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/@types/supercluster/-/supercluster-7.1.3.tgz", + "integrity": "sha512-Z0pOY34GDFl3Q6hUFYf3HkTwKEE02e7QgtJppBt+beEAxnyOpJua+voGFvxINBHa06GwLFFym7gRPY2SiKIfIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/geojson": "*" + } + }, "node_modules/@types/trusted-types": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", @@ -5089,6 +5113,20 @@ "dev": true, "license": "ISC" }, + "node_modules/@vis.gl/react-google-maps": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/@vis.gl/react-google-maps/-/react-google-maps-1.5.1.tgz", + "integrity": "sha512-SQwr06IOeTEUTtnMWAianbcjT3u4rANm2AKMAU7cjzghHVWGNViYWY6k7oUHyeurYg//OzsgcXtCPKiis/fTtA==", + "license": "MIT", + "dependencies": { + "@types/google.maps": "^3.54.10", + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "react": ">=16.8.0 || ^19.0 || ^19.0.0-rc", + "react-dom": ">=16.8.0 || ^19.0 || ^19.0.0-rc" + } + }, "node_modules/@vite-pwa/assets-generator": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/@vite-pwa/assets-generator/-/assets-generator-0.2.6.tgz", @@ -6680,7 +6718,6 @@ "version": "5.18.0", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.0.tgz", "integrity": "sha512-0/r0MySGYG8YqlayBZ6MuCfECmHFdJ5qyPh8s8wa5Hnm6SaFLSK1VYCbj+NKp090Nm1caZhD+QTnmxO7esYGyQ==", - "dev": true, 
"license": "MIT", "dependencies": { "graceful-fs": "^4.2.4", @@ -7355,7 +7392,6 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, "license": "MIT" }, "node_modules/fast-diff": { @@ -7852,7 +7888,6 @@ "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true, "license": "ISC" }, "node_modules/graphemer": { @@ -8813,6 +8848,12 @@ "node": ">=4.0" } }, + "node_modules/kdbush": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz", + "integrity": "sha512-WbCVYJ27Sz8zi9Q7Q0xHC+05iwkm3Znipc2XTlrnJbsHMYktW4hPhXUE8Ys1engBrvffoSCqbil1JQAa7clRpA==", + "license": "ISC" + }, "node_modules/keyv": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", @@ -8851,7 +8892,6 @@ "version": "1.29.1", "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.29.1.tgz", "integrity": "sha512-FmGoeD4S05ewj+AkhTY+D+myDvXI6eL27FjHIjoyUkO/uw7WZD1fBVs0QxeYWa7E17CUHJaYX/RUGISCtcrG4Q==", - "devOptional": true, "license": "MPL-2.0", "dependencies": { "detect-libc": "^1.0.3" @@ -9080,7 +9120,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", - "devOptional": true, "license": "Apache-2.0", "bin": { "detect-libc": "bin/detect-libc.js" @@ -12226,6 +12265,15 @@ "integrity": "sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw==", "license": "MIT" }, + "node_modules/supercluster": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/supercluster/-/supercluster-8.0.1.tgz", + "integrity": "sha512-IiOea5kJ9iqzD2t7QJq/cREyLHTtSmUT6gQsweojg9WH2sYJqZK9SswTu6jrscO6D1G5v5vYZ9ru/eq85lXeZQ==", + "license": "ISC", + "dependencies": { + "kdbush": "^4.0.2" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -12308,7 +12356,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.0.0.tgz", "integrity": "sha512-ULRPI3A+e39T7pSaf1xoi58AqqJxVCLg8F/uM5A3FadUbnyDTgltVnXJvdkTjwCOGA6NazqHVcwPJC5h2vRYVQ==", - "dev": true, "license": "MIT" }, "node_modules/tailwindcss-animate": { @@ -12325,7 +12372,6 @@ "version": "2.2.1", "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", - "dev": true, "license": "MIT", "engines": { "node": ">=6" diff --git a/ui/package.json b/ui/package.json index eff3e75b5..799698602 100644 --- a/ui/package.json +++ b/ui/package.json @@ -17,6 +17,7 @@ "@fontsource-variable/inter": "^5.1.1", "@fortawesome/pro-regular-svg-icons": "^6.7.1", "@fortawesome/pro-solid-svg-icons": "^6.7.2", + "@googlemaps/markerclusterer": "^2.5.1", "@hookform/resolvers": "^3.10.0", "@lukemorales/query-key-factory": "^1.3.4", "@radix-ui/react-alert-dialog": "^1.1.4", @@ -35,10 +36,13 @@ "@radix-ui/react-switch": "^1.1.2", "@radix-ui/react-tooltip": "^1.1.6", "@radix-ui/react-visually-hidden": "^1.1.1", + "@tailwindcss/vite": "^4.0.0", 
"@tanstack/react-query": "^5.64.2", "@tanstack/react-table": "^8.20.6", "@uidotdev/usehooks": "^2.4.1", + "@vis.gl/react-google-maps": "^1.5.1", "@vitejs/plugin-react": "^4.3.4", + "supercluster": "^8.0.1", "chrono-node": "^2.7.7", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -69,10 +73,12 @@ "devDependencies": { "@eslint/js": "^9.18.0", "@rollup/plugin-node-resolve": "^16.0.0", - "@tailwindcss/vite": "^4.0.0", "@tanstack/eslint-plugin-query": "^5.64.2", "@tanstack/react-query-devtools": "^5.64.2", + "@types/google.maps": "^3.58.1", "@types/node": "^22.10.7", + "@types/geojson": "^7946.0.14", + "@types/supercluster": "^7.1.3", "@types/react": "^18.3.16", "@types/react-dom": "^18.3.5", "@vite-pwa/assets-generator": "^0.2.6", @@ -98,4 +104,4 @@ "vite-plugin-compression2": "^1.3.3", "vite-plugin-pwa": "^0.21.1" } -} +} \ No newline at end of file diff --git a/ui/src/app/auth/_components/auth-form.tsx b/ui/src/app/auth/_components/auth-form.tsx index 9521148a7..9b603a61b 100644 --- a/ui/src/app/auth/_components/auth-form.tsx +++ b/ui/src/app/auth/_components/auth-form.tsx @@ -64,7 +64,7 @@ export function AuthForm() { - Don't have an account yet? + Don't have an account yet? Create an Account diff --git a/ui/src/app/commodities/_components/commodity-columns.tsx b/ui/src/app/commodities/_components/commodity-columns.tsx index ff28252c5..b0a171bc6 100644 --- a/ui/src/app/commodities/_components/commodity-columns.tsx +++ b/ui/src/app/commodities/_components/commodity-columns.tsx @@ -66,5 +66,6 @@ export function getColumns(): ColumnDef[] { ), }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/customers/_components/customer-columns.tsx b/ui/src/app/customers/_components/customer-columns.tsx index a565663e0..43942ff4d 100644 --- a/ui/src/app/customers/_components/customer-columns.tsx +++ b/ui/src/app/customers/_components/customer-columns.tsx @@ -37,6 +37,7 @@ export function getColumns(): ColumnDef[] { ), }, { + id: "autoMarkReadyToBill", accessorKey: "autoMarkReadyToBill", header: ({ column }) => ( [] { ), }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/customers/_components/customer-edit-modal.tsx b/ui/src/app/customers/_components/customer-edit-modal.tsx index 06fe58892..e7ef06a11 100644 --- a/ui/src/app/customers/_components/customer-edit-modal.tsx +++ b/ui/src/app/customers/_components/customer-edit-modal.tsx @@ -1,7 +1,7 @@ import { FormEditModal } from "@/components/ui/form-edit-model"; import { - customerSchema, - type CustomerSchema, + customerSchema, + type CustomerSchema, } from "@/lib/schemas/customer-schema"; import { type EditTableSheetProps } from "@/types/data-table"; import { yupResolver } from "@hookform/resolvers/yup"; diff --git a/ui/src/app/equipment-manufacturers/_components/equip-manufacturer-columns.tsx b/ui/src/app/equipment-manufacturers/_components/equip-manufacturer-columns.tsx index 81fe72e8a..9de811439 100644 --- a/ui/src/app/equipment-manufacturers/_components/equip-manufacturer-columns.tsx +++ b/ui/src/app/equipment-manufacturers/_components/equip-manufacturer-columns.tsx @@ -39,5 +39,6 @@ export function getColumns(): ColumnDef[] { ), }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/equipment-manufacturers/_components/equip-manufacturer-create-modal.tsx b/ui/src/app/equipment-manufacturers/_components/equip-manufacturer-create-modal.tsx index 81d136247..0b1f46dc2 100644 --- a/ui/src/app/equipment-manufacturers/_components/equip-manufacturer-create-modal.tsx +++ 
b/ui/src/app/equipment-manufacturers/_components/equip-manufacturer-create-modal.tsx @@ -26,7 +26,7 @@ export function CreateEquipManufacturerModal({ } form={form} schema={equipmentManufacturerSchema} diff --git a/ui/src/app/equipment-types/_components/equip-type-columns.tsx b/ui/src/app/equipment-types/_components/equip-type-columns.tsx index 088f0ef16..f0ee9bbf8 100644 --- a/ui/src/app/equipment-types/_components/equip-type-columns.tsx +++ b/ui/src/app/equipment-types/_components/equip-type-columns.tsx @@ -46,5 +46,6 @@ export function getColumns(): ColumnDef[] { ), }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/fleet-codes/_components/fleet-code-columns.tsx b/ui/src/app/fleet-codes/_components/fleet-code-columns.tsx index 729d11152..d4ec161f7 100644 --- a/ui/src/app/fleet-codes/_components/fleet-code-columns.tsx +++ b/ui/src/app/fleet-codes/_components/fleet-code-columns.tsx @@ -51,5 +51,6 @@ export function getColumns(): ColumnDef[] { return

{manager.name}

; }, }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/hazardous-materials/_components/hazardous-material-columns.tsx b/ui/src/app/hazardous-materials/_components/hazardous-material-columns.tsx index c089e358b..8cad537c4 100644 --- a/ui/src/app/hazardous-materials/_components/hazardous-material-columns.tsx +++ b/ui/src/app/hazardous-materials/_components/hazardous-material-columns.tsx @@ -62,5 +62,6 @@ export function getColumns(): ColumnDef[] { ), }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/location-categories/_components/location-category-columns.tsx b/ui/src/app/location-categories/_components/location-category-columns.tsx index 8af4f97fe..5c79a19d7 100644 --- a/ui/src/app/location-categories/_components/location-category-columns.tsx +++ b/ui/src/app/location-categories/_components/location-category-columns.tsx @@ -53,5 +53,6 @@ export function getColumns(): ColumnDef[] { ), }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/locations/_components/location-columns.tsx b/ui/src/app/locations/_components/location-columns.tsx index c1d51612f..4a6019203 100644 --- a/ui/src/app/locations/_components/location-columns.tsx +++ b/ui/src/app/locations/_components/location-columns.tsx @@ -7,6 +7,7 @@ import { import { DataTableDescription } from "@/components/data-table/_components/data-table-components"; import { StatusBadge } from "@/components/status-badge"; import { type LocationSchema } from "@/lib/schemas/location-schema"; +import { formatLocation } from "@/lib/utils"; import { createColumnHelper, type ColumnDef } from "@tanstack/react-table"; export function getColumns(): ColumnDef[] { @@ -59,18 +60,10 @@ export function getColumns(): ColumnDef[] { ), cell: ({ row }) => { - const state = row.original?.state; - const addressLine = - row.original.addressLine1 + - (row.original.addressLine2 ? `, ${row.original.addressLine2}` : ""); - const cityStateZip = `${row.original.city} ${state?.abbreviation}, ${row.original.postalCode}`; - - return ( -

- {addressLine} {cityStateZip} -

- ); + return

{formatLocation(row.original)}

; }, }, + + commonColumns.createdAt, ]; } diff --git a/ui/src/app/service-types/_components/service-type-columns.tsx b/ui/src/app/service-types/_components/service-type-columns.tsx index 902955ab2..30263ab74 100644 --- a/ui/src/app/service-types/_components/service-type-columns.tsx +++ b/ui/src/app/service-types/_components/service-type-columns.tsx @@ -40,5 +40,6 @@ export function getColumns(): ColumnDef[] { ), }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/shipment-types/_components/shipment-type-columns.tsx b/ui/src/app/shipment-types/_components/shipment-type-columns.tsx index 0e14e142e..27eb7c604 100644 --- a/ui/src/app/shipment-types/_components/shipment-type-columns.tsx +++ b/ui/src/app/shipment-types/_components/shipment-type-columns.tsx @@ -40,5 +40,6 @@ export function getColumns(): ColumnDef[] { ), }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/shipment/_components/shipment-columns.tsx b/ui/src/app/shipment/_components/shipment-columns.tsx new file mode 100644 index 000000000..fa35289ec --- /dev/null +++ b/ui/src/app/shipment/_components/shipment-columns.tsx @@ -0,0 +1,158 @@ +import { + DataTableColumnHeader, + DataTableColumnHeaderWithTooltip, +} from "@/components/data-table/_components/data-table-column-header"; +import { + createCommonColumns, + createEntityColumn, + createEntityRefColumn, + createNestedEntityRefColumn, +} from "@/components/data-table/_components/data-table-column-helpers"; +import { ShipmentStatusBadge } from "@/components/status-badge"; +import { generateDateTimeString, toDate } from "@/lib/date"; +import { LocationSchema } from "@/lib/schemas/location-schema"; +import { + calculateShipmentMileage, + getDestinationStopInfo, + getOriginStopInfo, + ShipmentLocations, +} from "@/lib/shipment/utils"; +import { formatLocation } from "@/lib/utils"; +import { Shipment } from "@/types/shipment"; +import { createColumnHelper, type ColumnDef } from "@tanstack/react-table"; + +export function getColumns(): ColumnDef[] { + const columnHelper = createColumnHelper(); + const commonColumns = createCommonColumns(columnHelper); + + return [ + commonColumns.selection, + { + accessorKey: "status", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const status = row.original.status; + return ; + }, + size: 100, + }, + createEntityColumn(columnHelper, "proNumber", { + accessorKey: "proNumber", + getHeaderText: "Pro Number", + getId: (shipment) => shipment.id, + getDisplayText: (shipment) => shipment.proNumber, + }), + createEntityRefColumn(columnHelper, "customer", { + basePath: "/billing/configurations/customers", + getId: (customer) => customer.id, + getDisplayText: (customer) => customer.name, + getHeaderText: "Customer", + }), + createNestedEntityRefColumn(columnHelper, { + columnId: "originLocation", + basePath: "/dispatch/configurations/locations", + getHeaderText: "Origin Location", + getId: (location) => location.id, + getDisplayText: (location: LocationSchema) => location.name, + getSecondaryInfo: (location) => { + return { + entity: location, + displayText: formatLocation(location), + clickable: false, + }; + }, + getEntity: (shipment) => { + try { + return ShipmentLocations.useLocations(shipment).origin; + } catch { + throw new Error("Shipment has no origin location"); + } + }, + }), + { + id: "originPickup", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const shipment = row.original; + const originStop = getOriginStopInfo(shipment); + if (!originStop) { + return
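formatLocation (imported from @/lib/utils) is not defined anywhere in this patch; judging from the inline formatting it replaces in location-columns.tsx, a plausible reconstruction looks like the sketch below — treat the exact shape as an assumption, not the actual helper.

// Hypothetical reconstruction of formatLocation from @/lib/utils, inferred from
// the inline address formatting removed above. The real helper may differ.
interface LocationLike {
  addressLine1: string;
  addressLine2?: string;
  city: string;
  postalCode: string;
  state?: { abbreviation: string };
}

export function formatLocation(location: LocationLike): string {
  // "8626 Firestone Blvd, Suite 4" — second address line only when present.
  const addressLine =
    location.addressLine1 +
    (location.addressLine2 ? `, ${location.addressLine2}` : "");
  // "Los Angeles CA, 90001" — mirrors the removed cityStateZip template.
  const cityStateZip = `${location.city} ${location.state?.abbreviation}, ${location.postalCode}`;
  return `${addressLine} ${cityStateZip}`;
}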

-

; + } + + const arrivalDate = toDate(originStop.plannedArrival); + if (!arrivalDate) { + return

-

; + } + + return

{generateDateTimeString(arrivalDate)}

; + }, + }, + createNestedEntityRefColumn(columnHelper, { + columnId: "destinationLocation", + basePath: "/dispatch/configurations/locations", + getHeaderText: "Destination Location", + getId: (location) => location.id, + getDisplayText: (location: LocationSchema) => location.name, + getSecondaryInfo: (location) => { + return { + entity: location, + displayText: formatLocation(location), + clickable: false, + }; + }, + getEntity: (shipment) => { + try { + return ShipmentLocations.useLocations(shipment).destination; + } catch { + throw new Error("Shipment has no destination location"); + } + }, + }), + { + id: "destinationPickup", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const shipment = row.original; + const destinationStop = getDestinationStopInfo(shipment); + if (!destinationStop) { + return

-

; + } + + const arrivalDate = toDate(destinationStop.plannedArrival); + if (!arrivalDate) { + return

-

; + } + + return

{generateDateTimeString(arrivalDate)}

; + }, + }, + { + id: "totalDistance", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const shipment = row.original; + const mileage = calculateShipmentMileage(shipment); + return mileage; + }, + }, + { + accessorKey: "bol", + header: ({ column }) => ( + + ), + }, + commonColumns.createdAt, + ]; +} diff --git a/ui/src/app/shipment/_components/shipment-table.tsx b/ui/src/app/shipment/_components/shipment-table.tsx new file mode 100644 index 000000000..e040d204c --- /dev/null +++ b/ui/src/app/shipment/_components/shipment-table.tsx @@ -0,0 +1,25 @@ +import { DataTable } from "@/components/data-table/data-table"; +import { Shipment } from "@/types/shipment"; +import { useMemo } from "react"; +import { getColumns } from "./shipment-columns"; + +export default function ShipmentTable() { + const columns = useMemo(() => getColumns(), []); + + return ( + + name="Shipment" + link="/shipments/" + extraSearchParams={{ + includeMoveDetails: true, + includeStopDetails: true, + includeCustomerDetails: true, + }} + queryKey="shipment-list" + exportModelName="shipment" + // TableModal={CreateTractorModal} + // TableEditModal={EditTractorModal} + columns={columns} + /> + ); +} diff --git a/ui/src/app/shipment/_components/sidebar/shipment-card.tsx b/ui/src/app/shipment/_components/sidebar/shipment-card.tsx new file mode 100644 index 000000000..87bf00a61 --- /dev/null +++ b/ui/src/app/shipment/_components/sidebar/shipment-card.tsx @@ -0,0 +1,132 @@ +import { ShipmentStatusBadge } from "@/components/status-badge"; +import { Icon } from "@/components/ui/icons"; +import { InternalLink } from "@/components/ui/link"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { CustomerSchema } from "@/lib/schemas/customer-schema"; +import { LocationSchema } from "@/lib/schemas/location-schema"; +import { ShipmentLocations } from "@/lib/shipment/utils"; +import { formatLocation } from "@/lib/utils"; +import { type Shipment as ShipmentResponse } from "@/types/shipment"; +import { faSignalStream } from "@fortawesome/pro-regular-svg-icons"; +import { Timeline } from "./shipment-timeline"; + +export function ShipmentCard({ shipment }: { shipment: ShipmentResponse }) { + const { status, customer } = shipment; + const { origin } = ShipmentLocations.useLocations(shipment); + + if (!origin) { + return
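The stop helpers these columns lean on (getOriginStopInfo, getDestinationStopInfo, calculateShipmentMileage from @/lib/shipment/utils) are likewise outside this patch. The sketch below is one plausible reading, consistent with the move/stop fixtures earlier in the diff; the real utilities may differ.

// Hypothetical shapes and helpers — the real versions live in @/lib/shipment/utils
// and @/types/shipment and are not shown in this patch.
type StopType = "Pickup" | "Delivery";

interface StopInfo {
  type: StopType;
  sequence: number;
  plannedArrival: number; // unix seconds, per the fixtures
}

interface MoveInfo {
  sequence: number;
  distance?: number;
  stops?: StopInfo[];
}

interface ShipmentInfo {
  moves?: MoveInfo[];
}

// Origin: the earliest pickup across the shipment's moves.
function getOriginStopInfo(shipment: ShipmentInfo): StopInfo | undefined {
  const stops = (shipment.moves ?? []).flatMap((m) => m.stops ?? []);
  return stops
    .filter((s) => s.type === "Pickup")
    .sort((a, b) => a.sequence - b.sequence)[0];
}

// Destination: the last delivery across the shipment's moves.
function getDestinationStopInfo(shipment: ShipmentInfo): StopInfo | undefined {
  const stops = (shipment.moves ?? []).flatMap((m) => m.stops ?? []);
  const deliveries = stops
    .filter((s) => s.type === "Delivery")
    .sort((a, b) => a.sequence - b.sequence);
  return deliveries[deliveries.length - 1];
}

// Total distance: presumably the sum of the per-move distances.
function calculateShipmentMileage(shipment: ShipmentInfo): number {
  return (shipment.moves ?? []).reduce((total, m) => total + (m.distance ?? 0), 0);
}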

-

; + } + + return ( +
+
+
+ + +
+
+ +
+ +
+
+ +
+
+ ); +} + +function ProNumber({ shipment }: { shipment: ShipmentResponse }) { + return ( +
+ + {shipment.proNumber} + +
+ ); +} + +function CustomerBadge({ customer }: { customer: CustomerSchema }) { + return ( + + + + + {customer.code} + + + +

Click to view {customer.name}

+
+
+
+ ); +} + +function StopInformation({ shipment }: { shipment: ShipmentResponse }) { + const { destination, origin } = ShipmentLocations.useLocations(shipment); + + if (!origin || !destination) { + return

-

; + } + + const items = [ + { + id: "location-1", + content: ( +
+

+ {formatLocation(origin)} +

+
+ ), + }, + { + id: "location-2", + content: ( +
+

+ {formatLocation(destination)} +

+
+ ), + }, + ]; + + return ; +} + +export function LocationGeocoded({ location }: { location: LocationSchema }) { + return !location.isGeocoded ? ( + + + + + + + + + +

Origin Location Not Geocoded

+
+
+
+ ) : null; +} diff --git a/ui/src/app/shipment/_components/sidebar/shipment-filter-options.tsx b/ui/src/app/shipment/_components/sidebar/shipment-filter-options.tsx new file mode 100644 index 000000000..f265767a3 --- /dev/null +++ b/ui/src/app/shipment/_components/sidebar/shipment-filter-options.tsx @@ -0,0 +1,30 @@ +import { Button } from "@/components/ui/button"; +import { useState } from "react"; + +type TabType = "shipments" | "vehicles" | "workers" | "assets"; + +const TABS: { id: TabType; label: string }[] = [ + { id: "shipments", label: "Shipments" }, + { id: "vehicles", label: "Vehicles" }, + { id: "workers", label: "Workers" }, + { id: "assets", label: "Assets" }, +]; + +export function FilterOptions() { + const [activeTab, setActiveTab] = useState("shipments"); + + return ( +
+ {TABS.map(({ id, label }) => ( + + ))} +
+ ); +} diff --git a/ui/src/app/shipment/_components/sidebar/shipment-sidebar.tsx b/ui/src/app/shipment/_components/sidebar/shipment-sidebar.tsx new file mode 100644 index 000000000..8e63ff980 --- /dev/null +++ b/ui/src/app/shipment/_components/sidebar/shipment-sidebar.tsx @@ -0,0 +1,188 @@ +import { InputField } from "@/components/fields/input-field"; +import { SelectField } from "@/components/fields/select-field"; +import { Button } from "@/components/ui/button"; +import { Icon } from "@/components/ui/icons"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { Skeleton } from "@/components/ui/skeleton"; +import { statusChoices } from "@/lib/choices"; +import { ShipmentFilterSchema } from "@/lib/schemas/shipment-filter-schema"; +import { type Shipment as ShipmentResponse } from "@/types/shipment"; +import { + faChevronLeft, + faChevronRight, + faFilter, + faSearch, +} from "@fortawesome/pro-regular-svg-icons"; +import { useMemo } from "react"; +import { useFormContext } from "react-hook-form"; +import { ShipmentCard } from "./shipment-card"; +import { FilterOptions } from "./shipment-filter-options"; + +type ShipmentSidebarProps = { + shipments: ShipmentResponse[]; + totalCount: number; + page: number; + pageSize: number; + onPageChange: (page: number) => void; + onPageSizeChange: (pageSize: number) => void; + pageSizeOptions: readonly number[]; + isLoading: boolean; +}; + +// Define a loading shipment card component +function ShipmentCardSkeleton() { + return ( +
+
+ + +
+
+ + +
+
+ + +
+
+ ); +} + +export default function ShipmentSidebar({ + shipments, + totalCount, + page, + pageSize, + onPageChange, + onPageSizeChange, + pageSizeOptions, + isLoading = true, +}: ShipmentSidebarProps) { + const { control } = useFormContext(); + const totalPages = Math.ceil(totalCount / pageSize); + + const start = (page - 1) * pageSize + 1; + const end = Math.min(page * pageSize, totalCount); + + // const isLoading = true; + const displayData = useMemo( + () => + isLoading + ? (Array.from({ length: pageSize }, () => undefined) as ( + | ShipmentResponse + | undefined + )[]) + : shipments, + [isLoading, pageSize, shipments], + ); + + return ( +
+ {/* Header section with filters */} +
+ +
+ + } + /> + + +
+
+ + {/* Scrollable shipments list with calculated height */} +
+ +
+ {displayData.map((shipment, index) => + isLoading || !shipment ? ( + + ) : ( + + ), + )} +
+
+
+ + {/* Fixed bottom section */} +
+
+ +
+ +
+ Page {page} of {totalPages} +
+ +
+
+

+ {totalCount > 0 + ? `Showing ${start}-${end} of ${totalCount} shipments` + : "No shipments found"} +

+
+
+ ); +} diff --git a/ui/src/app/shipment/_components/sidebar/shipment-timeline.tsx b/ui/src/app/shipment/_components/sidebar/shipment-timeline.tsx new file mode 100644 index 000000000..58484cfd0 --- /dev/null +++ b/ui/src/app/shipment/_components/sidebar/shipment-timeline.tsx @@ -0,0 +1,72 @@ +import { Icon } from "@/components/ui/icons"; +import { cn } from "@/lib/utils"; +import { faLocationDot } from "@fortawesome/pro-solid-svg-icons"; +import React from "react"; + +interface TimelineItemProps { + icon?: React.ReactNode; + content: React.ReactNode; + isLast?: boolean; + className?: string; +} + +const TimelineItem: React.FC = ({ + icon, + content, + isLast = false, + className, +}) => { + return ( +
+
+ {isLast ? ( +
+ +
+ ) : ( +
+ {icon ||
} +
+ )} + {!isLast && ( + +
{content}
+
+ ); +}; + +interface TimelineProps { + items: Array<{ + id: string | number; + icon?: React.ReactNode; + content: React.ReactNode; + }>; + className?: string; +} + +export const Timeline: React.FC = ({ items, className }) => { + return ( +
+ {items.map((item, index) => ( + + ))} +
+ ); +}; diff --git a/ui/src/app/shipment/page.tsx b/ui/src/app/shipment/page.tsx new file mode 100644 index 000000000..6a3cf72df --- /dev/null +++ b/ui/src/app/shipment/page.tsx @@ -0,0 +1,137 @@ +"use no memo"; + +import { MetaTags } from "@/components/meta-tags"; +import { SuspenseLoader } from "@/components/ui/component-loader"; +import { API_URL } from "@/constants/env"; +import { ShipmentFilterSchema } from "@/lib/schemas/shipment-filter-schema"; +import { LimitOffsetResponse } from "@/types/server"; +import { type Shipment as ShipmentResponse } from "@/types/shipment"; +import { useQuery } from "@tanstack/react-query"; +import { APIProvider, Map } from "@vis.gl/react-google-maps"; +import { parseAsInteger, useQueryState } from "nuqs"; +import { useCallback, useTransition } from "react"; +import { FormProvider, useForm } from "react-hook-form"; +import ShipmentSidebar from "./_components/sidebar/shipment-sidebar"; + +const DEFAULT_PAGE_SIZE = 10; +const PAGE_SIZE_OPTIONS = [10, 25, 50] as const; + +const searchParams = { + page: parseAsInteger.withDefault(1), + pageSize: parseAsInteger.withDefault(DEFAULT_PAGE_SIZE), +}; + +type ShipmentQueryParams = { + pageIndex: number; + pageSize: number; + expandShipmentDetails: boolean; +}; + +function fetchShipments(queryParams: ShipmentQueryParams) { + const fetchURL = new URL(`${API_URL}/shipments/`); + fetchURL.searchParams.set("limit", queryParams.pageSize.toString()); + fetchURL.searchParams.set( + "offset", + (queryParams.pageIndex * queryParams.pageSize).toString(), + ); + fetchURL.searchParams.set( + "expandShipmentDetails", + queryParams.expandShipmentDetails.toString(), + ); + + return useQuery>({ + queryKey: ["shipments", fetchURL.href, queryParams], + queryFn: async () => { + const response = await fetch(fetchURL.href, { + credentials: "include", + }); + return response.json(); + }, + }); +} + +export function Shipment() { + const center = { lat: 39.8283, lng: -98.5795 }; // Center of continental US + const [isTransitioning, startTransition] = useTransition(); + + const [page, setPage] = useQueryState( + "page", + searchParams.page.withOptions({ + startTransition, + shallow: false, + }), + ); + + const [pageSize, setPageSize] = useQueryState( + "pageSize", + searchParams.pageSize.withOptions({ + startTransition, + shallow: false, + }), + ); + + const form = useForm(); + + const { data, isLoading } = fetchShipments({ + pageIndex: (page ?? 1) - 1, + pageSize: pageSize ?? DEFAULT_PAGE_SIZE, + expandShipmentDetails: true, + }); + + const handlePageChange = useCallback( + (page: number) => { + startTransition(() => { + setPage(page); + }); + }, + [setPage, startTransition], + ); + + const handlePageSizeChange = useCallback( + (pageSize: number) => { + startTransition(() => { + setPage(1); + setPageSize(pageSize); + }); + }, + [setPage, setPageSize, startTransition], + ); + + return ( + <> + + + +
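For reference, the pagination arithmetic shared by fetchShipments and ShipmentSidebar reduces to a few lines; the sketch below restates it outside of React (the names are illustrative only, the numbers come straight from the code above).

// Illustrative restatement of the pagination math used by fetchShipments and
// ShipmentSidebar; the page state itself comes from the nuqs query params.
interface PageState {
  page: number; // 1-based, from the ?page= query param
  pageSize: number; // from ?pageSize=
  totalCount: number; // returned by the API
}

function toLimitOffset({ page, pageSize }: PageState): { limit: number; offset: number } {
  // fetchShipments receives a 0-based pageIndex, i.e. page - 1.
  return { limit: pageSize, offset: (page - 1) * pageSize };
}

function toDisplayRange({ page, pageSize, totalCount }: PageState) {
  const totalPages = Math.ceil(totalCount / pageSize);
  const start = (page - 1) * pageSize + 1;
  const end = Math.min(page * pageSize, totalCount);
  return { totalPages, start, end }; // e.g. "Showing 11-20 of 43 shipments"
}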
+
+ + + +
+
+ + + +
+
+
+
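The map half of the page is built on @vis.gl/react-google-maps, with supercluster and @googlemaps/markerclusterer added for clustering later. A minimal sketch of plotting geocoded stops as plain markers follows — the lat/lng fields and the API key wiring are assumptions, not confirmed by this patch.

// Minimal sketch only — assumes an API key prop and a lat/lng pair per stop,
// neither of which is defined in this hunk.
import { APIProvider, Map, Marker } from "@vis.gl/react-google-maps";

interface StopMarker {
  id: string;
  position: { lat: number; lng: number };
}

export function ShipmentMapSketch({
  stops,
  apiKey,
}: {
  stops: StopMarker[];
  apiKey: string;
}) {
  return (
    <APIProvider apiKey={apiKey}>
      <Map
        defaultCenter={{ lat: 39.8283, lng: -98.5795 }} // same continental-US center as page.tsx
        defaultZoom={4}
      >
        {stops.map((stop) => (
          <Marker key={stop.id} position={stop.position} />
        ))}
      </Map>
    </APIProvider>
  );
}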
+ + ); +} diff --git a/ui/src/app/tractor/_components/tractor-columns.tsx b/ui/src/app/tractor/_components/tractor-columns.tsx index 812eb7176..0c4cb3124 100644 --- a/ui/src/app/tractor/_components/tractor-columns.tsx +++ b/ui/src/app/tractor/_components/tractor-columns.tsx @@ -43,6 +43,16 @@ export function getColumns(): ColumnDef[] { }, }, ), + createEntityRefColumn( + columnHelper, + "equipmentManufacturer", + { + basePath: "/equipment/configurations/equipment-manufacturers", + getId: (equipManufacturer) => equipManufacturer.id, + getDisplayText: (equipManufacturer) => equipManufacturer.name, + getHeaderText: "Equipment Manufacturer", + }, + ), createEntityRefColumn( columnHelper, "primaryWorker", @@ -61,5 +71,15 @@ export function getColumns(): ColumnDef[] { : null, }, ), + createEntityRefColumn(columnHelper, "fleetCode", { + basePath: "/dispatch/configurations/fleet-codes", + getId: (fleetCode) => fleetCode.id, + getDisplayText: (fleetCode) => fleetCode.name, + getHeaderText: "Fleet Code", + color: { + getColor: (fleetCode) => fleetCode.color, + }, + }), + commonColumns.createdAt, ]; } diff --git a/ui/src/app/tractor/_components/tractor-form.tsx b/ui/src/app/tractor/_components/tractor-form.tsx index 9b972c8ff..ad8fc26bb 100644 --- a/ui/src/app/tractor/_components/tractor-form.tsx +++ b/ui/src/app/tractor/_components/tractor-form.tsx @@ -87,6 +87,7 @@ export function TractorForm() { label="Fleet Code" placeholder="Fleet Code" description="Select the fleet code of the tractor" + hasPermission hasPopoutWindow popoutLink="/dispatch/configurations/fleet-codes/" popoutLinkLabel="Fleet Code" @@ -101,6 +102,7 @@ export function TractorForm() { label="Equipment Type" placeholder="Equipment Type" description="Select the equipment type of the tractor" + hasPermission hasPopoutWindow popoutLink="/equipment/configurations/equipment-types/" popoutLinkLabel="Equipment Type" @@ -115,9 +117,10 @@ export function TractorForm() { label="Equipment Manufacturer" placeholder="Equipment Manufacturer" description="Select the equipment manufacturer of the tractor" + hasPermission hasPopoutWindow popoutLink="/equipment/configurations/equipment-manufacturers/" - popoutLinkLabel="Equipment Manufacturer" + popoutLinkLabel="Equip Manu." 
/> @@ -129,6 +132,7 @@ export function TractorForm() { label="Primary Worker" placeholder="Primary Worker" description="Select the primary worker of the tractor" + hasPermission hasPopoutWindow popoutLink="/dispatch/configurations/workers/" popoutLinkLabel="Worker" @@ -142,6 +146,7 @@ export function TractorForm() { label="Secondary Worker" placeholder="Secondary Worker" description="Select the secondary worker of the tractor" + hasPermission hasPopoutWindow popoutLink="/dispatch/configurations/workers/" popoutLinkLabel="Worker" diff --git a/ui/src/app/tractor/_components/tractor-table.tsx b/ui/src/app/tractor/_components/tractor-table.tsx index d9fde0d5f..93bfa2910 100644 --- a/ui/src/app/tractor/_components/tractor-table.tsx +++ b/ui/src/app/tractor/_components/tractor-table.tsx @@ -15,6 +15,7 @@ export default function TractorTable() { extraSearchParams={{ includeWorkerDetails: true, includeEquipmentDetails: true, + includeFleetDetails: true, }} queryKey="tractor-list" exportModelName="tractor" diff --git a/ui/src/app/trailers/_components/trailer-columns.tsx b/ui/src/app/trailers/_components/trailer-columns.tsx index 148ff1534..af8ecaf47 100644 --- a/ui/src/app/trailers/_components/trailer-columns.tsx +++ b/ui/src/app/trailers/_components/trailer-columns.tsx @@ -54,6 +54,15 @@ export function getColumns(): ColumnDef[] { getHeaderText: "Equipment Manufacturer", }, ), + createEntityRefColumn(columnHelper, "fleetCode", { + basePath: "/dispatch/configurations/fleet-codes", + getId: (fleetCode) => fleetCode.id, + getDisplayText: (fleetCode) => fleetCode.name, + getHeaderText: "Fleet Code", + color: { + getColor: (fleetCode) => fleetCode.color, + }, + }), { accessorKey: "lastInspectionDate", header: ({ column }) => ( @@ -64,5 +73,6 @@ export function getColumns(): ColumnDef[] { return ; }, }, + commonColumns.createdAt, ]; } diff --git a/ui/src/app/trailers/_components/trailer-form.tsx b/ui/src/app/trailers/_components/trailer-form.tsx index a8be8956d..db8dcb184 100644 --- a/ui/src/app/trailers/_components/trailer-form.tsx +++ b/ui/src/app/trailers/_components/trailer-form.tsx @@ -46,6 +46,7 @@ function GeneralInformationSection({ label="Equipment Type" placeholder="Equipment Type" description="The type of equipment the trailer is categorized under." + hasPermission hasPopoutWindow popoutLink="/equipment/configurations/equipment-types/" popoutLinkLabel="Equipment Type" @@ -60,6 +61,7 @@ function GeneralInformationSection({ label="Equipment Manufacturer" placeholder="Equipment Manufacturer" description="The manufacturer of the trailer's equipment." + hasPermission hasPopoutWindow popoutLink="/equipment/configurations/equipment-manufacturers/" popoutLinkLabel="Equipment Manufacturer" @@ -111,6 +113,7 @@ function GeneralInformationSection({ label="Fleet Code" placeholder="Fleet Code" description="The fleet code associated with the trailer." 
+ hasPermission hasPopoutWindow popoutLink="/dispatch/configurations/fleet-codes/" popoutLinkLabel="Fleet Code" diff --git a/ui/src/app/trailers/_components/trailer-table.tsx b/ui/src/app/trailers/_components/trailer-table.tsx index 84c54ef71..839d6b253 100644 --- a/ui/src/app/trailers/_components/trailer-table.tsx +++ b/ui/src/app/trailers/_components/trailer-table.tsx @@ -14,6 +14,7 @@ export default function TrailerTable() { link="/trailers/" extraSearchParams={{ includeEquipmentDetails: true, + includeFleetDetails: true, }} queryKey="trailer-list" exportModelName="trailer" diff --git a/ui/src/app/workers/_components/workers-table-columns.tsx b/ui/src/app/workers/_components/workers-table-columns.tsx index b7112b9f4..510c26c1b 100644 --- a/ui/src/app/workers/_components/workers-table-columns.tsx +++ b/ui/src/app/workers/_components/workers-table-columns.tsx @@ -78,5 +78,6 @@ export function getColumns(): ColumnDef[] { ); }, }, + commonColumns.createdAt, ]; } diff --git a/ui/src/components/data-table/_components/data-table-column-header.tsx b/ui/src/components/data-table/_components/data-table-column-header.tsx index 894705762..0cc44936c 100644 --- a/ui/src/components/data-table/_components/data-table-column-header.tsx +++ b/ui/src/components/data-table/_components/data-table-column-header.tsx @@ -8,11 +8,18 @@ import { SelectItem, SelectTrigger, } from "@/components/ui/select"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; import { cn } from "@/lib/utils"; import { faArrowDown, faArrowUp, faArrowUpArrowDown, + faCircleInfo, } from "@fortawesome/pro-regular-svg-icons"; import { ArrowDownIcon, ArrowUpIcon, EyeNoneIcon } from "@radix-ui/react-icons"; @@ -123,3 +130,119 @@ export function DataTableColumnHeader({
); } + +type DataTableColumnHeaderWithTooltipProps = + DataTableColumnHeaderProps & { + title: string; + tooltipContent: string; + }; + +export function DataTableColumnHeaderWithTooltip({ + title, + tooltipContent, + column, + className, +}: DataTableColumnHeaderWithTooltipProps) { + if (!column.getCanSort() && !column.getCanHide()) { + return
{title}
; + } + + const ascValue = `${column.id}-asc`; + const descValue = `${column.id}-desc`; + const hideValue = `${column.id}-hide`; + + return ( +
+ +
+ ); +} diff --git a/ui/src/components/data-table/_components/data-table-column-helpers.tsx b/ui/src/components/data-table/_components/data-table-column-helpers.tsx index bea5cc433..e1d46dd29 100644 --- a/ui/src/components/data-table/_components/data-table-column-helpers.tsx +++ b/ui/src/components/data-table/_components/data-table-column-helpers.tsx @@ -6,9 +6,40 @@ import { TooltipProvider, TooltipTrigger, } from "@/components/ui/tooltip"; +import { generateDateOnlyString, toDate } from "@/lib/date"; +import { BaseModel } from "@/types/common"; import { ColumnDef, ColumnHelper } from "@tanstack/react-table"; +import { v4 } from "uuid"; import { DataTableColumnHeader } from "./data-table-column-header"; +type EntityRefConfig = { + basePath: string; + getId: (entity: TEntity) => string | undefined; + getDisplayText: (entity: TEntity) => string; + getHeaderText?: string; + getSecondaryInfo?: ( + entity: TEntity, + parent: TParent, + ) => { + label?: string; + entity: TEntity; + displayText: string; + clickable?: boolean; + } | null; + className?: string; + color?: { + getColor: (entity: TEntity) => string | undefined; + }; +}; + +type NestedEntityRefConfig = EntityRefConfig< + TEntity, + TParent +> & { + getEntity: (parent: TParent) => TEntity | null | undefined; + columnId?: string; +}; + export function createCommonColumns>( columnHelper: ColumnHelper, ) { @@ -36,30 +67,32 @@ export function createCommonColumns>( aria-label="Select row" /> ), + size: 50, enableSorting: false, enableHiding: false, }), + createdAt: createdAtColumn(columnHelper) as ColumnDef, }; } -type EntityRefConfig = { - basePath: string; - getId: (entity: TEntity) => string | undefined; - getDisplayText: (entity: TEntity) => string; - getHeaderText?: string; - getSecondaryInfo?: ( - entity: TEntity, - parent: TParent, - ) => { - label: string; - entity: TEntity; - displayText: string; - } | null; - className?: string; - color?: { - getColor: (entity: TEntity) => string | undefined; - }; -}; +function createdAtColumn>( + columnHelper: ColumnHelper, +) { + return columnHelper.accessor( + (row) => (row.original as unknown as BaseModel).createdAt, + { + id: "createdAt", + header: "Created At", + cell: ({ row }) => { + const { createdAt } = row.original; + const date = toDate(createdAt as number); + if (!date) return

-

; + + return

{generateDateOnlyString(date)}

; + }, + }, + ); +} export function createEntityRefColumn< T extends Record, @@ -82,11 +115,7 @@ export function createEntityRefColumn< const entity = getValue(); if (!entity) { - return ( -
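toDate and generateDateOnlyString come from @/lib/date and are not shown here. Since the fixtures store created_at as Unix seconds ({{ now.Unix }}), they presumably behave roughly like the stand-ins below; this is an assumption, not the library's actual code.

// Hypothetical stand-ins for the @/lib/date helpers used by createdAtColumn.
function toDate(unixSeconds?: number | null): Date | undefined {
  if (!unixSeconds) return undefined;
  return new Date(unixSeconds * 1000); // fixtures store epoch seconds, JS Dates want ms
}

function generateDateOnlyString(date: Date): string {
  // e.g. "Feb 1, 2025" — the exact format is the real helper's choice.
  return date.toLocaleDateString("en-US", {
    year: "numeric",
    month: "short",
    day: "numeric",
  });
}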

- No {config.basePath.split("/").pop()} -

- ); + return

-

; } const id = config.getId(entity); @@ -94,6 +123,9 @@ export function createEntityRefColumn< const secondaryInfo = config.getSecondaryInfo?.(entity, row.original); const color = config.color?.getColor(entity); + // clickable should default to true unless otherwise specified + const clickable = secondaryInfo?.clickable ?? true; + return (
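// Illustrative sketch, not part of the patch: roughly how a columns file could fill
// in the EntityRefConfig shape consumed by createEntityRefColumn above. The entity
// and parent row types, the "name"/"color" fields, and the variable name are stand-ins.
//
//   const fleetCodeRefConfig: EntityRefConfig<
//     { id: string; name: string; color?: string },
//     Record<string, unknown>
//   > = {
//     basePath: "/dispatch/configurations/fleet-codes",
//     getHeaderText: "Fleet Code",
//     getId: (fleet) => fleet.id,
//     getDisplayText: (fleet) => fleet.name,
//     // optional colored dot rendered next to the display text
//     color: { getColor: (fleet) => fleet.color },
//   };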
@@ -136,25 +168,29 @@ export function createEntityRefColumn< {secondaryInfo && (
- {secondaryInfo.label}: + {secondaryInfo.label && {secondaryInfo.label}:} - - {secondaryInfo.displayText} - + {clickable ? ( + + {secondaryInfo.displayText} + + ) : ( +

{secondaryInfo.displayText}

+ )}

Click to view {secondaryInfo.displayText}

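// Illustrative sketch, not part of the patch: with the hunk above, the secondary
// line's label is optional and a config can opt out of the link by returning
// clickable: false from getSecondaryInfo. The field names below are stand-ins.
//
//   getSecondaryInfo: (entity, parent) => ({
//     label: "Fleet",
//     entity,
//     displayText: entity.code,
//     clickable: false, // rendered as plain text instead of a tooltip link
//   }),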
@@ -234,3 +270,115 @@ export function createEntityColumn>( }, }) as ColumnDef; } + +export function createNestedEntityRefColumn< + T extends Record, + TValue, +>( + columnHelper: ColumnHelper, + config: NestedEntityRefConfig, +): ColumnDef { + return columnHelper.accessor((row) => config.getEntity(row), { + id: config.columnId ?? v4(), + header: ({ column }) => ( + + ), + cell: ({ getValue, row }) => { + const entity = getValue(); + + if (!entity) { + return

-

; + } + + const id = config.getId(entity); + const displayText = config.getDisplayText(entity); + const secondaryInfo = config.getSecondaryInfo?.(entity, row.original); + const color = config.color?.getColor(entity); + + // clickable should default to true unless otherwise specified + const clickable = secondaryInfo?.clickable ?? true; + + return ( +
+ + + + + {color ? ( +
+
+

{displayText}

+
+ ) : ( + + {displayText} + + )} + + + +

Click to view {displayText}

+
+ + + + {secondaryInfo && ( +
+ {secondaryInfo.label && {secondaryInfo.label}:} + {clickable ? ( + + + + + {secondaryInfo.displayText} + + + +

Click to view {secondaryInfo.displayText}

+
+
+
+ ) : ( +

{secondaryInfo.displayText}

+ )} +
+ )} +
+ ); + }, + }) as ColumnDef; +} diff --git a/ui/src/components/data-table/_components/data-table-filter-dialog.tsx b/ui/src/components/data-table/_components/data-table-filter-dialog.tsx index b93940ea8..89ab28ccd 100644 --- a/ui/src/components/data-table/_components/data-table-filter-dialog.tsx +++ b/ui/src/components/data-table/_components/data-table-filter-dialog.tsx @@ -11,7 +11,6 @@ import { } from "@/components/ui/dialog"; import { Form, FormControl, FormGroup } from "@/components/ui/form"; import { dataTableConfig } from "@/config/data-table"; -import { generateRowId } from "@/hooks/use-data-table-query"; import { getDefaultFilterOperator, getValidFilters } from "@/lib/data-table"; import { http } from "@/lib/http-client"; import { Status } from "@/types/common"; diff --git a/ui/src/components/data-table/_components/data-table-view-options.tsx b/ui/src/components/data-table/_components/data-table-view-options.tsx index dbaf61400..722893ee0 100644 --- a/ui/src/components/data-table/_components/data-table-view-options.tsx +++ b/ui/src/components/data-table/_components/data-table-view-options.tsx @@ -1,7 +1,6 @@ import { Button } from "@/components/ui/button"; import { Icon } from "@/components/ui/icons"; -import { Badge } from "@/components/ui/badge"; import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { @@ -11,7 +10,7 @@ import { } from "@/components/ui/popover"; import { ScrollArea } from "@/components/ui/scroll-area"; import { Switch } from "@/components/ui/switch"; -import { toSentenceCase } from "@/lib/utils"; +import { toSentenceCase, toTitleCase } from "@/lib/utils"; import { useTableStore } from "@/stores/table-store"; import { DataTableCreateButtonProps, @@ -147,13 +146,9 @@ export function DataTableViewOptions({ > View - +
{visibleColumnsCount} - +
Toggle column visibility options @@ -180,7 +175,7 @@ export function DataTableViewOptions({ htmlFor={column.id} className="flex-grow text-sm font-normal" > - {toSentenceCase(column.id)} + {toTitleCase(column.id)} ({ onCheckedChange={() => handleToggleVisibility(column.id, isVisible) } - aria-label={`Toggle ${toSentenceCase(column.id)} column`} + aria-label={`Toggle ${toTitleCase(column.id)} column`} />
 );
diff --git a/ui/src/components/data-table/data-table.tsx b/ui/src/components/data-table/data-table.tsx
index 7e6c4f67e..1ca5225e3 100644
--- a/ui/src/components/data-table/data-table.tsx
+++ b/ui/src/components/data-table/data-table.tsx
@@ -206,6 +206,11 @@ export function DataTable>({
     enableRowSelection: true,
     onRowSelectionChange: setRowSelection,
     // onSortingChange: setSorting,
+    defaultColumn: {
+      size: 200,
+      minSize: 10,
+      maxSize: 300,
+    },
     onColumnFiltersChange: setColumnFilters,
     onColumnVisibilityChange: setColumnVisibility,
     getCoreRowModel: getCoreRowModel(),
@@ -223,9 +228,9 @@ export function DataTable>({
   const isEntityError = entityQuery.error;
   return (
-
+
-
+
Put something here
@@ -238,7 +243,7 @@ export function DataTable>({ />
-
+
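// Illustrative sketch, not part of the patch: how the defaultColumn sizing added in
// the data-table.tsx hunk above behaves in @tanstack/react-table. Columns that set
// their own `size` (like the 50px select column) keep it; the rest fall back to the
// 200px default, clamped between minSize and maxSize. The row type, column ids, and
// hook name below are stand-ins.
import {
  createColumnHelper,
  getCoreRowModel,
  useReactTable,
} from "@tanstack/react-table";

type DemoRow = { proNumber: string };
const demoHelper = createColumnHelper<DemoRow>();

export function useSizedDemoTable(data: DemoRow[]) {
  return useReactTable({
    data,
    columns: [
      // no explicit size -> resolves to defaultColumn.size (200)
      demoHelper.accessor("proNumber", { header: "Pro Number" }),
      // an explicit size wins over the default
      demoHelper.display({ id: "select", size: 50 }),
    ],
    defaultColumn: { size: 200, minSize: 10, maxSize: 300 },
    getCoreRowModel: getCoreRowModel(),
  });
}
// header.getSize() / column.getSize() then return the resolved widths in header cells.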
diff --git a/ui/src/components/fields/select-components.tsx b/ui/src/components/fields/select-components.tsx index 409596d1b..54e6c1f07 100644 --- a/ui/src/components/fields/select-components.tsx +++ b/ui/src/components/fields/select-components.tsx @@ -189,7 +189,7 @@ export function IndicatorSeparator(props: IndicatorSeparatorProps) { export function ClearIndicator(props: ClearIndicatorProps) { return ( - + ); } diff --git a/ui/src/components/fields/select-field.tsx b/ui/src/components/fields/select-field.tsx index 94ed547ff..f2072008f 100644 --- a/ui/src/components/fields/select-field.tsx +++ b/ui/src/components/fields/select-field.tsx @@ -223,8 +223,7 @@ const ReactSelectInput = React.forwardRef( multiValueLabel: () => "text-xs leading-4", multiValueRemove: () => "hover:text-foreground/50 text-foreground rounded-md h-4 w-4", - indicatorsContainer: () => - cn("gap-1", isReadOnly && "cursor-not-allowed"), + indicatorsContainer: () => cn(isReadOnly && "cursor-not-allowed"), clearIndicator: () => "text-foreground/50 hover:text-foreground", dropdownIndicator: () => "p-1 text-foreground/50 rounded-md hover:text-foreground", @@ -263,6 +262,7 @@ export function SelectField({ isReadOnly, isMulti, isLoading, + isClearable, isFetchError, placeholder, menuPlacement, @@ -296,6 +296,7 @@ export function SelectField({ ref={ref} name={name} isMulti={isMulti} + isClearable={isClearable} onChange={onChange} placeholder={placeholder} onBlur={onBlur} diff --git a/ui/src/components/providers.tsx b/ui/src/components/providers.tsx index d1edb7b3d..9bd6194e7 100644 --- a/ui/src/components/providers.tsx +++ b/ui/src/components/providers.tsx @@ -1,6 +1,6 @@ import { APIError } from "@/types/errors"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; -import { ReactQueryDevtools } from "@tanstack/react-query-devtools"; +// import { ReactQueryDevtools } from "@tanstack/react-query-devtools"; import { NuqsAdapter } from "nuqs/adapters/react-router"; import { HelmetProvider } from "react-helmet-async"; import { ThemeProvider } from "./theme-provider"; @@ -28,7 +28,7 @@ export function Providers({ children }: { children: React.ReactNode }) { - + {/* */} {children} diff --git a/ui/src/components/status-badge.tsx b/ui/src/components/status-badge.tsx index cfc92f99c..594c8ca1c 100644 --- a/ui/src/components/status-badge.tsx +++ b/ui/src/components/status-badge.tsx @@ -2,6 +2,7 @@ import { type WorkerSchema } from "@/lib/schemas/worker-schema"; import { badgeVariants } from "@/lib/variants/badge"; import { type Status } from "@/types/common"; import { type PackingGroupChoiceProps } from "@/types/hazardous-material"; +import { ShipmentStatus } from "@/types/shipment"; import { EquipmentStatus } from "@/types/tractor"; import { type VariantProps } from "class-variance-authority"; import { Badge } from "./ui/badge"; @@ -61,6 +62,41 @@ export function EquipmentStatusBadge({ status }: { status: EquipmentStatus }) { ); } +export function ShipmentStatusBadge({ status }: { status: ShipmentStatus }) { + const statusAttributes: Record = { + [ShipmentStatus.New]: { + variant: "purple", + text: "New", + }, + [ShipmentStatus.InTransit]: { + variant: "info", + text: "In Transit", + }, + [ShipmentStatus.Delayed]: { + variant: "warning", + text: "Delayed", + }, + [ShipmentStatus.Completed]: { + variant: "active", + text: "Completed", + }, + [ShipmentStatus.Billed]: { + variant: "teal", + text: "Billed", + }, + [ShipmentStatus.Canceled]: { + variant: "inactive", + text: "Canceled", + }, + }; + + return ( + + 
{statusAttributes[status].text} + + ); +} + export function PackingGroupBadge({ group, }: { diff --git a/ui/src/components/ui/form-edit-model.tsx b/ui/src/components/ui/form-edit-model.tsx index 7bb644c26..b2966405c 100644 --- a/ui/src/components/ui/form-edit-model.tsx +++ b/ui/src/components/ui/form-edit-model.tsx @@ -201,7 +201,10 @@ export function FormEditModal({ {!isLoading && currentRecord && ( Last updated on{" "} - {formatToUserTimezone(currentRecord.updatedAt, user?.timezone)} + {formatToUserTimezone(currentRecord.updatedAt, { + timezone: user?.timezone, + timeFormat: user?.timeFormat, + })} )} diff --git a/ui/src/hooks/use-resize-observer.ts b/ui/src/hooks/use-resize-observer.ts new file mode 100644 index 000000000..44f9260dc --- /dev/null +++ b/ui/src/hooks/use-resize-observer.ts @@ -0,0 +1,17 @@ +import React, { useEffect } from "react"; + +export function useResizeObserver( + ref: React.RefObject, + callback: (entry: ResizeObserverEntry) => void, +) { + useEffect(() => { + if (!ref.current) return; + + const observer = new ResizeObserver((entries) => { + callback(entries[0]); + }); + + observer.observe(ref.current); + return () => observer.disconnect(); + }, [ref, callback]); +} diff --git a/ui/src/lib/date.ts b/ui/src/lib/date.ts index f9c31389e..0a2731ace 100644 --- a/ui/src/lib/date.ts +++ b/ui/src/lib/date.ts @@ -1,14 +1,57 @@ +import { TimeFormat } from "@/types/user"; import * as chrono from "chrono-node"; import { format, fromUnixTime } from "date-fns"; +type DateFormatOptions = { + /** + * The timezone to format the date in + * @default 'UTC' + */ + timezone?: string; + + /** + * The time format to use (12-hour or 24-hour) + * @default '24-hour' + */ + timeFormat?: TimeFormat; + + /** + * Whether to show seconds + * @default false + */ + showSeconds?: boolean; + + /** + * Whether to show the timezone name + * @default true + */ + showTimeZone?: boolean; + + /** + * Whether to show the date + * @default true + */ + showDate?: boolean; +}; + +const TIME_FORMAT_24 = "HH:mm"; +const TIME_FORMAT_24_WITH_SECONDS = "HH:mm:ss"; +const DATE_FORMAT = "MM/dd/yyyy"; +const DATE_TIME_FORMAT_24 = `${DATE_FORMAT} ${TIME_FORMAT_24}`; +const DATE_TIME_FORMAT_24_WITH_SECONDS = `${DATE_FORMAT} ${TIME_FORMAT_24_WITH_SECONDS}`; + /** * Converts a Date object to a Unix timestamp. * The timestamp represents the number of seconds since the Unix epoch (January 1, 1970, 00:00:00 UTC). * * @param date The Date object to convert. * @returns A Unix timestamp representing the input date. + * @throws {Error} If the input date is invalid */ export function dateToUnixTimestamp(date: Date): number { + if (!(date instanceof Date) || isNaN(date.getTime())) { + throw new Error("Invalid date provided to dateToUnixTimestamp"); + } return Math.floor(date.getTime() / 1000); } @@ -21,7 +64,6 @@ export function dateToUnixTimestamp(date: Date): number { export function getTodayDate(): number { const date = new Date(); date.setUTCHours(0, 0, 0, 0); - return dateToUnixTimestamp(date); } @@ -32,43 +74,57 @@ export function getTodayDate(): number { * @param unixTimeStamp The Unix timestamp to convert, or undefined. * @returns A Date object representing the timestamp, or undefined if the input is undefined. */ -export const toDate = (unixTimeStamp: number | undefined) => { - return unixTimeStamp ? 
new Date(unixTimeStamp * 1000) : undefined; +export const toDate = (unixTimeStamp: number | undefined): Date | undefined => { + if (!unixTimeStamp || isNaN(unixTimeStamp)) { + return undefined; + } + const date = new Date(unixTimeStamp * 1000); + return isNaN(date.getTime()) ? undefined : date; }; /** - * Converts a Unix timestamp to a Date object. + * Converts a Date object to a Unix timestamp. * Handles undefined input gracefully. * - * @param unixTimeStamp The Unix timestamp to convert, or undefined. - * @returns A Date object representing the timestamp, or undefined if the input is undefined. + * @param date The Date object to convert, or undefined. + * @returns A Unix timestamp representing the date, or undefined if the input is undefined. */ -export const toUnixTimeStamp = (date: Date | undefined) => { - if (!date) return undefined; - - return date ? Math.floor(date.getTime() / 1000) : undefined; +export const toUnixTimeStamp = (date: Date | undefined): number | undefined => { + if (!date || !(date instanceof Date) || isNaN(date.getTime())) { + return undefined; + } + return Math.floor(date.getTime() / 1000); }; /** * Generates a date string from a Date object. - * Formats the date using date-fns in the format "MMM do yyyy". + * Formats the date using date-fns in the format "dd MMM yyyy". * * @param date The Date object to format. * @returns A formatted date string. + * @throws {Error} If the input date is invalid */ -export function generateDateOnlyString(date: Date) { - return format(date, "MMM do yyyy"); +export function generateDateOnlyString(date: Date): string { + if (!(date instanceof Date) || isNaN(date.getTime())) { + throw new Error("Invalid date provided to generateDateOnlyString"); + } + return format(date, DATE_FORMAT); } /** * Generates a Date object with the time set to midnight (00:00:00) from a date string. * Parses the input string using chrono-node and normalizes the time to midnight. + * * @param date The date string to parse. * @returns A Date object representing the parsed date at midnight, or null if parsing fails. */ -export function generateDateOnly(date: string) { +export function generateDateOnly(date: string): Date | null { + if (!date || typeof date !== "string") { + return null; + } + const parsed = chrono.parseDate(date); - if (parsed) { + if (parsed && !isNaN(parsed.getTime())) { const normalized = new Date(parsed); normalized.setHours(0, 0, 0, 0); return normalized; @@ -76,33 +132,61 @@ export function generateDateOnly(date: string) { return null; } -const dateOnlyFormatRegex = - /^(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s([1-9]|[12]\d|3[01])(st|nd|rd|th)\s\d{4}$/; +// const dateOnlyFormatRegex = +// /^(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s([1-9]|[12]\d|3[01])(st|nd|rd|th)\s\d{4}$/; -export function isValidDateOnlyFormat(dateString: string) { - return dateOnlyFormatRegex.test(dateString); +/** + * Checks if a date string matches the expected format. + * + * @param dateString The date string to validate. + * @returns True if the date string matches the format, false otherwise. + */ +export function isValidDateOnlyFormat(dateString: string): boolean { + if (!dateString || typeof dateString !== "string") { + return false; + } + try { + const date = new Date(dateString); + return !isNaN(date.getTime()) && format(date, DATE_FORMAT) === dateString; + } catch { + return false; + } } /** - * Generates a date and time string from a Date object. - * Formats the date using date-fns in the format "MMM do yyyy, hh:mm a". 
+ * Generates a date and time string from a Date object using 24-hour format. * * @param date The Date object to format. + * @param showSeconds Whether to include seconds in the output. * @returns A formatted date and time string. + * @throws {Error} If the input date is invalid */ -export function generateDateTimeString(date: Date) { - return format(date, "MMM do yyyy, hh:mm a"); +export function generateDateTimeString( + date: Date, + showSeconds = false, +): string { + if (!(date instanceof Date) || isNaN(date.getTime())) { + throw new Error("Invalid date provided to generateDateTimeString"); + } + return format( + date, + showSeconds ? DATE_TIME_FORMAT_24_WITH_SECONDS : DATE_TIME_FORMAT_24, + ); } /** * Generates a Date object from a date and time string. - * Parses the input string using chrono-node. * * @param date The date and time string to parse. * @returns A Date object representing the parsed date and time, or null if parsing fails. */ -export function generateDateTime(date: string) { - return chrono.parseDate(date); +export function generateDateTime(date: string): Date | null { + if (!date || typeof date !== "string") { + return null; + } + + const parsed = chrono.parseDate(date); + return parsed && !isNaN(parsed.getTime()) ? parsed : null; } const dateTimeFormatRegex = @@ -110,39 +194,75 @@ const dateTimeFormatRegex = /** * Checks if a date string is in a valid date and time format. - * Uses a regular expression to validate the format "MMM do yyyy, hh:mm a". * * @param dateString The date string to validate. * @returns True if the date string is in the valid format, false otherwise. */ -export function isValidDateTimeFormat(dateString: string) { +export function isValidDateTimeFormat(dateString: string): boolean { + if (!dateString || typeof dateString !== "string") { + return false; + } return dateTimeFormatRegex.test(dateString); } /** - * Formats a Unix timestamp to a date and time string in the specified timezone. - * Converts the timestamp to a Date object and formats it using the provided timezone. + * Formats a Unix timestamp to a localized date string based on user preferences * - * @param timestamp The Unix timestamp to format. - * @param timezone The timezone to format the date in - * @returns A formatted date and time string in the specified timezone, or "N/A" if the date is invalid. 
+ * @param timestamp - Unix timestamp in seconds + * @param options - Formatting options + * @returns Formatted date string */ -export function formatToUserTimezone(timestamp: number, timezone?: string) { - // Convert Unix timestamp to Date object +export function formatToUserTimezone( + timestamp: number, + options: DateFormatOptions = {}, +): string { + if (!timestamp || isNaN(timestamp)) { + return "N/A"; + } + + const { + timezone = "UTC", + showSeconds = false, + showTimeZone = true, + showDate = true, + } = options; + const date = fromUnixTime(timestamp); - // Check if the date is valid if (isNaN(date.getTime())) { return "N/A"; } - return date.toLocaleString("en-US", { - year: "numeric", - month: "2-digit", - day: "2-digit", + const formatOptions: Intl.DateTimeFormatOptions = { hour: "2-digit", minute: "2-digit", - timeZone: timezone || "UTC", - timeZoneName: "short", - }); + timeZone: timezone, + hour12: false, // Always use 24-hour format + }; + + if (showSeconds) { + formatOptions.second = "2-digit"; + } + + if (showTimeZone) { + formatOptions.timeZoneName = "short"; + } + + if (showDate) { + formatOptions.year = "numeric"; + formatOptions.month = "2-digit"; + formatOptions.day = "2-digit"; + } + + return new Intl.DateTimeFormat("en-US", formatOptions).format(date); +} + +/** + * Validates if a given value is a valid Date object + * + * @param date - Value to check + * @returns boolean indicating if the value is a valid Date + */ +export function isValidDate(date: unknown): date is Date { + return date instanceof Date && !isNaN(date.getTime()); } diff --git a/ui/src/lib/nav-links.ts b/ui/src/lib/nav-links.ts index 562b02856..48c3609e7 100644 --- a/ui/src/lib/nav-links.ts +++ b/ui/src/lib/nav-links.ts @@ -132,8 +132,8 @@ export const routes: routeInfo[] = [ tree: [ { key: "shipments", - label: "Shipment Entry", - link: "/shipments/entry", + label: "Shipments Management", + link: "/shipments/management", }, { key: "configuration-files", @@ -245,6 +245,13 @@ export const commandRoutes: CommandGroupInfo[] = [ id: "shipment-management", label: "Shipment Management", routes: [ + { + id: "shipments", + link: "/shipments/management", + label: "Shipments", + icon: faTruck, + }, + { id: "shipment-types", link: "/shipments/configurations/shipment-types", diff --git a/ui/src/lib/queries.ts b/ui/src/lib/queries.ts index 96fb02675..7483551a4 100644 --- a/ui/src/lib/queries.ts +++ b/ui/src/lib/queries.ts @@ -35,8 +35,7 @@ export const queries = createQueryKeyStore({ options: () => ({ queryKey: ["us-states/options"], queryFn: async () => { - const response = await getUsStateOptions(); - return response; + return await getUsStateOptions(); }, }), }, diff --git a/ui/src/lib/schemas/move-schema.ts b/ui/src/lib/schemas/move-schema.ts new file mode 100644 index 000000000..725431e93 --- /dev/null +++ b/ui/src/lib/schemas/move-schema.ts @@ -0,0 +1,35 @@ +import { StopStatus } from "@/types/stop"; +import { boolean, type InferType, mixed, number, object, string } from "yup"; + +export const moveSchema = object({ + id: string().optional(), + organizationId: string().nullable().optional(), + businessUnitId: string().nullable().optional(), + // * The shipment ID will be associated on the backend + shipmentID: string().optional(), + status: mixed() + .required("Status is required") + .oneOf(Object.values(StopStatus)), + primaryWorkerId: string().required("Primary Worker is required"), + secondaryWorkerId: string().optional(), + trailerId: string().optional(), + tractorId: string().optional(), + loaded: 
boolean().required("Loaded is required"), + sequence: number().required("Sequence is required"), + distance: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Distance must be a whole number") + .min(0, "Distance cannot be negative"), +}); + +export type MoveSchema = InferType; diff --git a/ui/src/lib/schemas/shipment-filter-schema.ts b/ui/src/lib/schemas/shipment-filter-schema.ts new file mode 100644 index 000000000..d719f0032 --- /dev/null +++ b/ui/src/lib/schemas/shipment-filter-schema.ts @@ -0,0 +1,11 @@ +import { ShipmentStatus } from "@/types/shipment"; +import { type InferType, mixed, object, string } from "yup"; + +export const shipmentFilterSchema = object({ + search: string().optional(), + status: mixed() + .optional() + .oneOf(Object.values(ShipmentStatus)), +}); + +export type ShipmentFilterSchema = InferType; diff --git a/ui/src/lib/schemas/shipment-schema.ts b/ui/src/lib/schemas/shipment-schema.ts new file mode 100644 index 000000000..e3629a280 --- /dev/null +++ b/ui/src/lib/schemas/shipment-schema.ts @@ -0,0 +1,153 @@ +import { RatingMethod, ShipmentStatus } from "@/types/shipment"; +import { boolean, type InferType, mixed, number, object, string } from "yup"; + +export const shipmentSchema = object({ + id: string().optional(), + organizationId: string().nullable().optional(), + businessUnitId: string().nullable().optional(), + serviceTypeId: string().required("Service Type is required"), + shipmentTypeId: string().required("Shipment Type is required"), + customerId: string().required("Customer is required"), + tractorTypeId: string() + .nullable() + .optional() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + return originalValue; + }), + trailerTypeId: string() + .nullable() + .optional() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + return originalValue; + }), + status: mixed() + .required("Status is required") + .oneOf(Object.values(ShipmentStatus)), + proNumber: string().required("Pro Number is required"), + ratingUnit: number().required("Rating Unit is required"), + ratingMethod: mixed() + .required("Rating Method is required") + .oneOf(Object.values(RatingMethod)), + otherChargeAmount: number().required("Other Charge Amount is required"), + freightChargeAmount: number().required("Freight Charge Amount is required"), + totalChargeAmount: number().required("Total Charge Amount is required"), + pieces: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Pieces must be a whole number") + .optional(), + weight: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? 
undefined : parsed; + }) + .integer("Weight must be a whole number") + .optional(), + readyToBillDate: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Ready to Bill Date must be a whole number") + .optional(), + sentToBillingDate: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Sent to Billing Date must be a whole number") + .optional(), + billDate: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Bill Date must be a whole number") + .optional(), + readyToBill: boolean().required("Ready to Bill is required"), + sentToBilling: boolean().required("Sent to Billing is required"), + billed: boolean().required("Billed is required"), + temperatureMin: number().required("Temperature Min is required"), + temperatureMax: number().required("Temperature Max is required"), + bol: string().required("BOL is required"), + actualDeliveryDate: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Actual Delivery Date must be a whole number") + .optional(), + actualShipDate: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Actual Ship Date must be a whole number") + .optional(), +}); +export type ShipmentSchema = InferType; diff --git a/ui/src/lib/schemas/stop-schema.ts b/ui/src/lib/schemas/stop-schema.ts new file mode 100644 index 000000000..8901722b2 --- /dev/null +++ b/ui/src/lib/schemas/stop-schema.ts @@ -0,0 +1,86 @@ +import { StopStatus, StopType } from "@/types/stop"; +import { type InferType, mixed, number, object, ref, string } from "yup"; + +export const stopSchema = object({ + id: string().optional(), + organizationId: string().nullable().optional(), + businessUnitId: string().nullable().optional(), + status: mixed() + .required("Status is required") + .oneOf(Object.values(StopStatus)), + type: mixed() + .required("Type is required") + .oneOf(Object.values(StopType)), + sequence: number().required("Sequence is required"), + pieces: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? 
undefined : parsed; + }) + .integer("Pieces must be a whole number") + .min(0, "Pieces cannot be negative") + .optional(), + weight: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Weight must be a whole number") + .min(0, "Weight cannot be negative") + .optional(), + plannedArrival: number() + .min(0, "Planned arrival cannot be negative") + .max(ref("plannedDeparture"), "Planned arrival must be before departure") + .required("Planned arrival is required"), + plannedDeparture: number() + .min(ref("plannedArrival"), "Planned departure must be after arrival") + .required("Planned departure is required"), + actualArrival: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Actual arrival must be a whole number") + .min(0, "Actual arrival cannot be negative") + .max(ref("actualDeparture"), "Actual arrival must be before departure") + .optional(), + actualDeparture: number() + .transform((_, originalValue) => { + if ( + originalValue === "" || + originalValue === null || + originalValue === undefined + ) { + return undefined; + } + const parsed = parseInt(originalValue, 10); + return isNaN(parsed) ? undefined : parsed; + }) + .integer("Actual departure must be a whole number") + .min(ref("actualArrival"), "Actual departure cannot be before arrival") + .optional(), + addressLine: string().required("Address line is required"), +}); + +export type StopSchema = InferType; diff --git a/ui/src/lib/shipment/utils.ts b/ui/src/lib/shipment/utils.ts new file mode 100644 index 000000000..18b9e547e --- /dev/null +++ b/ui/src/lib/shipment/utils.ts @@ -0,0 +1,146 @@ +import { Shipment } from "@/types/shipment"; +import { useMemo } from "react"; +import { LocationSchema } from "../schemas/location-schema"; + +const STOP_TYPES = { + PICKUP: "Pickup", + DELIVERY: "Delivery", +} as const; + +export function getOriginStopInfo(shipment: Shipment) { + if (!shipment.moves?.length) { + return null; + } + + const firstMove = shipment.moves[0]; + if (!firstMove.stops?.length) { + return null; + } + + for (const stop of firstMove.stops) { + if (stop.type === STOP_TYPES.PICKUP) { + return stop; + } + } + + return null; +} + +function calculateOriginLocation(shipment: Shipment) { + const originStop = getOriginStopInfo(shipment); + + if (!originStop) { + return null; + } + + if (!originStop.location) { + return null; + } + + return originStop.location; +} + +export function getDestinationStopInfo(shipment: Shipment) { + if (!shipment.moves?.length) { + return null; + } + + const { moves } = shipment; + for (let i = moves.length - 1; i >= 0; i--) { + const move = moves[i]; + if (!move.stops?.length) continue; + + for (let j = move.stops.length - 1; j >= 0; j--) { + const stop = move.stops[j]; + if (stop.type === STOP_TYPES.DELIVERY) { + return stop; + } + } + } +} + +function calculateDestinationLocation(shipment: Shipment) { + const destinationStop = getDestinationStopInfo(shipment); + + if (!destinationStop) { + return null; + } + + if (!destinationStop.location) { + return null; + } + + return destinationStop.location; +} + +const locationCache = new WeakMap< + Shipment, + { + 
origin: LocationSchema | null; + destination: LocationSchema | null; + } +>(); + +function useShipmentLocations(shipment: Shipment) { + return useMemo( + () => ({ + origin: calculateOriginLocation(shipment), + destination: calculateDestinationLocation(shipment), + }), + [shipment], + ); // React's useMemo is enough for component-level caching +} + +export const ShipmentLocations = { + useLocations: useShipmentLocations, + getOrigin: (shipment: Shipment) => { + const cached = locationCache.get(shipment); + if (cached) return cached.origin; + const result = calculateOriginLocation(shipment); + locationCache.set(shipment, { + origin: result, + destination: calculateDestinationLocation(shipment), + }); + return result; + }, + + getDestination: (shipment: Shipment) => { + const cached = locationCache.get(shipment); + if (cached) return cached.destination; + const result = calculateDestinationLocation(shipment); + locationCache.set(shipment, { + origin: calculateOriginLocation(shipment), + destination: result, + }); + return result; + }, + invalidate: (shipment: Shipment) => { + locationCache.delete(shipment); + }, +} as const; + +export function calculateShipmentMileage(shipment: Shipment) { + // First find all of the moves for the shipment + const { moves } = shipment; + if (!moves?.length) { + return 0; + } + + // Second, loop through all of the moves and sum up the distance for each move + + let totalDistance = 0; + for (const move of moves) { + const { distance } = move; + if (!distance) { + continue; + } + + if (typeof distance !== "number") { + throw new Error("Distance is not a number"); + } + + totalDistance += distance; + } + + return totalDistance; +} diff --git a/ui/src/lib/utils.ts b/ui/src/lib/utils.ts index 42de15af8..c2a3c81cb 100644 --- a/ui/src/lib/utils.ts +++ b/ui/src/lib/utils.ts @@ -1,6 +1,7 @@ import { clsx, type ClassValue } from "clsx"; import { RefObject, useEffect } from "react"; import { twMerge } from "tailwind-merge"; +import { LocationSchema } from "./schemas/location-schema"; export function cn(...inputs: ClassValue[]) { return twMerge(clsx(inputs)); @@ -111,10 +112,87 @@ export const cleanObject = (obj: Record): Record => { return cleanedObj; }; +/** + * List of words that should remain lowercase in titles + * unless they are the first or last word + */ +const LOWERCASE_WORDS = new Set([ + "a", + "an", + "the", + "and", + "but", + "or", + "for", + "nor", + "in", + "on", + "at", + "to", + "by", + "of", +]); + +/** + * Converts a string to title case with special handling for technical terms + * @param str - The input string to format + * @returns Formatted string in title case + */ export function toTitleCase(str: string): string { - return str - .split(" ") - .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()) + // First, handle technical terms and special cases + const technicalTerms: Record = { + id: "ID", + url: "URL", + uri: "URI", + api: "API", + ui: "UI", + ux: "UX", + ip: "IP", + sql: "SQL", + }; + + // Split the input string by common delimiters + const words = str + .replace(/_/g, " ") // Replace underscores with spaces + .replace(/([A-Z])/g, " $1") // Add space before capital letters + .toLowerCase() // Convert to lowercase + .replace(/\s+/g, " ") // Replace multiple spaces with single space + .trim() // Remove leading/trailing spaces + .split(" "); // Split into array of words + + return words + .map((word, index, arr) => { + // Check if it's a known technical term + if (technicalTerms[word]) { + return technicalTerms[word]; + } 
+ + // Special handling for "At" in timestamps + if ( + word === "at" && + (arr[index - 1]?.toLowerCase().includes("created") || + arr[index - 1]?.toLowerCase().includes("updated")) + ) { + return "At"; + } + + // Always capitalize first and last words + if (index === 0 || index === arr.length - 1) { + return word.charAt(0).toUpperCase() + word.slice(1); + } + + // Keep lowercase words in lowercase unless they're after a colon or period + if ( + LOWERCASE_WORDS.has(word) && + arr[index - 1]?.slice(-1) !== ":" && + arr[index - 1]?.slice(-1) !== "." + ) { + return word; + } + + // Capitalize the first letter of other words + return word.charAt(0).toUpperCase() + word.slice(1); + }) .join(" "); } @@ -159,3 +237,13 @@ export function composeEventHandlers( } }; } + +// +export function formatLocation(location: LocationSchema) { + const { state, addressLine1, addressLine2, city, postalCode } = location; + + const addressLine = addressLine1 + (addressLine2 ? `, ${addressLine2}` : ""); + const cityStateZip = `${city} ${state?.abbreviation}, ${postalCode}`; + + return `${addressLine} ${cityStateZip}`; +} diff --git a/ui/src/lib/variants/badge.ts b/ui/src/lib/variants/badge.ts index eeb27bff2..d959d947f 100644 --- a/ui/src/lib/variants/badge.ts +++ b/ui/src/lib/variants/badge.ts @@ -9,16 +9,13 @@ export const badgeVariants = cva( "border-transparent bg-primary text-primary-foreground hover:bg-primary/80", secondary: "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80", - active: - "[&_svg]:text-green-600 dark:bg-green-600/20 dark:text-green-400", - inactive: "[&_svg]:text-red-600 dark:bg-red-600/20 dark:text-red-400", - info: "[&_svg]:text-blue-600 dark:bg-blue-600/20 dark:text-blue-400", - purple: - "[&_svg]:text-purple-600 dark:bg-purple-600/20 dark:text-purple-400", - pink: "[&_svg]:text-pink-600 dark:bg-pink-600/20 dark:text-pink-400", - teal: "[&_svg]:text-teal-600 dark:bg-teal-600/20 dark:text-teal-400", - warning: - "[&_svg]:text-yellow-600 dark:bg-yellow-600/20 dark:text-yellow-400", + active: "text-green-600 bg-green-600/20 dark:text-green-400", + inactive: "text-red-600 bg-red-600/20 dark:text-red-400", + info: "text-blue-600 bg-blue-600/20 dark:text-blue-400", + purple: "text-purple-600 bg-purple-600/20 dark:text-purple-400", + pink: "text-pink-600 bg-pink-600/20 dark:text-pink-400", + teal: "text-teal-600 bg-teal-600/20 dark:text-teal-400", + warning: "text-yellow-600 bg-yellow-600/20 dark:text-yellow-400", outline: "text-muted-foreground", }, }, diff --git a/ui/src/routing/router.tsx b/ui/src/routing/router.tsx index 67defe219..e218919af 100644 --- a/ui/src/routing/router.tsx +++ b/ui/src/routing/router.tsx @@ -32,6 +32,13 @@ const routes: RouteObject[] = [ }, }, // Shipment Links + { + path: "/shipments/management", + async lazy() { + let { Shipment } = await import("@/app/shipment/page"); + return { Component: Shipment }; + }, + }, { path: "/shipments/configurations/shipment-types", async lazy() { @@ -76,6 +83,10 @@ const routes: RouteObject[] = [ let { ChargeTypes } = await import("@/app/charge-types/page"); return { Component: ChargeTypes }; }, + handle: { + crumb: "Charge Types", + title: "Charge Types", + }, }, // Dispatch Links { diff --git a/ui/src/types/fields.ts b/ui/src/types/fields.ts index b86b1f292..79d45d6fc 100644 --- a/ui/src/types/fields.ts +++ b/ui/src/types/fields.ts @@ -12,10 +12,13 @@ import { GroupBase, Props as ReactSelectProps } from "react-select"; import { type AsyncProps as ReactAsyncSelectProps } from "react-select/async"; type 
BaseInputFieldProps = Omit & { - label: string; + label?: string; description?: string; }; +export type InputFieldProps = BaseInputFieldProps & + FormControlProps; + type FormControlProps = { name: Path; control: Control; @@ -28,9 +31,6 @@ export type ColorFieldProps = { className?: string; } & FormControlProps; -export type InputFieldProps = BaseInputFieldProps & - FormControlProps; - type BaseCheckboxFieldProps = Omit & { label: string; outlined?: boolean; @@ -78,7 +78,7 @@ export type BaseSelectFieldProps = Omit< > & { onChange: (...event: any[]) => void; options: SelectOption[]; - label: string; + label?: string; description?: string; isReadOnly?: boolean; isInvalid?: boolean; @@ -93,7 +93,7 @@ export type BaseAsyncSelectFieldProps = Omit< > & { onChange: (...event: any[]) => void; link: string; - label: string; + label?: string; description?: string; isReadOnly?: boolean; isInvalid?: boolean; diff --git a/ui/src/types/move.ts b/ui/src/types/move.ts new file mode 100644 index 000000000..7a0ec68dd --- /dev/null +++ b/ui/src/types/move.ts @@ -0,0 +1,13 @@ +import { type MoveSchema } from "@/lib/schemas/move-schema"; +import { type WorkerSchema } from "@/lib/schemas/worker-schema"; +import { type Stop } from "./stop"; +import { type Tractor } from "./tractor"; +import { type Trailer } from "./trailer"; + +export type ShipmentMove = MoveSchema & { + primaryWorker: WorkerSchema; + secondaryWorker?: WorkerSchema | null; + trailer?: Trailer | null; + tractor?: Tractor | null; + stops: Stop[]; +}; diff --git a/ui/src/types/shipment.ts b/ui/src/types/shipment.ts new file mode 100644 index 000000000..9718a56f9 --- /dev/null +++ b/ui/src/types/shipment.ts @@ -0,0 +1,39 @@ +import { CustomerSchema } from "@/lib/schemas/customer-schema"; +import { EquipmentTypeSchema } from "@/lib/schemas/equipment-type-schema"; +import { type ServiceTypeSchema } from "@/lib/schemas/service-type-schema"; +import { type ShipmentSchema } from "@/lib/schemas/shipment-schema"; +import { ShipmentTypeSchema } from "@/lib/schemas/shipment-type-schema"; +import { ShipmentMove } from "./move"; + +export enum ShipmentStatus { + New = "New", + InTransit = "InTransit", + Delayed = "Delayed", + Completed = "Completed", + Billed = "Billed", + Canceled = "Canceled", +} + +export enum RatingMethod { + FlatRate = "FlatRate", + PerMile = "PerMile", + PerStop = "PerStop", + PerPound = "PerPound", + PerPallet = "PerPallet", + PerLinearFoot = "PerLinearFoot", + Other = "Other", +} + +export enum EntryMethod { + Manual = "Manual", + Electronic = "Electronic", +} + +export type Shipment = ShipmentSchema & { + serviceType: ServiceTypeSchema; + shipmentType: ShipmentTypeSchema; + customer: CustomerSchema; + tractorType?: EquipmentTypeSchema | null; + trailerType?: EquipmentTypeSchema | null; + moves: ShipmentMove[]; +}; diff --git a/ui/src/types/stop.ts b/ui/src/types/stop.ts new file mode 100644 index 000000000..fe9f1ffd1 --- /dev/null +++ b/ui/src/types/stop.ts @@ -0,0 +1,20 @@ +import { type LocationSchema } from "@/lib/schemas/location-schema"; +import { type StopSchema } from "@/lib/schemas/stop-schema"; + +export enum StopStatus { + New = "New", + InTransit = "InTransit", + Completed = "Completed", + Canceled = "Canceled", +} + +export enum StopType { + Pickup = "Pickup", + Delivery = "Delivery", + SplitPickup = "SplitPickup", + SplitDelivery = "SplitDelivery", +} + +export type Stop = StopSchema & { + location?: LocationSchema | null; +}; diff --git a/ui/vite.config.ts b/ui/vite.config.ts index 1fca84e04..b58e508a7 100644 --- 
a/ui/vite.config.ts
+++ b/ui/vite.config.ts
@@ -57,6 +57,9 @@ const vendorChunks = {
     "@fortawesome/pro-solid-svg-icons",
   ],
+  // Maps
+  "google-maps": ["@vis.gl/react-google-maps"],
+
   // Date handling
   "date-utils": ["date-fns", "chrono-node"],
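// Illustrative sketch, not part of the patch: one common way a vendorChunks map like
// the one above is wired into Vite/Rollup manual chunk splitting, so the new
// "google-maps" entry becomes its own vendor bundle. The actual wiring in
// ui/vite.config.ts is not shown in this hunk and may differ.
import { defineConfig } from "vite";

const vendorChunks: Record<string, string[]> = {
  "google-maps": ["@vis.gl/react-google-maps"],
  "date-utils": ["date-fns", "chrono-node"],
};

export default defineConfig({
  build: {
    rollupOptions: {
      output: {
        manualChunks(id) {
          // route each matching node_modules package into its named vendor chunk
          for (const [chunk, pkgs] of Object.entries(vendorChunks)) {
            if (pkgs.some((pkg) => id.includes(`node_modules/${pkg}`))) {
              return chunk;
            }
          }
        },
      },
    },
  },
});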