Spaces:
Sleeping
Sleeping
initial commit
Browse files- .gitattributes copy +2 -0
- .gitignore +174 -0
- Dockerfile +16 -0
- LICENSE +674 -0
- README.md +107 -8
- app.py +35 -0
- config.toml +17 -0
- huggingface-metadata.yaml +8 -0
- requirements.txt +11 -0
- src/.DS_Store +0 -0
- src/__init__.py +0 -0
- src/components/__init__.py +1 -0
- src/components/map_visualization.py +204 -0
- src/pages/__init__.py +8 -0
- src/pages/_about_page.py +97 -0
- src/pages/_contact_page.py +28 -0
- src/pages/_home_page.py +74 -0
- src/pages/_map_page.py +524 -0
- src/pages/_optimize_page.py +1781 -0
- src/utils/__init__.py +1 -0
- src/utils/generate_all_data.py +89 -0
- src/utils/generate_delivery_data.py +241 -0
- src/utils/generate_travel_matrix.py +327 -0
- src/utils/generate_vehicle_data.py +168 -0
.gitattributes copy
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.csv filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# C extensions
|
| 7 |
+
*.so
|
| 8 |
+
|
| 9 |
+
# Distribution / packaging
|
| 10 |
+
.Python
|
| 11 |
+
build/
|
| 12 |
+
develop-eggs/
|
| 13 |
+
dist/
|
| 14 |
+
downloads/
|
| 15 |
+
eggs/
|
| 16 |
+
.eggs/
|
| 17 |
+
lib/
|
| 18 |
+
lib64/
|
| 19 |
+
parts/
|
| 20 |
+
sdist/
|
| 21 |
+
var/
|
| 22 |
+
wheels/
|
| 23 |
+
share/python-wheels/
|
| 24 |
+
*.egg-info/
|
| 25 |
+
.installed.cfg
|
| 26 |
+
*.egg
|
| 27 |
+
MANIFEST
|
| 28 |
+
|
| 29 |
+
# PyInstaller
|
| 30 |
+
# Usually these files are written by a python script from a template
|
| 31 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 32 |
+
*.manifest
|
| 33 |
+
*.spec
|
| 34 |
+
|
| 35 |
+
# Installer logs
|
| 36 |
+
pip-log.txt
|
| 37 |
+
pip-delete-this-directory.txt
|
| 38 |
+
|
| 39 |
+
# Unit test / coverage reports
|
| 40 |
+
htmlcov/
|
| 41 |
+
.tox/
|
| 42 |
+
.nox/
|
| 43 |
+
.coverage
|
| 44 |
+
.coverage.*
|
| 45 |
+
.cache
|
| 46 |
+
nosetests.xml
|
| 47 |
+
coverage.xml
|
| 48 |
+
*.cover
|
| 49 |
+
*.py,cover
|
| 50 |
+
.hypothesis/
|
| 51 |
+
.pytest_cache/
|
| 52 |
+
cover/
|
| 53 |
+
|
| 54 |
+
# Translations
|
| 55 |
+
*.mo
|
| 56 |
+
*.pot
|
| 57 |
+
|
| 58 |
+
# Django stuff:
|
| 59 |
+
*.log
|
| 60 |
+
local_settings.py
|
| 61 |
+
db.sqlite3
|
| 62 |
+
db.sqlite3-journal
|
| 63 |
+
|
| 64 |
+
# Flask stuff:
|
| 65 |
+
instance/
|
| 66 |
+
.webassets-cache
|
| 67 |
+
|
| 68 |
+
# Scrapy stuff:
|
| 69 |
+
.scrapy
|
| 70 |
+
|
| 71 |
+
# Sphinx documentation
|
| 72 |
+
docs/_build/
|
| 73 |
+
|
| 74 |
+
# PyBuilder
|
| 75 |
+
.pybuilder/
|
| 76 |
+
target/
|
| 77 |
+
|
| 78 |
+
# Jupyter Notebook
|
| 79 |
+
.ipynb_checkpoints
|
| 80 |
+
|
| 81 |
+
# IPython
|
| 82 |
+
profile_default/
|
| 83 |
+
ipython_config.py
|
| 84 |
+
|
| 85 |
+
# pyenv
|
| 86 |
+
# For a library or package, you might want to ignore these files since the code is
|
| 87 |
+
# intended to run in multiple environments; otherwise, check them in:
|
| 88 |
+
# .python-version
|
| 89 |
+
|
| 90 |
+
# pipenv
|
| 91 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 92 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 93 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 94 |
+
# install all needed dependencies.
|
| 95 |
+
#Pipfile.lock
|
| 96 |
+
|
| 97 |
+
# UV
|
| 98 |
+
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
| 99 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 100 |
+
# commonly ignored for libraries.
|
| 101 |
+
#uv.lock
|
| 102 |
+
|
| 103 |
+
# poetry
|
| 104 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
| 105 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 106 |
+
# commonly ignored for libraries.
|
| 107 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
| 108 |
+
#poetry.lock
|
| 109 |
+
|
| 110 |
+
# pdm
|
| 111 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
| 112 |
+
#pdm.lock
|
| 113 |
+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
| 114 |
+
# in version control.
|
| 115 |
+
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
| 116 |
+
.pdm.toml
|
| 117 |
+
.pdm-python
|
| 118 |
+
.pdm-build/
|
| 119 |
+
|
| 120 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
| 121 |
+
__pypackages__/
|
| 122 |
+
|
| 123 |
+
# Celery stuff
|
| 124 |
+
celerybeat-schedule
|
| 125 |
+
celerybeat.pid
|
| 126 |
+
|
| 127 |
+
# SageMath parsed files
|
| 128 |
+
*.sage.py
|
| 129 |
+
|
| 130 |
+
# Environments
|
| 131 |
+
.env
|
| 132 |
+
.venv
|
| 133 |
+
env/
|
| 134 |
+
venv/
|
| 135 |
+
ENV/
|
| 136 |
+
env.bak/
|
| 137 |
+
venv.bak/
|
| 138 |
+
|
| 139 |
+
# Spyder project settings
|
| 140 |
+
.spyderproject
|
| 141 |
+
.spyproject
|
| 142 |
+
|
| 143 |
+
# Rope project settings
|
| 144 |
+
.ropeproject
|
| 145 |
+
|
| 146 |
+
# mkdocs documentation
|
| 147 |
+
/site
|
| 148 |
+
|
| 149 |
+
# mypy
|
| 150 |
+
.mypy_cache/
|
| 151 |
+
.dmypy.json
|
| 152 |
+
dmypy.json
|
| 153 |
+
|
| 154 |
+
# Pyre type checker
|
| 155 |
+
.pyre/
|
| 156 |
+
|
| 157 |
+
# pytype static type analyzer
|
| 158 |
+
.pytype/
|
| 159 |
+
|
| 160 |
+
# Cython debug symbols
|
| 161 |
+
cython_debug/
|
| 162 |
+
|
| 163 |
+
# PyCharm
|
| 164 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
| 165 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
| 166 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
| 167 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
| 168 |
+
#.idea/
|
| 169 |
+
|
| 170 |
+
# Ruff stuff:
|
| 171 |
+
.ruff_cache/
|
| 172 |
+
|
| 173 |
+
# PyPI configuration file
|
| 174 |
+
.pypirc
|
Dockerfile
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.9-slim
|
| 2 |
+
|
| 3 |
+
WORKDIR /app
|
| 4 |
+
|
| 5 |
+
# Copy requirements and install dependencies
|
| 6 |
+
COPY requirements.txt .
|
| 7 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 8 |
+
|
| 9 |
+
# Copy the application code
|
| 10 |
+
COPY . .
|
| 11 |
+
|
| 12 |
+
# Expose the Streamlit port
|
| 13 |
+
EXPOSE 8501
|
| 14 |
+
|
| 15 |
+
# Command to run the application
|
| 16 |
+
CMD ["streamlit", "run", "app.py", "--server.address=0.0.0.0", "--server.port=8501"]
|
LICENSE
ADDED
|
@@ -0,0 +1,674 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
GNU GENERAL PUBLIC LICENSE
|
| 2 |
+
Version 3, 29 June 2007
|
| 3 |
+
|
| 4 |
+
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
| 5 |
+
Everyone is permitted to copy and distribute verbatim copies
|
| 6 |
+
of this license document, but changing it is not allowed.
|
| 7 |
+
|
| 8 |
+
Preamble
|
| 9 |
+
|
| 10 |
+
The GNU General Public License is a free, copyleft license for
|
| 11 |
+
software and other kinds of works.
|
| 12 |
+
|
| 13 |
+
The licenses for most software and other practical works are designed
|
| 14 |
+
to take away your freedom to share and change the works. By contrast,
|
| 15 |
+
the GNU General Public License is intended to guarantee your freedom to
|
| 16 |
+
share and change all versions of a program--to make sure it remains free
|
| 17 |
+
software for all its users. We, the Free Software Foundation, use the
|
| 18 |
+
GNU General Public License for most of our software; it applies also to
|
| 19 |
+
any other work released this way by its authors. You can apply it to
|
| 20 |
+
your programs, too.
|
| 21 |
+
|
| 22 |
+
When we speak of free software, we are referring to freedom, not
|
| 23 |
+
price. Our General Public Licenses are designed to make sure that you
|
| 24 |
+
have the freedom to distribute copies of free software (and charge for
|
| 25 |
+
them if you wish), that you receive source code or can get it if you
|
| 26 |
+
want it, that you can change the software or use pieces of it in new
|
| 27 |
+
free programs, and that you know you can do these things.
|
| 28 |
+
|
| 29 |
+
To protect your rights, we need to prevent others from denying you
|
| 30 |
+
these rights or asking you to surrender the rights. Therefore, you have
|
| 31 |
+
certain responsibilities if you distribute copies of the software, or if
|
| 32 |
+
you modify it: responsibilities to respect the freedom of others.
|
| 33 |
+
|
| 34 |
+
For example, if you distribute copies of such a program, whether
|
| 35 |
+
gratis or for a fee, you must pass on to the recipients the same
|
| 36 |
+
freedoms that you received. You must make sure that they, too, receive
|
| 37 |
+
or can get the source code. And you must show them these terms so they
|
| 38 |
+
know their rights.
|
| 39 |
+
|
| 40 |
+
Developers that use the GNU GPL protect your rights with two steps:
|
| 41 |
+
(1) assert copyright on the software, and (2) offer you this License
|
| 42 |
+
giving you legal permission to copy, distribute and/or modify it.
|
| 43 |
+
|
| 44 |
+
For the developers' and authors' protection, the GPL clearly explains
|
| 45 |
+
that there is no warranty for this free software. For both users' and
|
| 46 |
+
authors' sake, the GPL requires that modified versions be marked as
|
| 47 |
+
changed, so that their problems will not be attributed erroneously to
|
| 48 |
+
authors of previous versions.
|
| 49 |
+
|
| 50 |
+
Some devices are designed to deny users access to install or run
|
| 51 |
+
modified versions of the software inside them, although the manufacturer
|
| 52 |
+
can do so. This is fundamentally incompatible with the aim of
|
| 53 |
+
protecting users' freedom to change the software. The systematic
|
| 54 |
+
pattern of such abuse occurs in the area of products for individuals to
|
| 55 |
+
use, which is precisely where it is most unacceptable. Therefore, we
|
| 56 |
+
have designed this version of the GPL to prohibit the practice for those
|
| 57 |
+
products. If such problems arise substantially in other domains, we
|
| 58 |
+
stand ready to extend this provision to those domains in future versions
|
| 59 |
+
of the GPL, as needed to protect the freedom of users.
|
| 60 |
+
|
| 61 |
+
Finally, every program is threatened constantly by software patents.
|
| 62 |
+
States should not allow patents to restrict development and use of
|
| 63 |
+
software on general-purpose computers, but in those that do, we wish to
|
| 64 |
+
avoid the special danger that patents applied to a free program could
|
| 65 |
+
make it effectively proprietary. To prevent this, the GPL assures that
|
| 66 |
+
patents cannot be used to render the program non-free.
|
| 67 |
+
|
| 68 |
+
The precise terms and conditions for copying, distribution and
|
| 69 |
+
modification follow.
|
| 70 |
+
|
| 71 |
+
TERMS AND CONDITIONS
|
| 72 |
+
|
| 73 |
+
0. Definitions.
|
| 74 |
+
|
| 75 |
+
"This License" refers to version 3 of the GNU General Public License.
|
| 76 |
+
|
| 77 |
+
"Copyright" also means copyright-like laws that apply to other kinds of
|
| 78 |
+
works, such as semiconductor masks.
|
| 79 |
+
|
| 80 |
+
"The Program" refers to any copyrightable work licensed under this
|
| 81 |
+
License. Each licensee is addressed as "you". "Licensees" and
|
| 82 |
+
"recipients" may be individuals or organizations.
|
| 83 |
+
|
| 84 |
+
To "modify" a work means to copy from or adapt all or part of the work
|
| 85 |
+
in a fashion requiring copyright permission, other than the making of an
|
| 86 |
+
exact copy. The resulting work is called a "modified version" of the
|
| 87 |
+
earlier work or a work "based on" the earlier work.
|
| 88 |
+
|
| 89 |
+
A "covered work" means either the unmodified Program or a work based
|
| 90 |
+
on the Program.
|
| 91 |
+
|
| 92 |
+
To "propagate" a work means to do anything with it that, without
|
| 93 |
+
permission, would make you directly or secondarily liable for
|
| 94 |
+
infringement under applicable copyright law, except executing it on a
|
| 95 |
+
computer or modifying a private copy. Propagation includes copying,
|
| 96 |
+
distribution (with or without modification), making available to the
|
| 97 |
+
public, and in some countries other activities as well.
|
| 98 |
+
|
| 99 |
+
To "convey" a work means any kind of propagation that enables other
|
| 100 |
+
parties to make or receive copies. Mere interaction with a user through
|
| 101 |
+
a computer network, with no transfer of a copy, is not conveying.
|
| 102 |
+
|
| 103 |
+
An interactive user interface displays "Appropriate Legal Notices"
|
| 104 |
+
to the extent that it includes a convenient and prominently visible
|
| 105 |
+
feature that (1) displays an appropriate copyright notice, and (2)
|
| 106 |
+
tells the user that there is no warranty for the work (except to the
|
| 107 |
+
extent that warranties are provided), that licensees may convey the
|
| 108 |
+
work under this License, and how to view a copy of this License. If
|
| 109 |
+
the interface presents a list of user commands or options, such as a
|
| 110 |
+
menu, a prominent item in the list meets this criterion.
|
| 111 |
+
|
| 112 |
+
1. Source Code.
|
| 113 |
+
|
| 114 |
+
The "source code" for a work means the preferred form of the work
|
| 115 |
+
for making modifications to it. "Object code" means any non-source
|
| 116 |
+
form of a work.
|
| 117 |
+
|
| 118 |
+
A "Standard Interface" means an interface that either is an official
|
| 119 |
+
standard defined by a recognized standards body, or, in the case of
|
| 120 |
+
interfaces specified for a particular programming language, one that
|
| 121 |
+
is widely used among developers working in that language.
|
| 122 |
+
|
| 123 |
+
The "System Libraries" of an executable work include anything, other
|
| 124 |
+
than the work as a whole, that (a) is included in the normal form of
|
| 125 |
+
packaging a Major Component, but which is not part of that Major
|
| 126 |
+
Component, and (b) serves only to enable use of the work with that
|
| 127 |
+
Major Component, or to implement a Standard Interface for which an
|
| 128 |
+
implementation is available to the public in source code form. A
|
| 129 |
+
"Major Component", in this context, means a major essential component
|
| 130 |
+
(kernel, window system, and so on) of the specific operating system
|
| 131 |
+
(if any) on which the executable work runs, or a compiler used to
|
| 132 |
+
produce the work, or an object code interpreter used to run it.
|
| 133 |
+
|
| 134 |
+
The "Corresponding Source" for a work in object code form means all
|
| 135 |
+
the source code needed to generate, install, and (for an executable
|
| 136 |
+
work) run the object code and to modify the work, including scripts to
|
| 137 |
+
control those activities. However, it does not include the work's
|
| 138 |
+
System Libraries, or general-purpose tools or generally available free
|
| 139 |
+
programs which are used unmodified in performing those activities but
|
| 140 |
+
which are not part of the work. For example, Corresponding Source
|
| 141 |
+
includes interface definition files associated with source files for
|
| 142 |
+
the work, and the source code for shared libraries and dynamically
|
| 143 |
+
linked subprograms that the work is specifically designed to require,
|
| 144 |
+
such as by intimate data communication or control flow between those
|
| 145 |
+
subprograms and other parts of the work.
|
| 146 |
+
|
| 147 |
+
The Corresponding Source need not include anything that users
|
| 148 |
+
can regenerate automatically from other parts of the Corresponding
|
| 149 |
+
Source.
|
| 150 |
+
|
| 151 |
+
The Corresponding Source for a work in source code form is that
|
| 152 |
+
same work.
|
| 153 |
+
|
| 154 |
+
2. Basic Permissions.
|
| 155 |
+
|
| 156 |
+
All rights granted under this License are granted for the term of
|
| 157 |
+
copyright on the Program, and are irrevocable provided the stated
|
| 158 |
+
conditions are met. This License explicitly affirms your unlimited
|
| 159 |
+
permission to run the unmodified Program. The output from running a
|
| 160 |
+
covered work is covered by this License only if the output, given its
|
| 161 |
+
content, constitutes a covered work. This License acknowledges your
|
| 162 |
+
rights of fair use or other equivalent, as provided by copyright law.
|
| 163 |
+
|
| 164 |
+
You may make, run and propagate covered works that you do not
|
| 165 |
+
convey, without conditions so long as your license otherwise remains
|
| 166 |
+
in force. You may convey covered works to others for the sole purpose
|
| 167 |
+
of having them make modifications exclusively for you, or provide you
|
| 168 |
+
with facilities for running those works, provided that you comply with
|
| 169 |
+
the terms of this License in conveying all material for which you do
|
| 170 |
+
not control copyright. Those thus making or running the covered works
|
| 171 |
+
for you must do so exclusively on your behalf, under your direction
|
| 172 |
+
and control, on terms that prohibit them from making any copies of
|
| 173 |
+
your copyrighted material outside their relationship with you.
|
| 174 |
+
|
| 175 |
+
Conveying under any other circumstances is permitted solely under
|
| 176 |
+
the conditions stated below. Sublicensing is not allowed; section 10
|
| 177 |
+
makes it unnecessary.
|
| 178 |
+
|
| 179 |
+
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
| 180 |
+
|
| 181 |
+
No covered work shall be deemed part of an effective technological
|
| 182 |
+
measure under any applicable law fulfilling obligations under article
|
| 183 |
+
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
| 184 |
+
similar laws prohibiting or restricting circumvention of such
|
| 185 |
+
measures.
|
| 186 |
+
|
| 187 |
+
When you convey a covered work, you waive any legal power to forbid
|
| 188 |
+
circumvention of technological measures to the extent such circumvention
|
| 189 |
+
is effected by exercising rights under this License with respect to
|
| 190 |
+
the covered work, and you disclaim any intention to limit operation or
|
| 191 |
+
modification of the work as a means of enforcing, against the work's
|
| 192 |
+
users, your or third parties' legal rights to forbid circumvention of
|
| 193 |
+
technological measures.
|
| 194 |
+
|
| 195 |
+
4. Conveying Verbatim Copies.
|
| 196 |
+
|
| 197 |
+
You may convey verbatim copies of the Program's source code as you
|
| 198 |
+
receive it, in any medium, provided that you conspicuously and
|
| 199 |
+
appropriately publish on each copy an appropriate copyright notice;
|
| 200 |
+
keep intact all notices stating that this License and any
|
| 201 |
+
non-permissive terms added in accord with section 7 apply to the code;
|
| 202 |
+
keep intact all notices of the absence of any warranty; and give all
|
| 203 |
+
recipients a copy of this License along with the Program.
|
| 204 |
+
|
| 205 |
+
You may charge any price or no price for each copy that you convey,
|
| 206 |
+
and you may offer support or warranty protection for a fee.
|
| 207 |
+
|
| 208 |
+
5. Conveying Modified Source Versions.
|
| 209 |
+
|
| 210 |
+
You may convey a work based on the Program, or the modifications to
|
| 211 |
+
produce it from the Program, in the form of source code under the
|
| 212 |
+
terms of section 4, provided that you also meet all of these conditions:
|
| 213 |
+
|
| 214 |
+
a) The work must carry prominent notices stating that you modified
|
| 215 |
+
it, and giving a relevant date.
|
| 216 |
+
|
| 217 |
+
b) The work must carry prominent notices stating that it is
|
| 218 |
+
released under this License and any conditions added under section
|
| 219 |
+
7. This requirement modifies the requirement in section 4 to
|
| 220 |
+
"keep intact all notices".
|
| 221 |
+
|
| 222 |
+
c) You must license the entire work, as a whole, under this
|
| 223 |
+
License to anyone who comes into possession of a copy. This
|
| 224 |
+
License will therefore apply, along with any applicable section 7
|
| 225 |
+
additional terms, to the whole of the work, and all its parts,
|
| 226 |
+
regardless of how they are packaged. This License gives no
|
| 227 |
+
permission to license the work in any other way, but it does not
|
| 228 |
+
invalidate such permission if you have separately received it.
|
| 229 |
+
|
| 230 |
+
d) If the work has interactive user interfaces, each must display
|
| 231 |
+
Appropriate Legal Notices; however, if the Program has interactive
|
| 232 |
+
interfaces that do not display Appropriate Legal Notices, your
|
| 233 |
+
work need not make them do so.
|
| 234 |
+
|
| 235 |
+
A compilation of a covered work with other separate and independent
|
| 236 |
+
works, which are not by their nature extensions of the covered work,
|
| 237 |
+
and which are not combined with it such as to form a larger program,
|
| 238 |
+
in or on a volume of a storage or distribution medium, is called an
|
| 239 |
+
"aggregate" if the compilation and its resulting copyright are not
|
| 240 |
+
used to limit the access or legal rights of the compilation's users
|
| 241 |
+
beyond what the individual works permit. Inclusion of a covered work
|
| 242 |
+
in an aggregate does not cause this License to apply to the other
|
| 243 |
+
parts of the aggregate.
|
| 244 |
+
|
| 245 |
+
6. Conveying Non-Source Forms.
|
| 246 |
+
|
| 247 |
+
You may convey a covered work in object code form under the terms
|
| 248 |
+
of sections 4 and 5, provided that you also convey the
|
| 249 |
+
machine-readable Corresponding Source under the terms of this License,
|
| 250 |
+
in one of these ways:
|
| 251 |
+
|
| 252 |
+
a) Convey the object code in, or embodied in, a physical product
|
| 253 |
+
(including a physical distribution medium), accompanied by the
|
| 254 |
+
Corresponding Source fixed on a durable physical medium
|
| 255 |
+
customarily used for software interchange.
|
| 256 |
+
|
| 257 |
+
b) Convey the object code in, or embodied in, a physical product
|
| 258 |
+
(including a physical distribution medium), accompanied by a
|
| 259 |
+
written offer, valid for at least three years and valid for as
|
| 260 |
+
long as you offer spare parts or customer support for that product
|
| 261 |
+
model, to give anyone who possesses the object code either (1) a
|
| 262 |
+
copy of the Corresponding Source for all the software in the
|
| 263 |
+
product that is covered by this License, on a durable physical
|
| 264 |
+
medium customarily used for software interchange, for a price no
|
| 265 |
+
more than your reasonable cost of physically performing this
|
| 266 |
+
conveying of source, or (2) access to copy the
|
| 267 |
+
Corresponding Source from a network server at no charge.
|
| 268 |
+
|
| 269 |
+
c) Convey individual copies of the object code with a copy of the
|
| 270 |
+
written offer to provide the Corresponding Source. This
|
| 271 |
+
alternative is allowed only occasionally and noncommercially, and
|
| 272 |
+
only if you received the object code with such an offer, in accord
|
| 273 |
+
with subsection 6b.
|
| 274 |
+
|
| 275 |
+
d) Convey the object code by offering access from a designated
|
| 276 |
+
place (gratis or for a charge), and offer equivalent access to the
|
| 277 |
+
Corresponding Source in the same way through the same place at no
|
| 278 |
+
further charge. You need not require recipients to copy the
|
| 279 |
+
Corresponding Source along with the object code. If the place to
|
| 280 |
+
copy the object code is a network server, the Corresponding Source
|
| 281 |
+
may be on a different server (operated by you or a third party)
|
| 282 |
+
that supports equivalent copying facilities, provided you maintain
|
| 283 |
+
clear directions next to the object code saying where to find the
|
| 284 |
+
Corresponding Source. Regardless of what server hosts the
|
| 285 |
+
Corresponding Source, you remain obligated to ensure that it is
|
| 286 |
+
available for as long as needed to satisfy these requirements.
|
| 287 |
+
|
| 288 |
+
e) Convey the object code using peer-to-peer transmission, provided
|
| 289 |
+
you inform other peers where the object code and Corresponding
|
| 290 |
+
Source of the work are being offered to the general public at no
|
| 291 |
+
charge under subsection 6d.
|
| 292 |
+
|
| 293 |
+
A separable portion of the object code, whose source code is excluded
|
| 294 |
+
from the Corresponding Source as a System Library, need not be
|
| 295 |
+
included in conveying the object code work.
|
| 296 |
+
|
| 297 |
+
A "User Product" is either (1) a "consumer product", which means any
|
| 298 |
+
tangible personal property which is normally used for personal, family,
|
| 299 |
+
or household purposes, or (2) anything designed or sold for incorporation
|
| 300 |
+
into a dwelling. In determining whether a product is a consumer product,
|
| 301 |
+
doubtful cases shall be resolved in favor of coverage. For a particular
|
| 302 |
+
product received by a particular user, "normally used" refers to a
|
| 303 |
+
typical or common use of that class of product, regardless of the status
|
| 304 |
+
of the particular user or of the way in which the particular user
|
| 305 |
+
actually uses, or expects or is expected to use, the product. A product
|
| 306 |
+
is a consumer product regardless of whether the product has substantial
|
| 307 |
+
commercial, industrial or non-consumer uses, unless such uses represent
|
| 308 |
+
the only significant mode of use of the product.
|
| 309 |
+
|
| 310 |
+
"Installation Information" for a User Product means any methods,
|
| 311 |
+
procedures, authorization keys, or other information required to install
|
| 312 |
+
and execute modified versions of a covered work in that User Product from
|
| 313 |
+
a modified version of its Corresponding Source. The information must
|
| 314 |
+
suffice to ensure that the continued functioning of the modified object
|
| 315 |
+
code is in no case prevented or interfered with solely because
|
| 316 |
+
modification has been made.
|
| 317 |
+
|
| 318 |
+
If you convey an object code work under this section in, or with, or
|
| 319 |
+
specifically for use in, a User Product, and the conveying occurs as
|
| 320 |
+
part of a transaction in which the right of possession and use of the
|
| 321 |
+
User Product is transferred to the recipient in perpetuity or for a
|
| 322 |
+
fixed term (regardless of how the transaction is characterized), the
|
| 323 |
+
Corresponding Source conveyed under this section must be accompanied
|
| 324 |
+
by the Installation Information. But this requirement does not apply
|
| 325 |
+
if neither you nor any third party retains the ability to install
|
| 326 |
+
modified object code on the User Product (for example, the work has
|
| 327 |
+
been installed in ROM).
|
| 328 |
+
|
| 329 |
+
The requirement to provide Installation Information does not include a
|
| 330 |
+
requirement to continue to provide support service, warranty, or updates
|
| 331 |
+
for a work that has been modified or installed by the recipient, or for
|
| 332 |
+
the User Product in which it has been modified or installed. Access to a
|
| 333 |
+
network may be denied when the modification itself materially and
|
| 334 |
+
adversely affects the operation of the network or violates the rules and
|
| 335 |
+
protocols for communication across the network.
|
| 336 |
+
|
| 337 |
+
Corresponding Source conveyed, and Installation Information provided,
|
| 338 |
+
in accord with this section must be in a format that is publicly
|
| 339 |
+
documented (and with an implementation available to the public in
|
| 340 |
+
source code form), and must require no special password or key for
|
| 341 |
+
unpacking, reading or copying.
|
| 342 |
+
|
| 343 |
+
7. Additional Terms.
|
| 344 |
+
|
| 345 |
+
"Additional permissions" are terms that supplement the terms of this
|
| 346 |
+
License by making exceptions from one or more of its conditions.
|
| 347 |
+
Additional permissions that are applicable to the entire Program shall
|
| 348 |
+
be treated as though they were included in this License, to the extent
|
| 349 |
+
that they are valid under applicable law. If additional permissions
|
| 350 |
+
apply only to part of the Program, that part may be used separately
|
| 351 |
+
under those permissions, but the entire Program remains governed by
|
| 352 |
+
this License without regard to the additional permissions.
|
| 353 |
+
|
| 354 |
+
When you convey a copy of a covered work, you may at your option
|
| 355 |
+
remove any additional permissions from that copy, or from any part of
|
| 356 |
+
it. (Additional permissions may be written to require their own
|
| 357 |
+
removal in certain cases when you modify the work.) You may place
|
| 358 |
+
additional permissions on material, added by you to a covered work,
|
| 359 |
+
for which you have or can give appropriate copyright permission.
|
| 360 |
+
|
| 361 |
+
Notwithstanding any other provision of this License, for material you
|
| 362 |
+
add to a covered work, you may (if authorized by the copyright holders of
|
| 363 |
+
that material) supplement the terms of this License with terms:
|
| 364 |
+
|
| 365 |
+
a) Disclaiming warranty or limiting liability differently from the
|
| 366 |
+
terms of sections 15 and 16 of this License; or
|
| 367 |
+
|
| 368 |
+
b) Requiring preservation of specified reasonable legal notices or
|
| 369 |
+
author attributions in that material or in the Appropriate Legal
|
| 370 |
+
Notices displayed by works containing it; or
|
| 371 |
+
|
| 372 |
+
c) Prohibiting misrepresentation of the origin of that material, or
|
| 373 |
+
requiring that modified versions of such material be marked in
|
| 374 |
+
reasonable ways as different from the original version; or
|
| 375 |
+
|
| 376 |
+
d) Limiting the use for publicity purposes of names of licensors or
|
| 377 |
+
authors of the material; or
|
| 378 |
+
|
| 379 |
+
e) Declining to grant rights under trademark law for use of some
|
| 380 |
+
trade names, trademarks, or service marks; or
|
| 381 |
+
|
| 382 |
+
f) Requiring indemnification of licensors and authors of that
|
| 383 |
+
material by anyone who conveys the material (or modified versions of
|
| 384 |
+
it) with contractual assumptions of liability to the recipient, for
|
| 385 |
+
any liability that these contractual assumptions directly impose on
|
| 386 |
+
those licensors and authors.
|
| 387 |
+
|
| 388 |
+
All other non-permissive additional terms are considered "further
|
| 389 |
+
restrictions" within the meaning of section 10. If the Program as you
|
| 390 |
+
received it, or any part of it, contains a notice stating that it is
|
| 391 |
+
governed by this License along with a term that is a further
|
| 392 |
+
restriction, you may remove that term. If a license document contains
|
| 393 |
+
a further restriction but permits relicensing or conveying under this
|
| 394 |
+
License, you may add to a covered work material governed by the terms
|
| 395 |
+
of that license document, provided that the further restriction does
|
| 396 |
+
not survive such relicensing or conveying.
|
| 397 |
+
|
| 398 |
+
If you add terms to a covered work in accord with this section, you
|
| 399 |
+
must place, in the relevant source files, a statement of the
|
| 400 |
+
additional terms that apply to those files, or a notice indicating
|
| 401 |
+
where to find the applicable terms.
|
| 402 |
+
|
| 403 |
+
Additional terms, permissive or non-permissive, may be stated in the
|
| 404 |
+
form of a separately written license, or stated as exceptions;
|
| 405 |
+
the above requirements apply either way.
|
| 406 |
+
|
| 407 |
+
8. Termination.
|
| 408 |
+
|
| 409 |
+
You may not propagate or modify a covered work except as expressly
|
| 410 |
+
provided under this License. Any attempt otherwise to propagate or
|
| 411 |
+
modify it is void, and will automatically terminate your rights under
|
| 412 |
+
this License (including any patent licenses granted under the third
|
| 413 |
+
paragraph of section 11).
|
| 414 |
+
|
| 415 |
+
However, if you cease all violation of this License, then your
|
| 416 |
+
license from a particular copyright holder is reinstated (a)
|
| 417 |
+
provisionally, unless and until the copyright holder explicitly and
|
| 418 |
+
finally terminates your license, and (b) permanently, if the copyright
|
| 419 |
+
holder fails to notify you of the violation by some reasonable means
|
| 420 |
+
prior to 60 days after the cessation.
|
| 421 |
+
|
| 422 |
+
Moreover, your license from a particular copyright holder is
|
| 423 |
+
reinstated permanently if the copyright holder notifies you of the
|
| 424 |
+
violation by some reasonable means, this is the first time you have
|
| 425 |
+
received notice of violation of this License (for any work) from that
|
| 426 |
+
copyright holder, and you cure the violation prior to 30 days after
|
| 427 |
+
your receipt of the notice.
|
| 428 |
+
|
| 429 |
+
Termination of your rights under this section does not terminate the
|
| 430 |
+
licenses of parties who have received copies or rights from you under
|
| 431 |
+
this License. If your rights have been terminated and not permanently
|
| 432 |
+
reinstated, you do not qualify to receive new licenses for the same
|
| 433 |
+
material under section 10.
|
| 434 |
+
|
| 435 |
+
9. Acceptance Not Required for Having Copies.
|
| 436 |
+
|
| 437 |
+
You are not required to accept this License in order to receive or
|
| 438 |
+
run a copy of the Program. Ancillary propagation of a covered work
|
| 439 |
+
occurring solely as a consequence of using peer-to-peer transmission
|
| 440 |
+
to receive a copy likewise does not require acceptance. However,
|
| 441 |
+
nothing other than this License grants you permission to propagate or
|
| 442 |
+
modify any covered work. These actions infringe copyright if you do
|
| 443 |
+
not accept this License. Therefore, by modifying or propagating a
|
| 444 |
+
covered work, you indicate your acceptance of this License to do so.
|
| 445 |
+
|
| 446 |
+
10. Automatic Licensing of Downstream Recipients.
|
| 447 |
+
|
| 448 |
+
Each time you convey a covered work, the recipient automatically
|
| 449 |
+
receives a license from the original licensors, to run, modify and
|
| 450 |
+
propagate that work, subject to this License. You are not responsible
|
| 451 |
+
for enforcing compliance by third parties with this License.
|
| 452 |
+
|
| 453 |
+
An "entity transaction" is a transaction transferring control of an
|
| 454 |
+
organization, or substantially all assets of one, or subdividing an
|
| 455 |
+
organization, or merging organizations. If propagation of a covered
|
| 456 |
+
work results from an entity transaction, each party to that
|
| 457 |
+
transaction who receives a copy of the work also receives whatever
|
| 458 |
+
licenses to the work the party's predecessor in interest had or could
|
| 459 |
+
give under the previous paragraph, plus a right to possession of the
|
| 460 |
+
Corresponding Source of the work from the predecessor in interest, if
|
| 461 |
+
the predecessor has it or can get it with reasonable efforts.
|
| 462 |
+
|
| 463 |
+
You may not impose any further restrictions on the exercise of the
|
| 464 |
+
rights granted or affirmed under this License. For example, you may
|
| 465 |
+
not impose a license fee, royalty, or other charge for exercise of
|
| 466 |
+
rights granted under this License, and you may not initiate litigation
|
| 467 |
+
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
| 468 |
+
any patent claim is infringed by making, using, selling, offering for
|
| 469 |
+
sale, or importing the Program or any portion of it.
|
| 470 |
+
|
| 471 |
+
11. Patents.
|
| 472 |
+
|
| 473 |
+
A "contributor" is a copyright holder who authorizes use under this
|
| 474 |
+
License of the Program or a work on which the Program is based. The
|
| 475 |
+
work thus licensed is called the contributor's "contributor version".
|
| 476 |
+
|
| 477 |
+
A contributor's "essential patent claims" are all patent claims
|
| 478 |
+
owned or controlled by the contributor, whether already acquired or
|
| 479 |
+
hereafter acquired, that would be infringed by some manner, permitted
|
| 480 |
+
by this License, of making, using, or selling its contributor version,
|
| 481 |
+
but do not include claims that would be infringed only as a
|
| 482 |
+
consequence of further modification of the contributor version. For
|
| 483 |
+
purposes of this definition, "control" includes the right to grant
|
| 484 |
+
patent sublicenses in a manner consistent with the requirements of
|
| 485 |
+
this License.
|
| 486 |
+
|
| 487 |
+
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
| 488 |
+
patent license under the contributor's essential patent claims, to
|
| 489 |
+
make, use, sell, offer for sale, import and otherwise run, modify and
|
| 490 |
+
propagate the contents of its contributor version.
|
| 491 |
+
|
| 492 |
+
In the following three paragraphs, a "patent license" is any express
|
| 493 |
+
agreement or commitment, however denominated, not to enforce a patent
|
| 494 |
+
(such as an express permission to practice a patent or covenant not to
|
| 495 |
+
sue for patent infringement). To "grant" such a patent license to a
|
| 496 |
+
party means to make such an agreement or commitment not to enforce a
|
| 497 |
+
patent against the party.
|
| 498 |
+
|
| 499 |
+
If you convey a covered work, knowingly relying on a patent license,
|
| 500 |
+
and the Corresponding Source of the work is not available for anyone
|
| 501 |
+
to copy, free of charge and under the terms of this License, through a
|
| 502 |
+
publicly available network server or other readily accessible means,
|
| 503 |
+
then you must either (1) cause the Corresponding Source to be so
|
| 504 |
+
available, or (2) arrange to deprive yourself of the benefit of the
|
| 505 |
+
patent license for this particular work, or (3) arrange, in a manner
|
| 506 |
+
consistent with the requirements of this License, to extend the patent
|
| 507 |
+
license to downstream recipients. "Knowingly relying" means you have
|
| 508 |
+
actual knowledge that, but for the patent license, your conveying the
|
| 509 |
+
covered work in a country, or your recipient's use of the covered work
|
| 510 |
+
in a country, would infringe one or more identifiable patents in that
|
| 511 |
+
country that you have reason to believe are valid.
|
| 512 |
+
|
| 513 |
+
If, pursuant to or in connection with a single transaction or
|
| 514 |
+
arrangement, you convey, or propagate by procuring conveyance of, a
|
| 515 |
+
covered work, and grant a patent license to some of the parties
|
| 516 |
+
receiving the covered work authorizing them to use, propagate, modify
|
| 517 |
+
or convey a specific copy of the covered work, then the patent license
|
| 518 |
+
you grant is automatically extended to all recipients of the covered
|
| 519 |
+
work and works based on it.
|
| 520 |
+
|
| 521 |
+
A patent license is "discriminatory" if it does not include within
|
| 522 |
+
the scope of its coverage, prohibits the exercise of, or is
|
| 523 |
+
conditioned on the non-exercise of one or more of the rights that are
|
| 524 |
+
specifically granted under this License. You may not convey a covered
|
| 525 |
+
work if you are a party to an arrangement with a third party that is
|
| 526 |
+
in the business of distributing software, under which you make payment
|
| 527 |
+
to the third party based on the extent of your activity of conveying
|
| 528 |
+
the work, and under which the third party grants, to any of the
|
| 529 |
+
parties who would receive the covered work from you, a discriminatory
|
| 530 |
+
patent license (a) in connection with copies of the covered work
|
| 531 |
+
conveyed by you (or copies made from those copies), or (b) primarily
|
| 532 |
+
for and in connection with specific products or compilations that
|
| 533 |
+
contain the covered work, unless you entered into that arrangement,
|
| 534 |
+
or that patent license was granted, prior to 28 March 2007.
|
| 535 |
+
|
| 536 |
+
Nothing in this License shall be construed as excluding or limiting
|
| 537 |
+
any implied license or other defenses to infringement that may
|
| 538 |
+
otherwise be available to you under applicable patent law.
|
| 539 |
+
|
| 540 |
+
12. No Surrender of Others' Freedom.
|
| 541 |
+
|
| 542 |
+
If conditions are imposed on you (whether by court order, agreement or
|
| 543 |
+
otherwise) that contradict the conditions of this License, they do not
|
| 544 |
+
excuse you from the conditions of this License. If you cannot convey a
|
| 545 |
+
covered work so as to satisfy simultaneously your obligations under this
|
| 546 |
+
License and any other pertinent obligations, then as a consequence you may
|
| 547 |
+
not convey it at all. For example, if you agree to terms that obligate you
|
| 548 |
+
to collect a royalty for further conveying from those to whom you convey
|
| 549 |
+
the Program, the only way you could satisfy both those terms and this
|
| 550 |
+
License would be to refrain entirely from conveying the Program.
|
| 551 |
+
|
| 552 |
+
13. Use with the GNU Affero General Public License.
|
| 553 |
+
|
| 554 |
+
Notwithstanding any other provision of this License, you have
|
| 555 |
+
permission to link or combine any covered work with a work licensed
|
| 556 |
+
under version 3 of the GNU Affero General Public License into a single
|
| 557 |
+
combined work, and to convey the resulting work. The terms of this
|
| 558 |
+
License will continue to apply to the part which is the covered work,
|
| 559 |
+
but the special requirements of the GNU Affero General Public License,
|
| 560 |
+
section 13, concerning interaction through a network will apply to the
|
| 561 |
+
combination as such.
|
| 562 |
+
|
| 563 |
+
14. Revised Versions of this License.
|
| 564 |
+
|
| 565 |
+
The Free Software Foundation may publish revised and/or new versions of
|
| 566 |
+
the GNU General Public License from time to time. Such new versions will
|
| 567 |
+
be similar in spirit to the present version, but may differ in detail to
|
| 568 |
+
address new problems or concerns.
|
| 569 |
+
|
| 570 |
+
Each version is given a distinguishing version number. If the
|
| 571 |
+
Program specifies that a certain numbered version of the GNU General
|
| 572 |
+
Public License "or any later version" applies to it, you have the
|
| 573 |
+
option of following the terms and conditions either of that numbered
|
| 574 |
+
version or of any later version published by the Free Software
|
| 575 |
+
Foundation. If the Program does not specify a version number of the
|
| 576 |
+
GNU General Public License, you may choose any version ever published
|
| 577 |
+
by the Free Software Foundation.
|
| 578 |
+
|
| 579 |
+
If the Program specifies that a proxy can decide which future
|
| 580 |
+
versions of the GNU General Public License can be used, that proxy's
|
| 581 |
+
public statement of acceptance of a version permanently authorizes you
|
| 582 |
+
to choose that version for the Program.
|
| 583 |
+
|
| 584 |
+
Later license versions may give you additional or different
|
| 585 |
+
permissions. However, no additional obligations are imposed on any
|
| 586 |
+
author or copyright holder as a result of your choosing to follow a
|
| 587 |
+
later version.
|
| 588 |
+
|
| 589 |
+
15. Disclaimer of Warranty.
|
| 590 |
+
|
| 591 |
+
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
| 592 |
+
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
| 593 |
+
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
| 594 |
+
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
| 595 |
+
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
| 596 |
+
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
| 597 |
+
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
| 598 |
+
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
| 599 |
+
|
| 600 |
+
16. Limitation of Liability.
|
| 601 |
+
|
| 602 |
+
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
| 603 |
+
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
| 604 |
+
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
| 605 |
+
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
| 606 |
+
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
| 607 |
+
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
| 608 |
+
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
| 609 |
+
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
| 610 |
+
SUCH DAMAGES.
|
| 611 |
+
|
| 612 |
+
17. Interpretation of Sections 15 and 16.
|
| 613 |
+
|
| 614 |
+
If the disclaimer of warranty and limitation of liability provided
|
| 615 |
+
above cannot be given local legal effect according to their terms,
|
| 616 |
+
reviewing courts shall apply local law that most closely approximates
|
| 617 |
+
an absolute waiver of all civil liability in connection with the
|
| 618 |
+
Program, unless a warranty or assumption of liability accompanies a
|
| 619 |
+
copy of the Program in return for a fee.
|
| 620 |
+
|
| 621 |
+
END OF TERMS AND CONDITIONS
|
| 622 |
+
|
| 623 |
+
How to Apply These Terms to Your New Programs
|
| 624 |
+
|
| 625 |
+
If you develop a new program, and you want it to be of the greatest
|
| 626 |
+
possible use to the public, the best way to achieve this is to make it
|
| 627 |
+
free software which everyone can redistribute and change under these terms.
|
| 628 |
+
|
| 629 |
+
To do so, attach the following notices to the program. It is safest
|
| 630 |
+
to attach them to the start of each source file to most effectively
|
| 631 |
+
state the exclusion of warranty; and each file should have at least
|
| 632 |
+
the "copyright" line and a pointer to where the full notice is found.
|
| 633 |
+
|
| 634 |
+
<one line to give the program's name and a brief idea of what it does.>
|
| 635 |
+
Copyright (C) <year> <name of author>
|
| 636 |
+
|
| 637 |
+
This program is free software: you can redistribute it and/or modify
|
| 638 |
+
it under the terms of the GNU General Public License as published by
|
| 639 |
+
the Free Software Foundation, either version 3 of the License, or
|
| 640 |
+
(at your option) any later version.
|
| 641 |
+
|
| 642 |
+
This program is distributed in the hope that it will be useful,
|
| 643 |
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 644 |
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 645 |
+
GNU General Public License for more details.
|
| 646 |
+
|
| 647 |
+
You should have received a copy of the GNU General Public License
|
| 648 |
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
| 649 |
+
|
| 650 |
+
Also add information on how to contact you by electronic and paper mail.
|
| 651 |
+
|
| 652 |
+
If the program does terminal interaction, make it output a short
|
| 653 |
+
notice like this when it starts in an interactive mode:
|
| 654 |
+
|
| 655 |
+
<program> Copyright (C) <year> <name of author>
|
| 656 |
+
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
| 657 |
+
This is free software, and you are welcome to redistribute it
|
| 658 |
+
under certain conditions; type `show c' for details.
|
| 659 |
+
|
| 660 |
+
The hypothetical commands `show w' and `show c' should show the appropriate
|
| 661 |
+
parts of the General Public License. Of course, your program's commands
|
| 662 |
+
might be different; for a GUI interface, you would use an "about box".
|
| 663 |
+
|
| 664 |
+
You should also get your employer (if you work as a programmer) or school,
|
| 665 |
+
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
| 666 |
+
For more information on this, and how to apply and follow the GNU GPL, see
|
| 667 |
+
<https://www.gnu.org/licenses/>.
|
| 668 |
+
|
| 669 |
+
The GNU General Public License does not permit incorporating your program
|
| 670 |
+
into proprietary programs. If your program is a subroutine library, you
|
| 671 |
+
may consider it more useful to permit linking proprietary applications with
|
| 672 |
+
the library. If this is what you want to do, use the GNU Lesser General
|
| 673 |
+
Public License instead of this License. But first, please read
|
| 674 |
+
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
README.md
CHANGED
|
@@ -1,11 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
---
|
| 10 |
|
| 11 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# [Project: Delivery Route Optimization](https://huggingface.co/spaces/Jing997/DeliveryRouteOptimisation)
|
| 2 |
+
|
| 3 |
+

|
| 4 |
+
|
| 5 |
+
This project is a **Delivery Route Optimization** tool built using Streamlit. It aims to optimize delivery routes for a fleet of vehicles while considering constraints such as delivery time windows, vehicle capacity, and traffic conditions.
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
### Key Features
|
| 9 |
+
1. **Route Optimization**:
|
| 10 |
+
- Solve the **Vehicle Routing Problem (VRP)** to determine the most efficient routes for a fleet of vehicles.
|
| 11 |
+
- Incorporate constraints like:
|
| 12 |
+
- Delivery time windows.
|
| 13 |
+
- Vehicle capacity.
|
| 14 |
+
- Traffic conditions.
|
| 15 |
+
|
| 16 |
+
2. **Map Visualization**:
|
| 17 |
+
- Display optimized routes on an interactive map using **Folium**.
|
| 18 |
+
- Highlight delivery stops, start and end points, and route distances.
|
| 19 |
+
|
| 20 |
+
3. **Calendar View**:
|
| 21 |
+
- Provide a calendar-based schedule for deliveries.
|
| 22 |
+
- Allow users to view and manage delivery schedules for specific days or weeks.
|
| 23 |
+
|
| 24 |
+
4. **Real-Time Updates**:
|
| 25 |
+
- Enable real-time updates for route changes due to unexpected events (e.g., traffic congestion, vehicle breakdowns).
|
| 26 |
+
- Re-optimize routes dynamically and update the map and calendar views.
|
| 27 |
+
|
| 28 |
+
### Tools and Technologies
|
| 29 |
+
- **Python**: Core programming language for optimization and application logic.
|
| 30 |
+
- **Google OR-Tools**: Solve the Vehicle Routing Problem (VRP) with constraints.
|
| 31 |
+
- **Streamlit**: Build an interactive web application for route visualization and schedule management.
|
| 32 |
+
- **Folium**: Create interactive maps for route visualization.
|
| 33 |
+
- **Synthetic Data**: Integrate real-time traffic data for dynamic route adjustments.
|
| 34 |
+
|
| 35 |
---
|
| 36 |
+
|
| 37 |
+
## Project Structure
|
| 38 |
+
|
| 39 |
+
```
|
| 40 |
+
streamlit-app-template
|
| 41 |
+
├── src
|
| 42 |
+
│ ├── app.py # Main entry point of the Streamlit application
|
| 43 |
+
│ ├── components # Directory for reusable UI components
|
| 44 |
+
│ │ └── __init__.py
|
| 45 |
+
│ ├── pages # Directory for different pages of the application
|
| 46 |
+
│ │ └── __init__.py
|
| 47 |
+
│ ├── utils # Directory for utility functions
|
| 48 |
+
│ │ └── __init__.py
|
| 49 |
+
├── requirements.txt # List of dependencies for the application
|
| 50 |
+
├── .streamlit # Configuration settings for Streamlit
|
| 51 |
+
│ ├── config.toml
|
| 52 |
+
├── img # Folder for storing images
|
| 53 |
+
│ └── delivery_route_network.png
|
| 54 |
+
├── .gitignore # Files and directories to ignore in Git
|
| 55 |
+
├── README.md # Documentation for the project
|
| 56 |
+
└── LICENSE # Licensing information
|
| 57 |
+
```
|
| 58 |
+
|
| 59 |
---
|
| 60 |
|
| 61 |
+
## Installation
|
| 62 |
+
|
| 63 |
+
To get started with this Streamlit application template, follow these steps:
|
| 64 |
+
|
| 65 |
+
1. Clone the repository:
|
| 66 |
+
```
|
| 67 |
+
git clone https://github.com/yourusername/streamlit-app-template.git
|
| 68 |
+
cd streamlit-app-template
|
| 69 |
+
```
|
| 70 |
+
|
| 71 |
+
2. Create a virtual environment (optional but recommended):
|
| 72 |
+
```
|
| 73 |
+
python -m venv venv
|
| 74 |
+
source venv/bin/activate # On macOS/Linux
|
| 75 |
+
venv\Scripts\activate # On Windows
|
| 76 |
+
```
|
| 77 |
+
|
| 78 |
+
3. Install the required dependencies:
|
| 79 |
+
```
|
| 80 |
+
pip install -r requirements.txt
|
| 81 |
+
```
|
| 82 |
+
|
| 83 |
+
4. Run the Streamlit application:
|
| 84 |
+
```
|
| 85 |
+
streamlit run src/app.py
|
| 86 |
+
```
|
| 87 |
+
|
| 88 |
+
---
|
| 89 |
+
|
| 90 |
+
## Deployment
|
| 91 |
+
|
| 92 |
+
This application is deployed on Hugging Face Spaces and is accessible at:
|
| 93 |
+
|
| 94 |
+
🚀 [Delivery Route Optimization App](https://huggingface.co/spaces/Jing997/DeliveryRouteOptimisation)
|
| 95 |
+
|
| 96 |
+
The deployed version provides all the features of the local application without requiring any installation. You can:
|
| 97 |
+
|
| 98 |
+
- View and analyze optimized delivery routes
|
| 99 |
+
- Interact with the map visualization
|
| 100 |
+
- Explore the calendar view of scheduled deliveries
|
| 101 |
+
- Test different optimization parameters
|
| 102 |
+
- Filter deliveries by date, status, and priority
|
| 103 |
+
|
| 104 |
+
The app is automatically updated whenever changes are pushed to the connected GitHub repository.
|
| 105 |
+
|
| 106 |
+
---
|
| 107 |
+
|
| 108 |
+
## License
|
| 109 |
+
|
| 110 |
+
This project is licensed under the GNU General Public License v3.0 - see the [LICENSE](LICENSE) file for details.
|
app.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Main entry point of the Streamlit application
|
| 2 |
+
|
| 3 |
+
import streamlit as st
|
| 4 |
+
import sys
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
# Import all pages from the pages module
|
| 8 |
+
from src.pages import home_page, about_page, contact_page, map_page, optimize_page
|
| 9 |
+
|
| 10 |
+
def main():
|
| 11 |
+
st.set_page_config(
|
| 12 |
+
page_title="Delivery Route Optimization",
|
| 13 |
+
page_icon="🚚",
|
| 14 |
+
layout="wide",
|
| 15 |
+
initial_sidebar_state="expanded",
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
st.sidebar.title("Navigation")
|
| 19 |
+
|
| 20 |
+
# Sidebar navigation
|
| 21 |
+
pages = {
|
| 22 |
+
"Home": home_page,
|
| 23 |
+
"Map": map_page,
|
| 24 |
+
"Optimizer": optimize_page, # Add the new page
|
| 25 |
+
"About": about_page,
|
| 26 |
+
"Contact": contact_page
|
| 27 |
+
}
|
| 28 |
+
|
| 29 |
+
selection = st.sidebar.radio("Go to", list(pages.keys()))
|
| 30 |
+
|
| 31 |
+
# Render the selected page
|
| 32 |
+
pages[selection]()
|
| 33 |
+
|
| 34 |
+
if __name__ == "__main__":
|
| 35 |
+
main()
|
config.toml
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[general]
|
| 2 |
+
email = "jinghui.me@gmail.com"
|
| 3 |
+
# The email address for the Streamlit sharing service.
|
| 4 |
+
|
| 5 |
+
[server]
|
| 6 |
+
headless = true
|
| 7 |
+
port = 8501
|
| 8 |
+
enableCORS = false
|
| 9 |
+
# Configuration for the server, including running in headless mode and setting the port.
|
| 10 |
+
|
| 11 |
+
[theme]
|
| 12 |
+
primaryColor = "#F39C12"
|
| 13 |
+
backgroundColor = "#FFFFFF"
|
| 14 |
+
secondaryBackgroundColor = "#F0F0F0"
|
| 15 |
+
textColor = "#000000"
|
| 16 |
+
font = "sans serif"
|
| 17 |
+
# Theme settings for the Streamlit application, including colors and font.
|
huggingface-metadata.yaml
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# File: .github/huggingface-metadata.yaml
|
| 2 |
+
title: Streamlit Scheduler App
|
| 3 |
+
emoji: 🚚
|
| 4 |
+
colorFrom: indigo
|
| 5 |
+
colorTo: blue
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 8501
|
| 8 |
+
pinned: false
|
requirements.txt
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
streamlit>=1.24.0
|
| 2 |
+
pandas>=1.5.0
|
| 3 |
+
numpy>=1.23.0
|
| 4 |
+
plotly>=5.13.0
|
| 5 |
+
folium>=0.14.0
|
| 6 |
+
streamlit-folium>=0.11.0
|
| 7 |
+
requests>=2.28.0
|
| 8 |
+
python-dotenv>=1.0.0
|
| 9 |
+
ortools>=9.6.0
|
| 10 |
+
geopy
|
| 11 |
+
matplotlib
|
src/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
src/__init__.py
ADDED
|
File without changes
|
src/components/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# This file is intentionally left blank.
|
src/components/map_visualization.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
import pandas as pd
|
| 5 |
+
import folium
|
| 6 |
+
import streamlit as st
|
| 7 |
+
|
| 8 |
+
def load_data():
    """
    Load delivery and vehicle data from CSV files.

    Returns:
        tuple: (delivery_data, vehicle_data) as pandas DataFrames, or
        (None, None) if either CSV file is missing (an error and a hint
        are shown in the Streamlit UI in that case).
    """
    # Project root is three levels up from this file (src/components/...).
    root_dir = Path(__file__).resolve().parent.parent.parent

    # Build data paths with pathlib's '/' operator instead of os.path.join,
    # since root_dir is already a Path.
    delivery_data_path = root_dir / 'data' / 'delivery-data' / 'delivery_data.csv'
    vehicle_data_path = root_dir / 'data' / 'vehicle-data' / 'vehicle_data.csv'

    # Load data; surface a friendly hint instead of crashing the page.
    try:
        delivery_data = pd.read_csv(delivery_data_path)
        vehicle_data = pd.read_csv(vehicle_data_path)
        return delivery_data, vehicle_data
    except FileNotFoundError as e:
        st.error(f"Could not load data: {e}")
        st.info("Please generate the data first by running: python src/utils/generate_all_data.py")
        return None, None
|
| 31 |
+
|
| 32 |
+
def create_delivery_map(delivery_data=None, vehicle_data=None, show_deliveries=True, show_depots=True,
                        date_filter=None, status_filter=None, priority_filter=None):
    """
    Create a Folium map with markers for deliveries and vehicle depots.

    Parameters:
        delivery_data (pd.DataFrame): Delivery data; loaded from disk if None
        vehicle_data (pd.DataFrame): Vehicle data; loaded from disk if None
        show_deliveries (bool): Whether to show delivery markers
        show_depots (bool): Whether to show depot markers
        date_filter (str): Keep only deliveries on this exact date (None = all)
        status_filter (str): Keep only deliveries with this status (None = all)
        priority_filter (str): Keep only deliveries with this priority (None = all)

    Returns:
        folium.Map: Folium map with markers, or None if data could not be loaded
    """
    # If data not provided, load it from the CSV files on disk
    if delivery_data is None or vehicle_data is None:
        delivery_data, vehicle_data = load_data()
        if delivery_data is None or vehicle_data is None:
            return None

    # Apply filters to delivery data (each filter is an exact equality match)
    if date_filter is not None:
        delivery_data = delivery_data[delivery_data['delivery_date'] == date_filter]

    if status_filter is not None:
        delivery_data = delivery_data[delivery_data['status'] == status_filter]

    if priority_filter is not None:
        delivery_data = delivery_data[delivery_data['priority'] == priority_filter]

    # Create map centered around Singapore
    singapore_coords = [1.3521, 103.8198]  # Center of Singapore
    m = folium.Map(location=singapore_coords, zoom_start=12)

    # Add delivery markers
    if show_deliveries and not delivery_data.empty:
        for _, row in delivery_data.iterrows():
            # Create popup content with delivery information (rendered as HTML)
            popup_content = f"""
            <b>Delivery ID:</b> {row['delivery_id']}<br>
            <b>Customer:</b> {row['customer_name']}<br>
            <b>Address:</b> {row['address']}<br>
            <b>Time Window:</b> {row['time_window']}<br>
            <b>Status:</b> {row['status']}<br>
            <b>Priority:</b> {row['priority']}<br>
            <b>Weight:</b> {row['weight_kg']} kg<br>
            <b>Volume:</b> {row['volume_m3']} m³
            """

            # Set marker color based on priority; unknown priorities fall back to blue
            color_map = {'High': 'red', 'Medium': 'orange', 'Low': 'blue'}
            color = color_map.get(row['priority'], 'blue')

            # Add marker to map
            # NOTE(review): "package" is not a Font Awesome 4 icon name, which is
            # what folium.Icon(prefix="fa") uses — verify the glyph actually
            # renders; "cube" or "archive" may be the intended icon.
            folium.Marker(
                location=[row['latitude'], row['longitude']],
                popup=folium.Popup(popup_content, max_width=300),
                tooltip=f"Delivery {row['delivery_id']}: {row['customer_name']}",
                icon=folium.Icon(color=color, icon="package", prefix="fa")
            ).add_to(m)

    # Add depot markers
    if show_depots and not vehicle_data.empty:
        for _, row in vehicle_data.iterrows():
            # Create popup content with vehicle information (rendered as HTML)
            popup_content = f"""
            <b>Vehicle ID:</b> {row['vehicle_id']}<br>
            <b>Type:</b> {row['vehicle_type']}<br>
            <b>Driver:</b> {row['driver_name']}<br>
            <b>Status:</b> {row['status']}<br>
            <b>Capacity:</b> {row['max_weight_kg']} kg / {row['max_volume_m3']} m³<br>
            <b>Working Hours:</b> {row['start_time']} - {row['end_time']}
            """

            # Add marker to map (green truck icon marks a depot)
            folium.Marker(
                location=[row['depot_latitude'], row['depot_longitude']],
                popup=folium.Popup(popup_content, max_width=300),
                tooltip=f"Depot: {row['vehicle_id']}",
                icon=folium.Icon(color="green", icon="truck", prefix="fa")
            ).add_to(m)

    return m
|
| 118 |
+
|
| 119 |
+
def display_map_component():
    """
    Display the map visualization component in Streamlit.

    Renders sidebar filters, summary metrics, and the interactive Folium map
    of deliveries and depots.
    """
    # BUGFIX: this module never imported folium_static, so the
    # folium_static(...) call below raised NameError at runtime.
    # Imported locally to keep the module's top-level imports untouched;
    # streamlit-folium is already listed in requirements.txt.
    from streamlit_folium import folium_static

    st.subheader("Delivery and Depot Locations")

    # Load data; bail out early if the CSV files are missing
    delivery_data, vehicle_data = load_data()
    if delivery_data is None or vehicle_data is None:
        return

    # Create sidebar filters
    with st.sidebar:
        st.subheader("Map Filters")

        # Show/hide options
        show_deliveries = st.checkbox("Show Deliveries", value=True)
        show_depots = st.checkbox("Show Depots", value=True)

        # Delivery date filter ("All" disables the filter)
        dates = sorted(delivery_data['delivery_date'].unique())
        selected_date = st.selectbox(
            "Filter by Date",
            options=["All"] + list(dates),
            index=0
        )
        date_filter = None if selected_date == "All" else selected_date

        # Delivery status filter ("All" disables the filter)
        statuses = sorted(delivery_data['status'].unique())
        selected_status = st.selectbox(
            "Filter by Status",
            options=["All"] + list(statuses),
            index=0
        )
        status_filter = None if selected_status == "All" else selected_status

        # Delivery priority filter ("All" disables the filter)
        priorities = sorted(delivery_data['priority'].unique())
        selected_priority = st.selectbox(
            "Filter by Priority",
            options=["All"] + list(priorities),
            index=0
        )
        priority_filter = None if selected_priority == "All" else selected_priority

    # Display statistics
    col1, col2, col3 = st.columns(3)

    # Apply the same filters for the stats calculation so the metrics
    # agree with what the map shows
    filtered_delivery_data = delivery_data
    if date_filter:
        filtered_delivery_data = filtered_delivery_data[filtered_delivery_data['delivery_date'] == date_filter]
    if status_filter:
        filtered_delivery_data = filtered_delivery_data[filtered_delivery_data['status'] == status_filter]
    if priority_filter:
        filtered_delivery_data = filtered_delivery_data[filtered_delivery_data['priority'] == priority_filter]

    with col1:
        st.metric("Total Deliveries", filtered_delivery_data.shape[0])

    with col2:
        st.metric("Total Weight", f"{filtered_delivery_data['weight_kg'].sum():.2f} kg")

    with col3:
        st.metric("Available Vehicles", vehicle_data[vehicle_data['status'] == 'Available'].shape[0])

    # Create and display the map
    delivery_map = create_delivery_map(
        delivery_data=delivery_data,
        vehicle_data=vehicle_data,
        show_deliveries=show_deliveries,
        show_depots=show_depots,
        date_filter=date_filter,
        status_filter=status_filter,
        priority_filter=priority_filter
    )

    if delivery_map:
        folium_static(delivery_map, width=800, height=600)
    else:
        st.error("Could not create map. Please check that data is available.")

if __name__ == "__main__":
    # Run the map visualization component standalone
    display_map_component()
|
src/pages/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Import all page functions to make them available from this package
|
| 2 |
+
from src.pages._home_page import home_page
|
| 3 |
+
from src.pages._about_page import about_page
|
| 4 |
+
from src.pages._contact_page import contact_page
|
| 5 |
+
from src.pages._map_page import map_page
|
| 6 |
+
from src.pages._optimize_page import optimize_page
|
| 7 |
+
|
| 8 |
+
__all__ = ['home_page', 'about_page', 'contact_page', 'map_page', 'optimize_page']
|
src/pages/_about_page.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
|
| 3 |
+
def about_page():
    """
    Render the About page: project overview, key features, technology stack,
    and getting-started guidance.
    """
    st.title("About This Project")

    # Consolidated project overview — the original rendered two overlapping
    # introduction paragraphs back to back, repeating the same opening line.
    st.write("""
    ## Project Overview

    This project is a **Delivery Route Optimization** tool built using Streamlit. It aims to optimize delivery
    routes for a fleet of vehicles while considering constraints such as delivery time windows, vehicle capacity,
    and traffic conditions. It provides an interactive web interface for solving complex logistics challenges,
    using advanced algorithms to determine the most efficient delivery routes while balancing various
    constraints and business priorities.
    """)

    # Key features in columns
    st.subheader("Key Features")

    col1, col2 = st.columns(2)

    with col1:
        st.markdown("""
        #### Route Optimization
        - Solves the **Vehicle Routing Problem (VRP)** to determine efficient routes
        - Incorporates constraints like time windows and vehicle capacity
        - Prioritizes deliveries based on importance and urgency

        #### Map Visualization
        - Displays optimized routes on an interactive map
        - Highlights delivery stops and depot locations
        - Provides detailed route information and statistics
        """)

    with col2:
        st.markdown("""
        #### Calendar View
        - Calendar-based schedule for deliveries
        - Shows delivery timeline and workload distribution
        - Helps manage delivery schedules efficiently

        #### Interactive Dashboard
        - Real-time delivery status monitoring
        - Data filtering and visualization options
        - Customizable optimization parameters
        """)

    # Tools and technologies in an expander
    with st.expander("Tools and Technologies"):
        col1, col2, col3 = st.columns(3)

        with col1:
            st.markdown("""
            #### Core Technologies
            - **Python** - Main programming language
            - **Streamlit** - Interactive web interface
            - **Google OR-Tools** - Optimization engine
            """)

        with col2:
            st.markdown("""
            #### Data Visualization
            - **Folium** - Interactive maps
            - **Plotly** - Charts and timelines
            - **Pandas** - Data processing
            """)

        with col3:
            st.markdown("""
            #### Routing Services
            - **OSRM** - Road distances calculation
            - **TimeMatrix** - Travel time estimation
            - **Geocoding** - Location services
            """)

    # Navigation guidance
    st.header("Getting Started")
    st.write("""
    Use the sidebar navigation to explore the application:

    - **Map**: Visualize delivery locations and vehicle depots
    - **Optimizer**: Create optimized delivery routes
    - **About**: Learn more about this application
    - **Contact**: Get in touch with the team
    """)

# Make sure the function can be executed standalone
if __name__ == "__main__":
    about_page()
|
src/pages/_contact_page.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
|
| 3 |
+
def contact_page():
    """Render the static Contact page: email, repository link, and license note."""
    st.title("Contact")

    # NOTE(review): the text below claims the MIT License, but the repository
    # ships a 674-line LICENSE file (typical of GPL) — confirm which applies.
    # The GitHub link also still contains the "yourusername" placeholder.
    page_body = """
    ### Get in Touch

    For questions, feedback, or suggestions about this application, please feel free to reach out.

    **Email**: jinghui.me@gmail.com

    ### Repository

    This project is open-source. Find the code on GitHub:
    [streamlit-schedular-app](https://github.com/yourusername/streamlit-schedular-app)

    ### License

    This project is licensed under the MIT License. See the LICENSE file for more details.
    """
    st.write(page_body)

# Make sure the function can be executed standalone
if __name__ == "__main__":
    contact_page()
|
src/pages/_home_page.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import os
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
|
| 6 |
+
def home_page():
    """
    Render the combined home page: intro text, quick statistics computed from
    the generated CSV data (if present), and a hero image when available.
    """
    st.title("Delivery Route Optimization")

    st.write("""
    Welcome to the Delivery Route Optimization application! This tool helps logistics teams
    optimize delivery routes for a fleet of vehicles while considering constraints such as delivery time windows,
    vehicle capacity, and traffic conditions.

    Use the navigation sidebar to explore different features of this application.
    """)

    # Quick stats from data at the top
    try:
        # Get data paths relative to the project root (src/pages/...)
        root_dir = Path(__file__).resolve().parent.parent.parent
        delivery_path = os.path.join(root_dir, 'data', 'delivery-data', 'delivery_data.csv')
        vehicle_path = os.path.join(root_dir, 'data', 'vehicle-data', 'vehicle_data.csv')

        if os.path.exists(delivery_path) and os.path.exists(vehicle_path):
            # Load data for stats
            delivery_data = pd.read_csv(delivery_path)
            vehicle_data = pd.read_csv(vehicle_path)

            # Display stats
            st.subheader("Current Statistics")
            col1, col2, col3 = st.columns(3)
            with col1:
                st.metric("Total Deliveries", len(delivery_data))
            with col2:
                st.metric("Total Vehicles", len(vehicle_data))
            with col3:
                # Guard against data generated without a 'status' column
                pending = delivery_data[delivery_data['status'] == 'Pending'] if 'status' in delivery_data.columns else []
                st.metric("Pending Deliveries", len(pending))

            # Add more detailed stats in an expander
            with st.expander("View More Statistics"):
                # Status breakdown
                if 'status' in delivery_data.columns:
                    st.write("#### Delivery Status Breakdown")
                    status_counts = delivery_data['status'].value_counts().reset_index()
                    status_counts.columns = ['Status', 'Count']
                    # st.bar_chart renders in place; the return value was
                    # previously bound to an unused variable.
                    st.bar_chart(status_counts.set_index('Status'))

                # Priority breakdown
                if 'priority' in delivery_data.columns:
                    st.write("#### Delivery Priority Breakdown")
                    priority_counts = delivery_data['priority'].value_counts().reset_index()
                    priority_counts.columns = ['Priority', 'Count']
                    st.bar_chart(priority_counts.set_index('Priority'))
        else:
            st.info("Please generate data first to see statistics")
            st.code("python src/utils/generate_all_data.py")
    except Exception:
        # Deliberate best-effort: any failure while reading/plotting falls
        # back to the "generate data" hint rather than crashing the page.
        st.info("Generate data first to see statistics")
        st.code("python src/utils/generate_all_data.py")

    # Add the image (skipped silently if the asset is missing)
    img_path = Path(__file__).resolve().parent.parent.parent / "img" / "delivery-route-network.jpg"
    if os.path.exists(img_path):
        st.image(str(img_path), caption="Delivery Route Network")


# Make sure the function can be executed standalone
if __name__ == "__main__":
    st.set_page_config(page_title="Home - Delivery Route Optimization", page_icon="🚚", layout="wide")
    home_page()
|
src/pages/_map_page.py
ADDED
|
@@ -0,0 +1,524 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
import folium
|
| 3 |
+
from streamlit_folium import folium_static
|
| 4 |
+
import pandas as pd
|
| 5 |
+
import os
|
| 6 |
+
import plotly.figure_factory as ff
|
| 7 |
+
import plotly.express as px
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from datetime import datetime, timedelta
|
| 10 |
+
import numpy as np
|
| 11 |
+
|
| 12 |
+
def map_page():
    """
    Render the map visualization page with delivery and depot locations.

    Loads delivery and vehicle CSV data from the project's ``data`` directory,
    offers sidebar filters (show/hide toggles, priority, status, date range),
    and renders:

    * summary metrics for the currently filtered deliveries,
    * an interactive folium map with delivery and depot markers,
    * an optional per-date timeline ("calendar") of delivery time windows,
    * an optional styled raw-data table.

    Can be called from app.py to display within the main application.
    """
    st.title("Delivery Route Map")
    st.write("""
    This page visualizes the delivery locations and vehicle depots on an interactive map.
    Use the filters in the sidebar to customize the view.
    """)

    # Add help section with expander
    # NOTE: fixed stray mojibake character ('ß') at the end of the title.
    with st.expander("📚 How to Use the Map Page"):
        st.markdown("""
        ## Step-by-Step Guide to the Map Page

        The Map page provides an interactive visualization of all delivery locations and vehicle depots. It helps you understand delivery distribution, monitor delivery status, and plan logistics operations.

        ### 1. Map Navigation

        - **Pan**: Click and drag to move around the map
        - **Zoom**: Use the scroll wheel or the +/- buttons in the top-left corner
        - **View Details**: Click on any marker to see detailed information about that delivery or depot

        ### 2. Using Map Filters (Sidebar)

        - **Show/Hide Elements**:
          - Toggle "Show Deliveries" to display or hide delivery markers
          - Toggle "Show Depots" to display or hide vehicle depot markers
          - Enable "Show Data Table" to view raw delivery data below the map
          - Enable "Show Calendar View" to see delivery schedules organized by date

        - **Filter by Attributes**:
          - Use "Filter by Priority" to show only deliveries of selected priority levels (High, Medium, Low)
          - Use "Filter by Status" to show only deliveries with selected statuses (Pending, In Transit, Delivered)

        - **Date Filtering**:
          - Use the "Date Range" selector to focus on deliveries within specific dates
          - This affects both the map display and the calendar view

        ### 3. Understanding the Map Markers

        - **Delivery Markers**:
          - Red markers: High priority deliveries
          - Orange markers: Medium priority deliveries
          - Blue markers: Low priority deliveries

        - **Depot Markers**:
          - Green house icons: Vehicle depot locations

        ### 4. Using the Calendar View

        - Select specific dates from the dropdown to view scheduled deliveries
        - Each tab shows deliveries for one selected date
        - Timeline bars are color-coded by priority (red=High, orange=Medium, blue=Low)
        - Hover over timeline bars to see detailed delivery information
        - Check the summary metrics below each calendar for quick insights

        ### 5. Reading the Delivery Statistics

        - The top section shows key metrics about displayed deliveries:
          - Total number of deliveries shown
          - Total weight of all displayed deliveries
          - Number of pending deliveries
          - Breakdown of deliveries by status

        ### 6. Data Table Features

        When "Show Data Table" is enabled:
        - Green highlighted rows: Completed deliveries
        - Red highlighted rows: Urgent high-priority deliveries due within the next week
        - Sort any column by clicking the column header
        - Search across all fields using the search box

        This map view helps you visualize your delivery operations geographically while the calendar provides a time-based perspective of your delivery schedule.
        """)

    # Initialize session state variables for filters so selections survive reruns.
    if 'map_filters' not in st.session_state:
        st.session_state.map_filters = {
            'selected_dates': ["All"],
            'priority_filter': [],
            'status_filter': [],
            'date_range': [None, None],
            'show_calendar': True,
            'show_map': True,
            'show_data_table': False,
            'cluster_markers': True
        }

    # Create filters in sidebar
    with st.sidebar:
        st.header("Map Filters")

        # Show/hide options - use session state values as defaults
        show_deliveries = st.checkbox(
            "Show Deliveries",
            value=st.session_state.map_filters.get('show_deliveries', True),
            key="show_deliveries_checkbox"
        )
        st.session_state.map_filters['show_deliveries'] = show_deliveries

        show_depots = st.checkbox(
            "Show Depots",
            value=st.session_state.map_filters.get('show_depots', True),
            key="show_depots_checkbox"
        )
        st.session_state.map_filters['show_depots'] = show_depots

        # Show/hide data table
        show_data_table = st.checkbox(
            "Show Data Table",
            value=st.session_state.map_filters.get('show_data_table', False),
            key="show_data_table_checkbox"
        )
        st.session_state.map_filters['show_data_table'] = show_data_table

        # Choose visualization tabs
        show_calendar = st.checkbox(
            "Show Calendar View",
            value=st.session_state.map_filters.get('show_calendar', True),
            key="show_calendar_checkbox"
        )
        st.session_state.map_filters['show_calendar'] = show_calendar

    # Try to load data
    try:
        # Get data paths (go up from src/pages/ to project root)
        root_dir = Path(__file__).resolve().parent.parent.parent
        delivery_path = os.path.join(root_dir, 'data', 'delivery-data', 'delivery_data.csv')
        vehicle_path = os.path.join(root_dir, 'data', 'vehicle-data', 'vehicle_data.csv')

        # Check if files exist.  BUGFIX: the fallback previously rebuilt the
        # exact same hyphenated path, so it could never succeed; it now tries
        # the underscore directory naming as the comments intended.
        if not os.path.exists(delivery_path):
            delivery_path = os.path.join(root_dir, 'data', 'delivery_data', 'delivery_data.csv')
            if not os.path.exists(delivery_path):
                st.warning(f"Delivery data file not found at: {delivery_path}")
                st.info("Please generate data first with: python src/utils/generate_all_data.py")
                return

        if not os.path.exists(vehicle_path):
            vehicle_path = os.path.join(root_dir, 'data', 'vehicle_data', 'vehicle_data.csv')
            if not os.path.exists(vehicle_path):
                st.warning(f"Vehicle data file not found at: {vehicle_path}")
                st.info("Please generate data first with: python src/utils/generate_all_data.py")
                return

        # Load data
        delivery_data = pd.read_csv(delivery_path)
        vehicle_data = pd.read_csv(vehicle_path)

        # Ensure delivery_date is properly formatted as datetime, and remember
        # the full (unfiltered) date span for the date-range picker below.
        # PERF: previously the CSV was re-read from disk just to get this span.
        full_min_date = full_max_date = None
        if 'delivery_date' in delivery_data.columns:
            delivery_data['delivery_date'] = pd.to_datetime(delivery_data['delivery_date'])
            full_min_date = delivery_data['delivery_date'].min().date()
            full_max_date = delivery_data['delivery_date'].max().date()

        # Add more filters if data is available - multi-select by priority
        if 'priority' in delivery_data.columns:
            with st.sidebar:
                all_priorities = sorted(delivery_data['priority'].unique().tolist())
                selected_priorities = st.multiselect(
                    "Filter by Priority",
                    options=all_priorities,
                    default=st.session_state.map_filters.get('priority_filter', all_priorities),
                    key="priority_multiselect"
                )
                st.session_state.map_filters['priority_filter'] = selected_priorities

            if selected_priorities:
                delivery_data = delivery_data[delivery_data['priority'].isin(selected_priorities)]

        if 'status' in delivery_data.columns:
            with st.sidebar:
                all_statuses = sorted(delivery_data['status'].unique().tolist())
                selected_statuses = st.multiselect(
                    "Filter by Status",
                    options=all_statuses,
                    default=st.session_state.map_filters.get('status_filter', all_statuses),
                    key="status_multiselect"
                )
                st.session_state.map_filters['status_filter'] = selected_statuses

            if selected_statuses:
                delivery_data = delivery_data[delivery_data['status'].isin(selected_statuses)]

        if 'delivery_date' in delivery_data.columns and full_min_date is not None:
            with st.sidebar:
                # Bounds come from the ORIGINAL unfiltered data captured above.
                min_date = full_min_date
                max_date = full_max_date

                # Get saved values from session state
                saved_start_date = st.session_state.map_filters.get('date_range', [None, None])[0]
                saved_end_date = st.session_state.map_filters.get('date_range', [None, None])[1]

                # Validate saved dates - ensure they're within allowed range
                if saved_start_date and saved_start_date < min_date:
                    saved_start_date = min_date
                if saved_end_date and saved_end_date > max_date:
                    saved_end_date = max_date

                # Set default values with proper validation
                default_start_date = saved_start_date if saved_start_date else min_date
                default_end_date = saved_end_date if saved_end_date else min(min_date + timedelta(days=7), max_date)

                # Add date range picker
                try:
                    date_range = st.date_input(
                        "Date Range",
                        value=(default_start_date, default_end_date),
                        min_value=min_date,
                        max_value=max_date,
                        key="date_range_input"
                    )

                    # Update session state with new date range.  st.date_input
                    # may return a single date mid-selection, hence the check.
                    if len(date_range) == 2:
                        st.session_state.map_filters['date_range'] = list(date_range)
                        start_date, end_date = date_range
                        mask = (delivery_data['delivery_date'].dt.date >= start_date) & (delivery_data['delivery_date'].dt.date <= end_date)
                        delivery_data = delivery_data[mask]
                except Exception as e:
                    # If there's any error with the date range, reset it
                    st.error(f"Error with date range: {e}")
                    st.session_state.map_filters['date_range'] = [min_date, max_date]
                    date_range = (min_date, max_date)
                    mask = (delivery_data['delivery_date'].dt.date >= min_date) & (delivery_data['delivery_date'].dt.date <= max_date)
                    delivery_data = delivery_data[mask]

        # Statistics shown at the top of the page
        st.subheader("Delivery Overview")
        col1, col2, col3 = st.columns(3)

        with col1:
            st.metric("Deliveries Shown", len(delivery_data))

        with col2:
            if 'weight_kg' in delivery_data.columns:
                total_weight = delivery_data['weight_kg'].sum()
                st.metric("Total Weight", f"{total_weight:.2f} kg")

        with col3:
            if 'status' in delivery_data.columns:
                pending = len(delivery_data[delivery_data['status'] == 'Pending'])
                st.metric("Pending Deliveries", pending)

        # Status count columns - dynamic based on available statuses.
        # Guard against an empty frame: st.columns(0) raises and the
        # percentage below would divide by zero.
        if 'status' in delivery_data.columns and len(delivery_data) > 0:
            status_counts = delivery_data['status'].value_counts()
            status_cols = st.columns(len(status_counts))

            for i, (status, count) in enumerate(status_counts.items()):
                with status_cols[i]:
                    # Choose color based on status
                    delta_color = "normal"
                    if status == "Delivered":
                        delta_color = "off"
                    elif status == "In Transit":
                        delta_color = "normal"
                    elif status == "Pending":
                        delta_color = "inverse"  # Red

                    # Calculate percentage
                    percentage = round((count / len(delivery_data)) * 100, 1)
                    st.metric(
                        f"{status}",
                        count,
                        f"{percentage}% of total",
                        delta_color=delta_color
                    )

        # Create map
        singapore_coords = [1.3521, 103.8198]  # Center of Singapore
        m = folium.Map(location=singapore_coords, zoom_start=12)

        # Add delivery markers
        if show_deliveries:
            for _, row in delivery_data.iterrows():
                # Create popup content
                popup_content = f"<b>ID:</b> {row['delivery_id']}<br>"

                if 'customer_name' in row:
                    popup_content += f"<b>Customer:</b> {row['customer_name']}<br>"

                if 'address' in row:
                    popup_content += f"<b>Address:</b> {row['address']}<br>"

                if 'time_window' in row:
                    popup_content += f"<b>Time Window:</b> {row['time_window']}<br>"

                if 'priority' in row:
                    popup_content += f"<b>Priority:</b> {row['priority']}<br>"

                if 'delivery_date' in row:
                    popup_content += f"<b>Date:</b> {row['delivery_date'].strftime('%b %d, %Y')}<br>"

                if 'status' in row:
                    popup_content += f"<b>Status:</b> {row['status']}<br>"

                # Choose marker color based on priority
                color = 'blue'
                if 'priority' in row:
                    if row['priority'] == 'High':
                        color = 'red'
                    elif row['priority'] == 'Medium':
                        color = 'orange'

                # Add marker to map
                folium.Marker(
                    [row['latitude'], row['longitude']],
                    popup=folium.Popup(popup_content, max_width=300),
                    tooltip=f"Delivery {row['delivery_id']}",
                    icon=folium.Icon(color=color)
                ).add_to(m)

        # Add depot markers
        if show_depots:
            for _, row in vehicle_data.iterrows():
                # Create popup content
                popup_content = f"<b>Vehicle ID:</b> {row['vehicle_id']}<br>"

                if 'vehicle_type' in row:
                    popup_content += f"<b>Type:</b> {row['vehicle_type']}<br>"

                if 'driver_name' in row:
                    popup_content += f"<b>Driver:</b> {row['driver_name']}<br>"

                # Add marker to map
                folium.Marker(
                    [row['depot_latitude'], row['depot_longitude']],
                    popup=folium.Popup(popup_content, max_width=300),
                    tooltip=f"Depot: {row['vehicle_id']}",
                    icon=folium.Icon(color='green', icon='home', prefix='fa')
                ).add_to(m)

        # Display the map
        folium_static(m, width=800, height=500)

        # Display calendar visualization if selected
        if show_calendar and 'delivery_date' in delivery_data.columns and 'time_window' in delivery_data.columns:
            st.subheader("Delivery Schedule Calendar")

            # Process data for calendar view
            calendar_data = delivery_data.copy()

            # Extract start and end times from time_window ("HH:MM-HH:MM")
            calendar_data[['start_time', 'end_time']] = calendar_data['time_window'].str.split('-', expand=True)

            # Create start and end datetime for each delivery
            calendar_data['Start'] = pd.to_datetime(
                calendar_data['delivery_date'].dt.strftime('%Y-%m-%d') + ' ' + calendar_data['start_time']
            )
            calendar_data['Finish'] = pd.to_datetime(
                calendar_data['delivery_date'].dt.strftime('%Y-%m-%d') + ' ' + calendar_data['end_time']
            )

            # Create task column for Gantt chart
            calendar_data['Task'] = calendar_data['delivery_id'] + ': ' + calendar_data['customer_name']

            # Create color mapping for priority
            if 'priority' in calendar_data.columns:
                color_map = {'High': 'rgb(255, 0, 0)', 'Medium': 'rgb(255, 165, 0)', 'Low': 'rgb(0, 0, 255)'}
                calendar_data['Color'] = calendar_data['priority'].map(color_map)
            else:
                calendar_data['Color'] = 'rgb(0, 0, 255)'  # Default blue

            # Get all available dates and add multi-select filter
            # (fixed stray mojibake character in this comment)
            all_dates = sorted(calendar_data['delivery_date'].dt.date.unique())

            # Format dates for display in the dropdown
            date_options = {date.strftime('%b %d, %Y'): date for date in all_dates}

            # Get default selection from session state
            default_selections = st.session_state.map_filters.get('calendar_selected_dates', [])

            # Validate default selections - only keep dates that exist in current options
            valid_default_selections = [date_str for date_str in default_selections if date_str in date_options.keys()]

            # If no valid selections remain, default to first date (if available)
            if not valid_default_selections and date_options:
                valid_default_selections = [list(date_options.keys())[0]]

            # Add multiselect for date filtering with validated defaults
            selected_date_strings = st.multiselect(
                "Select dates to display",
                options=list(date_options.keys()),
                default=valid_default_selections,
                key="calendar_date_selector"
            )

            # Save selections to session state
            st.session_state.map_filters['calendar_selected_dates'] = selected_date_strings

            # Convert selected strings back to date objects
            selected_dates = [date_options[date_str] for date_str in selected_date_strings]

            if not selected_dates:
                st.info("Please select at least one date to view the delivery schedule.")
            else:
                # Filter calendar data to only include selected dates
                filtered_calendar = calendar_data[calendar_data['delivery_date'].dt.date.isin(selected_dates)]

                # Create tabs only for the selected dates
                date_tabs = st.tabs([date.strftime('%b %d, %Y') for date in selected_dates])

                for i, (date, tab) in enumerate(zip(selected_dates, date_tabs)):
                    with tab:
                        # Filter data for this date
                        day_data = filtered_calendar[filtered_calendar['delivery_date'].dt.date == date]

                        if len(day_data) > 0:
                            # Create figure
                            fig = px.timeline(
                                day_data,
                                x_start="Start",
                                x_end="Finish",
                                y="Task",
                                color="priority" if 'priority' in day_data.columns else None,
                                color_discrete_map={"High": "red", "Medium": "orange", "Low": "blue"},
                                hover_data=["customer_name", "address", "weight_kg", "status"]
                            )

                            # Update layout
                            fig.update_layout(
                                title=f"Deliveries scheduled for {date.strftime('%b %d, %Y')}",
                                xaxis_title="Time of Day",
                                yaxis_title="Delivery",
                                height=max(300, 50 * len(day_data)),
                                yaxis={'categoryorder': 'category ascending'}
                            )

                            # Display figure
                            st.plotly_chart(fig, use_container_width=True)

                            # Show summary
                            col1, col2, col3 = st.columns(3)
                            with col1:
                                st.metric("Total Deliveries", len(day_data))
                            with col2:
                                if 'weight_kg' in day_data.columns:
                                    st.metric("Total Weight", f"{day_data['weight_kg'].sum():.2f} kg")
                            with col3:
                                if 'priority' in day_data.columns and 'High' in day_data['priority'].values:
                                    st.metric("High Priority", len(day_data[day_data['priority'] == 'High']))

                            # Delivery status breakdown for this day
                            if 'status' in day_data.columns:
                                st.write("##### Deliveries by Status")
                                status_counts = day_data['status'].value_counts()
                                status_cols = st.columns(min(4, len(status_counts)))

                                for i, (status, count) in enumerate(status_counts.items()):
                                    col_idx = i % len(status_cols)
                                    with status_cols[col_idx]:
                                        st.metric(status, count)
                        else:
                            st.info(f"No deliveries scheduled for {date.strftime('%b %d, %Y')}")

        # Display raw data table if selected
        if show_data_table:
            st.subheader("Delivery Data")

            # Create a copy for display
            display_df = delivery_data.copy()

            # Convert delivery_date back to string for display
            if 'delivery_date' in display_df.columns:
                display_df['delivery_date'] = display_df['delivery_date'].dt.strftime('%b %d, %Y')

            # Compute which deliveries are urgent (next 7 days)
            if 'delivery_date' in delivery_data.columns:
                today = datetime.now().date()
                next_week = today + timedelta(days=7)

                # Function to highlight rows based on delivery status and urgency
                def highlight_rows(row):
                    delivery_date = pd.to_datetime(row['delivery_date']).date() if 'delivery_date' in row else None

                    # Check status first - highlight delivered rows in green
                    if 'status' in row and row['status'] == 'Delivered':
                        return ['background-color: rgba(0, 255, 0, 0.1)'] * len(row)
                    # Then check for urgent high-priority deliveries - highlight in red.
                    # BUGFIX: row.get avoids a KeyError when 'priority' is absent.
                    elif (delivery_date and today <= delivery_date <= next_week
                          and row.get('priority') == 'High'):
                        return ['background-color: rgba(255, 0, 0, 0.1)'] * len(row)
                    else:
                        return [''] * len(row)

                # Display styled dataframe
                st.dataframe(display_df.style.apply(highlight_rows, axis=1))
            else:
                st.dataframe(display_df)

    except Exception as e:
        st.error(f"Error loading data: {str(e)}")
        st.info("Please generate the data first by running: python src/utils/generate_all_data.py")
        st.write("Error details:", e)  # Detailed error for debugging
# Entry point when this page module is executed as a standalone script,
# which is useful for debugging/testing the page outside the main app.
if __name__ == "__main__":
    st.set_page_config(
        page_title="Map View - Delivery Route Optimization",
        page_icon="🗺️",
        layout="wide",
    )
    map_page()
src/pages/_optimize_page.py
ADDED
|
@@ -0,0 +1,1781 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import numpy as np
|
| 4 |
+
import folium
|
| 5 |
+
from streamlit_folium import folium_static
|
| 6 |
+
import os
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
from datetime import datetime, timedelta
|
| 9 |
+
import matplotlib.pyplot as plt
|
| 10 |
+
import random
|
| 11 |
+
import time
|
| 12 |
+
from ortools.constraint_solver import routing_enums_pb2
|
| 13 |
+
from ortools.constraint_solver import pywrapcp
|
| 14 |
+
import folium.plugins
|
| 15 |
+
from folium.features import DivIcon
|
| 16 |
+
import requests
|
| 17 |
+
import plotly.express as px
|
| 18 |
+
|
def clear_optimization_results():
    """Drop any cached optimization result so stale routes are not displayed.

    Registered as the ``on_change`` callback of every parameter widget on the
    optimize page: any parameter change invalidates previously computed routes.
    """
    # Reset to None (rather than deleting the key) so downstream
    # `is not None` checks on st.session_state.optimization_result stay simple.
    if 'optimization_result' in st.session_state:
        st.session_state['optimization_result'] = None
def optimize_page():
    """
    Render the optimization page with controls for route optimization.

    Flow: show a help expander, initialize session state, load all input data,
    collect optimization parameters from sidebar widgets (each widget clears any
    cached result on change), display delivery/vehicle statistics, and — when the
    user clicks "Generate Optimal Routes" or a cached result exists — run the
    optimizer on pending deliveries and display the results.
    """
    st.title("Delivery Route Optimization")

    # Add help section with expander
    with st.expander("📚 How to Use This Page"):
        st.markdown("""
        ## Step-by-Step Guide to Route Optimization

        This application helps you optimize delivery routes by assigning deliveries to vehicles in the most efficient way possible. Follow these steps to get started:

        ### 1. Set Optimization Parameters (Sidebar)

        - **Select Delivery Dates**: Choose which dates to include in optimization. Select "All" to include all dates.
        - **Priority Importance**: Higher values give more weight to high-priority deliveries.
        - **Time Window Importance**: Higher values enforce stricter adherence to delivery time windows.
        - **Load Balancing vs Distance**: Higher values distribute deliveries more evenly across vehicles.
        - **Maximum Vehicles**: Set the maximum number of vehicles to use for deliveries.
        - **Minimum Time Window Compliance**: Set the minimum percentage of deliveries that must be within their time windows.

        ### 2. Generate Routes

        - Review the delivery statistics and vehicle availability information
        - Click the **Generate Optimal Routes** button to run the optimization algorithm
        - The algorithm will assign deliveries to vehicles based on your parameters

        ### 3. Review Optimization Results

        - **Overall Performance**: Check metrics like assigned deliveries, vehicles used, and time window compliance
        - **Time & Distance Distribution**: See how delivery workload is distributed across vehicles
        - **Route Map**: Interactive map showing the optimized routes for each vehicle
          - Use the date filter to show routes for specific days
          - Hover over markers and routes for detailed information
        - **Calendar View**: View delivery schedules organized by date
          - Green bars indicate on-time deliveries
          - Orange bars indicate late deliveries
          - Red bars indicate unassigned deliveries

        ### 4. Adjust and Refine

        If the results don't meet your requirements:

        - **Not enough vehicles?** Increase the maximum vehicles allowed
        - **Time windows not met?** Decrease the time window importance or minimum compliance
        - **High priority deliveries not assigned?** Increase priority importance
        - **Routes too unbalanced?** Increase load balancing parameter

        Remember to click **Generate Optimal Routes** after changing any parameters to see the updated results.
        """)

    # Initialize session state variables (first page load only).
    if 'optimization_result' not in st.session_state:
        st.session_state.optimization_result = None
    if 'optimization_params' not in st.session_state:
        # Defaults for the sidebar sliders; persisted across reruns.
        st.session_state.optimization_params = {
            'priority_weight': 0.3,
            'time_window_weight': 0.5,
            'balance_weight': 0.2,
            'max_vehicles': 5,
            'selected_dates': ["All"]
        }
    if 'calendar_display_dates' not in st.session_state:
        st.session_state.calendar_display_dates = None
    # Add this new session state variable to store calculated road routes
    # (presumably a cache keyed by route — populated elsewhere; verify in map code).
    if 'calculated_road_routes' not in st.session_state:
        st.session_state.calculated_road_routes = {}

    # Load data; load_all_data() returns None (and shows an error) if files are missing.
    data = load_all_data()
    if not data:
        return

    delivery_data, vehicle_data, distance_matrix, time_matrix, locations = data

    # Optimization parameters
    st.sidebar.header("Optimization Parameters")

    # Date selection for deliveries (only when the column exists in the CSV).
    if 'delivery_date' in delivery_data.columns:
        available_dates = sorted(delivery_data['delivery_date'].unique())
        date_options = ["All"] + list(available_dates)

        # Store current value before selection
        current_selected_dates = st.session_state.optimization_params['selected_dates']

        selected_dates = st.sidebar.multiselect(
            "Select Delivery Dates",
            options=date_options,
            default=current_selected_dates,
            key="delivery_date_selector"
        )

        # Check if selection changed — a change invalidates cached results.
        if selected_dates != current_selected_dates:
            clear_optimization_results()
            st.session_state.optimization_params['selected_dates'] = selected_dates

        # Handle filtering based on selection
        if "All" not in selected_dates:
            if selected_dates:  # If specific dates were selected
                delivery_data = delivery_data[delivery_data['delivery_date'].isin(selected_dates)]
            elif available_dates:  # No dates selected, show warning
                st.sidebar.warning("No dates selected. Please select at least one delivery date.")
                return
        # If "All" is selected, keep all dates - no filtering needed

    # Priority weighting (α in the objective; see run_optimization docstring).
    current_priority = st.session_state.optimization_params['priority_weight']
    priority_weight = st.sidebar.slider(
        "Priority Importance",
        min_value=0.0,
        max_value=1.0,
        value=current_priority,
        help="Higher values give more importance to high-priority deliveries",
        key="priority_weight",
        on_change=clear_optimization_results
    )

    # Time window importance (β)
    current_time_window = st.session_state.optimization_params['time_window_weight']
    time_window_weight = st.sidebar.slider(
        "Time Window Importance",
        min_value=0.0,
        max_value=1.0,
        value=current_time_window,
        help="Higher values enforce stricter adherence to delivery time windows",
        key="time_window_weight",
        on_change=clear_optimization_results
    )

    # Distance vs load balancing (γ)
    current_balance = st.session_state.optimization_params['balance_weight']
    balance_weight = st.sidebar.slider(
        "Load Balancing vs Distance",
        min_value=0.0,
        max_value=1.0,
        value=current_balance,
        help="Higher values prioritize even distribution of deliveries across vehicles over total distance",
        key="balance_weight",
        on_change=clear_optimization_results
    )

    # Max vehicles to use — capped by how many vehicles are currently 'Available'.
    available_vehicles = vehicle_data[vehicle_data['status'] == 'Available']
    current_max_vehicles = st.session_state.optimization_params['max_vehicles']
    max_vehicles = st.sidebar.slider(
        "Maximum Vehicles to Use",
        min_value=1,
        max_value=len(available_vehicles),
        value=min(current_max_vehicles, len(available_vehicles)),
        key="max_vehicles",
        on_change=clear_optimization_results
    )

    # Add minimum time window compliance slider (δ, as a percentage here;
    # converted to a 0-1 fraction before being passed to run_optimization).
    min_time_window_compliance = st.sidebar.slider(
        "Minimum Time Window Compliance (%)",
        min_value=0,
        max_value=100,
        value=75,
        help="Minimum percentage of deliveries that must be within their time window",
        key="min_time_window_compliance",
        on_change=clear_optimization_results
    )

    # Update session state with new parameter values
    st.session_state.optimization_params['priority_weight'] = priority_weight
    st.session_state.optimization_params['time_window_weight'] = time_window_weight
    st.session_state.optimization_params['balance_weight'] = balance_weight
    st.session_state.optimization_params['max_vehicles'] = max_vehicles

    # # Add a notification when parameters have changed and results need regenerating
    # if ('optimization_result' not in st.session_state or st.session_state.optimization_result is None):
    #     st.warning("⚠️ Optimization parameters have changed. Please click 'Generate Optimal Routes' to update results.")

    # Main optimization section: two columns — statistics (left) and vehicles (right).
    col1, col2 = st.columns([2, 1])

    with col1:
        st.subheader("Delivery Route Optimizer")

        # Filter out completed deliveries for statistics
        if 'status' in delivery_data.columns:
            pending_deliveries = delivery_data[delivery_data['status'] != 'Delivered']
            completed_count = len(delivery_data) - len(pending_deliveries)
        else:
            pending_deliveries = delivery_data
            completed_count = 0

        st.write(f"Optimizing routes for {len(pending_deliveries)} pending deliveries using up to {max_vehicles} vehicles")

        # Statistics
        st.write("#### Delivery Statistics")
        total_count = len(delivery_data)
        pending_count = len(pending_deliveries)

        col1a, col1b = st.columns(2)
        with col1a:
            st.metric("Total Deliveries", total_count)
        with col1b:
            # Show a negative delta (green/inverse) only when something is completed.
            st.metric("Pending Deliveries", pending_count,
                      delta=f"-{completed_count}" if completed_count > 0 else None,
                      delta_color="inverse" if completed_count > 0 else "normal")

        if 'priority' in delivery_data.columns:
            # Show priority breakdown for pending deliveries only
            priority_counts = pending_deliveries['priority'].value_counts()

            # Display priority counts in a more visual way, max 3 columns wide.
            st.write("##### Priority Breakdown")
            priority_cols = st.columns(min(3, len(priority_counts)))

            for i, (priority, count) in enumerate(priority_counts.items()):
                # Wrap onto rows when there are more priorities than columns.
                col_idx = i % len(priority_cols)
                with priority_cols[col_idx]:
                    st.metric(f"{priority}", count)

        if 'weight_kg' in delivery_data.columns:
            # Calculate weight only for pending deliveries
            total_weight = pending_deliveries['weight_kg'].sum()
            st.metric("Total Weight (Pending)", f"{total_weight:.2f} kg")

    with col2:
        st.write("#### Vehicle Availability")
        st.write(f"Available Vehicles: {len(available_vehicles)}")

        # Show vehicle capacity
        if 'max_weight_kg' in vehicle_data.columns:
            total_capacity = available_vehicles['max_weight_kg'].sum()
            st.write(f"Total Capacity: {total_capacity:.2f} kg")

            # Check if we have enough capacity
            # (relies on total_weight computed in col1 when 'weight_kg' exists)
            if 'weight_kg' in delivery_data.columns:
                if total_capacity < total_weight:
                    st.warning("⚠️ Insufficient vehicle capacity for all deliveries")
                else:
                    st.success("✅ Sufficient vehicle capacity")

    # Run optimization button
    run_optimization_btn = st.button("Generate Optimal Routes")

    # Check if we should display results (either have results in session or button was clicked)
    if run_optimization_btn or st.session_state.optimization_result is not None:
        if run_optimization_btn:
            # Run new optimization
            with st.spinner("Calculating optimal routes..."):
                start_time = time.time()

                # Filter out completed deliveries before optimization
                if 'status' in delivery_data.columns:
                    pending_deliveries = delivery_data[delivery_data['status'] != 'Delivered']
                else:
                    pending_deliveries = delivery_data

                # Prepare data for optimization - USE PENDING DELIVERIES ONLY
                # run_optimization is defined later in this module.
                optimization_result = run_optimization(
                    delivery_data=pending_deliveries,
                    vehicle_data=available_vehicles.iloc[:max_vehicles],
                    distance_matrix=distance_matrix,
                    time_matrix=time_matrix,
                    locations=locations,
                    priority_weight=priority_weight,
                    time_window_weight=time_window_weight,
                    balance_weight=balance_weight,
                    min_time_window_compliance=min_time_window_compliance/100.0  # Convert to decimal
                )

                end_time = time.time()
                st.success(f"Optimization completed in {end_time - start_time:.2f} seconds")

                # Store results in session state so they survive Streamlit reruns.
                st.session_state.optimization_result = optimization_result
        else:
            # Use existing results
            optimization_result = st.session_state.optimization_result

        # Filter pending deliveries before displaying results
        if 'status' in delivery_data.columns:
            pending_deliveries = delivery_data[delivery_data['status'] != 'Delivered']
        else:
            pending_deliveries = delivery_data

        # Display results with filtered pending deliveries
        # (display_optimization_results is defined elsewhere in this module).
        display_optimization_results(
            optimization_result=optimization_result,
            delivery_data=pending_deliveries,  # ← CHANGED: Use pending_deliveries instead
            vehicle_data=available_vehicles.iloc[:max_vehicles],
            distance_matrix=distance_matrix,
            time_matrix=time_matrix,
            locations=locations
        )
def load_all_data():
    """
    Load every dataset required by the route optimizer.

    Returns:
        tuple of (delivery_data, vehicle_data, distance_matrix, time_matrix,
        locations) on success, or None when any required file is missing —
        in which case an error and a generation hint are shown in the UI.
    """
    # Project root is three levels above this file (src/pages/_optimize_page.py).
    project_root = Path(__file__).resolve().parent.parent.parent
    data_dir = project_root / 'data'

    # (path, human-readable label) for every required input file, in load order.
    required_files = [
        (data_dir / 'delivery-data' / 'delivery_data.csv', "delivery data"),
        (data_dir / 'vehicle-data' / 'vehicle_data.csv', "vehicle data"),
        (data_dir / 'time-matrix' / 'distance_matrix.csv', "distance matrix"),
        (data_dir / 'time-matrix' / 'base_time_matrix.csv', "time matrix"),
        (data_dir / 'time-matrix' / 'locations.csv', "locations data"),
    ]

    # Collect every missing file so the user sees the full list at once.
    missing_files = [label for path, label in required_files if not os.path.exists(path)]
    if missing_files:
        st.error(f"Missing required data: {', '.join(missing_files)}")
        st.info("Please generate all data first by running: python src/utils/generate_all_data.py")
        return None

    delivery_path = required_files[0][0]
    vehicle_path = required_files[1][0]
    distance_matrix_path = required_files[2][0]
    time_matrix_path = required_files[3][0]
    locations_path = required_files[4][0]

    # The two matrices use their first column as the location index.
    return (
        pd.read_csv(delivery_path),
        pd.read_csv(vehicle_path),
        pd.read_csv(distance_matrix_path, index_col=0),
        pd.read_csv(time_matrix_path, index_col=0),
        pd.read_csv(locations_path),
    )
| 359 |
+
def run_optimization(delivery_data, vehicle_data, distance_matrix, time_matrix, locations,
|
| 360 |
+
priority_weight, time_window_weight, balance_weight, min_time_window_compliance=0.75):
|
| 361 |
+
"""
|
| 362 |
+
Run the route optimization algorithm using Google OR-Tools
|
| 363 |
+
|
| 364 |
+
Parameters:
|
| 365 |
+
delivery_data (pd.DataFrame): DataFrame containing delivery information
|
| 366 |
+
vehicle_data (pd.DataFrame): DataFrame containing vehicle information
|
| 367 |
+
distance_matrix (pd.DataFrame): Distance matrix between locations
|
| 368 |
+
time_matrix (pd.DataFrame): Time matrix between locations
|
| 369 |
+
locations (pd.DataFrame): DataFrame with location details
|
| 370 |
+
priority_weight (float): Weight for delivery priority in optimization (α)
|
| 371 |
+
time_window_weight (float): Weight for time window adherence (β)
|
| 372 |
+
balance_weight (float): Weight for balancing load across vehicles (γ)
|
| 373 |
+
min_time_window_compliance (float): Minimum required time window compliance (δ)
|
| 374 |
+
|
| 375 |
+
Returns:
|
| 376 |
+
dict: Optimization results
|
| 377 |
+
"""
|
| 378 |
+
st.write("Setting up optimization model with OR-Tools...")
|
| 379 |
+
|
| 380 |
+
# Extract required data for optimization
|
| 381 |
+
num_vehicles = len(vehicle_data)
|
| 382 |
+
num_deliveries = len(delivery_data)
|
| 383 |
+
|
| 384 |
+
# Create a list of all locations (depots + delivery points)
|
| 385 |
+
all_locations = []
|
| 386 |
+
delivery_locations = []
|
| 387 |
+
depot_locations = []
|
| 388 |
+
vehicle_capacities = []
|
| 389 |
+
|
| 390 |
+
# First, add depot locations (one per vehicle)
|
| 391 |
+
for i, (_, vehicle) in enumerate(vehicle_data.iterrows()):
|
| 392 |
+
depot_loc = {
|
| 393 |
+
'id': vehicle['vehicle_id'],
|
| 394 |
+
'type': 'depot',
|
| 395 |
+
'index': i, # Important for mapping to OR-Tools indices
|
| 396 |
+
'latitude': vehicle['depot_latitude'],
|
| 397 |
+
'longitude': vehicle['depot_longitude'],
|
| 398 |
+
'vehicle_index': i
|
| 399 |
+
}
|
| 400 |
+
depot_locations.append(depot_loc)
|
| 401 |
+
all_locations.append(depot_loc)
|
| 402 |
+
|
| 403 |
+
# Add vehicle capacity
|
| 404 |
+
if 'max_weight_kg' in vehicle:
|
| 405 |
+
vehicle_capacities.append(int(vehicle['max_weight_kg'] * 100)) # Convert to integers (OR-Tools works better with integers)
|
| 406 |
+
else:
|
| 407 |
+
vehicle_capacities.append(1000) # Default capacity of 10kg (1000 in scaled units)
|
| 408 |
+
|
| 409 |
+
# Then add delivery locations
|
| 410 |
+
for i, (_, delivery) in enumerate(delivery_data.iterrows()):
|
| 411 |
+
# Determine priority factor (will be used in the objective function)
|
| 412 |
+
priority_factor = 1.0
|
| 413 |
+
if 'priority' in delivery:
|
| 414 |
+
if delivery['priority'] == 'High':
|
| 415 |
+
priority_factor = 0.5 # Higher priority = lower cost
|
| 416 |
+
elif delivery['priority'] == 'Low':
|
| 417 |
+
priority_factor = 2.0 # Lower priority = higher cost
|
| 418 |
+
|
| 419 |
+
# Calculate delivery demand (weight)
|
| 420 |
+
demand = int(delivery.get('weight_kg', 1.0) * 100) # Convert to integers
|
| 421 |
+
|
| 422 |
+
delivery_loc = {
|
| 423 |
+
'id': delivery['delivery_id'],
|
| 424 |
+
'type': 'delivery',
|
| 425 |
+
'index': num_vehicles + i, # Important for mapping to OR-Tools indices
|
| 426 |
+
'latitude': delivery['latitude'],
|
| 427 |
+
'longitude': delivery['longitude'],
|
| 428 |
+
'priority': delivery.get('priority', 'Medium'),
|
| 429 |
+
'priority_factor': priority_factor,
|
| 430 |
+
'weight_kg': delivery.get('weight_kg', 1.0),
|
| 431 |
+
'demand': demand,
|
| 432 |
+
'time_window': delivery.get('time_window', '09:00-17:00'),
|
| 433 |
+
'customer_name': delivery.get('customer_name', 'Unknown')
|
| 434 |
+
}
|
| 435 |
+
delivery_locations.append(delivery_loc)
|
| 436 |
+
all_locations.append(delivery_loc)
|
| 437 |
+
|
| 438 |
+
# Create distance and time matrices for OR-Tools
|
| 439 |
+
dist_matrix = np.zeros((len(all_locations), len(all_locations)))
|
| 440 |
+
time_matrix_mins = np.zeros((len(all_locations), len(all_locations)))
|
| 441 |
+
|
| 442 |
+
# Use the provided distance_matrix if it's the right size, otherwise compute distances
|
| 443 |
+
if isinstance(distance_matrix, pd.DataFrame) and len(distance_matrix) == len(all_locations):
|
| 444 |
+
# Convert dataframe to numpy array
|
| 445 |
+
dist_matrix = distance_matrix.values
|
| 446 |
+
time_matrix_mins = time_matrix.values
|
| 447 |
+
else:
|
| 448 |
+
# Compute simple Euclidean distances (this is a fallback)
|
| 449 |
+
for i in range(len(all_locations)):
|
| 450 |
+
for j in range(len(all_locations)):
|
| 451 |
+
if i == j:
|
| 452 |
+
continue
|
| 453 |
+
|
| 454 |
+
# Approximate distance in km (very rough)
|
| 455 |
+
lat1, lon1 = all_locations[i]['latitude'], all_locations[i]['longitude']
|
| 456 |
+
lat2, lon2 = all_locations[j]['latitude'], all_locations[j]['longitude']
|
| 457 |
+
|
| 458 |
+
# Simple Euclidean distance (for demo purposes)
|
| 459 |
+
dist = ((lat1 - lat2) ** 2 + (lon1 - lon2) ** 2) ** 0.5 * 111 # Convert to km
|
| 460 |
+
dist_matrix[i, j] = dist
|
| 461 |
+
time_matrix_mins[i, j] = dist * 2 # Rough estimate: 30km/h -> 2 mins per km
|
| 462 |
+
|
| 463 |
+
# Prepare demand array (0 for depots, actual demand for deliveries)
|
| 464 |
+
demands = [0] * num_vehicles + [d['demand'] for d in delivery_locations]
|
| 465 |
+
|
| 466 |
+
# Calculate total weight of all deliveries
|
| 467 |
+
total_delivery_weight = sum(d['demand'] for d in delivery_locations)
|
| 468 |
+
|
| 469 |
+
# OR-Tools setup
|
| 470 |
+
# Create the routing index manager
|
| 471 |
+
manager = pywrapcp.RoutingIndexManager(
|
| 472 |
+
len(all_locations), # Number of nodes (depots + deliveries)
|
| 473 |
+
num_vehicles, # Number of vehicles
|
| 474 |
+
list(range(num_vehicles)), # Vehicle start nodes (depot indices)
|
| 475 |
+
list(range(num_vehicles)) # Vehicle end nodes (back to depots)
|
| 476 |
+
)
|
| 477 |
+
|
| 478 |
+
# Create Routing Model
|
| 479 |
+
routing = pywrapcp.RoutingModel(manager)
|
| 480 |
+
|
| 481 |
+
# Define distance callback with priority weighting
|
| 482 |
+
# This implements the objective function: min sum_{i,j,k} c_jk * x_ijk * p_k^α
|
| 483 |
+
def distance_callback(from_index, to_index):
|
| 484 |
+
"""Returns the weighted distance between the two nodes."""
|
| 485 |
+
# Convert from routing variable Index to distance matrix NodeIndex
|
| 486 |
+
from_node = manager.IndexToNode(from_index)
|
| 487 |
+
to_node = manager.IndexToNode(to_index)
|
| 488 |
+
|
| 489 |
+
# Get base distance
|
| 490 |
+
base_distance = int(dist_matrix[from_node, to_node] * 1000) # Convert to integers
|
| 491 |
+
|
| 492 |
+
# Apply priority weighting to destination node (if it's a delivery)
|
| 493 |
+
if to_node >= num_vehicles: # It's a delivery node
|
| 494 |
+
delivery_idx = to_node - num_vehicles
|
| 495 |
+
# Apply the priority factor with the priority weight (α)
|
| 496 |
+
priority_factor = delivery_locations[delivery_idx]['priority_factor']
|
| 497 |
+
# Higher priority_weight = stronger effect of priority on cost
|
| 498 |
+
priority_multiplier = priority_factor ** priority_weight
|
| 499 |
+
return int(base_distance * priority_multiplier)
|
| 500 |
+
|
| 501 |
+
return base_distance
|
| 502 |
+
|
| 503 |
+
# Define time callback
|
| 504 |
+
def time_callback(from_index, to_index):
|
| 505 |
+
"""Returns the travel time between the two nodes."""
|
| 506 |
+
# Convert from routing variable Index to time matrix NodeIndex
|
| 507 |
+
from_node = manager.IndexToNode(from_index)
|
| 508 |
+
to_node = manager.IndexToNode(to_index)
|
| 509 |
+
return int(time_matrix_mins[from_node, to_node] * 60) # Convert minutes to seconds (integers)
|
| 510 |
+
|
| 511 |
+
# Define service time callback - time spent at each delivery
|
| 512 |
+
def service_time_callback(from_index):
|
| 513 |
+
"""Returns the service time for the node."""
|
| 514 |
+
# Service time is 0 for depots and 10 minutes (600 seconds) for deliveries
|
| 515 |
+
node_idx = manager.IndexToNode(from_index)
|
| 516 |
+
if node_idx >= num_vehicles: # It's a delivery node
|
| 517 |
+
return 600 # 10 minutes in seconds
|
| 518 |
+
return 0 # No service time for depots
|
| 519 |
+
|
| 520 |
+
# Define demand callback
|
| 521 |
+
def demand_callback(from_index):
    """Return the demand carried by the node at the given routing index."""
    # Translate the solver's routing index into a position in the
    # demands array before looking up the value.
    node = manager.IndexToNode(from_index)
    return demands[node]
|
| 526 |
+
|
| 527 |
+
# Register callbacks
|
| 528 |
+
transit_callback_index = routing.RegisterTransitCallback(distance_callback)
|
| 529 |
+
time_callback_index = routing.RegisterTransitCallback(time_callback)
|
| 530 |
+
service_callback_index = routing.RegisterUnaryTransitCallback(service_time_callback)
|
| 531 |
+
demand_callback_index = routing.RegisterUnaryTransitCallback(demand_callback)
|
| 532 |
+
|
| 533 |
+
# Set the arc cost evaluator for all vehicles - this is our objective function
|
| 534 |
+
routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
|
| 535 |
+
|
| 536 |
+
# Add capacity dimension - Hard Constraint 2: Vehicle Capacity Limits
|
| 537 |
+
routing.AddDimensionWithVehicleCapacity(
|
| 538 |
+
demand_callback_index,
|
| 539 |
+
0, # null capacity slack
|
| 540 |
+
vehicle_capacities, # vehicle maximum capacities
|
| 541 |
+
True, # start cumul to zero
|
| 542 |
+
'Capacity'
|
| 543 |
+
)
|
| 544 |
+
|
| 545 |
+
capacity_dimension = routing.GetDimensionOrDie('Capacity')
|
| 546 |
+
|
| 547 |
+
# Add load balancing penalties - Soft Constraint 3: Load Balancing Penalties
|
| 548 |
+
if balance_weight > 0.01:
|
| 549 |
+
# Calculate target weight per vehicle (ideal balanced load)
|
| 550 |
+
target_weight = total_delivery_weight / len(vehicle_capacities)
|
| 551 |
+
|
| 552 |
+
for i in range(num_vehicles):
|
| 553 |
+
# Get vehicle capacity
|
| 554 |
+
vehicle_capacity = vehicle_capacities[i]
|
| 555 |
+
|
| 556 |
+
# Set penalties for deviating from balanced load
|
| 557 |
+
# Scale penalty based on the balance_weight parameter (γ)
|
| 558 |
+
balance_penalty = int(10000 * balance_weight)
|
| 559 |
+
|
| 560 |
+
# Add soft bounds around the target weight
|
| 561 |
+
# Lower bound: Don't penalize for being under the target if there's not enough weight
|
| 562 |
+
lower_target = max(0, int(target_weight * 0.8))
|
| 563 |
+
capacity_dimension.SetCumulVarSoftLowerBound(
|
| 564 |
+
routing.End(i), lower_target, balance_penalty
|
| 565 |
+
)
|
| 566 |
+
|
| 567 |
+
# Upper bound: Penalize for going over the target
|
| 568 |
+
# But allow using more capacity if necessary to assign all deliveries
|
| 569 |
+
upper_target = min(vehicle_capacity, int(target_weight * 1.2))
|
| 570 |
+
capacity_dimension.SetCumulVarSoftUpperBound(
|
| 571 |
+
routing.End(i), upper_target, balance_penalty
|
| 572 |
+
)
|
| 573 |
+
|
| 574 |
+
# Add time dimension with service times
|
| 575 |
+
# This implements Hard Constraint 5: Time Continuity and
|
| 576 |
+
# Hard Constraint 6: Maximum Route Duration
|
| 577 |
+
routing.AddDimension(
|
| 578 |
+
time_callback_index,
|
| 579 |
+
60 * 60, # Allow waiting time of 60 mins
|
| 580 |
+
24 * 60 * 60, # Maximum time per vehicle (24 hours in seconds) - Hard Constraint 6
|
| 581 |
+
False, # Don't force start cumul to zero
|
| 582 |
+
'Time'
|
| 583 |
+
)
|
| 584 |
+
time_dimension = routing.GetDimensionOrDie('Time')
|
| 585 |
+
|
| 586 |
+
# Add service time to each node's visit duration
|
| 587 |
+
for node_idx in range(len(all_locations)):
|
| 588 |
+
index = manager.NodeToIndex(node_idx)
|
| 589 |
+
time_dimension.SetCumulVarSoftUpperBound(
|
| 590 |
+
index,
|
| 591 |
+
24 * 60 * 60, # 24 hours in seconds
|
| 592 |
+
1000000 # High penalty for violating the 24-hour constraint
|
| 593 |
+
)
|
| 594 |
+
time_dimension.SlackVar(index).SetValue(0)
|
| 595 |
+
|
| 596 |
+
# Store time window variables to track compliance
|
| 597 |
+
time_window_vars = []
|
| 598 |
+
compliance_threshold = int(min_time_window_compliance * num_deliveries)
|
| 599 |
+
|
| 600 |
+
# Add time window constraints - Hard Constraint 7: Time Window Compliance
|
| 601 |
+
if time_window_weight > 0.01:
|
| 602 |
+
# Create binary variables to track time window compliance
|
| 603 |
+
for delivery_idx, delivery in enumerate(delivery_locations):
|
| 604 |
+
if 'time_window' in delivery and delivery['time_window']:
|
| 605 |
+
try:
|
| 606 |
+
start_time_str, end_time_str = delivery['time_window'].split('-')
|
| 607 |
+
start_hour, start_min = map(int, start_time_str.split(':'))
|
| 608 |
+
end_hour, end_min = map(int, end_time_str.split(':'))
|
| 609 |
+
|
| 610 |
+
# Convert to seconds since midnight
|
| 611 |
+
start_time_sec = (start_hour * 60 + start_min) * 60
|
| 612 |
+
end_time_sec = (end_hour * 60 + end_min) * 60
|
| 613 |
+
|
| 614 |
+
# Get the node index
|
| 615 |
+
index = manager.NodeToIndex(num_vehicles + delivery_idx)
|
| 616 |
+
|
| 617 |
+
# Add soft upper bound penalty with very high weight for late deliveries
|
| 618 |
+
# This implements Soft Constraint 2: Time Window Penalties
|
| 619 |
+
time_dimension.SetCumulVarSoftUpperBound(
|
| 620 |
+
index,
|
| 621 |
+
end_time_sec,
|
| 622 |
+
int(1000000 * time_window_weight) # High penalty for being late
|
| 623 |
+
)
|
| 624 |
+
|
| 625 |
+
# Don't penalize for early deliveries, just wait
|
| 626 |
+
time_dimension.CumulVar(index).SetMin(start_time_sec)
|
| 627 |
+
|
| 628 |
+
# Track this time window for compliance calculation
|
| 629 |
+
time_window_vars.append((index, start_time_sec, end_time_sec))
|
| 630 |
+
except:
|
| 631 |
+
# Skip if time window format is invalid
|
| 632 |
+
pass
|
| 633 |
+
|
| 634 |
+
# Hard Constraint 1: All Deliveries Must Be Assigned
|
| 635 |
+
# This is enforced by not creating disjunctions with penalties, but instead making all nodes mandatory
|
| 636 |
+
|
| 637 |
+
# Hard Constraint 3: Flow Conservation (Route Continuity) is inherently enforced by OR-Tools
|
| 638 |
+
|
| 639 |
+
# Hard Constraint 4: Start and End at Assigned Depots is enforced by the RoutingIndexManager setup
|
| 640 |
+
|
| 641 |
+
# Set parameters for the solver
|
| 642 |
+
search_parameters = pywrapcp.DefaultRoutingSearchParameters()
|
| 643 |
+
|
| 644 |
+
# Use guided local search to find good solutions
|
| 645 |
+
search_parameters.local_search_metaheuristic = (
|
| 646 |
+
routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH
|
| 647 |
+
)
|
| 648 |
+
|
| 649 |
+
# Use path cheapest arc with resource constraints as the first solution strategy
|
| 650 |
+
search_parameters.first_solution_strategy = (
|
| 651 |
+
routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC
|
| 652 |
+
)
|
| 653 |
+
|
| 654 |
+
# Give the solver enough time to find a good solution
|
| 655 |
+
search_parameters.time_limit.seconds = 10
|
| 656 |
+
|
| 657 |
+
# Enable logging
|
| 658 |
+
search_parameters.log_search = True
|
| 659 |
+
|
| 660 |
+
# Try to enforce the time window compliance threshold
|
| 661 |
+
if compliance_threshold > 0:
|
| 662 |
+
# First try to solve with all deliveries required
|
| 663 |
+
routing.CloseModelWithParameters(search_parameters)
|
| 664 |
+
|
| 665 |
+
# Solve the problem
|
| 666 |
+
st.write(f"Solving optimization model with {num_deliveries} deliveries and {num_vehicles} vehicles...")
|
| 667 |
+
st.write(f"Target: At least {compliance_threshold} of {num_deliveries} deliveries ({min_time_window_compliance*100:.0f}%) must be within time windows")
|
| 668 |
+
|
| 669 |
+
solution = routing.SolveWithParameters(search_parameters)
|
| 670 |
+
else:
|
| 671 |
+
# If no time window compliance required, solve normally
|
| 672 |
+
solution = routing.SolveWithParameters(search_parameters)
|
| 673 |
+
|
| 674 |
+
# If no solution was found, try a relaxed version (allow some deliveries to be unassigned)
|
| 675 |
+
if not solution:
|
| 676 |
+
st.warning("Could not find a solution with all deliveries assigned. Trying a relaxed version...")
|
| 677 |
+
|
| 678 |
+
# Create a new model with disjunctions to allow dropping some deliveries with high penalties
|
| 679 |
+
routing = pywrapcp.RoutingModel(manager)
|
| 680 |
+
|
| 681 |
+
# Re-register callbacks
|
| 682 |
+
transit_callback_index = routing.RegisterTransitCallback(distance_callback)
|
| 683 |
+
time_callback_index = routing.RegisterTransitCallback(time_callback)
|
| 684 |
+
service_callback_index = routing.RegisterUnaryTransitCallback(service_time_callback)
|
| 685 |
+
demand_callback_index = routing.RegisterUnaryTransitCallback(demand_callback)
|
| 686 |
+
|
| 687 |
+
routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
|
| 688 |
+
|
| 689 |
+
# Add capacity dimension again
|
| 690 |
+
routing.AddDimensionWithVehicleCapacity(
|
| 691 |
+
demand_callback_index,
|
| 692 |
+
0, vehicle_capacities, True, 'Capacity'
|
| 693 |
+
)
|
| 694 |
+
|
| 695 |
+
# Add time dimension again
|
| 696 |
+
routing.AddDimension(
|
| 697 |
+
time_callback_index,
|
| 698 |
+
60 * 60, 24 * 60 * 60, False, 'Time'
|
| 699 |
+
)
|
| 700 |
+
time_dimension = routing.GetDimensionOrDie('Time')
|
| 701 |
+
|
| 702 |
+
# Add disjunctions with very high penalties to try to include all deliveries
|
| 703 |
+
for delivery_idx in range(num_deliveries):
|
| 704 |
+
index = manager.NodeToIndex(num_vehicles + delivery_idx)
|
| 705 |
+
routing.AddDisjunction([index], 1000000) # High penalty but allows dropping if necessary
|
| 706 |
+
|
| 707 |
+
# Try to solve with relaxed constraints
|
| 708 |
+
search_parameters.time_limit.seconds = 15 # Give more time for relaxed version
|
| 709 |
+
solution = routing.SolveWithParameters(search_parameters)
|
| 710 |
+
|
| 711 |
+
if not solution:
|
| 712 |
+
st.error("Could not find any solution. Try increasing the number of vehicles or relaxing other constraints.")
|
| 713 |
+
return {
|
| 714 |
+
'routes': {},
|
| 715 |
+
'stats': {},
|
| 716 |
+
'parameters': {
|
| 717 |
+
'priority_weight': priority_weight,
|
| 718 |
+
'time_window_weight': time_window_weight,
|
| 719 |
+
'balance_weight': balance_weight,
|
| 720 |
+
'min_time_window_compliance': min_time_window_compliance
|
| 721 |
+
}
|
| 722 |
+
}
|
| 723 |
+
|
| 724 |
+
# Extract solution
|
| 725 |
+
optimized_routes = {}
|
| 726 |
+
route_stats = {}
|
| 727 |
+
|
| 728 |
+
if solution:
|
| 729 |
+
st.success("Solution found!")
|
| 730 |
+
|
| 731 |
+
total_time_window_compliance = 0
|
| 732 |
+
total_deliveries_assigned = 0
|
| 733 |
+
|
| 734 |
+
for vehicle_idx in range(num_vehicles):
|
| 735 |
+
route = []
|
| 736 |
+
vehicle_id = vehicle_data.iloc[vehicle_idx]['vehicle_id']
|
| 737 |
+
|
| 738 |
+
# Get the vehicle information
|
| 739 |
+
vehicle_info = {
|
| 740 |
+
'id': vehicle_id,
|
| 741 |
+
'type': vehicle_data.iloc[vehicle_idx].get('vehicle_type', 'Standard'),
|
| 742 |
+
'capacity': vehicle_data.iloc[vehicle_idx].get('max_weight_kg', 1000),
|
| 743 |
+
'depot_latitude': vehicle_data.iloc[vehicle_idx]['depot_latitude'],
|
| 744 |
+
'depot_longitude': vehicle_data.iloc[vehicle_idx]['depot_longitude']
|
| 745 |
+
}
|
| 746 |
+
|
| 747 |
+
# Initialize variables for tracking
|
| 748 |
+
index = routing.Start(vehicle_idx)
|
| 749 |
+
total_distance = 0
|
| 750 |
+
total_time = 0
|
| 751 |
+
total_load = 0
|
| 752 |
+
time_window_compliant = 0
|
| 753 |
+
total_deliveries = 0
|
| 754 |
+
|
| 755 |
+
# Initialize variables to track current position and time
|
| 756 |
+
current_time_sec = 8 * 3600 # Start at 8:00 AM (8 hours * 3600 seconds)
|
| 757 |
+
|
| 758 |
+
while not routing.IsEnd(index):
|
| 759 |
+
# Get the node index in the original data
|
| 760 |
+
node_idx = manager.IndexToNode(index)
|
| 761 |
+
|
| 762 |
+
# Skip depot nodes (they're already at the start)
|
| 763 |
+
if node_idx >= num_vehicles:
|
| 764 |
+
# This is a delivery node - get the corresponding delivery
|
| 765 |
+
delivery_idx = node_idx - num_vehicles
|
| 766 |
+
delivery = delivery_locations[delivery_idx].copy() # Create a copy to modify
|
| 767 |
+
|
| 768 |
+
# Calculate estimated arrival time in minutes since start of day
|
| 769 |
+
arrival_time_sec = solution.Min(time_dimension.CumulVar(index))
|
| 770 |
+
arrival_time_mins = arrival_time_sec // 60
|
| 771 |
+
|
| 772 |
+
# Store the estimated arrival time in the delivery
|
| 773 |
+
delivery['estimated_arrival'] = arrival_time_mins
|
| 774 |
+
|
| 775 |
+
# Check time window compliance
|
| 776 |
+
if 'time_window' in delivery and delivery['time_window']:
|
| 777 |
+
try:
|
| 778 |
+
start_time_str, end_time_str = delivery['time_window'].split('-')
|
| 779 |
+
start_hour, start_min = map(int, start_time_str.split(':'))
|
| 780 |
+
end_hour, end_min = map(int, end_time_str.split(':'))
|
| 781 |
+
|
| 782 |
+
# Convert to minutes for comparison
|
| 783 |
+
start_mins = start_hour * 60 + start_min
|
| 784 |
+
end_mins = end_hour * 60 + end_min
|
| 785 |
+
|
| 786 |
+
# Check if delivery is within time window
|
| 787 |
+
on_time = False
|
| 788 |
+
|
| 789 |
+
# If arrival <= end_time, consider it on-time (including early arrivals)
|
| 790 |
+
if arrival_time_mins <= end_mins:
|
| 791 |
+
on_time = True
|
| 792 |
+
time_window_compliant += 1
|
| 793 |
+
total_time_window_compliance += 1
|
| 794 |
+
|
| 795 |
+
delivery['within_time_window'] = on_time
|
| 796 |
+
except Exception as e:
|
| 797 |
+
st.warning(f"Error parsing time window for delivery {delivery['id']}: {str(e)}")
|
| 798 |
+
delivery['within_time_window'] = False
|
| 799 |
+
|
| 800 |
+
# Add to route
|
| 801 |
+
route.append(delivery)
|
| 802 |
+
total_deliveries += 1
|
| 803 |
+
total_deliveries_assigned += 1
|
| 804 |
+
|
| 805 |
+
# Add to total load
|
| 806 |
+
total_load += delivery['demand'] / 100 # Convert back to original units
|
| 807 |
+
|
| 808 |
+
# Move to the next node
|
| 809 |
+
previous_idx = index
|
| 810 |
+
index = solution.Value(routing.NextVar(index))
|
| 811 |
+
|
| 812 |
+
# Add distance and time from previous to current
|
| 813 |
+
if not routing.IsEnd(index):
|
| 814 |
+
previous_node = manager.IndexToNode(previous_idx)
|
| 815 |
+
next_node = manager.IndexToNode(index)
|
| 816 |
+
|
| 817 |
+
# Add distance between these points
|
| 818 |
+
segment_distance = dist_matrix[previous_node, next_node]
|
| 819 |
+
total_distance += segment_distance
|
| 820 |
+
|
| 821 |
+
# Add travel time between these points
|
| 822 |
+
segment_time_sec = int(time_matrix_mins[previous_node, next_node] * 60)
|
| 823 |
+
total_time += segment_time_sec / 60 # Convert seconds back to minutes
|
| 824 |
+
|
| 825 |
+
# Store the route if it's not empty
|
| 826 |
+
if route:
|
| 827 |
+
optimized_routes[vehicle_id] = route
|
| 828 |
+
|
| 829 |
+
# Calculate time window compliance percentage
|
| 830 |
+
time_window_percent = (time_window_compliant / total_deliveries * 100) if total_deliveries > 0 else 0
|
| 831 |
+
|
| 832 |
+
# Store route statistics
|
| 833 |
+
route_stats[vehicle_id] = {
|
| 834 |
+
'vehicle_type': vehicle_info['type'],
|
| 835 |
+
'capacity_kg': vehicle_info['capacity'],
|
| 836 |
+
'deliveries': len(route),
|
| 837 |
+
'total_distance_km': round(total_distance, 2),
|
| 838 |
+
'estimated_time_mins': round(total_time),
|
| 839 |
+
'total_load_kg': round(total_load, 2),
|
| 840 |
+
'time_window_compliant': time_window_compliant,
|
| 841 |
+
'time_window_compliance': time_window_percent
|
| 842 |
+
}
|
| 843 |
+
|
| 844 |
+
# Check if overall time window compliance meets the minimum requirement
|
| 845 |
+
overall_compliance = 0
|
| 846 |
+
if total_deliveries_assigned > 0:
|
| 847 |
+
overall_compliance = (total_time_window_compliance / total_deliveries_assigned)
|
| 848 |
+
|
| 849 |
+
if overall_compliance < min_time_window_compliance:
|
| 850 |
+
st.warning(f"Solution found, but time window compliance ({overall_compliance*100:.1f}%) is below the minimum required ({min_time_window_compliance*100:.0f}%).")
|
| 851 |
+
st.info("Consider adjusting parameters: increase the number of vehicles, reduce the minimum compliance requirement, or adjust time window importance.")
|
| 852 |
+
else:
|
| 853 |
+
st.success(f"Solution meets time window compliance requirement: {overall_compliance*100:.1f}% (minimum required: {min_time_window_compliance*100:.0f}%)")
|
| 854 |
+
else:
|
| 855 |
+
st.error("No solution found. Try adjusting the parameters.")
|
| 856 |
+
optimized_routes = {}
|
| 857 |
+
route_stats = {}
|
| 858 |
+
|
| 859 |
+
return {
|
| 860 |
+
'routes': optimized_routes,
|
| 861 |
+
'stats': route_stats,
|
| 862 |
+
'parameters': {
|
| 863 |
+
'priority_weight': priority_weight,
|
| 864 |
+
'time_window_weight': time_window_weight,
|
| 865 |
+
'balance_weight': balance_weight,
|
| 866 |
+
'min_time_window_compliance': min_time_window_compliance
|
| 867 |
+
}
|
| 868 |
+
}
|
| 869 |
+
|
| 870 |
+
def display_optimization_results(optimization_result, delivery_data, vehicle_data,
|
| 871 |
+
distance_matrix, time_matrix, locations):
|
| 872 |
+
"""
|
| 873 |
+
Display the optimization results
|
| 874 |
+
|
| 875 |
+
Parameters:
|
| 876 |
+
optimization_result (dict): Result from the optimization algorithm
|
| 877 |
+
delivery_data (pd.DataFrame): Delivery information
|
| 878 |
+
vehicle_data (pd.DataFrame): Vehicle information
|
| 879 |
+
distance_matrix (pd.DataFrame): Distance matrix between locations
|
| 880 |
+
time_matrix (pd.DataFrame): Time matrix between locations
|
| 881 |
+
locations (pd.DataFrame): Location details
|
| 882 |
+
"""
|
| 883 |
+
# Define colors for vehicle routes
|
| 884 |
+
colors = ['blue', 'red', 'green', 'purple', 'orange', 'darkblue',
|
| 885 |
+
'darkred', 'darkgreen', 'cadetblue', 'darkpurple', 'pink',
|
| 886 |
+
'lightblue', 'lightred', 'lightgreen', 'gray', 'black', 'lightgray']
|
| 887 |
+
|
| 888 |
+
routes = optimization_result['routes']
|
| 889 |
+
|
| 890 |
+
# Display summary statistics
|
| 891 |
+
st.subheader("Optimization Results")
|
| 892 |
+
|
| 893 |
+
# Calculate overall statistics
|
| 894 |
+
total_deliveries = sum(len(route) for route in routes.values())
|
| 895 |
+
active_vehicles = sum(1 for route in routes.values() if len(route) > 0)
|
| 896 |
+
|
| 897 |
+
# Calculate additional metrics
|
| 898 |
+
total_distance = sum(stats.get('total_distance_km', 0) for stats in optimization_result.get('stats', {}).values())
|
| 899 |
+
total_time_mins = sum(stats.get('estimated_time_mins', 0) for stats in optimization_result.get('stats', {}).values())
|
| 900 |
+
|
| 901 |
+
# Calculate time window compliance (on-time percentage)
|
| 902 |
+
on_time_deliveries = 0
|
| 903 |
+
total_route_deliveries = 0
|
| 904 |
+
|
| 905 |
+
# Count deliveries within time window
|
| 906 |
+
for vehicle_id, route in routes.items():
|
| 907 |
+
stats = optimization_result.get('stats', {}).get(vehicle_id, {})
|
| 908 |
+
|
| 909 |
+
# Only process if we have stats for this vehicle
|
| 910 |
+
if stats and 'time_window_compliant' in stats:
|
| 911 |
+
# Use the actual count of compliant deliveries, not the percentage
|
| 912 |
+
on_time_deliveries += stats['time_window_compliant']
|
| 913 |
+
else:
|
| 914 |
+
# Try to estimate based on delivery details
|
| 915 |
+
for delivery in route:
|
| 916 |
+
if 'time_window' in delivery and 'estimated_arrival' in delivery:
|
| 917 |
+
# Format is typically "HH:MM-HH:MM"
|
| 918 |
+
try:
|
| 919 |
+
time_window = delivery['time_window']
|
| 920 |
+
start_time_str, end_time_str = time_window.split('-')
|
| 921 |
+
|
| 922 |
+
# Convert to minutes for comparison
|
| 923 |
+
start_mins = int(start_time_str.split(':')[0]) * 60 + int(start_time_str.split(':')[1])
|
| 924 |
+
end_mins = int(end_time_str.split(':')[0]) * 60 + int(end_time_str.split(':')[1])
|
| 925 |
+
arrival_mins = delivery.get('estimated_arrival', 0)
|
| 926 |
+
|
| 927 |
+
# Only consider deliveries late if they arrive after the end time
|
| 928 |
+
if arrival_mins <= end_mins:
|
| 929 |
+
on_time_deliveries += 1
|
| 930 |
+
except:
|
| 931 |
+
pass
|
| 932 |
+
|
| 933 |
+
total_route_deliveries += len(route)
|
| 934 |
+
|
| 935 |
+
# Ensure we have a valid number for on-time percentage
|
| 936 |
+
delivery_ontime_percent = 0
|
| 937 |
+
if total_route_deliveries > 0:
|
| 938 |
+
delivery_ontime_percent = (on_time_deliveries / total_route_deliveries) * 100
|
| 939 |
+
|
| 940 |
+
# Display metrics in a nicer layout with columns
|
| 941 |
+
st.write("### Overall Performance")
|
| 942 |
+
col1, col2, col3 = st.columns(3)
|
| 943 |
+
with col1:
|
| 944 |
+
st.metric("Deliveries Assigned", f"{total_deliveries}/{len(delivery_data)}")
|
| 945 |
+
st.metric("Vehicles Used", f"{active_vehicles}/{len(vehicle_data)}")
|
| 946 |
+
|
| 947 |
+
with col2:
|
| 948 |
+
st.metric("Total Distance", f"{total_distance:.1f} km")
|
| 949 |
+
st.metric("Total Time", f"{int(total_time_mins//60)}h {int(total_time_mins%60)}m")
|
| 950 |
+
|
| 951 |
+
with col3:
|
| 952 |
+
st.metric("Time Window Compliance", f"{delivery_ontime_percent:.0f}%")
|
| 953 |
+
|
| 954 |
+
# Calculate route efficiency (meters per delivery)
|
| 955 |
+
if total_deliveries > 0:
|
| 956 |
+
efficiency = (total_distance * 1000) / total_deliveries
|
| 957 |
+
st.metric("Avg Distance per Delivery", f"{efficiency:.0f} m")
|
| 958 |
+
|
| 959 |
+
# Add a visualization of time distribution
|
| 960 |
+
st.write("### Time & Distance Distribution by Vehicle")
|
| 961 |
+
time_data = {vehicle_id: stats.get('estimated_time_mins', 0)
|
| 962 |
+
for vehicle_id, stats in optimization_result.get('stats', {}).items()
|
| 963 |
+
if len(routes.get(vehicle_id, [])) > 0}
|
| 964 |
+
|
| 965 |
+
if time_data:
|
| 966 |
+
# Create bar charts for time and distance
|
| 967 |
+
time_df = pd.DataFrame({
|
| 968 |
+
'Vehicle': list(time_data.keys()),
|
| 969 |
+
'Time (mins)': list(time_data.values())
|
| 970 |
+
})
|
| 971 |
+
|
| 972 |
+
distance_data = {vehicle_id: stats.get('total_distance_km', 0)
|
| 973 |
+
for vehicle_id, stats in optimization_result.get('stats', {}).items()
|
| 974 |
+
if len(routes.get(vehicle_id, [])) > 0}
|
| 975 |
+
|
| 976 |
+
distance_df = pd.DataFrame({
|
| 977 |
+
'Vehicle': list(distance_data.keys()),
|
| 978 |
+
'Distance (km)': list(distance_data.values())
|
| 979 |
+
})
|
| 980 |
+
|
| 981 |
+
col1, col2 = st.columns(2)
|
| 982 |
+
with col1:
|
| 983 |
+
st.bar_chart(time_df.set_index('Vehicle'))
|
| 984 |
+
with col2:
|
| 985 |
+
st.bar_chart(distance_df.set_index('Vehicle'))
|
| 986 |
+
|
| 987 |
+
# Display the map with all routes
|
| 988 |
+
st.subheader("Route Map with Road Navigation")
|
| 989 |
+
|
| 990 |
+
# Add info about the route visualization
|
| 991 |
+
st.info("""
|
| 992 |
+
The map shows delivery routes that follow road networks from the depot to each stop in sequence, and back to the depot.
|
| 993 |
+
Numbered circles indicate the stop sequence, and arrows show travel direction.
|
| 994 |
+
""")
|
| 995 |
+
|
| 996 |
+
# Extract all available dates from the delivery data
|
| 997 |
+
if 'delivery_date' in delivery_data.columns:
|
| 998 |
+
# Extract unique dates, ensuring all are converted to datetime objects
|
| 999 |
+
available_dates = sorted(pd.to_datetime(delivery_data['delivery_date'].unique()))
|
| 1000 |
+
|
| 1001 |
+
# Format dates for display
|
| 1002 |
+
date_options = {}
|
| 1003 |
+
for date in available_dates:
|
| 1004 |
+
# Ensure date is a proper datetime object before formatting
|
| 1005 |
+
if isinstance(date, str):
|
| 1006 |
+
date_obj = pd.to_datetime(date)
|
| 1007 |
+
else:
|
| 1008 |
+
date_obj = date
|
| 1009 |
+
# Create the formatted string key
|
| 1010 |
+
date_str = date_obj.strftime('%b %d, %Y')
|
| 1011 |
+
date_options[date_str] = date_obj
|
| 1012 |
+
|
| 1013 |
+
# Default to earliest date
|
| 1014 |
+
default_date = min(available_dates) if available_dates else None
|
| 1015 |
+
default_date_str = default_date.strftime('%b %d, %Y') if default_date else None
|
| 1016 |
+
|
| 1017 |
+
# Create date selection dropdown
|
| 1018 |
+
selected_date_str = st.selectbox(
|
| 1019 |
+
"Select date to show routes for:",
|
| 1020 |
+
options=list(date_options.keys()),
|
| 1021 |
+
index=0 if default_date_str else None,
|
| 1022 |
+
)
|
| 1023 |
+
|
| 1024 |
+
# Convert selected string back to date object
|
| 1025 |
+
selected_date = date_options[selected_date_str] if selected_date_str else None
|
| 1026 |
+
|
| 1027 |
+
# Filter routes to only show deliveries for the selected date
|
| 1028 |
+
if selected_date is not None:
|
| 1029 |
+
filtered_routes = {}
|
| 1030 |
+
|
| 1031 |
+
for vehicle_id, route in routes.items():
|
| 1032 |
+
# Keep only deliveries for the selected date
|
| 1033 |
+
filtered_route = []
|
| 1034 |
+
|
| 1035 |
+
for delivery in route:
|
| 1036 |
+
delivery_id = delivery['id']
|
| 1037 |
+
# Find the delivery in the original data to get its date
|
| 1038 |
+
delivery_row = delivery_data[delivery_data['delivery_id'] == delivery_id]
|
| 1039 |
+
|
| 1040 |
+
if not delivery_row.empty and 'delivery_date' in delivery_row:
|
| 1041 |
+
delivery_date = delivery_row['delivery_date'].iloc[0]
|
| 1042 |
+
|
| 1043 |
+
# Check if this delivery is for the selected date
|
| 1044 |
+
if pd.to_datetime(delivery_date).date() == pd.to_datetime(selected_date).date():
|
| 1045 |
+
filtered_route.append(delivery)
|
| 1046 |
+
|
| 1047 |
+
# Only add the vehicle if it has deliveries on this date
|
| 1048 |
+
if filtered_route:
|
| 1049 |
+
filtered_routes[vehicle_id] = filtered_route
|
| 1050 |
+
|
| 1051 |
+
# Replace the original routes with filtered ones for map display
|
| 1052 |
+
routes_for_map = filtered_routes
|
| 1053 |
+
st.write(f"Showing routes for {len(routes_for_map)} vehicles on {selected_date_str}")
|
| 1054 |
+
else:
|
| 1055 |
+
routes_for_map = routes
|
| 1056 |
+
else:
|
| 1057 |
+
routes_for_map = routes
|
| 1058 |
+
st.warning("No delivery dates available in data. Showing all routes.")
|
| 1059 |
+
|
| 1060 |
+
# Create a map centered on Singapore
|
| 1061 |
+
singapore_coords = [1.3521, 103.8198]
|
| 1062 |
+
m = folium.Map(location=singapore_coords, zoom_start=12)
|
| 1063 |
+
|
| 1064 |
+
# Modify loop to use routes_for_map instead of routes
|
| 1065 |
+
# Count total route segments for progress bar
|
| 1066 |
+
total_segments = sum(len(route) + 1 for route in routes_for_map.values() if route) # +1 for return to depot
|
| 1067 |
+
|
| 1068 |
+
# Create a unique key for this optimization result to use in session state
|
| 1069 |
+
optimization_key = hash(str(optimization_result))
|
| 1070 |
+
|
| 1071 |
+
# Check if we have stored routes for this optimization result
|
| 1072 |
+
if optimization_key not in st.session_state.calculated_road_routes:
|
| 1073 |
+
# Initialize storage for this optimization
|
| 1074 |
+
st.session_state.calculated_road_routes[optimization_key] = {}
|
| 1075 |
+
|
| 1076 |
+
# Count total route segments for progress bar
|
| 1077 |
+
total_segments = sum(len(route) + 1 for route in routes_for_map.values() if route) # +1 for return to depot
|
| 1078 |
+
route_progress = st.progress(0)
|
| 1079 |
+
progress_container = st.empty()
|
| 1080 |
+
progress_container.text("Calculating routes: 0%")
|
| 1081 |
+
|
| 1082 |
+
# Counter for processed segments
|
| 1083 |
+
processed_segments = 0
|
| 1084 |
+
|
| 1085 |
+
for i, (vehicle_id, route) in enumerate(routes_for_map.items()):
|
| 1086 |
+
if not route:
|
| 1087 |
+
continue
|
| 1088 |
+
|
| 1089 |
+
# Get vehicle info
|
| 1090 |
+
vehicle_info = vehicle_data[vehicle_data['vehicle_id'] == vehicle_id].iloc[0]
|
| 1091 |
+
|
| 1092 |
+
# Use color cycling if we have more vehicles than colors
|
| 1093 |
+
color = colors[i % len(colors)]
|
| 1094 |
+
|
| 1095 |
+
# Add depot marker
|
| 1096 |
+
depot_lat, depot_lon = vehicle_info['depot_latitude'], vehicle_info['depot_longitude']
|
| 1097 |
+
|
| 1098 |
+
# Create depot popup content
|
| 1099 |
+
depot_popup = f"""
|
| 1100 |
+
<b>Depot:</b> {vehicle_id}<br>
|
| 1101 |
+
<b>Vehicle Type:</b> {vehicle_info['vehicle_type']}<br>
|
| 1102 |
+
<b>Driver:</b> {vehicle_info.get('driver_name', 'Unknown')}<br>
|
| 1103 |
+
"""
|
| 1104 |
+
|
| 1105 |
+
# Add depot marker with START label
|
| 1106 |
+
folium.Marker(
|
| 1107 |
+
[depot_lat, depot_lon],
|
| 1108 |
+
popup=folium.Popup(depot_popup, max_width=300),
|
| 1109 |
+
tooltip=f"Depot: {vehicle_id} (START/END)",
|
| 1110 |
+
icon=folium.Icon(color=color, icon='home', prefix='fa')
|
| 1111 |
+
).add_to(m)
|
| 1112 |
+
|
| 1113 |
+
# Create route points for complete journey
|
| 1114 |
+
waypoints = [(depot_lat, depot_lon)] # Start at depot
|
| 1115 |
+
|
| 1116 |
+
# Add all delivery locations as waypoints
|
| 1117 |
+
for delivery in route:
|
| 1118 |
+
waypoints.append((delivery['latitude'], delivery['longitude']))
|
| 1119 |
+
|
| 1120 |
+
# Close the loop back to depot
|
| 1121 |
+
waypoints.append((depot_lat, depot_lon))
|
| 1122 |
+
|
| 1123 |
+
# Add delivery point markers with sequenced numbering
|
| 1124 |
+
for j, delivery in enumerate(route):
|
| 1125 |
+
lat, lon = delivery['latitude'], delivery['longitude']
|
| 1126 |
+
|
| 1127 |
+
# Create popup content
|
| 1128 |
+
popup_content = f"""
|
| 1129 |
+
<b>Stop {j+1}:</b> {delivery['id']}<br>
|
| 1130 |
+
<b>Customer:</b> {delivery.get('customer_name', 'Unknown')}<br>
|
| 1131 |
+
"""
|
| 1132 |
+
|
| 1133 |
+
if 'priority' in delivery:
|
| 1134 |
+
popup_content += f"<b>Priority:</b> {delivery['priority']}<br>"
|
| 1135 |
+
|
| 1136 |
+
if 'weight_kg' in delivery:
|
| 1137 |
+
popup_content += f"<b>Weight:</b> {delivery['weight_kg']:.2f} kg<br>"
|
| 1138 |
+
|
| 1139 |
+
if 'time_window' in delivery:
|
| 1140 |
+
popup_content += f"<b>Time Window:</b> {delivery['time_window']}<br>"
|
| 1141 |
+
|
| 1142 |
+
# Add circle markers and other delivery visualizations
|
| 1143 |
+
folium.Circle(
|
| 1144 |
+
location=[lat, lon],
|
| 1145 |
+
radius=50,
|
| 1146 |
+
color=color,
|
| 1147 |
+
fill=True,
|
| 1148 |
+
fill_color=color,
|
| 1149 |
+
fill_opacity=0.7,
|
| 1150 |
+
tooltip=f"Stop {j+1}: {delivery['id']}"
|
| 1151 |
+
).add_to(m)
|
| 1152 |
+
|
| 1153 |
+
# Add text label with stop number
|
| 1154 |
+
folium.map.Marker(
|
| 1155 |
+
[lat, lon],
|
| 1156 |
+
icon=DivIcon(
|
| 1157 |
+
icon_size=(20, 20),
|
| 1158 |
+
icon_anchor=(10, 10),
|
| 1159 |
+
html=f'<div style="font-size: 12pt; color: #444444; font-weight: bold; text-align: center;">{j+1}</div>',
|
| 1160 |
+
)
|
| 1161 |
+
).add_to(m)
|
| 1162 |
+
|
| 1163 |
+
# Add regular marker with popup
|
| 1164 |
+
folium.Marker(
|
| 1165 |
+
[lat + 0.0003, lon], # slight offset to not overlap with the circle
|
| 1166 |
+
popup=folium.Popup(popup_content, max_width=300),
|
| 1167 |
+
tooltip=f"Delivery {delivery['id']}",
|
| 1168 |
+
icon=folium.Icon(color=color, icon='box', prefix='fa')
|
| 1169 |
+
).add_to(m)
|
| 1170 |
+
|
| 1171 |
+
# Create road-based routes between each waypoint with progress tracking
|
| 1172 |
+
for k in range(len(waypoints) - 1):
|
| 1173 |
+
# Get start and end points of this segment
|
| 1174 |
+
start_point = waypoints[k]
|
| 1175 |
+
end_point = waypoints[k+1]
|
| 1176 |
+
|
| 1177 |
+
# Create a key for this route segment
|
| 1178 |
+
route_key = f"{vehicle_id}_{k}"
|
| 1179 |
+
|
| 1180 |
+
# Update progress text
|
| 1181 |
+
segment_desc = "depot" if k == 0 else f"stop {k}"
|
| 1182 |
+
next_desc = f"stop {k+1}" if k < len(waypoints) - 2 else "depot"
|
| 1183 |
+
|
| 1184 |
+
# Check if we have already calculated this route
|
| 1185 |
+
if route_key in st.session_state.calculated_road_routes[optimization_key]:
|
| 1186 |
+
# Use stored route
|
| 1187 |
+
road_route = st.session_state.calculated_road_routes[optimization_key][route_key]
|
| 1188 |
+
progress_text = f"Using stored route for Vehicle {vehicle_id}: {segment_desc} → {next_desc}"
|
| 1189 |
+
else:
|
| 1190 |
+
# Calculate and store new route
|
| 1191 |
+
progress_text = f"Calculating route for Vehicle {vehicle_id}: {segment_desc} → {next_desc}"
|
| 1192 |
+
with st.spinner(progress_text):
|
| 1193 |
+
# Get a road-like route between these points
|
| 1194 |
+
road_route = get_road_route(start_point, end_point)
|
| 1195 |
+
# Store for future use
|
| 1196 |
+
st.session_state.calculated_road_routes[optimization_key][route_key] = road_route
|
| 1197 |
+
|
| 1198 |
+
# Add the route line (non-animated)
|
| 1199 |
+
folium.PolyLine(
|
| 1200 |
+
road_route,
|
| 1201 |
+
color=color,
|
| 1202 |
+
weight=4,
|
| 1203 |
+
opacity=0.8,
|
| 1204 |
+
tooltip=f"Route {vehicle_id}: {segment_desc} → {next_desc}"
|
| 1205 |
+
).add_to(m)
|
| 1206 |
+
|
| 1207 |
+
# Add direction arrow
|
| 1208 |
+
idx = int(len(road_route) * 0.7)
|
| 1209 |
+
if idx < len(road_route) - 1:
|
| 1210 |
+
p1 = road_route[idx]
|
| 1211 |
+
p2 = road_route[idx + 1]
|
| 1212 |
+
|
| 1213 |
+
# Calculate direction angle
|
| 1214 |
+
dy = p2[0] - p1[0]
|
| 1215 |
+
dx = p2[1] - p1[1]
|
| 1216 |
+
angle = (90 - np.degrees(np.arctan2(dy, dx))) % 360
|
| 1217 |
+
|
| 1218 |
+
# Add arrow marker
|
| 1219 |
+
folium.RegularPolygonMarker(
|
| 1220 |
+
location=p1,
|
| 1221 |
+
number_of_sides=3,
|
| 1222 |
+
radius=8,
|
| 1223 |
+
rotation=angle,
|
| 1224 |
+
color=color,
|
| 1225 |
+
fill_color=color,
|
| 1226 |
+
fill_opacity=0.8
|
| 1227 |
+
).add_to(m)
|
| 1228 |
+
|
| 1229 |
+
# Update progress after each segment
|
| 1230 |
+
processed_segments += 1
|
| 1231 |
+
progress_percentage = int((processed_segments / total_segments) * 100)
|
| 1232 |
+
route_progress.progress(processed_segments / total_segments)
|
| 1233 |
+
progress_container.text(f"Calculating routes: {progress_percentage}%")
|
| 1234 |
+
|
| 1235 |
+
# Add a message to show when using cached routes
|
| 1236 |
+
if optimization_key in st.session_state.calculated_road_routes:
|
| 1237 |
+
cached_count = len(st.session_state.calculated_road_routes[optimization_key])
|
| 1238 |
+
if cached_count > 0 and cached_count >= processed_segments:
|
| 1239 |
+
st.info(f"✅ Using {cached_count} previously calculated routes. No recalculation needed.")
|
| 1240 |
+
|
| 1241 |
+
# Clear progress display when done
|
| 1242 |
+
progress_container.empty()
|
| 1243 |
+
route_progress.empty()
|
| 1244 |
+
st.success("All routes calculated successfully!")
|
| 1245 |
+
|
| 1246 |
+
# Display the map
|
| 1247 |
+
folium_static(m, width=800, height=600)
|
| 1248 |
+
|
| 1249 |
+
# -----------------------------------------------------
|
| 1250 |
+
# Unified Schedule Calendar Section
|
| 1251 |
+
# -----------------------------------------------------
|
| 1252 |
+
st.subheader("Schedule Calendar View")
|
| 1253 |
+
st.write("This calendar shows both delivery schedules and vehicle assignments. On-time deliveries are shown in green, late deliveries in red.")
|
| 1254 |
+
|
| 1255 |
+
# Process data for calendar view
|
| 1256 |
+
if routes:
|
| 1257 |
+
# First, collect all assigned deliveries and their details
|
| 1258 |
+
calendar_data = []
|
| 1259 |
+
|
| 1260 |
+
# Track which deliveries were actually included in routes
|
| 1261 |
+
assigned_delivery_ids = set()
|
| 1262 |
+
|
| 1263 |
+
# Step 1: Process all assigned deliveries first
|
| 1264 |
+
for vehicle_id, route in routes.items():
|
| 1265 |
+
for delivery in route:
|
| 1266 |
+
assigned_delivery_ids.add(delivery['id'])
|
| 1267 |
+
# Get vehicle info
|
| 1268 |
+
vehicle_info = vehicle_data[vehicle_data['vehicle_id'] == vehicle_id].iloc[0]
|
| 1269 |
+
vehicle_type = vehicle_info.get('vehicle_type', 'Standard')
|
| 1270 |
+
driver_name = vehicle_info.get('driver_name', 'Unknown')
|
| 1271 |
+
|
| 1272 |
+
# Extract delivery data
|
| 1273 |
+
delivery_id = delivery['id']
|
| 1274 |
+
customer_name = delivery.get('customer_name', 'Unknown')
|
| 1275 |
+
priority = delivery.get('priority', 'Medium')
|
| 1276 |
+
time_window = delivery.get('time_window', '09:00-17:00')
|
| 1277 |
+
weight = delivery.get('weight_kg', 0)
|
| 1278 |
+
|
| 1279 |
+
# Extract start and end times from time_window
|
| 1280 |
+
start_time_str, end_time_str = time_window.split('-')
|
| 1281 |
+
|
| 1282 |
+
# Get delivery date from original data
|
| 1283 |
+
delivery_row = delivery_data[delivery_data['delivery_id'] == delivery_id]
|
| 1284 |
+
delivery_date = delivery_row['delivery_date'].iloc[0] if not delivery_row.empty and 'delivery_date' in delivery_row else datetime.now().date()
|
| 1285 |
+
|
| 1286 |
+
# Create start and end datetime for the delivery
|
| 1287 |
+
try:
|
| 1288 |
+
# Convert to pandas datetime
|
| 1289 |
+
if isinstance(delivery_date, pd.Timestamp):
|
| 1290 |
+
date_str = delivery_date.strftime('%Y-%m-%d')
|
| 1291 |
+
elif isinstance(delivery_date, str):
|
| 1292 |
+
date_str = pd.to_datetime(delivery_date).strftime('%Y-%m-%d')
|
| 1293 |
+
else:
|
| 1294 |
+
date_str = delivery_date.strftime('%Y-%m-%d')
|
| 1295 |
+
|
| 1296 |
+
start_datetime = pd.to_datetime(f"{date_str} {start_time_str}")
|
| 1297 |
+
end_datetime = pd.to_datetime(f"{date_str} {end_time_str}")
|
| 1298 |
+
|
| 1299 |
+
# Check if this is on time (based on the estimated arrival from the route)
|
| 1300 |
+
estimated_arrival_mins = delivery.get('estimated_arrival', 0)
|
| 1301 |
+
|
| 1302 |
+
# Convert time_window to minutes for comparison
|
| 1303 |
+
start_mins = int(start_time_str.split(':')[0]) * 60 + int(start_time_str.split(':')[1])
|
| 1304 |
+
end_mins = int(end_time_str.split(':')[0]) * 60 + int(end_time_str.split(':')[1])
|
| 1305 |
+
|
| 1306 |
+
# Determine if delivery is on time
|
| 1307 |
+
on_time = start_mins <= estimated_arrival_mins <= end_mins
|
| 1308 |
+
|
| 1309 |
+
# Set color based on on-time status and assignment
|
| 1310 |
+
if on_time:
|
| 1311 |
+
# Green for on-time
|
| 1312 |
+
color = 'on_time'
|
| 1313 |
+
else:
|
| 1314 |
+
# Red for not on-time
|
| 1315 |
+
color = 'late'
|
| 1316 |
+
|
| 1317 |
+
calendar_data.append({
|
| 1318 |
+
'delivery_id': delivery_id,
|
| 1319 |
+
'customer_name': customer_name,
|
| 1320 |
+
'vehicle_id': vehicle_id,
|
| 1321 |
+
'driver_name': driver_name,
|
| 1322 |
+
'vehicle_type': vehicle_type,
|
| 1323 |
+
'priority': priority,
|
| 1324 |
+
'time_window': time_window,
|
| 1325 |
+
'estimated_arrival_mins': estimated_arrival_mins,
|
| 1326 |
+
'estimated_arrival_time': f"{estimated_arrival_mins//60:02d}:{estimated_arrival_mins%60:02d}",
|
| 1327 |
+
'weight_kg': weight,
|
| 1328 |
+
'Start': start_datetime,
|
| 1329 |
+
'Finish': end_datetime,
|
| 1330 |
+
'Task': f"{delivery_id}: {customer_name}",
|
| 1331 |
+
'Vehicle Task': f"{vehicle_id}: {driver_name}",
|
| 1332 |
+
'on_time': on_time,
|
| 1333 |
+
'assigned': True,
|
| 1334 |
+
'color': color,
|
| 1335 |
+
'delivery_date': pd.to_datetime(date_str)
|
| 1336 |
+
})
|
| 1337 |
+
except Exception as e:
|
| 1338 |
+
st.warning(f"Could not process time window for delivery {delivery_id}: {str(e)}")
|
| 1339 |
+
|
| 1340 |
+
# Step 2: Now add unassigned deliveries
|
| 1341 |
+
for _, row in delivery_data.iterrows():
|
| 1342 |
+
delivery_id = row['delivery_id']
|
| 1343 |
+
|
| 1344 |
+
# Skip if already assigned
|
| 1345 |
+
if delivery_id in assigned_delivery_ids:
|
| 1346 |
+
continue
|
| 1347 |
+
|
| 1348 |
+
# Extract data for unassigned delivery
|
| 1349 |
+
customer_name = row.get('customer_name', 'Unknown')
|
| 1350 |
+
priority = row.get('priority', 'Medium')
|
| 1351 |
+
time_window = row.get('time_window', '09:00-17:00')
|
| 1352 |
+
weight = row.get('weight_kg', 0)
|
| 1353 |
+
|
| 1354 |
+
# Extract start and end times from time_window
|
| 1355 |
+
start_time_str, end_time_str = time_window.split('-')
|
| 1356 |
+
|
| 1357 |
+
# Get delivery date
|
| 1358 |
+
if 'delivery_date' in row:
|
| 1359 |
+
delivery_date = row['delivery_date']
|
| 1360 |
+
else:
|
| 1361 |
+
delivery_date = datetime.now().date()
|
| 1362 |
+
|
| 1363 |
+
# Create start and end datetime
|
| 1364 |
+
try:
|
| 1365 |
+
# Convert to pandas datetime
|
| 1366 |
+
if isinstance(delivery_date, pd.Timestamp):
|
| 1367 |
+
date_str = delivery_date.strftime('%Y-%m-%d')
|
| 1368 |
+
elif isinstance(delivery_date, str):
|
| 1369 |
+
date_str = pd.to_datetime(delivery_date).strftime('%Y-%m-%d')
|
| 1370 |
+
else:
|
| 1371 |
+
date_str = delivery_date.strftime('%Y-%m-%d')
|
| 1372 |
+
|
| 1373 |
+
start_datetime = pd.to_datetime(f"{date_str} {start_time_str}")
|
| 1374 |
+
end_datetime = pd.to_datetime(f"{date_str} {end_time_str}")
|
| 1375 |
+
|
| 1376 |
+
# For unassigned deliveries set color to 'unassigned'
|
| 1377 |
+
calendar_data.append({
|
| 1378 |
+
'delivery_id': delivery_id,
|
| 1379 |
+
'customer_name': customer_name,
|
| 1380 |
+
'vehicle_id': 'Unassigned',
|
| 1381 |
+
'driver_name': 'N/A',
|
| 1382 |
+
'vehicle_type': 'N/A',
|
| 1383 |
+
'priority': priority,
|
| 1384 |
+
'time_window': time_window,
|
| 1385 |
+
'estimated_arrival_mins': 0,
|
| 1386 |
+
'estimated_arrival_time': 'N/A',
|
| 1387 |
+
'weight_kg': weight,
|
| 1388 |
+
'Start': start_datetime,
|
| 1389 |
+
'Finish': end_datetime,
|
| 1390 |
+
'Task': f"{delivery_id}: {customer_name} (UNASSIGNED)",
|
| 1391 |
+
'Vehicle Task': 'Unassigned',
|
| 1392 |
+
'on_time': False,
|
| 1393 |
+
'assigned': False,
|
| 1394 |
+
'color': 'unassigned', # Color for unassigned
|
| 1395 |
+
'delivery_date': pd.to_datetime(date_str)
|
| 1396 |
+
})
|
| 1397 |
+
except Exception as e:
|
| 1398 |
+
st.warning(f"Could not process time window for unassigned delivery {delivery_id}: {str(e)}")
|
| 1399 |
+
|
| 1400 |
+
if calendar_data:
|
| 1401 |
+
# Convert to DataFrame
|
| 1402 |
+
cal_df = pd.DataFrame(calendar_data)
|
| 1403 |
+
|
| 1404 |
+
# Create color mapping for on-time status
|
| 1405 |
+
cal_df['Color'] = cal_df['on_time'].map({True: 'rgb(0, 200, 0)', False: 'rgb(255, 0, 0)'})
|
| 1406 |
+
|
| 1407 |
+
# Get all available dates
|
| 1408 |
+
all_dates = sorted(cal_df['delivery_date'].dt.date.unique())
|
| 1409 |
+
|
| 1410 |
+
# Format dates for display in the dropdown
|
| 1411 |
+
date_options = {date.strftime('%b %d, %Y'): date for date in all_dates}
|
| 1412 |
+
|
| 1413 |
+
# Initialize calendar display dates if not already set or if dates have changed
|
| 1414 |
+
available_date_keys = list(date_options.keys())
|
| 1415 |
+
|
| 1416 |
+
# Default to all dates
|
| 1417 |
+
if st.session_state.calendar_display_dates is None or not all(date in available_date_keys for date in st.session_state.calendar_display_dates):
|
| 1418 |
+
st.session_state.calendar_display_dates = available_date_keys
|
| 1419 |
+
|
| 1420 |
+
# Add multiselect for date filtering with session state
|
| 1421 |
+
selected_date_strings = st.multiselect(
|
| 1422 |
+
"Select dates to display",
|
| 1423 |
+
options=available_date_keys,
|
| 1424 |
+
default=st.session_state.calendar_display_dates,
|
| 1425 |
+
key="calendar_date_selector"
|
| 1426 |
+
)
|
| 1427 |
+
|
| 1428 |
+
# Update the session state
|
| 1429 |
+
st.session_state.calendar_display_dates = selected_date_strings
|
| 1430 |
+
|
| 1431 |
+
# Convert selected strings back to date objects
|
| 1432 |
+
selected_dates = [date_options[date_str] for date_str in selected_date_strings]
|
| 1433 |
+
|
| 1434 |
+
if not selected_dates:
|
| 1435 |
+
st.info("Please select at least one date to view the delivery schedule.")
|
| 1436 |
+
else:
|
| 1437 |
+
# Filter calendar data to only include selected dates
|
| 1438 |
+
filtered_cal_df = cal_df[cal_df['delivery_date'].dt.date.isin(selected_dates)]
|
| 1439 |
+
|
| 1440 |
+
# Create tabs only for the selected dates
|
| 1441 |
+
date_tabs = st.tabs([date.strftime('%b %d, %Y') for date in selected_dates])
|
| 1442 |
+
|
| 1443 |
+
for i, (date, tab) in enumerate(zip(selected_dates, date_tabs)):
|
| 1444 |
+
with tab:
|
| 1445 |
+
# Filter data for this date
|
| 1446 |
+
day_data = filtered_cal_df[filtered_cal_df['delivery_date'].dt.date == date]
|
| 1447 |
+
|
| 1448 |
+
if len(day_data) > 0:
|
| 1449 |
+
# FIRST SECTION: DELIVERY SCHEDULE VIEW
|
| 1450 |
+
st.write("#### Delivery Schedule")
|
| 1451 |
+
|
| 1452 |
+
# Create figure for delivery view
|
| 1453 |
+
fig = px.timeline(
|
| 1454 |
+
day_data,
|
| 1455 |
+
x_start="Start",
|
| 1456 |
+
x_end="Finish",
|
| 1457 |
+
y="Task",
|
| 1458 |
+
color="color", # Use our color column
|
| 1459 |
+
color_discrete_map={
|
| 1460 |
+
"on_time": "green",
|
| 1461 |
+
"late": "orange",
|
| 1462 |
+
"unassigned": "red" # Unassigned deliveries also red
|
| 1463 |
+
},
|
| 1464 |
+
hover_data=["customer_name", "vehicle_id", "driver_name", "priority", "time_window",
|
| 1465 |
+
"estimated_arrival_time", "weight_kg", "assigned"]
|
| 1466 |
+
)
|
| 1467 |
+
|
| 1468 |
+
# Fix the pattern application code
|
| 1469 |
+
for i, row in day_data.iterrows():
|
| 1470 |
+
# Only add diagonal pattern to assigned deliveries
|
| 1471 |
+
if row['assigned']:
|
| 1472 |
+
for trace in fig.data:
|
| 1473 |
+
# Find which trace corresponds to this row's color group
|
| 1474 |
+
color_value = row['color']
|
| 1475 |
+
|
| 1476 |
+
# Look for matching trace
|
| 1477 |
+
if trace.name == color_value and any(y == row['Task'] for y in trace.y):
|
| 1478 |
+
# Add pattern only to assigned bars
|
| 1479 |
+
if 'marker' not in trace:
|
| 1480 |
+
trace.marker = dict()
|
| 1481 |
+
if 'pattern' not in trace.marker:
|
| 1482 |
+
trace.marker.pattern = dict(
|
| 1483 |
+
shape="\\", # Diagonal lines
|
| 1484 |
+
size=4,
|
| 1485 |
+
solidity=0.5,
|
| 1486 |
+
fgcolor="black"
|
| 1487 |
+
)
|
| 1488 |
+
|
| 1489 |
+
# Add status labels to the bars
|
| 1490 |
+
for idx, row in day_data.iterrows():
|
| 1491 |
+
status_text = "✓ On-time" if row['on_time'] and row['assigned'] else "⚠ Late" if row['assigned'] else "Not assigned"
|
| 1492 |
+
position = (row['Start'] + (row['Finish'] - row['Start'])/2)
|
| 1493 |
+
|
| 1494 |
+
# Only add labels to assigned deliveries
|
| 1495 |
+
if row['assigned']:
|
| 1496 |
+
fig.add_annotation(
|
| 1497 |
+
x=position,
|
| 1498 |
+
y=row['Task'],
|
| 1499 |
+
text=status_text,
|
| 1500 |
+
showarrow=False,
|
| 1501 |
+
font=dict(color="black", size=10),
|
| 1502 |
+
xanchor="center"
|
| 1503 |
+
)
|
| 1504 |
+
|
| 1505 |
+
# Update layout
|
| 1506 |
+
fig.update_layout(
|
| 1507 |
+
title=f"Deliveries by Customer - {date.strftime('%b %d, %Y')}",
|
| 1508 |
+
xaxis_title="Time of Day",
|
| 1509 |
+
yaxis_title="Delivery",
|
| 1510 |
+
height=max(300, 50 * len(day_data)),
|
| 1511 |
+
yaxis={'categoryorder':'category ascending'},
|
| 1512 |
+
showlegend=False # Hide the legend as we have custom annotations
|
| 1513 |
+
)
|
| 1514 |
+
|
| 1515 |
+
# Display figure
|
| 1516 |
+
st.plotly_chart(fig, use_container_width=True)
|
| 1517 |
+
|
| 1518 |
+
# Show summary metrics for delivery view
|
| 1519 |
+
col1, col2, col3, col4 = st.columns(4)
|
| 1520 |
+
with col1:
|
| 1521 |
+
st.metric("Total Deliveries", len(day_data))
|
| 1522 |
+
with col2:
|
| 1523 |
+
st.metric("On-Time Deliveries", len(day_data[day_data['on_time']]))
|
| 1524 |
+
with col3:
|
| 1525 |
+
st.metric("Late Deliveries", len(day_data[~day_data['on_time']]))
|
| 1526 |
+
with col4:
|
| 1527 |
+
if 'weight_kg' in day_data.columns:
|
| 1528 |
+
st.metric("Total Weight", f"{day_data['weight_kg'].sum():.2f} kg")
|
| 1529 |
+
|
| 1530 |
+
# Add breakdown of deliveries by priority
|
| 1531 |
+
if 'priority' in day_data.columns:
|
| 1532 |
+
st.write("##### Deliveries by Priority")
|
| 1533 |
+
priority_counts = day_data['priority'].value_counts()
|
| 1534 |
+
priority_cols = st.columns(min(4, len(priority_counts)))
|
| 1535 |
+
|
| 1536 |
+
for j, (priority, count) in enumerate(priority_counts.items()):
|
| 1537 |
+
col_idx = j % len(priority_cols)
|
| 1538 |
+
with priority_cols[col_idx]:
|
| 1539 |
+
st.metric(priority, count)
|
| 1540 |
+
|
| 1541 |
+
# SECOND SECTION: VEHICLE SCHEDULE VIEW
|
| 1542 |
+
st.write("#### Vehicle Schedule")
|
| 1543 |
+
|
| 1544 |
+
# Create figure grouped by vehicle
|
| 1545 |
+
fig_vehicle = px.timeline(
|
| 1546 |
+
day_data,
|
| 1547 |
+
x_start="Start",
|
| 1548 |
+
x_end="Finish",
|
| 1549 |
+
y="Vehicle Task",
|
| 1550 |
+
color="on_time",
|
| 1551 |
+
color_discrete_map={True: "green", False: "red"},
|
| 1552 |
+
hover_data=["delivery_id", "customer_name", "priority", "time_window",
|
| 1553 |
+
"estimated_arrival_time", "weight_kg"]
|
| 1554 |
+
)
|
| 1555 |
+
|
| 1556 |
+
# Add labels for each delivery to the bars
|
| 1557 |
+
for idx, row in day_data.iterrows():
|
| 1558 |
+
fig_vehicle.add_annotation(
|
| 1559 |
+
x=(row['Start'] + (row['Finish'] - row['Start'])/2),
|
| 1560 |
+
y=row['Vehicle Task'],
|
| 1561 |
+
text=f"#{row['delivery_id']}",
|
| 1562 |
+
showarrow=False,
|
| 1563 |
+
font=dict(size=10, color="black")
|
| 1564 |
+
)
|
| 1565 |
+
|
| 1566 |
+
# Update layout
|
| 1567 |
+
fig_vehicle.update_layout(
|
| 1568 |
+
title=f"Vehicle Assignment Schedule - {date.strftime('%b %d, %Y')}",
|
| 1569 |
+
xaxis_title="Time of Day",
|
| 1570 |
+
yaxis_title="Vehicle",
|
| 1571 |
+
height=max(300, 70 * day_data['Vehicle Task'].nunique()),
|
| 1572 |
+
yaxis={'categoryorder':'category ascending'}
|
| 1573 |
+
)
|
| 1574 |
+
|
| 1575 |
+
# Display figure for vehicle view
|
| 1576 |
+
st.plotly_chart(fig_vehicle, use_container_width=True)
|
| 1577 |
+
|
| 1578 |
+
# Show vehicle utilization summary
|
| 1579 |
+
st.write("##### Vehicle Utilization")
|
| 1580 |
+
|
| 1581 |
+
# Calculate vehicle utilization metrics
|
| 1582 |
+
vehicle_metrics = []
|
| 1583 |
+
for vehicle_id in day_data['vehicle_id'].unique():
|
| 1584 |
+
vehicle_deliveries = day_data[day_data['vehicle_id'] == vehicle_id]
|
| 1585 |
+
|
| 1586 |
+
# Calculate total delivery time for this vehicle
|
| 1587 |
+
total_mins = sum((row['Finish'] - row['Start']).total_seconds() / 60 for _, row in vehicle_deliveries.iterrows())
|
| 1588 |
+
|
| 1589 |
+
# Count on-time deliveries
|
| 1590 |
+
on_time_count = len(vehicle_deliveries[vehicle_deliveries['on_time'] == True])
|
| 1591 |
+
|
| 1592 |
+
# Get the driver name
|
| 1593 |
+
driver_name = vehicle_deliveries['driver_name'].iloc[0] if not vehicle_deliveries.empty else "Unknown"
|
| 1594 |
+
|
| 1595 |
+
vehicle_metrics.append({
|
| 1596 |
+
'vehicle_id': vehicle_id,
|
| 1597 |
+
'driver_name': driver_name,
|
| 1598 |
+
'deliveries': len(vehicle_deliveries),
|
| 1599 |
+
'delivery_time_mins': total_mins,
|
| 1600 |
+
'on_time_deliveries': on_time_count,
|
| 1601 |
+
'on_time_percentage': (on_time_count / len(vehicle_deliveries)) * 100 if len(vehicle_deliveries) > 0 else 0
|
| 1602 |
+
})
|
| 1603 |
+
|
| 1604 |
+
# Display metrics in a nice format
|
| 1605 |
+
metrics_df = pd.DataFrame(vehicle_metrics)
|
| 1606 |
+
|
| 1607 |
+
# Show as a table
|
| 1608 |
+
st.dataframe(metrics_df.style.format({
|
| 1609 |
+
'delivery_time_mins': '{:.0f}',
|
| 1610 |
+
'on_time_percentage': '{:.1f}%'
|
| 1611 |
+
}))
|
| 1612 |
+
|
| 1613 |
+
else:
|
| 1614 |
+
st.info(f"No deliveries scheduled for {date.strftime('%b %d, %Y')}")
|
| 1615 |
+
else:
|
| 1616 |
+
st.info("No calendar data available. Please generate routes first.")
|
| 1617 |
+
|
| 1618 |
+
def create_distance_matrix(locations):
|
| 1619 |
+
"""
|
| 1620 |
+
Create a simple Euclidean distance matrix between locations
|
| 1621 |
+
|
| 1622 |
+
In a real implementation, this would be replaced by actual road distances
|
| 1623 |
+
|
| 1624 |
+
Parameters:
|
| 1625 |
+
locations (list): List of location dictionaries with lat and lon
|
| 1626 |
+
|
| 1627 |
+
Returns:
|
| 1628 |
+
numpy.ndarray: Distance matrix
|
| 1629 |
+
"""
|
| 1630 |
+
n = len(locations)
|
| 1631 |
+
matrix = np.zeros((n, n))
|
| 1632 |
+
for i in range(n):
|
| 1633 |
+
for j in range(n):
|
| 1634 |
+
if i == j:
|
| 1635 |
+
continue
|
| 1636 |
+
|
| 1637 |
+
# Approximate distance in km (very rough)
|
| 1638 |
+
lat1, lon1 = locations[i]['latitude'], locations[i]['longitude']
|
| 1639 |
+
lat2, lon2 = locations[j]['latitude'], locations[j]['longitude']
|
| 1640 |
+
|
| 1641 |
+
# Simple Euclidean distance (for demo purposes)
|
| 1642 |
+
# In reality, we'd use actual road distances
|
| 1643 |
+
dist = ((lat1 - lat2) ** 2 + (lon1 - lon2) ** 2) ** 0.5 * 111
|
| 1644 |
+
matrix[i, j] = dist
|
| 1645 |
+
|
| 1646 |
+
return matrix
|
| 1647 |
+
|
| 1648 |
+
def get_road_route(start_point, end_point):
|
| 1649 |
+
"""
|
| 1650 |
+
Get a route that follows actual roads between two points using OpenStreetMap's routing service.
|
| 1651 |
+
|
| 1652 |
+
Args:
|
| 1653 |
+
start_point: (lat, lon) tuple of start location
|
| 1654 |
+
end_point: (lat, lon) tuple of end location
|
| 1655 |
+
|
| 1656 |
+
Returns:
|
| 1657 |
+
list: List of (lat, lon) points representing the actual road route
|
| 1658 |
+
"""
|
| 1659 |
+
try:
|
| 1660 |
+
# OSRM expects coordinates in lon,lat format
|
| 1661 |
+
start_lat, start_lon = start_point
|
| 1662 |
+
end_lat, end_lon = end_point
|
| 1663 |
+
|
| 1664 |
+
# Build the API URL for OSRM (OpenStreetMap Routing Machine)
|
| 1665 |
+
url = f"http://router.project-osrm.org/route/v1/driving/{start_lon},{start_lat};{end_lon},{end_lat}"
|
| 1666 |
+
params = {
|
| 1667 |
+
"overview": "full",
|
| 1668 |
+
"geometries": "geojson",
|
| 1669 |
+
"steps": "true"
|
| 1670 |
+
}
|
| 1671 |
+
|
| 1672 |
+
# Replace direct text output with spinner
|
| 1673 |
+
with st.spinner(f"Getting route from ({start_lat:.4f}, {start_lon:.4f}) to ({end_lat:.4f}, {end_lon:.4f})..."):
|
| 1674 |
+
response = requests.get(url, params=params, timeout=5)
|
| 1675 |
+
|
| 1676 |
+
if response.status_code == 200:
|
| 1677 |
+
data = response.json()
|
| 1678 |
+
|
| 1679 |
+
# Check if a route was found
|
| 1680 |
+
if data['code'] == 'Ok' and len(data['routes']) > 0:
|
| 1681 |
+
# Extract the geometry (list of coordinates) from the response
|
| 1682 |
+
geometry = data['routes'][0]['geometry']['coordinates']
|
| 1683 |
+
|
| 1684 |
+
# OSRM returns points as [lon, lat], but we need [lat, lon]
|
| 1685 |
+
route_points = [(lon, lat) for lat, lon in geometry]
|
| 1686 |
+
return route_points
|
| 1687 |
+
|
| 1688 |
+
# If we get here, something went wrong with the API call
|
| 1689 |
+
st.warning(f"Could not get road route: {response.status_code} - {response.text if response.status_code != 200 else 'No routes found'}")
|
| 1690 |
+
|
| 1691 |
+
except Exception as e:
|
| 1692 |
+
st.warning(f"Error getting road route: {str(e)}")
|
| 1693 |
+
|
| 1694 |
+
# Fallback to our approximation method if the API call fails
|
| 1695 |
+
with st.spinner("Generating approximate route..."):
|
| 1696 |
+
# Create a more sophisticated approximation with higher density of points
|
| 1697 |
+
start_lat, start_lon = start_point
|
| 1698 |
+
end_lat, end_lon = end_point
|
| 1699 |
+
|
| 1700 |
+
# Calculate the direct distance
|
| 1701 |
+
direct_dist = ((start_lat - end_lat)**2 + (start_lon - end_lon)**2)**0.5
|
| 1702 |
+
|
| 1703 |
+
# Generate more points for longer distances
|
| 1704 |
+
num_points = max(10, int(direct_dist * 10000)) # Scale based on distance
|
| 1705 |
+
|
| 1706 |
+
# Create a path with small random deviations to look like a road
|
| 1707 |
+
route_points = []
|
| 1708 |
+
|
| 1709 |
+
# Starting point
|
| 1710 |
+
route_points.append((start_lat, start_lon))
|
| 1711 |
+
|
| 1712 |
+
# Calculate major waypoints - like going through major roads
|
| 1713 |
+
# Find a midpoint that's slightly off the direct line
|
| 1714 |
+
mid_lat = (start_lat + end_lat) / 2
|
| 1715 |
+
mid_lon = (start_lon + end_lon) / 2
|
| 1716 |
+
|
| 1717 |
+
# Add some perpendicular deviation to simulate taking streets
|
| 1718 |
+
# Get perpendicular direction
|
| 1719 |
+
dx = end_lat - start_lat
|
| 1720 |
+
dy = end_lon - start_lon
|
| 1721 |
+
|
| 1722 |
+
# Perpendicular direction
|
| 1723 |
+
perpendicular_x = -dy
|
| 1724 |
+
perpendicular_y = dx
|
| 1725 |
+
|
| 1726 |
+
# Normalize and scale
|
| 1727 |
+
magnitude = (perpendicular_x**2 + perpendicular_y**2)**0.5
|
| 1728 |
+
if magnitude > 0:
|
| 1729 |
+
perpendicular_x /= magnitude
|
| 1730 |
+
perpendicular_y /= magnitude
|
| 1731 |
+
|
| 1732 |
+
# Scale the perpendicular offset based on distance
|
| 1733 |
+
offset_scale = direct_dist * 0.2 # 20% of direct distance
|
| 1734 |
+
|
| 1735 |
+
# Apply offset to midpoint
|
| 1736 |
+
mid_lat += perpendicular_x * offset_scale * random.choice([-1, 1])
|
| 1737 |
+
mid_lon += perpendicular_y * offset_scale * random.choice([-1, 1])
|
| 1738 |
+
|
| 1739 |
+
# Generate a smooth path from start to midpoint
|
| 1740 |
+
for i in range(1, num_points // 2):
|
| 1741 |
+
t = i / (num_points // 2)
|
| 1742 |
+
# Quadratic Bezier curve parameters
|
| 1743 |
+
u = 1 - t
|
| 1744 |
+
lat = u**2 * start_lat + 2 * u * t * mid_lat + t**2 * mid_lat
|
| 1745 |
+
lon = u**2 * start_lon + 2 * u * t * mid_lon + t**2 * mid_lon
|
| 1746 |
+
|
| 1747 |
+
# Add small random noise to make it look like following streets
|
| 1748 |
+
noise_scale = 0.0002 * direct_dist
|
| 1749 |
+
lat += random.uniform(-noise_scale, noise_scale)
|
| 1750 |
+
lon += random.uniform(-noise_scale, noise_scale)
|
| 1751 |
+
|
| 1752 |
+
route_points.append((lat, lon))
|
| 1753 |
+
|
| 1754 |
+
# Generate a smooth path from midpoint to end
|
| 1755 |
+
for i in range(num_points // 2, num_points):
|
| 1756 |
+
t = (i - num_points // 2) / (num_points // 2)
|
| 1757 |
+
# Quadratic Bezier curve parameters
|
| 1758 |
+
u = 1 - t
|
| 1759 |
+
lat = u**2 * mid_lat + 2 * u * t * mid_lat + t**2 * end_lat
|
| 1760 |
+
lon = u**2 * mid_lon + 2 * u * t * mid_lon + t**2 * end_lon
|
| 1761 |
+
|
| 1762 |
+
# Add small random noise to make it look like following streets
|
| 1763 |
+
noise_scale = 0.0002 * direct_dist
|
| 1764 |
+
lat += random.uniform(-noise_scale, noise_scale)
|
| 1765 |
+
lon += random.uniform(-noise_scale, noise_scale)
|
| 1766 |
+
|
| 1767 |
+
route_points.append((lat, lon))
|
| 1768 |
+
|
| 1769 |
+
# Ending point
|
| 1770 |
+
route_points.append((end_lat, end_lon))
|
| 1771 |
+
|
| 1772 |
+
return route_points
|
| 1773 |
+
|
| 1774 |
+
# Add this condition to make the function importable
|
| 1775 |
+
if __name__ == "__main__":
|
| 1776 |
+
st.set_page_config(
|
| 1777 |
+
page_title="Route Optimizer - Delivery Route Optimization",
|
| 1778 |
+
page_icon="🛣️",
|
| 1779 |
+
layout="wide"
|
| 1780 |
+
)
|
| 1781 |
+
optimize_page()
|
src/utils/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# This file is intentionally left blank.
|
src/utils/generate_all_data.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
|
| 5 |
+
# Add the project root directory to the Python path
|
| 6 |
+
sys.path.append(str(Path(__file__).resolve().parent.parent.parent))
|
| 7 |
+
|
| 8 |
+
def create_data_directory():
|
| 9 |
+
"""
|
| 10 |
+
Ensure data directories exist for all generated files.
|
| 11 |
+
|
| 12 |
+
This function creates the necessary directory structure to store
|
| 13 |
+
delivery data, vehicle data, and travel time matrices.
|
| 14 |
+
|
| 15 |
+
Returns:
|
| 16 |
+
--------
|
| 17 |
+
tuple of (str, str, str)
|
| 18 |
+
Paths to time matrix directory, vehicle data directory, and delivery data directory
|
| 19 |
+
"""
|
| 20 |
+
vehicle_data_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'data', 'vehicle-data')
|
| 21 |
+
os.makedirs(vehicle_data_dir, exist_ok=True)
|
| 22 |
+
|
| 23 |
+
delivery_data_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'data', 'delivery-data')
|
| 24 |
+
os.makedirs(delivery_data_dir, exist_ok=True)
|
| 25 |
+
|
| 26 |
+
time_matrix_data_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'data', 'time-matrix')
|
| 27 |
+
os.makedirs(time_matrix_data_dir, exist_ok=True)
|
| 28 |
+
return time_matrix_data_dir, vehicle_data_dir, delivery_data_dir
|
| 29 |
+
|
| 30 |
+
def main():
|
| 31 |
+
"""
|
| 32 |
+
Run all data generation scripts for the delivery route optimization project.
|
| 33 |
+
|
| 34 |
+
This function orchestrates the creation of all synthetic datasets needed for
|
| 35 |
+
the route optimization problem, including delivery data, vehicle data, and
|
| 36 |
+
travel time/distance matrices.
|
| 37 |
+
|
| 38 |
+
Generated Files:
|
| 39 |
+
--------------
|
| 40 |
+
1. Delivery Data:
|
| 41 |
+
- Contains information about delivery locations, time windows, packages, etc.
|
| 42 |
+
- Used to define the delivery stops in the routing problem.
|
| 43 |
+
|
| 44 |
+
2. Vehicle Data:
|
| 45 |
+
- Contains information about the delivery fleet, capacity, depots, etc.
|
| 46 |
+
- Used to define the available resources for delivery routes.
|
| 47 |
+
|
| 48 |
+
3. Travel Matrices:
|
| 49 |
+
- Contains distance and time information between all locations.
|
| 50 |
+
- Used by the optimization algorithm to calculate route costs.
|
| 51 |
+
|
| 52 |
+
Usage:
|
| 53 |
+
------
|
| 54 |
+
These generated datasets form the foundation of the delivery route optimization
|
| 55 |
+
application. Together they define:
|
| 56 |
+
- Where deliveries need to be made (delivery data)
|
| 57 |
+
- What resources are available for deliveries (vehicle data)
|
| 58 |
+
- How long it takes to travel between locations (travel matrices)
|
| 59 |
+
|
| 60 |
+
The route optimization algorithm uses these inputs to determine the most
|
| 61 |
+
efficient assignment of deliveries to vehicles and the optimal sequence of
|
| 62 |
+
stops for each vehicle.
|
| 63 |
+
"""
|
| 64 |
+
print("Starting data generation process...")
|
| 65 |
+
|
| 66 |
+
time_matrix_data_dir, vehicle_data_dir, delivery_data_dir = create_data_directory()
|
| 67 |
+
print(f"Time Matrix Data will be saved to: {time_matrix_data_dir}")
|
| 68 |
+
print(f"Delivery Data will be saved to: {delivery_data_dir}")
|
| 69 |
+
print(f"Vehicle Data will be saved to: {vehicle_data_dir}")
|
| 70 |
+
|
| 71 |
+
# Import and run delivery data generation
|
| 72 |
+
print("\n1. Generating delivery data...")
|
| 73 |
+
from src.utils.generate_delivery_data import generate_delivery_data
|
| 74 |
+
delivery_data = generate_delivery_data(50, use_geocoding=True)
|
| 75 |
+
|
| 76 |
+
# Import and run vehicle data generation
|
| 77 |
+
print("\n2. Generating vehicle data...")
|
| 78 |
+
from src.utils.generate_vehicle_data import generate_vehicle_data
|
| 79 |
+
vehicle_data = generate_vehicle_data(10)
|
| 80 |
+
|
| 81 |
+
# Import and run travel matrix generation
|
| 82 |
+
print("\n3. Generating travel matrices...")
|
| 83 |
+
from src.utils.generate_travel_matrix import generate_travel_matrix
|
| 84 |
+
generate_travel_matrix()
|
| 85 |
+
|
| 86 |
+
print("\nAll data generation complete! Files saved to data directory.")
|
| 87 |
+
|
| 88 |
+
if __name__ == "__main__":
|
| 89 |
+
main()
|
src/utils/generate_delivery_data.py
ADDED
|
@@ -0,0 +1,241 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
+
import random
|
| 4 |
+
from datetime import datetime, timedelta
|
| 5 |
+
import os
|
| 6 |
+
import time
|
| 7 |
+
import requests
|
| 8 |
+
from geopy.geocoders import Nominatim
|
| 9 |
+
|
| 10 |
+
# Set random seed for reproducibility
|
| 11 |
+
np.random.seed(42)
|
| 12 |
+
|
| 13 |
+
def generate_delivery_data(n_deliveries=50, use_geocoding=False):
    """
    Generate synthetic delivery data with realistic Singapore addresses.

    Builds a DataFrame of fictional deliveries spread across real Singapore
    neighborhoods and streets, optionally geocoding each address with
    Nominatim, and writes the result to data/delivery-data/delivery_data.csv.

    Parameters
    ----------
    n_deliveries : int, default=50
        Number of delivery records to generate.
    use_geocoding : bool, default=False
        When True, each address is looked up via the Nominatim geocoder
        (with a 1-second delay per request to respect rate limits); on
        failure the coordinates fall back to a random point inside the
        neighborhood's bounding box. When False, the bounding-box fallback
        is always used (fast, no network access).

    Returns
    -------
    pd.DataFrame
        The generated delivery records. The same data is also written to
        CSV as a side effect.
    """
    # Define real Singapore neighborhoods and their actual streets
    # Format: [neighborhood_name, [list_of_real_streets], postal_code_prefix]
    sg_neighborhoods = [
        ['Ang Mo Kio', ['Ang Mo Kio Avenue 1', 'Ang Mo Kio Avenue 3', 'Ang Mo Kio Avenue 4', 'Ang Mo Kio Avenue 10'], '56'],
        ['Bedok', ['Bedok North Avenue 1', 'Bedok North Road', 'Bedok Reservoir Road', 'New Upper Changi Road'], '46'],
        ['Bishan', ['Bishan Street 11', 'Bishan Street 12', 'Bishan Street 13', 'Bishan Street 22'], '57'],
        ['Bukit Merah', ['Jalan Bukit Merah', 'Henderson Road', 'Tiong Bahru Road', 'Redhill Close'], '15'],
        ['Bukit Batok', ['Bukit Batok East Avenue 6', 'Bukit Batok West Avenue 8', 'Bukit Batok Street 21'], '65'],
        ['Clementi', ['Clementi Avenue 1', 'Clementi Avenue 4', 'Clementi Road', 'Commonwealth Avenue West'], '12'],
        ['Geylang', ['Geylang East Avenue 1', 'Geylang Road', 'Guillemard Road', 'Sims Avenue'], '38'],
        ['Hougang', ['Hougang Avenue 1', 'Hougang Avenue 7', 'Hougang Street 91', 'Upper Serangoon Road'], '53'],
        ['Jurong East', ['Jurong East Street 13', 'Jurong East Avenue 1', 'Jurong Gateway Road'], '60'],
        ['Jurong West', ['Jurong West Street 41', 'Jurong West Street 52', 'Jurong West Street 93'], '64'],
        ['Kallang', ['Kallang Avenue', 'Geylang Bahru', 'Boon Keng Road', 'Upper Boon Keng Road'], '33'],
        ['Punggol', ['Punggol Central', 'Punggol Field', 'Punggol Road', 'Punggol Way'], '82'],
        ['Queenstown', ['Commonwealth Avenue', 'Commonwealth Drive', 'Mei Chin Road', 'Stirling Road'], '14'],
        ['Sengkang', ['Sengkang East Way', 'Sengkang West Way', 'Compassvale Road', 'Fernvale Road'], '54'],
        ['Serangoon', ['Serangoon Avenue 2', 'Serangoon Avenue 3', 'Serangoon North Avenue 1'], '55'],
        ['Tampines', ['Tampines Street 11', 'Tampines Street 21', 'Tampines Avenue 1', 'Tampines Avenue 4'], '52'],
        ['Toa Payoh', ['Toa Payoh Lorong 1', 'Toa Payoh Lorong 2', 'Toa Payoh Lorong 4', 'Toa Payoh Central'], '31'],
        ['Woodlands', ['Woodlands Avenue 1', 'Woodlands Drive 16', 'Woodlands Drive 72', 'Woodlands Circle'], '73'],
        ['Yishun', ['Yishun Avenue 1', 'Yishun Avenue 4', 'Yishun Ring Road', 'Yishun Street 22'], '76']
    ]

    # Bounding boxes for neighborhoods (for fallback coordinates)
    # Format: [name, min_lat, max_lat, min_lon, max_lon]
    neighborhood_bounds = {
        'Ang Mo Kio': [1.360000, 1.380000, 103.830000, 103.860000],
        'Bedok': [1.320000, 1.335000, 103.920000, 103.950000],
        'Bishan': [1.345000, 1.360000, 103.830000, 103.855000],
        'Bukit Merah': [1.270000, 1.290000, 103.800000, 103.830000],
        'Bukit Batok': [1.340000, 1.360000, 103.740000, 103.770000],
        'Clementi': [1.310000, 1.325000, 103.750000, 103.780000],
        'Geylang': [1.310000, 1.325000, 103.880000, 103.900000],
        'Hougang': [1.370000, 1.385000, 103.880000, 103.900000],
        'Jurong East': [1.330000, 1.345000, 103.730000, 103.750000],
        'Jurong West': [1.340000, 1.360000, 103.690000, 103.720000],
        'Kallang': [1.300000, 1.320000, 103.850000, 103.880000],
        'Punggol': [1.390000, 1.410000, 103.900000, 103.920000],
        'Queenstown': [1.290000, 1.310000, 103.780000, 103.805000],
        'Sengkang': [1.380000, 1.395000, 103.870000, 103.900000],
        'Serangoon': [1.345000, 1.360000, 103.865000, 103.885000],
        'Tampines': [1.345000, 1.365000, 103.930000, 103.960000],
        'Toa Payoh': [1.326000, 1.341000, 103.840000, 103.865000],
        'Woodlands': [1.430000, 1.450000, 103.770000, 103.800000],
        'Yishun': [1.410000, 1.430000, 103.820000, 103.850000]
    }

    # Generate delivery IDs: DEL0001, DEL0002, ...
    delivery_ids = [f'DEL{str(i).zfill(4)}' for i in range(1, n_deliveries + 1)]

    # Generate customer names (fictional) from common Singaporean name parts
    first_names = ['Tan', 'Lim', 'Lee', 'Ng', 'Wong', 'Chan', 'Goh', 'Ong', 'Teo', 'Koh',
                   'Chua', 'Loh', 'Yeo', 'Sim', 'Ho', 'Ang', 'Tay', 'Yap', 'Leong', 'Foo']
    last_names = ['Wei', 'Ming', 'Hui', 'Ling', 'Yong', 'Jun', 'Hong', 'Xin', 'Yi', 'Jie',
                  'Cheng', 'Kai', 'Zhi', 'Tian', 'Yu', 'En', 'Yang', 'Hao', 'Chong', 'Zheng']
    customer_names = [f"{random.choice(first_names)} {random.choice(last_names)}" for _ in range(n_deliveries)]

    # Accumulators for the per-delivery address fields, filled in the loop below
    addresses = []
    postal_codes = []
    latitudes = []
    longitudes = []
    neighborhood_names = []

    # Initialize geocoder if using geocoding
    if use_geocoding:
        geolocator = Nominatim(user_agent="delivery_app")

    # Generate realistic addresses
    for i in range(n_deliveries):
        # Randomly select a neighborhood
        neighborhood_data = random.choice(sg_neighborhoods)
        neighborhood = neighborhood_data[0]
        streets = neighborhood_data[1]
        postal_prefix = neighborhood_data[2]

        # Randomly select a street in that neighborhood
        street = random.choice(streets)

        # Generate block number (realistic for HDB)
        block = random.randint(100, 600)

        # Generate unit number (floor and unit on that floor)
        unit_floor = random.randint(2, 20)
        unit_number = random.randint(1, 150)

        # Generate postal code (with realistic prefix)
        # NOTE(review): prefix (2 digits) + suffix (3 digits) yields a
        # 5-digit code; real Singapore postal codes are 6 digits -- confirm
        # whether downstream consumers care about the exact format.
        postal_suffix = str(random.randint(0, 999)).zfill(3)
        postal_code = postal_prefix + postal_suffix

        # Create two formats of address - one for display, one for geocoding
        display_address = f"Block {block}, #{unit_floor:02d}-{unit_number:03d}, {street}, Singapore {postal_code}"
        geocode_address = f"{block} {street}, Singapore {postal_code}"  # Simpler format for geocoding

        # Default coordinates from neighborhood bounding box (fallback)
        bounds = neighborhood_bounds[neighborhood]
        default_lat = round(random.uniform(bounds[0], bounds[1]), 6)
        default_lon = round(random.uniform(bounds[2], bounds[3]), 6)

        # Use geocoding API if requested
        if use_geocoding:
            try:
                location = geolocator.geocode(geocode_address)

                if location:
                    lat = location.latitude
                    lon = location.longitude
                    print(f"✓ Successfully geocoded: {geocode_address} → ({lat}, {lon})")
                else:
                    # First fallback: try with just street and postal code
                    simpler_address = f"{street}, Singapore {postal_code}"
                    location = geolocator.geocode(simpler_address)

                    if location:
                        lat = location.latitude
                        lon = location.longitude
                        print(f"✓ Fallback geocoded: {simpler_address} → ({lat}, {lon})")
                    else:
                        # Second fallback: just use the neighborhood center
                        lat = default_lat
                        lon = default_lon
                        print(f"✗ Could not geocode: {geocode_address}, using neighborhood coordinates")

                # Add delay to avoid being rate limited (Nominatim usage policy)
                time.sleep(1)

            except Exception as e:
                # Any geocoder failure (network, timeout, service error)
                # degrades gracefully to the bounding-box coordinates.
                print(f"✗ Geocoding error for {geocode_address}: {str(e)}")
                lat = default_lat
                lon = default_lon
        else:
            # Without geocoding, use the default coordinates
            lat = default_lat
            lon = default_lon

        addresses.append(display_address)
        postal_codes.append(postal_code)
        latitudes.append(lat)
        longitudes.append(lon)
        neighborhood_names.append(neighborhood)

    # Generate delivery dates (within the next 7 days)
    base_date = datetime.now().date()
    delivery_dates = [base_date + timedelta(days=random.randint(1, 7)) for _ in range(n_deliveries)]

    # Generate time windows (between 9 AM and 5 PM)
    time_windows = []
    for _ in range(n_deliveries):
        start_hour = random.randint(9, 16)
        window_length = random.choice([1, 2, 3])  # 1, 2, or 3 hour windows
        end_hour = min(start_hour + window_length, 18)  # never past 6 PM

        start_time = f"{start_hour:02d}:00"
        end_time = f"{end_hour:02d}:00"
        time_windows.append(f"{start_time}-{end_time}")

    # Generate package details
    weights = np.random.uniform(0.5, 20.0, n_deliveries)  # in kg
    volumes = np.random.uniform(0.01, 0.5, n_deliveries)  # in m³

    # Priority levels
    priorities = np.random.choice(['High', 'Medium', 'Low'], n_deliveries,
                                  p=[0.2, 0.5, 0.3])  # 20% High, 50% Medium, 30% Low

    # Required vehicle type (matches the types produced by generate_vehicle_data)
    vehicle_types = np.random.choice(['Standard', 'Large', 'Refrigerated'], n_deliveries,
                                     p=[0.7, 0.2, 0.1])

    # Status
    statuses = np.random.choice(['Pending', 'Assigned', 'In Transit', 'Delivered'], n_deliveries,
                                p=[0.6, 0.2, 0.15, 0.05])

    # Additional notes
    notes = []
    special_instructions = [
        'Call customer before delivery',
        'Fragile items',
        'Leave at door',
        'Signature required',
        'No delivery on weekends',
        None
    ]

    for _ in range(n_deliveries):
        # NOTE: the instruction list itself contains None, so the effective
        # chance of a non-empty note is below 70%.
        if random.random() < 0.7:  # 70% chance of having a note
            notes.append(random.choice(special_instructions))
        else:
            notes.append(None)

    # Create DataFrame
    df = pd.DataFrame({
        'delivery_id': delivery_ids,
        'customer_name': customer_names,
        'address': addresses,
        'postal_code': postal_codes,
        'neighborhood': neighborhood_names,
        'latitude': latitudes,
        'longitude': longitudes,
        'delivery_date': delivery_dates,
        'time_window': time_windows,
        'weight_kg': weights.round(2),
        'volume_m3': volumes.round(3),
        'priority': priorities,
        'vehicle_type': vehicle_types,
        'status': statuses,
        'special_instructions': notes
    })

    # Ensure the output directory (<repo-root>/data/delivery-data) exists
    data_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'data', 'delivery-data')
    os.makedirs(data_dir, exist_ok=True)

    # Save to CSV
    output_path = os.path.join(data_dir, 'delivery_data.csv')
    df.to_csv(output_path, index=False)
    print(f"Delivery data generated and saved to {output_path}")
    return df
|
| 234 |
+
|
| 235 |
+
if __name__ == "__main__":
    # Toggle real geocoding here: slower but yields more accurate coordinates.
    USE_GEOCODING = True

    df_preview = generate_delivery_data(50, use_geocoding=USE_GEOCODING)
    print("Sample of delivery data:")
    print(df_preview.head())
|
| 241 |
+
|
src/utils/generate_travel_matrix.py
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
+
import os
|
| 4 |
+
import time
|
| 5 |
+
import requests
|
| 6 |
+
from math import radians, sin, cos, sqrt, atan2
|
| 7 |
+
import random
|
| 8 |
+
|
| 9 |
+
def haversine_distance(lat1, lon1, lat2, lon2):
    """
    Compute the great-circle (Haversine) distance between two points.

    Parameters
    ----------
    lat1, lon1 : float
        Coordinates of the first point in decimal degrees.
    lat2, lon2 : float
        Coordinates of the second point in decimal degrees.

    Returns
    -------
    float
        Distance between the two points in kilometers.
    """
    earth_radius_km = 6371

    # Work in radians throughout.
    phi1, lam1, phi2, lam2 = (radians(v) for v in (lat1, lon1, lat2, lon2))

    # Haversine formula: half-angle differences, then the central angle.
    half_dphi = (phi2 - phi1) / 2
    half_dlam = (lam2 - lam1) / 2
    chord = sin(half_dphi) ** 2 + cos(phi1) * cos(phi2) * sin(half_dlam) ** 2
    central_angle = 2 * atan2(sqrt(chord), sqrt(1 - chord))

    return earth_radius_km * central_angle
|
| 37 |
+
|
| 38 |
+
def get_road_distance_with_retry(origin, destination, max_retries=3, initial_backoff=1):
    """
    Get road distance and travel time between two points, with retry logic.

    Queries a randomly chosen public OSRM server; after all retries fail,
    falls back to the haversine distance scaled by a road-network factor.

    Parameters:
    -----------
    origin : dict
        Origin location with 'latitude' and 'longitude' keys
    destination : dict
        Destination location with 'latitude' and 'longitude' keys
    max_retries : int
        Maximum number of retry attempts before the haversine fallback
    initial_backoff : int
        Initial backoff time in seconds (doubled after every failed attempt)

    Returns:
    --------
    tuple of (float, float)
        Distance in km and duration in minutes, each rounded to 2 decimals
    """
    # URLs for different public OSRM instances to distribute load
    osrm_urls = [
        "http://router.project-osrm.org",
        "https://routing.openstreetmap.de",
        # Add more public OSRM servers if available
    ]

    retry_count = 0
    backoff = initial_backoff

    while retry_count < max_retries:
        try:
            # Use a random OSRM server from the list to distribute load
            base_url = random.choice(osrm_urls)
            url = f"{base_url}/route/v1/driving/{origin['longitude']},{origin['latitude']};{destination['longitude']},{destination['latitude']}?overview=false"

            # Add a timeout to prevent hanging connections
            response = requests.get(url, timeout=5)
            data = response.json()

            if data.get('code') == 'Ok':
                # Extract distance and duration of the best route
                distance = data['routes'][0]['distance'] / 1000  # meters to km
                duration = data['routes'][0]['duration'] / 60  # seconds to minutes
                return round(distance, 2), round(duration, 2)
            else:
                print(f"API returned error: {data.get('message', 'Unknown error')}")

        # FIX: also catch ValueError (response.json() on a non-JSON body,
        # e.g. an HTML error page from an overloaded public server) and
        # KeyError/IndexError (payload missing 'routes'), so a malformed
        # response triggers a retry instead of crashing the whole matrix
        # calculation.
        except (requests.exceptions.RequestException, ValueError, KeyError, IndexError) as e:
            print(f"Request failed: {e}. Retry {retry_count+1}/{max_retries}")

        # Exponential backoff with jitter to prevent thundering herd
        jitter = random.uniform(0, 0.5 * backoff)
        sleep_time = backoff + jitter
        time.sleep(sleep_time)
        backoff *= 2  # Exponential backoff
        retry_count += 1

    # Fallback to haversine after all retries failed
    print(f"All retries failed for route from ({origin['latitude']},{origin['longitude']}) to ({destination['latitude']},{destination['longitude']}). Using haversine distance.")
    distance = haversine_distance(
        origin['latitude'], origin['longitude'],
        destination['latitude'], destination['longitude']
    )
    distance = distance * 1.3  # Road factor: roads are rarely straight lines
    time_mins = (distance / 40) * 60  # Assume an average speed of 40 km/h

    return round(distance, 2), round(time_mins, 2)
|
| 106 |
+
|
| 107 |
+
def get_road_distance(origins, destinations, use_osrm=True):
    """
    Calculate road distances and travel times between multiple origins and
    destinations using the OSRM (Open Source Routing Machine) API.

    Parameters:
    -----------
    origins : list of dict
        List of origin locations with 'latitude' and 'longitude' keys
    destinations : list of dict
        List of destination locations with 'latitude' and 'longitude' keys
    use_osrm : bool, default=True
        Whether to use OSRM API or fall back to haversine distance

    Returns:
    --------
    tuple of (numpy.ndarray, numpy.ndarray)
        Arrays containing distances (in km) and durations (in minutes)
        between each origin-destination pair. On KeyboardInterrupt the
        partially filled matrices are returned (unvisited cells stay 0).
    """
    n_origins = len(origins)
    n_destinations = len(destinations)
    distance_matrix = np.zeros((n_origins, n_destinations))
    duration_matrix = np.zeros((n_origins, n_destinations))

    # If OSRM is not requested, fall back to haversine distance
    if not use_osrm:
        print("Using haversine distance as fallback.")
        for i, origin in enumerate(origins):
            for j, dest in enumerate(destinations):
                distance = haversine_distance(
                    origin['latitude'], origin['longitude'],
                    dest['latitude'], dest['longitude']
                )
                # Adjust for road networks (roads are typically not straight lines)
                distance = distance * 1.3  # Apply a factor to approximate road distance
                time_mins = (distance / 40) * 60  # Assuming average speed of 40 km/h

                distance_matrix[i, j] = round(distance, 2)
                duration_matrix[i, j] = round(time_mins, 2)
        return distance_matrix, duration_matrix

    # Process in batches to prevent overwhelming the API
    print(f"Processing {n_origins} origins and {n_destinations} destinations in batches...")
    total_requests = n_origins * n_destinations
    completed = 0

    try:
        # Try OSRM's table service for small datasets first (more efficient)
        if n_origins + n_destinations <= 50:
            print("Trying OSRM table API for efficient matrix calculation...")
            try:
                # Code for table API would go here, but we'll skip for now as it's more complex
                # and the batch approach is more reliable for handling errors
                raise NotImplementedError("Table API not implemented, falling back to individual routes")
            except Exception as e:
                print(f"Table API failed: {e}. Using individual routes instead.")
                # Continue with individual route requests below

        # Process with individual route requests
        for i, origin in enumerate(origins):
            for j, dest in enumerate(destinations):
                # FIX: skip when origin and destination are the same POINT,
                # not merely the same index. `i == j` is only meaningful when
                # origins and destinations are the same list; for distinct
                # lists it would wrongly zero index-matched pairs and issue
                # API calls for genuinely identical coordinates.
                if origin is dest or (
                    origin['latitude'] == dest['latitude']
                    and origin['longitude'] == dest['longitude']
                ):
                    distance_matrix[i, j] = 0
                    duration_matrix[i, j] = 0
                    completed += 1
                    continue

                # Get distance with retry logic
                distance, duration = get_road_distance_with_retry(origin, dest)
                distance_matrix[i, j] = distance
                duration_matrix[i, j] = duration

                # Show progress every 10 routes
                completed += 1
                if completed % 10 == 0:
                    print(f"Progress: {completed}/{total_requests} routes calculated ({(completed/total_requests)*100:.1f}%)")

                # Add randomized delay to prevent overwhelming the API
                time.sleep(random.uniform(0.1, 0.5))

    except KeyboardInterrupt:
        print("\nOperation interrupted by user. Saving partial results...")

    return distance_matrix, duration_matrix
|
| 192 |
+
|
| 193 |
+
def generate_travel_matrix(use_osrm=True):
    """
    Generate travel time and distance matrices between all locations in the delivery problem.

    Reads the previously generated delivery and vehicle CSVs, combines
    depots and delivery points into one location list, computes pairwise
    distances and base travel times (via OSRM or haversine), scales the
    times by hour-of-day traffic factors, and writes several CSV matrices
    into data/time-matrix.

    Parameters:
    -----------
    use_osrm : bool, default=True
        Whether to use OSRM API for real road distances instead of haversine

    Returns:
    --------
    tuple of (pd.DataFrame, pd.DataFrame, dict)
        Distance matrix, base time matrix, and hourly time matrices keyed
        by "HH:00" strings. Returns None (bare return) when the input CSVs
        are missing.
    """
    # Resolve <repo-root>/data relative to this file and create subdirectories
    data_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'data')
    time_matrix_dir = os.path.join(data_dir, 'time-matrix')
    delivery_data_dir = os.path.join(data_dir, 'delivery-data')
    vehicle_data_dir = os.path.join(data_dir, 'vehicle-data')

    # Ensure all directories exist
    for directory in [time_matrix_dir, delivery_data_dir, vehicle_data_dir]:
        os.makedirs(directory, exist_ok=True)

    # Read delivery and vehicle data
    try:
        delivery_data = pd.read_csv(os.path.join(delivery_data_dir, 'delivery_data.csv'))
        vehicle_data = pd.read_csv(os.path.join(vehicle_data_dir, 'vehicle_data.csv'))
    except FileNotFoundError:
        # NOTE(review): returns None here while the docstring promises a
        # tuple -- callers that unpack the result must handle this case.
        print("Error: Please generate delivery and vehicle data first!")
        return

    # Extract (id, lat, lon) rows for delivery points and vehicle depots
    delivery_locations = delivery_data[['delivery_id', 'latitude', 'longitude']].values
    depot_locations = vehicle_data[['vehicle_id', 'depot_latitude', 'depot_longitude']].values

    # Average speed for time calculation (km/h)
    # NOTE(review): avg_speed is computed but not used below -- time
    # estimates come from get_road_distance instead; confirm whether this
    # is intentional.
    avg_speed = vehicle_data['avg_speed_kmh'].mean()

    # Traffic factor matrix (to simulate traffic conditions at different times)
    # factor > 1 means slower travel; < 1 means faster than the baseline
    hours_in_day = 24
    traffic_factors = np.ones((hours_in_day, 1))

    # Simulate morning rush hour (8-10 AM)
    traffic_factors[8:10] = 1.5

    # Simulate evening rush hour (5-7 PM)
    traffic_factors[17:19] = 1.8

    # Late night (less traffic)
    traffic_factors[22:] = 0.8
    traffic_factors[:5] = 0.7

    # Create a combined list of all locations (depots + delivery points)
    all_locations = []

    # Add depot locations
    for row in depot_locations:
        all_locations.append({
            'id': row[0],  # vehicle_id as location id
            'type': 'depot',
            'latitude': row[1],
            'longitude': row[2]
        })

    # Add delivery locations
    for row in delivery_locations:
        all_locations.append({
            'id': row[0],  # delivery_id as location id
            'type': 'delivery',
            'latitude': row[1],
            'longitude': row[2]
        })

    print(f"Calculating distances between {len(all_locations)} locations...")

    # Save the locations file early so we have this data even if the process is interrupted
    location_df = pd.DataFrame(all_locations)
    location_df.to_csv(os.path.join(time_matrix_dir, 'locations.csv'), index=False)

    # Calculate distances and times using OSRM with improved error handling
    if use_osrm:
        print("Using OSRM API for road distances...")
        distance_matrix, base_time_matrix = get_road_distance(all_locations, all_locations, use_osrm=True)
    else:
        print("Using haversine distance with road factor adjustment...")
        distance_matrix, base_time_matrix = get_road_distance(all_locations, all_locations, use_osrm=False)

    # Create DataFrames for the matrices, indexed by location id both ways
    location_ids = [loc['id'] for loc in all_locations]

    distance_df = pd.DataFrame(distance_matrix, index=location_ids, columns=location_ids)
    time_df = pd.DataFrame(base_time_matrix, index=location_ids, columns=location_ids)

    # Save distance and base time matrices early in case later steps fail
    distance_df.to_csv(os.path.join(time_matrix_dir, 'distance_matrix.csv'))
    time_df.to_csv(os.path.join(time_matrix_dir, 'base_time_matrix.csv'))
    print("Basic distance and time matrices saved successfully.")

    # Create time matrices for different hours of the day by scaling the
    # base times with the corresponding traffic factor
    hourly_time_matrices = {}
    for hour in range(24):
        traffic_factor = traffic_factors[hour][0]
        hourly_time = base_time_matrix * traffic_factor
        hourly_time_matrices[f"{hour:02d}:00"] = pd.DataFrame(hourly_time, index=location_ids, columns=location_ids)

    # Save a sample of time matrices (e.g., rush hour and normal time)
    try:
        hourly_time_matrices['08:00'].to_csv(os.path.join(time_matrix_dir, 'morning_rush_time_matrix.csv'))
        hourly_time_matrices['18:00'].to_csv(os.path.join(time_matrix_dir, 'evening_rush_time_matrix.csv'))
        hourly_time_matrices['12:00'].to_csv(os.path.join(time_matrix_dir, 'midday_time_matrix.csv'))
        hourly_time_matrices['00:00'].to_csv(os.path.join(time_matrix_dir, 'night_time_matrix.csv'))
        print("Time matrices for different hours saved successfully.")
    except Exception as e:
        print(f"Error saving hourly time matrices: {e}")
        print("Continuing with basic matrices only.")

    print("Travel matrices generation complete.")
    return distance_df, time_df, hourly_time_matrices
|
| 312 |
+
|
| 313 |
+
if __name__ == "__main__":
    # Command-line entry point: choose between real OSRM routing (default)
    # and the faster haversine approximation.
    import argparse

    parser = argparse.ArgumentParser(description="Generate travel matrices for delivery route optimization")
    # --use-osrm is kept for backward compatibility / explicitness, but OSRM
    # is already the default behaviour.
    parser.add_argument("--use-osrm", action="store_true", help="Use OSRM API for real road distances (default)")
    parser.add_argument("--use-haversine", action="store_true", help="Use haversine distance only (faster)")

    args = parser.parse_args()

    # FIX: the original parsed --use-osrm but never consulted it; the
    # if/else reduced to this single expression. --use-haversine wins when
    # both flags are passed, matching the original precedence.
    generate_travel_matrix(use_osrm=not args.use_haversine)
|
src/utils/generate_vehicle_data.py
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
+
import random
|
| 4 |
+
from datetime import datetime, timedelta
|
| 5 |
+
import os
|
| 6 |
+
|
| 7 |
+
# Set random seed for reproducibility
|
| 8 |
+
np.random.seed(43)
|
| 9 |
+
|
| 10 |
+
def generate_vehicle_data(n_vehicles=10, output_dir=None):
    """
    Generate synthetic vehicle data for a delivery fleet optimization problem.

    Creates a realistic Singapore-based delivery fleet with varied vehicle
    types, capacities, depots, drivers and operational parameters for use in
    route optimization, then saves it to CSV.

    Parameters
    ----------
    n_vehicles : int, default=10
        Number of vehicles to generate in the fleet.
    output_dir : str or None, default=None
        Directory in which to write ``vehicle_data.csv``.  ``None`` keeps the
        historical default of ``<repo_root>/data/vehicle-data`` resolved
        relative to this module, so existing callers are unaffected.

    Returns
    -------
    pd.DataFrame
        The generated fleet data, one row per vehicle (also written to CSV).
    """
    # Sequential fleet identifiers: VEH001, VEH002, ...
    vehicle_ids = [f'VEH{str(i).zfill(3)}' for i in range(1, n_vehicles + 1)]

    # Vehicle types, weighted toward the common 'Standard' van.
    vehicle_types = [
        random.choices(['Standard', 'Large', 'Refrigerated'],
                       weights=[0.7, 0.2, 0.1])[0]
        for _ in range(n_vehicles)
    ]

    # Capacity ranges per type: ((min_kg, max_kg), (min_m3, max_m3)).
    capacity_ranges = {
        'Standard': ((800, 1200), (8, 12)),
        'Large': ((1500, 2500), (15, 25)),
        'Refrigerated': ((600, 1000), (6, 10)),
    }
    max_weights = []
    max_volumes = []
    for v_type in vehicle_types:
        (w_lo, w_hi), (vol_lo, vol_hi) = capacity_ranges[v_type]
        max_weights.append(random.uniform(w_lo, w_hi))
        max_volumes.append(random.uniform(vol_lo, vol_hi))

    # Realistic depot/warehouse locations in Singapore industrial areas:
    # [name, latitude, longitude]
    warehouse_locations = [
        ["Tuas Logistics Hub", 1.3187, 103.6390],
        ["Jurong Industrial Estate", 1.3233, 103.6994],
        ["Loyang Industrial Park", 1.3602, 103.9761],
        ["Changi Logistics Centre", 1.3497, 103.9742],
        ["Keppel Distripark", 1.2706, 103.8219],
        ["Pandan Logistics Hub", 1.3187, 103.7509],
        ["Alexandra Distripark", 1.2744, 103.8012],
        ["Kallang Way Industrial", 1.3315, 103.8731],
        ["Defu Industrial Park", 1.3610, 103.8891],
        ["Woodlands Industrial", 1.4428, 103.7875]
    ]

    # Assign a depot to each vehicle (several vehicles may share one).
    if n_vehicles <= len(warehouse_locations):
        # One vehicle per warehouse, taken in listed order.
        selected_warehouses = warehouse_locations[:n_vehicles]
    else:
        # Use every warehouse at least once, fill the rest randomly, then
        # shuffle so the random repeats are not clustered at the end.
        selected_warehouses = warehouse_locations.copy()
        remaining = n_vehicles - len(warehouse_locations)
        selected_warehouses.extend(random.choice(warehouse_locations) for _ in range(remaining))
        random.shuffle(selected_warehouses)

    depot_names = [w[0] for w in selected_warehouses]
    # Jitter coordinates by roughly 50-100 m so vehicles sharing a warehouse
    # sit at distinct points, simulating different loading bays on site.
    depot_lats = [w[1] + random.uniform(-0.0005, 0.0005) for w in selected_warehouses]
    depot_lons = [w[2] + random.uniform(-0.0005, 0.0005) for w in selected_warehouses]

    # Driver names: the first len(first_names) drivers get distinct first
    # names; any extras are drawn fully at random.
    first_names = ['Ahmad', 'Raj', 'Michael', 'Wei', 'Siti', 'Kumar', 'Chong', 'David', 'Suresh', 'Ali']
    last_names = ['Tan', 'Singh', 'Lee', 'Wong', 'Kumar', 'Abdullah', 'Zhang', 'Lim', 'Raj', 'Teo']
    driver_names = []
    for i in range(n_vehicles):
        first = first_names[i] if i < len(first_names) else random.choice(first_names)
        driver_names.append(f"{first} {random.choice(last_names)}")

    # Shift windows and per-vehicle operating parameters.
    start_times = [f"{random.randint(7, 10):02d}:00" for _ in range(n_vehicles)]
    end_times = [f"{random.randint(17, 21):02d}:00" for _ in range(n_vehicles)]
    max_working_hours = [random.randint(8, 10) for _ in range(n_vehicles)]
    avg_speeds = [random.uniform(30, 50) for _ in range(n_vehicles)]    # km/h
    cost_per_km = [random.uniform(0.5, 1.5) for _ in range(n_vehicles)]

    # Fleet availability status (mostly available).
    statuses = np.random.choice(['Available', 'In Service', 'Maintenance'], n_vehicles, p=[0.7, 0.2, 0.1])

    # License plates in Singapore format: 'S' + 3 letters + 4 digits.
    letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    license_plates = [
        f"S{''.join(random.choices(letters, k=3))}{random.randint(1000, 9999)}"
        for _ in range(n_vehicles)
    ]

    df = pd.DataFrame({
        'vehicle_id': vehicle_ids,
        'vehicle_type': vehicle_types,
        'license_plate': license_plates,
        'driver_name': driver_names,
        'max_weight_kg': np.array(max_weights).round(2),
        'max_volume_m3': np.array(max_volumes).round(2),
        'depot_name': depot_names,
        'depot_latitude': np.array(depot_lats).round(6),
        'depot_longitude': np.array(depot_lons).round(6),
        'start_time': start_times,
        'end_time': end_times,
        'max_working_hours': max_working_hours,
        'avg_speed_kmh': np.array(avg_speeds).round(2),
        'cost_per_km': np.array(cost_per_km).round(2),
        'status': statuses
    })

    if output_dir is None:
        # Historical default: <repo_root>/data/vehicle-data next to src/.
        # abspath() guards against a bare module path collapsing the
        # three-level dirname chain to ''.
        output_dir = os.path.join(
            os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
            'data', 'vehicle-data')
    os.makedirs(output_dir, exist_ok=True)

    # Save to CSV and return the frame for in-process use.
    output_path = os.path.join(output_dir, 'vehicle_data.csv')
    df.to_csv(output_path, index=False)
    print(f"Vehicle data generated and saved to {output_path}")
    return df
|
| 163 |
+
|
| 164 |
+
if __name__ == "__main__":
    # Script entry point: build a 10-vehicle fleet and preview the result.
    fleet_df = generate_vehicle_data(10)
    print("Sample of vehicle data:")
    print(fleet_df.head())
|