Build the Docker image:
- docker build -t kenyaemr-inference .
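To confirm the image was built, list it with the standard Docker command:
- docker image ls kenyaemr-inference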
Check that these files exist on the host before running the container (a one-shot check loop follows the list):
- /opt/ml/iit/settings.json -- facility-specific settings
- /opt/ml/iit/locational_variables_latest.csv -- facility location variables
- /opt/ml/iit/models/thresholds_latest.pkl -- thresholds
- /opt/ml/iit/models/site_thresholds_latest.pkl -- site thresholds
- /opt/ml/iit/models/ohe_latest.pkl -- one-hot encoder
- /opt/ml/iit/models/mod_latest.json -- model (JSON)
- /opt/ml/iit/models/feature_order.pkl -- feature order expected by the model
- /opt/ml/iit/models/mod_latest.so -- model (compiled shared object)
- /opt/ml/iit/models/iit_test.sqlite -- demo SQLite database
- /opt/ml/iit/models/locational_variables_latest.csv -- facility location variables (under models/)
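A minimal shell loop to check all of the above paths in one pass (nothing project-specific; it only tests that each file exists):
for f in /opt/ml/iit/settings.json \
         /opt/ml/iit/locational_variables_latest.csv \
         /opt/ml/iit/models/thresholds_latest.pkl \
         /opt/ml/iit/models/site_thresholds_latest.pkl \
         /opt/ml/iit/models/ohe_latest.pkl \
         /opt/ml/iit/models/mod_latest.json \
         /opt/ml/iit/models/feature_order.pkl \
         /opt/ml/iit/models/mod_latest.so \
         /opt/ml/iit/models/iit_test.sqlite \
         /opt/ml/iit/models/locational_variables_latest.csv; do
  [ -f "$f" ] && echo "OK      $f" || echo "MISSING $f"
done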
Run the container, bind-mounting the settings, location data, and model artifacts listed above:
docker run \
  -v /opt/ml/iit/settings.json:/app/data/settings.json \
  -v /opt/ml/iit/locational_variables_latest.csv:/app/data/locational_variables_latest.csv \
  -v /opt/ml/iit/models/thresholds_latest.pkl:/app/data/models/thresholds_latest.pkl \
  -v /opt/ml/iit/models/site_thresholds_latest.pkl:/app/data/models/site_thresholds_latest.pkl \
  -v /opt/ml/iit/models/ohe_latest.pkl:/app/data/models/ohe_latest.pkl \
  -v /opt/ml/iit/models/mod_latest.json:/app/data/models/mod_latest.json \
  -v /opt/ml/iit/models/feature_order.pkl:/app/data/models/feature_order.pkl \
  -v /opt/ml/iit/models/mod_latest.so:/app/data/models/mod_latest.so \
  -v /opt/ml/iit/models/iit_test.sqlite:/app/data/models/iit_test.sqlite \
  -v /opt/ml/iit/models/locational_variables_latest.csv:/app/data/models/locational_variables_latest.csv \
  --add-host=host.docker.internal:host-gateway \
  --network host \
  -p 8000:8000 \
  kenyaemr-inference
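Note: with --network host the container shares the host's network stack, so Docker ignores the -p 8000:8000 publish option; both flags are kept above as given. To confirm the container is running and follow its logs (standard docker commands):
docker ps --filter ancestor=kenyaemr-inference
docker logs -f $(docker ps -q --filter ancestor=kenyaemr-inference)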
Alternatively, build and start the service with Docker Compose:
- docker compose up --build -- rebuild the image and start the service
- docker compose up -- start the service using the existing image
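Two standard Compose companions to the commands above (generic docker compose subcommands, not specific to this project):
- docker compose logs -f -- follow the service logs
- docker compose down -- stop and remove the containers started by docker compose up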
To clean up Docker resources:
- docker container prune -- remove all stopped containers
- docker image ls -- list images
- docker rmi "IMAGE ID" -- remove a specific image by ID
- docker builder prune -- clear the build cache
- docker rmi -f $(docker images -aq) -- force-remove all images
- docker system prune -a --volumes -f -- remove all unused containers, images, networks, and volumes
Test the inference endpoint with an example payload:
curl -X POST "http://localhost:8000/inference" -H "Content-Type: application/json" -d '{"ppk": "7E14A8034F39478149EE6A4CA37A247C631D17907C746BE0336D3D7CEC68F66F", "sc": "13074", "start_date": "2021-01-01", "end_date": "2025-01-01"}'
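Assuming the FastAPI defaults are not overridden in src.inference.api, the generated API docs are also reachable while the server is running:
curl -s http://localhost:8000/openapi.json | python3 -m json.tool   # or open http://localhost:8000/docs in a browser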
To run the API locally without Docker:
- python3.12 -m venv myenv -- create a virtual environment
- source myenv/bin/activate -- activate it
- pip --version -- confirm pip resolves to the virtual environment
- python --version -- confirm the Python version
- pip install --no-cache-dir -r requirements-inference.txt -- install the inference dependencies
- uvicorn src.inference.api:app --host 0.0.0.0 --port 8000 -- start the API server
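For iterative local development, uvicorn's generic --reload flag (not project-specific) restarts the server when source files change:
- uvicorn src.inference.api:app --host 0.0.0.0 --port 8000 --reload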