diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 984c1db8..96acfd23 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -9,9 +9,6 @@ "ghcr.io/devcontainers/features/node:1": { "nodeGypDependencies": true, "version": "18" - }, - "ghcr.io/devcontainers-contrib/features/vue-cli:2": { - "version": "latest" } }, @@ -48,7 +45,6 @@ "ms-vscode.cpptools", "aaron-bond.better-comments", "alefragnani.project-manager", - "bungcip.better-toml", "dbaeumer.vscode-eslint", "DavidAnson.vscode-markdownlint", "DotJoshJohnson.xml", @@ -56,7 +52,6 @@ "ecmel.vscode-html-css", "esbenp.prettier-vscode", "george-alisson.html-preview-vscode", - "genieai.chatgpt-vscode", "Gruntfuggly.todo-tree", "hnw.vscode-auto-open-markdown-preview", "kamikillerto.vscode-colorize", @@ -73,7 +68,6 @@ "pnp.polacode", "redhat.vscode-yaml", "streetsidesoftware.code-spell-checker", - "Vue.volar", "waderyan.gitblame", "wayou.vscode-todo-highlight", "wholroyd.jinja", diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 3e9eb650..642a48d4 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -17,13 +17,19 @@ jobs: run: | python -m pip install --upgrade pip pip install setuptools build twine - - name: Build and publish - env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: | - python -m build - twine upload dist/* + # - name: Build and publish + # env: + # TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} + # TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + # run: | + # python -m build + # twine upload dist/* + - name: Build package + run: python -m build + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.PYPI_API_TOKEN }} build: name: Deploy docs runs-on: ubuntu-latest diff --git a/CHANGELOG.md b/CHANGELOG.md index 690c75f1..1a464213 100644 --- a/CHANGELOG.md +++ 
b/CHANGELOG.md @@ -4,6 +4,67 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## Latest Changes +### Adding new db functions ([v2024.11.28.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/v2024.11.28.1)) + +#### What's Changed +* Adding new general execute queries and adding deprecation (#459) @devsetgo +* pip(deps): bump tox from 4.23.0 to 4.23.2 (#455) @dependabot +* pip(deps): bump fastapi[all] from 0.115.2 to 0.115.4 (#454) @dependabot +* pip(deps): bump tqdm from 4.66.5 to 4.66.6 (#456) @dependabot +* pip(deps): bump pymdown-extensions from 10.11.2 to 10.12 (#457) @dependabot +* pip(deps): bump ruff from 0.7.0 to 0.7.1 (#458) @dependabot + + +Published Date: 2024 November 28, 22:01 + +### Moving to Calendar Versioning ([2024.10.20.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/2024.10.20.1)) + +#### What's Changed +* moving to calendar versioning (#453) @devsetgo +* pip(deps): bump tox from 4.21.0 to 4.23.0 (#452) @dependabot +* pip(deps): bump fastapi[all] from 0.114.2 to 0.115.0 (#451) @dependabot +* pip(deps): bump tox from 4.18.1 to 4.21.0 (#450) @dependabot +* pip(deps): bump watchdog from 5.0.2 to 5.0.3 (#449) @dependabot +* pip(deps): bump pylint from 3.2.7 to 3.3.1 (#448) @dependabot +* pip(deps): bump ruff from 0.6.5 to 0.6.8 (#447) @dependabot + + +Published Date: 2024 October 20, 16:30 + +### Complete Replacement of CX-Oracle for OracleDB ([v0.14.4](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.4)) + +#### What's Changed +* Remove CX-Oracle for OracleDB cleanup (#446) @devsetgo +* pip(deps): bump pylint from 3.2.6 to 3.2.7 (#442) @dependabot +* pip(deps): bump mkdocs-material from 9.5.33 to 9.5.34 (#443) @dependabot +* github actionts(deps): bump actions/checkout from 2 to 4 (#444) @dependabot +* github actionts(deps): bump actions/setup-python from 2 to 5 (#445) @dependabot + + +Published Date: 2024 September 15, 
15:28 + +### Standard Logging Suppression by Default ([v0.14.3](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.3)) + +#### What's Changed +* Limit Standard Logging being Displayed (#441) @devsetgo + + +Published Date: 2024 August 31, 17:33 + +### Improvements and fixes ([v0.14.2](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.2)) + +#### What's Changed +* Improvements and fixes (#440) @devsetgo + + +#### Breaking changes +* save_text function no longer adds .txt by default. +* Change from cx-oracle to oracledb +* Improvements to documentation + + +Published Date: 2024 August 31, 00:02 + ### Adding DB Disconnect ([v0.14.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.1)) #### What's Changed @@ -508,191 +569,3 @@ Major changes are in PR #304 Published Date: 2023 April 01, 00:27 - -### Open CSV enhancements and library updates ([v0.9.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.9.0)) - -# What's Changed -* fix of latest changes (#288) @devsetgo -* Open_CSV Enhancements (#287) @devsetgo -* pip(deps): bump pytest-cov from 3.0.0 to 4.0.0 (#274) @dependabot -* pip(deps): bump mkdocs-material from 8.4.2 to 8.5.5 (#276) @dependabot -* pip(deps): bump autoflake from 1.5.3 to 1.6.1 (#275) @dependabot -* pip(deps): bump tqdm from 4.64.0 to 4.64.1 (#273) @dependabot -* pip(deps): bump pytest from 7.1.2 to 7.1.3 (#272) @dependabot -* pip(deps): bump mkdocs from 1.3.1 to 1.4.0 (#271) @dependabot -* pip(deps): bump tox from 3.25.1 to 3.26.0 (#269) @dependabot -* pip(deps): bump pylint from 2.15.0 to 2.15.3 (#270) @dependabot -* pip(deps): bump mkdocs-material from 8.3.9 to 8.4.2 (#268) @dependabot -* pip(deps): bump autopep8 from 1.6.0 to 1.7.0 (#264) @dependabot -* pip(deps): bump pylint from 2.14.5 to 2.15.0 (#265) @dependabot -* pip(deps): bump autoflake from 1.4 to 1.5.3 (#263) @dependabot -* pip(deps): bump black from 22.6.0 to 22.8.0 (#267) @dependabot -* pip(deps): bump flake8 from 5.0.1 to 5.0.4 (#266) @dependabot -* 
pip(deps): bump pre-commit from 2.19.0 to 2.20.0 (#260) @dependabot -* pip(deps): bump mkdocs from 1.3.0 to 1.3.1 (#261) @dependabot -* pip(deps): bump flake8 from 4.0.1 to 5.0.1 (#259) @dependabot -* pip(deps): bump mkdocs-material from 8.3.8 to 8.3.9 (#258) @dependabot -* pip(deps): bump pylint from 2.14.4 to 2.14.5 (#262) @dependabot -* pip(deps): bump twine from 4.0.0 to 4.0.1 (#252) @dependabot -* pip(deps): bump pylint from 2.14.0 to 2.14.4 (#251) @dependabot -* pip(deps): bump mkdocs-material from 8.2.16 to 8.3.8 (#253) @dependabot -* pip(deps): bump black from 22.3.0 to 22.6.0 (#254) @dependabot -* pip(deps): bump tox from 3.25.0 to 3.25.1 (#255) @dependabot -* pip(deps): bump watchdog from 2.1.8 to 2.1.9 (#256) @dependabot -* github actionts(deps): bump actions/setup-python from 3 to 4 (#257) @dependabot -* pip(deps): bump pylint from 2.13.7 to 2.14.0 (#250) @dependabot -* pip(deps): bump watchdog from 2.1.7 to 2.1.8 (#246) @dependabot -* pip(deps): bump pre-commit from 2.18.1 to 2.19.0 (#248) @dependabot -* pip(deps): bump mkdocs-material from 8.2.12 to 8.2.16 (#249) @dependabot -* pip(deps): bump tox from 3.24.5 to 3.25.0 (#242) @dependabot -* pip(deps): bump pre-commit from 2.17.0 to 2.18.1 (#243) @dependabot -* pip(deps): bump click from 8.1.2 to 8.1.3 (#245) @dependabot -* pip(deps): bump pylint from 2.13.4 to 2.13.7 (#240) @dependabot -* pip(deps): bump tqdm from 4.63.1 to 4.64.0 (#244) @dependabot -* pip(deps): bump mkdocs-material from 8.2.8 to 8.2.12 (#241) @dependabot -* pip(deps): bump pytest from 7.1.1 to 7.1.2 (#239) @dependabot -* pip(deps): bump watchdog from 2.1.6 to 2.1.7 (#238) @dependabot -* pip(deps): bump pylint from 2.12.2 to 2.13.4 (#237) @dependabot -* pip(deps): bump mkdocs from 1.2.3 to 1.3.0 (#234) @dependabot -* pip(deps): bump tqdm from 4.63.0 to 4.63.1 (#233) @dependabot -* pip(deps): bump black from 22.1.0 to 22.3.0 (#236) @dependabot -* pip(deps): bump pytest from 7.0.1 to 7.1.1 (#231) @dependabot -* pip(deps): bump click 
from 8.0.4 to 8.1.2 (#235) @dependabot -* pip(deps): bump mkdocs-material from 8.2.5 to 8.2.8 (#232) @dependabot -* pip(deps): bump twine from 3.8.0 to 4.0.0 (#230) @dependabot -* document updates (#229) @devsetgo - - -Published Date: 2022 December 04, 16:55 - -### Additional Logging Configuration ([v0.8.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.8.0)) - -# What's Changed -* New Logging Configuration items (#228) @devsetgo -* pip(deps): bump tqdm from 4.62.3 to 4.63.0 (#224) @dependabot -* pip(deps): bump mkdocs-material from 8.2.3 to 8.2.4 (#227) @dependabot -* github actionts(deps): bump actions/setup-python from 2.3.1 to 3 (#226) @dependabot -* pip(deps): bump mkdocs-material from 8.1.9 to 8.2.3 (#225) @dependabot -* pip(deps): bump twine from 3.7.1 to 3.8.0 (#223) @dependabot -* pip(deps): bump pytest from 6.2.5 to 7.0.1 (#222) @dependabot -* pip(deps): bump pytest-runner from 5.3.1 to 6.0.0 (#221) @dependabot -* pip(deps): bump loguru from 0.5.3 to 0.6.0 (#218) @dependabot -* pip(deps): bump black from 21.12b0 to 22.1.0 (#219) @dependabot -* pip(deps): bump mkdocs-material from 8.1.8 to 8.1.9 (#220) @dependabot - - -Published Date: 2022 March 12, 21:07 - -### ([v0.7.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.7.1)) - -# What's Changed -* Bump version: 0.7.0 → 0.7.1 (#217) @devsetgo -* Hotfix for setup file (#216) @devsetgo - - -Published Date: 2022 January 29, 01:51 - -### Logging to Beta Testing ([v0.7.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.7.0)) - -Logging is now has basic unit tests and is more ready to use with live application. 
- -# What's Changed -* Adding Logging Config (#215) @devsetgo -* pip(deps): bump pre-commit from 2.15.0 to 2.16.0 (#210) @dependabot -* pip(deps): bump pylint from 2.12.1 to 2.12.2 (#211) @dependabot -* pip(deps): bump tox from 3.24.4 to 3.24.5 (#212) @dependabot -* pip(deps): bump black from 21.11b1 to 21.12b0 (#213) @dependabot -* pip(deps): bump twine from 3.6.0 to 3.7.1 (#214) @dependabot -* pip(deps): bump twine from 3.5.0 to 3.6.0 (#204) @dependabot -* pip(deps): bump coverage-badge from 1.0.2 to 1.1.0 (#205) @dependabot -* pip(deps): bump mkdocs-material from 7.3.6 to 8.0.2 (#206) @dependabot -* pip(deps): bump pylint from 2.11.1 to 2.12.1 (#207) @dependabot -* pip(deps): bump black from 21.10b0 to 21.11b1 (#208) @dependabot -* github actionts(deps): bump actions/setup-python from 2.2.2 to 2.3.1 (#209) @dependabot -* Dev (#203) @devsetgo -* pip(deps): bump tox from 3.24.3 to 3.24.4 (#193) @dependabot -* pip(deps): bump tqdm from 4.62.2 to 4.62.3 (#194) @dependabot -* pip(deps): bump pylint from 2.10.2 to 2.11.1 (#195) @dependabot -* pip(deps): bump mkdocs-material from 7.2.6 to 7.3.0 (#196) @dependabot -* pip(deps): bump black from 21.8b0 to 21.9b0 (#197) @dependabot -* pip(deps): bump mkdocs-material from 7.2.4 to 7.2.6 (#189) @dependabot -* pip(deps): bump pytest from 6.2.4 to 6.2.5 (#191) @dependabot -* pip(deps): bump watchdog from 2.1.3 to 2.1.5 (#192) @dependabot -* pip(deps): bump tox from 3.24.1 to 3.24.3 (#190) @dependabot -* pip(deps): bump pre-commit from 2.14.0 to 2.15.0 (#188) @dependabot -* pip(deps): bump black from 21.7b0 to 21.8b0 (#187) @dependabot -* pip(deps): bump pylint from 2.9.6 to 2.10.2 (#184) @dependabot -* pip(deps): bump tqdm from 4.62.0 to 4.62.2 (#185) @dependabot -* github actionts(deps): bump actions/setup-python from 1 to 2.2.2 (#182) @dependabot -* Bump wheel from 0.36.2 to 0.37.0 (#180) @dependabot -* Bump mkdocs-material from 7.2.2 to 7.2.4 (#181) @dependabot -* Bump tox from 3.24.0 to 3.24.1 (#177) @dependabot -* Bump 
mkdocs-material from 7.2.1 to 7.2.2 (#178) @dependabot -* Bump pre-commit from 2.13.0 to 2.14.0 (#179) @dependabot -* Bump pylint from 2.9.5 to 2.9.6 (#176) @dependabot -* Bump tqdm from 4.61.2 to 4.62.0 (#175) @dependabot -* Bump mkdocs-material from 7.1.10 to 7.2.1 (#174) @dependabot -* Bump twine from 3.4.1 to 3.4.2 (#171) @dependabot -* Bump pylint from 2.9.3 to 2.9.5 (#170) @dependabot -* Bump mkdocs from 1.2.1 to 1.2.2 (#173) @dependabot -* documentation update (#169) @devsetgo -* README fix (#168) @devsetgo - - -Published Date: 2022 January 29, 01:42 - -### Logging Configuration ([v0.6.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.6.0)) - -# What's Changed -* Adding Logging and Cleanup (#167) @devsetgo -* Bump tqdm from 4.61.1 to 4.61.2 (#166) @dependabot -* Bump pylint from 2.8.3 to 2.9.3 (#165) @dependabot -* Bump watchdog from 2.1.2 to 2.1.3 (#164) @dependabot -* Bump mkdocs-material from 7.1.8 to 7.1.9 (#163) @dependabot -* Bump tqdm from 4.61.0 to 4.61.1 (#162) @dependabot -* Bump mkdocs-material from 7.1.7 to 7.1.8 (#161) @dependabot -* Bump mkdocs from 1.1.2 to 1.2.1 (#159) @dependabot -* Bump black from 21.5b2 to 21.6b0 (#158) @dependabot -* Bump mkdocs-material from 7.1.6 to 7.1.7 (#160) @dependabot -* Bump pytest-cov from 2.12.0 to 2.12.1 (#154) @dependabot -* Bump pylint from 2.8.2 to 2.8.3 (#155) @dependabot -* Bump black from 21.5b1 to 21.5b2 (#156) @dependabot -* Bump mkdocs-material from 7.1.5 to 7.1.6 (#157) @dependabot -* Bump tqdm from 4.60.0 to 4.61.0 (#153) @dependabot -* Bump pre-commit from 2.12.1 to 2.13.0 (#151) @dependabot -* Bump pytest-runner from 5.3.0 to 5.3.1 (#152) @dependabot -* Bump mkdocs-material from 7.1.4 to 7.1.5 (#150) @dependabot -* Bump watchdog from 2.1.1 to 2.1.2 (#149) @dependabot -* Bump click from 7.1.2 to 8.0.1 (#148) @dependabot -* Bump black from 21.5b0 to 21.5b1 (#147) @dependabot -* Bump watchdog from 2.1.0 to 2.1.1 (#146) @dependabot -* Bump pytest-cov from 2.11.1 to 2.12.0 (#145) @dependabot 
-* Bump flake8 from 3.9.1 to 3.9.2 (#143) @dependabot -* Bump pytest from 6.2.3 to 6.2.4 (#139) @dependabot -* Bump watchdog from 2.0.3 to 2.1.0 (#138) @dependabot -* Bump black from 21.4b2 to 21.5b0 (#140) @dependabot -* Bump mkdocs-material from 7.1.3 to 7.1.4 (#141) @dependabot -* Dev (#142) @devsetgo -* Bump tox from 3.23.0 to 3.23.1 (#137) @dependabot -* Bump autopep8 from 1.5.6 to 1.5.7 (#136) @dependabot -* Bump pylint from 2.7.4 to 2.8.2 (#135) @dependabot -* Bump black from 20.8b1 to 21.4b2 (#134) @dependabot -* Bump mkdocs-material from 7.1.2 to 7.1.3 (#133) @dependabot -* Adding SonarCloud Code Coverage (#130) @devsetgo -* Bump mkdocs-material from 7.1.1 to 7.1.2 (#132) @dependabot -* Bump watchdog from 2.0.2 to 2.0.3 (#131) @dependabot -* Bump pre-commit from 2.12.0 to 2.12.1 (#129) @dependabot -* Bump flake8 from 3.9.0 to 3.9.1 (#128) @dependabot -* Bump mkdocs-material from 7.1.0 to 7.1.1 (#127) @dependabot -* Bump tqdm from 4.59.0 to 4.60.0 (#124) @dependabot -* Bump pytest from 6.2.2 to 6.2.3 (#125) @dependabot -* Bump pre-commit from 2.11.1 to 2.12.0 (#126) @dependabot -* Bump pylint from 2.7.2 to 2.7.4 (#122) @dependabot -* Bump mkdocs-material from 7.0.6 to 7.1.0 (#123) @dependabot -* Bump mkdocs-material from 7.0.5 to 7.0.6 (#121) @dependabot -* Bump flake8 from 3.8.4 to 3.9.0 (#120) @dependabot -* Bump twine from 3.3.0 to 3.4.1 (#118) @dependabot -* Bump autopep8 from 1.5.5 to 1.5.6 (#119) @dependabot - - -Published Date: 2021 July 16, 23:44 diff --git a/README.md b/README.md index 77283bc2..0a643fa2 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,8 @@ SonarCloud: `devsetgo_lib` is a versatile library designed to provide common functions for Python applications. Its main goal is to increase reusability and reduce the need to rewrite the same functions across multiple applications. This also allows for quick defect resolution and propagation of fixes across all dependent projects. 
+Read the Full Documentation [here](https://devsetgo.github.io/devsetgo_lib/). + ## Key Features ### **Common Functions**: diff --git a/bin/act b/bin/act new file mode 100755 index 00000000..c5bb6822 Binary files /dev/null and b/bin/act differ diff --git a/coverage-badge.svg b/coverage-badge.svg index 05796758..073992bf 100644 --- a/coverage-badge.svg +++ b/coverage-badge.svg @@ -1 +1 @@ -coverage: 100.00%coverage100.00% +coverage: 99.16%coverage99.16% diff --git a/coverage.xml b/coverage.xml index f71424a8..4671dca0 100644 --- a/coverage.xml +++ b/coverage.xml @@ -1,6 +1,6 @@ - - + + /github/workspace @@ -25,14 +25,14 @@ - + - - - + + + @@ -42,23 +42,23 @@ - - - + + - - - + + + - - + - - - - - + + + + + + + @@ -132,237 +132,237 @@ - + - - + - + - - - + + + - - + + - + - - + - - - - + + + + - - - - - - - - - - + + + + + + + + + + - - - - - - - + + + - - - - - - - + + + + + + + - - - - - - - - - - + + + + + - - - - - - - - - - + + + + + + + + + + + - - + + - - - - - + + + + + + - + + - - - - - - - - - - - - - - - + + + + + + + + + - - - + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + - - - - - - - - - - + + + + + + + + + + + + - - + - - + - + - - - - - - - + + + + + + + + + - - - + + + + - - + + + - - + + - - - - - - - - + + + + + + - - + + + - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + - - - - - - - + + + + + + + + + + - + @@ -446,7 +446,7 @@ - + @@ -460,163 +460,254 @@ - - + - + - - - + + + - + - + - - - - - + + + + - - + + + + + - - + + + - - + - - - - - - - + - - + + + - - - - - - - - - + + + + + + - + - + - - - + + + - - - - - - - - - - - - - - - - - + + + + + + + + + - - - - + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + - - - - - - - - - - - - - - - - - - - + + + + + + - - + + - + - - + + + + + - - - + + + + + - - - - - - - - - - + - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -686,43 +777,43 @@ - - - + - - - + + + - - - - - + + - + - - - - - - - + + + + + + + + + - - + - - - - + + + + + + + + @@ -761,6 +852,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + @@ -768,22 +884,22 @@ - - - - - - + + + + + - - - - - - - - + + + + + + + + + diff --git a/docs/common_functions/file_mover.md b/docs/common_functions/file_mover.md new file mode 100644 index 00000000..52fc5722 --- /dev/null +++ b/docs/common_functions/file_mover.md @@ -0,0 +1,4 @@ +# Reference + +::: dsg_lib.common_functions.file_mover + handler: python diff --git a/docs/fastapi/default_endpoints.md b/docs/fastapi/default_endpoints.md new file mode 100644 index 00000000..285fff70 --- /dev/null +++ b/docs/fastapi/default_endpoints.md @@ -0,0 +1,4 @@ +# Reference + +::: dsg_lib.fastapi_functions.default_endpoints + handler: python diff --git a/docs/index.md b/docs/index.md index f882bb04..0a643fa2 100644 --- a/docs/index.md +++ b/docs/index.md @@ -9,6 +9,8 @@ Support Python Versions ![Static Badge](https://img.shields.io/badge/Python-3.12%20%7C%203.11%20%7C%203.10%20%7C%203.9-blue) [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) +[![Coverage Status](./coverage-badge.svg?dummy=8484744)](./reports/coverage/index.html) +[![Tests Status](./tests-badge.svg?dummy=8484744)](./reports/coverage/index.html) CI/CD Pipeline: @@ -29,6 +31,8 @@ SonarCloud: `devsetgo_lib` is a versatile library designed to provide common functions for Python applications. Its main goal is to increase reusability and reduce the need to rewrite the same functions across multiple applications. This also allows for quick defect resolution and propagation of fixes across all dependent projects. +Read the Full Documentation [here](https://devsetgo.github.io/devsetgo_lib/). 
+ ## Key Features ### **Common Functions**: diff --git a/docs/release-notes.md b/docs/release-notes.md index 5e0d0472..1a464213 100644 --- a/docs/release-notes.md +++ b/docs/release-notes.md @@ -4,6 +4,103 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## Latest Changes +### Adding new db functions ([v2024.11.28.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/v2024.11.28.1)) + +#### What's Changed +* Adding new general execute queries and adding deprecation (#459) @devsetgo +* pip(deps): bump tox from 4.23.0 to 4.23.2 (#455) @dependabot +* pip(deps): bump fastapi[all] from 0.115.2 to 0.115.4 (#454) @dependabot +* pip(deps): bump tqdm from 4.66.5 to 4.66.6 (#456) @dependabot +* pip(deps): bump pymdown-extensions from 10.11.2 to 10.12 (#457) @dependabot +* pip(deps): bump ruff from 0.7.0 to 0.7.1 (#458) @dependabot + + +Published Date: 2024 November 28, 22:01 + +### Moving to Calendar Versioning ([2024.10.20.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/2024.10.20.1)) + +#### What's Changed +* moving to calendar versioning (#453) @devsetgo +* pip(deps): bump tox from 4.21.0 to 4.23.0 (#452) @dependabot +* pip(deps): bump fastapi[all] from 0.114.2 to 0.115.0 (#451) @dependabot +* pip(deps): bump tox from 4.18.1 to 4.21.0 (#450) @dependabot +* pip(deps): bump watchdog from 5.0.2 to 5.0.3 (#449) @dependabot +* pip(deps): bump pylint from 3.2.7 to 3.3.1 (#448) @dependabot +* pip(deps): bump ruff from 0.6.5 to 0.6.8 (#447) @dependabot + + +Published Date: 2024 October 20, 16:30 + +### Complete Replacement of CX-Oracle for OracleDB ([v0.14.4](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.4)) + +#### What's Changed +* Remove CX-Oracle for OracleDB cleanup (#446) @devsetgo +* pip(deps): bump pylint from 3.2.6 to 3.2.7 (#442) @dependabot +* pip(deps): bump mkdocs-material from 9.5.33 to 9.5.34 (#443) @dependabot +* github actionts(deps): 
bump actions/checkout from 2 to 4 (#444) @dependabot +* github actionts(deps): bump actions/setup-python from 2 to 5 (#445) @dependabot + + +Published Date: 2024 September 15, 15:28 + +### Standard Logging Suppression by Default ([v0.14.3](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.3)) + +#### What's Changed +* Limit Standard Logging being Displayed (#441) @devsetgo + + +Published Date: 2024 August 31, 17:33 + +### Improvements and fixes ([v0.14.2](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.2)) + +#### What's Changed +* Improvements and fixes (#440) @devsetgo + + +#### Breaking changes +* save_text function no longer adds .txt by default. +* Change from cx-oracle to oracledb +* Improvements to documentation + + +Published Date: 2024 August 31, 00:02 + +### Adding DB Disconnect ([v0.14.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.1)) + +#### What's Changed +* Adding Database Disconnect (#439) @devsetgo +* pip(deps): bump pre-commit from 3.7.1 to 3.8.0 (#434) @dependabot +* updates to deal with stashing pages (#437) @devsetgo +* working on issue for deployment (#436) @devsetgo +* Adding MKDocs Workflow (#435) @devsetgo +* Version 0.14.0 (#433) @devsetgo + + +Published Date: 2024 August 25, 18:47 + +### Fix of version for Pypi ([v0.14.0-a](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.0-a)) + +#### What's Changed +* Version 0.14.0 (#433) @devsetgo + + +Published Date: 2024 July 27, 22:40 + +### High Speed Multi-Processing Improvements ([v0.14.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.14.0)) + +#### What's Changed +* High Speed Logging for Loguru Multi-Processing (#432) @devsetgo +* Resilient Sink Fixes (#431) @devsetgo +* Fix of bug in resilient sink (#430) @devsetgo +* Adding Resiliency to Logging Config (#429) @devsetgo +* pip(deps): bump mkdocs-print-site-plugin from 2.4.1 to 2.5.0 (#422) @dependabot +* pip(deps): bump ruff from 0.4.5 to 0.4.7 (#420) @dependabot +* pip(deps): bump 
autopep8 from 2.1.1 to 2.2.0 (#421) @dependabot +* pip(deps): bump mkdocs-material from 9.5.24 to 9.5.25 (#423) @dependabot + + +Published Date: 2024 July 27, 22:28 + ### ([v0.13.0-republish](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.13.0-republish)) #### What's Changed @@ -472,301 +569,3 @@ Major changes are in PR #304 Published Date: 2023 April 01, 00:27 - -### Open CSV enhancements and library updates ([v0.9.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.9.0)) - -# What's Changed -* fix of latest changes (#288) @devsetgo -* Open_CSV Enhancements (#287) @devsetgo -* pip(deps): bump pytest-cov from 3.0.0 to 4.0.0 (#274) @dependabot -* pip(deps): bump mkdocs-material from 8.4.2 to 8.5.5 (#276) @dependabot -* pip(deps): bump autoflake from 1.5.3 to 1.6.1 (#275) @dependabot -* pip(deps): bump tqdm from 4.64.0 to 4.64.1 (#273) @dependabot -* pip(deps): bump pytest from 7.1.2 to 7.1.3 (#272) @dependabot -* pip(deps): bump mkdocs from 1.3.1 to 1.4.0 (#271) @dependabot -* pip(deps): bump tox from 3.25.1 to 3.26.0 (#269) @dependabot -* pip(deps): bump pylint from 2.15.0 to 2.15.3 (#270) @dependabot -* pip(deps): bump mkdocs-material from 8.3.9 to 8.4.2 (#268) @dependabot -* pip(deps): bump autopep8 from 1.6.0 to 1.7.0 (#264) @dependabot -* pip(deps): bump pylint from 2.14.5 to 2.15.0 (#265) @dependabot -* pip(deps): bump autoflake from 1.4 to 1.5.3 (#263) @dependabot -* pip(deps): bump black from 22.6.0 to 22.8.0 (#267) @dependabot -* pip(deps): bump flake8 from 5.0.1 to 5.0.4 (#266) @dependabot -* pip(deps): bump pre-commit from 2.19.0 to 2.20.0 (#260) @dependabot -* pip(deps): bump mkdocs from 1.3.0 to 1.3.1 (#261) @dependabot -* pip(deps): bump flake8 from 4.0.1 to 5.0.1 (#259) @dependabot -* pip(deps): bump mkdocs-material from 8.3.8 to 8.3.9 (#258) @dependabot -* pip(deps): bump pylint from 2.14.4 to 2.14.5 (#262) @dependabot -* pip(deps): bump twine from 4.0.0 to 4.0.1 (#252) @dependabot -* pip(deps): bump pylint from 2.14.0 to 2.14.4 
(#251) @dependabot -* pip(deps): bump mkdocs-material from 8.2.16 to 8.3.8 (#253) @dependabot -* pip(deps): bump black from 22.3.0 to 22.6.0 (#254) @dependabot -* pip(deps): bump tox from 3.25.0 to 3.25.1 (#255) @dependabot -* pip(deps): bump watchdog from 2.1.8 to 2.1.9 (#256) @dependabot -* github actionts(deps): bump actions/setup-python from 3 to 4 (#257) @dependabot -* pip(deps): bump pylint from 2.13.7 to 2.14.0 (#250) @dependabot -* pip(deps): bump watchdog from 2.1.7 to 2.1.8 (#246) @dependabot -* pip(deps): bump pre-commit from 2.18.1 to 2.19.0 (#248) @dependabot -* pip(deps): bump mkdocs-material from 8.2.12 to 8.2.16 (#249) @dependabot -* pip(deps): bump tox from 3.24.5 to 3.25.0 (#242) @dependabot -* pip(deps): bump pre-commit from 2.17.0 to 2.18.1 (#243) @dependabot -* pip(deps): bump click from 8.1.2 to 8.1.3 (#245) @dependabot -* pip(deps): bump pylint from 2.13.4 to 2.13.7 (#240) @dependabot -* pip(deps): bump tqdm from 4.63.1 to 4.64.0 (#244) @dependabot -* pip(deps): bump mkdocs-material from 8.2.8 to 8.2.12 (#241) @dependabot -* pip(deps): bump pytest from 7.1.1 to 7.1.2 (#239) @dependabot -* pip(deps): bump watchdog from 2.1.6 to 2.1.7 (#238) @dependabot -* pip(deps): bump pylint from 2.12.2 to 2.13.4 (#237) @dependabot -* pip(deps): bump mkdocs from 1.2.3 to 1.3.0 (#234) @dependabot -* pip(deps): bump tqdm from 4.63.0 to 4.63.1 (#233) @dependabot -* pip(deps): bump black from 22.1.0 to 22.3.0 (#236) @dependabot -* pip(deps): bump pytest from 7.0.1 to 7.1.1 (#231) @dependabot -* pip(deps): bump click from 8.0.4 to 8.1.2 (#235) @dependabot -* pip(deps): bump mkdocs-material from 8.2.5 to 8.2.8 (#232) @dependabot -* pip(deps): bump twine from 3.8.0 to 4.0.0 (#230) @dependabot -* document updates (#229) @devsetgo - - -Published Date: 2022 December 04, 16:55 - -### Additional Logging Configuration ([v0.8.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.8.0)) - -# What's Changed -* New Logging Configuration items (#228) @devsetgo -* 
pip(deps): bump tqdm from 4.62.3 to 4.63.0 (#224) @dependabot -* pip(deps): bump mkdocs-material from 8.2.3 to 8.2.4 (#227) @dependabot -* github actionts(deps): bump actions/setup-python from 2.3.1 to 3 (#226) @dependabot -* pip(deps): bump mkdocs-material from 8.1.9 to 8.2.3 (#225) @dependabot -* pip(deps): bump twine from 3.7.1 to 3.8.0 (#223) @dependabot -* pip(deps): bump pytest from 6.2.5 to 7.0.1 (#222) @dependabot -* pip(deps): bump pytest-runner from 5.3.1 to 6.0.0 (#221) @dependabot -* pip(deps): bump loguru from 0.5.3 to 0.6.0 (#218) @dependabot -* pip(deps): bump black from 21.12b0 to 22.1.0 (#219) @dependabot -* pip(deps): bump mkdocs-material from 8.1.8 to 8.1.9 (#220) @dependabot - - -Published Date: 2022 March 12, 21:07 - -### ([v0.7.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.7.1)) - -# What's Changed -* Bump version: 0.7.0 → 0.7.1 (#217) @devsetgo -* Hotfix for setup file (#216) @devsetgo - - -Published Date: 2022 January 29, 01:51 - -### Logging to Beta Testing ([v0.7.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.7.0)) - -Logging is now has basic unit tests and is more ready to use with live application. 
- -# What's Changed -* Adding Logging Config (#215) @devsetgo -* pip(deps): bump pre-commit from 2.15.0 to 2.16.0 (#210) @dependabot -* pip(deps): bump pylint from 2.12.1 to 2.12.2 (#211) @dependabot -* pip(deps): bump tox from 3.24.4 to 3.24.5 (#212) @dependabot -* pip(deps): bump black from 21.11b1 to 21.12b0 (#213) @dependabot -* pip(deps): bump twine from 3.6.0 to 3.7.1 (#214) @dependabot -* pip(deps): bump twine from 3.5.0 to 3.6.0 (#204) @dependabot -* pip(deps): bump coverage-badge from 1.0.2 to 1.1.0 (#205) @dependabot -* pip(deps): bump mkdocs-material from 7.3.6 to 8.0.2 (#206) @dependabot -* pip(deps): bump pylint from 2.11.1 to 2.12.1 (#207) @dependabot -* pip(deps): bump black from 21.10b0 to 21.11b1 (#208) @dependabot -* github actionts(deps): bump actions/setup-python from 2.2.2 to 2.3.1 (#209) @dependabot -* Dev (#203) @devsetgo -* pip(deps): bump tox from 3.24.3 to 3.24.4 (#193) @dependabot -* pip(deps): bump tqdm from 4.62.2 to 4.62.3 (#194) @dependabot -* pip(deps): bump pylint from 2.10.2 to 2.11.1 (#195) @dependabot -* pip(deps): bump mkdocs-material from 7.2.6 to 7.3.0 (#196) @dependabot -* pip(deps): bump black from 21.8b0 to 21.9b0 (#197) @dependabot -* pip(deps): bump mkdocs-material from 7.2.4 to 7.2.6 (#189) @dependabot -* pip(deps): bump pytest from 6.2.4 to 6.2.5 (#191) @dependabot -* pip(deps): bump watchdog from 2.1.3 to 2.1.5 (#192) @dependabot -* pip(deps): bump tox from 3.24.1 to 3.24.3 (#190) @dependabot -* pip(deps): bump pre-commit from 2.14.0 to 2.15.0 (#188) @dependabot -* pip(deps): bump black from 21.7b0 to 21.8b0 (#187) @dependabot -* pip(deps): bump pylint from 2.9.6 to 2.10.2 (#184) @dependabot -* pip(deps): bump tqdm from 4.62.0 to 4.62.2 (#185) @dependabot -* github actionts(deps): bump actions/setup-python from 1 to 2.2.2 (#182) @dependabot -* Bump wheel from 0.36.2 to 0.37.0 (#180) @dependabot -* Bump mkdocs-material from 7.2.2 to 7.2.4 (#181) @dependabot -* Bump tox from 3.24.0 to 3.24.1 (#177) @dependabot -* Bump 
mkdocs-material from 7.2.1 to 7.2.2 (#178) @dependabot -* Bump pre-commit from 2.13.0 to 2.14.0 (#179) @dependabot -* Bump pylint from 2.9.5 to 2.9.6 (#176) @dependabot -* Bump tqdm from 4.61.2 to 4.62.0 (#175) @dependabot -* Bump mkdocs-material from 7.1.10 to 7.2.1 (#174) @dependabot -* Bump twine from 3.4.1 to 3.4.2 (#171) @dependabot -* Bump pylint from 2.9.3 to 2.9.5 (#170) @dependabot -* Bump mkdocs from 1.2.1 to 1.2.2 (#173) @dependabot -* documentation update (#169) @devsetgo -* README fix (#168) @devsetgo - - -Published Date: 2022 January 29, 01:42 - -### Logging Configuration ([v0.6.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.6.0)) - -# What's Changed -* Adding Logging and Cleanup (#167) @devsetgo -* Bump tqdm from 4.61.1 to 4.61.2 (#166) @dependabot -* Bump pylint from 2.8.3 to 2.9.3 (#165) @dependabot -* Bump watchdog from 2.1.2 to 2.1.3 (#164) @dependabot -* Bump mkdocs-material from 7.1.8 to 7.1.9 (#163) @dependabot -* Bump tqdm from 4.61.0 to 4.61.1 (#162) @dependabot -* Bump mkdocs-material from 7.1.7 to 7.1.8 (#161) @dependabot -* Bump mkdocs from 1.1.2 to 1.2.1 (#159) @dependabot -* Bump black from 21.5b2 to 21.6b0 (#158) @dependabot -* Bump mkdocs-material from 7.1.6 to 7.1.7 (#160) @dependabot -* Bump pytest-cov from 2.12.0 to 2.12.1 (#154) @dependabot -* Bump pylint from 2.8.2 to 2.8.3 (#155) @dependabot -* Bump black from 21.5b1 to 21.5b2 (#156) @dependabot -* Bump mkdocs-material from 7.1.5 to 7.1.6 (#157) @dependabot -* Bump tqdm from 4.60.0 to 4.61.0 (#153) @dependabot -* Bump pre-commit from 2.12.1 to 2.13.0 (#151) @dependabot -* Bump pytest-runner from 5.3.0 to 5.3.1 (#152) @dependabot -* Bump mkdocs-material from 7.1.4 to 7.1.5 (#150) @dependabot -* Bump watchdog from 2.1.1 to 2.1.2 (#149) @dependabot -* Bump click from 7.1.2 to 8.0.1 (#148) @dependabot -* Bump black from 21.5b0 to 21.5b1 (#147) @dependabot -* Bump watchdog from 2.1.0 to 2.1.1 (#146) @dependabot -* Bump pytest-cov from 2.11.1 to 2.12.0 (#145) @dependabot 
-* Bump flake8 from 3.9.1 to 3.9.2 (#143) @dependabot -* Bump pytest from 6.2.3 to 6.2.4 (#139) @dependabot -* Bump watchdog from 2.0.3 to 2.1.0 (#138) @dependabot -* Bump black from 21.4b2 to 21.5b0 (#140) @dependabot -* Bump mkdocs-material from 7.1.3 to 7.1.4 (#141) @dependabot -* Dev (#142) @devsetgo -* Bump tox from 3.23.0 to 3.23.1 (#137) @dependabot -* Bump autopep8 from 1.5.6 to 1.5.7 (#136) @dependabot -* Bump pylint from 2.7.4 to 2.8.2 (#135) @dependabot -* Bump black from 20.8b1 to 21.4b2 (#134) @dependabot -* Bump mkdocs-material from 7.1.2 to 7.1.3 (#133) @dependabot -* Adding SonarCloud Code Coverage (#130) @devsetgo -* Bump mkdocs-material from 7.1.1 to 7.1.2 (#132) @dependabot -* Bump watchdog from 2.0.2 to 2.0.3 (#131) @dependabot -* Bump pre-commit from 2.12.0 to 2.12.1 (#129) @dependabot -* Bump flake8 from 3.9.0 to 3.9.1 (#128) @dependabot -* Bump mkdocs-material from 7.1.0 to 7.1.1 (#127) @dependabot -* Bump tqdm from 4.59.0 to 4.60.0 (#124) @dependabot -* Bump pytest from 6.2.2 to 6.2.3 (#125) @dependabot -* Bump pre-commit from 2.11.1 to 2.12.0 (#126) @dependabot -* Bump pylint from 2.7.2 to 2.7.4 (#122) @dependabot -* Bump mkdocs-material from 7.0.6 to 7.1.0 (#123) @dependabot -* Bump mkdocs-material from 7.0.5 to 7.0.6 (#121) @dependabot -* Bump flake8 from 3.8.4 to 3.9.0 (#120) @dependabot -* Bump twine from 3.3.0 to 3.4.1 (#118) @dependabot -* Bump autopep8 from 1.5.5 to 1.5.6 (#119) @dependabot - - -Published Date: 2021 July 16, 23:44 - -### Fixing Publish ([v0.5.0-2](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.5.0-2)) - -# What's Changed -* adding update for publish (#117) @devsetgo - - -Published Date: 2021 March 18, 17:19 - -### Calendar and RegEx Function + Documentation ([v0.5.0](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.5.0)) - -# What's Changed -* Adding Calendar Functions (#116) @devsetgo -* Bump pre-commit from 2.10.1 to 2.11.1 (#113) @dependabot -* update to Saturday (#115) @devsetgo -* Bump tqdm 
from 4.58.0 to 4.59.0 (#112) @dependabot -* Bump mkdocs-material from 7.0.4 to 7.0.5 (#114) @dependabot -* fixes for mkdoc material update (#111) @devsetgo -* Bump tox from 3.22.0 to 3.23.0 (#109) @dependabot -* Bump mkdocs-material from 7.0.2 to 7.0.4 (#108) @dependabot -* Bump pylint from 2.7.1 to 2.7.2 (#107) @dependabot -* Bump coverage from 5.4 to 5.5 (#110) @dependabot -* Bump pylint from 2.6.2 to 2.7.1 (#103) @dependabot -* Bump mkdocs-material from 6.2.8 to 7.0.2 (#104) @dependabot -* Bump watchdog from 2.0.1 to 2.0.2 (#105) @dependabot -* Bump tqdm from 4.57.0 to 4.58.0 (#106) @dependabot -* Bump tox from 3.21.4 to 3.22.0 (#101) @dependabot -* Bump watchdog from 2.0.0 to 2.0.1 (#99) @dependabot -* Bump pylint from 2.6.0 to 2.6.2 (#102) @dependabot -* Bump tqdm from 4.56.2 to 4.57.0 (#100) @dependabot -* Bump pytest-runner from 5.2 to 5.3.0 (#98) @dependabot -* Bump tqdm from 4.56.0 to 4.56.2 (#97) @dependabot -* Bump watchdog from 1.0.2 to 2.0.0 (#96) @dependabot -* Bump pre-commit from 2.10.0 to 2.10.1 (#95) @dependabot -* Bump mkdocs-material from 6.2.6 to 6.2.8 (#94) @dependabot -* Bump tox from 3.21.3 to 3.21.4 (#93) @dependabot -* Bump autopep8 from 1.5.4 to 1.5.5 (#92) @dependabot -* Bump tox from 3.21.2 to 3.21.3 (#87) @dependabot -* Bump mkdocs-material from 6.2.5 to 6.2.6 (#88) @dependabot -* Bump pytest from 6.2.1 to 6.2.2 (#89) @dependabot -* Bump coverage from 5.3.1 to 5.4 (#91) @dependabot -* Bump pre-commit from 2.9.3 to 2.10.0 (#90) @dependabot -* Bump tox from 3.21.1 to 3.21.2 (#84) @dependabot -* Bump mkdocs-material from 6.2.4 to 6.2.5 (#85) @dependabot -* Bump pytest-cov from 2.10.1 to 2.11.1 (#86) @dependabot -* Bump tox from 3.20.1 to 3.21.1 (#81) @dependabot -* Bump mkdocs-material from 6.2.3 to 6.2.4 (#82) @dependabot -* Bump tqdm from 4.55.1 to 4.56.0 (#83) @dependabot -* Bump tqdm from 4.55.0 to 4.55.1 (#80) @dependabot -* Bump mkdocs-material from 6.2.2 to 6.2.3 (#79) @dependabot - - -Published Date: 2021 March 18, 17:06 - -### 
Minor updates and library updates. ([v0.4.1](https://github.com/devsetgo/devsetgo_lib/releases/tag/v0.4.1)) - -# What's Changed -* Updates and Minor updates (#78) @devsetgo -* Bump tqdm from 4.54.1 to 4.55.0 (#77) @dependabot -* Bump twine from 3.2.0 to 3.3.0 (#76) @dependabot -* Bump coverage from 5.3 to 5.3.1 (#74) @dependabot -* Bump mkdocs-material from 6.1.7 to 6.2.2 (#75) @dependabot -* Bump watchdog from 0.10.4 to 1.0.2 (#73) @dependabot -* Bump pytest from 6.1.2 to 6.2.1 (#71) @dependabot -* Bump wheel from 0.36.1 to 0.36.2 (#70) @dependabot -* Bump tqdm from 4.54.0 to 4.54.1 (#67) @dependabot -* Bump mkdocs-material from 6.1.6 to 6.1.7 (#68) @dependabot -* Bump pre-commit from 2.9.2 to 2.9.3 (#69) @dependabot -* Bump wheel from 0.36.0 to 0.36.1 (#66) @dependabot -* Bump wheel from 0.35.1 to 0.36.0 (#64) @dependabot -* Bump tqdm from 4.53.0 to 4.54.0 (#65) @dependabot -* Bump pre-commit from 2.8.2 to 2.9.2 (#61) @dependabot -* Bump mkdocs-material from 6.1.5 to 6.1.6 (#60) @dependabot -* Bump tqdm from 4.52.0 to 4.53.0 (#62) @dependabot -* Bump watchdog from 0.10.3 to 0.10.4 (#63) @dependabot -* Bump tqdm from 4.51.0 to 4.52.0 (#59) @dependabot -* Bump mkdocs-material from 6.1.4 to 6.1.5 (#58) @dependabot -* Bump mkdocs-material from 6.1.2 to 6.1.4 (#57) @dependabot -* Bump pre-commit from 2.8.0 to 2.8.2 (#55) @dependabot -* Bump mkdocs-material from 6.1.0 to 6.1.2 (#56) @dependabot -* Bump pytest from 6.1.1 to 6.1.2 (#52) @dependabot -* Bump pre-commit from 2.7.1 to 2.8.0 (#53) @dependabot -* Bump tqdm from 4.50.2 to 4.51.0 (#54) @dependabot -* Bump mkdocs-material from 6.0.2 to 6.1.0 (#51) @dependabot -* Bump tqdm from 4.50.1 to 4.50.2 (#49) @dependabot -* Bump tox from 3.20.0 to 3.20.1 (#50) @dependabot -* Bump pytest from 6.1.0 to 6.1.1 (#48) @dependabot -* Bump mkdocs-material from 6.0.1 to 6.0.2 (#47) @dependabot -* Bump flake8 from 3.8.3 to 3.8.4 (#45) @dependabot -* Bump tqdm from 4.50.0 to 4.50.1 (#44) @dependabot -* Bump bump2version from 1.0.0 to 
1.0.1 (#46) @dependabot -* Bump tqdm from 4.49.0 to 4.50.0 (#42) @dependabot -* Bump black from 19.10b0 to 20.8b1 (#43) @dependabot -* Bump tqdm from 4.46.0 to 4.49.0 (#40) @dependabot -* Bump pytest from 5.4.2 to 6.1.0 (#39) @dependabot -* Bump coverage from 5.1 to 5.3 (#38) @dependabot -* Bump autoflake from 1.3.1 to 1.4 (#41) @dependabot -* Bump twine from 3.1.1 to 3.2.0 (#37) @dependabot -* Bump wheel from 0.34.2 to 0.35.1 (#34) @dependabot -* Bump pytest-cov from 2.9.0 to 2.10.1 (#36) @dependabot -* Bump watchdog from 0.10.2 to 0.10.3 (#35) @dependabot -* Bump mkdocs-material from 5.2.2 to 6.0.1 (#33) @dependabot -* Bump pylint from 2.5.2 to 2.6.0 (#32) @dependabot-preview -* Bump pre-commit from 2.4.0 to 2.7.1 (#31) @dependabot-preview -* Bump tox from 3.15.1 to 3.20.0 (#30) @dependabot-preview -* Bump flake8 from 3.8.2 to 3.8.3 (#29) @dependabot-preview -* Bump autopep8 from 1.5.2 to 1.5.4 (#28) @dependabot-preview - - -Published Date: 2020 December 26, 23:51 diff --git a/dsg_lib/__init__.py b/dsg_lib/__init__.py index 179fd161..615b63f3 100644 --- a/dsg_lib/__init__.py +++ b/dsg_lib/__init__.py @@ -3,12 +3,12 @@ DevSetGo Library ========= -Author: Mike Ryan +Author: Mike Ryan License: MIT """ from datetime import date -__version__ = "2024-11-28-001" +__version__ = "25.04.05-001" __author__ = "Mike Ryan" __license__ = "MIT" __copyright__ = f"Copyright© 2021-{date.today().year}" diff --git a/dsg_lib/async_database_functions/__import_sqlalchemy.py b/dsg_lib/async_database_functions/__import_sqlalchemy.py index 161cdde9..445ccf4d 100644 --- a/dsg_lib/async_database_functions/__import_sqlalchemy.py +++ b/dsg_lib/async_database_functions/__import_sqlalchemy.py @@ -6,14 +6,21 @@ Usage example: ```python - from import_sqlalchemy import import_sqlalchemy + from dsg_lib.async_database_functions.__import_sqlalchemy import import_sqlalchemy - sqlalchemy_components = import_sqlalchemy() - sqlalchemy, MetaData, create_engine, text, Column, DateTime, String, 
IntegrityError, SQLAlchemyError, AsyncSession = sqlalchemy_components + ( + sqlalchemy, MetaData, create_engine, text, IntegrityError, SQLAlchemyError, + AsyncSession, create_async_engine, select, declarative_base, sessionmaker, + Column, DateTime, String, func, NoResultFound, + ) = import_sqlalchemy() - # Example usage of imported components + # Synchronous example engine = create_engine('sqlite:///example.db') metadata = MetaData() + + # Asynchronous example + async_engine = create_async_engine('sqlite+aiosqlite:///example.db') + async_session = AsyncSession(async_engine) ``` Author(s): @@ -22,7 +29,7 @@ Date Created: 2024/05/16 Date Updated: - 2024/07/26 + 2025/02/15 - docstring and comments updated """ from typing import Tuple @@ -46,28 +53,41 @@ def import_sqlalchemy() -> Tuple: Tuple: A tuple containing the following SQLAlchemy components: - sqlalchemy: The SQLAlchemy module. - MetaData: The MetaData class from SQLAlchemy. - - create_engine: The create_engine function from SQLAlchemy. - - text: The text function from SQLAlchemy. - - Column: The Column class from SQLAlchemy. - - DateTime: The DateTime class from SQLAlchemy. - - String: The String class from SQLAlchemy. - - IntegrityError: The IntegrityError exception from SQLAlchemy. - - SQLAlchemyError: The SQLAlchemyError exception from SQLAlchemy. - - AsyncSession: The AsyncSession class from SQLAlchemy. + - create_engine: The function to create a synchronous engine. + - text: The function that creates SQL text expressions. + - IntegrityError: The exception raised on integrity constraint violations. + - SQLAlchemyError: The base exception class for SQLAlchemy errors. + - AsyncSession: The class for managing asynchronous database sessions. + - create_async_engine: The function to create an asynchronous engine. + - select: The future select function for query construction. + - declarative_base: The factory function for creating a declarative base class. + - sessionmaker: The configurable session factory. 
+ - Column: The class used to define a table column. + - DateTime: The type used for temporal column definitions. + - String: The type used for textual column definitions. + - func: A namespace for SQL functions. + - NoResultFound: The exception raised when a query returns no result. Raises: - ImportError: If SQLAlchemy is not installed or the version is below the minimum required version. + ImportError: If SQLAlchemy is not installed or if its version is below the minimum required version. Example: ```python - from import_sqlalchemy import import_sqlalchemy + from dsg_lib.async_database_functions.__import_sqlalchemy import import_sqlalchemy - sqlalchemy_components = import_sqlalchemy() - sqlalchemy, MetaData, create_engine, text, Column, DateTime, String, IntegrityError, SQLAlchemyError, AsyncSession = sqlalchemy_components + ( + sqlalchemy, MetaData, create_engine, text, IntegrityError, SQLAlchemyError, + AsyncSession, create_async_engine, select, declarative_base, sessionmaker, + Column, DateTime, String, func, NoResultFound, + ) = import_sqlalchemy() - # Example usage of imported components + # Synchronous engine usage example engine = create_engine('sqlite:///example.db') metadata = MetaData() + + # Example usage of asynchronous components + async_engine = create_async_engine('sqlite+aiosqlite:///example.db') + async_session = AsyncSession(async_engine) ``` """ min_version = "2.0.0" # Minimum required version of SQLAlchemy diff --git a/dsg_lib/async_database_functions/async_database.py b/dsg_lib/async_database_functions/async_database.py index ede01d8c..568b3349 100644 --- a/dsg_lib/async_database_functions/async_database.py +++ b/dsg_lib/async_database_functions/async_database.py @@ -1,55 +1,59 @@ # -*- coding: utf-8 -*- -"""async_database.py. +""" +async_database.py This module provides classes for managing asynchronous database operations using SQLAlchemy and asyncio. Classes: - - DBConfig: Manages the database configuration. 
- - AsyncDatabase: Manages the asynchronous database operations. - -The DBConfig class initializes the database configuration and creates a -SQLAlchemy engine and a MetaData instance. - -The AsyncDatabase class uses an instance of DBConfig to perform asynchronous -database operations. It provides methods to get a database session and to create -tables in the database. + - DBConfig: Initializes and manages the database configuration including the + creation of the SQLAlchemy engine and MetaData instance. + - AsyncDatabase: Leverages a DBConfig instance to perform asynchronous + database operations such as obtaining sessions, creating tables, and disconnecting + from the database. -This module uses the logger from the dsg_lib.common_functions for logging. +Logging is performed using the logger from dsg_lib.common_functions. Example: -```python -from dsg_lib.async_database_functions import ( - async_database, - base_schema, - database_config, - database_operations, -) - -# Create a DBConfig instance -config = { - "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared", - "echo": False, - "future": True, - "pool_recycle": 3600, -} - -# create database configuration -db_config = database_config.DBConfig(config) - -# Create an AsyncDatabase instance -async_db = async_database.AsyncDatabase(db_config) - -# Create a DatabaseOperations instance -db_ops = database_operations.DatabaseOperations(async_db) -``` - -Author: Mike Ryan -Date: 2024/05/16 -License: MIT + ```python + from dsg_lib.async_database_functions import ( + async_database, + base_schema, + database_config, + database_operations, + ) + + # Define database configuration + config = { + "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared", + "echo": False, + "future": True, + "pool_recycle": 3600, + } + + # Create the configuration instance + db_config = database_config.DBConfig(config) + + # Instantiate AsyncDatabase with the given configuration + async_db = async_database.AsyncDatabase(db_config) + + 
# Optionally, create a DatabaseOperations instance + db_ops = database_operations.DatabaseOperations(async_db) + ``` + +Author: + Mike Ryan + +Date Created: + 2024/05/16 + +Date Updated: + 2025/02/15 - docstring and comments updated + +License: + MIT """ - # from loguru import logger # import logging as logger from .. import LOGGER as logger @@ -58,77 +62,88 @@ class AsyncDatabase: """ - A class used to manage the asynchronous database operations. + Manages asynchronous database operations. + + This class provides methods to acquire database sessions, create tables asynchronously, + and disconnect the database engine safely. Attributes ---------- db_config : DBConfig - an instance of DBConfig class containing the database configuration + An instance of DBConfig containing the database configuration such as the engine. Base : Base - the declarative base model for SQLAlchemy + The declarative base model used by SQLAlchemy to define database models. Methods ------- get_db_session(): - Returns a context manager that provides a new database session. + Returns a context manager that yields a new asynchronous database session. create_tables(): - Asynchronously creates all tables in the database. + Asynchronously creates all tables as defined in the metadata. + disconnect(): + Asynchronously disconnects the database engine. """ def __init__(self, db_config: DBConfig): - """Initialize the AsyncDatabase class with an instance of DBConfig. - - Parameters: - db_config (DBConfig): An instance of DBConfig class containing the - database configuration. + """ + Initialize AsyncDatabase with a database configuration. - Returns: None + Parameters + ---------- + db_config : DBConfig + An instance of DBConfig containing the necessary database configurations. """ self.db_config = db_config self.Base = BASE logger.debug("AsyncDatabase initialized") def get_db_session(self): - """This method returns a context manager that provides a new database - session. 
- - Parameters: None + """ + Obtain a new asynchronous database session. - Returns: contextlib._GeneratorContextManager: A context manager that - provides a new database session. + Returns + ------- + contextlib._GeneratorContextManager + A context manager that yields a new database session. """ logger.debug("Getting database session") return self.db_config.get_db_session() async def create_tables(self): - """This method asynchronously creates all tables in the database. + """ + Asynchronously create all tables defined in the metadata. - Parameters: None + This method binds the engine to the Base metadata and runs the table creation + in a synchronous manner within an asynchronous transaction. - Returns: None + Raises + ------ + Exception + Propagates any exceptions encountered during table creation. """ logger.debug("Creating tables") try: - # Bind the engine to the metadata of the base class + # Bind the engine to the Base metadata self.Base.metadata.bind = self.db_config.engine - # Begin a new transaction + # Begin an asynchronous transaction and create tables synchronously async with self.db_config.engine.begin() as conn: - # Run a function in a synchronous manner await conn.run_sync(self.Base.metadata.create_all) logger.info("Tables created successfully") except Exception as ex: # pragma: no cover - # Log the error and raise it logger.error(f"Error creating tables: {ex}") # pragma: no cover raise # pragma: no cover - async def disconnect(self): # pragma: no cover + async def disconnect(self): # pragma: no cover """ - This method asynchronously disconnects the database engine. + Asynchronously disconnect the database engine. - Parameters: None + Closes all connections and disposes of the engine resources. - Returns: None + Raises + ------ + Exception + Propagates any exceptions encountered during disconnection. 
""" logger.debug("Disconnecting from database") try: diff --git a/dsg_lib/async_database_functions/database_operations.py b/dsg_lib/async_database_functions/database_operations.py index 6ba73287..29bc8fd1 100644 --- a/dsg_lib/async_database_functions/database_operations.py +++ b/dsg_lib/async_database_functions/database_operations.py @@ -4,14 +4,9 @@ The `DatabaseOperations` class includes the following methods: - - `create_one`: Creates a single record in the database. - - `create_many`: Creates multiple records in the database. - - `read_one`: Reads a single record from the database. - - `read_many`: Reads multiple records from the database. - - `update_one`: Updates a single record in the database. - - `update_many`: Updates multiple records in the database. - - `delete_one`: Deletes a single record from the database. - - `delete_many`: Deletes multiple records from the database. + - `execute_one`: Executes a single non-read SQL query asynchronously. + - `execute_many`: Executes multiple non-read SQL queries asynchronously within a single transaction. + - 'read_one_record': Retrieves a single record from the database based on the provided query. - `read_query`: Executes a fetch query on the database and returns a list of records that match the query. - `read_multi_query`: Executes multiple fetch queries on the database and returns a dictionary of results for each query. - `count_query`: Counts the number of records that match a given query. @@ -19,13 +14,20 @@ - `get_primary_keys`: Gets the primary keys of a table. - `get_table_names`: Gets the names of all tables in the database. + Deprecated Methods: + - `create_one`: [Deprecated] Use `execute_one` with an INSERT query instead. + - `create_many`: [Deprecated] Use `execute_many` with INSERT queries instead. + - `update_one`: [Deprecated] Use `execute_one` with an UPDATE query instead. + - `update_many`: [Deprecated] Use `execute_many` with UPDATE queries instead. 
+ - `delete_one`: [Deprecated] Use `execute_one` with a DELETE query instead. + - `delete_many`: [Deprecated] Use `execute_many` with DELETE queries instead. Each method is designed to handle errors correctly and provide a simple interface for performing database operations. This module also imports the necessary SQLAlchemy and loguru modules, and the `AsyncDatabase` class from the local `async_database` module. Author: Mike Ryan -Date: 2024/05/16 +Date: 2024/11/29 License: MIT """ import functools @@ -39,6 +41,7 @@ from .. import LOGGER as logger from .__import_sqlalchemy import import_sqlalchemy + # Importing AsyncDatabase class from local module async_database from .async_database import AsyncDatabase @@ -119,7 +122,9 @@ async def wrapped(*args, **kwargs): stacklevel=2, ) return await func(*args, **kwargs) + return wrapped + return decorator @@ -129,14 +134,9 @@ class DatabaseOperations: The methods include: - - `create_one`: Creates a single record in the database. - - `create_many`: Creates multiple records in the database. - - `read_one`: Reads a single record from the database. - - `read_many`: Reads multiple records from the database. - - `update_one`: Updates a single record in the database. - - `update_many`: Updates multiple records in the database. - - `delete_one`: Deletes a single record from the database. - - `delete_many`: Deletes multiple records from the database. + - `execute_one`: Executes a single non-read SQL query asynchronously. + - `execute_many`: Executes multiple non-read SQL queries asynchronously within a single transaction. + - `read_one_record`: Retrieves a single record from the database based on the provided query. - `read_query`: Executes a fetch query on the database and returns a list of records that match the query. - `read_multi_query`: Executes multiple fetch queries on the database and returns a dictionary of results for each query. - `count_query`: Counts the number of records that match a given query. 
@@ -144,8 +144,23 @@ class DatabaseOperations: - `get_primary_keys`: Gets the primary keys of a table. - `get_table_names`: Gets the names of all tables in the database. + Deprecated Methods: + - `create_one`: [Deprecated] Use `execute_one` with an INSERT query instead. + - `create_many`: [Deprecated] Use `execute_many` with INSERT queries instead. + - `update_one`: [Deprecated] Use `execute_one` with an UPDATE query instead. + - `delete_one`: [Deprecated] Use `execute_one` with a DELETE query instead. + - `delete_many`: [Deprecated] Use `execute_many` with DELETE queries instead. + Examples: ```python + from sqlalchemy import insert, select + from dsg_lib.async_database_functions import ( + async_database, + base_schema, + database_config, + database_operations, + ) + # Create a DBConfig instance config = { "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared", @@ -159,10 +174,14 @@ class DatabaseOperations: async_db = async_database.AsyncDatabase(db_config) # Create a DatabaseOperations instance db_ops = database_operations.DatabaseOperations(async_db) + # create one record - data = await db_ops.create_one(User(name='John Doe')) + query = insert(User).values(name='John Doe') + result = await db_ops.execute_one(query) + # read one record - record = await db_ops.read_one(User, 1) + query = select(User).where(User.name == 'John Doe') + record = await db_ops.read_query(query) ``` """ @@ -451,165 +470,6 @@ async def get_table_names(self): logger.error(f"Exception occurred: {ex}") # pragma: no cover return handle_exceptions(ex) # pragma: no cover - @deprecated("Use `execute_one` with an INSERT query instead.") - async def create_one(self, record): - """ - Adds a single record to the database. - - This asynchronous method accepts a record object and adds it to the - database. If the operation is successful, it returns the added record. - The method is useful for inserting a new row into a database table. 
- - Parameters: - record (Base): An instance of the SQLAlchemy declarative base class - representing the record to be added to the database. - - Returns: - Base: The instance of the record that was added to the database. - - Raises: - Exception: If any error occurs during the database operation. - - Example: - ```python - from dsg_lib.async_database_functions import ( - async_database, - base_schema, - database_config, - database_operations, - ) - # Create a DBConfig instance - config = { - # "database_uri": "postgresql+asyncpg://postgres:postgres@postgresdb/postgres", - "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared", - "echo": False, - "future": True, - # "pool_pre_ping": True, - # "pool_size": 10, - # "max_overflow": 10, - "pool_recycle": 3600, - # "pool_timeout": 30, - } - # create database configuration - db_config = database_config.DBConfig(config) - # Create an AsyncDatabase instance - async_db = async_database.AsyncDatabase(db_config) - # Create a DatabaseOperations instance - db_ops = database_operations.DatabaseOperations(async_db) - # create one record - record = await db_ops.create_one(User(name='John Doe')) - ``` - """ - # Log the start of the operation - logger.debug("Starting create_one operation") - - try: - # Start a new database session - async with self.async_db.get_db_session() as session: - # Log the record being added - logger.debug(f"Adding record to session: {record.__dict__}") - - # Add the record to the session and commit the changes - session.add(record) - await session.commit() - - # Log the successful record addition - logger.debug(f"Record added successfully: {record}") - - return record - - except Exception as ex: - # Handle any exceptions that occur during the record addition - logger.error(f"Exception occurred: {ex}") - return handle_exceptions(ex) - - @deprecated("Use `execute_one` with an INSERT query instead.") - async def create_many(self, records): - """ - Adds multiple records to the database. 
- - This asynchronous method accepts a list of record objects and adds them - to the database. If the operation is successful, it returns the added - records. This method is useful for bulk inserting multiple rows into a - database table efficiently. - - Parameters: - records (list[Base]): A list of instances of the SQLAlchemy - declarative base class, each representing a record to be added to - the database. - - Returns: - list[Base]: A list of instances of the records that were added to - the database. - - Raises: - Exception: If any error occurs during the database operation. - - Example: - ```python - from dsg_lib.async_database_functions import ( - async_database, - base_schema, - database_config, - database_operations, - ) - # Create a DBConfig instance - config = { - # "database_uri": "postgresql+asyncpg://postgres:postgres@postgresdb/postgres", - "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared", - "echo": False, - "future": True, - # "pool_pre_ping": True, - # "pool_size": 10, - # "max_overflow": 10, - "pool_recycle": 3600, - # "pool_timeout": 30, - } - # create database configuration - db_config = database_config.DBConfig(config) - # Create an AsyncDatabase instance - async_db = async_database.AsyncDatabase(db_config) - # Create a DatabaseOperations instance - db_ops = database_operations.DatabaseOperations(async_db) - # create many records - records = await db_ops.create_many([User(name='John Doe'), User(name='Jane Doe')]) - ``` - """ - # Log the start of the operation - logger.debug("Starting create_many operation") - - try: - # Start a timer to measure the operation time - t0 = time.time() - - # Start a new database session - async with self.async_db.get_db_session() as session: - # Log the number of records being added - logger.debug(f"Adding {len(records)} records to session") - - # Add the records to the session and commit the changes - session.add_all(records) - await session.commit() - - # Log the added records - records_data = 
[record.__dict__ for record in records] - logger.debug(f"Records added to session: {records_data}") - - # Calculate the operation time and log the successful record - # addition - num_records = len(records) - t1 = time.time() - t0 - logger.debug( - f"Record operations were successful. {num_records} records were created in {t1:.4f} seconds." - ) - - return records - - except Exception as ex: - # Handle any exceptions that occur during the record addition - logger.error(f"Exception occurred: {ex}") - return handle_exceptions(ex) - async def count_query(self, query): """ Executes a count query on the database and returns the number of records @@ -922,9 +782,260 @@ async def read_multi_query(self, queries: Dict[str, str]): logger.error(f"Exception occurred: {ex}") return handle_exceptions(ex) + async def execute_one( + self, query: ClauseElement, values: Optional[Dict[str, Any]] = None + ) -> Union[str, Dict[str, str]]: + """ + Executes a single non-read SQL query asynchronously. + + This method executes a single SQL statement that modifies the database, + such as INSERT, UPDATE, or DELETE. It handles the execution within an + asynchronous session and commits the transaction upon success. + + Args: + query (ClauseElement): An SQLAlchemy query object representing the SQL statement to execute. + values (Optional[Dict[str, Any]]): A dictionary of parameter values to bind to the query. + Defaults to None. + + Returns: + Union[str, Dict[str, str]]: "complete" if the query executed and committed successfully, + or an error dictionary if an exception occurred. 
+ + Example: + ```python + from sqlalchemy import insert + + query = insert(User).values(name='John Doe') + result = await db_ops.execute_one(query) + ``` + """ + logger.debug("Starting execute_one operation") + try: + async with self.async_db.get_db_session() as session: + logger.debug(f"Executing query: {query}") + await session.execute(query, params=values) + await session.commit() + logger.debug("Query executed successfully") + return "complete" + except Exception as ex: + logger.error(f"Exception occurred: {ex}") + return handle_exceptions(ex) + + async def execute_many( + self, queries: List[Tuple[ClauseElement, Optional[Dict[str, Any]]]] + ) -> Union[str, Dict[str, str]]: + """ + Executes multiple non-read SQL queries asynchronously within a single transaction. + + This method executes a list of SQL statements that modify the database, + such as multiple INSERTs, UPDATEs, or DELETEs. All queries are executed + within the same transaction, which is committed if all succeed, or rolled + back if any fail. + + Args: + queries (List[Tuple[ClauseElement, Optional[Dict[str, Any]]]]): A list of tuples, each containing + a query and an optional dictionary of parameter values. Each tuple should be of the form + `(query, values)` where: + - `query` is an SQLAlchemy query object. + - `values` is a dictionary of parameters to bind to the query (or None). + + Returns: + Union[str, Dict[str, str]]: "complete" if all queries executed and committed successfully, + or an error dictionary if an exception occurred. 
+ + Example: + ```python + from sqlalchemy import insert + + queries = [ + (insert(User), {'name': 'User1'}), + (insert(User), {'name': 'User2'}), + (insert(User), {'name': 'User3'}), + ] + result = await db_ops.execute_many(queries) + ``` + """ + logger.debug("Starting execute_many operation") + try: + async with self.async_db.get_db_session() as session: + for query, values in queries: + logger.debug(f"Executing query: {query}") + await session.execute(query, params=values) + await session.commit() + logger.debug("All queries executed successfully") + return "complete" + except Exception as ex: + logger.error(f"Exception occurred: {ex}") + return handle_exceptions(ex) + + @deprecated("Use `execute_one` with an INSERT query instead.") + async def create_one(self, record): + """ + This method is deprecated. Use `execute_one` with an INSERT query instead. + + Adds a single record to the database. + + This asynchronous method accepts a record object and adds it to the + database. If the operation is successful, it returns the added record. + The method is useful for inserting a new row into a database table. + + Parameters: + record (Base): An instance of the SQLAlchemy declarative base class + representing the record to be added to the database. + + Returns: + Base: The instance of the record that was added to the database. + + Raises: + Exception: If any error occurs during the database operation. 
+ + Example: + ```python + from dsg_lib.async_database_functions import ( + async_database, + base_schema, + database_config, + database_operations, + ) + # Create a DBConfig instance + config = { + # "database_uri": "postgresql+asyncpg://postgres:postgres@postgresdb/postgres", + "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared", + "echo": False, + "future": True, + # "pool_pre_ping": True, + # "pool_size": 10, + # "max_overflow": 10, + "pool_recycle": 3600, + # "pool_timeout": 30, + } + # create database configuration + db_config = database_config.DBConfig(config) + # Create an AsyncDatabase instance + async_db = async_database.AsyncDatabase(db_config) + # Create a DatabaseOperations instance + db_ops = database_operations.DatabaseOperations(async_db) + # create one record + record = await db_ops.create_one(User(name='John Doe')) + ``` + """ + # Log the start of the operation + logger.debug("Starting create_one operation") + + try: + # Start a new database session + async with self.async_db.get_db_session() as session: + # Log the record being added + logger.debug(f"Adding record to session: {record.__dict__}") + + # Add the record to the session and commit the changes + session.add(record) + await session.commit() + + # Log the successful record addition + logger.debug(f"Record added successfully: {record}") + + return record + + except Exception as ex: + # Handle any exceptions that occur during the record addition + logger.error(f"Exception occurred: {ex}") + return handle_exceptions(ex) + + @deprecated("Use `execute_one` with an INSERT query instead.") + async def create_many(self, records): + """ + This method is deprecated. Use `execute_many` with INSERT queries instead. + + Adds multiple records to the database. + + This asynchronous method accepts a list of record objects and adds them + to the database. If the operation is successful, it returns the added + records. 
This method is useful for bulk inserting multiple rows into a + database table efficiently. + + Parameters: + records (list[Base]): A list of instances of the SQLAlchemy + declarative base class, each representing a record to be added to + the database. + + Returns: + list[Base]: A list of instances of the records that were added to + the database. + + Raises: + Exception: If any error occurs during the database operation. + + Example: + ```python + from dsg_lib.async_database_functions import ( + async_database, + base_schema, + database_config, + database_operations, + ) + # Create a DBConfig instance + config = { + # "database_uri": "postgresql+asyncpg://postgres:postgres@postgresdb/postgres", + "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared", + "echo": False, + "future": True, + # "pool_pre_ping": True, + # "pool_size": 10, + # "max_overflow": 10, + "pool_recycle": 3600, + # "pool_timeout": 30, + } + # create database configuration + db_config = database_config.DBConfig(config) + # Create an AsyncDatabase instance + async_db = async_database.AsyncDatabase(db_config) + # Create a DatabaseOperations instance + db_ops = database_operations.DatabaseOperations(async_db) + # create many records + records = await db_ops.create_many([User(name='John Doe'), User(name='Jane Doe')]) + ``` + """ + # Log the start of the operation + logger.debug("Starting create_many operation") + + try: + # Start a timer to measure the operation time + t0 = time.time() + + # Start a new database session + async with self.async_db.get_db_session() as session: + # Log the number of records being added + logger.debug(f"Adding {len(records)} records to session") + + # Add the records to the session and commit the changes + session.add_all(records) + await session.commit() + + # Log the added records + records_data = [record.__dict__ for record in records] + logger.debug(f"Records added to session: {records_data}") + + # Calculate the operation time and log the successful record + # 
addition + num_records = len(records) + t1 = time.time() - t0 + logger.debug( + f"Record operations were successful. {num_records} records were created in {t1:.4f} seconds." + ) + + return records + + except Exception as ex: + # Handle any exceptions that occur during the record addition + logger.error(f"Exception occurred: {ex}") + return handle_exceptions(ex) + @deprecated("Use `execute_one` with a UPDATE query instead.") async def update_one(self, table, record_id: str, new_values: dict): """ + This method is deprecated. Use `execute_one` with an UPDATE query instead. + Updates a single record in the database identified by its ID. This asynchronous method takes a SQLAlchemy `Table` object, a record ID, @@ -1019,6 +1130,8 @@ async def update_one(self, table, record_id: str, new_values: dict): @deprecated("Use `execute_many` with a DELETE query instead.") async def delete_one(self, table, record_id: str): """ + This method is deprecated. Use `execute_one` with a DELETE query instead. + Deletes a single record from the database based on the provided table and record ID. @@ -1128,6 +1241,8 @@ async def delete_many( id_values: List[int] = None, ) -> int: """ + This method is deprecated. Use `execute_many` with a DELETE query instead. + Deletes multiple records from the specified table in the database. This method takes a table, an optional id column name, and a list of id values. It deletes the records in the table where the id column matches any of the id values in the list. @@ -1206,93 +1321,3 @@ async def delete_many( # Handle any exceptions that occur during the record deletion logger.error(f"Exception occurred: {ex}") return handle_exceptions(ex) - - - async def execute_one( - self, - query: ClauseElement, - values: Optional[Dict[str, Any]] = None - ) -> Union[str, Dict[str, str]]: - """ - Executes a single non-read SQL query asynchronously. - - This method executes a single SQL statement that modifies the database, - such as INSERT, UPDATE, or DELETE. 
It handles the execution within an - asynchronous session and commits the transaction upon success. - - Args: - query (ClauseElement): An SQLAlchemy query object representing the SQL statement to execute. - values (Optional[Dict[str, Any]]): A dictionary of parameter values to bind to the query. - Defaults to None. - - Returns: - Union[str, Dict[str, str]]: "complete" if the query executed and committed successfully, - or an error dictionary if an exception occurred. - - Example: - ```python - from sqlalchemy import insert - - query = insert(User).values(name='John Doe') - result = await db_ops.execute_one(query) - ``` - """ - logger.debug("Starting execute_one operation") - try: - async with self.async_db.get_db_session() as session: - logger.debug(f"Executing query: {query}") - await session.execute(query, params=values) - await session.commit() - logger.debug("Query executed successfully") - return "complete" - except Exception as ex: - logger.error(f"Exception occurred: {ex}") - return handle_exceptions(ex) - - async def execute_many( - self, - queries: List[Tuple[ClauseElement, Optional[Dict[str, Any]]]] - ) -> Union[str, Dict[str, str]]: - """ - Executes multiple non-read SQL queries asynchronously within a single transaction. - - This method executes a list of SQL statements that modify the database, - such as multiple INSERTs, UPDATEs, or DELETEs. All queries are executed - within the same transaction, which is committed if all succeed, or rolled - back if any fail. - - Args: - queries (List[Tuple[ClauseElement, Optional[Dict[str, Any]]]]): A list of tuples, each containing - a query and an optional dictionary of parameter values. Each tuple should be of the form - `(query, values)` where: - - `query` is an SQLAlchemy query object. - - `values` is a dictionary of parameters to bind to the query (or None). 
- - Returns: - Union[str, Dict[str, str]]: "complete" if all queries executed and committed successfully, - or an error dictionary if an exception occurred. - - Example: - ```python - from sqlalchemy import insert - - queries = [ - (insert(User), {'name': 'User1'}), - (insert(User), {'name': 'User2'}), - (insert(User), {'name': 'User3'}), - ] - result = await db_ops.execute_many(queries) - ``` - """ - logger.debug("Starting execute_many operation") - try: - async with self.async_db.get_db_session() as session: - for query, values in queries: - logger.debug(f"Executing query: {query}") - await session.execute(query, params=values) - await session.commit() - logger.debug("All queries executed successfully") - return "complete" - except Exception as ex: - logger.error(f"Exception occurred: {ex}") - return handle_exceptions(ex) diff --git a/dsg_lib/common_functions/file_functions.py b/dsg_lib/common_functions/file_functions.py index 183c8fba..0779a5eb 100644 --- a/dsg_lib/common_functions/file_functions.py +++ b/dsg_lib/common_functions/file_functions.py @@ -79,10 +79,13 @@ def delete_file(file_name: str) -> str: ```python from dsg_lib.common_functions import file_functions - file_functions.delete_file("test.csv") + file_functions.delete_file(file_name="test.csv") - # Outputs: 'File deleted successfully' + # Outputs: 'complete' ``` + Additional usage info: + - Returns "complete" if file is successfully deleted. + - Ensure correct file permissions and directory structure. 
""" logger.info(f"Deleting file: {file_name}") @@ -145,15 +148,19 @@ def save_json(file_name: str, data, root_folder: str = None) -> str: ```python from dsg_lib.common_functions import file_functions - data = {"key": "value"} - - file_functions.save_json("test.json", data, "/path/to/directory") + json_data = {"key": "value"} + file_functions.save_json(file_name="test.json", data=json_data, root_folder="/path/to/directory") # Saves data to '/path/to/directory/test.json' ``` + Additional usage info: + - Suitable for config files, logs, or structured application data. + - Returns "File saved successfully" on success. + Additional clarification: + - Defaults to "data/json" if no root_folder is provided. + - You can supply any valid file path in root_folder to override. """ try: - # Validate inputs if not isinstance(data, (list, dict)): raise TypeError( f"data must be a list or a dictionary instead of type {type(data)}" @@ -161,29 +168,19 @@ def save_json(file_name: str, data, root_folder: str = None) -> str: if "/" in file_name or "\\" in file_name: raise ValueError(f"{file_name} cannot contain / or \\") - # Add extension if not present in file_name - if not file_name.endswith(".json"): # pragma: no cover - file_name += ".json" # pragma: no cover - - if root_folder is None: - root_folder = directory_to_files - - # Determine directory - json_directory = Path(root_folder) / "json" + if not file_name.endswith(".json"): + file_name += ".json" - # Construct file path - file_path = json_directory / file_name + target_folder = Path(root_folder) if root_folder else Path("data/json") + # Create the target folder if it doesn't exist + target_folder.mkdir(parents=True, exist_ok=True) - # Create the json directory if it does not exist - json_directory.mkdir(parents=True, exist_ok=True) + file_path = target_folder / file_name - # Write data to file with open(file_path, "w") as write_file: json.dump(data, write_file) - # Log success message logger.info(f"File created: {file_path}") 
- return "File saved successfully" except (TypeError, ValueError) as e: @@ -204,6 +201,17 @@ def open_json(file_name: str) -> dict: Raises: TypeError: If the file name is not a string. FileNotFoundError: If the file does not exist. + + Example: + ```python + from dsg_lib.common_functions import file_functions + + result_dict = file_functions.open_json(file_name="test.json") + # result_dict is a dictionary loaded from 'test.json' + ``` + Additional usage info: + - Returns a dictionary loaded from the JSON file. + - Commonly used for reading app settings or user data. """ # Check if file name is a string if not isinstance(file_name, str): @@ -229,9 +237,6 @@ def open_json(file_name: str) -> dict: return result -# CSV File Processing TODO: Append CSV - - def save_csv( file_name: str, data: list, @@ -264,23 +269,27 @@ def save_csv( quotechar is not a string. ValueError: If the file name does not end with '.csv'. - Example: ```python + Example: + ```python from dsg_lib.common_functions import file_functions - data = [{"column1": "value1", "column2": "value2"}] - - file_functions.save_csv("test.csv", data, "/path/to/directory", delimiter=";", quotechar="'") + csv_data = [ + ["column1", "column2"], + ["value1", "value2"] + ] + file_functions.save_csv(file_name="test.csv", data=csv_data,root_folder="/path/to/directory", delimiter=";", quotechar="'") # Saves data to '/path/to/directory/test.csv' ``` + Additional usage info: + - Ideal for exporting data for spreadsheet analysis. + - Returns "complete" if file is saved successfully. + Additional clarification: + - Defaults to "data/csv" if no root_folder is provided. + - You can supply any valid file path in root_folder to override. 
""" - # Set the root folder to directory_to_files if None - if root_folder is None: - root_folder = directory_to_files - - # Create the csv directory if it does not exist - csv_directory = Path(root_folder) / "csv" - csv_directory.mkdir(parents=True, exist_ok=True) + target_folder = Path(root_folder) if root_folder else Path("data/csv") + target_folder.mkdir(parents=True, exist_ok=True) # Check that delimiter and quotechar are single characters if len(delimiter) != 1: @@ -301,7 +310,7 @@ def save_csv( file_name += ".csv" # Create the file path - file_path = csv_directory / file_name + file_path = target_folder / file_name # Write data to file with open(file_path, "w", encoding="utf-8", newline="") as csv_file: @@ -312,41 +321,99 @@ def save_csv( return "complete" -def open_csv( +def append_csv( file_name: str, + data: list, + root_folder: str = None, delimiter: str = ",", - quote_level: str = "minimal", - skip_initial_space: bool = True, -) -> list: + quotechar: str = '"', +) -> str: """ - Opens a CSV file with the specified file name and returns its contents as a - list of dictionaries. + Appends a list of rows to an existing CSV file with the specified file name + in the specified directory. Each element of the `data` list should be a row + (list of values), and the header in `data[0]` must match the existing CSV's + header. Args: - file_name (str): The name of the file to open. Should include the '.csv' - extension. delimiter (str, optional): The character used to separate - fields in the CSV file. Defaults to ','. quote_level (str, optional): - The quoting level used in the CSV file. Valid levels are "none", - "minimal", and "all". Defaults to "minimal". skip_initial_space (bool, - optional): Whether to skip initial whitespace in the CSV file. Defaults - to True. + file_name (str): The name of the CSV file to append data to. Can be + provided without the '.csv' extension. data (list): Rows to append + (list of lists), where the first row is the header. 
root_folder (str, + optional): The root directory where the file is located. If None, the + default directory is used. Defaults to None. delimiter (str, optional): + The character used to separate fields in the CSV file. Defaults to ','. + quotechar (str, optional): The character used to quote fields in the CSV + file. Defaults to '"'. Returns: - list: The contents of the CSV file as a list of dictionaries. Each - dictionary represents a row in the CSV file, where the keys are column - names and the values are the data for those columns. + str: Returns "appended" if the rows were successfully appended. Raises: - TypeError: If `file_name` is not a string. ValueError: If `quote_level` - is not a valid level. FileNotFoundError: If the file does not exist. + FileNotFoundError: If the CSV file does not exist. + ValueError: If the header row in `data` does not match the existing + header in the file. TypeError: If `data` is not a list or `file_name` is + not valid. Example: ```python - from dsg_lib.common_functions import file_functions data = - file_functions.open_csv("test.csv", delimiter=";", quote_level="all", - skip_initial_space=False) # Returns: [{'column1': 'value1', 'column2': - 'value2'}] + from dsg_lib.common_functions import file_functions + + csv_rows = [ + ["column1", "column2"], + ["appended_value1", "appended_value2"] + ] + result = file_functions.append_csv( + file_name="test.csv", + data=csv_rows, + root_folder="/path/to/directory" + ) + # result would be "appended" on success ``` + + Additional usage info: + - Ideal for appending more rows to an existing CSV with matching header. + - Defaults to "data/csv" if no root_folder is provided. + - You can supply any valid file path in root_folder to override. 
+ """ + target_folder = Path(root_folder) if root_folder else Path("data/csv") + file_path = target_folder / ( + file_name if file_name.endswith(".csv") else f"{file_name}.csv" + ) + + if not file_path.is_file(): + raise FileNotFoundError(f"CSV not found: {file_path}") + + if not isinstance(data, list): + raise TypeError("data must be a list of rows") + + # Read existing CSV header + with file_path.open("r", encoding="utf-8") as csv_file: + reader = csv.reader(csv_file, delimiter=delimiter, quotechar=quotechar) + existing_header = next(reader) + + # Check new data's header + new_header = data[0] + if existing_header != new_header: + raise ValueError("Headers do not match. Cannot append.") + + # Append the new rows + with file_path.open("a", encoding="utf-8", newline="") as csv_file: + writer = csv.writer(csv_file, delimiter=delimiter, quotechar=quotechar) + # Skip first row (header) to avoid duplication + writer.writerows(data[1:]) + + return "appended" + + +def open_csv( + file_name: str, + delimiter: str = ",", + quote_level: str = "minimal", + skip_initial_space: bool = True, + **kwargs, +) -> list: + """ + Opens a CSV file with the specified file name and returns its contents + as a list of dictionaries. 
""" # A dictionary that maps quote levels to csv quoting constants quote_levels = { @@ -354,14 +421,21 @@ def open_csv( "minimal": csv.QUOTE_MINIMAL, "all": csv.QUOTE_ALL, } - # Check that file name is a string if not isinstance(file_name, str): error = f"{file_name} is not a valid string" logger.error(error) raise TypeError(error) - # Check that quote level is valid + # Check delimiter is single character + if len(delimiter) != 1: + raise TypeError(f"{delimiter} can only be a single character") + + # Reject any 'quotechar' usage for now + if "quotechar" in kwargs: + raise TypeError("quotechar is not supported in open_csv") + + # Validate quote_level quote_level = quote_level.lower() if quote_level not in quote_levels: error = f"Invalid quote level: {quote_level}. Valid levels are: {', '.join(quote_levels)}" @@ -369,18 +443,14 @@ def open_csv( raise ValueError(error) quoting = quote_levels[quote_level] - # Add extension to file name and create file path - file_name = f"{file_name}.csv" file_directory = Path.cwd().joinpath(directory_to_files).joinpath("csv") file_path = file_directory.joinpath(file_name) - # Check that file exists if not file_path.is_file(): error = f"File not found: {file_path}" logger.error(error) raise FileNotFoundError(error) - # Read CSV file data = [] with file_path.open(encoding="utf-8") as f: reader = csv.DictReader( @@ -396,6 +466,128 @@ def open_csv( return data +def save_text(file_name: str, data: str, root_folder: str = None) -> str: + """ + Saves a string of text to a file with the specified file name in the + specified directory. + + Args: + file_name (str): The name of the file to save the data in. Should not + include the '.txt' extension. data (str): The text data to be saved. + root_folder (str, optional): The root directory where the file will be + saved. If None, the file will be saved in the current directory. + Defaults to None. 
+ + Returns: + str: A message indicating whether the file has been saved successfully + or an error occurred. + + Raises: + TypeError: If the `data` parameter is not a string, or the `file_name` + contains a forward slash or backslash. FileNotFoundError: If the + directory does not exist. + + Example: + ```python + from dsg_lib.common_functions import file_functions + + text_data = "This is a test text file." + file_functions.save_text(file_name="test.txt", data=text_data, root_folder="/path/to/directory") + + # Saves data to '/path/to/directory/test.txt' + ``` + Additional usage info: + - Writes plain text data to a .txt file. + - Returns "complete" on success. + Additional clarification: + - Defaults to "data/text" if no root_folder is provided. + - You can supply any valid file path in root_folder to override. + """ + # If no root folder is provided, use the default directory + if root_folder is None: # pragma: no cover + root_folder = directory_to_files # pragma: no cover + + # Determine the directory for text files + text_directory = Path(root_folder) / "text" + + # Construct the file path for text files + file_path = text_directory / file_name + + # Create the text directory if it does not exist + text_directory.mkdir(parents=True, exist_ok=True) + + # Check that data is a string and that file_name does not contain invalid + # characters + if not isinstance(data, str): + logger.error(f"{file_name} is not a valid string") + raise TypeError(f"{file_name} is not a valid string") + elif "/" in file_name or "\\" in file_name: + logger.error(f"{file_name} cannot contain \\ or /") + raise ValueError(f"{file_name} cannot contain \\ or /") + + # Add extension to file_name if needed + if not file_name.endswith(".txt"): + file_name += ".txt" + # Open or create the file and write the data + with open(file_path, "w+", encoding="utf-8") as file: + file.write(data) + + logger.info(f"File created: {file_path}") + return "complete" + + +def open_text(file_name: str) -> str: + 
""" + Opens a text file with the specified file name and returns its contents as a + string. + + Args: + file_name (str): The name of the file to open. Should include the '.txt' + extension. + + Returns: + str: The contents of the text file as a string. + + Raises: + TypeError: If the `file_name` parameter is not a string or contains a + forward slash. FileNotFoundError: If the file does not exist. + + Example: + ```python + from dsg_lib.common_functions import file_functions + + text_content = file_functions.open_text(file_name="test.txt") + # text_content is the file's contents as a single string + ``` + Additional usage info: + - Retrieves contents as a single string. + - Handy for reading simple text resources or logs. + """ + # Replace backslashes with forward slashes in the file name + if "\\" in file_name: # pragma: no cover + file_name = file_name.replace("\\", "/") # pragma: no cover + + # Check that file_name does not contain invalid characters + if "/" in file_name: + logger.error(f"{file_name} cannot contain /") + raise TypeError(f"{file_name} cannot contain /") + + # Get the path to the text directory and the file path + file_directory = os.path.join(directory_to_files, "text") + file_path = Path.cwd().joinpath(file_directory, file_name) + + # Check if the file exists + if not file_path.is_file(): + raise FileNotFoundError(f"file not found error: {file_path}") + + # Open the file and read the data + with open(file_path, "r", encoding="utf-8") as file: + data = file.read() + + logger.info(f"File opened: {file_path}") + return data + + # A list of first names to randomly select from first_name: List[str] = [ "Amy", @@ -455,9 +647,12 @@ def create_sample_files(file_name: str, sample_size: int) -> None: ```python from dsg_lib.common_functions import file_functions - file_functions.create_sample_files("test", 100) - - # Creates 'test.csv' and 'test.json' each with 100 rows of random data ``` + file_functions.create_sample_files(file_name="test.csv", 
sample_size=100) + # Creates 'test.csv' and 'test.json' each with 100 rows of random data + ``` + Additional usage info: + - Creates CSV and JSON sample files with random data. + - Useful for testing or seeding databases. """ logger.debug(f"Creating sample files for {file_name} with {sample_size} rows.") @@ -515,9 +710,12 @@ def generate_random_date() -> str: Example: ```python from dsg_lib.common_functions import file_functions - random_date = file_functions.generate_random_date() - # Returns: '1992-03-15 10:30:45.123456' + + random_timestamp = file_functions.generate_random_date() + # random_timestamp might look like '1992-03-15 10:30:45.123456' ``` + Additional usage info: + - Can help produce test data with random timestamps. """ # Define the minimum and maximum years for the date range min_year: int = 1905 @@ -536,114 +734,3 @@ def generate_random_date() -> str: # Format the datetime string and return it return f"{date_value:%Y-%m-%d %H:%M:%S.%f}" - - -def save_text(file_name: str, data: str, root_folder: str = None) -> str: - """ - Saves a string of text to a file with the specified file name in the - specified directory. - - Args: - file_name (str): The name of the file to save the data in. Should not - include the '.txt' extension. data (str): The text data to be saved. - root_folder (str, optional): The root directory where the file will be - saved. If None, the file will be saved in the current directory. - Defaults to None. - - Returns: - str: A message indicating whether the file has been saved successfully - or an error occurred. - - Raises: - TypeError: If the `data` parameter is not a string, or the `file_name` - contains a forward slash or backslash. FileNotFoundError: If the - directory does not exist. 
- - Example: - ```python - from dsg_lib.common_functions import file_functions - - file_functions.save_text("test", "This is a test text file.", "/path/to/directory") - - # Saves data to '/path/to/directory/test.txt' - ``` - """ - # If no root folder is provided, use the default directory - if root_folder is None: # pragma: no cover - root_folder = directory_to_files # pragma: no cover - - # Determine the directory for text files - text_directory = Path(root_folder) / "text" - - # Construct the file path for text files - file_path = text_directory / file_name - - # Create the text directory if it does not exist - text_directory.mkdir(parents=True, exist_ok=True) - - # Check that data is a string and that file_name does not contain invalid - # characters - if not isinstance(data, str): - logger.error(f"{file_name} is not a valid string") - raise TypeError(f"{file_name} is not a valid string") - elif "/" in file_name or "\\" in file_name: - logger.error(f"{file_name} cannot contain \\ or /") - raise ValueError(f"{file_name} cannot contain \\ or /") - - # Add extension to file_name if needed - if not file_name.endswith(".txt"): - file_name += ".txt" - # Open or create the file and write the data - with open(file_path, "w+", encoding="utf-8") as file: - file.write(data) - - logger.info(f"File created: {file_path}") - return "complete" - - -def open_text(file_name: str) -> str: - """ - Opens a text file with the specified file name and returns its contents as a - string. - - Args: - file_name (str): The name of the file to open. Should include the '.txt' - extension. - - Returns: - str: The contents of the text file as a string. - - Raises: - TypeError: If the `file_name` parameter is not a string or contains a - forward slash. FileNotFoundError: If the file does not exist. - - Example: - ```python - from dsg_lib.common_functions import file_functions - data = file_functions.open_text("test.txt") - # Returns: 'This is a test text file.' 
- ``` - """ - # Replace backslashes with forward slashes in the file name - if "\\" in file_name: # pragma: no cover - file_name = file_name.replace("\\", "/") # pragma: no cover - - # Check that file_name does not contain invalid characters - if "/" in file_name: - logger.error(f"{file_name} cannot contain /") - raise TypeError(f"{file_name} cannot contain /") - - # Get the path to the text directory and the file path - file_directory = os.path.join(directory_to_files, "text") - file_path = Path.cwd().joinpath(file_directory, file_name) - - # Check if the file exists - if not file_path.is_file(): - raise FileNotFoundError(f"file not found error: {file_path}") - - # Open the file and read the data - with open(file_path, "r", encoding="utf-8") as file: - data = file.read() - - logger.info(f"File opened: {file_path}") - return data diff --git a/dsg_lib/common_functions/file_mover.py b/dsg_lib/common_functions/file_mover.py new file mode 100644 index 00000000..ff3e90d4 --- /dev/null +++ b/dsg_lib/common_functions/file_mover.py @@ -0,0 +1,207 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Module: file_mover +Detailed file processing flow that continuously monitors and processes files +from a source directory, optionally compresses them, and then moves them to a +final destination. Ensures no files are lost during transfer. + +Functions: + process_files_flow( + source_dir: str, + temp_dir: str, + final_dir: str, + file_pattern: str, + compress: bool = False, + max_iterations: Optional[int] = None + ) -> None: + Continuously monitors the source directory for files matching the given + pattern, moves them to a temporary directory, optionally compresses them, + and then transfers them to the final directory. + + _process_file(file_path: Path, temp_path: Path, final_path: Path, compress: bool) -> None: + Handles the internal logic of moving and optionally compressing a single file. 
+ +Usage Example: +```python +from dsg_lib.common_functions.file_mover import process_files_flow + +process_files_flow( + source_dir="/some/source", + temp_dir="/some/temp", + final_dir="/some/final", + file_pattern="*.txt", + compress=True +) +``` +""" + +import shutil +from pathlib import Path +from time import sleep +from loguru import logger +from watchfiles import watch +from datetime import datetime +from typing import Optional +from itertools import islice # Import islice to limit generator iterations + + +def process_files_flow( + source_dir: str, + temp_dir: str, + final_dir: str, + file_pattern: str, + compress: bool = False, + max_iterations: Optional[int] = None, +) -> None: + """ + Continuously monitors a source directory for files. Moves files matching + file_pattern to a temporary directory, optionally compresses them, then + moves them to a final destination directory. + + Args: + source_dir (str): Path to the source directory to watch. + temp_dir (str): Path to the temporary directory for processing. + final_dir (str): Path to the final destination directory. + file_pattern (str): Glob pattern for matching files (e.g. "*.txt"). + compress (bool, optional): If True, compress files before moving. Defaults to False. + max_iterations (Optional[int], optional): Limit iterations in watch loop. Defaults to None. + + Returns: + None + + Raises: + Exception: Propagated if file operations fail. + + Example: + process_files_flow("/source", "/temp", "/final", "*.pdf", compress=True) + """ + temp_path: Path = Path(temp_dir) + final_path: Path = Path(final_dir) + source_path: Path = Path(source_dir) + + # Ensure temporary and final directories exist. 
+ for path in (temp_path, final_path): + path.mkdir(parents=True, exist_ok=True) + + # Process existing files in the source directory at startup + logger.info(f"Processing existing files in source directory: {source_dir}") + for file in source_path.glob(file_pattern): + if file.is_file(): + try: + logger.info(f"Processing existing file: {file}") + _process_file(file, temp_path, final_path, compress) + except Exception as e: + logger.error(f"Error processing existing file '{file}': {e}") + raise + + # The clear_source deletion block has been removed so that files remain in the source directory + # if they have not already been processed. + + logger.info( + f"Starting file processing flow: monitoring '{source_dir}' for pattern '{file_pattern}'." + ) + + # Monitor the source directory for changes + changes_generator = watch(source_dir) + if max_iterations is not None: + changes_generator = islice(changes_generator, max_iterations) + + for changes in changes_generator: + logger.debug(f"Detected changes: {changes}") + for _change_type, file_str in changes: + file_path: Path = Path(file_str) + if file_path.is_file() and file_path.match(file_pattern): + try: + logger.info(f"Detected file for processing: {file_path}") + _process_file(file_path, temp_path, final_path, compress) + except Exception as e: + logger.error(f"Error processing file '{file_path}': {e}") + raise + sleep(1) # Small delay to minimize CPU usage + + +def _process_file( + file_path: Path, temp_path: Path, final_path: Path, compress: bool +) -> None: + """ + Handles the internal logic of relocating and optionally compressing a single file. + + Args: + file_path (Path): Full path to the file being processed. + temp_path (Path): Temporary directory path. + final_path (Path): Final destination directory path. + compress (bool): Flag indicating whether to compress the file. + + Returns: + None + + Raises: + Exception: Raised if errors occur during file move or compression steps. 
+ """ + logger.debug(f"Starting to process file: {file_path}") + # Step 1: Move the file to the temporary directory + temp_file_path: Path = temp_path / file_path.name + logger.debug(f"Attempting to move file to temporary directory: {temp_file_path}") + shutil.move(str(file_path), str(temp_file_path)) + logger.info(f"Moved file to temporary directory: {temp_file_path}") + + processed_file_path: Path = temp_file_path + + # Step 2: Optionally compress the file + if compress: + try: + logger.debug(f"Starting compression for file: {temp_file_path}") + timestamp_suffix = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + base_for_zip: Path = ( + temp_file_path.parent / f"{temp_file_path.stem}_{timestamp_suffix}" + ) + shutil.make_archive( + base_name=str(base_for_zip), + format="zip", + root_dir=temp_file_path.parent, + base_dir=temp_file_path.name, + ) + zipped_file: Path = base_for_zip.with_suffix(".zip") + logger.info(f"Compressed file to: {zipped_file}") + processed_file_path = zipped_file + + # Attempt to remove the uncompressed file + try: + logger.debug( + f"Attempting to delete uncompressed file: {temp_file_path}" + ) + temp_file_path.unlink() + logger.info(f"Deleted uncompressed file: {temp_file_path}") + except Exception as cleanup_err: + logger.error( + f"Error deleting temporary file {temp_file_path}: {cleanup_err}" + ) + + except Exception as compression_err: + logger.error(f"Error compressing file {temp_file_path}: {compression_err}") + raise + + # Step 3: Move the (processed) file to the final directory + final_file_path: Path = final_path / processed_file_path.name + logger.debug( + f"Attempting to move processed file to final directory: {final_file_path}" + ) + shutil.move(str(processed_file_path), str(final_file_path)) + logger.info(f"Moved file to final destination: {final_file_path}") + + +if __name__ == "__main__": + SOURCE_DIRECTORY: str = "/path/to/source" + TEMPORARY_DIRECTORY: str = "/path/to/temp" + FINAL_DIRECTORY: str = "/path/to/final" + 
FILE_PATTERN: str = "*.txt" + COMPRESS_FILE: bool = True + + process_files_flow( + SOURCE_DIRECTORY, + TEMPORARY_DIRECTORY, + FINAL_DIRECTORY, + FILE_PATTERN, + COMPRESS_FILE, + ) diff --git a/dsg_lib/common_functions/logging_config.py b/dsg_lib/common_functions/logging_config.py index edc487a4..4d67e1f9 100644 --- a/dsg_lib/common_functions/logging_config.py +++ b/dsg_lib/common_functions/logging_config.py @@ -55,6 +55,7 @@ rotation_lock = Lock() + class SafeFileSink: """ A class to handle safe file logging with rotation and retention policies. @@ -82,6 +83,7 @@ class SafeFileSink: # This will set up a log file at 'logs/app.log' with rotation at 100 MB, # retention for 30 days, and compression using zip. """ + def __init__(self, path, rotation, retention, compression=None): self.path = path self.rotation_size = self.parse_size(rotation) @@ -89,7 +91,7 @@ def __init__(self, path, rotation, retention, compression=None): self.compression = compression @staticmethod - def parse_size(size_str): # pragma: no cover + def parse_size(size_str): # pragma: no cover """ Parses a size string and returns the size in bytes. @@ -100,17 +102,17 @@ def parse_size(size_str): # pragma: no cover int: The size in bytes. """ size_str = size_str.upper() - if size_str.endswith('MB'): + if size_str.endswith("MB"): return int(size_str[:-2]) * 1024 * 1024 - elif size_str.endswith('GB'): + elif size_str.endswith("GB"): return int(size_str[:-2]) * 1024 * 1024 * 1024 - elif size_str.endswith('KB'): + elif size_str.endswith("KB"): return int(size_str[:-2]) * 1024 else: return int(size_str) @staticmethod - def parse_duration(duration_str): # pragma: no cover + def parse_duration(duration_str): # pragma: no cover """ Parses a duration string and returns a timedelta object. @@ -121,16 +123,16 @@ def parse_duration(duration_str): # pragma: no cover timedelta: The duration as a timedelta object. 
""" duration_str = duration_str.lower() - if 'day' in duration_str: + if "day" in duration_str: return timedelta(days=int(duration_str.split()[0])) - elif 'hour' in duration_str: + elif "hour" in duration_str: return timedelta(hours=int(duration_str.split()[0])) - elif 'minute' in duration_str: + elif "minute" in duration_str: return timedelta(minutes=int(duration_str.split()[0])) else: return timedelta(days=0) - def __call__(self, message): # pragma: no cover + def __call__(self, message): # pragma: no cover """ Handles the logging of a message, including writing, rotating, and applying retention policies. @@ -145,7 +147,7 @@ def __call__(self, message): # pragma: no cover self.rotate_logs() self.apply_retention() - def write_message(self, message): # pragma: no cover + def write_message(self, message): # pragma: no cover """ Writes a log message to the log file. @@ -154,10 +156,10 @@ def write_message(self, message): # pragma: no cover This method opens the log file in append mode and writes the message to it. """ - with open(self.path, 'a') as f: + with open(self.path, "a") as f: f.write(message) - def rotate_logs(self): # pragma: no cover + def rotate_logs(self): # pragma: no cover """ Rotates the log file if it exceeds the specified rotation size. @@ -173,14 +175,19 @@ def rotate_logs(self): # pragma: no cover OSError: If there is an error renaming or compressing the log file. 
""" if os.path.getsize(self.path) >= self.rotation_size: - timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") rotated_path = f"{self.path}.{timestamp}" os.rename(self.path, rotated_path) if self.compression: - shutil.make_archive(rotated_path, self.compression, root_dir=os.path.dirname(rotated_path), base_dir=os.path.basename(rotated_path)) + shutil.make_archive( + rotated_path, + self.compression, + root_dir=os.path.dirname(rotated_path), + base_dir=os.path.basename(rotated_path), + ) os.remove(rotated_path) - def apply_retention(self): # pragma: no cover + def apply_retention(self): # pragma: no cover """ Applies the retention policy to remove old log files. @@ -197,12 +204,16 @@ def apply_retention(self): # pragma: no cover """ now = datetime.now() for filename in os.listdir(os.path.dirname(self.path)): - if filename.startswith(os.path.basename(self.path)) and len(filename.split('.')) > 1: + if ( + filename.startswith(os.path.basename(self.path)) + and len(filename.split(".")) > 1 + ): file_path = os.path.join(os.path.dirname(self.path), filename) file_time = datetime.fromtimestamp(os.path.getmtime(file_path)) if now - file_time > self.retention_days: os.remove(file_path) + def config_log( logging_directory: str = "log", log_name: str = "log", @@ -217,7 +228,7 @@ def config_log( append_app_name: bool = False, enqueue: bool = True, intercept_standard_logging: bool = True, - compression: str = 'zip', + compression: str = "zip", ): """ Configures the logging settings for the application. 
def create_default_router(config: List[Dict[str, str]]) -> APIRouter:
    """
    Creates a router with default endpoints, including a configurable robots.txt.

    Args:
        config (List[Dict[str, str]]): A list of dictionaries specifying which bots
            are allowed or disallowed.  Each entry may contain a "bot" name and an
            optional "allow" flag (defaults to True).

    Returns:
        APIRouter: A FastAPI router with the default endpoints.
    """
    router = APIRouter()

    @router.get("/robots.txt", response_class=Response)
    async def robots_txt():
        """
        Generates a robots.txt file based on the provided configuration.

        Returns:
            Response: The robots.txt content.
        """
        logger.info("Generating robots.txt")
        # A leading wildcard group, followed by one group per configured bot.
        directives = ["User-agent: *"]
        for rule in config:
            crawler = rule.get("bot")
            if not crawler:
                # Entries without a bot name contribute nothing.
                continue
            permitted = rule.get("allow", True)
            logger.debug(f"Configuring bot: {crawler}, Allow: {permitted}")
            directives.append(f"User-agent: {crawler}")
            directives.append("Allow: /" if permitted else "Disallow: /")
        robots_txt_content = "\n".join(directives)
        logger.info("robots.txt generated successfully")
        return Response(robots_txt_content, media_type="text/plain")

    return router
def create_sample_list(qty=10):
    """
    Build a list-of-lists CSV payload: one header row followed by *qty* data rows.

    Args:
        qty (int): Number of data rows to generate.

    Returns:
        list[list[str]]: Header row plus generated rows.
    """
    headers = ["thing_one", "thing_two", "thing_three", "thing_four", "thing_five"]
    # Row r holds item_{r+1} .. item_{r+5}, matching the five header columns.
    data_rows = [
        [f"item_{row + offset}" for offset in range(1, 6)] for row in range(qty)
    ]
    return [headers] + data_rows


def save_data_with_timer():
    """
    Saves a new CSV file every 5 seconds with a unique timestamped name.

    Each iteration generates a randomly sized payload and writes it as a
    pipe-delimited CSV into the monitored source directory.  Runs forever.
    """
    while True:
        rows = create_sample_list(qty=random.randint(10, 100000))
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        file_name = f"data_{stamp}.csv"
        save_csv(
            file_name=file_name,
            data=rows,
            root_folder="/workspaces/devsetgo_lib/data/move/source",
            delimiter="|",
            quotechar='"',
        )
        print(f"Saved file: {file_name}")
        time.sleep(5)
title="FastAPI Example", # The title of the API @@ -136,18 +153,48 @@ async def root(): return response -config_health = { +# Example configuration +config = { "enable_status_endpoint": True, "enable_uptime_endpoint": True, "enable_heapdump_endpoint": True, + "enable_robots_endpoint": True, + "user_agents": [ + {"bot": "Bytespider", "allow": False}, + {"bot": "GPTBot", "allow": False}, + {"bot": "ClaudeBot", "allow": True}, + {"bot": "ImagesiftBot", "allow": True}, + {"bot": "CCBot", "allow": False}, + {"bot": "ChatGPT-User", "allow": True}, + {"bot": "omgili", "allow": False}, + {"bot": "Diffbot", "allow": False}, + {"bot": "Claude-Web", "allow": True}, + {"bot": "PerplexityBot", "allow": False}, + {"bot": "Googlebot", "allow": True}, + {"bot": "Bingbot", "allow": True}, + {"bot": "Baiduspider", "allow": False}, + {"bot": "YandexBot", "allow": False}, + {"bot": "DuckDuckBot", "allow": True}, + {"bot": "Sogou", "allow": False}, + {"bot": "Exabot", "allow": False}, + {"bot": "facebot", "allow": False}, + {"bot": "ia_archiver", "allow": False}, + ], } -app.include_router( - system_health_endpoints.create_health_router(config=config_health), - prefix="/api/health", - tags=["system-health"], -) +# Create and include the health router if enabled +if ( + config["enable_status_endpoint"] + or config["enable_uptime_endpoint"] + or config["enable_heapdump_endpoint"] +): + health_router = system_health_endpoints.create_health_router(config) + app.include_router(health_router, prefix="/api/health", tags=["system-health"]) +# Create and include the default router if enabled +if config["enable_robots_endpoint"]: + default_router = default_endpoints.create_default_router(config["user_agents"]) + app.include_router(default_router, prefix="", tags=["default"]) async def create_a_bunch_of_users(single_entry=0, many_entries=0): @@ -346,12 +393,15 @@ async def execute_query(query: str = Body(...)): # add a user with execute_one logger.info(f"Executing query: {query}") - query = 
insert(User).values(first_name='John', last_name='Doe',email='x@abc.com') + query = insert(User).values(first_name="John", last_name="Doe", email="x@abc.com") result = await db_ops.execute_one(query) logger.info(f"Executed query: {result}") - query_return = await db_ops.read_query(Select(User).where(User.first_name == 'John')) + query_return = await db_ops.read_query( + Select(User).where(User.first_name == "John") + ) return query_return + @app.post("/database/execute-many", tags=["Database Examples"]) async def execute_many(query: str = Body(...)): # multiple users with execute_many @@ -359,7 +409,9 @@ async def execute_many(query: str = Body(...)): queries = [] for i in range(10): - query = insert(User).values(first_name=f'User{i}', last_name='Doe',email='x@abc.com') + query = insert(User).values( + first_name=f"User{i}", last_name="Doe", email="x@abc.com" + ) queries.append(query) results = await db_ops.execute_many(queries) @@ -367,6 +419,7 @@ async def execute_many(query: str = Body(...)): query_return = await db_ops.read_query(Select(User)) return query_return + if __name__ == "__main__": import uvicorn diff --git a/examples/file_monitor.py b/examples/file_monitor.py new file mode 100644 index 00000000..05da7f9f --- /dev/null +++ b/examples/file_monitor.py @@ -0,0 +1,65 @@ +import os +import asyncio +from pathlib import Path +from loguru import logger +from dsg_lib.common_functions.file_mover import process_files_flow + +# Define source, temporary, and destination directories +SOURCE_DIRECTORY = "/workspaces/devsetgo_lib/data/move/source/csv" +TEMPORARY_DIRECTORY = "/workspaces/devsetgo_lib/data/move/temp" +DESTINATION_DIRECTORY = "/workspaces/devsetgo_lib/data/move/destination" +FILE_PATTERN = "*.csv" # File pattern to monitor (e.g., '*.txt') +COMPRESS_FILES = True # Set to True to compress files before moving +CLEAR_SOURCE = True # Set to True to clear the source directory before starting + +# Ensure directories exist +os.makedirs(SOURCE_DIRECTORY, 
async def create_sample_files():
    """
    Periodically create sample files in the source directory for demonstration purposes.

    A new file is written every 10 seconds.  Files are named with a running
    count of existing entries and use a ``.csv`` extension so they match
    FILE_PATTERN ("*.csv") — the original wrote ``.txt`` files that the
    monitor could never pick up.
    """
    while True:
        # Path.glob() returns a generator, which has no __len__; materialize
        # it to count existing files.  (The original called .__len__() on the
        # generator and raised AttributeError at runtime.)
        existing = len(list(Path(SOURCE_DIRECTORY).glob("*")))
        file_name = f"sample_{existing + 1}.csv"
        file_path = Path(SOURCE_DIRECTORY) / file_name
        file_path.write_text("This is a sample file for testing the file mover.")
        logger.info(f"Created sample file: {file_path}")
        await asyncio.sleep(10)  # Create a new file every 10 seconds


async def main():
    """
    Main function to demonstrate the file mover library.

    Runs the blocking process_files_flow in a worker thread while a background
    task drops sample files into the source directory.
    """
    # Start the sample file creation task
    file_creator_task = asyncio.create_task(create_sample_files())

    # process_files_flow blocks (it watches the directory), so run it in the
    # default executor to keep the event loop responsive.
    loop = asyncio.get_event_loop()
    await loop.run_in_executor(
        None,
        process_files_flow,
        SOURCE_DIRECTORY,
        TEMPORARY_DIRECTORY,
        DESTINATION_DIRECTORY,
        FILE_PATTERN,
        COMPRESS_FILES,
        # NOTE(review): clear_source was removed from process_files_flow, so
        # CLEAR_SOURCE must not be passed any more — positionally it bound to
        # max_iterations, stopping the watcher after a single change batch.
    )

    # Cancel the file creator task when done
    file_creator_task.cancel()
    try:
        await file_creator_task
    except asyncio.CancelledError:
        logger.info("File creation task cancelled.")


if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        logger.info("File monitor example stopped.")
ZeroDivisionError as e: - logger.error(f'{e}') - logging.error(f'{e}') + logger.error(f"{e}") + logging.error(f"{e}") # @logger.catch @@ -46,30 +46,30 @@ def div_zero_two(x, y): try: return x / y except ZeroDivisionError as e: - logger.error(f'{e}') - logging.error(f'{e}') + logger.error(f"{e}") + logging.error(f"{e}") def log_big_string(lqty=100, size=256): big_string = secrets.token_urlsafe(size) for _ in range(lqty): - logging.debug(f'Lets make this a big message {big_string}') + logging.debug(f"Lets make this a big message {big_string}") div_zero(x=1, y=0) div_zero_two(x=1, y=0) # after configuring logging # use loguru to log messages - logger.debug('This is a loguru debug message') - logger.info('This is an loguru info message') - logger.error('This is an loguru error message') - logger.warning('This is a loguru warning message') - logger.critical('This is a loguru critical message') + logger.debug("This is a loguru debug message") + logger.info("This is an loguru info message") + logger.error("This is an loguru error message") + logger.warning("This is a loguru warning message") + logger.critical("This is a loguru critical message") # will intercept all standard logging messages also - logging.debug('This is a standard logging debug message') - logging.info('This is an standard logging info message') - logging.error('This is an standard logging error message') - logging.warning('This is a standard logging warning message') - logging.critical('This is a standard logging critical message') + logging.debug("This is a standard logging debug message") + logging.info("This is an standard logging info message") + logging.error("This is an standard logging error message") + logging.warning("This is a standard logging warning message") + logging.critical("This is a standard logging critical message") def worker(wqty=1000, lqty=100, size=256): @@ -77,13 +77,26 @@ def worker(wqty=1000, lqty=100, size=256): log_big_string(lqty=lqty, size=size) -def main(wqty: int = 100, 
lqty: int = 10, size: int = 256, workers: int = 16, thread_test: bool = False, process_test: bool = False): +def main( + wqty: int = 100, + lqty: int = 10, + size: int = 256, + workers: int = 16, + thread_test: bool = False, + process_test: bool = False, +): if process_test: processes = [] # Create worker processes for _ in tqdm(range(workers), desc="Multi-Processing Start", leave=True): p = multiprocessing.Process( - target=worker, args=(wqty, lqty, size,)) + target=worker, + args=( + wqty, + lqty, + size, + ), + ) processes.append(p) p.start() @@ -96,8 +109,17 @@ def main(wqty: int = 100, lqty: int = 10, size: int = 256, workers: int = 16, th if thread_test: threads = [] - for _ in tqdm(range(workers), desc="Threading Start", leave=True): # Create worker threads - t = threading.Thread(target=worker, args=(wqty, lqty, size,)) + for _ in tqdm( + range(workers), desc="Threading Start", leave=True + ): # Create worker threads + t = threading.Thread( + target=worker, + args=( + wqty, + lqty, + size, + ), + ) threads.append(t) t.start() @@ -107,6 +129,7 @@ def main(wqty: int = 100, lqty: int = 10, size: int = 256, workers: int = 16, th if __name__ == "__main__": from time import time + start = time() main(wqty=5, lqty=50, size=64, workers=8, thread_test=False, process_test=True) print(f"Execution time: {time()-start:.2f} seconds") diff --git a/makefile b/makefile index 342d7aba..b86779c3 100644 --- a/makefile +++ b/makefile @@ -1,6 +1,6 @@ # Variables REPONAME = devsetgo_lib - +APP_VERSION = 25.04.05-001 PYTHON = python3 PIP = $(PYTHON) -m pip PYTEST = $(PYTHON) -m pytest @@ -33,22 +33,23 @@ black: ## Reformat Python code to follow the Black code style bump: ## Bump the version of the project bumpcalver --build - cleanup: isort ruff autoflake ## Run isort, ruff, autoflake create-docs: ## Build and deploy the project's documentation python3 scripts/changelog.py - mkdocs build cp /workspaces/$(REPONAME)/README.md /workspaces/$(REPONAME)/docs/index.md cp 
/workspaces/$(REPONAME)/CONTRIBUTING.md /workspaces/$(REPONAME)/docs/contribute.md cp /workspaces/$(REPONAME)/CHANGELOG.md /workspaces/$(REPONAME)/docs/release-notes.md + mkdocs build mkdocs gh-deploy create-docs-local: ## Build and deploy the project's documentation python3 scripts/changelog.py + cp /workspaces/$(REPONAME)/README.md /workspaces/$(REPONAME)/docs/index.md + cp /workspaces/$(REPONAME)/CONTRIBUTING.md /workspaces/$(REPONAME)/docs/contribute.md + cp /workspaces/$(REPONAME)/CHANGELOG.md /workspaces/$(REPONAME)/docs/release-notes.md mkdocs build - cp /workspaces/devsetgo_lib/README.md /workspaces/devsetgo_lib/docs/index.md - cp /workspaces/devsetgo_lib/CONTRIBUTING.md /workspaces/devsetgo_lib/docs/contribute.md + flake8: ## Run flake8 to check Python code for PEP8 compliance flake8 --tee . > htmlcov/_flake8Report.txt @@ -131,6 +132,16 @@ ex-email: ## Run the example calendar script python3 ex.py rm /workspaces/devsetgo_lib/ex.py +ex-fm: ## Run the example calendar script + cp /workspaces/devsetgo_lib/examples/file_monitor.py /workspaces/devsetgo_lib/ex.py + python3 ex.py + rm /workspaces/devsetgo_lib/ex.py + +ex-fm-timer: ## Run the example calendar script + cp /workspaces/devsetgo_lib/examples/csv_example_with_timer.py /workspaces/devsetgo_lib/ex-timer.py + python3 ex-timer.py + rm /workspaces/devsetgo_lib/ex-timer.py + ex-all: ## Run all the examples, but fastapi make ex-log @@ -140,3 +151,4 @@ ex-all: ## Run all the examples, but fastapi make ex-pattern make ex-text make ex-email + make ex-fm diff --git a/mkdocs.yml b/mkdocs.yml index de5233ce..0ff338c9 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -9,7 +9,7 @@ repo_name: 'devsetgo/dsg_lib' repo_url: 'https://github.com/devsetgo/dsg_lib' # Copyright -copyright: 'Copyright © 2016 - 2024 Mike Ryan' +copyright: 'Copyright © 2016 - 2025 Mike Ryan' nav: - Introduction: 'index.md' @@ -17,12 +17,14 @@ nav: - Common Functions: - Logging: 'common_functions/logging.md' - File Functions: 
'common_functions/file_functions.md' + - File Mover: 'common_functions/file_mover.md' - Folder Functions: 'common_functions/folder_functions.md' - RegEx Functions: 'common_functions/regex.md' - Calendar Functions: 'common_functions/calendar_functions.md' - FastAPI Functions: - HTTP Codes: 'fastapi/http_codes.md' - System Health: 'fastapi/system_health_endpoints.md' + - Default Endpoints: 'fastapi/default_endpoints.md' - Database Functions: - Base Schema: 'database/base_schema.md' - Database Configuration: 'database/database_configuration.md' diff --git a/pyproject.toml b/pyproject.toml index 36c29974..c8a88d03 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,13 +4,13 @@ build-backend = "hatchling.build" [project] name = "devsetgo_lib" -version = "2024.11.28.1" +version = "25.4.5.1" requires-python = ">=3.9" description = "DevSetGo Library is a Python library offering reusable functions for efficient coding. It includes file operations, calendar utilities, pattern matching, advanced logging with loguru, FastAPI endpoints, async database handling, and email validation. 
Designed for ease of use and versatility, it's a valuable tool for Python developers.\n" keywords = [ "python", "library", "reusable functions", "file operations", "calendar utilities", "pattern matching", "logging", "loguru", "FastAPI", "async database", "CRUD operations", "email validation", "development tools",] readme = "README.md" classifiers = [ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Operating System :: POSIX :: Linux", "Operating System :: POSIX", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows",] -dependencies = [ "loguru>=0.7.0", "packaging>=20.0", "email-validator>=2.1.1",] +dependencies = [ "loguru>=0.7.0", "packaging>=22.0", "email-validator>=2.1.1", "watchfiles>=1.0.1,<1.1.1",] [[project.authors]] name = "Mike Ryan" email = "mikeryan56@gmail.com" @@ -62,9 +62,16 @@ line_length = 100 [tool.bumpcalver] version_format = "{current_date}-{build_count:03}" +date_format = "%y.%m.%d" timezone = "America/New_York" -git_tag = true -auto_commit = true +git_tag = false +auto_commit = false +[[tool.bumpcalver.file]] +path = "makefile" +file_type = "makefile" +variable = "APP_VERSION" +version_standard = "standard" + [[tool.bumpcalver.file]] path = "pyproject.toml" file_type = "toml" @@ -87,7 +94,7 @@ quote-style = "single" [tool.coverage.run] source = [ "dsg_lib",] -omit = [ "setup.py", "_venv/*", "tests/*", "examples/*",] +omit = [ "setup.py", "_venv/*", "tests/*", "examples/*", "ex.py", "ex_*.py",] [tool.coverage.report] exclude_lines = [ "pragma: no cover", "if __name__", "def main", "import_sqlalchemy",] diff --git a/report.xml b/report.xml index c1433aca..fa0c488b 100644 --- a/report.xml +++ b/report.xml @@ -1 +1 @@ - + diff --git a/requirements.txt b/requirements.txt index 
974968ab..ddbd4d13 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,44 +1,44 @@ aiomysql==0.2.0 # Vulnerabilities: None -aiosqlite==0.20.0 # Vulnerabilities: None -asyncpg==0.30.0 # From 0.29.0 | Vulnerabilities: None +aiosqlite==0.21.0 # Vulnerabilities: None +asyncpg==0.30.0 # Vulnerabilities: None autoflake==2.3.1 # Vulnerabilities: None -autopep8==2.3.1 # Vulnerabilities: None +autopep8==2.3.2 # Vulnerabilities: None black==25.1.0 # Vulnerabilities: None -bumpcalver==2024.12.14.1 # From 2024.10.20.4 | Vulnerabilities: None -click==8.1.7 # Vulnerabilities: None -fastapi[all]==0.115.5 # From 0.115.2 | Vulnerabilities: None -flake8==7.1.2 # Vulnerabilities: None -genbadge[all]==1.1.1 # Vulnerabilities: None -hatchling==1.27.0 # From 1.25.0 | Vulnerabilities: None +bumpcalver==2024.12.14.1 # Vulnerabilities: None +click==8.1.8 # Vulnerabilities: None +fastapi[all]==0.115.12 # Vulnerabilities: None +flake8==7.2.0 # Vulnerabilities: None +genbadge[all]==1.1.2 # Vulnerabilities: None +hatchling==1.27.0 # Vulnerabilities: None loguru==0.7.3 # Vulnerabilities: None mike==2.1.3 # Vulnerabilities: None -mkdocs-material==9.5.47 # From 9.5.41 | Vulnerabilities: None -mkdocs-print-site-plugin==2.6.0 # Vulnerabilities: None -mkdocstrings[python,shell]==0.29.1 # From 0.26.2 | Vulnerabilities: None -oracledb==2.5.0 # From 2.4.1 | Vulnerabilities: None -packaging==24.2 # From 24.1 | Vulnerabilities: None +mkdocs-material==9.6.11 # From 9.6.9 | Vulnerabilities: None +mkdocs-print-site-plugin==2.7.2 # From 2.7.1 | Vulnerabilities: None +mkdocstrings[python,shell]==0.29.1 # From 0.29.0 | Vulnerabilities: None +oracledb==3.1.0 # From 3.0.0 | Vulnerabilities: None +packaging==24.2 # Vulnerabilities: None pre-commit==4.2.0 # Vulnerabilities: None psycopg2==2.9.10 # Vulnerabilities: None Pygments==2.19.1 # Vulnerabilities: None -pylint==3.3.1 # Vulnerabilities: None -pymdown-extensions==10.14.3 # From 10.11.2 | Vulnerabilities: None -pytest==8.3.4 # Vulnerabilities: None 
+pylint==3.3.6 # Vulnerabilities: None +pymdown-extensions==10.14.3 # Vulnerabilities: None +pytest==8.3.5 # Vulnerabilities: None pytest-asyncio==0.26.0 # Vulnerabilities: None -pytest-cov==6.0.0 # From 5.0.0 | Vulnerabilities: None -pytest-html==4.1.1 # From none | Vulnerabilities: None +pytest-cov==6.1.1 # From 6.0.0 | Vulnerabilities: None +pytest-html==4.1.1 # Vulnerabilities: None pytest-mock==3.14.0 # Vulnerabilities: None pytest-runner==6.0.1 # Vulnerabilities: None pytest-xdist==3.6.1 # Vulnerabilities: None -python-json-logger==3.2.1 # Vulnerabilities: None +python-json-logger==3.3.0 # Vulnerabilities: None PyYAML==6.0.2 # Vulnerabilities: None -ruff==0.11.2 # From 0.7.0 | Vulnerabilities: None -SQLAlchemy==2.0.36 # Vulnerabilities: None -structlog==25.1.0 # Vulnerabilities: None +ruff==0.11.4 # From 0.11.2 | Vulnerabilities: None +SQLAlchemy==2.0.40 # Vulnerabilities: None +structlog==25.2.0 # Vulnerabilities: None toml==0.10.2 # Vulnerabilities: None -tox==4.23.2 # From 4.23.0 | Vulnerabilities: None -tqdm==4.67.1 # From 4.66.5 | Vulnerabilities: None +tox==4.25.0 # Vulnerabilities: None +tqdm==4.67.1 # Vulnerabilities: None twine==6.1.0 # Vulnerabilities: None -watchdog==6.0.0 # From 5.0.3 | Vulnerabilities: None -wheel==0.45.1 # From 0.44.0 | Vulnerabilities: None +watchdog==6.0.0 # Vulnerabilities: None +watchfiles==1.0.4 # Vulnerabilities: None +wheel==0.45.1 # Vulnerabilities: None xmltodict==0.14.2 # Vulnerabilities: None - diff --git a/tests-badge.svg b/tests-badge.svg index 306ea6a6..90005645 100644 --- a/tests-badge.svg +++ b/tests-badge.svg @@ -1 +1 @@ -tests: 121tests121 +tests: 134tests134 diff --git a/tests/test_common_functions/test_file_functions/test_append_csv.py b/tests/test_common_functions/test_file_functions/test_append_csv.py new file mode 100644 index 00000000..8eea6b12 --- /dev/null +++ b/tests/test_common_functions/test_file_functions/test_append_csv.py @@ -0,0 +1,51 @@ +import unittest +from pathlib import Path +from 
class TestAppendCSV(unittest.TestCase):
    """Unit tests for append_csv(): appending rows to an existing CSV file."""

    def setUp(self):
        # Rows for the initial file and for the append call; both share the
        # same header row, which append_csv requires.
        self.test_data = [
            ["Name", "Email"],
            ["John Doe", "jdoe@example.com"],
        ]
        self.append_data = [
            ["Name", "Email"],
            ["Jane Smith", "jsmith@example.com"],
        ]
        # Remove any leftover file from a previous run.
        self.csv_path = Path("data/csv/test_append.csv")
        if self.csv_path.exists():
            self.csv_path.unlink()

    def tearDown(self):
        # Clean up the file created during the test, if any.
        if self.csv_path.exists():
            self.csv_path.unlink()

    @patch("dsg_lib.common_functions.file_functions.directory_to_files", "data")
    def test_append_csv_valid_data(self):
        # Appending rows with a matching header should report success.
        save_csv("test_append", self.test_data)
        outcome = append_csv("test_append", self.append_data)
        self.assertEqual(outcome, "appended")
        self.assertTrue(self.csv_path.exists())

    @patch("dsg_lib.common_functions.file_functions.directory_to_files", "data")
    def test_append_csv_file_not_found(self):
        # Appending to a file that was never created must raise.
        with self.assertRaises(FileNotFoundError):
            append_csv("non_existent", self.append_data)

    @patch("dsg_lib.common_functions.file_functions.directory_to_files", "data")
    def test_append_csv_header_mismatch(self):
        # A header row differing from the existing file's header must raise.
        save_csv("test_append", self.test_data)
        mismatched_rows = [
            ["Different", "Header"],
            ["John Doe", "jdoe@example.com"],
        ]
        with self.assertRaises(ValueError):
            append_csv("test_append", mismatched_rows)

    @patch("dsg_lib.common_functions.file_functions.directory_to_files", "data")
    def test_append_csv_invalid_data_type(self):
        # Non-list payloads are rejected.
        save_csv("test_append", self.test_data)
        with self.assertRaises(TypeError):
            append_csv("test_append", "not a list")
@patch("dsg_lib.common_functions.file_functions.directory_to_files", "data") def test_open_csv_with_valid_file(self): - data = open_csv("test_file") + # Updated to include '.csv' + data = open_csv("test_file.csv") self.assertEqual(len(data), 2) self.assertEqual(data[0]["col1"], "1") self.assertEqual(data[0]["col2"], "2") diff --git a/tests/test_common_functions/test_file_functions/test_save_csv.py b/tests/test_common_functions/test_file_functions/test_save_csv.py index 86d0e959..8486db1b 100644 --- a/tests/test_common_functions/test_file_functions/test_save_csv.py +++ b/tests/test_common_functions/test_file_functions/test_save_csv.py @@ -52,7 +52,7 @@ def test_save_csv_with_custom_quotechar(self): def test_save_csv_with_custom_root_folder(self): result = save_csv("test_file", self.test_data, root_folder="data/custom") self.assertEqual(result, "complete") - custom_path = Path("data/custom/csv/test_file.csv") + custom_path = Path("data/custom/test_file.csv") self.assertTrue(custom_path.exists()) @patch("dsg_lib.common_functions.file_functions.directory_to_files", "data") diff --git a/tests/test_common_functions/test_file_mover/__init__.py b/tests/test_common_functions/test_file_mover/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_common_functions/test_file_mover/test_file_mover.py b/tests/test_common_functions/test_file_mover/test_file_mover.py new file mode 100644 index 00000000..48b96082 --- /dev/null +++ b/tests/test_common_functions/test_file_mover/test_file_mover.py @@ -0,0 +1,194 @@ +import tempfile +import unittest +from pathlib import Path +from unittest.mock import patch +from watchfiles import Change # Import Change from watchfiles +import logging + +from dsg_lib.common_functions.file_mover import process_files_flow + +logger = logging.getLogger(__name__) +logging.basicConfig(level=logging.DEBUG) + + +class TestFileMover(unittest.TestCase): + def setUp(self): + self.test_dir = tempfile.TemporaryDirectory() + 
self.src_folder = Path(self.test_dir.name) / "src" + self.temp_folder = Path(self.test_dir.name) / "temp" + self.dest_folder = Path(self.test_dir.name) / "dest" + self.src_folder.mkdir() + self.temp_folder.mkdir() + self.dest_folder.mkdir() + self.test_file = self.src_folder / "test.txt" + self.test_file.write_text("This is a test file.") + + def tearDown(self): + self.test_dir.cleanup() + + def test_process_files_flow_move_only(self): + # Test moving a file without compression + with patch( + "dsg_lib.common_functions.file_mover.watch", + return_value=[[(Change.added, str(self.test_file))]], + ): + process_files_flow( + source_dir=str(self.src_folder), + temp_dir=str(self.temp_folder), + final_dir=str(self.dest_folder), + file_pattern="*.txt", + compress=False, + max_iterations=1, # Limit iterations for testing + ) + self.assertFalse(self.test_file.exists()) + self.assertTrue((self.dest_folder / "test.txt").exists()) + + def test_process_files_flow_with_compression(self): + # Test moving and compressing a file + with patch( + "dsg_lib.common_functions.file_mover.watch", + return_value=iter([[(Change.added, str(self.test_file))]]), + ): + process_files_flow( + source_dir=str(self.src_folder), + temp_dir=str(self.temp_folder), + final_dir=str(self.dest_folder), + file_pattern="*.txt", + compress=True, + max_iterations=1, # Limit iterations for testing + ) + self.assertFalse(self.test_file.exists()) + compressed_file = next(self.dest_folder.glob("*.zip"), None) + self.assertIsNotNone( + compressed_file + ) # Ensure a compressed file exists in the destination folder + + def test_process_files_flow_invalid_pattern(self): + # Test with a file that does not match the pattern + with patch( + "dsg_lib.common_functions.file_mover.watch", + return_value=[[(Change.added, str(self.test_file))]], + ): + process_files_flow( + source_dir=str(self.src_folder), + temp_dir=str(self.temp_folder), + final_dir=str(self.dest_folder), + file_pattern="*.log", + compress=False, + 
max_iterations=1, # Limit iterations for testing + ) + self.assertTrue(self.test_file.exists()) + self.assertFalse((self.dest_folder / "test.txt").exists()) + + def test_process_files_flow_error_handling(self): + # Test error handling during file processing + with patch( + "dsg_lib.common_functions.file_mover.shutil.move", + side_effect=Exception("Mocked error"), + ): + with patch( + "dsg_lib.common_functions.file_mover.watch", + return_value=[[(Change.added, str(self.test_file))]], + ): + with self.assertRaises(Exception): # Ensure exception is raised + process_files_flow( + source_dir=str(self.src_folder), + temp_dir=str(self.temp_folder), + final_dir=str(self.dest_folder), + file_pattern="*.txt", + compress=False, + max_iterations=1, # Limit iterations for testing + ) + self.assertTrue(self.test_file.exists()) + + def test_process_files_flow_compression_error(self): + # Test error handling during compression + with patch( + "dsg_lib.common_functions.file_mover.shutil.make_archive", + side_effect=Exception("Mocked compression error"), + ): + with patch( + "dsg_lib.common_functions.file_mover.watch", + return_value=[[(Change.added, str(self.test_file))]], + ): + with self.assertRaises(Exception): # Ensure exception is raised + process_files_flow( + source_dir=str(self.src_folder), + temp_dir=str(self.temp_folder), + final_dir=str(self.dest_folder), + file_pattern="*.txt", + compress=True, + max_iterations=1, # Limit iterations for testing + ) + # Verify that the original file remains in the temporary folder + self.assertTrue((self.temp_folder / "test.txt").exists()) + # Verify that the compressed file does not exist + self.assertFalse((self.temp_folder / "test.zip").exists()) + + def test_process_files_flow_cleanup_error(self): + # Test error handling during cleanup of uncompressed files + with patch( + "dsg_lib.common_functions.file_mover.Path.unlink", + side_effect=Exception("Mocked cleanup error"), + ): + with patch( + 
"dsg_lib.common_functions.file_mover.watch", + return_value=[[(Change.added, str(self.test_file))]], + ): + process_files_flow( + source_dir=str(self.src_folder), + temp_dir=str(self.temp_folder), + final_dir=str(self.dest_folder), + file_pattern="*.txt", + compress=True, + max_iterations=1, # Limit iterations for testing + ) + # Verify that the compressed file exists in the destination folder + compressed_file = next(self.dest_folder.glob("*.zip"), None) + self.assertIsNotNone(compressed_file) # Ensure a compressed file exists + + def test_process_files_flow_existing_files(self): + logger.debug("Starting test_process_files_flow_existing_files") + with patch( + "dsg_lib.common_functions.file_mover.watch", return_value=iter([[]]) + ): + process_files_flow( + source_dir=str(self.src_folder), + temp_dir=str(self.temp_folder), + final_dir=str(self.dest_folder), + file_pattern="*.txt", # Ensure the pattern matches the test file + compress=False, + max_iterations=1, # Limit iterations for testing + ) + logger.debug( + "Finished process_files_flow in test_process_files_flow_existing_files" + ) + # Verify that the file is moved to the destination directory + self.assertFalse(self.test_file.exists()) + self.assertTrue((self.dest_folder / "test.txt").exists()) + + def test_watch_processing_error_exception(self): + # Force _process_file to throw an exception during watch processing + with patch("dsg_lib.common_functions.file_mover.Path.glob", return_value=[]): + with patch( + "dsg_lib.common_functions.file_mover._process_file", + side_effect=Exception("Mocked processing error"), + ): + with patch( + "dsg_lib.common_functions.file_mover.watch", + return_value=iter([[(Change.added, str(self.test_file))]]), + ): + with self.assertRaises(Exception) as context: + process_files_flow( + source_dir=str(self.src_folder), + temp_dir=str(self.temp_folder), + final_dir=str(self.dest_folder), + file_pattern="*.txt", + compress=False, + max_iterations=1, # Limit iterations for testing + ) 
+ self.assertIn("Mocked processing error", str(context.exception)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_common_functions/test_logging_config.py b/tests/test_common_functions/test_logging_config.py index d06e056d..44ecaedc 100644 --- a/tests/test_common_functions/test_logging_config.py +++ b/tests/test_common_functions/test_logging_config.py @@ -11,7 +11,7 @@ class TestConfigLog(unittest.TestCase): def test_config_log_with_valid_params(self, mock_logger): config_log( logging_directory="log", - log_name ="log", + log_name="log", logging_level="INFO", log_rotation="100 MB", log_retention="30 days", @@ -23,7 +23,7 @@ def test_config_log_with_valid_params(self, mock_logger): append_app_name=False, enqueue=True, intercept_standard_logging=True, - compression='zip', + compression="zip", ) mock_logger.configure.assert_called_once() mock_logger.add.assert_called_once() diff --git a/tests/test_database_functions/test_async_database.py b/tests/test_database_functions/test_async_database.py index 368b16cc..99c143d5 100644 --- a/tests/test_database_functions/test_async_database.py +++ b/tests/test_database_functions/test_async_database.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import asyncio import secrets - +import pytest_asyncio import pytest from sqlalchemy import Column, Integer, String, delete, insert, select from sqlalchemy.exc import IntegrityError, SQLAlchemyError @@ -28,6 +28,22 @@ class User(async_db.Base): name = Column(String, unique=True) +@pytest_asyncio.fixture(scope="class", autouse=True) +async def setup_database(): + await async_db.create_tables() + yield + # await async_db.drop_tables() + + +@pytest_asyncio.fixture(scope="function", autouse=True) +async def setup_teardown(): + # Clean the database before each test + await db_ops.execute_one(delete(User)) + yield + # Clean the database after each test + await db_ops.execute_one(delete(User)) + + class TestDatabaseOperations: @pytest.fixture(scope="session") def 
db_ops(self): @@ -440,34 +456,19 @@ async def test_delete_many_exception(self, db_ops): # assert result contains "error" assert "error" in result - @pytest.fixture(scope="class", autouse=True) - async def setup_database(self): - await async_db.create_tables() - yield - await async_db.drop_tables() - - @pytest.fixture(scope="function", autouse=True) - async def setup_teardown(self): - # Clean the database before each test - await db_ops.execute_one(delete(User)) - yield - # Clean the database after each test - await db_ops.execute_one(delete(User)) @pytest.mark.asyncio async def test_execute_one_insert(self): - query = insert(User).values(name='Test User') + query = insert(User).values(name="Test User") result = await db_ops.execute_one(query) assert result == "complete" - r_query = select(User).where(User.name == 'Test User') + r_query = select(User).where(User.name == "Test User") user = await db_ops.read_one_record(query=r_query) - assert user.name == 'Test User' + assert user.name == "Test User" @pytest.mark.asyncio async def test_execute_many_insert(self): - queries = [ - (insert(User), {'name': f'User {i}'}) for i in range(1, 6) - ] + queries = [(insert(User), {"name": f"User {i}"}) for i in range(1, 6)] result = await db_ops.execute_many(queries) assert result == "complete" r_query = select(User) @@ -476,30 +477,26 @@ async def test_execute_many_insert(self): @pytest.mark.asyncio async def test_execute_one_delete(self): - query = insert(User).values(name='Test User') + query = insert(User).values(name="Test User") await db_ops.execute_one(query) - query = delete(User).where(User.name == 'Test User') + query = delete(User).where(User.name == "Test User") result = await db_ops.execute_one(query) assert result == "complete" - r_query = select(User).where(User.name == 'Test User') + r_query = select(User).where(User.name == "Test User") user = await db_ops.read_one_record(query=r_query) assert user is None @pytest.mark.asyncio async def 
test_execute_many_delete(self): # Insert users to delete - queries = [ - (insert(User), {'name': f'User {i}'}) for i in range(1, 6) - ] + queries = [(insert(User), {"name": f"User {i}"}) for i in range(1, 6)] await db_ops.execute_many(queries) # Fetch all users r_query = select(User) users = await db_ops.read_query(query=r_query) # Create delete queries based on pkid user_pkids = [user.pkid for user in users] - queries = [ - (delete(User).where(User.pkid == pkid), None) for pkid in user_pkids - ] + queries = [(delete(User).where(User.pkid == pkid), None) for pkid in user_pkids] result = await db_ops.execute_many(queries) assert result == "complete" # Verify all users are deleted diff --git a/tests/test_database_functions/test_base_schema.py b/tests/test_database_functions/test_base_schema.py index 10395e75..d759fdfd 100644 --- a/tests/test_database_functions/test_base_schema.py +++ b/tests/test_database_functions/test_base_schema.py @@ -7,7 +7,10 @@ from sqlalchemy import Column, String, create_engine from sqlalchemy.orm import declarative_base, sessionmaker -from dsg_lib.async_database_functions.base_schema import SchemaBasePostgres, SchemaBaseSQLite +from dsg_lib.async_database_functions.base_schema import ( + SchemaBasePostgres, + SchemaBaseSQLite, +) # Get the database URL from the environment variable database_url = os.getenv( diff --git a/tests/test_endpoints/test_default_endpoints.py b/tests/test_endpoints/test_default_endpoints.py new file mode 100644 index 00000000..3838411d --- /dev/null +++ b/tests/test_endpoints/test_default_endpoints.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +from fastapi import FastAPI +from fastapi.testclient import TestClient + +from dsg_lib.fastapi_functions.default_endpoints import create_default_router + +# Create a FastAPI app for testing +app = FastAPI() +client = TestClient(app) + +# User configuration for default endpoints +config = [ + {"bot": "Bytespider", "allow": False}, + {"bot": "GPTBot", "allow": False}, + {"bot": 
"ClaudeBot", "allow": True}, + {"bot": "ImagesiftBot", "allow": True}, + {"bot": "CCBot", "allow": False}, + {"bot": "ChatGPT-User", "allow": True}, + {"bot": "omgili", "allow": False}, + {"bot": "Diffbot", "allow": False}, + {"bot": "Claude-Web", "allow": True}, + {"bot": "PerplexityBot", "allow": False}, + {"bot": "Googlebot", "allow": True}, + {"bot": "Bingbot", "allow": True}, + {"bot": "Baiduspider", "allow": False}, + {"bot": "YandexBot", "allow": False}, + {"bot": "DuckDuckBot", "allow": True}, + {"bot": "Sogou", "allow": False}, + {"bot": "Exabot", "allow": False}, + {"bot": "facebot", "allow": False}, + {"bot": "ia_archiver", "allow": False}, +] + +# Default router +default_router = create_default_router(config) +app.include_router(default_router, prefix="", tags=["default"]) + + +def test_robots_txt(): + response = client.get("/robots.txt") + assert response.status_code == 200 + content = response.text + assert "User-agent: Bytespider" in content + assert "Disallow: /" in content + assert "User-agent: GPTBot" in content + assert "Disallow: /" in content + assert "User-agent: ClaudeBot" in content + assert "Allow: /" in content + assert "User-agent: Googlebot" in content + assert "Allow: /" in content + assert "User-agent: Baiduspider" in content + assert "Disallow: /" in content