Skip to content

Commit 934ebec

Browse files
Update README.md
1 parent f3d4aac commit 934ebec

File tree

1 file changed

+11
-40
lines changed

1 file changed

+11
-40
lines changed

README.md

Lines changed: 11 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -17,34 +17,18 @@ This client acts simply as a global resource that may live as long as your appli
1717

1818
### Vanilla Python (Sync)
1919

20+
You may use the `ArachnidShield` client that has all the methods needed to consume the Arachnid Shield API.
21+
2022
```python
2123
from arachnid_shield_sdk import ArachnidShield
2224

2325
shield = ArachnidShield(username="", password="")
24-
25-
26-
class HarmfulMediaFoundException(Exception):
27-
"""Raised when a CSAM/Harmful to Children media is found to be uploaded on the server"""
28-
user = None
29-
scanned_media_metadata = None
30-
31-
def __init__(self, user, scanned_media_metadata):
32-
self.user = user
33-
self.scanned_media_metadata = scanned_media_metadata
34-
35-
36-
def process_media_for_user(user_id, contents):
37-
"""
3826

39-
Raises:
40-
HarmfulMediaFoundException If the
41-
"""
27+
def process_media(contents):
4228

4329
scanned_media = shield.scan_media_from_bytes(contents, "image/jpeg")
4430
if scanned_media.matches_known_image:
45-
raise HarmfulMediaFoundException(user=user_id, scanned_media_metadata=scanned_media)
46-
47-
# do more processing here.
31+
print(f"harmful media found!: {scanned_media}")
4832
...
4933

5034

@@ -53,52 +37,39 @@ def main():
5337
with open("some-image.jpeg", "rb") as f:
5438
contents = f.read()
5539

56-
process_media_for_user(user_id=1, contents=contents)
40+
process_media(contents)
5741

5842

5943
if __name__ == '__main__':
6044
main()
45+
6146
```
6247

6348
### Vanilla Python (Async)
6449

50+
In `async` environments, you may use the `ArachnidShieldAsync` client which has the exact same interface as the `ArachnidShield` client but where all the methods are awaitable coroutines.
51+
6552
```python
6653
import asyncio
6754
from arachnid_shield_sdk import ArachnidShieldAsync as ArachnidShield
6855

6956
shield = ArachnidShield(username="", password="")
7057

71-
72-
class HarmfulMediaFoundException(Exception):
73-
"""Raised when a CSAM/Harmful to Children media is found to be uploaded on the server"""
74-
user = None
75-
scanned_media_metadata = None
76-
77-
def __init__(self, scanned_media_metadata):
78-
self.scanned_media_metadata = scanned_media_metadata
79-
80-
8158
async def process_media(contents):
82-
"""
83-
84-
Raises:
85-
HarmfulMediaFoundException If the
86-
"""
8759

8860
scanned_media = await shield.scan_media_from_bytes(contents, "image/jpeg")
8961
if scanned_media.matches_known_image:
90-
raise HarmfulMediaFoundException(scanned_media)
91-
92-
# do more processing here.
62+
print(f"harmful media found!: {scanned_media}")
9363
...
9464

9565

9666
async def main():
9767
with open("some-image.jpeg", "rb") as f:
9868
contents = f.read()
99-
await process_media(contents=contents)
69+
await process_media(contents)
10070

10171

10272
if __name__ == '__main__':
10373
asyncio.get_event_loop().run_until_complete(main())
74+
10475
```

0 commit comments

Comments
 (0)