diff --git a/.github/workflows/test-dev.yml b/.github/workflows/test-dev.yml
index ace91192..4a05d624 100644
--- a/.github/workflows/test-dev.yml
+++ b/.github/workflows/test-dev.yml
@@ -5,13 +5,14 @@ on:
pull_request:
branches:
- main
- - main-next-release
+ # - main-next-release
push:
branches:
- main
- - main-next-release
+ # - main-next-release
jobs:
+
test:
strategy:
matrix:
@@ -30,6 +31,8 @@ jobs:
runs-on: ${{ matrix.os }}
+ if: github.ref == 'refs/heads/main'
+
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -59,3 +62,46 @@ jobs:
uses: codecov/codecov-action@v4.0.1
with:
token: ${{ secrets.CODECOV_TOKEN }}
+
+
+ test-next-release:
+ strategy:
+ matrix:
+ include:
+ # Define specific Python versions for each OS
+ - os: ubuntu-latest
+ python-version: 3.11
+ # - os: windows-latest
+ # python-version: 3.11
+ # - os: macos-latest
+ # python-version: 3.11
+ - os: ubuntu-latest
+ python-version: 3.12
+
+ runs-on: ${{ matrix.os }}
+
+ if: github.ref == 'refs/heads/main-next-release'
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v3
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Install dependencies
+ run: pip install -r tests/requirements.txt
+
+ - name: Run unit tests and generate coverage report
+ run: |
+ pip install coverage
+ coverage run -m unittest discover -s tests -p 'test_*.py'
+ coverage report -m
+
+
+
+
+
+
diff --git a/.gitmodules b/.gitmodules
index b293d9cc..62657cd1 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,6 +1,6 @@
[submodule "examples"]
path = examples
- url = https://github.com/VigneshVSV/hololinked-examples.git
+ url = https://github.com/hololinked-dev/examples.git
[submodule "doc"]
path = doc
- url = https://github.com/VigneshVSV/hololinked-docs.git
+ url = https://github.com/hololinked-dev/docs-v2.git
diff --git a/README.md b/README.md
index 80a19aae..3ddffd6b 100644
--- a/README.md
+++ b/README.md
@@ -26,6 +26,11 @@ Or, clone the repository (main branch for latest codebase) and install `pip inst
For next-release code base, see [main-next-release](https://github.com/hololinked-dev/hololinked/tree/main-next-release) branch. Out of the many improvements, an attempt to align better with Web of Things is being made, along with a layer of protocol agnosticim.
+### Main Next-Release
+
+This branch is the main branch for the next release. The current release is 0.2 and the next release is 0.3.
+Not all features may work, but unit tests need to pass while merging.
+
### Usage/Quickstart
`hololinked` is compatible with the [Web of Things](https://www.w3.org/WoT/) recommended pattern for developing hardware/instrumentation control software.
@@ -64,35 +69,30 @@ class OceanOpticsSpectrometer(Thing):
#### Instantiating properties
-Say, we wish to make device serial number, integration time and the captured intensity as properties. There are certain predefined properties available like `String`, `Number`, `Boolean` etc.
-or one may define one's own. To create properties:
+Say, we wish to make device serial number, integration time and the captured intensity as properties. There are certain predefined properties available like `String`, `Number`, `Boolean` etc.
+or one may define one's own using [pydantic or JSON schema](https://docs.staging.hololinked.dev/howto/articles/properties/#schema-constrained-property). To create properties:
```python
class OceanOpticsSpectrometer(Thing):
"""class doc"""
- serial_number = String(default=None, allow_None=True, URL_path='/serial-number',
+ serial_number = String(default=None, allow_None=True,
doc="serial number of the spectrometer to connect/or connected",
- http_method=("GET", "PUT"))
- # GET and PUT is default for reading and writing the property respectively.
- # So you can leave it out, especially if you are going to use ZMQ and dont understand HTTP
-
+ )
+
integration_time = Number(default=1000, bounds=(0.001, None), crop_to_bounds=True,
- URL_path='/integration-time',
doc="integration time of measurement in milliseconds")
intensity = List(default=None, allow_None=True,
doc="captured intensity", readonly=True,
fget=lambda self: self._intensity)
- def __init__(self, instance_name, serial_number, **kwargs):
- super().__init__(instance_name=instance_name, serial_number=serial_number, **kwargs)
+ def __init__(self, id, serial_number, **kwargs):
+ super().__init__(id=id, serial_number=serial_number, **kwargs)
```
-> There is an ongoing work to remove HTTP API from the property API and completely move them to the HTTP server
-
In non-expert terms, properties look like class attributes however their data containers are instantiated at object instance level by default.
For example, the `integration_time` property defined above as `Number`, whenever set/written, will be validated as a float or int, cropped to bounds and assigned as an attribute to each instance of the `OceanOpticsSpectrometer` class with an internally generated name. It is not necessary to know this internally generated name as the property value can be accessed again in any python logic, say, `print(self.integration_time)`.
@@ -102,7 +102,6 @@ To overload the get-set (or read-write) of properties, one may do the following:
class OceanOpticsSpectrometer(Thing):
integration_time = Number(default=1000, bounds=(0.001, None), crop_to_bounds=True,
- URL_path='/integration-time',
doc="integration time of measurement in milliseconds")
@integration_time.setter # by default called on http PUT method
@@ -148,8 +147,7 @@ If you are not familiar with Web
what the property represents and how to interact with it from somewhere else. Such a JSON is both human-readable, yet consumable by any application that may use the property - say, a client provider to create a client object to interact with the property or a GUI application to autogenerate a suitable input field for this property.
For example, the Eclipse ThingWeb [node-wot](https://github.com/eclipse-thingweb/node-wot) supports this feature to produce a HTTP(s) client that can issue `readProperty("integration_time")` and `writeProperty("integration_time", 1000)` to read and write this property.
-The URL path segment `../spectrometer/..` in href field is taken from the `instance_name` which was specified in the `__init__`.
-This is a mandatory key word argument to the parent class `Thing` to generate a unique name/id for the instance. One should use URI compatible strings.
+[Full Documentation](https://docs.staging.hololinked.dev/howto/articles/properties/)
#### Specify methods as actions
@@ -159,7 +157,7 @@ decorate with `action` decorator on a python method to claim it as a network acc
class OceanOpticsSpectrometer(Thing):
- @action(URL_path='/connect', http_method="POST") # POST is default for actions
+ @action(input_schema={"type": "object", "properties": {"serial_number": {"type": "string"}}})
def connect(self, serial_number = None):
"""connect to spectrometer with given serial number"""
if serial_number is not None:
@@ -206,6 +204,8 @@ and how to interact with it):
> input and output schema ("input" field above which describes the argument type `serial_number`) are optional and will be discussed in docs
+[Full Documentation](https://docs.staging.hololinked.dev/howto/articles/actions/)
+
#### Defining and pushing events
create a named event using `Event` object that can push any arbitrary data:
@@ -215,7 +215,6 @@ class OceanOpticsSpectrometer(Thing):
# only GET HTTP method possible for events
intensity_measurement_event = Event(name='intensity-measurement-event',
- URL_path='/intensity/measurement-event',
doc="""event generated on measurement of intensity,
max 30 per second even if measurement is faster.""",
schema=intensity_event_schema)
@@ -240,14 +239,14 @@ class OceanOpticsSpectrometer(Thing):
})
last_time = time.time()
- @action(URL_path='/acquisition/start', http_method="POST")
+ @action()
def start_acquisition(self):
if self._acquisition_thread is not None and self._acquisition_thread.is_alive():
return
self._acquisition_thread = threading.Thread(target=self.capture)
self._acquisition_thread.start()
- @action(URL_path='/acquisition/stop', http_method="POST")
+ @action()
def stop_acquisition(self):
self._run = False
```
@@ -289,6 +288,8 @@ what the event represents and how to subscribe to it) with subprotocol SSE (HTTP
> data schema ("data" field above which describes the event payload) are optional and discussed later
+[Full Documentation](https://docs.staging.hololinked.dev/howto/articles/events/)
+
Events follow a pub-sub model with '1 publisher to N subscribers' per `Event` object, both through ZMQ and HTTP SSE.
To start the Thing, a configurable HTTP Server is already available (from `hololinked.server.HTTPServer`) which redirects HTTP requests to the object:
@@ -304,7 +305,7 @@ if __name__ == '__main__':
keyfile = f'assets{os.sep}security{os.sep}key.pem')
O = OceanOpticsSpectrometer(
- instance_name='spectrometer',
+ id='spectrometer',
serial_number='S14155',
log_level=logging.DEBUG
)
@@ -314,10 +315,6 @@ if __name__ == '__main__':
# both interprocess communication & TCP, no HTTP
```
-> There is an ongoing work to remove HTTP API from the API of all of properties, actions and events and completely move them to the HTTP server for a more accurate syntax. The functionality will not change though.
-
-Here one can see the use of `instance_name` and why it turns up in the URL path. See the detailed example of the above code [here](https://gitlab.com/hololinked-examples/oceanoptics-spectrometer/-/blob/simple/oceanoptics_spectrometer/device.py?ref_type=heads).
-
##### NOTE - The package is under active development. Contributors welcome, please check CONTRIBUTING.md and the open issues. Some issues can also be independently dealt without much knowledge of this package.
- [examples repository](https://github.com/hololinked-dev/examples) - detailed examples for both clients and servers
diff --git a/doc b/doc
index d4e965b5..64487693 160000
--- a/doc
+++ b/doc
@@ -1 +1 @@
-Subproject commit d4e965b5ad5b8c0b88f807d031b72e76acf9cde9
+Subproject commit 64487693cb1132f3c248e07c3f9602833069c898
diff --git a/examples b/examples
index aceda901..c9de52c4 160000
--- a/examples
+++ b/examples
@@ -1 +1 @@
-Subproject commit aceda901043b7da53f087b1cf46fcaaa7206f393
+Subproject commit c9de52c473156cf4854afa0feff9ab9af8d766ae
diff --git a/hololinked/__init__.py b/hololinked/__init__.py
index b5c9b6cb..260c070a 100644
--- a/hololinked/__init__.py
+++ b/hololinked/__init__.py
@@ -1 +1 @@
-__version__ = "0.2.12"
+__version__ = "0.3.1"
diff --git a/hololinked/client/abstractions.py b/hololinked/client/abstractions.py
new file mode 100644
index 00000000..3a7eaafb
--- /dev/null
+++ b/hololinked/client/abstractions.py
@@ -0,0 +1,382 @@
+"""
+MIT License
+
+Copyright (c) 2018 CTIC Centro Tecnologico
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+from types import FunctionType, MethodType
+import typing
+import builtins
+
+from ..td import PropertyAffordance, ActionAffordance, EventAffordance
+
+
+
+class ConsumedThingAction:
+
+ # action call abstraction
+    # Don't add doc otherwise __doc__ in slots will conflict with class variable
+
+ def __init__(self,
+ resource: ActionAffordance,
+ owner_inst: typing.Optional[typing.Any] = None,
+ # schema_validator: typing.Type[BaseSchemaValidator] | None = None
+ **kwargs
+ ) -> None:
+ """
+ Parameters
+ ----------
+ resource: ActionAffordance
+ dataclass object representing the action
+ """
+ self._resource = resource
+ self._schema_validator = None # schema_validator
+ self._owner_inst = owner_inst
+
+ def get_last_return_value(self, raise_exception: bool = False) -> typing.Any:
+ """retrieve return value of the last call to the action"""
+ raise NotImplementedError("implement get_last_return_value per protocol")
+
+ last_return_value = property(fget=get_last_return_value,
+ doc="cached return value of the last call to the method")
+
+ def __call__(self, *args, **kwargs) -> typing.Any:
+ """
+ Invoke action/method on server
+
+ Parameters
+ ----------
+ *args: typing.Any
+ arguments to the action
+ **kwargs: typing.Any
+ keyword arguments to the action
+ """
+ raise NotImplementedError("implement action _call__ per protocol")
+
+ async def async_call(self, *args, **kwargs) -> typing.Any:
+ """
+ async invoke action on server - asynchronous at the network level, may not necessarily be at the server level.
+
+ Parameters
+ ----------
+ *args: typing.Any
+ arguments to the action
+ **kwargs: typing.Any
+ keyword arguments to the action
+ """
+ raise NotImplementedError("implement action async_call per protocol")
+
+ def oneway(self, *args, **kwargs) -> None:
+ """
+ Only invokes the action on the server and does not wait for reply,
+        neither does the server reply to this invocation.
+
+ Parameters
+ ----------
+ *args: typing.Any
+ arguments to the action
+ **kwargs: typing.Any
+ keyword arguments to the action
+ """
+ raise NotImplementedError("implement action oneway call per protocol")
+
+ def noblock(self, *args, **kwargs) -> str:
+ """
+ Invoke the action and collect the reply later
+
+ Parameters
+ ----------
+ *args: typing.Any
+ arguments to the action
+ **kwargs: typing.Any
+ keyword arguments to the action
+
+ Returns
+ -------
+ str
+ id of the request or message (UUID4 as string)
+ """
+ raise NotImplementedError("implement action noblock call per protocol")
+
+ def read_reply(self, message_id: str, timeout: float | int | None = None) -> typing.Any:
+ """
+ Read the reply of the action call
+
+ Parameters
+ ----------
+ message_id: str
+ id of the request or message (UUID4 as string)
+
+ Returns
+ -------
+ typing.Any
+ reply of the action call
+ """
+ raise NotImplementedError("implement action read_reply per protocol")
+
+ def __hash__(self):
+ return hash(self._resource.name)
+
+ def __eq__(self, other):
+ if not isinstance(other, ConsumedThingAction):
+ return False
+ return self._resource.name == other._resource.name
+
+
+class ConsumedThingProperty:
+
+ # property get set abstraction
+    # Don't add doc otherwise __doc__ in slots will conflict with class variable
+
+ def __init__(self,
+ resource: PropertyAffordance,
+ owner_inst: typing.Optional[typing.Any] = None,
+ **kwargs
+ ) -> None:
+ """
+ Parameters
+ ----------
+ resource: PropertyAffordance
+ dataclass object representing the property
+ """
+ self._resource = resource
+ self._owner_inst = owner_inst
+
+ @property # i.e. cannot have setter
+ def last_read_value(self) -> typing.Any:
+ """cache of last read value"""
+ raise NotImplementedError("implement last_read_value per protocol")
+
+ def set(self, value: typing.Any) -> None:
+ """
+ Set or write property value.
+
+ Parameters
+ ----------
+ value: typing.Any
+ value to set
+ """
+ raise NotImplementedError("implement property set per protocol")
+
+ def get(self) -> typing.Any:
+ """
+ Get or read property value.
+
+ Returns
+ -------
+ typing.Any
+ property value
+ """
+ raise NotImplementedError("implement property get per protocol")
+
+ async def async_set(self, value: typing.Any) -> None:
+ """
+ Async set property value - asynchronous at the network level, may not necessarily be at the server level.
+
+ Parameters
+ ----------
+ value: typing.Any
+ value to set
+ """
+ raise NotImplementedError("implement async property set per protocol")
+
+ async def async_get(self) -> typing.Any:
+ """
+ Async get or read property value.
+
+ Returns
+ -------
+ typing.Any
+ property value
+ """
+ raise NotImplementedError("implement async property get per protocol")
+
+ def noblock_get(self) -> str:
+ """
+ Get or read property value without blocking, i.e. collect it later as the method returns immediately.
+
+ Returns
+ -------
+ str
+ id of the request or message (UUID4 as string)
+ """
+ raise NotImplementedError("implement property noblock get per protocol")
+
+ def noblock_set(self, value: typing.Any) -> str:
+ """
+ Set or write property value without blocking, i.e. collect it later as the method returns immediately.
+
+ Parameters
+ ----------
+ value: typing.Any
+ value to set
+
+ Returns
+ -------
+ str
+ id of the request or message (UUID4 as string)
+ """
+ raise NotImplementedError("implement property noblock set per protocol")
+
+ def oneway_set(self, value: typing.Any) -> None:
+ """
+ Set property value without waiting for acknowledgement. The server also does not send any reply.
+
+ Parameters
+ ----------
+ value: typing.Any
+ value to set
+ """
+ raise NotImplementedError("implement property oneway set per protocol")
+
+ def observe(self, *callbacks: typing.Callable) -> None:
+ """
+ Observe property value changes
+
+ Parameters
+ ----------
+ *callbacks: typing.Callable
+ callback to call when property value changes
+ """
+ raise NotImplementedError("implement property observe per protocol")
+
+ def unobserve(self) -> None:
+ """Stop observing property value changes"""
+ raise NotImplementedError("implement property unobserve per protocol")
+
+ def read_reply(self, message_id: str, timeout: float | int | None = None) -> typing.Any:
+ """
+        Read the reply of a noblock property read/write call
+
+ Parameters
+ ----------
+ message_id: str
+ id of the request or message (UUID4 as string)
+
+ Returns
+ -------
+ typing.Any
+            reply of the property read/write call
+ """
+ raise NotImplementedError("implement action read_reply per protocol")
+
+
+class ConsumedThingEvent:
+
+ # event subscription
+    # Don't add class doc otherwise __doc__ in slots will conflict with class variable
+
+ def __init__(self,
+ resource: EventAffordance,
+ **kwargs
+ ) -> None:
+ """
+ Parameters
+ ----------
+ resource: EventAffordance
+ dataclass object representing the event
+ """
+ self._resource = resource
+ self._callbacks = None
+ self._thread_callbacks = False
+ self._logger = kwargs.get('logger', None)
+
+ def add_callbacks(self, *callbacks : typing.Union[typing.List[typing.Callable], typing.Callable]) -> None:
+ """
+ add callbacks to the event
+
+ Parameters
+ ----------
+ *callbacks: typing.List[typing.Callable] | typing.Callable
+ callback or list of callbacks to add
+ """
+ if not self._callbacks:
+ self._callbacks = []
+ if isinstance(callbacks, (FunctionType, MethodType)):
+ self._callbacks.append(callbacks)
+ elif isinstance(callbacks, (list, tuple)):
+ self._callbacks.extend(callbacks)
+ else:
+ raise TypeError("callbacks must be a callable or a list of callables")
+
+ def subscribe(self,
+ callbacks: typing.Union[typing.List[typing.Callable], typing.Callable],
+ thread_callbacks: bool = False,
+ deserialize: bool = True
+ ) -> None:
+ """
+ subscribe to the event
+
+ Parameters
+ ----------
+ callbacks: typing.List[typing.Callable] | typing.Callable
+ callback or list of callbacks to add
+ thread_callbacks: bool
+ whether to run each callback in a separate thread
+ deserialize: bool
+ whether to deserialize the event payload
+ """
+ raise NotImplementedError("implement subscribe per protocol")
+
+ def unsubscribe(self, join_thread: bool = True):
+ """
+ unsubscribe from the event
+
+ Parameters
+ ----------
+ join_thread: bool
+ whether to join the event thread after unsubscribing
+ """
+ raise NotImplementedError("implement unsubscribe per protocol")
+
+ def listen(self):
+ """
+ listen to events and call the callbacks
+ """
+ raise NotImplementedError("implement listen per protocol")
+
+
+
+def raise_local_exception(error_message : typing.Dict[str, typing.Any]) -> None:
+ """
+    raises an exception on the client side by mapping the exception information sent by the server
+    to the corresponding built-in exception type.
+
+ Parameters
+ ----------
+    error_message: Dict[str, Any]
+ exception dictionary made by server with following keys - type, message, traceback, notes
+ """
+ if isinstance(error_message, Exception):
+ raise error_message from None
+ elif isinstance(error_message, dict) and 'exception' in error_message.keys():
+ error_message = error_message["exception"]
+ message = error_message["message"]
+ exc = getattr(builtins, error_message["type"], None)
+ if exc is None:
+ ex = Exception(message)
+ else:
+ ex = exc(message)
+ error_message["traceback"][0] = f"Server {error_message['traceback'][0]}"
+ ex.__notes__ = error_message["traceback"][0:-1]
+ raise ex from None
+ elif isinstance(error_message, str) and error_message in ['invokation', 'execution']:
+ raise TimeoutError(f"{error_message[0].upper()}{error_message[1:]} timeout occured. Server did not respond within specified timeout") from None
+ raise RuntimeError("unknown error occurred on server side") from None
\ No newline at end of file
diff --git a/hololinked/client/exceptions.py b/hololinked/client/exceptions.py
new file mode 100644
index 00000000..d3f8cca1
--- /dev/null
+++ b/hololinked/client/exceptions.py
@@ -0,0 +1,4 @@
+
+
+class ReplyNotArrivedError(Exception):
+ pass
\ No newline at end of file
diff --git a/hololinked/client/factory.py b/hololinked/client/factory.py
new file mode 100644
index 00000000..38d08b1d
--- /dev/null
+++ b/hololinked/client/factory.py
@@ -0,0 +1,116 @@
+import uuid
+
+from .abstractions import ConsumedThingAction, ConsumedThingProperty, ConsumedThingEvent
+from .zmq.consumed_interactions import ZMQAction, ZMQEvent, ZMQProperty, WriteMultipleProperties, ReadMultipleProperties
+from ..core.zmq import SyncZMQClient, AsyncZMQClient
+from ..core import Thing, Action
+from ..td.interaction_affordance import PropertyAffordance, ActionAffordance, EventAffordance
+
+
+
+class ClientFactory:
+
+ __allowed_attribute_types__ = (ConsumedThingProperty, ConsumedThingAction, ConsumedThingEvent)
+ __WRAPPER_ASSIGNMENTS__ = ('__name__', '__qualname__', '__doc__')
+
+ @classmethod
+ def zmq(self, server_id: str, thing_id: str, protocol: str, **kwargs):
+ from .proxy import ObjectProxy
+ id = f"{server_id}|{thing_id}|{protocol}|{uuid.uuid4()}"
+ object_proxy = ObjectProxy(id, **kwargs)
+ sync_zmq_client = SyncZMQClient(
+ f"{id}|sync",
+ server_id=server_id,
+ logger=object_proxy.logger,
+ **kwargs
+ )
+ async_zmq_client = AsyncZMQClient(
+ f"{id}|async",
+ server_id=server_id,
+ logger=object_proxy.logger,
+ **kwargs
+ )
+ assert isinstance(Thing.get_thing_model, Action)
+ FetchTDAffordance = Thing.get_thing_model.to_affordance()
+ FetchTDAffordance._thing_id = thing_id
+ FetchTD = ZMQAction(
+ resource=FetchTDAffordance,
+ sync_client=sync_zmq_client,
+ async_client=async_zmq_client,
+ )
+ TD = FetchTD(ignore_errors=True)
+ object_proxy.td = TD
+ for name in TD["properties"]:
+ affordance = PropertyAffordance.from_TD(name, TD)
+ consumed_property = ZMQProperty(
+ resource=affordance,
+ sync_client=sync_zmq_client,
+ async_client=async_zmq_client,
+ owner_inst=object_proxy,
+ invokation_timeout=object_proxy.invokation_timeout,
+ execution_timeout=object_proxy.execution_timeout,
+ )
+ self.add_property(object_proxy, consumed_property)
+ for action in TD["actions"]:
+ affordance = ActionAffordance.from_TD(action, TD)
+ consumed_action = ZMQAction(
+ resource=affordance,
+ sync_client=sync_zmq_client,
+ async_client=async_zmq_client,
+ owner_inst=object_proxy,
+ invokation_timeout=object_proxy.invokation_timeout,
+ execution_timeout=object_proxy.execution_timeout,
+ )
+ self.add_action(object_proxy, consumed_action)
+ for event in TD["events"]:
+ affordance = EventAffordance.from_TD(event, TD)
+ consumed_event = ZMQEvent(
+ resource=affordance,
+ sync_zmq_client=sync_zmq_client,
+ async_zmq_client=async_zmq_client,
+ owner_inst=object_proxy,
+ invokation_timeout=object_proxy.invokation_timeout,
+ execution_timeout=object_proxy.execution_timeout,
+ )
+ self.add_event(object_proxy, consumed_event)
+ for opname, ophandler in zip(['_get_properties', '_set_properties'], [ReadMultipleProperties, WriteMultipleProperties]):
+ setattr(
+ object_proxy,
+ opname,
+ ophandler(
+ sync_client=sync_zmq_client,
+ async_client=async_zmq_client,
+ owner_inst=object_proxy
+ )
+ )
+ return object_proxy
+
+ @classmethod
+ def add_action(self, client, action: ConsumedThingAction) -> None:
+ # if not func_info.top_owner:
+ # return
+ # raise RuntimeError("logic error")
+ # for dunder in ClientFactory.__WRAPPER_ASSIGNMENTS__:
+ # if dunder == '__qualname__':
+ # info = '{}.{}'.format(client.__class__.__name__, func_info.get_dunder_attr(dunder).split('.')[1])
+ # else:
+ # info = func_info.get_dunder_attr(dunder)
+ # setattr(action, dunder, info)
+ setattr(client, action._resource.name, action)
+
+ @classmethod
+ def add_property(self, client, property: ConsumedThingProperty) -> None:
+ # if not property_info.top_owner:
+ # return
+ # raise RuntimeError("logic error")
+ # for attr in ['__doc__', '__name__']:
+ # # just to imitate _add_method logic
+ # setattr(property, attr, property_info.get_dunder_attr(attr))
+ setattr(client, property._resource.name, property)
+
+ @classmethod
+ def add_event(cls, client, event: ConsumedThingEvent) -> None:
+ setattr(client, event._resource.name, event)
+
+
+
diff --git a/tests/__init__.py b/hololinked/client/http/__init__.py
similarity index 100%
rename from tests/__init__.py
rename to hololinked/client/http/__init__.py
diff --git a/hololinked/client/http/client.py b/hololinked/client/http/client.py
new file mode 100644
index 00000000..4b651886
--- /dev/null
+++ b/hololinked/client/http/client.py
@@ -0,0 +1,338 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+Classes that contain the client logic for the HTTP protocol.
+"""
+
+import asyncio
+import json
+import logging
+import time
+import urllib.parse as parse
+
+import tornado.httpclient
+# from rx import Observable
+from tornado.simple_httpclient import HTTPTimeoutError
+
+from hololinked.client.abstractions import ConsumedThingAction
+from hololinked.td.interaction_affordance import ActionAffordance
+
+# from wotpy.protocols.client import BaseProtocolClient
+# from wotpy.protocols.enums import InteractionVerbs, Protocols
+# from wotpy.protocols.exceptions import ClientRequestTimeout, FormNotFoundException
+# from wotpy.protocols.http.enums import HTTPSchemes
+# from wotpy.protocols.utils import is_scheme_form
+# from wotpy.utils.utils import handle_observer_finalization
+# from wotpy.wot.events import (
+# EmittedEvent,
+# PropertyChangeEmittedEvent,
+# PropertyChangeEventInit,
+# )
+
+
+class HTTPClient(BaseProtocolClient):
+ """Implementation of the protocol client interface for the HTTP protocol."""
+
+ JSON_HEADERS = {"Content-Type": "application/json"}
+ DEFAULT_CON_TIMEOUT = 60
+ DEFAULT_REQ_TIMEOUT = 60
+
+ def __init__(
+ self, connect_timeout=DEFAULT_CON_TIMEOUT, request_timeout=DEFAULT_REQ_TIMEOUT
+ ):
+ self._connect_timeout = connect_timeout
+ self._request_timeout = request_timeout
+ self._logr = logging.getLogger(__name__)
+ super(HTTPClient, self).__init__()
+
+ @classmethod
+ def pick_http_href(cls, td, forms, op=None):
+ """Picks the most appropriate HTTP form href from the given list of forms."""
+
+ def is_op_form(form):
+ try:
+ return op is None or op == form.op or op in form.op
+ except TypeError:
+ return False
+
+ def find_href(scheme):
+ try:
+ return next(
+ form.href
+ for form in forms
+ if is_scheme_form(form, td.base, scheme) and is_op_form(form)
+ )
+ except StopIteration:
+ return None
+
+ form_https = find_href(HTTPSchemes.HTTPS)
+
+ return form_https if form_https is not None else find_href(HTTPSchemes.HTTP)
+
+ @property
+ def protocol(self):
+ """Protocol of this client instance.
+ A member of the Protocols enum."""
+
+ return Protocols.HTTP
+
+ @property
+ def connect_timeout(self):
+ """Returns the default connection timeout for all HTTP requests."""
+
+ return self._connect_timeout
+
+ @property
+ def request_timeout(self):
+ """Returns the default request timeout for all HTTP requests."""
+
+ return self._request_timeout
+
+ def is_supported_interaction(self, td, name):
+ """Returns True if the any of the Forms for the Interaction
+ with the given name is supported in this Protocol Binding client."""
+
+ forms = td.get_forms(name)
+
+ forms_http = [
+ form for form in forms if is_scheme_form(form, td.base, HTTPSchemes.list())
+ ]
+
+ return len(forms_http) > 0
+
+
+
+class Action(ConsumedThingAction):
+
+ def __init__(self,
+ resource: ActionAffordance,
+ connect_timeout: int = 60,
+ request_timeout: int = 60,
+ **kwargs
+ ) -> None:
+ super().__init__(resource, **kwargs)
+ self._connect_timeout = connect_timeout
+ self._request_timeout = request_timeout
+
+ async def __call__(self, *args, **kwargs):
+
+ # td, name, input_value, timeout=None):
+ # """Invokes an Action on a remote Thing.
+ # Returns a Future."""
+
+ now = time.time()
+
+ href = self.pick_http_href(td, td.get_action_forms(name))
+
+ if href is None:
+ raise FormNotFoundException()
+
+ body = json.dumps({"input": input_value})
+ http_client = tornado.httpclient.AsyncHTTPClient()
+
+ try:
+ http_request = tornado.httpclient.HTTPRequest(
+ href,
+ method=self.invokation_form["htv:methodName"] or "POST",
+ body=body,
+ headers=self.JSON_HEADERS,
+ connect_timeout=self._connect_timeout,
+ request_timeout=self._request_timeout,
+ )
+ except HTTPTimeoutError as ex:
+ raise ClientRequestTimeout from ex
+
+ response = await http_client.fetch(http_request)
+ invocation_url = json.loads(response.body).get("invocation")
+
+ async def check_invocation():
+ parsed = parse.urlparse(href)
+
+ invoc_href = "{}://{}/{}".format(
+ parsed.scheme, parsed.netloc, invocation_url.lstrip("/")
+ )
+
+ invoc_http_req = tornado.httpclient.HTTPRequest(
+ invoc_href,
+ method="GET",
+ connect_timeout=con_timeout,
+ request_timeout=req_timeout,
+ )
+
+ self._logr.debug("Checking invocation: {}".format(invocation_url))
+
+ try:
+ invoc_res = await http_client.fetch(invoc_http_req)
+ except HTTPTimeoutError:
+ self._logr.debug(
+ "Timeout checking invocation: {}".format(invocation_url)
+ )
+ return (False, None)
+
+ status = json.loads(invoc_res.body)
+
+ if status.get("done") is False:
+ return (False, None)
+
+ if status.get("error") is not None:
+ return (True, Exception(status.get("error")))
+ else:
+ return (True, status.get("result"))
+
+ while True:
+ done, result = await check_invocation()
+
+ if done and isinstance(result, Exception):
+ raise result
+ elif done:
+ return result
+ elif timeout and (time.time() - now) > timeout:
+ raise ClientRequestTimeout
+
+
+class Property(ConsumedThingProperty):
+
+ async def write_property(self, td, name, value, timeout=None):
+ """Updates the value of a Property on a remote Thing.
+ Returns a Future."""
+
+ con_timeout = timeout if timeout else self._connect_timeout
+ req_timeout = timeout if timeout else self._request_timeout
+
+ href = self.pick_http_href(td, td.get_property_forms(name))
+
+ if href is None:
+ raise FormNotFoundException()
+
+ http_client = tornado.httpclient.AsyncHTTPClient()
+ body = json.dumps({"value": value})
+
+ try:
+ http_request = tornado.httpclient.HTTPRequest(
+ href,
+ method="PUT",
+ body=body,
+ headers=self.JSON_HEADERS,
+ connect_timeout=con_timeout,
+ request_timeout=req_timeout,
+ )
+ except HTTPTimeoutError as ex:
+ raise ClientRequestTimeout from ex
+
+ await http_client.fetch(http_request)
+
+ async def read_property(self, td, name, timeout=None):
+ """Reads the value of a Property on a remote Thing.
+ Returns a Future."""
+
+ con_timeout = timeout if timeout else self._connect_timeout
+ req_timeout = timeout if timeout else self._request_timeout
+
+ href = self.pick_http_href(td, td.get_property_forms(name))
+
+ if href is None:
+ raise FormNotFoundException()
+
+ http_client = tornado.httpclient.AsyncHTTPClient()
+
+ try:
+ http_request = tornado.httpclient.HTTPRequest(
+ href,
+ method="GET",
+ connect_timeout=con_timeout,
+ request_timeout=req_timeout,
+ )
+ except HTTPTimeoutError as ex:
+ raise ClientRequestTimeout from ex
+
+ response = await http_client.fetch(http_request)
+ result = json.loads(response.body)
+ result = result.get("value", result)
+
+ return result
+
+
+class Event(ConsumedThingEvent):
+
+ def on_event(self, td, name):
+ """Subscribes to an event on a remote Thing.
+ Returns an Observable."""
+
+ href = self.pick_http_href(td, td.get_event_forms(name))
+
+ if href is None:
+ raise FormNotFoundException()
+
+ def subscribe(observer):
+ """Subscription function to observe events using the HTTP protocol."""
+
+ state = {"active": True}
+
+ @handle_observer_finalization(observer)
+ async def callback():
+ http_client = tornado.httpclient.AsyncHTTPClient()
+ http_request = tornado.httpclient.HTTPRequest(href, method="GET")
+
+ while state["active"]:
+ try:
+ response = await http_client.fetch(http_request)
+ payload = json.loads(response.body).get("payload")
+ observer.on_next(EmittedEvent(init=payload, name=name))
+ except HTTPTimeoutError:
+ pass
+
+ def unsubscribe():
+ state["active"] = False
+
+ asyncio.create_task(callback())
+
+ return unsubscribe
+
+ return Observable.create(subscribe)
+
+ def on_property_change(self, td, name):
+ """Subscribes to property changes on a remote Thing.
+ Returns an Observable"""
+
+ href = self.pick_http_href(
+ td, td.get_property_forms(name), op=InteractionVerbs.OBSERVE_PROPERTY
+ )
+
+ if href is None:
+ raise FormNotFoundException()
+
+ def subscribe(observer):
+ """Subscription function to observe property updates using the HTTP protocol."""
+
+ state = {"active": True}
+
+ @handle_observer_finalization(observer)
+ async def callback():
+ http_client = tornado.httpclient.AsyncHTTPClient()
+ http_request = tornado.httpclient.HTTPRequest(href, method="GET")
+
+ while state["active"]:
+ try:
+ response = await http_client.fetch(http_request)
+ value = json.loads(response.body)
+ value = value.get("value", value)
+ init = PropertyChangeEventInit(name=name, value=value)
+ observer.on_next(PropertyChangeEmittedEvent(init=init))
+ except HTTPTimeoutError:
+ pass
+
+ def unsubscribe():
+ state["active"] = False
+
+ asyncio.create_task(callback())
+
+ return unsubscribe
+
+ return Observable.create(subscribe)
+
+ def on_td_change(self, url):
+ """Subscribes to Thing Description changes on a remote Thing.
+ Returns an Observable."""
+
+ raise NotImplementedError
diff --git a/hololinked/client/proxy.py b/hololinked/client/proxy.py
index a4a4d6db..d5285700 100644
--- a/hololinked/client/proxy.py
+++ b/hololinked/client/proxy.py
@@ -1,16 +1,15 @@
+import builtins
import threading
import warnings
import typing
import logging
import uuid
+from ..constants import ZMQ_TRANSPORTS
+from .abstractions import ConsumedThingAction, ConsumedThingProperty, ConsumedThingEvent
+from .exceptions import ReplyNotArrivedError
+from .factory import ClientFactory
-from ..server.config import global_config
-from ..server.constants import JSON, CommonRPC, ServerMessage, ResourceTypes, ZMQ_PROTOCOLS
-from ..server.serializers import BaseSerializer
-from ..server.dataklasses import ZMQResource, ServerSentEvent
-from ..server.zmq_message_brokers import AsyncZMQClient, SyncZMQClient, EventConsumer, PROXY
-from ..server.schema_validators import BaseSchemaValidator
class ObjectProxy:
@@ -20,7 +19,7 @@ class ObjectProxy:
Parameters
----------
- instance_name: str
+ id: str
instance name of the server to connect.
invokation_timeout: float, int
timeout to schedule a method call or property read/write in server. execution time wait is controlled by
@@ -52,57 +51,55 @@ class ObjectProxy:
_own_attrs = frozenset([
'__annotations__',
- 'zmq_client', 'async_zmq_client', '_allow_foreign_attributes',
- 'identity', 'instance_name', 'logger', 'execution_timeout', 'invokation_timeout',
- '_execution_timeout', '_invokation_timeout', '_events', '_noblock_messages',
+ '_allow_foreign_attributes',
+ 'id', 'logger', 'td',
+ 'execution_timeout', 'invokation_timeout', '_execution_timeout', '_invokation_timeout',
+ '_events',
+ '_noblock_messages',
'_schema_validator'
])
- def __init__(self, instance_name : str, protocol : str = ZMQ_PROTOCOLS.IPC, invokation_timeout : float = 5,
- load_thing = True, **kwargs) -> None:
+ def __init__(self,
+ id: str,
+ **kwargs
+ ) -> None:
self._allow_foreign_attributes = kwargs.get('allow_foreign_attributes', False)
- self.instance_name = instance_name
- self.invokation_timeout = invokation_timeout
- self.execution_timeout = kwargs.get("execution_timeout", None)
- self.identity = f"{instance_name}|{uuid.uuid4()}"
- self.logger = kwargs.pop('logger', logging.Logger(self.identity, level=kwargs.get('log_level', logging.INFO)))
- self._noblock_messages = dict()
+ self._noblock_messages = dict() # type: typing.Dict[str, ConsumedThingAction | ConsumedThingProperty]
self._schema_validator = kwargs.get('schema_validator', None)
+ self.id = id
+ self.logger = kwargs.pop(
+ 'logger',
+ logging.Logger(self.id, level=kwargs.get('log_level', logging.INFO))
+ )
+ self.invokation_timeout = kwargs.get("invokation_timeout", 5)
+ self.execution_timeout = kwargs.get("execution_timeout", 5)
+ self.td = kwargs.get('td', dict()) # type: typing.Dict[str, typing.Any]
# compose ZMQ client in Proxy client so that all sending and receiving is
# done by the ZMQ client and not by the Proxy client directly. Proxy client only
# bothers mainly about __setattr__ and _getattr__
- self.async_zmq_client = None
- self.zmq_client = SyncZMQClient(instance_name, self.identity, client_type=PROXY, protocol=protocol,
- zmq_serializer=kwargs.get('serializer', None), handshake=load_thing,
- logger=self.logger, **kwargs)
- if kwargs.get("async_mixin", False):
- self.async_zmq_client = AsyncZMQClient(instance_name, self.identity + '|async', client_type=PROXY, protocol=protocol,
- zmq_serializer=kwargs.get('serializer', None), handshake=load_thing,
- logger=self.logger, **kwargs)
- if load_thing:
- self.load_thing()
+ # ClientFactory.zmq(self, **kwargs)
def __getattribute__(self, __name: str) -> typing.Any:
obj = super().__getattribute__(__name)
- if isinstance(obj, _Property):
+ if isinstance(obj, ConsumedThingProperty):
return obj.get()
return obj
def __setattr__(self, __name : str, __value : typing.Any) -> None:
if (__name in ObjectProxy._own_attrs or (__name not in self.__dict__ and
- isinstance(__value, __allowed_attribute_types__)) or self._allow_foreign_attributes):
- # allowed attribute types are _Property and _RemoteMethod defined after this class
+ isinstance(__value, ClientFactory.__allowed_attribute_types__)) or self._allow_foreign_attributes):
+ # allowed attribute types are ConsumedThingProperty and ConsumedThingAction defined after this class
return super(ObjectProxy, self).__setattr__(__name, __value)
elif __name in self.__dict__:
obj = self.__dict__[__name]
- if isinstance(obj, _Property):
+ if isinstance(obj, ConsumedThingProperty):
obj.set(value=__value)
return
- raise AttributeError(f"Cannot set attribute {__name} again to ObjectProxy for {self.instance_name}.")
- raise AttributeError(f"Cannot set foreign attribute {__name} to ObjectProxy for {self.instance_name}. Given attribute not found in server object.")
+ raise AttributeError(f"Cannot set attribute {__name} again to ObjectProxy for {self.id}.")
+ raise AttributeError(f"Cannot set foreign attribute {__name} to ObjectProxy for {self.id}. Given attribute not found in server object.")
def __repr__(self) -> str:
- return f'ObjectProxy {self.identity}'
+ return f'ObjectProxy {self.id}'
def __enter__(self):
return self
@@ -111,26 +108,27 @@ def __exit__(self, exc_type, exc_value, traceback):
pass
def __bool__(self) -> bool:
- try:
- self.zmq_client.handshake(num_of_tries=10)
- return True
- except RuntimeError:
- return False
+ raise NotImplementedError("Cannot convert ObjectProxy to bool. Use is_connected() instead.")
+ # try:
+ # self.zmq_client.handshake(num_of_tries=10)
+ # return True
+ # except RuntimeError:
+ # return False
def __eq__(self, other) -> bool:
if other is self:
return True
- return (isinstance(other, ObjectProxy) and other.instance_name == self.instance_name and
+ return (isinstance(other, ObjectProxy) and other.id == self.id and
other.zmq_client.protocol == self.zmq_client.protocol)
def __ne__(self, other) -> bool:
if other and isinstance(other, ObjectProxy):
- return (other.instance_name != self.instance_name or
+ return (other.id != self.id or
other.zmq_client.protocol != self.zmq_client.protocol)
return True
def __hash__(self) -> int:
- return hash(self.identity)
+ return hash(self.id)
def get_invokation_timeout(self) -> typing.Union[float, int]:
return self._invokation_timeout
@@ -163,9 +161,19 @@ def set_execution_timeout(self, value : typing.Union[float, int]) -> None:
"Defaults to None (i.e. waits indefinitely until return) and network times not considered."
)
+ # @abstractmethod
+ # def is_supported_interaction(self, td, name):
+ # """Returns True if the any of the Forms for the Interaction
+ # with the given name is supported in this Protocol Binding client."""
+ # raise NotImplementedError()
- def invoke_action(self, method : str, oneway : bool = False, noblock : bool = False,
- *args, **kwargs) -> typing.Any:
+
+ def invoke_action(
+ self,
+ name: str,
+ *args,
+ **kwargs
+ ) -> typing.Any:
"""
call a method specified by name on the server with positional/keyword arguments
@@ -194,20 +202,25 @@ def invoke_action(self, method : str, oneway : bool = False, noblock : bool = Fa
Exception:
server raised exception are propagated
"""
- method = getattr(self, method, None) # type: _RemoteMethod
- if not isinstance(method, _RemoteMethod):
- raise AttributeError(f"No remote method named {method}")
- if oneway:
+ method = getattr(self, name, None) # type: ConsumedThingAction
+ if not isinstance(method, ConsumedThingAction):
+ raise AttributeError(f"No remote method named {name} in Thing {self.td['id']}")
+ oneway = kwargs.pop('oneway', False)
+ noblock = kwargs.pop('noblock', False)
+ if noblock:
+ return method.noblock(*args, **kwargs)
+ elif oneway:
method.oneway(*args, **kwargs)
- elif noblock:
- msg_id = method.noblock(*args, **kwargs)
- self._noblock_messages[msg_id] = method
- return msg_id
else:
return method(*args, **kwargs)
- async def async_invoke_action(self, method : str, *args, **kwargs) -> typing.Any:
+ async def async_invoke_action(
+ self,
+ name: str,
+ *args,
+ **kwargs
+ ) -> typing.Any:
"""
async(io) call a method specified by name on the server with positional/keyword
arguments. noblock and oneway not supported for async calls.
@@ -235,13 +248,13 @@ async def async_invoke_action(self, method : str, *args, **kwargs) -> typing.Any
Exception:
server raised exception are propagated
"""
- method = getattr(self, method, None) # type: _RemoteMethod
- if not isinstance(method, _RemoteMethod):
+ method = getattr(self, name, None) # type: ConsumedThingAction
+ if not isinstance(method, ConsumedThingAction):
raise AttributeError(f"No remote method named {method}")
return await method.async_call(*args, **kwargs)
- def read_property(self, name : str, noblock : bool = False) -> typing.Any:
+ def read_property(self, name: str, noblock: bool = False) -> typing.Any:
"""
get property specified by name on server.
@@ -259,19 +272,22 @@ def read_property(self, name : str, noblock : bool = False) -> typing.Any:
Exception:
server raised exception are propagated
"""
- prop = self.__dict__.get(name, None) # type: _Property
- if not isinstance(prop, _Property):
+ prop = self.__dict__.get(name, None) # type: ConsumedThingProperty
+ if not isinstance(prop, ConsumedThingProperty):
raise AttributeError(f"No property named {prop}")
if noblock:
- msg_id = prop.noblock_get()
- self._noblock_messages[msg_id] = prop
- return msg_id
+ return prop.noblock_get()
else:
return prop.get()
- def write_property(self, name : str, value : typing.Any, oneway : bool = False,
- noblock : bool = False) -> None:
+ def write_property(
+ self,
+ name: str,
+ value: typing.Any,
+ oneway: bool = False,
+ noblock: bool = False
+ ) -> None:
"""
set property specified by name on server with specified value.
@@ -294,20 +310,18 @@ def write_property(self, name : str, value : typing.Any, oneway : bool = False,
Exception:
server raised exception are propagated
"""
- prop = self.__dict__.get(name, None) # type: _Property
- if not isinstance(prop, _Property):
+ prop = self.__dict__.get(name, None) # type: ConsumedThingProperty
+ if not isinstance(prop, ConsumedThingProperty):
raise AttributeError(f"No property named {prop}")
if oneway:
prop.oneway_set(value)
elif noblock:
- msg_id = prop.noblock_set(value)
- self._noblock_messages[msg_id] = prop
- return msg_id
+ return prop.noblock_set(value)
else:
prop.set(value)
- async def async_read_property(self, name : str) -> None:
+ async def async_read_property(self, name: str) -> None:
"""
async(io) get property specified by name on server.
@@ -323,13 +337,13 @@ async def async_read_property(self, name : str) -> None:
Exception:
server raised exception are propagated
"""
- prop = self.__dict__.get(name, None) # type: _Property
- if not isinstance(prop, _Property):
+ prop = self.__dict__.get(name, None) # type: ConsumedThingProperty
+ if not isinstance(prop, ConsumedThingProperty):
raise AttributeError(f"No property named {prop}")
return await prop.async_get()
- async def async_write_property(self, name : str, value : typing.Any) -> None:
+ async def async_write_property(self, name: str, value: typing.Any) -> None:
"""
async(io) set property specified by name on server with specified value.
noblock and oneway not supported for async calls.
@@ -348,13 +362,13 @@ async def async_write_property(self, name : str, value : typing.Any) -> None:
Exception:
server raised exception are propagated
"""
- prop = self.__dict__.get(name, None) # type: _Property
- if not isinstance(prop, _Property):
+ prop = self.__dict__.get(name, None) # type: ConsumedThingProperty
+ if not isinstance(prop, ConsumedThingProperty):
raise AttributeError(f"No property named {prop}")
await prop.async_set(value)
- def read_multiple_properties(self, names : typing.List[str], noblock : bool = False) -> typing.Any:
+ def read_multiple_properties(self, names: typing.List[str], noblock: bool = False) -> typing.Any:
"""
get properties specified by list of names.
@@ -370,19 +384,21 @@ def read_multiple_properties(self, names : typing.List[str], noblock : bool = Fa
Dict[str, Any]:
dictionary with names as keys and values corresponding to those keys
"""
- method = getattr(self, '_get_properties', None) # type: _RemoteMethod
+ method = getattr(self, '_get_properties', None) # type: ConsumedThingAction
if not method:
raise RuntimeError("Client did not load server resources correctly. Report issue at github.")
if noblock:
- msg_id = method.noblock(names=names)
- self._noblock_messages[msg_id] = method
- return msg_id
+ return method.noblock(names=names)
else:
return method(names=names)
- def write_multiple_properties(self, oneway : bool = False, noblock : bool = False,
- **properties : typing.Dict[str, typing.Any]) -> None:
+ def write_multiple_properties(
+ self,
+ oneway: bool = False,
+ noblock: bool = False,
+ **properties : typing.Any
+ ) -> None:
"""
set properties whose name is specified by keys of a dictionary
@@ -405,20 +421,18 @@ def write_multiple_properties(self, oneway : bool = False, noblock : bool = Fals
"""
if len(properties) == 0:
raise ValueError("no properties given to set_properties")
- method = getattr(self, '_set_properties', None) # type: _RemoteMethod
+ method = getattr(self, '_set_properties', None) # type: ConsumedThingAction
if not method:
raise RuntimeError("Client did not load server resources correctly. Report issue at github.")
if oneway:
method.oneway(**properties)
elif noblock:
- msg_id = method.noblock(**properties)
- self._noblock_messages[msg_id] = method
- return msg_id
+ return method.noblock(**properties)
else:
return method(**properties)
- async def async_read_multiple_properties(self, names) -> None:
+ async def async_read_multiple_properties(self, names: typing.List[str]) -> None:
"""
async(io) get properties specified by list of names. no block gets are not supported for asyncio.
@@ -432,13 +446,13 @@ async def async_read_multiple_properties(self, names) -> None:
Dict[str, Any]:
dictionary with property names as keys and values corresponding to those keys
"""
- method = getattr(self, '_get_properties', None) # type: _RemoteMethod
+ method = getattr(self, '_get_properties', None) # type: ConsumedThingAction
if not method:
raise RuntimeError("Client did not load server resources correctly. Report issue at github.")
return await method.async_call(names=names)
- async def async_write_multiple_properties(self, **properties) -> None:
+ async def async_write_multiple_properties(self, **properties: typing.Any) -> None:
"""
async(io) set properties whose name is specified by keys of a dictionary
@@ -456,14 +470,44 @@ async def async_write_multiple_properties(self, **properties) -> None:
"""
if len(properties) == 0:
raise ValueError("no properties given to set_properties")
- method = getattr(self, '_set_properties', None) # type: _RemoteMethod
+ method = getattr(self, '_set_properties', None) # type: ConsumedThingAction
if not method:
raise RuntimeError("Client did not load server resources correctly. Report issue at github.")
await method.async_call(**properties)
- def subscribe_event(self, name : str, callbacks : typing.Union[typing.List[typing.Callable], typing.Callable],
- thread_callbacks : bool = False, deserialize : bool = True) -> None:
+ def observe_property(
+ self,
+ name: str,
+ callbacks: typing.Union[typing.List[typing.Callable], typing.Callable],
+ thread_callbacks: bool = False,
+ deserialize: bool = True
+ ) -> None:
+ raise NotImplementedError("observe_property not implemented yet.")
+
+ def unobserve_property(self, name: str) -> None:
+ """
+ Unsubscribe from change updates of the property specified by name.
+
+ Parameters
+ ----------
+ name: str
+ name of the property
+
+ Raises
+ ------
+ NotImplementedError
+ """
+ raise NotImplementedError("unobserve_property not implemented yet.")
+
+
+ def subscribe_event(
+ self,
+ name: str,
+ callbacks: typing.Union[typing.List[typing.Callable], typing.Callable],
+ thread_callbacks: bool = False,
+ deserialize: bool = True
+ ) -> None:
"""
Subscribe to event specified by name. Events are listened in separate threads and supplied callbacks are
are also called in those threads.
@@ -483,16 +527,17 @@ def subscribe_event(self, name : str, callbacks : typing.Union[typing.List[typin
AttributeError:
if no event with specified name is found
"""
- event = getattr(self, name, None) # type: _Event
- if not isinstance(event, _Event):
+ event = getattr(self, name, None) # type: ConsumedThingEvent
+ if not isinstance(event, ConsumedThingEvent):
raise AttributeError(f"No event named {name}")
+ event._deserialize = deserialize
if event._subscribed:
event.add_callbacks(callbacks)
else:
event.subscribe(callbacks, thread_callbacks, deserialize)
- def unsubscribe_event(self, name : str):
+ def unsubscribe_event(self, name: str):
"""
Unsubscribe to event specified by name.
@@ -510,354 +555,59 @@ def unsubscribe_event(self, name : str):
AttributeError:
if no event with specified name is found
"""
- event = getattr(self, name, None) # type: _Event
- if not isinstance(event, _Event):
+ event = getattr(self, name, None) # type: ConsumedThingEvent
+ if not isinstance(event, ConsumedThingEvent):
raise AttributeError(f"No event named {name}")
event.unsubscribe()
- def read_reply(self, message_id : bytes, timeout : typing.Optional[float] = 5000) -> typing.Any:
+ def read_reply(self, message_id: str, timeout: typing.Optional[float] = 5000) -> typing.Any:
"""
read reply of no block calls of an action or a property read/write.
"""
obj = self._noblock_messages.get(message_id, None)
if not obj:
raise ValueError('given message id not a one way call or invalid.')
- reply = self.zmq_client._reply_cache.get(message_id, None)
- if not reply:
- reply = self.zmq_client.recv_reply(message_id=message_id, timeout=timeout,
- raise_client_side_exception=True)
- if not reply:
- raise ReplyNotArrivedError(f"could not fetch reply within timeout for message id '{message_id}'")
- if isinstance(obj, _RemoteMethod):
- obj._last_return_value = reply
- return obj.last_return_value # note the missing underscore
- elif isinstance(obj, _Property):
- obj._last_value = reply
- return obj.last_read_value
-
-
- def load_thing(self):
- """
- Get exposed resources from server (methods, properties, events) and remember them as attributes of the proxy.
- """
- fetch = _RemoteMethod(self.zmq_client, CommonRPC.zmq_resource_read(instance_name=self.instance_name),
- invokation_timeout=self._invokation_timeout) # type: _RemoteMethod
- reply = fetch() # type: typing.Dict[str, typing.Dict[str, typing.Any]]
-
- for name, data in reply.items():
- if isinstance(data, dict):
- try:
- if data["what"] == ResourceTypes.EVENT:
- data = ServerSentEvent(**data)
- else:
- data = ZMQResource(**data)
- except Exception as ex:
- ex.add_note("Did you correctly configure your serializer? " +
- "This exception occurs when given serializer does not work the same way as server serializer")
- raise ex from None
- elif not isinstance(data, (ZMQResource, ServerSentEvent)):
- raise RuntimeError("Logic error - deserialized info about server not instance of hololinked.server.data_classes.ZMQResource")
- if data.what == ResourceTypes.ACTION:
- _add_method(self, _RemoteMethod(self.zmq_client, data.instruction, self.invokation_timeout,
- self.execution_timeout, data.argument_schema, self.async_zmq_client, self._schema_validator), data)
- elif data.what == ResourceTypes.PROPERTY:
- _add_property(self, _Property(self.zmq_client, data.instruction, self.invokation_timeout,
- self.execution_timeout, self.async_zmq_client), data)
- elif data.what == ResourceTypes.EVENT:
- assert isinstance(data, ServerSentEvent)
- event = _Event(self.zmq_client, data.name, data.obj_name, data.unique_identifier, data.socket_address,
- serialization_specific=data.serialization_specific, serializer=self.zmq_client.zmq_serializer, logger=self.logger)
- _add_event(self, event, data)
- self.__dict__[data.name] = event
-
-
-
-
-
-# SM = Server Message
-SM_INDEX_ADDRESS = ServerMessage.ADDRESS.value
-SM_INDEX_SERVER_TYPE = ServerMessage.SERVER_TYPE.value
-SM_INDEX_MESSAGE_TYPE = ServerMessage.MESSAGE_TYPE.value
-SM_INDEX_MESSAGE_ID = ServerMessage.MESSAGE_ID.value
-SM_INDEX_DATA = ServerMessage.DATA.value
-SM_INDEX_ENCODED_DATA = ServerMessage.ENCODED_DATA.value
-
-class _RemoteMethod:
-
- __slots__ = ['_zmq_client', '_async_zmq_client', '_instruction', '_invokation_timeout', '_execution_timeout',
- '_schema', '_schema_validator', '_last_return_value', '__name__', '__qualname__', '__doc__']
- # method call abstraction
- # Dont add doc otherwise __doc__ in slots will conflict with class variable
-
- def __init__(self, sync_client : SyncZMQClient, instruction : str, invokation_timeout : typing.Optional[float] = 5,
- execution_timeout : typing.Optional[float] = None, argument_schema : typing.Optional[JSON] = None,
- async_client : typing.Optional[AsyncZMQClient] = None,
- schema_validator : typing.Optional[typing.Type[BaseSchemaValidator]] = None) -> None:
- """
- Parameters
- ----------
- sync_client: SyncZMQClient
- synchronous ZMQ client
- async_zmq_client: AsyncZMQClient
- asynchronous ZMQ client for async calls
- instruction: str
- The instruction needed to call the method
- """
- self._zmq_client = sync_client
- self._async_zmq_client = async_client
- self._instruction = instruction
- self._invokation_timeout = invokation_timeout
- self._execution_timeout = execution_timeout
- self._schema = argument_schema
- self._schema_validator = schema_validator(self._schema) if schema_validator and argument_schema and global_config.validate_schema_on_client else None
-
- @property # i.e. cannot have setter
- def last_return_value(self):
+ return obj.read_reply(message_id=message_id, timeout=timeout)
+
+
+ @property
+ def properties(self) -> typing.List[ConsumedThingProperty]:
"""
- cached return value of the last call to the method
+ list of properties in the server object
"""
- if len(self._last_return_value[SM_INDEX_ENCODED_DATA]) > 0:
- return self._last_return_value[SM_INDEX_ENCODED_DATA]
- return self._last_return_value[SM_INDEX_DATA]
+ return [prop for prop in self.__dict__.values() if isinstance(prop, ConsumedThingProperty)]
@property
- def last_zmq_message(self) -> typing.List:
- return self._last_return_value
-
- def __call__(self, *args, **kwargs) -> typing.Any:
- """
- execute method on server
- """
- if len(args) > 0:
- kwargs["__args__"] = args
- elif self._schema_validator:
- self._schema_validator.validate(kwargs)
- self._last_return_value = self._zmq_client.execute(instruction=self._instruction, arguments=kwargs,
- invokation_timeout=self._invokation_timeout, execution_timeout=self._execution_timeout,
- raise_client_side_exception=True, argument_schema=self._schema)
- return self.last_return_value # note the missing underscore
-
- def oneway(self, *args, **kwargs) -> None:
- """
- only issues the method call to the server and does not wait for reply,
- neither does the server reply to this call.
- """
- if len(args) > 0:
- kwargs["__args__"] = args
- elif self._schema_validator:
- self._schema_validator.validate(kwargs)
- self._zmq_client.send_instruction(instruction=self._instruction, arguments=kwargs,
- invokation_timeout=self._invokation_timeout, execution_timeout=None,
- context=dict(oneway=True), argument_schema=self._schema)
-
- def noblock(self, *args, **kwargs) -> None:
- if len(args) > 0:
- kwargs["__args__"] = args
- elif self._schema_validator:
- self._schema_validator.validate(kwargs)
- return self._zmq_client.send_instruction(instruction=self._instruction, arguments=kwargs,
- invokation_timeout=self._invokation_timeout, execution_timeout=self._execution_timeout,
- argument_schema=self._schema)
-
- async def async_call(self, *args, **kwargs):
- """
- async execute method on server
- """
- if not self._async_zmq_client:
- raise RuntimeError("async calls not possible as async_mixin was not set at __init__()")
- if len(args) > 0:
- kwargs["__args__"] = args
- elif self._schema_validator:
- self._schema_validator.validate(kwargs)
- self._last_return_value = await self._async_zmq_client.async_execute(instruction=self._instruction,
- arguments=kwargs, invokation_timeout=self._invokation_timeout,
- raise_client_side_exception=True,
- argument_schema=self._schema)
- return self.last_return_value # note the missing underscore
+ def actions(self) -> typing.List[ConsumedThingAction]:
+ """
+ list of actions in the server object
+ """
+ return [action for action in self.__dict__.values() if isinstance(action, ConsumedThingAction)]
-
-class _Property:
-
- __slots__ = ['_zmq_client', '_async_zmq_client', '_read_instruction', '_write_instruction',
- '_invokation_timeout', '_execution_timeout', '_last_value', '__name__', '__doc__']
- # property get set abstraction
- # Dont add doc otherwise __doc__ in slots will conflict with class variable
-
- def __init__(self, client : SyncZMQClient, instruction : str, invokation_timeout : typing.Optional[float] = 5,
- execution_timeout : typing.Optional[float] = None, async_client : typing.Optional[AsyncZMQClient] = None) -> None:
- self._zmq_client = client
- self._async_zmq_client = async_client
- self._invokation_timeout = invokation_timeout
- self._execution_timeout = execution_timeout
- self._read_instruction = instruction + '/read'
- self._write_instruction = instruction + '/write'
-
- @property # i.e. cannot have setter
- def last_read_value(self) -> typing.Any:
- """
- cache of last read value
- """
- if len(self._last_value[SM_INDEX_ENCODED_DATA]) > 0:
- return self._last_value[SM_INDEX_ENCODED_DATA]
- return self._last_value[SM_INDEX_DATA]
-
@property
- def last_zmq_message(self) -> typing.List:
+ def events(self) -> typing.List[ConsumedThingEvent]:
"""
- cache of last message received for this property
+ list of events in the server object
"""
- return self._last_value
-
- def set(self, value : typing.Any) -> None:
- self._last_value = self._zmq_client.execute(self._write_instruction, dict(value=value),
- raise_client_side_exception=True)
-
- def get(self) -> typing.Any:
- self._last_value = self._zmq_client.execute(self._read_instruction,
- invokation_timeout=self._invokation_timeout,
- raise_client_side_exception=True)
- return self.last_read_value
-
- async def async_set(self, value : typing.Any) -> None:
- if not self._async_zmq_client:
- raise RuntimeError("async calls not possible as async_mixin was not set at __init__()")
- self._last_value = await self._async_zmq_client.async_execute(self._write_instruction, dict(value=value),
- invokation_timeout=self._invokation_timeout,
- execution_timeout=self._execution_timeout,
- raise_client_side_exception=True)
+ return [event for event in self.__dict__.values() if isinstance(event, ConsumedThingEvent)]
- async def async_get(self) -> typing.Any:
- if not self._async_zmq_client:
- raise RuntimeError("async calls not possible as async_mixin was not set at __init__()")
- self._last_value = await self._async_zmq_client.async_execute(self._read_instruction,
- invokation_timeout=self._invokation_timeout,
- execution_timeout=self._execution_timeout,
- raise_client_side_exception=True)
- return self.last_read_value
-
- def noblock_get(self) -> None:
- return self._zmq_client.send_instruction(self._read_instruction,
- invokation_timeout=self._invokation_timeout,
- execution_timeout=self._execution_timeout)
-
- def noblock_set(self, value : typing.Any) -> None:
- return self._zmq_client.send_instruction(self._write_instruction, dict(value=value),
- invokation_timeout=self._invokation_timeout,
- execution_timeout=self._execution_timeout)
-
- def oneway_set(self, value : typing.Any) -> None:
- self._zmq_client.send_instruction(self._write_instruction, dict(value=value),
- invokation_timeout=self._invokation_timeout,
- execution_timeout=self._execution_timeout)
-
-
-
-class _Event:
-
- __slots__ = ['_zmq_client', '_name', '_obj_name', '_unique_identifier', '_socket_address', '_callbacks', '_serialization_specific',
- '_serializer', '_subscribed', '_thread', '_thread_callbacks', '_event_consumer', '_logger', '_deserialize']
- # event subscription
- # Dont add class doc otherwise __doc__ in slots will conflict with class variable
-
- def __init__(self, client : SyncZMQClient, name : str, obj_name : str, unique_identifier : str, socket : str,
- serialization_specific : bool = False, serializer : BaseSerializer = None, logger : logging.Logger = None) -> None:
- self._zmq_client = client
- self._name = name
- self._obj_name = obj_name
- self._unique_identifier = unique_identifier
- self._socket_address = socket
- self._serialization_specific = serialization_specific
- self._callbacks = None
- self._serializer = serializer
- self._logger = logger
- self._subscribed = False
- self._deserialize = True
-
- def add_callbacks(self, callbacks : typing.Union[typing.List[typing.Callable], typing.Callable]) -> None:
- if not self._callbacks:
- self._callbacks = []
- if isinstance(callbacks, list):
- self._callbacks.extend(callbacks)
- else:
- self._callbacks.append(callbacks)
-
- def subscribe(self, callbacks : typing.Union[typing.List[typing.Callable], typing.Callable],
- thread_callbacks : bool = False, deserialize : bool = True) -> None:
- self._event_consumer = EventConsumer(
- 'zmq-' + self._unique_identifier if self._serialization_specific else self._unique_identifier,
- self._socket_address, f"{self._name}|RPCEvent|{uuid.uuid4()}", b'PROXY',
- zmq_serializer=self._serializer, logger=self._logger
- )
- self.add_callbacks(callbacks)
- self._subscribed = True
- self._deserialize = deserialize
- self._thread_callbacks = thread_callbacks
- self._thread = threading.Thread(target=self.listen)
- self._thread.start()
-
- def listen(self):
- while self._subscribed:
- try:
- data = self._event_consumer.receive(deserialize=self._deserialize)
- if data == 'INTERRUPT':
- break
- for cb in self._callbacks:
- if not self._thread_callbacks:
- cb(data)
- else:
- threading.Thread(target=cb, args=(data,)).start()
- except Exception as ex:
- warnings.warn(f"Uncaught exception from {self._name} event - {str(ex)}",
- category=RuntimeWarning)
- try:
- self._event_consumer.exit()
- except:
- pass
-
-
- def unsubscribe(self, join_thread : bool = True):
- self._subscribed = False
- self._event_consumer.interrupt()
- if join_thread:
- self._thread.join()
-
-
-
-
-
-__allowed_attribute_types__ = (_Property, _RemoteMethod, _Event)
-__WRAPPER_ASSIGNMENTS__ = ('__name__', '__qualname__', '__doc__')
-
-def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : ZMQResource) -> None:
- if not func_info.top_owner:
- return
- raise RuntimeError("logic error")
- for dunder in __WRAPPER_ASSIGNMENTS__:
- if dunder == '__qualname__':
- info = '{}.{}'.format(client_obj.__class__.__name__, func_info.get_dunder_attr(dunder).split('.')[1])
- else:
- info = func_info.get_dunder_attr(dunder)
- setattr(method, dunder, info)
- client_obj.__setattr__(func_info.obj_name, method)
-
-def _add_property(client_obj : ObjectProxy, property : _Property, property_info : ZMQResource) -> None:
- if not property_info.top_owner:
- return
- raise RuntimeError("logic error")
- for attr in ['__doc__', '__name__']:
- # just to imitate _add_method logic
- setattr(property, attr, property_info.get_dunder_attr(attr))
- client_obj.__setattr__(property_info.obj_name, property)
-
-def _add_event(client_obj : ObjectProxy, event : _Event, event_info : ServerSentEvent) -> None:
- setattr(client_obj, event_info.obj_name, event)
+ @property
+ def thing_id(self) -> str:
+ """
+ id of the server object
+ """
+ return self.td.get("id", None)
+ @property
+ def TD(self) -> typing.Dict[str, typing.Any]:
+ """
+ Thing description of the server object
+ """
+ return self.td
-class ReplyNotArrivedError(Exception):
- pass
-
-
-__all__ = ['ObjectProxy']
+__all__ = [
+ ObjectProxy.__name__
+]
diff --git a/hololinked/client/zmq/__init__.py b/hololinked/client/zmq/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/hololinked/client/zmq/consumed_interactions.py b/hololinked/client/zmq/consumed_interactions.py
new file mode 100644
index 00000000..6b8f4ca6
--- /dev/null
+++ b/hololinked/client/zmq/consumed_interactions.py
@@ -0,0 +1,499 @@
+import asyncio
+import logging
+import typing
+import threading
+import warnings
+import traceback
+
+from ...utils import get_current_async_loop
+from ...constants import Operations
+from ...serializers import BaseSerializer, Serializers
+from ...serializers.payloads import SerializableData, PreserializedData
+from ...td import PropertyAffordance, ActionAffordance, EventAffordance
+from ...client.abstractions import ConsumedThingAction, ConsumedThingEvent, ConsumedThingProperty, raise_local_exception
+from ...core.zmq.message import ResponseMessage
+from ...core.zmq.message import EMPTY_BYTE, REPLY, TIMEOUT, ERROR, INVALID_MESSAGE
+from ...core.zmq.brokers import SyncZMQClient, AsyncZMQClient, EventConsumer, AsyncEventConsumer
+from ...core import Thing, Action
+from ..exceptions import ReplyNotArrivedError
+
+
+
+__error_message_types__ = [TIMEOUT, ERROR, INVALID_MESSAGE]
+
+
+class ZMQConsumedAffordanceMixin:
+
+ __slots__ = ['_resource', '_schema_validator', '__name__', '__qualname__', '__doc__', '_owner_inst',
+ '_sync_zmq_client', '_async_zmq_client', '_invokation_timeout', '_execution_timeout',
+ '_thing_execution_context', '_last_zmq_response' ] # __slots__ dont support multiple inheritance
+
+ def __init__(self,
+ sync_client: SyncZMQClient,
+ async_client: AsyncZMQClient | None = None,
+ **kwargs
+ # schema_validator: typing.Type[BaseSchemaValidator] | None = None
+ ) -> None:
+ self._sync_zmq_client = sync_client
+ self._async_zmq_client = async_client
+ self._invokation_timeout = kwargs.get('invokation_timeout', 5)
+ self._execution_timeout = kwargs.get('execution_timeout', 5)
+ self._thing_execution_context = dict(fetch_execution_logs=False)
+ self._last_zmq_response = None # type: typing.Optional[ResponseMessage]
+
+ def get_last_return_value(self, raise_exception: bool = False) -> typing.Any:
+ """
+ cached return value of the last call to the method
+ """
+ if self._last_zmq_response is None:
+ raise RuntimeError("No last response available. Did you make an operation?")
+ payload = self._last_zmq_response.payload.deserialize()
+ preserialized_payload = self._last_zmq_response.preserialized_payload.value
+ if self._last_zmq_response.type in __error_message_types__ and raise_exception:
+ raise_local_exception(payload)
+ if preserialized_payload != EMPTY_BYTE:
+ if payload is None:
+ return preserialized_payload
+ return payload, preserialized_payload
+ return payload
+
+ @property
+ def last_zmq_response(self) -> ResponseMessage:
+ """
+ cache of last message received for this property
+ """
+ return self._last_zmq_response
+
+ def read_reply(self, message_id: str, timeout: int = None) -> typing.Any:
+ if self._owner_inst._noblock_messages.get(message_id) != self:
+ raise RuntimeError(f"Message ID {message_id} does not belong to this property.")
+ self._last_zmq_response = self._sync_zmq_client.recv_response(message_id=message_id)
+ if not self._last_zmq_response:
+ raise ReplyNotArrivedError(f"could not fetch reply within timeout for message id '{message_id}'")
+ return ZMQConsumedAffordanceMixin.get_last_return_value(self, True)
+
+
+class ZMQAction(ZMQConsumedAffordanceMixin, ConsumedThingAction):
+
+ # method call abstraction
+ # Dont add doc otherwise __doc__ in slots will conflict with class variable
+
+ def __init__(self,
+ resource: ActionAffordance,
+ sync_client: SyncZMQClient,
+ async_client: AsyncZMQClient | None = None,
+ owner_inst: typing.Optional[typing.Any] = None,
+ **kwargs
+ # schema_validator: typing.Type[BaseSchemaValidator] | None = None
+ ) -> None:
+ """
+ Parameters
+ ----------
+ resource: ActionAffordance
+ dataclass object representing the action
+ sync_client: SyncZMQClient
+ synchronous ZMQ client
+ async_zmq_client: AsyncZMQClient
+ asynchronous ZMQ client for async calls
+ """
+ ConsumedThingAction.__init__(self, resource=resource, owner_inst=owner_inst)
+ ZMQConsumedAffordanceMixin.__init__(self, sync_client=sync_client, async_client=async_client, **kwargs)
+ self._resource # type: ActionAffordance
+
+ last_return_value = property(fget=ZMQConsumedAffordanceMixin.get_last_return_value,
+ doc="cached return value of the last call to the method")
+
+ def __call__(self, *args, **kwargs) -> typing.Any:
+ if len(args) > 0:
+ kwargs["__args__"] = args
+ elif self._schema_validator:
+ self._schema_validator.validate(kwargs)
+ self._last_zmq_response = self._sync_zmq_client.execute(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.invokeAction,
+ payload=SerializableData(
+ value=kwargs,
+ content_type=self._resource.retrieve_form('invokeAction', {}).get(
+ 'contentType', 'application/json')
+ ),
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+ return ZMQConsumedAffordanceMixin.get_last_return_value(self, True)
+
+ async def async_call(self, *args, **kwargs) -> typing.Any:
+ if not self._async_zmq_client:
+ raise RuntimeError("async calls not possible as async_mixin was not set True at __init__()")
+ if len(args) > 0:
+ kwargs["__args__"] = args
+ elif self._schema_validator:
+ self._schema_validator.validate(kwargs)
+ self._last_zmq_response = await self._async_zmq_client.async_execute(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.invokeAction,
+ payload=SerializableData(
+ value=kwargs,
+ content_type=self._resource.retrieve_form('invokeAction', {}).get(
+ 'contentType', 'application/json')
+ ),
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout,
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+ return ZMQConsumedAffordanceMixin.get_last_return_value(self, True)
+
+ def oneway(self, *args, **kwargs) -> None:
+ if len(args) > 0:
+ kwargs["__args__"] = args
+ elif self._schema_validator:
+ self._schema_validator.validate(kwargs)
+ self._sync_zmq_client.send_request(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.invokeAction,
+ payload=SerializableData(
+ value=kwargs,
+ content_type=self._resource.retrieve_form('invokeAction', {}).get(
+ 'contentType', 'application/json')
+ ),
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout,
+ oneway=True
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+
+ def noblock(self, *args, **kwargs) -> str:
+ if len(args) > 0:
+ kwargs["__args__"] = args
+ elif self._schema_validator:
+ self._schema_validator.validate(kwargs)
+ msg_id = self._sync_zmq_client.send_request(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.invokeAction,
+ payload=SerializableData(
+ value=kwargs,
+ content_type=self._resource.retrieve_form('invokeAction', {}).get(
+ 'contentType', 'application/json')
+ ),
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout,
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+ self._owner_inst._noblock_messages[msg_id] = self
+ return msg_id
+
+
+
+class ZMQProperty(ZMQConsumedAffordanceMixin, ConsumedThingProperty):
+
+ # property get set abstraction
+ # Dont add doc otherwise __doc__ in slots will conflict with class variable
+
+ def __init__(self,
+ resource: PropertyAffordance,
+ sync_client: SyncZMQClient,
+ async_client: AsyncZMQClient | None = None,
+ owner_inst: typing.Optional[typing.Any] = None,
+ **kwargs
+ ) -> None:
+ """
+ Parameters
+ ----------
+ resource: PropertyAffordance
+ dataclass object representing the property
+ sync_client: SyncZMQClient
+ synchronous ZMQ client
+ async_client: AsyncZMQClient
+ asynchronous ZMQ client for async calls
+ """
+ ConsumedThingProperty.__init__(self, resource=resource, owner_inst=owner_inst)
+ ZMQConsumedAffordanceMixin.__init__(self, sync_client=sync_client, async_client=async_client, **kwargs)
+ self._resource # type: PropertyAffordance
+
+ last_read_value = property(fget=ZMQConsumedAffordanceMixin.get_last_return_value,
+ doc="cached return value of the last call to the method")
+
+ def set(self, value: typing.Any) -> None:
+ self._last_zmq_response = self._sync_zmq_client.execute(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.writeProperty,
+ payload=SerializableData(
+ value=value,
+ content_type=self._resource.retrieve_form('writeProperty', {}).get(
+ 'contentType', 'application/json')
+ ),
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+ ZMQConsumedAffordanceMixin.get_last_return_value(self, True)
+
+ def get(self) -> typing.Any:
+ self._last_zmq_response = self._sync_zmq_client.execute(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.readProperty,
+ server_execution_context=dict(
+                invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+ return ZMQConsumedAffordanceMixin.get_last_return_value(self, True)
+
+ async def async_set(self, value: typing.Any) -> None:
+ if not self._async_zmq_client:
+ raise RuntimeError("async calls not possible as async_mixin was not set at __init__()")
+ self._last_zmq_response = await self._async_zmq_client.async_execute(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.writeProperty,
+ payload=SerializableData(
+ value=value,
+ content_type=self._resource.retrieve_form('writeProperty', {}).get(
+ 'contentType', 'application/json')
+ ),
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+
+ async def async_get(self) -> typing.Any:
+ if not self._async_zmq_client:
+ raise RuntimeError("async calls not possible as async_mixin was not set at __init__()")
+ self._last_zmq_response = await self._async_zmq_client.async_execute(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.readProperty,
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+ return ZMQConsumedAffordanceMixin.get_last_return_value(self, True)
+
+ def oneway_set(self, value: typing.Any) -> None:
+ self._sync_zmq_client.send_request(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.writeProperty,
+ payload=SerializableData(
+ value=value,
+ content_type=self._resource.retrieve_form('writeProperty', {}).get(
+ 'contentType', 'application/json')
+ ),
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout,
+ oneway=True
+ ),
+ )
+
+    def noblock_get(self) -> str:
+ msg_id = self._sync_zmq_client.send_request(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.readProperty,
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+ self._owner_inst._noblock_messages[msg_id] = self
+ return msg_id
+
+    def noblock_set(self, value: typing.Any) -> str:
+ msg_id = self._sync_zmq_client.send_request(
+ thing_id=self._resource.thing_id,
+ objekt=self._resource.name,
+ operation=Operations.writeProperty,
+ payload=SerializableData(
+ value=value,
+ content_type=self._resource.retrieve_form('writeProperty', {}).get(
+ 'contentType', 'application/json')
+ ),
+ server_execution_context=dict(
+ invokation_timeout=self._invokation_timeout,
+ execution_timeout=self._execution_timeout
+ ),
+ thing_execution_context=self._thing_execution_context
+ )
+ self._owner_inst._noblock_messages[msg_id] = self
+ return msg_id
+
+
+
+class ZMQEvent(ConsumedThingEvent, ZMQConsumedAffordanceMixin):
+
+ __slots__ = ['__name__', '__qualname__', '__doc__',
+ '_sync_zmq_client', '_async_zmq_client', '_default_scheduling_mode',
+ '_event_consumer', '_callbacks',
+ '_serializer', '_subscribed', '_thread', '_thread_callbacks', '_logger', '_deserialize']
+
+ # event subscription
+ # Dont add class doc otherwise __doc__ in slots will conflict with class variable
+
+ def __init__(self,
+ resource: EventAffordance,
+ sync_zmq_client: EventConsumer,
+ async_zmq_client: AsyncEventConsumer | None = None,
+ default_scheduling_mode: str = 'sync',
+ logger: logging.Logger = None,
+ **kwargs
+ ) -> None:
+ super().__init__(resource=resource, logger=logger, **kwargs)
+ self._sync_zmq_client = sync_zmq_client
+ self._async_zmq_client = async_zmq_client
+ self._default_scheduling_mode = default_scheduling_mode
+ self._thread = None
+
+ def subscribe(self,
+ callbacks: typing.Union[typing.List[typing.Callable], typing.Callable],
+ thread_callbacks: bool = False,
+ deserialize: bool = True
+ ) -> None:
+ if self._default_scheduling_mode == 'sync':
+ self._sync_zmq_client.subscribe()
+ elif self._default_scheduling_mode == 'async':
+ self._async_zmq_client.subscribe()
+ else:
+ raise ValueError(f"Invalid scheduling mode: {self._default_scheduling_mode}. Must be 'sync' or 'async'.")
+ self.add_callbacks(callbacks)
+ self._subscribed = True
+ self._deserialize = deserialize
+ self._thread_callbacks = thread_callbacks
+ if self._default_scheduling_mode == 'sync':
+ self._thread = threading.Thread(target=self.listen)
+ self._thread.start()
+ else:
+ get_current_async_loop().call_soon(lambda: asyncio.create_task(self.async_listen()))
+
+ def listen(self):
+ while self._subscribed:
+ try:
+ event_message = self._sync_zmq_client.receive()
+ self._last_zmq_response = event_message
+ value = self.get_last_return_value(raise_exception=True)
+ if value == 'INTERRUPT':
+ break
+ for cb in self._callbacks:
+ if not self._thread_callbacks:
+ cb(value)
+ else:
+ threading.Thread(target=cb, args=(value,)).start()
+ except Exception as ex:
+ import traceback
+ # traceback.print_exc()
+ # TODO: some minor bug here within the umq receive loop when the loop is interrupted
+ # uncomment the above line to see the traceback
+                warnings.warn(f"Uncaught exception from {self._resource.name} event - {str(ex)}\n{traceback.format_exc()}",
+ category=RuntimeWarning)
+
+
+ async def async_listen(self):
+ while self._subscribed:
+ try:
+ event_message = await self._async_zmq_client.receive()
+ self._last_zmq_response = event_message
+ value = self.get_last_return_value(raise_exception=True)
+ if value == 'INTERRUPT':
+ break
+ for cb in self._callbacks:
+ if not self._thread_callbacks:
+ if asyncio.iscoroutinefunction(cb):
+ await cb(value)
+ else:
+ cb(value)
+ else:
+ threading.Thread(target=cb, args=(value,)).start()
+ except Exception as ex:
+ #
+ # traceback.print_exc()
+ # if "There is no current event loop in thread" and not self._subscribed:
+ # # TODO: some minor bug here within the umq receive loop when the loop is interrupted
+ # # uncomment the above line to see the traceback
+ # pass
+ # else:
+                warnings.warn(f"Uncaught exception from {self._resource.name} event - {str(ex)}\n{traceback.format_exc()}",
+ category=RuntimeWarning)
+
+ def unsubscribe(self, join_thread: bool = True) -> None:
+ self._subscribed = False
+ self._sync_zmq_client.interrupt()
+ if join_thread and self._thread is not None and self._thread.is_alive():
+ self._thread.join()
+ self._thread = None
+
+
+
+class WriteMultipleProperties(ZMQAction):
+ """
+ Read and write multiple properties at once
+ """
+
+ def __init__(self,
+ sync_client: SyncZMQClient,
+ async_client: AsyncZMQClient | None = None,
+ owner_inst: typing.Optional[typing.Any] = None,
+ **kwargs
+ ) -> None:
+ action = Thing._set_properties # type: Action
+ resource = action.to_affordance(Thing)
+ resource._thing_id = owner_inst.thing_id
+ super().__init__(
+ resource=resource,
+ sync_client=sync_client,
+ async_client=async_client,
+ owner_inst=owner_inst,
+ **kwargs
+ )
+
+
+class ReadMultipleProperties(ZMQAction):
+ """
+ Read multiple properties at once
+ """
+
+ def __init__(
+ self,
+ sync_client: SyncZMQClient,
+ async_client: AsyncZMQClient | None = None,
+ owner_inst: typing.Optional[typing.Any] = None,
+ **kwargs
+ ) -> None:
+ action = Thing._get_properties # type: Action
+ resource = action.to_affordance(Thing)
+ resource._thing_id = owner_inst.thing_id
+ super().__init__(
+ resource=resource,
+ sync_client=sync_client,
+ async_client=async_client,
+ owner_inst=owner_inst,
+ **kwargs
+ )
+
+
+__all__ = [
+ ZMQAction.__name__,
+ ZMQProperty.__name__,
+ ZMQEvent.__name__,
+]
\ No newline at end of file
diff --git a/hololinked/server/config.py b/hololinked/config.py
similarity index 91%
rename from hololinked/server/config.py
rename to hololinked/config.py
index d502eec6..733b71b0 100644
--- a/hololinked/server/config.py
+++ b/hololinked/config.py
@@ -1,5 +1,6 @@
-# adapted from pyro - https://github.com/irmen/Pyro5 - see following license
"""
+adapted from pyro - https://github.com/irmen/Pyro5 - see following license
+
MIT License
Copyright (c) Irmen de Jong
@@ -26,9 +27,10 @@
import os
import typing
import warnings
+import zmq.asyncio
-from .. import __version__
-from .serializers import PythonBuiltinJSONSerializer
+from . import __version__
+from .serializers.serializers import PythonBuiltinJSONSerializer
class Configuration:
@@ -74,19 +76,22 @@ class Configuration:
__slots__ = [
# folders
"TEMP_DIR",
- # TCP socket
+ # TCP sockets
"TCP_SOCKET_SEARCH_START_PORT", "TCP_SOCKET_SEARCH_END_PORT",
- # system view
- "PRIMARY_HOST", "LOCALHOST_PORT",
- # database
- "DB_CONFIG_FILE",
# HTTP server
"COOKIE_SECRET",
# credentials
"PWD_HASHER_TIME_COST", "PWD_HASHER_MEMORY_COST",
+ # system view
+ "PRIMARY_HOST", "LOCALHOST_PORT",
+ # database
+ "DB_CONFIG_FILE",
# Eventloop
"USE_UVLOOP", "TRACE_MALLOC",
- 'validate_schema_on_client', 'validate_schemas'
+ # Schema
+ 'VALIDATE_SCHEMA_ON_CLIENT', 'VALIDATE_SCHEMAS',
+ # ZMQ
+ "ZMQ_CONTEXT"
]
def __init__(self, use_environment : bool = False):
@@ -104,8 +109,9 @@ def load_variables(self, use_environment : bool = False):
self.PWD_HASHER_TIME_COST = 15
self.USE_UVLOOP = False
self.TRACE_MALLOC = False
- self.validate_schema_on_client = False
- self.validate_schemas = True
+ self.VALIDATE_SCHEMA_ON_CLIENT = False
+ self.VALIDATE_SCHEMAS = True
+ self.ZMQ_CONTEXT = zmq.asyncio.Context()
if not use_environment:
return
diff --git a/hololinked/server/constants.py b/hololinked/constants.py
similarity index 51%
rename from hololinked/server/constants.py
rename to hololinked/constants.py
index 17d79184..b275aff2 100644
--- a/hololinked/server/constants.py
+++ b/hololinked/constants.py
@@ -5,9 +5,15 @@
# types
-JSONSerializable = typing.Union[typing.Dict[str, typing.Any], list, str, int, float, None]
+JSONSerializable = typing.Union[
+ str, int, float, bool, None,
+ typing.Dict[str, typing.Any],
+ typing.List
+]
JSON = typing.Dict[str, JSONSerializable]
+byte_types = (bytes, bytearray, memoryview)
+
# decorator constants
# naming
USE_OBJECT_NAME : str = "USE_OBJECT_NAME"
@@ -26,34 +32,8 @@ class ResourceTypes(StrEnum):
PROPERTY = "PROPERTY"
ACTION = "ACTION"
EVENT = "EVENT"
- IMAGE_STREAM = "IMAGE_STREAM"
- FILE = "FILE"
-
-
-class CommonRPC(StrEnum):
- """some common RPC and their associated instructions for quick access by lower level code"""
-
- ZMQ_RESOURCES = '/resources/zmq-object-proxy'
- HTTP_RESOURCES = '/resources/http-server'
- OBJECT_INFO = '/object-info'
- PING = '/ping'
-
- @classmethod
- def zmq_resource_read(cls, instance_name : str) -> str:
- return f"/{instance_name}{cls.ZMQ_RESOURCES}/read"
-
- @classmethod
- def http_resource_read(cls, instance_name : str) -> str:
- return f"/{instance_name}{cls.HTTP_RESOURCES}/read"
-
- @classmethod
- def object_info_read(cls, instance_name : str) -> str:
- return f"/{instance_name}{cls.OBJECT_INFO}/read"
+ THING = "THING"
- @classmethod
- def object_info_write(cls, instance_name : str) -> str:
- return f"/{instance_name}{cls.OBJECT_INFO}/write"
-
class REGEX(StrEnum):
"""common regexes"""
@@ -87,7 +67,7 @@ class LOGLEVEL(IntEnum):
# ZMQ
-class ZMQ_PROTOCOLS(StrEnum):
+class ZMQ_TRANSPORTS(StrEnum):
"""
supported ZMQ transport protocols - TCP, IPC, INPROC
@@ -100,51 +80,6 @@ class ZMQ_PROTOCOLS(StrEnum):
INPROC = "INPROC"
-class ClientMessage(IntEnum):
- """
- ZMQ client sent message indexing for accessing message indices with names
- instead of numbers
- """
- ADDRESS = 0
- CLIENT_TYPE = 2
- MESSAGE_TYPE = 3
- MESSAGE_ID = 4
- TIMEOUT = 5
- INSTRUCTION = 6
- ARGUMENTS = 7
- EXECUTION_CONTEXT = 8
-
-
-class ServerMessage(IntEnum):
- """
- ZMQ server sent message indexing for accessing message indices with names
- instead of numbers
- """
- ADDRESS = 0
- SERVER_TYPE = 2
- MESSAGE_TYPE = 3
- MESSAGE_ID = 4
- DATA = 5
- ENCODED_DATA = 6
-
-
-class ServerTypes(Enum):
- "type of ZMQ servers"
-
- UNKNOWN_TYPE = b'UNKNOWN'
- EVENTLOOP = b'EVENTLOOP'
- THING = b'THING'
- POOL = b'POOL'
-
-
-class ClientTypes(Enum):
- "type of ZMQ clients"
-
- HTTP_SERVER = b'HTTP_SERVER'
- PROXY = b'PROXY'
- TUNNELER = b'TUNNELER' # message passer from inproc client to inrproc server within RPC
-
-
class HTTPServerTypes(StrEnum):
"types of HTTP server"
@@ -152,21 +87,6 @@ class HTTPServerTypes(StrEnum):
THING_SERVER = 'THING_SERVER'
-class Serializers(StrEnum):
- """
- allowed serializers
-
- - PICKLE : pickle
- - JSON : msgspec.json
- - SERPENT : serpent
- - MSGPACK : msgspec.msgpack
- """
- PICKLE = 'pickle'
- JSON = 'json'
- SERPENT = 'serpent'
- MSGPACK = 'msgpack'
-
-
class ZMQSocketType(IntEnum):
PAIR = zmq.PAIR
PUB = zmq.PUB
@@ -190,10 +110,25 @@ class ZMQSocketType(IntEnum):
ZMQ_EVENT_MAP[value] = name
+# Function to get the socket type name from the enum
+def get_socket_type_name(socket_type):
+ try:
+ return ZMQSocketType(socket_type).name
+ except ValueError:
+ return "UNKNOWN"
+
+
+class Operations(StrEnum):
+ readProperty = 'readProperty'
+ writeProperty = 'writeProperty'
+ deleteProperty = 'deleteProperty'
+ observeProperty = 'observeProperty'
+ invokeAction = 'invokeAction'
+ subscribeEvent = 'subscribeEvent'
+ unsubscribeEvent = 'unsubscribeEvent'
__all__ = [
- Serializers.__name__,
HTTP_METHODS.__name__,
- ZMQ_PROTOCOLS.__name__
+ ZMQ_TRANSPORTS.__name__
]
\ No newline at end of file
diff --git a/hololinked/core/__init__.py b/hololinked/core/__init__.py
new file mode 100644
index 00000000..92ea4476
--- /dev/null
+++ b/hololinked/core/__init__.py
@@ -0,0 +1,6 @@
+# Order of import is reflected in this file to avoid circular imports
+from .events import *
+from .actions import *
+from .property import *
+from .thing import *
+from .meta import ThingMeta
diff --git a/hololinked/core/actions.py b/hololinked/core/actions.py
new file mode 100644
index 00000000..5fa8a956
--- /dev/null
+++ b/hololinked/core/actions.py
@@ -0,0 +1,326 @@
+import typing
+import warnings
+import jsonschema
+from enum import Enum
+from types import FunctionType, MethodType
+from inspect import iscoroutinefunction, getfullargspec
+from pydantic import BaseModel, RootModel
+
+from ..param.parameterized import ParameterizedFunction
+from ..constants import JSON
+from ..config import global_config
+from ..utils import (get_return_type_from_signature, has_async_def, get_input_model_from_signature,
+ issubklass, isclassmethod)
+from ..exceptions import StateMachineError
+from ..schema_validators.validators import JSONSchemaValidator, PydanticSchemaValidator
+from .dataklasses import ActionInfoValidator
+
+
+
+class Action:
+ """
+ Object that models an action.
+ These actions are unbound and return a bound action when accessed using the owning object.
+ """
+ __slots__ = ['obj', 'owner', '_execution_info']
+
+ def __init__(self, obj: FunctionType) -> None:
+ self.obj = obj
+
+ def __set_name__(self, owner, name):
+ self.owner = owner
+
+ def __str__(self) -> str:
+        return f"<Action({self.owner.__name__}.{self.obj.__name__})>"
+
+ def __eq__(self, other) -> bool:
+ if not isinstance(other, Action):
+ return False
+ return self.obj == other.obj
+
+ def __hash__(self) -> int:
+ return hash(self.obj)
+
+ def __get__(self, instance, owner):
+ if instance is None and not self._execution_info.isclassmethod:
+ return self
+ if self._execution_info.iscoroutine:
+ return BoundAsyncAction(self.obj, self, instance, owner)
+ return BoundSyncAction(self.obj, self, instance, owner)
+
+ def __call__(self, *args, **kwargs):
+ raise NotImplementedError(f"Cannot invoke unbound action {self.name} of {self.owner.__name__}." +
+ " Bound methods must be called, not the action itself. Use the appropriate instance to call the method.")
+
+ @property
+ def name(self) -> str:
+ """name of the action"""
+ return self.obj.__name__
+
+ @property
+ def execution_info(self) -> ActionInfoValidator:
+ return self._execution_info
+
+ @execution_info.setter
+ def execution_info(self, value: ActionInfoValidator) -> None:
+ if not isinstance(value, ActionInfoValidator):
+ raise TypeError("execution_info must be of type ActionInfoValidator")
+ self._execution_info = value # type: ActionInfoValidator
+
+ def to_affordance(self, owner_inst = None):
+ from ..td import ActionAffordance
+ return ActionAffordance.generate(self, owner_inst or self.owner)
+
+
+class BoundAction:
+
+ __slots__ = ['obj', 'execution_info', 'descriptor', 'owner_inst', 'owner', 'bound_obj']
+
+ def __init__(self, obj: FunctionType, descriptor: Action, owner_inst, owner) -> None:
+ self.obj = obj
+ self.descriptor = descriptor
+ self.execution_info = descriptor._execution_info
+ self.owner = owner
+ self.owner_inst = owner_inst
+ self.bound_obj = owner if self.execution_info.isclassmethod else owner_inst
+
+ def __post_init__(self):
+ # never called, neither possible to call, only type hinting
+ from .thing import ThingMeta, Thing
+ # owner class and instance
+ self.owner: ThingMeta
+ self.owner_inst: Thing
+ self.obj: FunctionType
+ # the validator that was used to accept user inputs to this action.
+ # stored only for reference, hardly used.
+ self._execution_info: ActionInfoValidator
+
+ def validate_call(self, args, kwargs : typing.Dict[str, typing.Any]) -> None:
+ """
+ Validate the call to the action, like payload, state machine state etc.
+ Errors are raised as exceptions.
+ """
+ if self.execution_info.isparameterized and len(args) > 0:
+ raise RuntimeError("parameterized functions cannot have positional arguments")
+ if self.owner_inst is None:
+ return
+ if self.execution_info.state is None or (hasattr(self.owner_inst, 'state_machine') and
+ self.owner_inst.state_machine.current_state in self.execution_info.state):
+ if self.execution_info.schema_validator is not None:
+ self.execution_info.schema_validator.validate_method_call(args, kwargs)
+ else:
+ raise StateMachineError("Thing '{}' is in '{}' state, however action can be executed only in '{}' state".format(
+ f'{self.owner.__class__}.{self.owner_inst.id}', self.owner_inst.state, self.execution_info.state))
+
+ @property
+ def name(self) -> str:
+ """name of the action"""
+ return self.obj.__name__
+
+ def __call__(self, *args, **kwargs):
+ raise NotImplementedError("call must be implemented by subclass")
+
+ def external_call(self, *args, **kwargs):
+ """validated call to the action with state machine and payload checks"""
+ raise NotImplementedError("external_call must be implemented by subclass")
+
+ def __str__(self):
+        return f"<BoundAction({self.owner.__name__}.{self.obj.__name__} of {self.owner_inst})>"
+
+ def __eq__(self, value):
+ if not isinstance(value, BoundAction):
+ return False
+ return self.obj == value.obj
+
+ def __hash__(self):
+ return hash(str(self))
+
+ def __getattribute__(self, name):
+ "Emulate method_getset() in Objects/classobject.c"
+ # https://docs.python.org/3/howto/descriptor.html#functions-and-methods
+ if name == '__doc__':
+ return self.obj.__doc__
+ return super().__getattribute__(name)
+
+ def to_affordance(self):
+ return Action.to_affordance(self.descriptor, self.owner_inst or self.owner)
+
+
+class BoundSyncAction(BoundAction):
+ """
+ non async(io) action call. The call is passed to the method as-it-is to allow local
+ invocation without state machine checks.
+ """
+ def external_call(self, *args, **kwargs):
+ """validated call to the action with state machine and payload checks"""
+ self.validate_call(args, kwargs)
+ return self.__call__(*args, **kwargs)
+
+ def __call__(self, *args, **kwargs):
+ if self.execution_info.isclassmethod:
+ return self.obj(*args, **kwargs)
+ return self.obj(self.bound_obj, *args, **kwargs)
+
+
+class BoundAsyncAction(BoundAction):
+ """
+ async(io) action call. The call is passed to the method as-it-is to allow local
+ invocation without state machine checks.
+ """
+ async def external_call(self, *args, **kwargs):
+ """validated call to the action with state machine and payload checks"""
+ self.validate_call(args, kwargs)
+ return await self.__call__(*args, **kwargs)
+
+ async def __call__(self, *args, **kwargs):
+ if self.execution_info.isclassmethod:
+ return await self.obj(*args, **kwargs)
+ return await self.obj(self.bound_obj, *args, **kwargs)
+
+
+
+__action_kw_arguments__ = ['safe', 'idempotent', 'synchronous']
+
+def action(
+ input_schema: JSON | BaseModel | RootModel | None = None,
+ output_schema: JSON | BaseModel | RootModel | None = None,
+ state : str | Enum | None = None,
+ **kwargs
+ ) -> Action:
+ """
+ decorate on your methods with this function to make them accessible remotely or create 'actions' out of them.
+
+ Parameters
+ ----------
+ input_schema: JSON
+ schema for arguments to validate them.
+ output_schema: JSON
+        schema for return value, currently only used to inform clients which is supposed to validate on its own.
+ state: str | Tuple[str], optional
+ state machine state under which the object can executed. When not provided,
+ the action can be executed under any state.
+ **kwargs:
+ - safe: bool,
+ indicate in thing description if action is safe to execute
+ - idempotent: bool,
+ indicate in thing description if action is idempotent (for example, allows HTTP client to cache return value)
+ - synchronous: bool,
+ indicate in thing description if action is synchronous (not long running)
+
+ Returns
+ -------
+ Action
+ returns the callable object wrapped in an `Action` object
+ """
+
+ def inner(obj):
+ input_schema = inner._arguments.get('input_schema', None)
+ output_schema = inner._arguments.get('output_schema', None)
+ state = inner._arguments.get('state', None)
+ kwargs = inner._arguments.get('kwargs', {})
+
+ original = obj
+ if (
+ not isinstance(obj, (FunctionType, MethodType, Action, BoundAction)) and
+ not isclassmethod(obj) and not issubklass(obj, ParameterizedFunction)
+ ):
+            raise TypeError(f"target for action is not a function/method. Given type {type(obj)}") from None
+ if isclassmethod(obj):
+ obj = obj.__func__
+ if isinstance(obj, (Action, BoundAction)):
+ if obj.execution_info.isclassmethod:
+ raise RuntimeError(f"cannot wrap a classmethod as action once again, please skip")
+ warnings.warn(f"{obj.name} is already wrapped as an action, wrapping it again with newer settings.",
+ category=UserWarning)
+ obj = obj.obj
+ if obj.__name__.startswith('__'):
+ raise ValueError(f"dunder objects cannot become remote : {obj.__name__}")
+ execution_info_validator = ActionInfoValidator()
+ if state is not None:
+ if isinstance(state, (Enum, str)):
+ execution_info_validator.state = (state,)
+ else:
+ execution_info_validator.state = state
+ if 'request' in getfullargspec(obj).kwonlyargs:
+ execution_info_validator.request_as_argument = True
+ execution_info_validator.isaction = True
+ execution_info_validator.obj = original
+ execution_info_validator.create_task = kwargs.get('create_task', False)
+ execution_info_validator.safe = kwargs.get('safe', False)
+ execution_info_validator.idempotent = kwargs.get('idempotent', False)
+ execution_info_validator.synchronous = kwargs.get('synchronous', True)
+
+ if isclassmethod(original):
+ execution_info_validator.iscoroutine = has_async_def(obj)
+ execution_info_validator.isclassmethod = True
+ elif issubklass(obj, ParameterizedFunction):
+ execution_info_validator.iscoroutine = iscoroutinefunction(obj.__call__)
+ execution_info_validator.isparameterized = True
+ else:
+ execution_info_validator.iscoroutine = iscoroutinefunction(obj)
+
+ if not input_schema:
+ try:
+ input_schema = get_input_model_from_signature(obj, remove_first_positional_arg=True)
+ except Exception as ex:
+ if global_config.VALIDATE_SCHEMAS:
+ warnings.warn(
+ f"Could not infer input schema for {obj.__name__} due to {str(ex)}. " +
+ "Considering filing a bug report if you think this should have worked correctly",
+ category=RuntimeWarning
+ )
+ if global_config.VALIDATE_SCHEMAS and input_schema:
+ if isinstance(input_schema, dict):
+ execution_info_validator.schema_validator = JSONSchemaValidator(input_schema)
+ elif issubklass(input_schema, (BaseModel, RootModel)):
+ execution_info_validator.schema_validator = PydanticSchemaValidator(input_schema)
+ else:
+ raise TypeError("input schema must be a JSON schema or a Pydantic model, got {}".format(type(input_schema)))
+ if isinstance(input_schema, (BaseModel, RootModel)):
+ execution_info_validator.argument_schema = input_schema.model_json_schema()
+ elif isinstance(input_schema, dict):
+ execution_info_validator.argument_schema = input_schema
+
+ if output_schema:
+ # output is not validated by us, so we just check the schema and dont create a validator
+ if isinstance(output_schema, dict):
+ jsonschema.Draft7Validator.check_schema(output_schema)
+ execution_info_validator.return_value_schema = output_schema
+ elif isinstance(output_schema, (BaseModel, RootModel)):
+ execution_info_validator.return_value_schema = output_schema.model_json_schema()
+ else:
+ try:
+ output_schema_model = get_return_type_from_signature(obj)
+ execution_info_validator.return_value_schema = output_schema_model.model_json_schema()
+ except Exception as ex:
+ warnings.warn(
+ f"Could not infer output schema for {obj.__name__} due to {ex}. " +
+ "Considering filing a bug report if you think this should have worked correctly",
+                    category=RuntimeWarning
+ )
+
+ final_obj = Action(original) # type: Action
+ final_obj.execution_info = execution_info_validator
+ return final_obj
+ if callable(input_schema):
+ raise TypeError("input schema should be a JSON or pydantic BaseModel, not a function/method, " +
+ "did you decorate your action wrongly? use @action() instead of @action")
+ if any(key not in __action_kw_arguments__ for key in kwargs.keys()):
+ raise ValueError("Only 'safe', 'idempotent', 'synchronous' are allowed as keyword arguments, " +
+ f"unknown arguments found {kwargs.keys()}")
+ inner._arguments = dict(
+ input_schema=input_schema,
+ output_schema=output_schema,
+ state=state,
+ kwargs=kwargs
+ )
+ return inner
+
+
+
+__all__ = [
+ action.__name__,
+ Action.__name__
+]
+
+
diff --git a/hololinked/core/dataklasses.py b/hololinked/core/dataklasses.py
new file mode 100644
index 00000000..536d3c10
--- /dev/null
+++ b/hololinked/core/dataklasses.py
@@ -0,0 +1,269 @@
+"""
+The following is a list of all dataclasses used to store information on the exposed
+resources on the network. These classes are generally not for consumption by the package-end-user.
+"""
+import typing
+import warnings
+from enum import Enum
+from dataclasses import dataclass, fields
+from types import FunctionType, MethodType
+
+from ..param.parameters import String, Boolean, Tuple, ClassSelector, Parameter
+from ..param.parameterized import ParameterizedMetaclass
+from ..constants import JSON, USE_OBJECT_NAME, UNSPECIFIED, REGEX, JSONSerializable, ResourceTypes
+from ..utils import SerializableDataclass, get_signature, pep8_to_dashed_name
+from ..config import global_config
+from ..schema_validators import BaseSchemaValidator
+
+
+class RemoteResourceInfoValidator:
+ """
+ A validator class for saving remote access related information on a resource. Currently callables (functions,
+ methods and those with __call__) and class/instance property store this information as their own attribute under
+ the variable ``_execution_info_validator``. This is later split into information suitable for HTTP server, ZMQ client & ``EventLoop``.
+
+ Attributes
+ ----------
+ state : str, default None
+ State machine state at which a callable will be executed or attribute/property can be
+ written. Does not apply to read-only attributes/properties.
+ obj_name : str, default - extracted object name
+ the name of the object which will be supplied to the ``ObjectProxy`` class to populate
+ its own namespace. For HTTP clients, HTTP method and URL path is important and for
+ object proxies clients, the obj_name is important.
+ isaction : bool, default False
+ True for a method or function or callable
+ isproperty : bool, default False
+ True for a property
+ """
+ state = Tuple(default=None, item_type=(Enum, str), allow_None=True, accept_list=True, accept_item=True,
+ doc="State machine state at which a callable will be executed or attribute/property can be written.") # type: typing.Tuple[typing.Union[Enum, str]]
+ obj = ClassSelector(default=None, allow_None=True, class_=(FunctionType, MethodType, classmethod, Parameter, ParameterizedMetaclass), # Property will need circular import so we stick to base class Parameter
+ doc="the unbound object like the unbound method")
+ obj_name = String(default=USE_OBJECT_NAME,
+ doc="the name of the object which will be supplied to the ``ObjectProxy`` class to populate its own namespace.") # type: str
+ isaction = Boolean(default=False,
+ doc="True for a method or function or callable") # type: bool
+ isproperty = Boolean(default=False,
+ doc="True for a property") # type: bool
+
+ def __init__(self, **kwargs) -> None:
+ # No full-scale checks for unknown keyword arguments as the class
+ # is used by the developer, so please try to be error-proof
+ for key, value in kwargs.items():
+ setattr(self, key, value)
+
+
+
+class ActionInfoValidator(RemoteResourceInfoValidator):
+ """
+ request_as_argument : bool, default False
+ if True, http/ZMQ request object will be passed as an argument to the callable.
+ The user is warned to not use this generally.
+ argument_schema: JSON, default None
+ JSON schema validations for arguments of a callable. Assumption is therefore arguments will be JSON compliant.
+ return_value_schema: JSON, default None
+ schema for return value of a callable. Assumption is therefore return value will be JSON compliant.
+ create_task: bool, default True
+ default for async methods/actions
+ safe: bool, default True
+ metadata information whether the action is safe to execute
+ idempotent: bool, default False
+ metadata information whether the action is idempotent
+ synchronous: bool, default True
+ metadata information whether the action is synchronous
+ """
+ request_as_argument = Boolean(default=False,
+ doc="if True, http/RPC request object will be passed as an argument to the callable.") # type: bool
+ argument_schema = ClassSelector(default=None, allow_None=True, class_=dict,
+ # due to schema validation, this has to be a dict, and not a special dict like TypedDict
+ doc="JSON schema validations for arguments of a callable")
+ return_value_schema = ClassSelector(default=None, allow_None=True, class_=dict,
+ # due to schema validation, this has to be a dict, and not a special dict like TypedDict
+ doc="schema for return value of a callable")
+ create_task = Boolean(default=True,
+ doc="should a coroutine be tasked or run in the same loop?") # type: bool
+ iscoroutine = Boolean(default=False, # not sure if isFuture or isCoroutine is correct, something to fix later
+ doc="whether the callable should be awaited") # type: bool
+ safe = Boolean(default=True,
+ doc="metadata information whether the action is safe to execute") # type: bool
+ idempotent = Boolean(default=False,
+ doc="metadata information whether the action is idempotent") # type: bool
+ synchronous = Boolean(default=True,
+ doc="metadata information whether the action is synchronous") # type: bool
+ isparameterized = Boolean(default=False,
+ doc="True for a parameterized function") # type: bool
+ isclassmethod = Boolean(default=False,
+ doc="True for a classmethod") # type: bool
+ schema_validator = ClassSelector(default=None, allow_None=True, class_=BaseSchemaValidator,
+ doc="schema validator for the callable if to be validated server side") # type: BaseSchemaValidator
+
+
+
+def build_our_temp_TD(instance, authority : typing.Optional[str] = None ,
+ ignore_errors : bool = False) -> typing.Dict[str, JSONSerializable]:
+ """
+ A temporary extension of TD used to build GUI of thing control panel.
+ Will be later replaced by a more sophisticated TD builder which is compliant with the actual spec & its theory.
+ """
+ from .thing import Thing
+
+ assert isinstance(instance, Thing), f"got invalid type {type(instance)}"
+
+ our_TD = instance.get_thing_description(authority=authority, ignore_errors=ignore_errors)
+ our_TD["inheritance"] = [class_.__name__ for class_ in instance.__class__.mro()]
+
+ for instruction, remote_info in instance.zmq_resources.items():
+ if remote_info.isaction and remote_info.obj_name in our_TD["actions"]:
+ if isinstance(remote_info.obj, classmethod):
+ our_TD["actions"][remote_info.obj_name]["type"] = 'classmethod'
+ our_TD["actions"][remote_info.obj_name]["signature"] = get_signature(remote_info.obj)[0]
+ elif remote_info.isproperty and remote_info.obj_name in our_TD["properties"]:
+ our_TD["properties"][remote_info.obj_name].update(instance.__class__.properties.webgui_info(remote_info.obj)[remote_info.obj_name])
+ return our_TD
+
+
+
+def get_organised_resources(instance):
+ """
+ organise the exposed attributes, actions and events into the dataclasses defined above
+ so that the specific servers and event loop can use them.
+ """
+ from .thing import Thing
+ from .property import Property
+ from .events import Event, EventDispatcher
+
+ assert isinstance(instance, Thing), f"got invalid type {type(instance)}"
+
+ zmq_resources = dict() # type: typing.Dict[str, ZMQResource]
+ # The following dict will be used by the event loop
+ # create unique identifier for the instance
+ if instance._owner is not None:
+ instance._qualified_id = f'{instance._owner._qualified_id}.{instance.id}'
+ else:
+ instance._qualified_id = instance.id
+
+ # First add methods and callables
+ # properties
+ for prop in instance.properties.descriptors.values():
+ if not isinstance(prop, Property) or prop._execution_info_validator is None:
+ continue
+ if not isinstance(prop._execution_info_validator, RemoteResourceInfoValidator):
+ raise TypeError("instance member {} has unknown sub-member '_execution_info_validator' of type {}.".format(
+ prop, type(prop._execution_info_validator)))
+ # above condition is just a guard in case somebody does some unpredictable patching activities
+ execution_info = prop._execution_info_validator
+ if execution_info.obj_name in zmq_resources:
+ raise ValueError(f"Duplicate resource name {execution_info.obj_name} found in {instance.__class__.__name__}")
+ zmq_resources[execution_info.obj_name] = ZMQResource(
+ what=ResourceTypes.PROPERTY,
+ class_name=instance.__class__.__name__,
+ id=instance.id,
+ obj_name=execution_info.obj_name,
+ qualname=instance.__class__.__name__ + '.' + execution_info.obj_name,
+ doc=prop.__doc__
+ )
+ prop.execution_info = execution_info.to_dataclass(obj=prop, bound_obj=instance)
+ del prop._execution_info_validator
+ if not prop._observable:
+ continue
+ # observable properties
+ assert isinstance(prop._observable_event_descriptor, Event), f"observable event not yet set for {prop.name}. logic error."
+ unique_identifier = f"{instance._qualified_id}/{pep8_to_dashed_name(prop._observable_event_descriptor.friendly_name)}"
+ dispatcher = EventDispatcher(unique_identifier=unique_identifier, publisher=prop._observable_event_descriptor._publisher)
+ prop._observable_event_descriptor.__set__(instance, dispatcher)
+ prop._observable_event_descriptor.publisher.register(dispatcher)
+ remote_info = ZMQEvent(
+ what=ResourceTypes.EVENT,
+ class_name=instance.__class__.__name__,
+ id=instance.id,
+ obj_name=prop._observable_event_descriptor.name,
+ friendly_name=prop._observable_event_descriptor.friendly_name,
+ qualname=f'{instance.__class__.__name__}.{prop._observable_event_descriptor.name}',
+ unique_identifier=unique_identifier,
+ socket_address=dispatcher.publisher.socket_address,
+ serialization_specific=dispatcher._unique_zmq_identifier != dispatcher._unique_zmq_identifier,
+ doc=prop._observable_event_descriptor.doc
+ )
+ if unique_identifier in zmq_resources:
+ raise ValueError(f"Duplicate resource name {unique_identifier} found in {instance.__class__.__name__}")
+ zmq_resources[unique_identifier] = remote_info
+ # methods
+ for name, action in instance.actions.items():
+ if not isinstance(action._execution_info_validator, ActionInfoValidator):
+ raise TypeError("instance member {} has unknown sub-member '_execution_info_validator' of type {}.".format(
+ action, type(action._execution_info_validator)) +
+ " This is a reserved variable, please dont modify it.")
+ execution_info = action._execution_info_validator
+ if execution_info.obj_name in zmq_resources:
+ warnings.warn(f"Duplicate resource name {execution_info.obj_name} found in {instance.__class__.__name__}",
+ UserWarning)
+ # methods are already bound
+ assert execution_info.isaction, ("remote info from inspect.ismethod is not a callable",
+ "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report")
+ # needs to be cleaned up for multiple HTTP methods
+ zmq_resources[execution_info.obj_name] = ZMQAction(
+ what=ResourceTypes.ACTION,
+ class_name=instance.__class__.__name__,
+ id=instance.id,
+ obj_name=getattr(action, '__name__'),
+ qualname=getattr(action, '__qualname__'),
+ doc=getattr(action, '__doc__'),
+ argument_schema=execution_info.argument_schema,
+ return_value_schema=execution_info.return_value_schema,
+ request_as_argument=execution_info.request_as_argument
+ )
+ action.execution_info = execution_info.to_dataclass(obj=action, bound_obj=instance)
+ # Events
+ for name, evt in instance.events.items():
+ assert isinstance(evt, Event), ("thing event query from inspect.ismethod is not an Event",
+ "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report")
+ if getattr(instance, name, None):
+ continue
+ # above assertion is only a typing convenience
+ unique_identifier = f"{instance._qualified_id}/{pep8_to_dashed_name(evt.friendly_name)}"
+ if unique_identifier in zmq_resources:
+ raise ValueError(f"Duplicate resource name {unique_identifier} found in {instance.__class__.__name__}")
+ dispatcher = EventDispatcher(unique_identifier=unique_identifier, publisher=evt._publisher)
+ evt.__set__(instance, dispatcher)
+ evt._publisher.register(dispatcher)
+ remote_info = ZMQEvent(
+ what=ResourceTypes.EVENT,
+ class_name=instance.__class__.__name__,
+ id=instance.id,
+ obj_name=name,
+ friendly_name=evt.friendly_name,
+ qualname=f'{instance.__class__.__name__}.{name}',
+ unique_identifier=unique_identifier,
+ serialization_specific=dispatcher._unique_zmq_identifier != dispatcher._unique_zmq_identifier,
+ socket_address=dispatcher.publisher.socket_address,
+ doc=evt.doc,
+ )
+ zmq_resources[unique_identifier] = remote_info
+ # Other objects
+ for name, resource in instance.sub_things.items():
+ assert isinstance(resource, Thing), ("thing children query from inspect.ismethod is not a Thing",
+ "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report")
+ # above assertion is only a typing convenience
+ if name == '_owner':
+ # second condition allows sharing of Things without adding once again to the list of exposed resources
+ # for example, a shared logger
+ continue
+ resource._owner = instance
+ resource._prepare_resources() # trigger again after the owner has been set to make it work correctly
+ if resource._qualified_id in zmq_resources:
+ raise ValueError(f"Duplicate resource name {resource.id} found in {instance.__class__.__name__}")
+ zmq_resources[resource._qualified_id] = ZMQResource(
+ what=ResourceTypes.THING,
+ class_name=resource.__class__.__name__,
+ id=resource.id,
+ obj_name=name,
+ qualname=f'{instance.__class__.__name__}.{resource.__class__.__name__}',
+ doc=resource.__doc__,
+ request_as_argument=False
+ )
+
+
+ # The above for-loops can be used only once, the division is only for readability
+ # following are in _internal_fixed_attributes - allowed to set only once
+ return zmq_resources
\ No newline at end of file
diff --git a/hololinked/core/events.py b/hololinked/core/events.py
new file mode 100644
index 00000000..4adce161
--- /dev/null
+++ b/hololinked/core/events.py
@@ -0,0 +1,144 @@
+import typing
+import jsonschema
+
+from ..serializers.payloads import SerializableData
+from ..serializers import Serializers
+
+from ..param.parameterized import Parameterized, ParameterizedMetaclass
+from ..constants import JSON
+from ..utils import pep8_to_dashed_name
+from ..config import global_config
+
+
+
+
+class Event:
+ """
+ Asynchronously push arbitrary messages to clients. Apart from default events created by the package (like state
+ change event, observable properties etc.), events are supposed to be created at class level or at `__init__`
+ as an instance attribute, otherwise their publishing socket is unbound and will lead to `AttributeError`.
+
+ Parameters
+ ----------
+ name: str
+ name of the event, specified name may contain dashes and can be used on client side to subscribe to this event.
+ doc: str
+ docstring for the event
+ schema: JSON
+ schema of the event, if the event is JSON compliant. HTTP clients can validate the data with this schema. There
+ is no validation on server side.
+ """
+ # security: Any
+ # security necessary to access this event.
+
+ __slots__ = ['name', '_internal_name', '_publisher', '_observable',
+ 'doc', 'schema', 'security', 'label', 'owner']
+
+
+ def __init__(self,
+ doc : typing.Optional[str] = None,
+ schema : typing.Optional[JSON] = None, # security : typing.Optional[BaseSecurityDefinition] = None,
+ label : typing.Optional[str] = None
+ ) -> None:
+ self.doc = doc
+ if global_config.VALIDATE_SCHEMAS and schema:
+ jsonschema.Draft7Validator.check_schema(schema)
+ self.schema = schema
+ # self.security = security
+ self.label = label
+ self._observable = False
+
+ def __set_name__(self, owner: ParameterizedMetaclass, name: str) -> None:
+ self._internal_name = pep8_to_dashed_name(name)
+ self.name = name
+ self.owner = owner
+
+ @typing.overload
+ def __get__(self, obj, objtype) -> "EventDispatcher":
+ ...
+
+ def __get__(self, obj: Parameterized, objtype: ParameterizedMetaclass = None):
+ try:
+ if not obj:
+ return self
+ # uncomment for type hinting
+ # from .thing import Thing
+ # assert isinstance(obj, Thing)
+ return EventDispatcher(
+ unique_identifier=f'{obj._qualified_id}/{self._internal_name}',
+ publisher=obj.rpc_server.event_publisher if obj.rpc_server else None,
+ owner_inst=obj,
+ descriptor=self
+ )
+ except KeyError:
+ raise AttributeError("Event object not yet initialized, please dont access now." +
+ " Access after Thing is running.")
+
+ def to_affordance(self, owner_inst = None):
+ from ..td import EventAffordance
+ return EventAffordance.generate(self, owner_inst or self.owner)
+
+
+class EventDispatcher:
+ """
+ The actual worker which pushes the event. The separation is necessary between `Event` and
+ `EventDispatcher` to allow class level definitions of the `Event`
+ """
+
+ __slots__ = ['_unique_identifier', '_publisher', '_owner_inst', '_descriptor']
+
+ def __init__(self, unique_identifier: str, publisher: "EventPublisher", owner_inst: ParameterizedMetaclass, descriptor: Event) -> None:
+ self._unique_identifier = unique_identifier
+ self._owner_inst = owner_inst
+ self._descriptor = descriptor
+ self.publisher = publisher
+
+ @property
+ def publisher(self) -> "EventPublisher":
+ """
+ Event publishing PUB socket owning object.
+ """
+ return self._publisher
+
+ @publisher.setter
+ def publisher(self, value: "EventPublisher") -> None:
+ if not hasattr(self, '_publisher'):
+ self._publisher = value
+ elif not isinstance(value, EventPublisher):
+ raise AttributeError("Publisher must be of type EventPublisher. Given type: " + str(type(value)))
+ if self._publisher is not None:
+ self._publisher.register(self)
+
+ def push(self, data: typing.Any) -> None:
+ """
+ publish the event.
+
+ Parameters
+ ----------
+ data: Any
+ payload of the event
+ """
+ self.publisher.publish(self, data=data)
+
+ def receive_acknowledgement(self, timeout : typing.Union[float, int, None]) -> bool:
+ """
+ Receive acknowledgement for event receive. When the timeout argument is present and not None,
+ it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof).
+ """
+ raise NotImplementedError("Event acknowledgement is not implemented yet.")
+ return self._synchronize_event.wait(timeout=timeout)
+
+ def _set_acknowledgement(self, *args, **kwargs) -> None:
+ """
+ Method to be called by RPC server when an acknowledgement is received. Not for user to be set.
+ """
+ raise NotImplementedError("Event acknowledgement is not implemented yet.")
+ self._synchronize_event.set()
+
+
+
+from .zmq.brokers import EventPublisher
+
+__all__ = [
+ Event.__name__,
+]
\ No newline at end of file
diff --git a/hololinked/server/logger.py b/hololinked/core/logger.py
similarity index 72%
rename from hololinked/server/logger.py
rename to hololinked/core/logger.py
index 1db610d5..319f6b3a 100644
--- a/hololinked/server/logger.py
+++ b/hololinked/core/logger.py
@@ -6,12 +6,12 @@
import time
from collections import deque
-from .constants import HTTP_METHODS
+from ..utils import get_default_logger
from .events import Event
from .properties import List
from .properties import Integer, Number
from .thing import Thing as RemoteObject
-from .action import action as remote_method
+from .actions import action as remote_method
@@ -64,12 +64,12 @@ class RemoteAccessHandler(logging.Handler, RemoteObject):
}
"""
- def __init__(self, instance_name : str = 'logger', maxlen : int = 500, stream_interval : float = 1.0,
+ def __init__(self, id : str = 'logger', maxlen : int = 500, stream_interval : float = 1.0,
**kwargs) -> None:
"""
Parameters
----------
- instance_name: str, default 'logger'
+ id: str, default 'logger'
instance name of the object, generally only one instance per ``Thing`` necessary, therefore defaults to
'logger'
maxlen: int, default 500
@@ -88,19 +88,18 @@ def __init__(self, instance_name : str = 'logger', maxlen : int = 500, stream_in
len_critical: int
length of critical logs, default maxlen/5
"""
+ RemoteObject.__init__(self, id=id, **kwargs)
logging.Handler.__init__(self)
- RemoteObject.__init__(self, instance_name=instance_name, **kwargs)
self.set_maxlen(maxlen, **kwargs)
self.stream_interval = stream_interval
self.diff_logs = []
self._push_events = False
self._events_thread = None
- events = Event(friendly_name='log-events', URL_path='/events', doc='stream logs',
- schema=log_message_schema)
+ log_events = Event(doc='stream logs', schema=log_message_schema)
stream_interval = Number(default=1.0, bounds=(0.025, 60.0), crop_to_bounds=True, step=0.05,
- URL_path='/stream-interval', doc="interval at which logs should be published to a client.")
+ doc="interval at which logs should be published to a client.")
def get_maxlen(self):
return self._maxlen
@@ -114,11 +113,11 @@ def set_maxlen(self, value, **kwargs):
self._critical_logs = deque(maxlen=kwargs.pop('len_critical', int(value/5)))
self._execution_logs = deque(maxlen=value)
- maxlen = Integer(default=100, bounds=(1, None), crop_to_bounds=True, URL_path='/maxlen',
+ maxlen = Integer(default=100, bounds=(1, None), crop_to_bounds=True,
fget=get_maxlen, fset=set_maxlen, doc="length of execution log history to store")
- @remote_method(http_method=HTTP_METHODS.POST, URL_path='/events/start')
+ @remote_method()
def push_events(self, scheduling : str = 'threaded', stream_interval : float = 1) -> None:
"""
Push events to client. This method is intended to be called remotely for
@@ -142,7 +141,7 @@ def push_events(self, scheduling : str = 'threaded', stream_interval : float = 1
else:
raise ValueError(f"scheduling can only be 'threaded' or 'async'. Given value {scheduling}")
- @remote_method(http_method=HTTP_METHODS.POST, URL_path='/events/stop')
+ @remote_method()
def stop_events(self) -> None:
"""
stop pushing events
@@ -193,26 +192,61 @@ async def _async_push_diff_logs(self) -> None:
self.diff_logs.clear()
self._owner.logger.info(f"ending log events.")
- debug_logs = List(default=[], readonly=True, URL_path='/logs/debug', fget=lambda self: self._debug_logs,
+ debug_logs = List(default=[], readonly=True, fget=lambda self: self._debug_logs,
doc="logs at logging.DEBUG level")
- warn_logs = List(default=[], readonly=True, URL_path='/logs/warn', fget=lambda self: self._warn_logs,
+ warn_logs = List(default=[], readonly=True, fget=lambda self: self._warn_logs,
doc="logs at logging.WARN level")
- info_logs = List(default=[], readonly=True, URL_path='/logs/info', fget=lambda self: self._info_logs,
+ info_logs = List(default=[], readonly=True, fget=lambda self: self._info_logs,
doc="logs at logging.INFO level")
- error_logs = List(default=[], readonly=True, URL_path='/logs/error', fget=lambda self: self._error_logs,
+ error_logs = List(default=[], readonly=True, fget=lambda self: self._error_logs,
doc="logs at logging.ERROR level")
- critical_logs = List(default=[], readonly=True, URL_path='/logs/critical', fget=lambda self: self._critical_logs,
+ critical_logs = List(default=[], readonly=True, fget=lambda self: self._critical_logs,
doc="logs at logging.CRITICAL level")
- execution_logs = List(default=[], readonly=True, URL_path='/logs/execution', fget=lambda self: self._execution_logs,
+ execution_logs = List(default=[], readonly=True, fget=lambda self: self._execution_logs,
doc="logs at all levels accumulated in order of collection/execution")
+def prepare_object_logger(instance: RemoteObject, log_level: int, log_file: str, remote_access: bool = False) -> None:
+ """
+ Setup logger for the object with default settings. If a logger is already present, it is not recreated.
+ If remote access is present, it is not recreated. This is a single-shot method to be run at __init__.
+
+ Parameters
+ ----------
+ log_level: int
+ logging level.
+ log_file: str
+ log file path. A FileHandler is attached to the logger if this is not None.
+ remote_access: bool
+ if True, a RemoteAccessHandler is attached to the logger.
+ """
+ if instance.logger is None:
+ instance.logger = get_default_logger(
+ instance.id,
+ logging.INFO if not log_level else log_level,
+ None if not log_file else log_file
+ )
+
+ if remote_access and not any(isinstance(handler, RemoteAccessHandler) for handler in instance.logger.handlers):
+ instance._remote_access_loghandler = RemoteAccessHandler(
+ id='logger', maxlen=500,
+ emit_interval=1, logger=instance.logger
+ )
+ # we set logger=instance.logger so that we don't recreate one for the remote access handler
+ instance.logger.addHandler(instance._remote_access_loghandler)
+
+ if not isinstance(instance, RemoteAccessHandler):
+ for handler in instance.logger.handlers:
+ # if remote access is True or not, if such a handler is found, make it a sub thing
+ if isinstance(handler, RemoteAccessHandler):
+ instance._remote_access_loghandler = handler
+
__all__ = [
ListHandler.__name__,
RemoteAccessHandler.__name__
diff --git a/hololinked/core/meta.py b/hololinked/core/meta.py
new file mode 100644
index 00000000..b28b2eea
--- /dev/null
+++ b/hololinked/core/meta.py
@@ -0,0 +1,870 @@
+
+import copy
+import inspect
+from types import FunctionType
+import typing
+
+from ..param.parameterized import (EventResolver as ParamEventResolver, EventDispatcher as ParamEventDispatcher,
+ Parameter, Parameterized, ParameterizedMetaclass, ClassParameters,
+ edit_constant as edit_constant_parameters)
+from ..utils import getattr_without_descriptor_read
+from ..constants import JSON, JSONSerializable
+from ..serializers import Serializers
+from .actions import Action, BoundAction, action
+from .property import Property
+from .events import Event, EventPublisher, EventDispatcher
+
+
+
+class ThingMeta(ParameterizedMetaclass):
+ """
+ Metaclass for `Thing`, implements a `__post_init__()` call and instantiation of a registry for properties', actions'
+ and events' descriptor objects.
+ Accessing properties, actions and events at the class level returns the descriptor object through the `DescriptorRegistry`
+ implementation. Accessing properties, actions and events at instance level return their values (for example -
+ the value of Property `foo` being '5'). At instance level, the descriptors can be accessed through the `descriptors`
+ property of the `DescriptorRegistry`.
+ Currently `__post_init__()`, which is run after the user's `__init__()` method, properties that can be
+ loaded from a database are loaded and written.
+
+ [UML Diagram](https://docs.hololinked.dev/UML/PDF/Thing.pdf)
+ """
+ def __init__(mcs, name, bases, dict_):
+ super().__init__(name, bases, dict_)
+ mcs._create_actions_registry()
+ mcs._create_events_registry()
+
+ def __call__(mcls, *args, **kwargs):
+ instance = super().__call__(*args, **kwargs)
+ instance.__post_init__()
+ return instance
+
+ def _create_param_container(cls, cls_members: dict) -> None:
+ """
+ creates `PropertiesRegistry` instead of `param`'s own `Parameters`
+ as the default container for descriptors. All properties have definitions
+ copied from `param`.
+ """
+ cls._param_container = PropertiesRegistry(cls, cls_members)
+
+ def _create_actions_registry(cls) -> None:
+ """
+ creates `Actions` instead of `param`'s own `Parameters`
+ as the default container for descriptors. All actions have definitions
+ copied from `param`.
+ """
+ cls._actions_registry = ActionsRegistry(cls)
+
+ def _create_events_registry(cls) -> None:
+ """
+ creates `Events` instead of `param`'s own `Parameters`
+ as the default container for descriptors. All events have definitions
+ copied from `param`.
+ """
+ cls._events_registry = EventsRegistry(cls)
+
+ @property
+ def properties(cls) -> "PropertiesRegistry":
+ """
+ Container object for Property descriptors. Returns `PropertiesRegistry` instance instead of `param`'s own
+ `Parameters` instance.
+ """
+ return cls._param_container
+
+ @property
+ def actions(cls) -> "ActionsRegistry":
+ """Container object for Action descriptors"""
+ return cls._actions_registry
+
+ @property
+ def events(cls) -> "EventsRegistry":
+ """Container object for Event descriptors"""
+ return cls._events_registry
+
+
+
+class DescriptorRegistry:
+ """
+ A registry for the descriptors of a `Thing` class or `Thing` instance.
+ Provides a dictionary interface to access the descriptors under the `descriptors` attribute.
+ Each of properties, actions and events subclass from here to implement a registry of their available objects.
+
+ [UML Diagram](https://docs.hololinked.dev/UML/PDF/DescriptorRegistry.pdf)
+ """
+
+ def __init__(self, owner_cls: ThingMeta, owner_inst = None) -> None:
+ """
+ Parameters
+ ----------
+ owner_cls: ThingMeta
+ The class/subclass of the `Thing` that owns the registry.
+ owner_inst: Thing
+ The instance of the `Thing` that owns the registry, optional
+ """
+ super().__init__()
+ self.owner_cls = owner_cls
+ self.owner_inst = owner_inst
+ self.clear()
+
+
+ @property
+ def owner(self):
+ """
+ The owner of the registry - the instance of a `Thing` if a `Thing` has been instantiated
+ or the class/subclass of `Thing` when accessed as a class attribute.
+ """
+ return self.owner_inst if self.owner_inst is not None else self.owner_cls
+
+ @property
+ def _qualified_prefix(self) -> str:
+ """
+ A unique prefix for `descriptors` attribute according to the `Thing`'s subclass and instance id.
+ For internal use.
+ """
+ try:
+ return self._qualified__prefix
+ except AttributeError:
+ prefix = inspect.getfile(self.__class__) + self.__class__.__name__.lower()
+ if self.owner_inst is not None:
+ prefix += f'_{self.owner_inst.id}'
+ self._qualified__prefix = prefix
+ return prefix
+
+ @property
+ def descriptor_object(self) -> type[Property | Action | Event]:
+ """The type of descriptor object that this registry holds, i.e. `Property`, `Action` or `Event`"""
+ raise NotImplementedError("Implement descriptor_object in subclass")
+
+ @property
+ def descriptors(self) -> typing.Dict[str, type[Property | Action | Event]]:
+ """A dictionary with all the descriptors as values and their names as keys."""
+ raise NotImplementedError("Implement descriptors in subclass")
+
+ @property
+ def names(self) -> typing.KeysView[str]:
+ """The names of the descriptors objects as a dictionary key view"""
+ return self.descriptors.keys()
+
+ @property
+ def values(self) -> typing.Dict[str, typing.Any]:
+ """
+ The values contained within the descriptors after reading when accessed at instance level, otherwise,
+ the descriptor objects as dictionary when accessed at class level.
+ """
+ raise NotImplementedError("Implement values in subclass")
+
+ def clear(self) -> None:
+ """
+ Deletes the descriptors dictionary (value of the `descriptors` property) so that it can be recreated.
+ Does not delete the descriptors themselves. Call this method once if new descriptors are added to the
+ class/instance dynamically in runtime.
+ """
+ for name in ['', '_values']:
+ try:
+ delattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}{name}')
+ except AttributeError:
+ pass
+
+ def __getitem__(self, key: str) -> Property | Action | Event:
+ """Returns the descriptor object for the given key."""
+ raise NotImplementedError("Implement __getitem__ in subclass")
+
+ def __contains__(self, obj: Property | Action | Event) -> bool:
+ """Returns True if the descriptor object is in the descriptors dictionary."""
+ raise NotImplementedError("contains not implemented yet")
+
+ def __dir__(self) -> typing.List[str]:
+ """Adds descriptor object to the dir"""
+ return super().__dir__() + self.descriptors.keys() # type: ignore
+
+ def __iter__(self):
+ """Iterates over the descriptors of this object."""
+ yield from self.descriptors
+
+ def __len__(self) -> int:
+ """The number of descriptors in this object."""
+ return len(self.descriptors)
+
+ def __hash__(self) -> int:
+ return hash(self._qualified__prefix)
+
+ def __str__(self) -> int:
+ if self.owner_inst:
+ return f""
+ return f""
+
    def get_descriptors(self, recreate: bool = False) -> typing.Dict[str, Property | Action | Event]:
        """
        a dictionary with all the descriptors as values and their names as keys.

        Parameters
        ----------
        recreate: bool
            if True, the descriptors dictionary is recreated and returned, otherwise, the cached dictionary is returned.
        """
        if recreate:
            self.clear()
        try:
            # EAFP cache lookup: the cache attribute exists only after the first scan
            return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}')
        except AttributeError:
            descriptors = dict()
            # NOTE(review): inspect._getmembers is a private CPython helper; it is used here with
            # a custom getter (getattr_without_descriptor_read) presumably to avoid triggering
            # descriptor reads during the scan - confirm against the helper's definition
            for name, objekt in inspect._getmembers(
                self.owner_cls,
                lambda f: isinstance(f, self.descriptor_object),
                getattr_without_descriptor_read
            ):
                descriptors[name] = objekt
            setattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}', descriptors)
            # We cache the parameters because this method is called often,
            # and parameters are rarely added (and cannot be deleted)
            return descriptors
+
+ def get_values(self) -> typing.Dict[str, typing.Any]:
+ """
+ the values contained within the descriptors after reading when accessed at instance level, otherwise,
+ the descriptor objects as dictionary when accessed at class level.
+ For example, if a `Thing` instance's property contains a value of 5, this method will return
+ { property_name : 5 } when accessed at instance level, and { property_name : property_object } when accessed
+ at class level.
+ This method is also the getter of the `values` property.
+ """
+ if self.owner_inst is None:
+ return self.descriptors
+ try:
+ return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_values')
+ except AttributeError:
+ values = dict()
+ for name, value in self.descriptors.items():
+ values[name] = value.__get__(self.owner_inst, self.owner_cls)
+ setattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_values', values)
+ return values
+
+
def supports_only_instance_access(
        error_msg: str = "This method is only supported at instance level"
    ) -> FunctionType:
    """
    decorator to raise an error if a method is called at class level instead of instance level
    within the registry functionality.

    Parameters
    ----------
    error_msg: str
        message of the `AttributeError` raised when the registry is not bound to an instance

    Returns
    -------
    FunctionType
        a decorator that wraps a registry method
    """
    # local import: the file-level import block is outside this hunk
    from functools import wraps

    def inner(func: FunctionType) -> FunctionType:
        @wraps(func)  # preserve the wrapped method's name/docstring for introspection
        def wrapper(self: "DescriptorRegistry", *args, **kwargs):
            if self.owner_inst is None:
                # read from the attribute (as before) so the message can be overridden post-hoc
                raise AttributeError(inner._error_msg)
            return func(self, *args, **kwargs)
        return wrapper
    inner._error_msg = error_msg
    return inner
+
+
class PropertiesRegistry(DescriptorRegistry):
    """
    A `DescriptorRegistry` for properties of a `Thing` class or `Thing` instance.

    [UML Diagram](https://docs.hololinked.dev/UML/PDF/DescriptorRegistry.pdf)
    """

    def __init__(self, owner_cls: ThingMeta, owner_class_members: dict, owner_inst=None):
        """
        Parameters
        ----------
        owner_cls: ThingMeta
            class owning the properties
        owner_class_members: dict
            class members dictionary, used at class level to resolve parameter watchers;
            pass None at instance level
        owner_inst:
            instance owning the properties, None when instantiated at class level
        """
        super().__init__(owner_cls, owner_inst)
        if self.owner_inst is None and owner_class_members is not None:
            # instantiated by class
            self.event_resolver = ParamEventResolver(owner_cls=owner_cls)
            self.event_dispatcher = ParamEventDispatcher(owner_cls, self.event_resolver)
            self.event_resolver.create_unresolved_watcher_info(owner_class_members)
        else:
            # instantiated by instance - reuse the class level event resolver
            self._instance_params = {}
            self.event_resolver = self.owner_cls.properties.event_resolver
            self.event_dispatcher = ParamEventDispatcher(owner_inst, self.event_resolver)
            self.event_dispatcher.prepare_instance_dependencies()

    @property
    def descriptor_object(self) -> type[Parameter]:
        return Parameter

    @property
    def descriptors(self) -> typing.Dict[str, Parameter]:
        # at instance level, per-instance descriptors (deep copies) shadow the class level ones
        if self.owner_inst is None:
            return super().get_descriptors()
        return dict(super().get_descriptors(), **self._instance_params)

    values = property(DescriptorRegistry.get_values,
                doc=DescriptorRegistry.get_values.__doc__) # type: typing.Dict[str, Parameter | Property | typing.Any]

    def __getitem__(self, key: str) -> Property | Parameter:
        """Returns the property descriptor object for the given key."""
        return self.descriptors[key]

    def __contains__(self, value: str | Property | Parameter) -> bool:
        """Returns True if the given name or descriptor object is registered."""
        return value in self.descriptors.values() or value in self.descriptors

    @property
    def defaults(self) -> typing.Dict[str, typing.Any]:
        """default values of all properties as a dictionary with property names as keys"""
        defaults = {}
        for key, val in self.descriptors.items():
            defaults[key] = val.default
        return defaults

    @property
    def remote_objects(self) -> typing.Dict[str, Property]:
        """
        dictionary of properties that are remotely accessible (`remote=True`),
        which is also a default setting for all properties
        """
        try:
            return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_remote')
        except AttributeError:
            props = self.descriptors
            remote_props = {}
            for name, desc in props.items():
                if not isinstance(desc, Property):
                    # plain Parameters are never remote
                    continue
                if desc.is_remote:
                    remote_props[name] = desc
            setattr(
                self,
                f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_remote',
                remote_props
            )
            return remote_props

    @property
    def db_objects(self) -> typing.Dict[str, Property]:
        """
        dictionary of properties that are stored or loaded from the database
        (`db_init`, `db_persist` or `db_commit` set to True)
        """
        try:
            return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_db')
        except AttributeError:
            propdict = self.descriptors
            db_props = {}
            for name, desc in propdict.items():
                if not isinstance(desc, Property):
                    continue
                if desc.db_init or desc.db_persist or desc.db_commit:
                    db_props[name] = desc
            setattr(
                self,
                f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_db',
                db_props
            )
            return db_props

    @property
    def db_init_objects(self) -> typing.Dict[str, Property]:
        """dictionary of properties that are initialized from the database (`db_init` or `db_persist` set to True)"""
        try:
            return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_db_init')
        except AttributeError:
            propdict = self.db_objects
            db_init_props = {}
            for name, desc in propdict.items():
                if desc.db_init or desc.db_persist:
                    db_init_props[name] = desc
            setattr(
                self,
                f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_db_init',
                db_init_props
            )
            return db_init_props

    @property
    def db_commit_objects(self) -> typing.Dict[str, Property]:
        """dictionary of properties that are committed to the database (`db_commit` or `db_persist` set to True)"""
        try:
            return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_db_commit')
        except AttributeError:
            propdict = self.db_objects
            db_commit_props = {}
            for name, desc in propdict.items():
                if desc.db_commit or desc.db_persist:
                    db_commit_props[name] = desc
            setattr(
                self,
                f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_db_commit',
                db_commit_props
            )
            return db_commit_props

    @property
    def db_persisting_objects(self) -> typing.Dict[str, Property]:
        """dictionary of properties that are persisted through the database (`db_persist` set to True)"""
        try:
            return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_db_persisting')
        except AttributeError:
            propdict = self.db_objects
            db_persisting_props = {}
            for name, desc in propdict.items():
                if desc.db_persist:
                    db_persisting_props[name] = desc
            setattr(
                self,
                f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_db_persisting',
                db_persisting_props
            )
            return db_persisting_props

    def get(self, **kwargs: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]:
        """
        read properties from the object, implements WoT operations `readAllProperties` and `readMultipleProperties`

        Parameters
        ----------
        **kwargs: typing.Dict[str, typing.Any]
            - names: `List[str]`
                list of property names to be fetched
            - name: `str`
                name of the property to be fetched, along with a 'rename' for the property in the response.
                For example { 'foo_prop' : 'fooProp' } will return the property 'foo_prop' as 'fooProp' in the response.

        Returns
        -------
        typing.Dict[str, typing.Any]
            dictionary of property names and their values

        Raises
        ------
        TypeError
            if property name is not a string or requested new name is not a string
        AttributeError
            if property does not exist or is not remote accessible
        """
        data = {}
        if len(kwargs) == 0:
            # read all remote properties; at class level only class_member properties are readable
            for name, prop in self.remote_objects.items():
                if self.owner_inst is None and not prop.class_member:
                    continue
                data[name] = prop.__get__(self.owner_inst, self.owner_cls)
            return data
        elif 'names' in kwargs:
            names = kwargs.get('names')
            if not isinstance(names, (list, tuple, str)):
                raise TypeError("Specify properties to be fetched as a list, tuple or comma separated names. " +
                            f"Given type {type(names)}")
            if isinstance(names, str):
                names = names.split(',')
            # normalize to the { requested_name : rename } form handled below
            kwargs = {name: name for name in names}
        for requested_prop, rename in kwargs.items():
            if not isinstance(requested_prop, str):
                raise TypeError(f"property name must be a string. Given type {type(requested_prop)}")
            if not isinstance(rename, str):
                raise TypeError(f"requested new name must be a string. Given type {type(rename)}")
            if requested_prop not in self.descriptors:
                raise AttributeError(f"property {requested_prop} does not exist")
            if requested_prop not in self.remote_objects:
                raise AttributeError(f"property {requested_prop} is not remote accessible")
            prop = self.descriptors[requested_prop]
            if self.owner_inst is None and not prop.class_member:
                continue
            data[rename] = prop.__get__(self.owner_inst, self.owner_cls)
        return data

    def set(self, **values : typing.Dict[str, typing.Any]) -> None:
        """
        set properties whose name is specified by keys of a dictionary; implements WoT operations `writeMultipleProperties`
        or `writeAllProperties`.

        Parameters
        ----------
        values: typing.Dict[str, typing.Any]
            dictionary of property names and its new values

        Raises
        ------
        RuntimeError
            if any property could not be set; the individual errors (including
            AttributeError for unknown/non-remote properties) are collected in the notes
        """
        errors = ''
        for name, value in values.items():
            try:
                if name not in self.descriptors:
                    raise AttributeError(f"property {name} does not exist")
                if name not in self.remote_objects:
                    raise AttributeError(f"property {name} is not remote accessible")
                prop = self.descriptors[name]
                if self.owner_inst is None and not prop.class_member:
                    raise AttributeError(f"property {name} is not a class member and cannot be set at class level")
                setattr(self.owner, name, value)
            except Exception as ex:
                # best effort: collect the error and continue setting the remaining properties
                errors += f'{name}: {str(ex)}\n'
        if errors:
            ex = RuntimeError("Some properties could not be set due to errors. " +
                        "Check exception notes or server logs for more information.")
            ex.__notes__ = errors
            raise ex from None

    def add(self, name: str, config: JSON) -> None:
        """
        add a property to the object

        Parameters
        ----------
        name: str
            name of the property
        config: JSON
            configuration of the property, i.e. keyword arguments to the `__init__` method of the property class
        """
        # NOTE(review): get_type_from_name currently accepts only `name` but is called with the
        # full config - confirm the intended signature before relying on dynamic property addition
        prop = self.get_type_from_name(**config)
        setattr(self.owner_cls, name, prop)
        prop.__set_name__(self.owner_cls, name)
        if prop.deepcopy_default:
            self._deep_copy_param_descriptor(prop)
            self._deep_copy_param_default(prop)
        self.clear()

    def clear(self):
        """clear all caches of this registry, including the filtered db/remote dictionaries"""
        super().clear()
        self._instance_params = {}
        # fixed: '_db_commit' was missing from this list, so the cache behind
        # `db_commit_objects` survived a clear() and could go stale
        for attr in ['_db', '_db_init', '_db_commit', '_db_persisting', '_remote']:
            try:
                delattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}{attr}')
            except AttributeError:
                pass

    @supports_only_instance_access("database operations are only supported at instance level")
    def get_from_DB(self) -> typing.Dict[str, typing.Any]:
        """
        get all properties (i.e. their values) currently stored in the database

        Returns
        -------
        Dict[str, typing.Any]
            dictionary of property names and their values
        """
        if not hasattr(self.owner_inst, 'db_engine'):
            raise AttributeError("database engine not set, this object is not connected to a database")
        props = self.owner_inst.db_engine.get_all_properties() # type: typing.Dict
        final_list = {}
        for name, prop in props.items():
            try:
                # serializer = Serializers.for_object(self.owner_inst.id, self.owner_cls.__name__, name)
                # if name in self.db_commit_objects:
                #     continue
                final_list[name] = prop
            except Exception as ex:
                self.owner_inst.logger.error(
                    f"could not deserialize property {name} due to error - {str(ex)}, skipping this property"
                )
        return final_list

    @supports_only_instance_access("database operations are only supported at instance level")
    def load_from_DB(self):
        """
        Load and apply property values from database which have `db_init` or `db_persist` set to `True`
        """
        if not hasattr(self.owner_inst, 'db_engine'):
            # silently skip: object is simply not connected to a database
            return
            # raise AttributeError("database engine not set, this object is not connected to a database")
        missing_properties = self.owner_inst.db_engine.create_missing_properties(
            self.db_init_objects,
            get_missing_property_names=True
        )
        # read db_init and db_persist objects; constants are temporarily editable
        with edit_constant_parameters(self.owner_inst):
            for db_prop, value in self.get_from_DB().items():
                try:
                    if db_prop not in missing_properties:
                        setattr(self.owner_inst, db_prop, value) # type: ignore
                except Exception as ex:
                    self.owner_inst.logger.error(f"could not set attribute {db_prop} due to error {str(ex)}")

    @classmethod
    def get_type_from_name(cls, name: str) -> typing.Type[Property]:
        """resolve a property class from its type name; currently always `Property`"""
        return Property

    @supports_only_instance_access("additional property setup is required only for instances")
    def _setup_parameters(self, **parameters):
        """
        Initialize default and keyword parameter values.

        First, ensures that all Parameters with 'deepcopy_default=True'
        (typically used for mutable Parameters) are copied directly
        into each object, to ensure that there is an independent copy
        (to avoid surprising aliasing errors). Then sets each of the
        keyword arguments, warning when any of them are not defined as
        parameters.

        Constant Parameters can be set during calls to this method.
        """
        ## Deepcopy all 'deepcopy_default=True' parameters
        # (building a set of names first to avoid redundantly
        # instantiating a later-overridden parent class's parameter)
        param_default_values_to_deepcopy = {}
        param_descriptors_to_deepcopy = {}
        for (k, v) in self.owner_cls.properties.descriptors.items():
            if v.deepcopy_default and k != "name":
                # (avoid replacing name with the default of None)
                param_default_values_to_deepcopy[k] = v
            if v.per_instance_descriptor and k != "name":
                param_descriptors_to_deepcopy[k] = v

        for p in param_default_values_to_deepcopy.values():
            self._deep_copy_param_default(p)
        for p in param_descriptors_to_deepcopy.values():
            self._deep_copy_param_descriptor(p)

        ## keyword arg setting
        if len(parameters) > 0:
            descs = self.descriptors
            for name, val in parameters.items():
                desc = descs.get(name, None) # pylint: disable-msg=E1101
                if desc:
                    setattr(self.owner_inst, name, val)
                # Its erroneous to set a non-descriptor (& non-param-descriptor) with a value from init.
                # we dont know what that value even means, so we silently ignore

    @supports_only_instance_access("additional property setup is required only for instances")
    def _deep_copy_param_default(self, param_obj : 'Parameter') -> None:
        # deepcopy param_obj.default into self.__dict__ (or dict_ if supplied)
        # under the parameter's _internal_name (or key if supplied)
        _old = self.owner_inst.__dict__.get(param_obj._internal_name, NotImplemented)
        _old = _old if _old is not NotImplemented else param_obj.default
        new_object = copy.deepcopy(_old)
        # remember : simply setting in the dict does not activate post setter and remaining logic which is sometimes important
        self.owner_inst.__dict__[param_obj._internal_name] = new_object

    @supports_only_instance_access("additional property setup is required only for instances")
    def _deep_copy_param_descriptor(self, param_obj : Parameter):
        # per-instance descriptors live in _instance_params and shadow the class descriptor
        param_obj_copy = copy.deepcopy(param_obj)
        self._instance_params[param_obj.name] = param_obj_copy
+
+
class ActionsRegistry(DescriptorRegistry):
    """
    A `DescriptorRegistry` for actions of a `Thing` class or `Thing` instance.

    [UML Diagram](https://docs.hololinked.dev/UML/PDF/DescriptorRegistry.pdf)
    """

    @property
    def descriptor_object(self) -> type[Action]:
        return Action

    descriptors = property(DescriptorRegistry.get_descriptors) # type: typing.Dict[str, Action]

    values = property(DescriptorRegistry.get_values,
                doc=DescriptorRegistry.get_values.__doc__) # type: typing.Dict[str, Action]

    def __getitem__(self, key: str) -> Action | BoundAction:
        """Return the bound action at instance level, the plain descriptor at class level."""
        descriptor = self.descriptors[key]
        if self.owner_inst is None:
            return descriptor
        return descriptor.__get__(self.owner_inst, self.owner_cls)

    def __contains__(self, action: str | Action | BoundAction) -> bool:
        """True when `action` matches a registered descriptor object or a registered name."""
        registered = self.descriptors
        return action in registered.values() or action in registered
+
+
class EventsRegistry(DescriptorRegistry):
    """
    A `DescriptorRegistry` for events of a `Thing` class or `Thing` instance.

    [UML Diagram](https://docs.hololinked.dev/UML/PDF/DescriptorRegistry.pdf)
    """

    @property
    def descriptor_object(self):
        return Event

    descriptors = property(DescriptorRegistry.get_descriptors) # type: typing.Dict[str, Event]

    values = property(DescriptorRegistry.get_values,
                doc=DescriptorRegistry.get_values.__doc__) # type: typing.Dict[str, EventDispatcher]

    def __getitem__(self, key: str) -> Event | EventDispatcher:
        """Returns the dispatcher at instance level, the plain descriptor at class level."""
        if self.owner_inst is not None:
            return self.descriptors[key].__get__(self.owner_inst, self.owner_cls)
        return self.descriptors[key]

    def __contains__(self, event: Event) -> bool:
        """Returns True if the given name or event descriptor is registered."""
        return event in self.descriptors.values() or event in self.descriptors

    def clear(self):
        """clear all caches of this registry, including change-event and observable caches"""
        super().clear()
        for attr in ['_change_events', '_observables']:
            try:
                delattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}{attr}')
            except AttributeError:
                pass

    @property
    def change_events(self) -> typing.Dict[str, Event]:
        """dictionary of change events of observable properties"""
        try:
            return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_change_events')
        except AttributeError:
            change_events = dict()
            for name, evt in self.descriptors.items():
                if not evt._observable:
                    continue
                change_events[name] = evt
            setattr(
                self,
                f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_change_events',
                change_events
            )
            return change_events

    @property
    def observables(self) -> typing.Dict[str, Property]:
        """dictionary of all properties that are observable, i.e. that which push change events"""
        try:
            # fixed typo: was `_qualified__prefix`, which raised AttributeError and fell
            # into the except branch on every access, silently defeating this cache
            return getattr(self, f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_observables')
        except AttributeError:
            props = dict()
            for name, prop in self.owner_cls.properties.descriptors.items():
                if not isinstance(prop, Property) or not prop.observable:
                    continue
                props[name] = prop
            setattr(
                self,
                f'_{self._qualified_prefix}_{self.__class__.__name__.lower()}_observables',
                props
            )
            return props
+
+
+
class Propertized(Parameterized):
    """
    Base class providing additional functionality related to properties,
    like setting up a registry, allowing values to be set at `__init__()` etc.
    It is not meant to be subclassed directly by the end-user.

    [UML Diagram](https://docs.hololinked.dev/UML/PDF/Thing.pdf)
    """

    # There is a word called Property+ize in english dictionary
    # https://en.wiktionary.org/wiki/propertization

    id : str

    # creating name without underscore causes clash with the metaclass method
    # with same name
    def create_param_container(self, **params):
        """create the instance-level `PropertiesRegistry` and apply `**params` as initial property values"""
        self._properties_registry = PropertiesRegistry(self.__class__, None, self)
        self._properties_registry._setup_parameters(**params)
        self._param_container = self._properties_registry # backwards compatibility with param

    @property
    def properties(self) -> PropertiesRegistry:
        """container for the property descriptors of the object."""
        return self._properties_registry

    @action()
    def _get_properties(self, **kwargs) -> typing.Dict[str, typing.Any]:
        """
        read properties from the object; delegates to `PropertiesRegistry.get`,
        so `kwargs` may be empty (read all), contain `names`, or map property names to renames
        """
        return self.properties.get(**kwargs)

    @action()
    def _set_properties(self, **values : typing.Dict[str, typing.Any]) -> None:
        """
        set properties whose name is specified by keys of a dictionary

        Parameters
        ----------
        values: Dict[str, Any]
            dictionary of property names and its values
        """
        return self.properties.set(**values) # returns None

    @action()
    def _get_properties_in_db(self) -> typing.Dict[str, JSONSerializable]:
        """
        get all properties in the database

        Returns
        -------
        Dict[str, JSONSerializable]
            dictionary of property names and their values
        """
        return self.properties.get_from_DB()

    @action()
    def _add_property(self, name: str, prop: JSON) -> None:
        """
        add a property to the object

        Parameters
        ----------
        name: str
            name of the property
        prop: Property
            property object
        """
        raise NotImplementedError("this method will be implemented properly in a future release")
        # the code below is deliberately unreachable - kept as a sketch of the planned implementation
        prop = Property(**prop)
        self.properties.add(name, prop)
        self._prepare_resources()
        # instruct the clients to fetch the new resources
+
+
class RemoteInvokable:
    """
    Base class providing additional functionality related to actions,
    it is not meant to be subclassed directly by the end-user.

    [UML Diagram](https://docs.hololinked.dev/UML/PDF/Thing.pdf)
    """

    id : str

    def __init__(self):
        super().__init__()
        self.create_actions_registry()

    # named `create_...` (not `actions`) because an un-prefixed name would clash with
    # the metaclass method of the same name
    def create_actions_registry(self) -> None:
        """creates a registry for available `Actions` based on `ActionsRegistry`"""
        self._actions_registry = ActionsRegistry(self.__class__, self)

    @property
    def actions(self) -> ActionsRegistry:
        """container for the action descriptors of the object."""
        return self._actions_registry
+
+
class EventSource:
    """
    Base class to add event functionality to an object,
    it is not meant to be subclassed directly by the end-user.

    [UML Diagram](https://docs.hololinked.dev/UML/PDF/Thing.pdf)
    """

    id: str

    def __init__(self) -> None:
        self.create_events_registry()

    # named `create_...` (not `events`) because an un-prefixed name would clash with
    # the metaclass method of the same name
    def create_events_registry(self) -> None:
        """creates a registry for available `Events` based on `EventsRegistry`"""
        self._events_registry = EventsRegistry(self.__class__, self)

    @property
    def events(self) -> EventsRegistry:
        """container for the event descriptors of the object."""
        return self._events_registry

    @property
    def event_publisher(self) -> "EventPublisher":
        """
        event publishing object `EventPublisher` that owns the zmq.PUB socket, valid only after
        creating an RPC server or calling a `run()` method on the `Thing` instance.
        """
        # `rpc_server` (and its `event_publisher`) may not exist yet - treat that as "no publisher"
        try:
            if not self.rpc_server:
                return None
            return self.rpc_server.event_publisher
        except AttributeError:
            return None
+
+
+
+
+
+
\ No newline at end of file
diff --git a/hololinked/server/properties.py b/hololinked/core/properties.py
similarity index 90%
rename from hololinked/server/properties.py
rename to hololinked/core/properties.py
index 17f487da..2fcffa05 100644
--- a/hololinked/server/properties.py
+++ b/hololinked/core/properties.py
@@ -10,12 +10,11 @@
from ..param.utils import *
from ..param.exceptions import *
-from ..param.parameterized import Parameterized, dt_types, Parameter
-
+from ..param.parameterized import Parameterized, dt_types, Parameter
from ..param.parameters import (TypeConstrainedList, TypeConstrainedDict, abbreviate_paths,
TypedKeyMappingsConstrainedDict, resolve_path, concrete_descendents, named_objs)
from .property import Property
-from .constants import USE_OBJECT_NAME, HTTP_METHODS
+from ..constants import USE_OBJECT_NAME, HTTP_METHODS
GET = HTTP_METHODS.GET
PUT = HTTP_METHODS.PUT
@@ -40,9 +39,6 @@ class String(Property):
def __init__(self, default : typing.Optional[str] = "", *, regex : typing.Optional[str] = None,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -52,7 +48,7 @@ def __init__(self, default : typing.Optional[str] = "", *, regex : typing.Option
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -123,9 +119,6 @@ def __init__(self, default : typing.Optional[str] = "0.0.0.0", *, allow_ipv4 : b
allow_localhost : bool = True,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -135,7 +128,7 @@ def __init__(self, default : typing.Optional[str] = "0.0.0.0", *, allow_ipv4 : b
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -365,9 +358,6 @@ def __init__(self, default : typing.Optional[typing.Union[float, int]] = 0.0, *,
crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None,
doc : typing.Optional[str] = None, constant : bool = False, soft_bounds : typing.Optional[typing.Tuple] = None,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -377,7 +367,7 @@ def __init__(self, default : typing.Optional[typing.Union[float, int]] = 0.0, *,
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -494,10 +484,7 @@ class Integer(Number):
def __init__(self, default : typing.Optional[int] = 0, *, bounds : typing.Optional[typing.Tuple] = None,
crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None,
doc : typing.Optional[str] = None, constant : bool = False, soft_bounds : typing.Optional[typing.Tuple] = None,
- readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
+ readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -508,7 +495,7 @@ def __init__(self, default : typing.Optional[int] = 0, *, bounds : typing.Option
) -> None:
super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds, inclusive_bounds=inclusive_bounds,
soft_bounds=soft_bounds, step=step, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -527,9 +514,6 @@ class Boolean(Property):
def __init__(self, default : typing.Optional[bool] = False, *,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -539,7 +523,7 @@ def __init__(self, default : typing.Optional[bool] = False, *,
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -561,9 +545,6 @@ def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tupl
length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None,
doc : typing.Optional[str] = None, constant : bool = False, deepcopy_default : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -573,7 +554,7 @@ def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tupl
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -618,10 +599,7 @@ def __init__(self, default : typing.Any = None, *, bounds : typing.Optional[typi
length: typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None,
accept_list : bool = False, deepcopy_default : bool = False,
doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
+ readonly : bool = False, allow_None : bool = True, label : typing.Optional[str] = None,
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -632,7 +610,7 @@ def __init__(self, default : typing.Any = None, *, bounds : typing.Optional[typi
) -> None:
super().__init__(default=default, bounds=bounds, length=length, item_type=item_type,
doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -678,10 +656,7 @@ def __init__(self, default: typing.Any = None, *, bounds : typing.Optional[typin
length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None,
accept_tuple : bool = False, deepcopy_default : bool = False,
doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
+ readonly : bool = False, allow_None : bool = True, label : typing.Optional[str] = None,
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -692,7 +667,7 @@ def __init__(self, default: typing.Any = None, *, bounds : typing.Optional[typin
) -> None:
super().__init__(default=default, bounds=bounds, length=length, item_type=item_type,
doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -744,9 +719,7 @@ class Composite(Property):
def __init__(self, attribs : typing.List[typing.Union[str, Property]], *,
doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, label : typing.Optional[str] = None, URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
+ readonly : bool = False, label : typing.Optional[str] = None,
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -757,7 +730,7 @@ def __init__(self, attribs : typing.List[typing.Union[str, Property]], *,
) -> None:
kwargs.pop('allow_None')
super().__init__(None, doc=doc, constant=constant, readonly=readonly, allow_None=True,
- label=label, URL_path=URL_path, http_method=http_method, state=state,
+ label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -837,9 +810,6 @@ class Selector(SelectorBase):
def __init__(self, *, objects : typing.List[typing.Any], default : typing.Any = None, empty_default : bool = False,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -849,7 +819,7 @@ def __init__(self, *, objects : typing.List[typing.Any], default : typing.Any =
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -905,9 +875,6 @@ class ClassSelector(SelectorBase):
def __init__(self, *, class_ , default : typing.Any, isinstance : bool = True, deepcopy_default : bool = False,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -917,7 +884,7 @@ def __init__(self, *, class_ , default : typing.Any, isinstance : bool = True, d
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -985,9 +952,6 @@ class TupleSelector(Selector):
def __init__(self, *, objects : typing.List, default : typing.Any, accept_list : bool = True,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -998,7 +962,7 @@ def __init__(self, *, objects : typing.List, default : typing.Any, accept_list :
) -> None:
super().__init__(objects=objects, default=default, empty_default=True,
doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1055,9 +1019,6 @@ class Path(Property):
def __init__(self, default : typing.Any = '', *, search_paths : typing.Optional[str] = None,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1067,7 +1028,7 @@ def __init__(self, default : typing.Any = '', *, search_paths : typing.Optional[
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1168,10 +1129,7 @@ class FileSelector(Selector):
def __init__(self, default : typing.Any, *, objects : typing.List, path : str = "",
doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
+ readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1182,7 +1140,7 @@ def __init__(self, default : typing.Any, *, objects : typing.List, path : str =
) -> None:
super().__init__(default=default, objects=objects, empty_default=True,
doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1215,9 +1173,6 @@ class MultiFileSelector(FileSelector):
def __init__(self, default : typing.Any, *, path : str = "",
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1227,7 +1182,7 @@ def __init__(self, default : typing.Any, *, path : str = "",
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, objects=None, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1251,9 +1206,6 @@ def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None,
crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1264,7 +1216,7 @@ def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None,
) -> None:
super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds,
inclusive_bounds=inclusive_bounds, step=step, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1299,10 +1251,7 @@ class CalendarDate(Number):
def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None,
crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None,
doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
+ readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1313,7 +1262,7 @@ def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None,
) -> None:
super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds,
inclusive_bounds=inclusive_bounds, step=step, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1386,9 +1335,6 @@ class CSS3Color(Property):
def __init__(self, default, *, allow_named : bool = True,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1398,7 +1344,7 @@ def __init__(self, default, *, allow_named : bool = True,
precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, **kwargs
) -> None:
super().__init__(default=default, doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1433,9 +1379,6 @@ def __init__(self, default : typing.Optional[typing.Tuple] = None, *,
item_type : typing.Optional[typing.Tuple] = None, softbounds=None, inclusive_bounds=(True,True), step=None,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1449,7 +1392,7 @@ def __init__(self, default : typing.Optional[typing.Tuple] = None, *,
self.step = step
super().__init__(default=default, bounds=bounds, item_type=item_type, length=length,
doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1587,9 +1530,7 @@ class TypedList(ClassSelector):
def __init__(self, default : typing.Optional[typing.List[typing.Any]] = None, *, item_type : typing.Any = None,
deepcopy_default : bool = True, allow_None : bool = True, bounds : tuple = (0,None),
doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, label : typing.Optional[str] = None,URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
+ readonly : bool = False, label : typing.Optional[str] = None,
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1603,7 +1544,7 @@ def __init__(self, default : typing.Optional[typing.List[typing.Any]] = None, *,
skip_validate=False)
super().__init__(class_=TypeConstrainedList, default=default, isinstance=True,
doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1636,9 +1577,7 @@ class TypedDict(ClassSelector):
def __init__(self, default : typing.Optional[typing.Dict] = None, *, key_type : typing.Any = None,
item_type : typing.Any = None, deepcopy_default : bool = True, allow_None : bool = True,
bounds : tuple = (0, None), doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, label : typing.Optional[str] = None, URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
+ readonly : bool = False, label : typing.Optional[str] = None,
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1655,7 +1594,7 @@ def __init__(self, default : typing.Optional[typing.Dict] = None, *, key_type :
self.bounds = bounds
super().__init__(class_=TypeConstrainedDict, default=default, isinstance=True,
doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
@@ -1684,9 +1623,6 @@ def __init__(self, default : typing.Optional[typing.Dict[typing.Any, typing.Any]
type_mapping : typing.Dict, allow_unspecified_keys : bool = True, bounds : tuple = (0, None),
deepcopy_default : bool = True, allow_None : bool = True, doc : typing.Optional[str] = None,
constant : bool = False, readonly : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
observable : bool = False, class_member : bool = False,
@@ -1704,7 +1640,7 @@ def __init__(self, default : typing.Optional[typing.Dict[typing.Any, typing.Any]
self.bounds = bounds
super().__init__(class_=TypedKeyMappingsConstrainedDict, default=default, isinstance=True,
doc=doc, constant=constant, readonly=readonly,
- allow_None=allow_None, label=label, URL_path=URL_path, http_method=http_method, state=state,
+ allow_None=allow_None, label=label, state=state,
db_persist=db_persist, db_init=db_init, db_commit=db_commit, class_member=class_member,
observable=observable, remote=remote, fget=fget, fset=fset, fdel=fdel, fcomparator=fcomparator,
metadata=metadata, precedence=precedence, per_instance_descriptor=per_instance_descriptor,
diff --git a/hololinked/server/property.py b/hololinked/core/property.py
similarity index 50%
rename from hololinked/server/property.py
rename to hololinked/core/property.py
index a405d736..c4a0ec03 100644
--- a/hololinked/server/property.py
+++ b/hololinked/core/property.py
@@ -1,14 +1,12 @@
import typing
-from types import FunctionType, MethodType
from enum import Enum
-
-from ..param.parameterized import Parameter, ClassParameters, Parameterized, ParameterizedMetaclass
-from .utils import issubklass, pep8_to_URL_path
+from ..param.parameterized import Parameter, Parameterized, ParameterizedMetaclass
+from ..utils import issubklass
+from ..exceptions import StateMachineError
+from ..schema_validators import JSONSchemaValidator
from .dataklasses import RemoteResourceInfoValidator
-from .constants import USE_OBJECT_NAME, HTTP_METHODS
from .events import Event, EventDispatcher
-from .schema_validators import JsonSchemaValidator
@@ -40,13 +38,6 @@ class Property(Parameter):
if True, None is accepted as a valid value for this Property, in addition to any other values that are
allowed.
- URL_path: str, uses object name by default
- resource locator under which the attribute is accessible through HTTP. When not given, the variable name
- is used and underscores are replaced with dash
-
- http_method: tuple, default ("GET", "PUT", "DELETE")
- http methods for read, write and delete respectively
-
observable: bool, default False
set to True to receive change events. Supply a function if interested to evaluate on what conditions the change
event must be emitted. Default condition is a plain not-equal-to operator.
@@ -111,58 +102,16 @@ class Property(Parameter):
"""
- __slots__ = ['db_persist', 'db_init', 'db_commit', 'metadata', 'model', 'validator', '_remote_info',
- '_observable', '_observable_event_descriptor', 'fcomparator', '_old_value_internal_name']
-
- # RPC only init - no HTTP methods for those who dont like
- @typing.overload
- def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, allow_None : bool = False, observable : bool = False,
- state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
- db_persist : bool = False, db_init : bool = False, db_commit : bool = False, remote : bool = True,
- class_member : bool = False, fget : typing.Optional[typing.Callable] = None,
- fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None,
- ) -> None:
- ...
-
- @typing.overload
- def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, allow_None : bool = False, URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str]] = (HTTP_METHODS.GET, HTTP_METHODS.PUT),
- observable : bool = False, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
- db_persist : bool = False, db_init : bool = False, db_commit : bool = False, remote : bool = True,
- class_member : bool = False, fget : typing.Optional[typing.Callable] = None,
- fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None,
- metadata : typing.Optional[typing.Dict] = None
- ) -> None:
- ...
-
- @typing.overload
- def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = None, constant : bool = False,
- readonly : bool = False, allow_None : bool = False,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
- observable : bool = False, change_comparator : typing.Optional[typing.Union[FunctionType, MethodType]] = None,
- state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
- db_persist : bool = False, db_init : bool = False, db_commit : bool = False, remote : bool = True,
- class_member : bool = False, fget : typing.Optional[typing.Callable] = None,
- fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None,
- fcomparator : typing.Optional[typing.Callable] = None,
- deepcopy_default : bool = False, per_instance_descriptor : bool = False,
- precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None
- ) -> None:
- ...
+ __slots__ = ['db_persist', 'db_init', 'db_commit', 'model', 'metadata', '_execution_info_validator', 'execution_info',
+ '_observable_event_descriptor', 'fcomparator', '_old_value_internal_name', 'validator']
+
def __init__(self, default: typing.Any = None, *,
doc : typing.Optional[str] = None, constant : bool = False,
readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None,
- URL_path : str = USE_OBJECT_NAME,
- http_method : typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]] =
- (HTTP_METHODS.GET, HTTP_METHODS.PUT, HTTP_METHODS.DELETE),
state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None,
db_persist : bool = False, db_init : bool = False, db_commit : bool = False,
- observable : bool = False, class_member : bool = False, model = None,
+ observable : bool = False, model : typing.Optional["BaseModel"] = None, class_member : bool = False,
fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None,
fdel : typing.Optional[typing.Callable] = None, fcomparator : typing.Optional[typing.Callable] = None,
deepcopy_default : bool = False, per_instance_descriptor : bool = False, remote : bool = True,
@@ -172,26 +121,28 @@ def __init__(self, default: typing.Any = None, *,
label=label, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default,
class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence)
self.db_persist = db_persist
- self.db_init = db_init
- self.db_commit = db_commit
+ self.db_init = db_init
+ self.db_commit = db_commit
self.fcomparator = fcomparator
self.metadata = metadata
- self._observable = observable
- self._observable_event_descriptor : Event = None
- self._remote_info = None
+ self._observable_event_descriptor = None
+ if observable:
+ self._observable_event_descriptor = Event()
+ self._execution_info_validator = None
+ self.execution_info = None # typing.Optional[RemoteResource]
if remote:
- self._remote_info = RemoteResourceInfoValidator(
- http_method=http_method,
- URL_path=URL_path,
+ self._execution_info_validator = RemoteResourceInfoValidator(
state=state,
- isproperty=True
+ isproperty=True,
+ obj=self
)
+ self.execution_info = self._execution_info_validator # TODO: use dataclass or remove this attribute
self.model = None
self.validator = None
if model:
if isinstance(model, dict):
self.model = model
- self.validator = JsonSchemaValidator(model)
+ self.validator = JSONSchemaValidator(model).validate
else:
self.model = wrap_plain_types_in_rootmodel(model) # type: BaseModel
self.validator = self.model.model_validate
@@ -199,42 +150,37 @@ def __init__(self, default: typing.Any = None, *,
def __set_name__(self, owner: typing.Any, attrib_name: str) -> None:
super().__set_name__(owner, attrib_name)
- self._old_value_internal_name = f'{self._internal_name}_old_value'
- if self._remote_info is not None:
- if self._remote_info.URL_path == USE_OBJECT_NAME:
- self._remote_info.URL_path = f'/{pep8_to_URL_path(self.name)}'
- elif not self._remote_info.URL_path.startswith('/'):
- raise ValueError(f"URL_path should start with '/', please add '/' before '{self._remote_info.URL_path}'")
- self._remote_info.obj_name = self.name
- if self._observable:
+ if self._execution_info_validator:
+ self._execution_info_validator.obj_name = attrib_name
+ if self._observable_event_descriptor:
_observable_event_name = f'{self.name}_change_event'
- # This is a descriptor object, so we need to set it on the owner class
- self._observable_event_descriptor = Event(
- friendly_name=_observable_event_name,
- URL_path=f'{self._remote_info.URL_path}/change-event',
- doc=f"change event for {self.name}"
- ) # type: Event
+ self._old_value_internal_name = f'{self._internal_name}_old_value'
+ self._observable_event_descriptor.doc=f"change event for {self.name}"
+ self._observable_event_descriptor._observable = True
self._observable_event_descriptor.__set_name__(owner, _observable_event_name)
+ # This is a descriptor object, so we need to set it on the owner class
setattr(owner, _observable_event_name, self._observable_event_descriptor)
def __get__(self, obj: Parameterized, objtype: ParameterizedMetaclass) -> typing.Any:
read_value = super().__get__(obj, objtype)
- self._push_change_event_if_needed(obj, read_value)
+ self.push_change_event(obj, read_value)
return read_value
-
+
- def _push_change_event_if_needed(self, obj, value : typing.Any) -> None:
+ def push_change_event(self, obj, value : typing.Any) -> None:
"""
Pushes change event both on read and write if an event publisher object is available
on the owning Thing.
"""
- if self._observable and obj.event_publisher:
- event_dispatcher = getattr(obj, self._observable_event_descriptor._obj_name, None) # type: EventDispatcher
+ if obj is None:
+ return
+ if self._observable_event_descriptor and obj.event_publisher:
+ event_dispatcher = getattr(obj, self._observable_event_descriptor.name, None) # type: EventDispatcher
old_value = obj.__dict__.get(self._old_value_internal_name, NotImplemented)
obj.__dict__[self._old_value_internal_name] = value
if self.fcomparator:
- if issubklass(self.fcomparator):
+ if issubklass(self.fcomparator, classmethod):
if not self.fcomparator(self.owner, old_value, value):
return
elif not self.fcomparator(obj, old_value, value):
@@ -247,23 +193,37 @@ def _push_change_event_if_needed(self, obj, value : typing.Any) -> None:
def validate_and_adapt(self, value) -> typing.Any:
if value is None:
if self.allow_None:
- return value
+ return
else:
raise ValueError(f"Property {self.name} does not allow None values")
if self.model:
if isinstance(self.model, dict):
- self.validator.validate(value)
- elif issubklass(self.model, BaseModel):
self.validator(value)
+ elif issubklass(self.model, BaseModel):
+ value = self.model(**value)
+ elif issubklass(self.model, RootModel):
+ value = self.model(value)
return super().validate_and_adapt(value)
+ def external_set(self, obj: Parameterized, value : typing.Any) -> None:
+ """
+ Set the value of the property from an external source, e.g. a remote client.
+ """
+ if self.execution_info.state is None or (hasattr(obj, 'state_machine') and
+ obj.state_machine.current_state in self.execution_info.state):
+ return self.__set__(obj, value)
+ else:
+ raise StateMachineError("Thing {} is in `{}` state, however attribute can be written only in `{}` state".format(
+ obj.id, obj.state_machine.current_state, self.execution_info.state))
+
+
def _post_value_set(self, obj, value : typing.Any) -> None:
if (self.db_persist or self.db_commit) and hasattr(obj, 'db_engine'):
from .thing import Thing
assert isinstance(obj, Thing), f"database property {self.name} bound to a non Thing, currently not supported"
obj.db_engine.set_property(self, value)
- self._push_change_event_if_needed(obj, value)
+ self.push_change_event(obj, value)
return super()._post_value_set(obj, value)
@@ -275,83 +235,25 @@ def comparator(self, func : typing.Callable) -> typing.Callable:
self.fcomparator = func
return func
-
+ @property
+ def is_remote(self):
+ """Returns False if the property is not remotely accessible, i.e. it is not a RemoteResource."""
+ return self._execution_info_validator is not None
-__property_info__ = [
- 'allow_None' , 'class_member', 'db_init', 'db_persist',
- 'db_commit', 'deepcopy_default', 'per_instance_descriptor',
- 'state', 'precedence', 'constant', 'default'
- # 'scada_info', 'property_type' # descriptor related info is also necessary
- ]
-
-
-class ClassProperties(ClassParameters):
- """
- Object that holds the namespace and implementation of Parameterized methods as well as any state that is not
- in __slots__ or the Properties themselves.
- Exists at metaclass level (instantiated by the metaclass). Contains state specific to the class.
- """
-
- @property
- def db_persisting_objects(self):
- try:
- return getattr(self.owner_cls, f'_{self.owner_cls.__name__}_db_persisting_remote_params')
- except AttributeError:
- paramdict = self.remote_objects
- db_persisting_remote_params = {}
- for name, desc in paramdict.items():
- if desc.db_persist:
- db_persisting_remote_params[name] = desc
- setattr(self.owner_cls, f'_{self.owner_cls.__name__}_db_persisting_remote_params', db_persisting_remote_params)
- return getattr(self.owner_cls, f'_{self.owner_cls.__name__}_db_persisting_remote_params')
-
@property
- def db_init_objects(self) -> typing.Dict[str, Property]:
- try:
- return getattr(self.owner_cls, f'_{self.owner_cls.__name__}_db_init_remote_params')
- except AttributeError:
- paramdict = self.remote_objects
- init_load_params = {}
- for name, desc in paramdict.items():
- if desc.db_init or desc.db_persist:
- init_load_params[name] = desc
- setattr(self.owner_cls, f'_{self.owner_cls.__name__}_db_init_remote_params', init_load_params)
- return getattr(self.owner_cls, f'_{self.owner_cls.__name__}_db_init_remote_params')
+ def observable(self) -> bool:
+ """Returns True if the property is observable, i.e. it has an event descriptor."""
+ return self._observable_event_descriptor is not None
+
+ def to_affordance(self, owner_inst = None):
+ from ..td import PropertyAffordance
+ return PropertyAffordance.generate(self, owner_inst or self.owner)
- @property
- def remote_objects(self) -> typing.Dict[str, Property]:
- try:
- return getattr(self.owner_cls, f'_{self.owner_cls.__name__}_remote_params')
- except AttributeError:
- paramdict = super().descriptors
- remote_params = {}
- for name, desc in paramdict.items():
- if isinstance(desc, Property):
- remote_params[name] = desc
- setattr(self.owner_cls, f'_{self.owner_cls.__name__}_remote_params', remote_params)
- return getattr(self.owner_cls, f'_{self.owner_cls.__name__}_remote_params')
-
- def webgui_info(self, for_remote_params : typing.Union[Property, typing.Dict[str, Property], None] = None):
- info = {}
- if isinstance(for_remote_params, dict):
- objects = for_remote_params
- elif isinstance(for_remote_params, Property):
- objects = { for_remote_params.name : for_remote_params }
- else:
- objects = self.remote_objects
- for param in objects.values():
- state = param.__getstate__()
- info[param.name] = dict(
- python_type = param.__class__.__name__,
- )
- for field in __property_info__:
- info[param.name][field] = state.get(field, None)
- return info
-
+
try:
from pydantic import BaseModel, RootModel, create_model
- def wrap_plain_types_in_rootmodel(model : type) -> type["BaseModel"]:
+ def wrap_plain_types_in_rootmodel(model: type) -> type[BaseModel] | type[RootModel]:
"""
Ensure a type is a subclass of BaseModel.
@@ -360,13 +262,13 @@ def wrap_plain_types_in_rootmodel(model : type) -> type["BaseModel"]:
In the future, we may explicitly check that the argument is a type
and not a model instance.
"""
- try: # This needs to be a `try` as basic types are not classes
- assert issubclass(model, BaseModel)
+ if issubklass(model, BaseModel):
return model
- except (TypeError, AssertionError):
- return create_model(f"{model!r}", root=(model, ...), __base__=RootModel)
- except NameError:
- raise ImportError("pydantic is not installed, please install it to use this feature") from None
+ return create_model(
+ f"{model!r}",
+ root=(model, ...),
+ __base__=RootModel
+ )
except ImportError:
def wrap_plain_types_in_rootmodel(model : type) -> type:
raise ImportError("pydantic is not installed, please install it to use this feature") from None
diff --git a/hololinked/server/state_machine.py b/hololinked/core/state_machine.py
similarity index 50%
rename from hololinked/server/state_machine.py
rename to hololinked/core/state_machine.py
index d0ba1a7c..5f9cb660 100644
--- a/hololinked/server/state_machine.py
+++ b/hololinked/core/state_machine.py
@@ -1,23 +1,24 @@
import typing
-import inspect
from types import FunctionType, MethodType
from enum import EnumMeta, Enum, StrEnum
-from ..param.parameterized import Parameterized, edit_constant
-from .utils import getattr_without_descriptor_read
-from .exceptions import StateMachineError
-from .dataklasses import RemoteResourceInfoValidator
+from ..param import edit_constant
+from ..exceptions import StateMachineError
from .property import Property
from .properties import ClassSelector, TypedDict, Boolean
-
+from .thing import Thing
+from .meta import ThingMeta
+from .actions import Action
class StateMachine:
"""
- A container class for state machine related information. Each ``Thing`` class can only have one state machine
- instantiated in a reserved class-level attribute ``state_machine``. The ``state`` attribute defined at the ``Thing``
- can be subscribed for state change events from this state machine.
+ A finite state machine to constrain property and action execution. Each `Thing` class can only have one state machine
+ instantiated in a reserved class-level attribute named `state_machine`. Other instantiations are not respected.
+ The `state` attribute defined as a `Thing`'s property reflects the current state of the state machine and
+ can be subscribed for state change events. When `state_machine` is accessed by a `Thing` instance,
+ a `BoundFSM` object is returned.
"""
initial_state = ClassSelector(default=None, allow_None=True, constant=True, class_=(Enum, str),
doc="initial state of the machine") # type: typing.Union[Enum, str]
@@ -25,26 +26,27 @@ class StateMachine:
doc="list/enum of allowed states") # type: typing.Union[EnumMeta, tuple, list]
on_enter = TypedDict(default=None, allow_None=True, key_type=str,
doc="""callbacks to execute when a certain state is entered;
- specfied as map with state as keys and callbacks as list""") # typing.Dict[str, typing.List[typing.Callable]]
+ specfied as map with state as keys and callbacks as list""") # type: typing.Dict[str, typing.List[typing.Callable]]
on_exit = TypedDict(default=None, allow_None=True, key_type=str,
doc="""callbacks to execute when certain state is exited;
- specfied as map with state as keys and callbacks as list""") # typing.Dict[str, typing.List[typing.Callable]]
+ specfied as map with state as keys and callbacks as list""") # type: typing.Dict[str, typing.List[typing.Callable]]
machine = TypedDict(default=None, allow_None=True, item_type=(list, tuple), key_type=str, # i.e. its like JSON
- doc="the machine specification with state as key and objects as list") # typing.Dict[str, typing.List[typing.Callable, Property]]
+ doc="the machine specification with state as key and objects as list") # type: typing.Dict[str, typing.List[typing.Callable, Property]]
valid = Boolean(default=False, readonly=True, fget=lambda self: self._valid,
doc="internally computed, True if states, initial_states and the machine is valid")
def __init__(self,
- states : typing.Union[EnumMeta, typing.List[str], typing.Tuple[str]], *,
- initial_state : typing.Union[StrEnum, str], push_state_change_event : bool = True,
- on_enter : typing.Dict[str, typing.Union[typing.List[typing.Callable], typing.Callable]] = {},
- on_exit : typing.Dict[str, typing.Union[typing.List[typing.Callable], typing.Callable]] = {},
- **machine : typing.Dict[str, typing.Union[typing.Callable, Property]]
+ states: EnumMeta | typing.List[str] | typing.Tuple[str], *,
+ initial_state: StrEnum | str,
+ push_state_change_event : bool = True,
+ on_enter: typing.Dict[str, typing.List[typing.Callable] | typing.Callable] = None,
+ on_exit: typing.Dict[str, typing.List[typing.Callable] | typing.Callable] = None,
+ **machine: typing.Dict[str, typing.Callable | Property]
) -> None:
"""
Parameters
----------
- states: Enum
+ states: EnumMeta | List[str] | Tuple[str]
enumeration of states
initial_state: str
initial state of machine
@@ -61,7 +63,8 @@ def __init__(self,
directly pass the state name as an argument along with the methods/properties which are allowed to execute
in that state
"""
- self._valid = False
+ self._valid = False#
+ self.name = None
self.on_enter = on_enter
self.on_exit = on_exit
# None cannot be passed in, but constant is necessary.
@@ -69,50 +72,57 @@ def __init__(self,
self.initial_state = initial_state
self.machine = machine
self.push_state_change_event = push_state_change_event
- # if :
- # self.state_change_event = Event('state-change')
+
+ def __set_name__(self, owner: ThingMeta, name: str) -> None:
+ self.name = name
+ self.owner = owner
+
+ def validate(self, owner: Thing) -> None:
+ # cannot merge this with __set_name__ because descriptor objects are not ready at that time.
+ # reason - metaclass __init__ is called after __set_name__ of descriptors, therefore the new "proper" descriptor
+ # registries are available only after that. Until then only the inherited descriptor registries are available,
+ # which do not correctly account the subclass's objects.
- def _prepare(self, owner : Parameterized) -> None:
if self.states is None and self.initial_state is None:
self._valid = False
- self._state = None
return
elif self.initial_state not in self.states:
raise AttributeError(f"specified initial state {self.initial_state} not in Enum of states {self.states}.")
- self._state = self._get_machine_compliant_state(self.initial_state)
- self.owner = owner
- owner_properties = owner.parameters.descriptors.values() # same as owner.properties.descriptors.values()
- owner_methods = [obj[0] for obj in inspect._getmembers(owner, inspect.ismethod, getattr_without_descriptor_read)]
+ # owner._state_machine_state = self._get_machine_compliant_state(self.initial_state)
+ owner_properties = owner.properties.get_descriptors(recreate=True).values()
+ owner_methods = owner.actions.get_descriptors(recreate=True).values()
if isinstance(self.states, list):
- self.__class__.states.constant = False
- self.states = tuple(self.states) # freeze the list of states
- self.__class__.states.constant = True
+ with edit_constant(self.__class__.states): # type: ignore
+ self.states = tuple(self.states) # freeze the list of states
# first validate machine
for state, objects in self.machine.items():
if state in self:
for resource in objects:
- if hasattr(resource, '_remote_info'):
- assert isinstance(resource._remote_info, RemoteResourceInfoValidator) # type definition
- if resource._remote_info.isaction and resource._remote_info.obj_name not in owner_methods:
+ if isinstance(resource, Action):
+ if resource not in owner_methods:
raise AttributeError("Given object {} for state machine does not belong to class {}".format(
resource, owner))
- if resource._remote_info.isproperty and resource not in owner_properties:
- raise AttributeError("Given object {} - {} for state machine does not belong to class {}".format(
- resource.name, resource, owner))
- if resource._remote_info.state is None:
- resource._remote_info.state = self._get_machine_compliant_state(state)
- else:
- resource._remote_info.state = resource._remote_info.state + (self._get_machine_compliant_state(state), )
+ elif isinstance(resource, Property):
+ if resource not in owner_properties:
+ raise AttributeError("Given object {} for state machine does not belong to class {}".format(
+ resource, owner))
+ continue # for now
else:
raise AttributeError(f"Object {resource} was not made remotely accessible," +
" use state machine with properties and actions only.")
+ if resource.execution_info.state is None:
+ resource.execution_info.state = self._get_machine_compliant_state(state)
+ else:
+ resource.execution_info.state = resource._execution_info.state + (self._get_machine_compliant_state(state), )
else:
- raise StateMachineError("Given state {} not in states Enum {}".format(state, self.states.__members__))
+ raise StateMachineError("Given state {} not in allowed states ({})".format(state, self.states.__members__))
# then the callbacks
+ if self.on_enter is None:
+ self.on_enter = {}
for state, objects in self.on_enter.items():
if isinstance(objects, list):
self.on_enter[state] = tuple(objects)
@@ -122,6 +132,8 @@ def _prepare(self, owner : Parameterized) -> None:
if not isinstance(obj, (FunctionType, MethodType)):
raise TypeError(f"on_enter accept only methods. Given type {type(obj)}.")
+ if self.on_exit is None:
+ self.on_exit = {}
for state, objects in self.on_exit.items():
if isinstance(objects, list):
self.on_exit[state] = tuple(objects) # type: ignore
@@ -131,14 +143,22 @@ def _prepare(self, owner : Parameterized) -> None:
if not isinstance(obj, (FunctionType, MethodType)):
raise TypeError(f"on_enter accept only methods. Given type {type(obj)}.")
self._valid = True
-
- def __contains__(self, state : typing.Union[str, StrEnum]):
+
+ def __get__(self, instance, owner) -> "BoundFSM":
+ if instance is None:
+ return self
+ return BoundFSM(instance, self)
+
+ def __set__(self, instance, value) -> None:
+ raise AttributeError("Cannot set state machine directly. It is a class level attribute and can be defined only once.")
+
+ def __contains__(self, state: typing.Union[str, StrEnum]):
if isinstance(self.states, EnumMeta) and state in self.states.__members__:
return True
elif isinstance(self.states, tuple) and state in self.states:
return True
return False
-
+
def _get_machine_compliant_state(self, state) -> typing.Union[StrEnum, str]:
"""
In case of not using StrEnum or iterable of str,
@@ -148,9 +168,35 @@ def _get_machine_compliant_state(self, state) -> typing.Union[StrEnum, str]:
return state
if isinstance(state, Enum):
return state.name
- raise TypeError(f"cannot comply state to a string : {state} which is of type {type(state)}.")
+ raise TypeError(f"cannot comply state to a string: {state} which is of type {type(state)}. owner - {self.owner}.")
+
+
+ def contains_object(self, object: typing.Union[Property, typing.Callable]) -> bool:
+ """
+ returns True if specified object is found in any of the state machine states.
+ Supply unbound method for checking methods, as state machine is specified at class level
+ when the methods are unbound.
+ """
+ for objects in self.machine.values():
+ if object in objects:
+ return True
+ return False
+
+class BoundFSM:
+ """
+ A FSM bound to a `Thing` instance, returned when accessed as an instance attribute (`self.state_machine`).
+ There is no need to instantiate this class directly.
+ """
+
+ def __init__(self, owner: Thing, state_machine: StateMachine) -> None:
+ self.descriptor = state_machine
+ self.push_state_change_event = state_machine.push_state_change_event
+ self.owner = owner
+ # self.owner._state_machine_state = state_machine.initial_state
+ # self.state_machine._prepare(owner)
+
def get_state(self) -> typing.Union[str, StrEnum, None]:
"""
return the current state. one can also access the property `current state`.
@@ -159,10 +205,16 @@ def get_state(self) -> typing.Union[str, StrEnum, None]:
-------
current state: str
"""
- return self._state
+ try:
+ return self.owner._state_machine_state
+ except AttributeError:
+ return self.initial_state
- def set_state(self, value : typing.Union[str, StrEnum, Enum], push_event : bool = True,
- skip_callbacks : bool = False) -> None:
+ def set_state(self,
+ value : typing.Union[str, StrEnum, Enum],
+ push_event : bool = True,
+ skip_callbacks : bool = False
+ ) -> None:
"""
set state of state machine. Also triggers state change callbacks if skip_callbacks=False and pushes a state
change event when push_event=True. One can also set state using '=' operator of `current_state` property in which case
@@ -176,8 +228,9 @@ def set_state(self, value : typing.Union[str, StrEnum, Enum], push_event : bool
"""
if value in self.states:
- previous_state = self._state
- self._state = self._get_machine_compliant_state(value)
+ previous_state = self.current_state
+ next_state = self.descriptor._get_machine_compliant_state(value)
+ self.owner._state_machine_state = next_state
if push_event and self.push_state_change_event and hasattr(self.owner, 'event_publisher'):
self.owner.state # just acces to trigger the observable event
if skip_callbacks:
@@ -185,8 +238,8 @@ def set_state(self, value : typing.Union[str, StrEnum, Enum], push_event : bool
if previous_state in self.on_exit:
for func in self.on_exit[previous_state]:
func(self.owner)
- if self._state in self.on_enter:
- for func in self.on_enter[self._state]:
+ if next_state in self.on_enter:
+ for func in self.on_enter[next_state]:
func(self.owner)
else:
raise ValueError("given state '{}' not in set of allowed states : {}.".format(value, self.states))
@@ -194,15 +247,66 @@ def set_state(self, value : typing.Union[str, StrEnum, Enum], push_event : bool
current_state = property(get_state, set_state, None,
doc = """read and write current state of the state machine""")
- def has_object(self, object : typing.Union[Property, typing.Callable]) -> bool:
+ def contains_object(self, object: typing.Union[Property, typing.Callable]) -> bool:
"""
returns True if specified object is found in any of the state machine states.
Supply unbound method for checking methods, as state machine is specified at class level
when the methods are unbound.
"""
- for objects in self.machine.values():
- if object in objects:
- return True
- return False
+ return self.descriptor.contains_object(object)
+
+ def __hash__(self):
+ return hash(self.owner.id + (str(state) for state in self.states) + str(self.initial_state) + self.owner.__class__.__name__)
+
+ def __str__(self):
+ return f"StateMachine(owner={self.owner.__class__.__name__} id={self.owner.id} initial_state={self.initial_state}, states={self.states})"
+
+ def __eq__(self, other) -> bool:
+ if not isinstance(other, StateMachine):
+ return False
+ return (
+ self.states == other.states and
+ self.initial_state == other.initial_state and
+ self.owner.__class__ == other.owner.__class__ and
+ self.owner.id == other.owner.id
+ )
+
+ def __contains__(self, state: typing.Union[str, StrEnum]) -> bool:
+ return state in self.descriptor
+ @property
+ def initial_state(self):
+ """initial state of the machine"""
+ return self.descriptor.initial_state
+
+ @property
+ def states(self):
+ """list of allowed states"""
+ return self.descriptor.states
+
+ @property
+ def on_enter(self):
+ """callbacks to execute when a certain state is entered"""
+ return self.descriptor.on_enter
+
+ @property
+ def on_exit(self):
+ """callbacks to execute when certain state is exited"""
+ return self.descriptor.on_exit
+ @property
+ def machine(self):
+ """the machine specification with state as key and objects as list"""
+ return self.descriptor.machine
+
+
+
+def prepare_object_FSM(instance: Thing) -> None:
+ """
+ prepare state machine attached to thing class
+ """
+ assert isinstance(instance, Thing), "state machine can only be attached to a Thing class."
+ cls = instance.__class__
+ if cls.state_machine and isinstance(cls.state_machine, StateMachine):
+ cls.state_machine.validate(instance)
+ instance.logger.debug("setup state machine")
\ No newline at end of file
diff --git a/hololinked/core/thing.py b/hololinked/core/thing.py
new file mode 100644
index 00000000..f559ca44
--- /dev/null
+++ b/hololinked/core/thing.py
@@ -0,0 +1,405 @@
+import logging
+import inspect
+import threading
+import ssl
+import time
+import typing
+
+from ..constants import JSON, ZMQ_TRANSPORTS
+from ..utils import *
+from ..exceptions import *
+from ..serializers import Serializers, BaseSerializer, JSONSerializer
+from ..server.server import BaseProtocolServer
+from .dataklasses import build_our_temp_TD
+from .properties import String, ClassSelector
+from .property import Property
+from .actions import BoundAction, action
+from .events import EventDispatcher
+from .meta import ThingMeta, Propertized, RemoteInvokable, EventSource
+
+
+
+class Thing(Propertized, RemoteInvokable, EventSource, metaclass=ThingMeta):
+ """
+ Subclass from here to expose hardware or python objects on the network. Remotely accessible members of a `Thing` are
+ segragated into properties, actions & events. Utilize properties for data that can be read and written,
+ actions to instruct the object to perform tasks and events to get notified of any relevant information. State Machines
+ can be used to contrain operations on properties and actions.
+
+ [UML Diagram](https://docs.hololinked.dev/UML/PDF/Thing.pdf)
+ """
+
+ # local properties
+ id = String(default=None, regex=r'[A-Za-z]+[A-Za-z_0-9\-\/]*', constant=True, remote=False,
+ doc="""String identifier of the instance. For an interconnected system of hardware,
+ IDs are recommended to be unique. This value is used for many operations,
+ for example - creating zmq socket address, tables in databases, and to identify the instance
+ in the HTTP Server - (http(s)://{domain and sub domain}/{id}).""") # type: str
+
+ logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, remote=False,
+ doc="""logging.Logger instance to track log messages. Default logger with a IO-stream handler
+ and network accessible handler is created if none supplied.""") # type: logging.Logger
+
+ state_machine = None # type: typing.Optional["StateMachine"]
+
+ # remote properties
+ state = String(default=None, allow_None=True, readonly=True, observable=True,
+ fget=lambda self: self.state_machine.current_state if self.state_machine else None,
+ doc="""current state machine's state if state machine present, `None` indicates absence of state machine.
+ State machine returned state is always a string even if specified as an Enum in the state machine.""") #type: typing.Optional[str]
+
+ # object_info = Property(doc="contains information about this object like the class name, script location etc.") # type: ThingInformation
+
+
+ def __new__(cls, *args, **kwargs):
+ obj = super().__new__(cls)
+ # defines some internal fixed attributes. attributes created by us that require no validation but
+ # cannot be modified are called _internal_fixed_attributes
+ obj._internal_fixed_attributes = ['_internal_fixed_attributes', '_owners']
+ return obj
+
+
+ def __init__(self, *,
+ id: str,
+ logger: typing.Optional[logging.Logger] = None,
+ serializer: typing.Optional[BaseSerializer | JSONSerializer] = None,
+ **kwargs: typing.Dict[str, typing.Any]
+ ) -> None:
+ """
+ Parameters
+ ----------
+ id: str
+ String identifier of the instance. For an interconnected system of hardware,
+ IDs are recommended to be unique. This value is used for many operations, for example -
+ creating zmq socket address, tables in databases, and to identify the instance in a
+ HTTP Server - (http(s)://{domain and sub domain}/{id}).
+ logger: logging.Logger, optional
+ logging.Logger instance to track log messages. Default logger with an IO-stream handler
+ and network accessible handler is created if None supplied.
+ serializer: BaseSerializer | JSONSerializer, optional
+ Serializer to be used for serializing and deserializing data - preferred is a JSON Serializer.
+ If not supplied, a `msgspec` based JSON Serializer is used.
+ **kwargs: typing.Dict[str, Any]
+ - remote_accessible_logger: `bool`, Default False.
+ if False, network accessible handler is not attached to the logger. `remote_accessible_logger` can also be set as a
+ class attribute.
+ - use_default_db: `bool`, Default False.
+ if True, default SQLite database is created where properties can be stored and loaded. There is no need to supply
+ any database credentials. `use_default_db` value can also be set as a class attribute.
+ - db_config_file: `str`, optional.
+ if not using a default database, supply a JSON configuration file to create a database connection. Check documentation
+ of `hololinked.core.database`.
+ - use_json_file: bool, Default False
+ if True, a JSON file will be used as the property storage instead of a database. This value can also be
+ set as a class attribute.
+ - json_filename: str, optional
+ If using JSON storage, this filename is used to persist property values. If not provided, a default filename
+ is generated based on the instance name.
+ """
+ Propertized.__init__(self, id=id, logger=logger, **kwargs)
+ RemoteInvokable.__init__(self)
+ EventSource.__init__(self)
+ if self.id.startswith('/'):
+ self.id = self.id[1:]
+ self.logger.info("removed leading '/' from id")
+ if serializer is not None:
+ Serializers.register_for_thing_instance(self.id, serializer)
+
+ from .logger import prepare_object_logger
+ from .state_machine import prepare_object_FSM
+ from ..storage import prepare_object_storage
+
+ prepare_object_logger(
+ instance=self,
+ log_level=kwargs.get('log_level', None),
+ log_file=kwargs.get('log_file', None),
+ remote_access=kwargs.get(
+ 'remote_accessible_logger',
+ self.__class__.remote_accessible_logger if hasattr(
+ self.__class__, 'remote_accessible_logger') else False
+ )
+ )
+ prepare_object_FSM(self)
+ prepare_object_storage(self, **kwargs) # use_default_db, db_config_file, use_json_file, json_filename
+
+ self._qualified_id = self.id # filler for now - TODO
+ # thing._qualified_id = f'{self._qualified_id}/{thing.id}'
+
+
+ def __post_init__(self):
+ from .zmq.rpc_server import RPCServer
+ from ..server.zmq import ZMQServer
+ from .logger import RemoteAccessHandler
+ from ..storage.database import prepare_object_database, ThingDB
+ # Type definitions
+ self.rpc_server = None # type: typing.Optional[RPCServer | ZMQServer]
+ self.db_engine: typing.Optional[ThingDB]
+ self._owners = None if not hasattr(self, '_owners') else self._owners # type: typing.Optional[typing.List[Thing]]
+ self._remote_access_loghandler: typing.Optional[RemoteAccessHandler]
+ self._internal_fixed_attributes: typing.List[str]
+ self._qualified_id: str
+ self._state_machine_state: str
+ # database operations
+ self.properties.load_from_DB()
+ # object is ready
+ self.logger.info(f"initialialised Thing class {self.__class__.__name__} with id {self.id}")
+
+
+ def __setattr__(self, __name: str, __value: typing.Any) -> None:
+ if __name == '_internal_fixed_attributes' or __name in self._internal_fixed_attributes:
+ # order of 'or' operation for above 'if' matters
+ if not hasattr(self, __name) or getattr(self, __name, None) is None:
+ # allow setting of fixed attributes once
+ super().__setattr__(__name, __value)
+ else:
+ raise AttributeError(f"Attempted to set {__name} more than once. " +
+ "Cannot assign a value to this variable after creation.")
+ else:
+ super().__setattr__(__name, __value)
+
+
+ @property
+ def sub_things(self) -> typing.Dict[str, "Thing"]:
+ """other `Thing`'s that are composed within this `Thing`."""
+ things = dict()
+ for name, subthing in inspect._getmembers(
+ self,
+ lambda obj: isinstance(obj, Thing),
+ getattr_without_descriptor_read
+ ):
+ if not hasattr(subthing, '_owners') or subthing._owners is None:
+ subthing._owners = []
+ if self not in subthing._owners:
+ subthing._owners.append(self)
+ things[name] = subthing
+ return things
+
+
+ @action()
+ def get_thing_model(self, ignore_errors: bool = False):
+ """
+ generate the [Thing Model](https://www.w3.org/TR/wot-thing-description11/#introduction-tm) of the object.
+ The model is a JSON that describes the object's properties, actions, events and their metadata, without the
+ protocol information. The model can be used by a client to understand the object's capabilities.
+
+ Parameters
+ ----------
+ ignore_errors: bool, optional, Default False
+ if True, offending interaction affordances will be removed from the JSON.
+ This is useful to build partial but always working ThingModel.
+
+ Returns
+ -------
+ hololinked.td.ThingModel
+ represented as an object in python, gets automatically serialized to JSON when pushed out of the socket.
+ """
+ # allow_loose_schema: bool, optional, Default False
+ # Experimental properties, actions or events for which schema was not given will be supplied with a suitable
+ # value for node-wot to ignore validation or claim the accessed value for complaint with the schema.
+ # In other words, schema validation will always pass.
+ from ..td.tm import ThingModel
+ return ThingModel(
+ instance=self,
+ ignore_errors=ignore_errors
+ ).produce()
+
+ thing_model = property(get_thing_model, doc=get_thing_model.__doc__)
+
+
+ @action()
+ def get_our_thing_model(self, ignore_errors: bool = False) -> JSON:
+ """
+ Certain customizations to the Thing Model to facilitate features that are not part of the standard yet.
+
+ Parameters
+ ----------
+ ignore_errors: bool, optional, Default False
+ if True, offending interaction affordances will be removed from the JSON.
+ This is useful to build partial but always working ThingModel.
+
+ Returns
+ -------
+ JSON
+ the customized Thing Description/Model produced by `build_our_temp_TD`
+ """
+ return build_our_temp_TD(self, ignore_errors=ignore_errors)
+
+
+ @forkable
+ def run_with_zmq_server(self,
+ transports: typing.Sequence[ZMQ_TRANSPORTS] | ZMQ_TRANSPORTS = ZMQ_TRANSPORTS.IPC,
+ forked: bool = False, # used by decorator
+ # expose_eventloop : bool = False,
+ **kwargs: typing.Dict[str, typing.Any]
+ ) -> None:
+ """
+ Quick-start to serve `Thing` over ZMQ. This method is fully blocking and
+ returns only after `exit()` is called.
+
+ Parameters
+ ----------
+ transports: Sequence[ZMQ_TRANSPORTS] | ZMQ_TRANSPORTS, Default ZMQ_TRANSPORTS.IPC or "IPC"
+ ZMQ transport layers at which the object is exposed:
+
+ - TCP - custom implemented protocol in plain TCP - supply a socket address additionally or a random port
+ will be automatically used.
+ - IPC - inter process communication - connection can be made from other processes running
+ locally within same computer. No client on the network will be able to contact the object using
+ this transport. Beginners may use this transport for learning and testing without worrying about
+ network security or technicalities of a sophisticated protocol like HTTP or MQTT. Also, use this transport
+ if you wish to avoid configuring your firewall.
+ - INPROC - one main python process spawns several threads in one of which the `Thing`
+ will be running. The object can be contacted by a client on another thread but not from other processes
+ or the network. One may use more than one form of transport. All requests made will be anyway queued internally
+ irrespective of origin.
+
+ For multiple transports, supply a list of transports. For example: `[ZMQ_TRANSPORTS.TCP, ZMQ_TRANSPORTS.IPC]`,
+ `["TCP", "IPC"]` or `["IPC", "INPROC"]`.
+
+ forked: bool, optional, Default False
+ consumed by the `forkable` decorator to run this method in a separate thread/process.
+
+ **kwargs:
+ - tcp_socket_address: `str`, optional,
+ socket address for TCP access, for example: tcp://0.0.0.0:61234
+ - context: `zmq.asyncio.Context`, optional,
+ ZMQ context object to be used for creating sockets. If not supplied, a new context is created.
+ For INPROC clients, you need to provide the same context used here.
+ """
+ # local import to avoid a circular dependency at module load time
+ from .zmq.rpc_server import prepare_rpc_server
+ prepare_rpc_server(instance=self, transports=transports, **kwargs)
+ # blocks until the server loop is stopped (e.g. via exit())
+ self.rpc_server.run()
+
+
+ @forkable
+ def run_with_http_server(self, port: int = 8080, address: str = '0.0.0.0',
+ # host: str = None,
+ allowed_clients: str | typing.Iterable[str] | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ # protocol_version : int = 1,
+ # network_interface : str = 'Ethernet',
+ forked: bool = False, # used by forkable decorator
+ **kwargs: typing.Dict[str, typing.Any]
+ ) -> None:
+ """
+ Quick-start to serve `Thing` over HTTP. This method is fully blocking.
+
+ Parameters
+ ----------
+ port: int
+ the port at which the HTTP server should be run (unique)
+ address: str
+ A convenience option to set IP address apart from 0.0.0.0 (which is default)
+ ssl_context: ssl.SSLContext | None
+ use it for customized SSL context to provide encrypted communication. For certificate file and key file,
+ one may also use `certfile` and `keyfile` options.
+ allowed_clients: typing.Iterable[str] | str | None
+ serves request and sets CORS only from these clients, other clients are rejected with 403. Uses remote IP
+ header value to achieve this. Unlike CORS, the server resource is not even executed if the client is not an allowed client.
+ Note that the remote IP in a HTTP request is believable only from a trusted HTTP client, not a modified one.
+ **kwargs: typing.Dict[str, typing.Any]
+ - certfile: `str`,
+ alternative to SSL context, provide certificate file & key file to allow the server to create a SSL connection on its own
+ - keyfile: `str`,
+ alternative to SSL context, provide certificate file & key file to allow the server to create a SSL connection on its own
+ - property_handler: `BaseHandler` | `PropertyHandler`,
+ custom web request handler for property operations
+ - action_handler: `BaseHandler` | `ActionHandler`,
+ custom web request handler for action operations
+ - event_handler: `BaseHandler` | `EventHandler`,
+ custom event handler of your choice for handling events
+ """
+ # network_interface: str
+ # Currently there is no logic to detect the IP addresss (as externally visible) correctly, therefore please
+ # send the network interface name to retrieve the IP. If a DNS server is present, you may leave this field
+ # host: str
+ # Host Server to subscribe to coordinate starting sequence of things & web GUI
+ from ..server.http import HTTPServer
+ # start the RPC server over INPROC in a forked thread; the HTTP server bridges to it
+ self.run_with_zmq_server(
+ transports=ZMQ_TRANSPORTS.INPROC,
+ forked=True
+ )
+ # busy-wait (10 ms steps) until the forked RPC server is up before wiring the HTTP server
+ while not self.rpc_server or not self.rpc_server.is_running:
+ time.sleep(0.01)
+ http_server = HTTPServer(
+ [],
+ port=port,
+ address=address,
+ logger=self.logger,
+ ssl_context=ssl_context,
+ allowed_clients=allowed_clients,
+ # network_interface=network_interface,
+ **kwargs
+ )
+ # share the ZMQ context so the INPROC sockets can connect to each other
+ http_server.zmq_client_pool.context = self.rpc_server.context # TODO: issue https://github.com/hololinked-dev/hololinked/issues/84
+ http_server.add_thing(dict(INPROC=self.id))
+ assert http_server.all_ok
+ # blocking call - serves HTTP until stopped
+ http_server.listen()
+
+
+ @forkable
+ def run(self, servers: typing.Sequence[BaseProtocolServer], forked: bool = False) -> None:
+ """
+ Expose the object with the given servers. This method is blocking until `exit()` is called.
+
+ Parameters
+ ----------
+ servers: Sequence[BaseProtocolServer]
+ list of instantiated servers to expose the object.
+ """
+ from ..server.http import HTTPServer
+ from ..server.zmq import ZMQServer
+ from .zmq.rpc_server import RPCServer, prepare_rpc_server
+
+ if not any(isinstance(server, (RPCServer, ZMQServer)) for server in servers):
+ prepare_rpc_server(transports=ZMQ_TRANSPORTS.INPROC)
+ for server in servers:
+ if isinstance(server, HTTPServer):
+ server.add_thing(self)
+ threading.Thread(target=server.listen).start()
+ self.rpc_server.run()
+
+
+ @action()
+ def exit(self) -> None:
+ """Stop serving the object. This method can only be called remotely"""
+ if self.rpc_server is None:
+ self.logger.debug("exit() called on a object that is not exposed yet.")
+ return
+ if self._owners:
+ raise NotImplementedError("call exit on the top-level object, composed objects cannot exit the loop. "+
+ f"This object belongs to {self._owners.__class__.__name__} with ID {self._owners.id}.")
+ self.rpc_server.stop()
+
+
+ @action()
+ def ping(self) -> None:
+ """
+ ping the `Thing` to see if it is alive. Ping successful when action succeeds with no return value and
+ no timeout or exception raised on the client side.
+ """
+ # deliberately empty - reaching this method at all is the liveness signal
+ pass
+
+
+ def __hash__(self) -> int:
+ filename = inspect.getfile(self.__class__)
+ if filename is not None:
+ return hash(filename + self.__class__.__name__ + self.id)
+ return hash(self.__class__.__name__ + self.id)
+ # i.e. unique to a computer
+
+ def __eq__(self, other) -> bool:
+ if not isinstance(other, Thing):
+ return False
+ return self.__class__ == other.__class__ and self.id == other.id
+
+ def __str__(self) -> str:
+ return f"{self.__class__.__name__}({self.id})"
+
+ def __contains__(self, item: Property | BoundAction | EventDispatcher) -> bool:
+ return item in self.properties or item in self.actions or item in self.events
+
+ def __enter__(self) -> "Thing":
+ # enables `with Thing(...) as thing:`; no resources are acquired on entry
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback) -> None:
+ # intentional no-op: no cleanup performed, exceptions are not suppressed
+ pass
+
+
+from .state_machine import StateMachine
+
+# public API of this module
+__all__ = [
+ Thing.__name__
+]
\ No newline at end of file
diff --git a/hololinked/core/zmq/__init__.py b/hololinked/core/zmq/__init__.py
new file mode 100644
index 00000000..5adfe410
--- /dev/null
+++ b/hololinked/core/zmq/__init__.py
@@ -0,0 +1 @@
+from .brokers import *
diff --git a/hololinked/core/zmq/brokers.py b/hololinked/core/zmq/brokers.py
new file mode 100644
index 00000000..db58d523
--- /dev/null
+++ b/hololinked/core/zmq/brokers.py
@@ -0,0 +1,2170 @@
+import os
+import time
+import warnings
+import zmq
+import zmq.asyncio
+import asyncio
+import logging
+import typing
+from uuid import uuid4
+from zmq.utils.monitor import parse_monitor_message
+
+from ...utils import *
+from ...config import global_config
+from ...constants import ZMQ_EVENT_MAP, ZMQ_TRANSPORTS, get_socket_type_name
+from ...serializers.serializers import JSONSerializer, Serializers
+from ...exceptions import BreakLoop
+from .message import (EMPTY_BYTE, ERROR, EXIT, HANDSHAKE, INVALID_MESSAGE, REPLY, SERVER_DISCONNECTED, TIMEOUT,
+ EventMessage, RequestMessage, ResponseMessage, SerializableData, PreserializedData, ServerExecutionContext, ThingExecutionContext,
+ SerializableNone, PreserializedEmptyByte, default_server_execution_context, default_thing_execution_context)
+
+
+
+class BaseZMQ:
+ """
+ Base class for all ZMQ message brokers. Implements socket creation & logger
+ which is common to all server and client implementations.
+ """
+
+ def __init__(self, id: str, logger: logging.Logger | None, **kwargs) -> None:
+ super().__init__()
+ self.id = id # type: str
+ if not logger:
+ logger = get_default_logger('{}|{}'.format(self.__class__.__name__, self.id),
+ kwargs.get('log_level', logging.INFO))
+ self.logger = logger
+ # preserve context/socket/socket_address if a subclass already created them
+ # (create_socket() may run before this __init__ in some inheritance orders)
+ self.context = self.context if hasattr(self, 'context') and self.context else None # type: zmq.Context | zmq.asyncio.Context
+ self.socket = self.socket if hasattr(self, 'socket') and self.socket else None # type: zmq.Socket | None
+ self.socket_address = self.socket_address if hasattr(self, 'socket_address') and self.socket_address else None # type: str | None
+
+
+ def exit(self) -> None:
+ """
+ Cleanup method to terminate ZMQ sockets and contexts before quitting. Called by `__del__()`
+ automatically. Each subclass server/client should implement their version of exiting if necessary.
+ """
+ # only guarantee a logger exists so subclass exit() implementations can log
+ if hasattr(self, 'logger') and not self.logger:
+ self.logger = get_default_logger('{}|{}'.format(
+ self.__class__.__name__, self.id), logging.INFO)
+
+ def __del__(self) -> None:
+ self.exit()
+
+
+ @classmethod
+ def get_socket(cls, *, id: str, node_type: str, context: zmq.asyncio.Context | zmq.Context,
+ transport: ZMQ_TRANSPORTS = ZMQ_TRANSPORTS.IPC, socket_type: zmq.SocketType = zmq.ROUTER,
+ **kwargs) -> typing.Tuple[zmq.Socket, str]:
+ """
+ Create a socket with certain specifications. Supported ZeroMQ transports are TCP, IPC & INPROC.
+ For IPC sockets, a file is created under TEMP_DIR of global configuration.
+
+ Parameters
+ ----------
+ id: str
+ Each ROUTER socket require unique identity to correctly route the messages.
+ node_type: str
+ server or client? i.e. whether to bind (server) or connect (client) as per ZMQ definition
+ context: zmq.Context or zmq.asyncio.Context
+ ZeroMQ Context object that creates the socket
+ transport: Enum
+ TCP, IPC or INPROC. Message crafting/passing/routing is transport invariant as suggested by ZMQ.
+ Speed relationship - INPROC > IPC > TCP.
+ socket_type: zmq.SocketType, default zmq.ROUTER
+ Usually a ROUTER socket is implemented for both client-server and peer-to-peer communication
+ **kwargs: dict
+ - socket_address: str,
+ applicable only for TCP socket to find the correct socket to connect
+ - socket_class: type[zmq.Socket], optional,
+ explicit socket class to instantiate (e.g. a sync socket from an async context)
+
+ Returns
+ -------
+ socket: zmq.Socket
+ created socket
+ socket_address: str
+ qualified address of the socket created for any transport type
+
+ Raises
+ ------
+ NotImplementedError
+ if transport other than TCP, IPC or INPROC is used
+ RuntimeError
+ if transport is TCP and a socket connect from client side is requested but a socket address is not supplied
+ """
+ if kwargs.get('socket_class', None) is not None:
+ socket_class = kwargs.get('socket_class')
+ if not issubclass(socket_class, zmq.Socket):
+ raise TypeError("socket_class must be a subclass of zmq.Socket")
+ socket = context.socket(socket_type, socket_class=socket_class)
+ else:
+ socket = context.socket(socket_type)
+ socket.setsockopt_string(zmq.IDENTITY, id)
+ socket_address = kwargs.get('socket_address', None)
+ # ZMQ convention: servers bind, clients connect
+ bind = node_type == 'server'
+ # NOTE(review): the second operand assumes transport supports .lower(), i.e. it is a
+ # str or a str-based enum - confirm ZMQ_TRANSPORTS derives from str
+ if transport == ZMQ_TRANSPORTS.IPC or transport.lower() == "ipc":
+ if socket_address is None or not socket_address.endswith('.ipc'):
+ if not socket_address:
+ split_id = id.split('/')
+ elif not socket_address.endswith('.ipc'):
+ split_id = socket_address.split('/')
+ socket_dir = os.sep + os.sep.join(split_id[:-1]) if len(split_id) > 1 else ''
+ directory = global_config.TEMP_DIR + socket_dir
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+ # re-compute for IPC because it looks for a file in a directory
+ socket_address = "ipc://{}{}{}.ipc".format(directory, os.sep, split_id[-1])
+ if bind:
+ socket.bind(socket_address)
+ else:
+ socket.connect(socket_address)
+ elif transport == ZMQ_TRANSPORTS.TCP or transport.lower() == "tcp":
+ if bind:
+ if not socket_address:
+ # scan the configured port range for a free port to bind
+ for i in range(global_config.TCP_SOCKET_SEARCH_START_PORT, global_config.TCP_SOCKET_SEARCH_END_PORT):
+ socket_address = "tcp://0.0.0.0:{}".format(i)
+ try:
+ socket.bind(socket_address)
+ break
+ except zmq.error.ZMQError as ex:
+ if not ex.strerror.startswith('Address in use'):
+ raise ex from None
+ else:
+ socket.bind(socket_address)
+ elif socket_address:
+ socket.connect(socket_address)
+ else:
+ raise RuntimeError(f"Socket address not supplied for TCP connection to identity - {id}")
+ elif transport == ZMQ_TRANSPORTS.INPROC or transport.lower() == "inproc":
+ # inproc_id = id.replace('/', '_').replace('-', '_')
+ if socket_address is None:
+ socket_address = f'inproc://{id}'
+ elif not socket_address.startswith('inproc://'):
+ socket_address = f'inproc://{socket_address}'
+ if bind:
+ socket.bind(socket_address)
+ else:
+ socket.connect(socket_address)
+ else:
+ raise NotImplementedError("transports other than IPC, TCP & INPROC are not implemented now for {}.".format(cls.__name__) +
+ f" Given transport {transport}.")
+
+ return socket, socket_address
+
+
+class BaseAsyncZMQ(BaseZMQ):
+ """
+ Base class for all async ZMQ servers and clients.
+ """
+ # init of this class must always take empty arguments due to inheritance structure
+
+ def create_socket(self, *, id: str, node_type: str = 'server', context: zmq.asyncio.Context | None = None,
+ transport: str = "IPC", socket_type: zmq.SocketType = zmq.ROUTER, **kwargs) -> None:
+ """
+ Overloads ``create_socket()`` to create, bind/connect an async socket. A async context is created if none is supplied.
+
+ Raises
+ ------
+ TypeError
+ if a non-async ZMQ context is supplied
+ """
+ if context and not isinstance(context, zmq.asyncio.Context):
+ raise TypeError("async ZMQ message broker accepts only async ZMQ context. supplied type {}".format(type(context)))
+ self.context = context or zmq.asyncio.Context()
+ # delegates transport-specific socket creation to the shared BaseZMQ classmethod
+ self.socket, self.socket_address = BaseZMQ.get_socket(id=id, node_type=node_type, context=self.context,
+ transport=transport, socket_type=socket_type, **kwargs)
+ self.logger.info("created socket {} with address {} & identity {} and {}".format(get_socket_type_name(socket_type),
+ self.socket_address, id, "bound" if node_type == 'server' else "connected"))
+
+
+class BaseSyncZMQ(BaseZMQ):
+ """
+ Base class for all sync ZMQ servers and clients.
+ """
+ # init of this class must always take empty arguments due to inheritance structure
+
+ def create_socket(self, *, id: str, node_type: str = 'server', context: zmq.Context | None = None,
+ transport: str = "IPC", socket_type: zmq.SocketType = zmq.ROUTER, **kwargs) -> None:
+ """
+ Overloads ``create_socket()`` to create, bind/connect a synchronous socket. A synchronous context is created
+ if none is supplied.
+
+ Raises
+ ------
+ TypeError
+ if the supplied context is not a (sync) zmq.Context
+ """
+ socket_class = None
+ if context:
+ if not isinstance(context, zmq.Context):
+ raise TypeError("sync ZMQ message broker accepts only sync ZMQ context. supplied type {}".format(type(context)))
+ # zmq.asyncio.Context subclasses zmq.Context, so it passes the check above;
+ # in that case force a plain sync socket class
+ if isinstance(context, zmq.asyncio.Context):
+ # create sync socket when async context is supplied for sync brokers.
+ # especially useful for INPROC sync client where the context needs to be shared and the server is async.
+ socket_class = zmq.Socket
+ self.context = context or zmq.Context()
+ self.socket, self.socket_address = BaseZMQ.get_socket(id=id, node_type=node_type, context=self.context,
+ transport=transport, socket_type=socket_type, socket_class=socket_class,
+ **kwargs)
+ self.logger.info("created socket {} with address {} & identity {} and {}".format(get_socket_type_name(socket_type),
+ self.socket_address, id, "bound" if node_type == 'server' else "connected"))
+
+
+
+class BaseZMQServer(BaseZMQ):
+ """
+ Base class for all ZMQ servers irrespective of sync and async.
+ """
+
+ def handshake(self, request_message: RequestMessage) -> None:
+ """
+ Pass a handshake message to client. Absolutely mandatory to ensure initial messages do not get lost
+ because of ZMQ's very tiny but significant initial delay after creating socket.
+
+ Parameters
+ ----------
+ request_message: List[bytes]
+ the client message for which the handshake is being sent
+
+ Returns
+ -------
+ None
+ """
+ run_callable_somehow(self._handshake(request_message))
+
+ def _handshake(self, request_message: RequestMessage) -> None:
+ raise NotImplementedError(f"handshake cannot be handled - implement _handshake in {self.__class__} to handshake.")
+
+
+ def handle_invalid_message(self, request_message: RequestMessage, exception: Exception) -> None:
+ """
+ Pass an invalid message to the client when an exception occurred while parsing the message from the client
+ (``parse_client_message()``)
+
+ Parameters
+ ----------
+ request_message: List[bytes]
+ the client message parsing which the exception occurred
+ exception: Exception
+ exception object raised
+
+ Returns
+ -------
+ None
+ """
+ run_callable_somehow(self._handle_invalid_message(request_message, exception))
+
+ def _handle_invalid_message(self, message: RequestMessage, exception: Exception) -> None:
+ raise NotImplementedError("invalid message cannot be handled" +
+ f" - implement _handle_invalid_message in {self.__class__} to handle invalid messages.")
+
+
+ def handle_timeout(self, request_message: RequestMessage, timeout_type: str) -> None:
+ """
+ Pass timeout message to the client when the operation could not be executed within specified timeouts
+
+ Parameters
+ ----------
+ request_message: List[bytes]
+ the client message which could not executed within the specified timeout. timeout value is
+ generally specified within the execution context values.
+
+ Returns
+ -------
+ None
+ """
+ run_callable_somehow(self._handle_timeout(request_message, timeout_type=timeout_type))
+
+ def _handle_timeout(self, request_message: RequestMessage, timeout_type: str) -> None:
+ raise NotImplementedError("timeouts cannot be handled ",
+ f"- implement _handle_timeout in {self.__class__} to handle timeout.")
+
+
+ def handle_error_message(self, request_message: RequestMessage, exception: Exception) -> None:
+ """
+ Pass an exception message to the client when an exception occurred while executing the operation
+
+ Parameters
+ ----------
+ request_message: List[bytes]
+ the client message for which the exception occurred
+ exception: Exception
+ exception object raised
+
+ Returns
+ -------
+ None
+ """
+ run_callable_somehow(self._handle_error_message(request_message, exception))
+
+ def _handle_error_message(self, request_message: RequestMessage, exception: Exception) -> None:
+ raise NotImplementedError("exceptions cannot be handled ",
+ f"- implement _handle_error_message in {self.__class__} to handle exceptions.")
+
+
+ def handled_default_message_types(self, request_message: RequestMessage) -> bool:
+ """
+ Handle default cases for the server. This method is called when the message type is not recognized
+ or the message is not a valid message.
+
+ Parameters
+ ----------
+ request_message: List[bytes]
+ the client message which could not executed within the specified timeout. timeout value is
+ generally specified within the execution context values.
+ receiver_socket: zmq.Socket
+ the socket to which the response must be sent.
+
+ Returns
+ -------
+ None
+ """
+ if request_message.type == HANDSHAKE:
+ self.handshake(request_message)
+ return True
+ elif request_message.type == EXIT:
+ # self.send response with message type EXIT
+ raise BreakLoop(f"exit message received from {request_message.sender_id} with msg-ID {request_message.id}")
+ return False
+
+
+
+class AsyncZMQServer(BaseZMQServer, BaseAsyncZMQ):
+ """
+ Implements both blocking (non-polled) and non-blocking/polling form of receive messages and send replies
+ This server can be stopped from server side by calling ``stop_polling()`` unlike ``AsyncZMQServer`` which
+ cannot be stopped manually unless a message arrives.
+
+ Parameters
+ ----------
+ id: str
+ ``id`` of the Thing which the server serves
+ server_type: str
+ server type metadata - currently not useful/important
+ context: Optional, zmq.asyncio.Context
+ ZeroMQ Context object to use. All sockets share this context. Automatically created when None is supplied.
+ socket_type: zmq.SocketType, default zmq.ROUTER
+ socket type of ZMQ socket, default is ROUTER (enables address based routing of messages)
+ transport: Enum, default ZMQ_TRANSPORTS.IPC
+ Use TCP for network access, IPC for multi-process applications, and INPROC for multi-threaded applications.
+ poll_timeout: int, default 25
+ time in milliseconds to poll the sockets specified under ``procotols``. Useful for calling ``stop_polling()``
+ where the max delay to stop polling will be ``poll_timeout``
+ """
+
+ def __init__(self, *, id: str, context: typing.Union[zmq.asyncio.Context, None] = None,
+ socket_type: zmq.SocketType = zmq.ROUTER, transport: ZMQ_TRANSPORTS = ZMQ_TRANSPORTS.IPC,
+ poll_timeout = 25, **kwargs) -> None:
+ super().__init__(id=id, **kwargs)
+ self.create_socket(id=id, node_type='server', context=context, transport=transport,
+ socket_type=socket_type, **kwargs)
+ self._terminate_context = context == None # terminate if it was created by instance
+ self.poller = zmq.asyncio.Poller()
+ self.poller.register(self.socket, zmq.POLLIN)
+ self.poll_timeout = poll_timeout
+
+
+ @property
+ def poll_timeout(self) -> int:
+ """
+ socket polling timeout in milliseconds greater than 0.
+ """
+ return self._poll_timeout
+
+ @poll_timeout.setter
+ def poll_timeout(self, value) -> None:
+ if not isinstance(value, int) or value < 0:
+ raise ValueError(f"polling period must be an integer greater than 0, not {value}. Value is considered in milliseconds.")
+ self._poll_timeout = value
+
+
+ async def async_recv_request(self) -> RequestMessage:
+ """
+ Receive one message in a blocking form. Async for multi-server paradigm, each server should schedule
+ this method in the event loop explicitly. This is taken care by the ``Eventloop`` & ``RPCServer``.
+
+ Returns
+ -------
+ message: RequestMessage
+ received message with important content (operation, arguments, thing execution context) deserialized.
+ """
+ while True:
+ raw_message = await self.socket.recv_multipart()
+ request_message = RequestMessage(raw_message)
+ if not self.handled_default_message_types(request_message) and raw_message:
+ self.logger.debug(f"received message from client '{request_message.sender_id}' with msg-ID '{request_message.id}'")
+ return request_message
+
+
+ async def async_recv_requests(self) -> typing.List[RequestMessage]:
+ """
+ Receive all currently available messages in blocking form. Async for multi-server paradigm, each server should schedule
+ this method in the event loop explicitly. This is taken care by the ``Eventloop`` & ``RPCServer``.
+
+ Returns
+ -------
+ messages: typing.List[RequestMessage]
+ list of received messages with important content (operation, arguments, execution context) deserialized.
+ """
+ messages = [await self.async_recv_request()]
+ while True:
+ try:
+ raw_message = await self.socket.recv_multipart(zmq.NOBLOCK)
+ request_message = RequestMessage(raw_message)
+ if not self.handled_default_message_types(request_message) and raw_message:
+ self.logger.debug(f"received message from client '{request_message.sender_id}' with msg-ID '{request_message.id}'")
+ messages.append(request_message)
+ except zmq.Again:
+ break
+ return messages
+
+
+ async def async_send_response(self,
+ request_message: RequestMessage,
+ payload: SerializableData = SerializableNone,
+ preserialized_payload: PreserializedData = PreserializedEmptyByte
+ ) -> None:
+ """
+ Send response message for a request message.
+
+ Parameters
+ ----------
+ request_message: List[bytes]
+ original message so that the response can be properly crafted and routed
+ data: Any
+ serializable data to be sent as response
+ pre_encoded_data: bytes
+ pre-encoded data, generally used for large or custom data that is already serialized
+
+ Returns
+ -------
+ None
+ """
+ await self.socket.send_multipart(
+ ResponseMessage.craft_reply_from_request(
+ request_message=request_message,
+ payload=payload,
+ preserialized_payload=preserialized_payload
+ ).byte_array
+ )
+ self.logger.debug(f"sent response to client '{request_message.sender_id}' with msg-ID '{request_message.id}'")
+
+
+ async def async_send_response_with_message_type(self,
+ request_message: RequestMessage,
+ message_type: str,
+ payload: SerializableData = SerializableNone,
+ preserialized_payload: PreserializedData = PreserializedEmptyByte
+ ) -> None:
+ """
+ Send response message for a request message.
+
+ Parameters
+ ----------
+ request_message: List[bytes]
+ original message so that the response can be properly crafted and routed
+ data: Any
+ serializable data to be sent as response
+
+ Returns
+ -------
+ None
+ """
+ await self.socket.send_multipart(
+ ResponseMessage.craft_from_arguments(
+ receiver_id=request_message.sender_id,
+ sender_id=self.id,
+ message_type=message_type or REPLY,
+ message_id=request_message.id,
+ payload=payload,
+ preserialized_payload=preserialized_payload
+ ).byte_array
+ )
+ self.logger.debug(f"sent response to client '{request_message.sender_id}' with msg-ID '{request_message.id}'")
+
+
+ async def poll_requests(self) -> typing.List[RequestMessage]:
+ """
+ poll for messages with specified timeout (``poll_timeout``) and return if any messages are available.
+ This method blocks, so make sure other methods are scheduled which can stop polling.
+
+ Returns
+ -------
+ messages: List[List[bytes]]
+ list of received messages with important content (operation, arguments, thing execution context) deserialized.
+ """
+ self.stop_poll = False
+ messages = []
+ while not self.stop_poll:
+ sockets = await self.poller.poll(self._poll_timeout) # type hints dont work in this line
+ for socket, _ in sockets:
+ while True:
+ try:
+ raw_message = await socket.recv_multipart(zmq.NOBLOCK)
+ except zmq.Again:
+ break
+ else:
+ request_message = RequestMessage(raw_message)
+ if not self.handled_default_message_types(request_message) and raw_message:
+ self.logger.debug(f"received message from client '{request_message.sender_id}' with msg-ID '{request_message.id}'")
+ messages.append(request_message)
+ if len(messages) > 0:
+ break
+ return messages
+
+
+ def stop_polling(self) -> None:
+ """
+ stop polling and unblock ``poll_messages()`` method
+ """
+ self.stop_poll = True
+
+
+ async def _handshake(self, request_message: RequestMessage) -> None:
+ """
+ Inner method that handles handshake. Scheduled by ``handshake()`` method, signature same as ``handshake()``.
+ """
+ # Note that for ROUTER sockets, once the message goes through the sending socket, the address of the receiver
+ # is replaced by the address of the sender once received
+ await self.socket.send_multipart(
+ ResponseMessage.craft_from_arguments(
+ receiver_id=request_message.sender_id,
+ sender_id=self.id,
+ message_type=HANDSHAKE,
+ message_id=request_message.id
+ ).byte_array
+ )
+ self.logger.info(f"sent handshake to client '{request_message.sender_id}'")
+
+
+ async def _handle_timeout(self, request_message: RequestMessage, timeout_type: str) -> None:
+ """
+ Inner method that handles timeout. Scheduled by ``handle_timeout()``, signature same as ``handle_timeout``.
+ """
+ await self.socket.send_multipart(
+ ResponseMessage.craft_from_arguments(
+ receiver_id=request_message.sender_id,
+ sender_id=self.id,
+ message_type=TIMEOUT,
+ message_id=request_message.id,
+ payload=SerializableData(timeout_type, content_type='application/json')
+ ).byte_array
+ )
+ self.logger.info(f"sent timeout to client '{request_message.sender_id}'")
+
+
+ async def _handle_invalid_message(self, request_message: RequestMessage, exception: Exception) -> None:
+ """
+ Inner method that handles invalid messages. Scheduled by ``handle_invalid_message()``,
+ signature same as ``handle_invalid_message()``.
+ """
+ await self.socket.send_multipart(
+ ResponseMessage.craft_from_arguments(
+ receiver_id=request_message.sender_id,
+ sender_id=self.id,
+ message_type=INVALID_MESSAGE,
+ message_id=request_message.id,
+ payload=SerializableData(dict(exception=format_exception_as_json(exception)), content_type='application/json')
+ ).byte_array
+ )
+ self.logger.info(f"sent invalid message to client '{request_message.sender_id}'." +
+ f" exception - {str(exception)}")
+
+
+ async def _handle_error_message(self,
+ request_message: RequestMessage,
+ exception: Exception
+ ) -> None:
+ response_message = ResponseMessage.craft_with_message_type(
+ request_message=request_message,
+ message_type=ERROR,
+ payload=SerializableData(exception, content_type='application/json')
+ )
+ await self.socket.send_multipart(response_message.byte_array)
+ self.logger.info(f"sent exception message to client '{response_message.receiver_id}'." +
+ f" exception - {str(exception)}")
+
+
+ def exit(self) -> None:
+ """
+ unregister socket from poller and terminate socket and context.
+ """
+ try:
+ BaseZMQ.exit(self)
+ self.poller.unregister(self.socket)
+ self.socket.close(0)
+ self.logger.info(f"terminated socket of server '{self.id}' of type {self.__class__}")
+ except Exception as ex:
+ self.logger.warning(f"could not unregister socket {self.id} from polling - {str(ex)}")
+ try:
+ if self._terminate_context:
+ self.context.term()
+ self.logger.info("terminated context of socket '{}' of type '{}'".format(self.id, self.__class__))
+ except Exception as ex:
+ self.logger.warning("could not properly terminate context or attempted to terminate an already terminated " +
+ f" context '{self.id}'. Exception message: {str(ex)}")
+
+
+
+class ZMQServerPool(BaseZMQServer):
+ """
+ Implements pool of async ZMQ servers (& their sockets)
+ """
+
+ def __init__(self, *, ids: typing.List[str] | None = None, **kwargs) -> None:
+ self.context = zmq.asyncio.Context()
+ self.poller = zmq.asyncio.Poller()
+ self.pool = dict() # type: typing.Dict[str, AsyncZMQServer]
+ if ids:
+ for id in ids:
+ self.pool[id] = AsyncZMQServer(id=id, context=self.context, **kwargs)
+ for server in self.pool.values():
+ self.poller.register(server.socket, zmq.POLLIN)
+ super().__init__(id="pool", **kwargs)
+
+
+ def create_socket(self, *, id: str, bind: bool, context: typing.Union[zmq.asyncio.Context, zmq.Context],
+ transport: ZMQ_TRANSPORTS = ZMQ_TRANSPORTS.IPC, socket_type: zmq.SocketType = zmq.ROUTER, **kwargs) -> None:
+ raise NotImplementedError("create socket not supported by ZMQServerPool")
+ # we override this method to prevent socket creation. id set to pool is simply a filler
+ return super().create_socket(id=id, node_type=node_type, context=context, transport=transport,
+ socket_type=socket_type, **kwargs)
+
+ def register_server(self, server: AsyncZMQServer) -> None:
+ # add an externally-created server to the pool and start polling its socket
+ if not isinstance(server, (AsyncZMQServer)):
+ raise TypeError("registration possible for servers only subclass of AsyncZMQServer." +
+ f" Given type {type(server)}")
+ self.pool[server.id] = server
+ self.poller.register(server.socket, zmq.POLLIN)
+
+ def deregister_server(self, server: AsyncZMQServer) -> None:
+ self.poller.unregister(server.socket)
+ self.pool.pop(server.id)
+
+ @property
+ def poll_timeout(self) -> int:
+ """
+ socket polling timeout in milliseconds greater than 0.
+ """
+ # NOTE(review): _poll_timeout is not set in __init__, so poll() raises AttributeError
+ # unless this setter is called first - confirm intended initialization path
+ return self._poll_timeout
+
+ @poll_timeout.setter
+ def poll_timeout(self, value) -> None:
+ # NOTE(review): 0 passes validation although the message says "greater than 0"
+ if not isinstance(value, int) or value < 0:
+ raise ValueError("polling period must be an integer greater than 0, not {}. Value is considered in milliseconds.".format(value))
+ self._poll_timeout = value
+
+ async def async_recv_request(self, id: str) -> RequestMessage:
+ """
+ receive message for server instance name
+
+ Parameters
+ ----------
+ id: str
+ instance name of the ZMQ server.
+ """
+ return await self.pool[id].async_recv_request()
+
+ async def async_recv_requests(self, id: str) -> typing.List[RequestMessage]:
+ """
+ receive all available messages for server instance name
+
+ Parameters
+ ----------
+ id: str
+ instance name of the ZMQ server.
+ """
+ return await self.pool[id].async_recv_requests()
+
+ async def async_send_response(self, *,
+ id: str,
+ request_message: RequestMessage,
+ payload: SerializableData = SerializableNone,
+ preserialized_payload: PreserializedData = PreserializedEmptyByte
+ ) -> None:
+ """
+ send response for instance name
+
+ Parameters
+ ----------
+ id: str
+ instance name of the ``Thing`` or in this case, the ZMQ server.
+ request_message: List[bytes]
+ request message for which response is being given
+ data: Any
+ data to be given as response
+ """
+ await self.pool[id].async_send_response(
+ request_message=request_message,
+ payload=payload,
+ preserialized_payload=preserialized_payload
+ )
+
+ async def poll(self) -> typing.List[typing.List[typing.Any]]:
+ """
+ Pool for messages in the entire server pool. User of this method may map the message to the correct instance
+ using the 0th index of the message.
+ """
+ self.stop_poll = False
+ messages = []
+ while not self.stop_poll:
+ sockets = await self.poller.poll(self._poll_timeout)
+ for socket, _ in sockets:
+ while True:
+ try:
+ raw_message = await socket.recv_multipart(zmq.NOBLOCK)
+ except zmq.Again:
+ break
+ else:
+ if raw_message:
+ request_message = RequestMessage(raw_message)
+ self.logger.debug(f"received message from client '{request_message.sender_id}' with msg-ID '{request_message.id}'")
+ messages.append(request_message)
+ return messages
+
+ def stop_polling(self) -> None:
+ """
+ stop polling method ``poll()``
+ """
+ self.stop_poll = True
+
+ def __getitem__(self, key) -> AsyncZMQServer:
+ return self.pool[key]
+
+ def __iter__(self) -> typing.Iterator[str]:
+ return self.pool.__iter__()
+
+ def __contains__(self, name: str) -> bool:
+ return name in self.pool.keys()
+
+ def exit(self) -> None:
+ for server in self.pool.values():
+ try:
+ self.poller.unregister(server.socket)
+ server.exit()
+ except Exception as ex:
+ self.logger.warning(f"could not unregister poller and exit server {server.id} - {str(ex)}")
+ try:
+ self.context.term()
+ self.logger.info("context terminated for {}".format(self.__class__))
+ except Exception as ex:
+ self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context " +
+ f" Exception message: {str(ex)}")
+
+
+
class BaseZMQClient(BaseZMQ):
    """
    Base class for all ZMQ clients irrespective of sync and async.

    server's response to client
    ::

        [address, bytes(), server_type, message_type, message id, data, pre encoded data]
        [   0   ,    1   ,      2     ,      3      ,      4    ,  5  ,        6        ]

    Parameters
    ----------
    id: str
        unique id of this client
    server_id: str
        The instance name of the server (or ``Thing``) this client talks to
    logger: logging.Logger, optional
        logger instance; when None, one is expected to be created by the base class
    **kwargs:
        poll_timeout: int
            socket polling timeout in milliseconds, default 1000
    """

    def __init__(self, *,
                id: str,
                server_id: str,
                logger: typing.Optional[logging.Logger] = None,
                **kwargs
            ) -> None:
        super().__init__(id=id, logger=logger, **kwargs)
        self.server_id = server_id
        # monitor socket is created lazily by subclasses (see their handshake logic)
        self._monitor_socket = None
        # responses that arrived out of order, keyed by message id
        self._response_cache = dict()
        # context is terminated on exit() only when this client created it
        self._terminate_context = False
        self.socket: zmq.Socket | zmq.asyncio.Socket
        self.poller: zmq.Poller | zmq.asyncio.Poller
        self._poll_timeout = kwargs.get('poll_timeout', 1000)  # default to 1000 ms
        self._stop = False  # in general, stop any loop with this variable

    @property
    def poll_timeout(self) -> int:
        """
        socket polling timeout in milliseconds greater than 0.
        """
        return self._poll_timeout

    @poll_timeout.setter
    def poll_timeout(self, value: int) -> None:
        if not isinstance(value, int) or value <= 0:
            raise ValueError(f"polling period must be an integer greater than 0, not {value}. Value is considered in milliseconds.")
        self._poll_timeout = value

    def exit(self) -> None:
        """
        Unregister from the poller, close the socket(s) and terminate the context
        (the latter only if this client owns it). Failures are logged as warnings,
        never raised, so exit is safe to call during teardown.
        """
        BaseZMQ.exit(self)
        try:
            self.poller.unregister(self.socket)
            # TODO - there is some issue here while quitting
            if self._monitor_socket is not None:
                self.poller.unregister(self._monitor_socket)
        except Exception as ex:
            self.logger.warning(f"unable to deregister from poller - {str(ex)}")

        try:
            if self._monitor_socket is not None:
                self._monitor_socket.close(0)
            self.socket.close(0)
            self.logger.info("terminated socket of server '{}' of type '{}'".format(self.id, self.__class__))
        except Exception as ex:
            self.logger.warning("could not properly terminate socket or attempted to terminate an already terminated " +
                            f"socket '{self.id}' of type '{self.__class__}'. Exception message: {str(ex)}")
        try:
            if self._terminate_context:
                self.context.term()
                self.logger.info("terminated context of socket '{}' of type '{}'".format(self.id, self.__class__))
        except Exception as ex:
            self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context" +
                            "'{}'. Exception message: {}".format(self.id, str(ex)))


    def handled_default_message_types(self, response_message: RequestMessage) -> bool:
        """
        Handle message types that should not reach the caller: socket-monitor events
        and handshake replies.

        Parameters
        ----------
        response_message: RequestMessage
            the received message

        Returns
        -------
        bool
            True when the message was consumed here and the caller should simply
            continue polling; False when the caller must process the message itself.

        Raises
        ------
        ConnectionAbortedError
            when the monitor socket reports that the server disconnected
        RuntimeError
            when a monitor-socket message cannot be deserialized
        """
        if len(response_message.byte_array) == 2:  # socket monitor message, not our message
            try:
                if ZMQ_EVENT_MAP[parse_monitor_message(response_message.byte_array)['event']] == SERVER_DISCONNECTED:
                    raise ConnectionAbortedError(f"server disconnected for {self.id}")
                return True  # True should simply continue polling
            except RuntimeError as ex:
                raise RuntimeError(f'message received from monitor socket cannot be deserialized for {self.id}') from None
        if response_message.type == HANDSHAKE:
            return True
        return False


    def stop(self) -> None:
        """
        stop the client, i.e. ask any ongoing receive/handshake loop to exit.
        """
        self._stop = True
+
+
+
class SyncZMQClient(BaseZMQClient, BaseSyncZMQ):
    """
    Synchronous ZMQ client that connect with sync or async server based on ZMQ transport. Works like REQ-REP socket.
    Each request is blocking until response is received. Suitable for most purposes.

    Parameters
    ----------
    id: str
        Unique id of the client to receive messages from the server. Each client connecting to same server must
        still have unique ID.
    server_id: str
        The instance name of the server (or ``Thing``)
    handshake: bool
        when true, handshake with the server first before allowing first message and block until that handshake was
        accomplished.
    transport: str | Enum, TCP, IPC or INPROC, default IPC
        transport implemented by the server
    context: zmq.Context, optional
        ZMQ context; one is created (and owned) by this client when not given
    **kwargs:
        socket_address: str
            socket address for connecting to TCP server
        handshake_timeout: int
            timeout in milliseconds for the initial handshake, default 60000
    """

    def __init__(self,
                id: str,
                server_id: str,
                handshake: bool = True,
                transport: str = ZMQ_TRANSPORTS.IPC,
                context: zmq.Context | None = None,
                **kwargs
            ) -> None:
        super().__init__(id=id, server_id=server_id, **kwargs)
        # IPC/INPROC sockets are addressed by the server id itself
        socket_address = server_id if str(transport) in ["IPC", "INPROC"] else kwargs.pop('socket_address', None)
        kwargs['socket_address'] = socket_address
        self.create_socket(
            id=id,
            node_type='client',
            context=context,
            transport=transport,
            **kwargs
        )
        # terminate the context on exit only if it was created here (context is None)
        self._terminate_context = context == None
        self.poller = zmq.Poller()
        self.poller.register(self.socket, zmq.POLLIN)
        if handshake:
            self.handshake(kwargs.pop("handshake_timeout", 60000))

    def send_request(self,
                    thing_id: bytes,
                    objekt: str,
                    operation: str,
                    payload: SerializableData = SerializableNone,
                    preserialized_payload: PreserializedData = PreserializedEmptyByte,
                    server_execution_context: ServerExecutionContext = default_server_execution_context,
                    thing_execution_context: ThingExecutionContext = default_thing_execution_context
                ) -> bytes:
        """
        send message to server. Does not wait for the reply - use ``recv_response()``
        with the returned message id, or ``execute()`` to do both.

        Parameters
        ----------
        thing_id: bytes
            id of the ``Thing`` on the server that the operation targets
        objekt: str
            the object (property, action or event) of the ``Thing`` the operation acts on
        operation: str
            unique str identifying a server side or ``Thing`` resource. These values corresponding
            to automatically extracted name from the object name or the URL_path prepended with the instance name.
        payload: SerializableData
            if the operation invokes a method, arguments of that method; or the value of a property write
        preserialized_payload: PreserializedData
            pre-encoded payload appended to the message as-is
        server_execution_context: Dict[str, Any]
            see execution context definitions
        thing_execution_context: Dict[str, Any]
            see execution context definitions

        Returns
        -------
        message id: bytes
            a byte representation of message id
        """
        request_message = RequestMessage.craft_from_arguments(
            receiver_id=self.server_id,
            sender_id=self.id,
            thing_id=thing_id,
            objekt=objekt,
            operation=operation,
            payload=payload,
            preserialized_payload=preserialized_payload,
            server_execution_context=server_execution_context,
            thing_execution_context=thing_execution_context
        )
        self.socket.send_multipart(request_message.byte_array)
        self.logger.debug(f"sent operation '{operation}' on thing '{thing_id}' to server '{self.server_id}' with msg-id '{request_message.id}'")
        return request_message.id

    def recv_response(self, message_id: bytes) -> ResponseMessage:
        """
        Receives response from server. Messages are identified by message id, so call this method immediately after
        calling ``send_request()`` to avoid receiving messages out of order. Or, use other methods like
        ``execute()``, ``read_attribute()`` or ``write_attribute()``.

        Parameters
        ----------
        message_id: bytes
            message id returned by ``send_request()``

        Returns
        -------
        ResponseMessage
            the response matching ``message_id``. Out-of-order responses are cached
            and returned by a later call with their message id. Returns None (implicitly)
            only if ``stop()`` is called while waiting.
        """
        self._stop = False
        while not self._stop:
            # serve from cache first - a previous call may have received this reply already
            if message_id in self._response_cache:
                return self._response_cache.pop(message_id)
            sockets = self.poller.poll(self.poll_timeout)
            response_message = None  # type: typing.Optional[ResponseMessage]
            for socket, _ in sockets:
                try:
                    raw_message = socket.recv_multipart(zmq.NOBLOCK)
                    response_message = ResponseMessage(raw_message)
                except zmq.Again:
                    pass
            if response_message:
                if self.handled_default_message_types(response_message):
                    continue
                if message_id != response_message.id:
                    # not ours (yet) - cache for a later recv_response() call
                    self._response_cache[response_message.id] = response_message
                    self.logger.debug("cached response with msg-id {}".format(response_message.id))
                else:
                    self.logger.debug("received response with msg-id {}".format(response_message.id))
                    return response_message


    def execute(self,
                thing_id: bytes,
                objekt: str,
                operation: str,
                payload: SerializableData = SerializableNone,
                preserialized_payload: PreserializedData = PreserializedEmptyByte,
                server_execution_context: ServerExecutionContext = default_server_execution_context,
                thing_execution_context: ThingExecutionContext = default_thing_execution_context,
            ) -> ResponseMessage:
        """
        send an operation and receive the response for it (blocking convenience wrapper
        around ``send_request()`` + ``recv_response()``).

        Parameters
        ----------
        thing_id: bytes
            id of the ``Thing`` on the server that the operation targets
        objekt: str
            the object (property, action or event) of the ``Thing`` the operation acts on
        operation: str
            unique str identifying a server side or ``Thing`` resource. These values corresponding
            to automatically extracted name from the object name or the URL_path prepended with the instance name.
        payload: SerializableData
            if the operation invokes a method, arguments of that method; or the value of a property write
        preserialized_payload: PreserializedData
            pre-encoded payload appended to the message as-is
        server_execution_context: Dict[str, Any]
            see execution context definitions
        thing_execution_context: Dict[str, Any]
            see execution context definitions

        Returns
        -------
        ResponseMessage
            the server's response to this request
        """
        message_id = self.send_request(
            thing_id=thing_id,
            objekt=objekt,
            operation=operation,
            payload=payload,
            preserialized_payload=preserialized_payload,
            server_execution_context=server_execution_context,
            thing_execution_context=thing_execution_context
        )
        return self.recv_response(message_id=message_id)


    def handshake(self, timeout: typing.Union[float, int] = 60000) -> None:
        """
        handshake with server before sending first message. Repeatedly sends a HANDSHAKE
        message and polls for the reply until one arrives or ``timeout`` (milliseconds)
        elapses, in which case ConnectionError is raised. On success, the socket's
        monitor socket is registered with the poller to detect server disconnects.
        """
        self._stop = False
        start_time = time.time_ns()
        while not self._stop:
            if timeout is not None and (time.time_ns() - start_time)/1e6 > timeout:
                raise ConnectionError(f"Unable to contact server '{self.server_id}' from client '{self.id}'")
            self.socket.send_multipart(RequestMessage.craft_with_message_type(self.id, self.server_id, HANDSHAKE).byte_array)
            self.logger.info(f"sent Handshake to server '{self.server_id}'")
            if self.poller.poll(500):
                try:
                    raw_message = self.socket.recv_multipart(zmq.NOBLOCK)
                    response_message = ResponseMessage(raw_message)
                except zmq.Again:
                    pass
                else:
                    if response_message.type == HANDSHAKE:
                        self.logger.info(f"client '{self.id}' handshook with server '{self.server_id}'")
                        break
                    elif self.handled_default_message_types(response_message):
                        continue
                    else:
                        # a real reply raced ahead of the handshake - keep it for recv_response()
                        warnings.warn(f"Handshake cannot be done with '{self.server_id}'. " +
                                    f"Another message arrived before handshake complete - {response_message.type}",
                                    category=RuntimeWarning)
                        self._response_cache[response_message.id] = response_message
            else:
                self.logger.info('got no response for handshake')
        self._monitor_socket = self.socket.get_monitor_socket()
        self.poller.register(self._monitor_socket, zmq.POLLIN)
        # sufficient to know when server dies only while receiving messages, not continuous polling
+
+
+
class AsyncZMQClient(BaseZMQClient, BaseAsyncZMQ):
    """
    Asynchronous client to talk to a ZMQ server where the server is identified by the instance name. The identity
    of the client needs to be different from the server, unlike the ZMQ Server. The client will also perform handshakes
    if necessary.

    Parameters
    ----------
    id: str
        Unique identity of the client to receive messages from the server. Each client connecting to same server must
        still have unique ID.
    server_id: str
        The instance name of the server (or ``Thing``)
    handshake: bool
        when true, handshake with the server first before allowing first message and block until that handshake was
        accomplished.
    transport: str | Enum, TCP, IPC or INPROC, default IPC
        transport implemented by the server
    context: zmq.asyncio.Context, optional
        ZMQ context; one is created (and owned) by this client when not given
    **kwargs:
        socket_address: str
            socket address for connecting to TCP server
        handshake_timeout: int
            timeout in milliseconds for the initial handshake, default 60000
    """

    def __init__(self,
                id: str,
                server_id: str,
                handshake: bool = True,
                transport: str = "IPC",
                context: zmq.asyncio.Context | None = None,
                **kwargs
            ) -> None:
        super().__init__(id=id, server_id=server_id, **kwargs)
        # IPC/INPROC sockets are addressed by the server id itself
        socket_address = server_id if str(transport) in ["IPC", "INPROC"] else kwargs.pop('socket_address', None)
        kwargs['socket_address'] = socket_address
        self.create_socket(
            id=id,
            node_type='client',
            context=context,
            transport=transport,
            **kwargs
        )
        # monitor socket detects server disconnects via socket events
        self._monitor_socket = self.socket.get_monitor_socket()
        self.poller = zmq.asyncio.Poller()
        self.poller.register(self.socket, zmq.POLLIN)
        self.poller.register(self._monitor_socket, zmq.POLLIN)
        # terminate the context on exit only if it was created here (context is None)
        self._terminate_context = context == None
        self._handshake_event = asyncio.Event()
        self._handshake_event.clear()
        if handshake:
            self.handshake(kwargs.pop("handshake_timeout", 60000))

    def handshake(self, timeout: int | None = 60000) -> None:
        """
        automatically called when handshake argument at init is True. When not automatically called, it is necessary
        to call this method before awaiting ``handshake_complete()``.
        """
        run_callable_somehow(self._handshake(timeout))

    async def _handshake(self, timeout: float | int | None = 60000) -> None:
        """
        handshake with server before sending first message. Repeatedly sends a HANDSHAKE
        message and polls for the reply until one arrives or ``timeout`` (milliseconds)
        elapses, in which case ConnectionError is raised. Sets ``_handshake_event`` on success.
        """
        self._stop = False
        # keep the monitor socket out of the poller while handshaking so only the
        # data socket is polled below
        # NOTE(review): the monitor socket is not re-registered after the handshake
        # completes here, unlike SyncZMQClient - confirm whether this is intentional
        if self._monitor_socket is not None and self._monitor_socket in self.poller:
            self.poller.unregister(self._monitor_socket)
        self._handshake_event.clear()
        start_time = time.time_ns()
        while not self._stop:
            if timeout is not None and (time.time_ns() - start_time)/1e6 > timeout:
                raise ConnectionError(f"Unable to contact server '{self.server_id}' from client '{self.id}'")
            await self.socket.send_multipart(RequestMessage.craft_with_message_type(self.id, self.server_id, HANDSHAKE).byte_array)
            self.logger.info(f"sent Handshake to server '{self.server_id}'")
            if await self.poller.poll(500):
                try:
                    raw_message = await self.socket.recv_multipart(zmq.NOBLOCK)
                    response_message = ResponseMessage(raw_message)
                except zmq.Again:
                    pass
                else:
                    if response_message.type == HANDSHAKE:  # type: ignore
                        self.logger.info(f"client '{self.id}' handshook with server '{self.server_id}'")
                        break
                    elif self.handled_default_message_types(response_message):
                        continue
                    else:
                        # a real reply raced ahead of the handshake - keep it for async_recv_response()
                        warnings.warn(f"Handshake cannot be done with '{self.server_id}'. " +
                                    f"Another message arrived before handshake complete - {response_message.type}",
                                    category=RuntimeWarning)
                        self._response_cache[response_message.id] = response_message
            else:
                self.logger.info('got no response for handshake')
        self._handshake_event.set()

    async def handshake_complete(self):
        """
        wait for handshake to complete
        """
        await self._handshake_event.wait()

    async def async_send_request(self,
                    thing_id: str,
                    objekt: str,
                    operation: str,
                    payload: SerializableData = SerializableNone,
                    preserialized_payload: PreserializedData = PreserializedEmptyByte,
                    server_execution_context: ServerExecutionContext = default_server_execution_context,
                    thing_execution_context: typing.Dict[str, typing.Any] = default_thing_execution_context
                ) -> str:
        """
        send message to server. Does not wait for the reply - use ``async_recv_response()``
        with the returned message id, or ``async_execute()`` to do both.

        Server Execution Context Definitions (typing.Dict[str, typing.Any] or JSON):
            - "invokation_timeout" - time in seconds to wait for server to start executing the operation
            - "execution_timeout" - time in seconds to wait for server to complete the operation
            - "oneway" - if True, server will not send a response back

        Thing Execution Context Definitions (typing.Dict[str, typing.Any] or JSON):
            - "fetch_execution_logs" - fetches logs that were accumulated while execution

        Parameters
        ----------
        thing_id: str
            id of the ``Thing`` on the server that the operation targets
        objekt: str
            the object (property, action or event) of the ``Thing`` the operation acts on
        operation: str
            unique str identifying a server side or ``Thing`` resource. These values corresponding
            to automatically extracted name from the object name or the URL_path prepended with the instance name.
        payload: SerializableData
            if the operation invokes a method, arguments of that method; or the value of a property write
        preserialized_payload: PreserializedData
            pre-encoded payload appended to the message as-is
        server_execution_context: Dict[str, Any]
            see execution context definitions
        thing_execution_context: Dict[str, Any]
            see execution context definitions

        Returns
        -------
        message id: str
            id of the sent message
        """
        request_message = RequestMessage.craft_from_arguments(
            receiver_id=self.server_id,
            sender_id=self.id,
            thing_id=thing_id,
            objekt=objekt,
            operation=operation,
            payload=payload,
            preserialized_payload=preserialized_payload,
            server_execution_context=server_execution_context,
            thing_execution_context=thing_execution_context
        )
        await self.socket.send_multipart(request_message.byte_array)
        self.logger.debug(f"sent operation '{operation}' to server '{self.id}' with msg-id '{request_message.id}'")
        return request_message.id

    async def async_recv_response(self, message_id: str) -> ResponseMessage:
        """
        Receives response from server. Messages are identified by message id, so call this method immediately after
        calling ``send_request()`` to avoid receiving messages out of order. Or, use other methods like
        ``execute()``.

        Parameters
        ----------
        message_id: str
            message id returned by ``async_send_request()``

        Returns
        -------
        ResponseMessage
            the response matching ``message_id``. Out-of-order responses are cached
            and returned by a later call with their message id. Returns None (implicitly)
            only if ``stop()`` is called while waiting.
        """
        self._stop = False
        while not self._stop:
            # serve from cache first - a previous call may have received this reply already
            if message_id in self._response_cache:
                return self._response_cache.pop(message_id)
            sockets = await self.poller.poll(self._poll_timeout)
            response_message = None  # type: typing.Optional[ResponseMessage]
            for socket, _ in sockets:
                try:
                    raw_message = await socket.recv_multipart(zmq.NOBLOCK)
                    response_message = ResponseMessage(raw_message)
                except zmq.Again:
                    pass
            if response_message:
                if self.handled_default_message_types(response_message):
                    continue
                if message_id != response_message.id:
                    # not ours (yet) - cache for a later async_recv_response() call
                    self._response_cache[response_message.id] = response_message
                    self.logger.debug("cached response with msg-id {}".format(response_message.id))
                else:
                    self.logger.debug(f"received response with msg-id {response_message.id}")
                    return response_message

    async def async_execute(self,
                    thing_id: str,
                    objekt: str,
                    operation: str,
                    payload: SerializableData = SerializableNone,
                    preserialized_payload: PreserializedData = PreserializedEmptyByte,
                    server_execution_context: ServerExecutionContext = default_server_execution_context,
                    thing_execution_context: ThingExecutionContext = default_thing_execution_context
                ) -> ResponseMessage:
        """
        send an operation and receive the response for it (awaitable convenience wrapper
        around ``async_send_request()`` + ``async_recv_response()``).

        Parameters
        ----------
        thing_id: str
            id of the ``Thing`` on the server that the operation targets
        objekt: str
            the object (property, action or event) of the ``Thing`` the operation acts on
        operation: str
            unique str identifying a server side or ``Thing`` resource. These values corresponding
            to automatically extracted name from the object name or the URL_path prepended with the instance name.
        payload: SerializableData
            if the operation invokes a method, arguments of that method; or the value of a property write
        preserialized_payload: PreserializedData
            pre-encoded payload appended to the message as-is
        server_execution_context: Dict[str, Any]
            see execution context definitions
        thing_execution_context: Dict[str, Any]
            see execution context definitions

        Returns
        -------
        ResponseMessage
            the server's response to this request
        """
        message_id = await self.async_send_request(
            thing_id=thing_id,
            objekt=objekt,
            operation=operation,
            payload=payload,
            preserialized_payload=preserialized_payload,
            server_execution_context=server_execution_context,
            thing_execution_context=thing_execution_context
        )
        return await self.async_recv_response(message_id)
+
+
+
class MessageMappedZMQClientPool(BaseZMQClient):
    """
    Pool of clients where message ID can track the replies irrespective of order of arrival.

    Parameters
    ----------
    id: str
        Unique identity of the pool (used for logging); the individual clients get their
        own ids from ``client_ids``.
    client_ids: List[str]
        ids of the clients to create, one per entry of ``server_ids``
    server_ids: List[str]
        list of instance names of servers to connect to
    handshake: bool
        when true, handshake with the server first before allowing first message and block until that handshake was
        accomplished.
    poll_timeout: int
        socket polling timeout in milliseconds greater than 0.
    transport: str
        transport implemented by ZMQ server
    context: zmq.asyncio.Context
        ZMQ context; one is created (and shared by all clients) when not given
    **kwargs:
        forwarded to ``BaseZMQClient``
    """

    def __init__(self,
                id: str,
                client_ids: typing.List[str],
                server_ids: typing.List[str],
                handshake: bool = True,
                poll_timeout: int = 25,
                transport: str = 'IPC',
                context: zmq.asyncio.Context | None = None,
                **kwargs
            ) -> None:
        super().__init__(id=id, server_id=None, **kwargs)
        if len(client_ids) != len(server_ids):
            raise ValueError("client_ids and server_ids must have same length")
        # this class does not call create_socket method
        self.context = context or zmq.asyncio.Context()
        self.pool = dict()  # type: typing.Dict[str, AsyncZMQClient]
        self.poller = zmq.asyncio.Poller()
        # FIX: this map must exist before register() is called below (the original
        # created it only at the end of __init__, so register() raised AttributeError)
        self._thing_to_client_map = dict()  # type: typing.Dict[str, str]
        for client_id, server_id in zip(client_ids, server_ids):
            client = AsyncZMQClient(
                id=client_id,
                server_id=server_id,
                handshake=handshake,
                transport=transport,
                context=self.context,
                logger=self.logger
            )
            # FIX: register() requires a thing_id argument; the original call
            # self.register(client) raised TypeError. The server id is used as the
            # thing id here - assumed identical in this codebase; TODO confirm.
            self.register(client, server_id)
        # Both the client pool as well as the individual client get their serializers and client_types
        # This is required to implement pool level sending and receiving messages like polling of pool of sockets
        self.event_pool = AsyncioEventPool(len(server_ids))
        self.events_map = dict()  # type: typing.Dict[str, asyncio.Event]
        self.message_map = dict()  # message id -> response message, filled by poll_responses()
        self.cancelled_messages = []  # message ids whose replies must be dropped on arrival
        self.poll_timeout = poll_timeout
        self.stop_poll = False
+
+
    def create_new(self, id: str, server_id: str, transport: str = 'IPC') -> None:
        """
        Create a new client with the specified transport and add it to the pool.
        Other arguments are taken from pool specifications.

        Parameters
        ----------
        id: str
            id of the new client
        server_id: str
            instance name of the server to connect to
        transport: str
            transport implemented by ZMQ server

        Raises
        ------
        ValueError
            if a client for ``server_id`` is already present in the pool
        """
        if server_id not in self.pool.keys():
            client = AsyncZMQClient(
                id=id,
                server_id=server_id,
                handshake=True,
                transport=transport,
                context=self.context,
                logger=self.logger
            )
            client._monitor_socket = client.socket.get_monitor_socket()
            self.poller.register(client._monitor_socket, zmq.POLLIN)
            # NOTE(review): unlike register(), the client's data socket is not added to
            # the poller here and the pool is keyed by server_id instead of client id -
            # confirm whether this asymmetry is intentional
            self.pool[server_id] = client
        else:
            raise ValueError(f"client for instance name '{server_id}' already present in pool")
+
+
    def register(self, client: AsyncZMQClient, thing_id: str) -> None:
        """
        Register a client with the pool.

        Parameters
        ----------
        client: AsyncZMQClient
            client to be registered
        thing_id: str
            id of the ``Thing`` served by this client; used to look the client up later
            via ``get_client_id_from_thing_id()``

        Raises
        ------
        TypeError
            if ``client`` is not an ``AsyncZMQClient``
        """
        if not isinstance(client, AsyncZMQClient):
            raise TypeError("registration possible for clients only subclass of AsyncZMQClient." +
                            f" Given type {type(client)}")
        self.pool[client.id] = client
        # poll both the data socket and the monitor socket (for disconnect events)
        self.poller.register(client.socket, zmq.POLLIN)
        self.poller.register(client._monitor_socket, zmq.POLLIN)
        self._thing_to_client_map[thing_id] = client.id
+
+ def get_client_id_from_thing_id(self, thing_id: str) -> typing.Dict[str, AsyncZMQClient]:
+ """
+ map of thing_id to client
+ """
+ if thing_id not in self._thing_to_client_map:
+ raise ValueError(f"client for thing_id '{thing_id}' not present in pool")
+ return self._thing_to_client_map.get(thing_id, None)
+
+ @property
+ def poll_timeout(self) -> int:
+ """
+ socket polling timeout in milliseconds greater than 0.
+ """
+ return self._poll_timeout
+
+ @poll_timeout.setter
+ def poll_timeout(self, value) -> None:
+ if not isinstance(value, int) or value < 0:
+ raise ValueError("polling period must be an integer greater than 0, not {}. Value is considered in milliseconds".format(value))
+ self._poll_timeout = value
+
+
+ async def handshake_complete(self) -> None:
+ """
+ wait for handshake to complete for all clients in the pool
+ """
+ for client in self.pool.values():
+ await client.handshake_complete() # sufficient to wait serially
+
+
+ def handshake(self, timeout: int | None = 60000) -> None:
+ """
+ automatically called when handshake argument at init is True. When not automatically called, it is necessary
+ to call this method before awaiting ``handshake_complete()``.
+ """
+ for client in self.pool.values():
+ client.handshake(timeout)
+
+
    async def poll_responses(self) -> None:
        """
        Poll for replies from server. Since the client is message mapped, this method should be independently started
        in the event loop. Sending message and retrieving a message mapped is still carried out by other methods.
        Runs until ``stop_poll`` is set (see ``stop_polling()`` if available, or set the flag directly).
        """
        self.logger.info("client polling started for sockets for {}".format(list(self.pool.keys())))
        self.stop_poll = False
        event_loop = asyncio.get_event_loop()
        while not self.stop_poll:
            sockets = await self.poller.poll(self.poll_timeout)  # type hints dont work in this line
            for socket, _ in sockets:
                while True:  # drain each readable socket completely before moving on
                    try:
                        raw_response = await socket.recv_multipart(zmq.NOBLOCK)
                    except zmq.Again:
                        # errors in handle_message should reach the client.
                        break
                    except ConnectionAbortedError:
                        # a monitor socket reported server disconnect - find the owning
                        # client, stop polling its data socket and wait for reconnection
                        # NOTE(review): recv_multipart itself does not raise
                        # ConnectionAbortedError; it is raised by
                        # handled_default_message_types() in the else-clause below, which
                        # this handler does not cover - confirm this path is reachable
                        for client in self.pool.values():
                            if client.socket.get_monitor_socket() == socket:
                                self.poller.unregister(client.socket)  # leave the monitor in the pool
                                client.handshake(timeout=None)
                                self.logger.error(f"{client.id} disconnected." +
                                    " Unregistering from poller temporarily until server comes back.")
                                break
                    else:
                        response_message = ResponseMessage(raw_response)
                        if self.handled_default_message_types(response_message):
                            continue
                        message_id = response_message.id
                        self.logger.debug(f"received response from server '{response_message.sender_id}' with msg-ID '{message_id}'")
                        if message_id in self.cancelled_messages:
                            # the waiter gave up on this request - drop the late reply
                            self.cancelled_messages.remove(message_id)
                            self.logger.debug(f"msg-ID '{message_id}' cancelled")
                            continue
                        self.message_map[message_id] = response_message
                        event = self.events_map.get(message_id, None)
                        if event:
                            event.set()
                        else:
                            # reply arrived before the sender created its event -
                            # resolve it asynchronously (see _resolve_response)
                            invalid_event_task = asyncio.create_task(self._resolve_response(message_id, response_message))
                            # NOTE(review): create_task() already schedules the coroutine;
                            # this call_soon of a lambda that merely returns the task
                            # appears to be a no-op - confirm
                            event_loop.call_soon(lambda: invalid_event_task)
+
+
    async def _resolve_response(self, message_id: str, data: typing.Any) -> None:
        """
        This method is called when there is an asyncio Event not available for a message ID. This can happen only
        when the server replied before the client created a asyncio.Event object. check ``async_execute()`` for details.

        Parameters
        ----------
        message_id: str
            the message for which the event was not created
        data: typing.Any
            the data given by the server which needs to mapped to the message
        """
        max_number_of_retries = 100
        for i in range(max_number_of_retries):
            # 100 retries x 25 ms = up to 2.5 s grace period for the sender to create its event
            await asyncio.sleep(0.025)
            try:
                event = self.events_map[message_id]
            except KeyError:
                if message_id in self.cancelled_messages:
                    # Only for safety, likely should never reach here
                    self.cancelled_messages.remove(message_id)
                    self.logger.debug(f'message_id {message_id} cancelled')
                    return
                if i >= max_number_of_retries - 1:
                    # give up - nobody ever created an event for this reply
                    self.logger.error("unknown message id {} without corresponding event object".format(message_id))
                    return
            else:
                self.message_map[message_id] = data
                event.set()
                break
+
    def assert_client_ready(self, client: AsyncZMQClient):
        """
        Raise if the given client cannot currently be used for sending.

        Raises
        ------
        ConnectionAbortedError
            if the client has not (re)completed its handshake, i.e. the server is not alive
        ConnectionError
            if the server is alive but the client's socket is not registered for polling yet
        """
        if not client._handshake_event.is_set():
            raise ConnectionAbortedError(f"{client.id} is currently not alive")
        # NOTE: relies on zmq.Poller's private _map attribute to test registration;
        # there is no public API for this check
        if not client.socket in self.poller._map:
            raise ConnectionError("handshake complete, server is alive but client socket not yet ready to be polled." +
                                "Application using MessageMappedClientPool should register the socket manually for polling." +
                                "If using hololinked.server.HTTPServer, socket is waiting until HTTP Server updates its "
                                "routing logic as the server has just now come alive, please try again soon.")
+
    async def async_send_request(self,
                            client_id: str,
                            thing_id: str,
                            objekt: str,
                            operation: str,
                            payload: SerializableData = SerializableNone,
                            preserialized_payload: PreserializedData = PreserializedEmptyByte,
                            server_execution_context: ServerExecutionContext = default_server_execution_context,
                            thing_execution_context: ThingExecutionContext = default_thing_execution_context
                        ) -> str:
        """
        Send an operation request to one server in the pool. Replies are automatically polled & to be
        retrieved using ``async_recv_response()``.

        Parameters
        ----------
        client_id: str
            id of the pooled client (and thereby the server) to send the request through
        thing_id: str
            id of the thing on which the operation is to be performed
        objekt: str
            name of the property, action or event on the thing
        operation: str
            operation to be performed
        payload: SerializableData
            payload for the operation
        preserialized_payload: PreserializedData
            raw-bytes payload forwarded as-is
        server_execution_context: ServerExecutionContext
            see execution context definitions
        thing_execution_context: ThingExecutionContext
            see execution context definitions

        Returns
        -------
        message_id: str
            created message ID
        """
        self.assert_client_ready(self.pool[client_id])
        message_id = await self.pool[client_id].async_send_request(
            thing_id=thing_id,
            objekt=objekt,
            operation=operation,
            payload=payload,
            preserialized_payload=preserialized_payload,
            server_execution_context=server_execution_context,
            thing_execution_context=thing_execution_context
        )
        # reserve an event so async_recv_response() can wait for the reply to this message
        event = self.event_pool.pop()
        self.events_map[message_id] = event
        return message_id
+
+ async def async_recv_response(self,
+ client_id: str,
+ message_id: bytes,
+ timeout: float | int | None = None
+ ) -> ResponseMessage:
+ """
+ Receive response for specified message ID.
+
+ Parameters
+ ----------
+ message_id: bytes
+ the message id for which response needs to eb fetched
+ raise_client_side_exceptions: bool, default False
+ raise exceptions from server on client side
+ timeout: float,
+ client side timeout, not the same as timeout passed to server, recommended to be None in general cases.
+ Server side timeouts ensure start of execution of operations within specified timeouts and
+ drops execution altogether if timeout occured. Client side timeouts only wait for message to come within
+ the timeout, but do not gaurantee non-execution.
+
+ Returns
+ -------
+ response: dict, Any
+ dictionary when plain response is False, any value returned from execution on the server side if plain response is
+ True.
+
+ Raises
+ ------
+ ValueError:
+ if supplied message id is not valid
+ TimeoutError:
+ if timeout is not None and response did not arrive
+ """
+ try:
+ event = self.events_map[message_id]
+ except KeyError:
+ raise ValueError(f"message id {message_id} unknown.") from None
+ while True:
+ try:
+ await asyncio.wait_for(event.wait(), timeout)
+ # default 5 seconds because we want to check if server is also dead
+ if event.is_set(): # i.e. if timeout is not None, check if event is set
+ break
+ self.assert_client_ready(self.pool[client_id])
+ except TimeoutError:
+ self.cancelled_messages.append(message_id)
+ self.logger.debug(f'message_id {message_id} added to list of cancelled messages')
+ raise TimeoutError(f"Execution not completed within {timeout} seconds") from None
+ self.events_map.pop(message_id)
+ self.event_pool.completed(event)
+ response = self.message_map.pop(message_id)
+ return response
+
    async def async_execute(self,
                        client_id: str,
                        thing_id: str,
                        objekt: str,
                        operation: str,
                        payload: SerializableData = SerializableNone,
                        preserialized_payload: PreserializedData = PreserializedEmptyByte,
                        server_execution_context: ServerExecutionContext = default_server_execution_context,
                        thing_execution_context: ThingExecutionContext = default_thing_execution_context,
                    ) -> ResponseMessage:
        """
        Send an operation request and wait for its response - a convenience wrapper around
        ``async_send_request()`` followed by ``async_recv_response()``.

        Parameters
        ----------
        client_id: str
            id of the pooled client (and thereby the server) to execute the operation through
        thing_id: str
            id of the thing on which the operation is to be performed
        objekt: str
            name of the property, action or event on the thing
        operation: str
            operation to be performed
        payload: SerializableData
            payload for the operation
        preserialized_payload: PreserializedData
            raw-bytes payload forwarded as-is
        server_execution_context: ServerExecutionContext
            see execution context definitions
        thing_execution_context: ThingExecutionContext
            see execution context definitions

        Returns
        -------
        response: ResponseMessage
            the response message from the server
        """
        message_id = await self.async_send_request(
            client_id=client_id,
            thing_id=thing_id,
            objekt=objekt,
            operation=operation,
            payload=payload,
            preserialized_payload=preserialized_payload,
            server_execution_context=server_execution_context,
            thing_execution_context=thing_execution_context
        )
        # no client-side timeout here - waits until the server replies
        return await self.async_recv_response(
            client_id=client_id,
            message_id=message_id,
        )
+
    def start_polling(self) -> None:
        """
        register the server message polling loop in the asyncio event loop.
        """
        event_loop = asyncio.get_event_loop()
        # defer task creation via call_soon so the task is created once the loop is running
        event_loop.call_soon(lambda: asyncio.create_task(self.poll_responses()))
+
    def stop_polling(self):
        """
        stop polling for replies from server and stop each client in the pool
        """
        self.stop_poll = True  # flag checked by the polling loop to exit
        for client in self.pool.values():
            client.stop()
+
+ async def async_execute_in_all(self,
+ objekt: str,
+ operation: str,
+ payload: SerializableData = SerializableNone,
+ preserialized_payload: PreserializedData = PreserializedEmptyByte,
+ ids: typing.Optional[typing.List[str]] = None,
+ server_execution_context: ServerExecutionContext = default_server_execution_context,
+ thing_execution_context: ThingExecutionContext = default_thing_execution_context,
+ ) -> typing.Dict[str, typing.Any]:
+ """
+ execute a specified operation in all Thing including eventloops
+ """
+ if not ids:
+ ids = self.pool.keys()
+ gathered_replies = await asyncio.gather(*[
+ self.async_execute(id=id, objekt=objekt, operation=operation, payload=payload,
+ preserialized_payload=preserialized_payload,
+ server_execution_context=server_execution_context,
+ thing_execution_context=thing_execution_context
+ )
+ for id in ids])
+ replies = dict()
+ for id, response in zip(ids, gathered_replies):
+ replies[id] = response
+ return replies
+
    async def async_execute_in_all_things(self,
                        objekt: str,
                        operation: str,
                        payload: SerializableData = SerializableNone,
                        preserialized_payload: PreserializedData = PreserializedEmptyByte,
                        server_execution_context: ServerExecutionContext = default_server_execution_context,
                        thing_execution_context: ThingExecutionContext = default_thing_execution_context,
                    ) -> typing.Dict[str, typing.Any]:
        """
        execute the same operation in all Things, eventloops are excluded.

        NOTE(review): the ids comprehension below selects every client in the pool without
        any filter, so eventloops are not actually excluded - confirm intended filtering.
        """
        return await self.async_execute_in_all(
            objekt=objekt, operation=operation, payload=payload,
            preserialized_payload=preserialized_payload,
            ids=[id for id, client in self.pool.items()],
            server_execution_context=server_execution_context,
            thing_execution_context=thing_execution_context
        )
+
    async def ping_all_servers(self):
        """
        ping all servers connected to the client pool, calls ping() on Thing

        NOTE(review): async_execute_in_all() requires 'objekt' and 'operation' arguments,
        so this call raises TypeError as written - presumably intended to be the commented
        invocation below; confirm and fix (CommonRPC is not in scope here).
        """
        return await self.async_execute_in_all() #operation='invokeAction', objekt=CommonRPC.PING)
+
    def __contains__(self, name: str) -> bool:
        """return True if a client with the given id exists in the pool"""
        return name in self.pool
+
    def __getitem__(self, key) -> AsyncZMQClient:
        """retrieve the pooled client for the given id; raises KeyError if absent"""
        return self.pool[key]
+
    def __iter__(self) -> typing.Iterator[AsyncZMQClient]:
        """iterate over the clients in the pool"""
        return iter(self.pool.values())
+
    def exit(self) -> None:
        """unregister all client sockets from the poller, exit each client and terminate the context"""
        BaseZMQ.exit(self)
        for client in self.pool.values():
            self.poller.unregister(client.socket)
            # the monitor socket was also registered for polling; remove it as well
            self.poller.unregister(client.socket.get_monitor_socket())
            client.exit()
        self.logger.info("all client socket unregistered from pool for '{}'".format(self.__class__))
        try:
            self.context.term()
            self.logger.info("context terminated for '{}'".format(self.__class__))
        except Exception as ex:
            self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context" +
                            "'{}'. Exception message: {}".format(self.identity, str(ex)))
+
+ """
+ BaseZMQ
+ BaseAsyncZMQ
+ BaseSyncZMQ
+ BaseZMQClient
+ SyncZMQClient
+ AsyncZMQClient
+ MessageMappedClientPool
+ """
+
+
+
class AsyncioEventPool:
    """
    Pool of reusable asyncio.Event objects, used as synchronisation primitives
    by MessageMappedClientPool.

    Parameters
    ----------
    initial_number_of_events: int
        initial pool size of events
    """

    def __init__(self, initial_number_of_events: int) -> None:
        self.pool = [asyncio.Event() for _ in range(initial_number_of_events)]
        self.size = initial_number_of_events

    def pop(self) -> asyncio.Event:
        """
        pop an event, new one is created if nothing left in pool
        """
        if self.pool:
            # reuse the oldest pooled event
            popped = self.pool.pop(0)
        else:
            # pool exhausted - grow it by one
            self.size += 1
            popped = asyncio.Event()
        popped.clear()
        return popped

    def completed(self, event: asyncio.Event) -> None:
        """
        put an event back into the pool
        """
        self.pool.append(event)
+
+
class EventPublisher(BaseZMQServer, BaseSyncZMQ):
    """
    Publishes events to SUB sockets using a PUB socket.

    Parameters
    ----------
    id: str
        id of the publishing entity
    transport: str
        TCP, IPC or INPROC
    context: zmq.Context | None
        ZMQ context to use; a new one is created (and owned) if None
    """

    def __init__(self,
                id: str,
                transport: str,
                context: zmq.Context | None = None,
                **kwargs
            ) -> None:
        super().__init__(id=id, **kwargs)
        self.create_socket(id=id, node_type='server', context=context,
                        transport=transport, socket_type=zmq.PUB, **kwargs)
        self.logger.info(f"created event publishing socket at {self.socket_address}")
        self.events = set() # type is typing.Set[EventDispatcher]
        self.event_ids = set() # type: typing.Set[str]
        # we own (and must terminate) the context only if we created it
        self._terminate_context = context is None

    def register(self, event) -> None:
        """
        register event with a specific (unique) name

        Parameters
        ----------
        event: ``Event``
            ``Event`` object that needs to be registered. Events created at ``__init__()`` of Thing are
            automatically registered.

        Raises
        ------
        AttributeError
            if a different event with the same unique identifier is already registered
        """
        from ...core.events import EventDispatcher
        assert isinstance(event, EventDispatcher), "event must be an instance of EventDispatcher"
        # fix: the duplicate-name guard must check the identifier against event_ids
        # (a set of strings); the original checked it against self.events (a set of
        # EventDispatcher objects), which can never match, so the guard was dead code
        if event._unique_identifier in self.event_ids and event not in self.events:
            raise AttributeError(f"event {event._unique_identifier} already found in list of events, please use another name.")
        self.event_ids.add(event._unique_identifier)
        self.events.add(event)

    def unregister(self, event: "EventDispatcher") -> None:
        """
        unregister event with a specific (unique) name

        Parameters
        ----------
        event: ``Event``
            ``Event`` object that needs to be unregistered.
        """
        if event in self.events:
            self.events.remove(event)
            self.event_ids.remove(event._unique_identifier)
        else:
            warnings.warn(f"event {event._name} not found in list of events, please use another name.", UserWarning)

    def publish(self, event, data: typing.Any) -> None:
        """
        publish an event with given unique name.

        Parameters
        ----------
        event: ``EventDispatcher``
            the registered event to publish
        data: Any
            payload of the event; bytes are forwarded as a preserialized payload,
            anything else is serialized with the serializer registered for the event's owner

        Raises
        ------
        AttributeError
            if the event was never registered with this publisher
        """
        # uncomment for type definitions
        # from ...core.events import EventDispatcher
        # assert isinstance(event, EventDispatcher), "event must be an instance of EventDispatcher"
        if event._unique_identifier in self.event_ids:
            payload = SerializableData(data, serializer=Serializers.for_object(event._owner_inst.id, event._owner_inst.__class__.__name__, event._descriptor)) if not isinstance(data, bytes) else SerializableNone
            preserialized_payload = PreserializedData(data) if isinstance(data, bytes) else PreserializedEmptyByte
            event_message = EventMessage.craft_from_arguments(
                event._unique_identifier, self.id,
                payload=payload,
                preserialized_payload=preserialized_payload
            )
            self.socket.send_multipart(event_message.byte_array)
            self.logger.debug("published event with unique identifier {}".format(event._unique_identifier))
        else:
            raise AttributeError("event name {} not yet registered with socket {}".format(event._unique_identifier,
                                                                                    self.socket_address))

    def exit(self):
        """close the publishing socket and terminate the context if owned"""
        if not hasattr(self, 'logger'):
            self.logger = get_default_logger('{}|{}'.format(self.__class__.__name__, uuid4()))
        try:
            self.socket.close(0)
            self.logger.info("terminated event publishing socket with address '{}'".format(self.socket_address))
        except Exception as E:
            self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context at address '{}'. Exception message: {}".format(
                self.socket_address, str(E)))
        try:
            if self._terminate_context:
                self.context.term()
                self.logger.info("terminated context of event publishing socket with address '{}'".format(self.socket_address))
        except Exception as E:
            self.logger.warning("could not properly terminate socket or attempted to terminate an already terminated socket of event publishing socket at address '{}'. Exception message: {}".format(
                self.socket_address, str(E)))
+
+
+
class BaseEventConsumer(BaseZMQClient):
    """
    Consumes events published at PUB sockets using a SUB socket.

    Parameters
    ----------
    id: str
        unique identity for the consumer
    event_unique_identifier: str
        identifier of the event registered at the PUB socket
    socket_address: str
        socket address of the event publisher (``EventPublisher``)
    context: zmq.Context | None
        ZMQ context to use; a new one is created (and owned) if None
    **kwargs:
        server_id: str
            instance name of the Thing publishing the event
    """

    def __init__(self,
                id: str,
                event_unique_identifier: str,
                socket_address: str,
                context: zmq.Context | None = None,
                **kwargs
            ) -> None:
        # we own (and must terminate) the context only if we created it
        self._terminate_context = context is None
        # sync/async flavour of context & poller is decided by the concrete subclass
        if isinstance(self, BaseSyncZMQ):
            self.context = context or zmq.Context()
            self.poller = zmq.Poller()
        elif isinstance(self, BaseAsyncZMQ):
            self.context = context or zmq.asyncio.Context()
            self.poller = zmq.asyncio.Poller()
        else:
            raise TypeError("BaseEventConsumer must be subclassed by either BaseSyncZMQ or BaseAsyncZMQ")
        super().__init__(id=id, server_id=kwargs.get('server_id', None), **kwargs)
        self.create_socket(
            id=id,
            node_type='client',
            context=self.context,
            socket_type=zmq.SUB,
            socket_address=socket_address,
            transport=socket_address.split('://', 1)[0].upper(),
            **kwargs
        )
        self.event_unique_identifier = bytes(event_unique_identifier, encoding='utf-8')
        # PAIR socket pair over inproc used solely to interrupt a blocking poll:
        # interruptor is the polled (receiving) end, interrupting_peer the sending end
        self.interruptor = self.context.socket(zmq.PAIR)
        self.interruptor.setsockopt_string(zmq.IDENTITY, f'interrupting-server')
        self.interruptor.bind(f'inproc://{self.id}/interruption')
        self.interrupting_peer = self.context.socket(zmq.PAIR)
        self.interrupting_peer.setsockopt_string(zmq.IDENTITY, f'interrupting-client')
        self.interrupting_peer.connect(f'inproc://{self.id}/interruption')


    def subscribe(self) -> None:
        """subscribe to the event topic and register the SUB and interrupt sockets for polling"""
        self.socket.setsockopt(zmq.SUBSCRIBE, self.event_unique_identifier)
        self.poller.register(self.socket, zmq.POLLIN)
        self.poller.register(self.interruptor, zmq.POLLIN)


    def craft_interrupt_message(self) -> EventMessage:
        """create the message sent over the interrupt PAIR socket pair"""
        return EventMessage.craft_from_arguments(
            event_id=f'{self.id}/interrupting-server',
            sender_id=self.id,
            payload=SerializableData("INTERRUPT")
        )


    def exit(self):
        """unregister and close all sockets, terminating the context if owned"""
        if not hasattr(self, 'logger'):
            self.logger = get_default_logger('{}|{}'.format(self.__class__.__name__, uuid4()))
        try:
            self.poller.unregister(self.socket)
            self.poller.unregister(self.interruptor)
        except Exception as E:
            self.logger.warning("could not properly terminate socket or attempted to terminate an already terminated socket of event consuming socket at address '{}'. Exception message: {}".format(
                self.socket_address, str(E)))
        try:
            self.socket.close(0)
            self.interruptor.close(0)
            self.interrupting_peer.close(0)
            self.logger.info("terminated event consuming socket with address '{}'".format(self.socket_address))
        except Exception as E:
            # fix: was a bare 'except:' which also swallows KeyboardInterrupt/SystemExit
            self.logger.warning("could not terminate sockets. Exception message: {}".format(str(E)))
        try:
            if self._terminate_context:
                self.context.term()
                self.logger.info("terminated context of event consuming socket with address '{}'".format(self.socket_address))
        except Exception as E:
            self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context at address '{}'. Exception message: {}".format(
                self.socket_address, str(E)))
+
+
class EventConsumer(BaseEventConsumer, BaseSyncZMQ):
    """
    Listens to events published at PUB sockets using SUB socket, listen in blocking fashion or use in threads.

    Parameters
    ----------
    id: str
        unique identity for the consumer
    event_unique_identifier: str
        identifier of the event registered at the PUB socket
    socket_address: str
        socket address of the event publisher (``EventPublisher``)
    **kwargs:
        server_id: str
            instance name of the Thing publishing the event
    """
    def receive(self, timeout: typing.Optional[float] = None) -> EventMessage:
        """
        receive one event (or interrupt message) with given timeout

        Parameters
        ----------
        timeout: float, int, None
            timeout in milliseconds, None for blocking

        Returns
        -------
        EventMessage
            the received event
        """
        while True:
            # poll() returns a list of (socket, event-mask) tuples
            sockets = self.poller.poll(timeout)
            if len(sockets) > 1:
                # if there is an interrupt message as well as an event,
                # give preference to the interrupt message.
                # fix: the original referenced the undefined name 'socket', compared the
                # (socket, mask) tuple itself to a socket, and compared against
                # interrupting_peer although only self.interruptor is registered with
                # the poller (see subscribe())
                if sockets[0][0] == self.interruptor:
                    sockets = [sockets[0]]
                elif sockets[1][0] == self.interruptor:
                    sockets = [sockets[1]]
            for socket, _ in sockets:
                try:
                    raw_message = socket.recv_multipart(zmq.NOBLOCK)
                    return EventMessage(raw_message)
                except zmq.Again:
                    pass


    def interrupt(self):
        """
        interrupts the event consumer and returns an 'INTERRUPT' message from the receive() method,
        generally should be used for exiting this object
        """
        self.interrupting_peer.send_multipart(
            self.craft_interrupt_message().byte_array
        )
+
+
class AsyncEventConsumer(BaseEventConsumer, BaseAsyncZMQ):
    """
    Listens to events published at PUB sockets using SUB socket, use in async loops.

    Parameters
    ----------
    id: str
        unique identity for the consumer
    event_unique_identifier: str
        identifier of the event registered at the PUB socket
    socket_address: str
        socket address of the event publisher (``EventPublisher``)
    **kwargs:
        server_id: str
            instance name of the Thing publishing the event
    """
    async def receive(self, timeout: typing.Optional[float] = None, raise_interrupt_as_exception: bool = False) -> EventMessage:
        """
        receive one event (or interrupt message) with given timeout

        Parameters
        ----------
        timeout: float, int, None
            timeout in milliseconds, None for blocking
        raise_interrupt_as_exception: bool
            currently unused - kept for interface compatibility

        Returns
        -------
        EventMessage
            the received event
        """
        while True:
            # poll() returns a list of (socket, event-mask) tuples
            sockets = await self.poller.poll(timeout)
            if len(sockets) > 1:
                # if there is an interrupt message as well as an event,
                # give preference to the interrupt message.
                # fix: the original referenced the undefined name 'socket', compared the
                # (socket, mask) tuple itself to a socket, and compared against
                # interrupting_peer although only self.interruptor is registered with
                # the poller (see subscribe())
                if sockets[0][0] == self.interruptor:
                    sockets = [sockets[0]]
                elif sockets[1][0] == self.interruptor:
                    sockets = [sockets[1]]
            for socket, _ in sockets:
                try:
                    raw_message = await socket.recv_multipart(zmq.NOBLOCK)
                    return EventMessage(raw_message)
                except zmq.Again:
                    pass


    async def interrupt(self):
        """
        interrupts the event consumer and returns an 'INTERRUPT' message from the receive() method,
        generally should be used for exiting this object
        """
        await self.interrupting_peer.send_multipart(
            self.craft_interrupt_message().byte_array
        )
+
+
+
+# from ...core.events import EventDispatcher
+
# public API of this module
# NOTE(review): EventPublisher and the BaseZMQ* helpers are not exported here -
# confirm whether that omission is intentional
__all__ = [
    AsyncZMQServer.__name__,
    ZMQServerPool.__name__,
    SyncZMQClient.__name__,
    AsyncZMQClient.__name__,
    MessageMappedZMQClientPool.__name__,
    AsyncEventConsumer.__name__,
    EventConsumer.__name__
]
\ No newline at end of file
diff --git a/hololinked/core/zmq/message.py b/hololinked/core/zmq/message.py
new file mode 100644
index 00000000..441ea122
--- /dev/null
+++ b/hololinked/core/zmq/message.py
@@ -0,0 +1,666 @@
+import typing
+import msgspec
+from uuid import uuid4
+from zmq.utils.monitor import parse_monitor_message
+
+from ...constants import JSON, ZMQ_EVENT_MAP, byte_types
+from ...serializers.serializers import Serializers
+from ...serializers.payloads import SerializableData, PreserializedData
+from ...param.parameters import Integer
+
# message types
# both directions
HANDSHAKE = 'HANDSHAKE' # 1 - find out if the server is alive/connect to it
# client to server
OPERATION = 'OPERATION' # 2 - i.e. message type is a request to perform an operation on the interaction affordance
EXIT = 'EXIT' # 3 - exit the server
# server to client
REPLY = 'REPLY' # 4 - response for operation
TIMEOUT = 'TIMEOUT' # 5 - timeout message, operation could not be completed
ERROR = 'EXCEPTION' # 6 - exception occurred while executing operation
INVALID_MESSAGE = 'INVALID_MESSAGE' # 7 - invalid message
SERVER_DISCONNECTED = 'EVENT_DISCONNECTED' # 8 - socket died - zmq's builtin event EVENT_DISCONNECTED
# peer to peer
INTERRUPT = 'INTERRUPT' # 9 - interrupt a socket while polling

# not used now
EVENT = 'EVENT'
EVENT_SUBSCRIPTION = 'EVENT_SUBSCRIPTION'
SUCCESS = 'SUCCESS'


EMPTY_BYTE = b''
"""
Message indices

| Index | 0       | 1      | 2       | 3                     |
|-------|---------|--------|---------|-----------------------|
| Desc  | address | header | payload | preserialized payload |

"""
# CM = Client Message
# indices into the multipart message frame list
INDEX_ADDRESS = 0
INDEX_HEADER= 1
INDEX_BODY = 2
INDEX_PRESERIALIZED_BODY = 3
+
+
class ServerExecutionContext(msgspec.Struct):
    """server-level execution options attached to each request"""
    invokationTimeout: float  # time within which the server must start execution (presumably seconds - confirm)
    executionTimeout: float   # time within which execution must complete (presumably seconds - confirm)
    oneway: bool              # if True, the client does not expect a response
+
class ThingExecutionContext(msgspec.Struct):
    """thing-level execution options attached to each request"""
    fetchExecutionLogs: bool  # if True, execution logs are requested along with the result
+
# shared default execution contexts used when a request does not override them
default_server_execution_context = ServerExecutionContext(
    invokationTimeout=5,
    executionTimeout=5,
    oneway=False
)

default_thing_execution_context = ThingExecutionContext(
    fetchExecutionLogs=False
)

# sentinels for "no payload" - an empty JSON-serializable payload and an empty raw-bytes payload
SerializableNone = SerializableData(None, content_type='application/json')
PreserializedEmptyByte = PreserializedData(EMPTY_BYTE, content_type='text/plain')
+
+
+
class RequestHeader(msgspec.Struct):
    """
    Header of a request message.
    For detailed schema, visit [here](https://hololinked.readthedocs.io/en/latest/protocols/zmq/request-message-header.json).
    """
    messageType: str
    messageID: str
    senderID: str
    receiverID: str
    serverExecutionContext: ServerExecutionContext = msgspec.field(default_factory=lambda: default_server_execution_context)
    thingExecutionContext: ThingExecutionContext = msgspec.field(default_factory=lambda: default_thing_execution_context)
    thingID: typing.Optional[str] = ''
    objekt: typing.Optional[str] = ''
    operation: typing.Optional[str] = ''
    payloadContentType: typing.Optional[str] = 'application/json'
    preencodedPayloadContentType: typing.Optional[str] = 'text/plain'

    def __getitem__(self, key: str) -> typing.Any:
        """dict-style access to header fields; raises KeyError for unknown fields"""
        try:
            return getattr(self, key)
        except AttributeError:
            raise KeyError(f"key {key} not found in {self.__class__.__name__}") from None

    def json(self):
        """return the header as a plain dictionary of all struct fields"""
        return {f: getattr(self, f) for f in self.__struct_fields__}
+
+
class ResponseHeader(msgspec.Struct):
    """
    Header of a response message.
    For detailed schema, visit [here](https://hololinked.readthedocs.io/en/latest/protocols/zmq/response-message-header.json).
    """
    messageType: str
    messageID: str
    receiverID: str
    senderID: str
    payloadContentType: typing.Optional[str] = 'application/json'
    preencodedPayloadContentType: typing.Optional[str] = ''

    def __getitem__(self, key: str) -> typing.Any:
        """dict-style access to header fields; raises KeyError for unknown fields"""
        try:
            return getattr(self, key)
        except AttributeError:
            raise KeyError(f"key {key} not found in {self.__class__.__name__}") from None

    def json(self):
        """return the header as a plain dictionary of all struct fields"""
        return {f: getattr(self, f) for f in self.__struct_fields__}
+
+
class EventHeader(msgspec.Struct):
    """
    Header of an event message.
    For detailed schema, visit [here](https://hololinked.readthedocs.io/en/latest/protocols/zmq/event-message-header.json).
    """
    messageType: str
    messageID: str
    senderID: str
    eventID: str
    payloadContentType: typing.Optional[str] = 'application/json'
    preencodedPayloadContentType: typing.Optional[str] = ''

    def __getitem__(self, key: str) -> typing.Any:
        """dict-style access to header fields; raises KeyError for unknown fields"""
        try:
            return getattr(self, key)
        except AttributeError:
            raise KeyError(f"key {key} not found in {self.__class__.__name__}") from None

    def json(self):
        """return the header as a plain dictionary of all struct fields"""
        return {f: getattr(self, f) for f in self.__struct_fields__}
+
+
class RequestMessage:
    """
    A single unit of message from a ZMQ client to server. The message may be parsed and deserialized into header and body.

    Message indices:

    | Index | 0       | 1      | 2       | 3                     |
    |-------|---------|--------|---------|-----------------------|
    | Desc  | address | header | payload | preserialized payload |

    The header is a JSON with the following (shortened) schema:

    ```json

    {
        "messageType": "string",
        "messageID": "string",
        "senderID": "string",
        "serverExecutionContext": {
            "invokationTimeout": "number",
            "executionTimeout": "number",
            "oneway": "boolean"
        },
        "thingID": "string",
        "objekt": "string",
        "operation": "string",
        "payloadContentType": "string",
        "preencodedPayloadContentType": "string",
        "thingExecutionContext": {
            "fetchExecutionLogs": "boolean"
        }
    }
    ```

    For detailed schema, visit [here](https://hololinked.readthedocs.io/en/latest/protocols/zmq/message.json).
    """
    # number of multipart frames in the wire representation
    length = Integer(default=4, readonly=True,
                    doc="length of the message") # type: int

    def __init__(self, msg : typing.List[bytes]) -> None:
        self._bytes = msg
        self._header = None # type: typing.Optional[RequestHeader] # deserialized lazily by parse_header()
        self._body = None # type: typing.Optional[typing.List[typing.Union[SerializableData, PreserializedData]]] # deserialized lazily by parse_body()
        self._sender_id = None

    @property
    def byte_array(self) -> typing.List[bytes]:
        """returns the message in bytes"""
        return self._bytes

    @property
    def header(self) -> RequestHeader:
        """
        returns the header of the message, namely index 1 from the following:

        | Index | 0       | 1       | 2       | 3                      |
        |-------|---------|---------|---------|------------------------|
        | Desc  | address | header  | payload | preserialized payload  |

        deserialized to a ``RequestHeader``.
        """
        if self._header is None:
            self.parse_header()
        return self._header

    @property
    def body(self) -> typing.List[typing.Union[SerializableData, PreserializedData]]:
        """
        payload of the message - [SerializableData, PreserializedData]
        """
        if self._body is None:
            self.parse_body()
        return self._body

    @property
    def id(self) -> str:
        """ID of the message"""
        return self.header['messageID']

    @property
    def receiver_id(self) -> str:
        """ID of the receiver"""
        return self.header['receiverID']

    @property
    def sender_id(self) -> str:
        """ID of the sender"""
        return self.header['senderID']

    @property
    def thing_id(self) -> str:
        """ID of the thing on which the operation is to be performed"""
        return self.header['thingID']

    @property
    def type(self) -> str:
        """type of the message"""
        return self.header['messageType']

    @property
    def server_execution_context(self) -> typing.Dict[str, typing.Any]:
        """server execution context"""
        return self.header['serverExecutionContext']

    @property
    def thing_execution_context(self) -> typing.Dict[str, typing.Any]:
        """thing execution context"""
        return self.header['thingExecutionContext']

    @property
    def qualified_operation(self) -> str:
        """dotted '<thingID>.<objekt>.<operation>' string identifying the operation"""
        return f"{self.header['thingID']}.{self.header['objekt']}.{self.header['operation']}"

    def parse_header(self) -> None:
        """
        extract the header from the raw frames and deserialize it into a ``RequestHeader``
        """
        if isinstance(self._bytes[INDEX_HEADER], RequestHeader):
            # already parsed (e.g. message crafted locally)
            self._header = self._bytes[INDEX_HEADER]
        elif isinstance(self._bytes[INDEX_HEADER], byte_types):
            self._header = RequestHeader(**Serializers.json.loads(self._bytes[INDEX_HEADER]))
        else:
            raise ValueError(f"header must be of type RequestHeader or bytes, not {type(self._bytes[INDEX_HEADER])}")

    def parse_body(self) -> None:
        """
        extract the body frames and wrap them as [SerializableData, PreserializedData]
        """
        self._body = [
            SerializableData(self._bytes[INDEX_BODY], content_type=self.header['payloadContentType']),
            PreserializedData(self._bytes[INDEX_PRESERIALIZED_BODY], content_type=self.header['preencodedPayloadContentType'])
        ]


    @classmethod
    def craft_from_arguments(cls,
                        receiver_id: str,
                        sender_id: str,
                        thing_id: str,
                        objekt: str,
                        operation: str,
                        payload: SerializableData = SerializableNone,
                        preserialized_payload: PreserializedData = PreserializedEmptyByte,
                        server_execution_context: ServerExecutionContext = default_server_execution_context,
                        thing_execution_context: ThingExecutionContext = default_thing_execution_context
                    ) -> "RequestMessage":
        """
        create a request message from the given arguments

        Parameters
        ----------
        receiver_id: str
            id of the server that will receive the message
        sender_id: str
            id of the client sending the message
        thing_id: str
            id of the thing to which the operation is to be performed
        objekt: str
            objekt of the thing on which the operation is to be performed, i.e. a property, action or event
        operation: str
            operation to be performed
        payload: SerializableData
            payload for the operation
        preserialized_payload: PreserializedData
            raw-bytes payload forwarded as-is
        server_execution_context: ServerExecutionContext
            server-level execution context while performing the operation
        thing_execution_context: ThingExecutionContext
            thing-level execution context while performing the operation

        Returns
        -------
        message: RequestMessage
            the crafted message
        """
        message = RequestMessage([])
        message._header = RequestHeader(
            messageID=str(uuid4()),
            messageType=OPERATION,
            senderID=sender_id,
            receiverID=receiver_id,
            # i.e. the message type is 'OPERATION', not 'HANDSHAKE', 'REPLY', 'TIMEOUT' etc.
            serverExecutionContext=server_execution_context,
            thingID=thing_id,
            objekt=objekt,
            operation=operation,
            payloadContentType=payload.content_type,
            preencodedPayloadContentType=preserialized_payload.content_type,
            thingExecutionContext=thing_execution_context
        )
        message._body = [payload, preserialized_payload]
        # wire frames: address | header | payload | preserialized payload
        message._bytes = [
            bytes(receiver_id, encoding='utf-8'),
            Serializers.json.dumps(message._header.json()),
            payload.serialize(),
            preserialized_payload.value
        ]
        return message


    @classmethod
    def craft_with_message_type(cls,
                            sender_id: str,
                            receiver_id: str,
                            message_type: str = HANDSHAKE
                        ) -> "RequestMessage":
        """
        create a plain message with a certain type, for example a handshake message.

        Parameters
        ----------
        sender_id: str
            id of the client sending the message
        receiver_id: str
            id of the server
        message_type: str
            message type to be sent

        Returns
        -------
        message: RequestMessage
            the crafted message
        """

        message = RequestMessage([])
        message._header = RequestHeader(
            messageID=str(uuid4()),
            messageType=message_type,
            senderID=sender_id,
            receiverID=receiver_id,
            serverExecutionContext=default_server_execution_context
        )
        # empty payloads - this message carries no operation data
        payload = SerializableNone
        preserialized_payload = PreserializedEmptyByte
        message._body = [
            payload,
            preserialized_payload
        ]
        message._bytes = [
            bytes(receiver_id, encoding='utf-8'),
            Serializers.json.dumps(message._header.json()),
            payload.serialize(),
            preserialized_payload.value
        ]
        return message
+
+
+class ResponseMessage:
+ """
+ A single unit of message from a ZMQ server to client.
+ The message may be parsed and deserialized into header and body.
+
+ Message indices:
+
+    | Index | 0       | 1      | 2    | 3                |
+ |-------|---------|--------|------|------------------|
+ | Desc | address | header | data | pre encoded data |
+
+
+ The header is a JSON with the following (shortened) schema:
+
+ ```json
+ {
+ "messageType": "string",
+ "messageID": "string",
+ "payloadContentType": "string",
+ "preencodedPayloadContentType": "string"
+ }
+ ```
+
+ For detailed schema, visit [here](https://hololinked.readthedocs.io/en/latest/protocols/zmq/response-message-header.json).
+ """
+
+ length = Integer(default=4, readonly=True,
+ doc="length of the message") # type: int
+
+ def __init__(self, msg: typing.List[bytes]):
+ self._bytes = msg
+ self._header = None
+ self._body = None
+ self._sender_id = None
+
+ @property
+ def byte_array(self) -> typing.List[bytes]:
+ """returns the message in bytes"""
+ return self._bytes
+
+ @property
+ def id(self) -> str:
+ """ID of the message"""
+ return self.header['messageID']
+
+ @property
+ def type(self) -> str:
+ """type of the message"""
+ return self.header['messageType']
+
+ @property
+ def receiver_id(self) -> str:
+ """ID of the sender"""
+ return self.header['receiverID']
+
+ @property
+ def sender_id(self) -> str:
+ """ID of the receiver"""
+ return self.header['senderID']
+
+ @property
+ def header(self) -> JSON:
+ """Returns the header of the message"""
+ if self._header is None:
+ self.parse_header()
+ return self._header
+
+ @property
+ def body(self) -> typing.Tuple[bytes, bytes, bytes, bytes, bytes]:
+ """Returns the body of the message"""
+ if self._body is None:
+ self.parse_body()
+ return self._body
+
+ @property
+ def payload(self) -> SerializableData:
+ """Returns the payload of the message"""
+ return self.body[0]
+
+ @property
+ def preserialized_payload(self) -> PreserializedData:
+ """Returns the pre-encoded payload of the message"""
+ return self.body[1]
+
+ def parse_header(self) -> None:
+ """parse the header"""
+ if isinstance(self._bytes[INDEX_HEADER], ResponseHeader):
+ self._header = self._bytes[INDEX_HEADER]
+ elif isinstance(self._bytes[INDEX_HEADER], byte_types):
+ self._header = ResponseHeader(**Serializers.json.loads(self._bytes[INDEX_HEADER]))
+ else:
+ raise ValueError(f"header must be of type ResponseHeader or bytes, not {type(self._bytes[INDEX_HEADER])}")
+
+ def parse_body(self) -> None:
+ """parse the body"""
+ self._body = [
+ SerializableData(self._bytes[INDEX_BODY], content_type=self.header['payloadContentType']),
+ PreserializedData(self._bytes[INDEX_PRESERIALIZED_BODY], content_type=self.header['preencodedPayloadContentType'])
+ ]
+
+ @classmethod
+ def craft_from_arguments(cls,
+ receiver_id: str,
+ sender_id: str,
+ message_type: str,
+ message_id: bytes = b'',
+ payload: SerializableData = SerializableNone,
+ preserialized_payload: PreserializedData = PreserializedEmptyByte
+ ) -> "ResponseMessage":
+ """
+ Crafts an arbitrary response to the client using the method's arguments.
+
+ Parameters
+ ----------
+        receiver_id: str
+            id of the client to which the response is sent
+        message_type: str
+            type of the message, possible values are 'REPLY', 'HANDSHAKE' and 'TIMEOUT'
+        message_id: bytes
+            message id of the original client message for which the response is being crafted
+        payload: SerializableData
+            serializable data
+        preserialized_payload: PreserializedData
+            pre-encoded data, generally used for large or custom data that is already serialized
+
+ Returns
+ -------
+ message: List[bytes]
+ the crafted response with information in the correct positions within the list
+ """
+ message = ResponseMessage([])
+ message._header = ResponseHeader(
+ messageType=message_type,
+ messageID=message_id,
+ receiverID=receiver_id,
+ senderID=sender_id,
+ payloadContentType=payload.content_type,
+ preencodedPayloadContentType=preserialized_payload.content_type
+ )
+ message._body = [payload, preserialized_payload]
+ message._bytes = [
+ bytes(receiver_id, encoding='utf-8'),
+ Serializers.json.dumps(message._header.json()),
+ payload.serialize(),
+ preserialized_payload.value
+ ]
+ return message
+
+
+ @classmethod
+ def craft_reply_from_request(cls,
+ request_message: RequestMessage,
+ payload: SerializableData = SerializableNone,
+ preserialized_payload: PreserializedData = PreserializedEmptyByte
+ ) -> "ResponseMessage":
+ """
+ Craft a response with certain data extracted from an originating client message,
+ like the client's address, message id etc.
+
+ Parameters
+ ----------
+        request_message: RequestMessage
+            The message originated by the client for which the response is being crafted
+ data: Any
+ serializable data
+ preserialized_payload: bytes
+ pre-encoded data, generally used for large or custom data that is already serialized
+
+ Returns
+ -------
+ message: List[bytes]
+ the crafted response with information in the correct positions within the list
+ """
+ message = ResponseMessage([])
+ message._header = ResponseHeader(
+ messageType=REPLY,
+ messageID=request_message.id,
+ receiverID=request_message.sender_id,
+ senderID=request_message.receiver_id,
+ payloadContentType=payload.content_type,
+ preencodedPayloadContentType=preserialized_payload.content_type
+ )
+ message._body = [payload, preserialized_payload]
+ message._bytes = [
+ bytes(request_message.sender_id, encoding='utf-8'),
+ Serializers.json.dumps(message._header.json()),
+ payload.serialize(),
+ preserialized_payload.value
+ ]
+ return message
+
+
+ @classmethod
+ def craft_with_message_type(cls,
+ request_message: RequestMessage,
+ message_type: str,
+ payload: SerializableData = SerializableNone,
+ preserialized_payload: PreserializedData = PreserializedEmptyByte
+ ) -> "ResponseMessage":
+ """
+ create a plain message with a certain type, for example a handshake message.
+
+ Parameters
+ ----------
+ receiver_id: str
+ id of the server
+ message_type: bytes
+ message type to be sent
+ """
+ message = ResponseMessage([])
+ message._header = ResponseHeader(
+ messageType=message_type,
+ messageID=request_message.id,
+ receiverID=request_message.sender_id,
+ senderID=request_message.receiver_id,
+ payloadContentType=payload.content_type,
+ preencodedPayloadContentType=preserialized_payload.content_type
+ )
+ message._body = [payload, preserialized_payload]
+ message._bytes = [
+ bytes(request_message.sender_id, encoding='utf-8'),
+ Serializers.json.dumps(message._header.json()),
+ payload.serialize(),
+ preserialized_payload.value
+ ]
+ return message
+
+
+class EventMessage(ResponseMessage):
+
+ @classmethod
+ def craft_from_arguments(cls,
+ event_id: str,
+ sender_id: str,
+ message_type: str = EVENT,
+ payload: SerializableData = SerializableNone,
+ preserialized_payload: PreserializedData = PreserializedEmptyByte
+ ) -> "EventMessage":
+ """
+        create an event message to publish data to subscribers of the event.
+
+ Parameters
+ ----------
+ event_id: str
+            unique id of the event by which ZMQ pub-sub works
+ message_type: bytes
+ message type to be sent
+
+ Returns
+ -------
+ message: RequestMessage
+ the crafted message
+ """
+ message = EventMessage([])
+ message._header = EventHeader(
+ messageType=message_type,
+ messageID=str(uuid4()),
+ eventID=event_id,
+ senderID=sender_id,
+ payloadContentType=payload.content_type,
+ preencodedPayloadContentType=preserialized_payload.content_type
+ )
+ message._body = [payload, preserialized_payload]
+ message._bytes = [
+ bytes(event_id, encoding='utf-8'),
+ Serializers.json.dumps(message._header.json()),
+ payload.serialize(),
+ preserialized_payload.value
+ ]
+ return message
+
+ @property
+ def event_id(self) -> str:
+ """unique ID of the event by which ZMQ pub-sub works"""
+ return self.header['eventID']
+
+ def parse_header(self) -> None:
+ """parse the header"""
+ if isinstance(self._bytes[INDEX_HEADER], EventHeader):
+ self._header = self._bytes[INDEX_HEADER]
+ elif isinstance(self._bytes[INDEX_HEADER], byte_types):
+ self._header = EventHeader(**Serializers.json.loads(self._bytes[INDEX_HEADER]))
+ else:
+            raise ValueError(f"header must be of type EventHeader or bytes, not {type(self._bytes[INDEX_HEADER])}")
+
diff --git a/hololinked/core/zmq/request_message_header_schema.json b/hololinked/core/zmq/request_message_header_schema.json
new file mode 100644
index 00000000..b6823799
--- /dev/null
+++ b/hololinked/core/zmq/request_message_header_schema.json
@@ -0,0 +1,81 @@
+{
+ "type": "object",
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "description": "header of the message",
+ "properties": {
+ "senderID" : {
+ "type": "string",
+ "description": "id of the sender of the message"
+ },
+ "receiverID" : {
+ "type": "string",
+ "description": "id of the receiver of the message"
+ },
+ "messageType" : {
+ "type": "string",
+ "description": "type of the message being sent",
+ "enum": [
+ "HANDSHAKE", "OPERATION", "EXIT", "INTERRUPT"
+ ]
+ },
+ "messageID" : {
+ "type": "string",
+ "format": "uuid",
+ "description": "unique id of the message to correlate with the response"
+ },
+ "serverExecutionContext" : {
+ "type": "object",
+ "description": "activities to be taken care by the server while performing the operation",
+ "properties": {
+ "oneway": {
+ "type": "boolean"
+ },
+ "invokationTimeout": {
+ "type": "number",
+ "minimum": 0
+ },
+ "executionTimeout": {
+ "type": "number",
+ "minimum": 0
+ }
+ }
+ },
+ "thingID" : {
+ "type": "string",
+ "description": "id of the thing on which the operation is to be performed"
+ },
+ "object" : {
+ "type": "string",
+ "description" : "a property, action or event - object of the thing on which the operation is to be performed"
+ },
+ "operation" : {
+ "type": "string",
+ "description" : "name of the peration to be performed",
+ "enum" : [
+ "readProperty", "writeProperty", "invokeAction", "subscribeEvent",
+ "unsubscribeEvent", "readMultipleProperties", "writeMultipleProperties"
+ ]
+ },
+ "payloadContentType" : {
+ "type": "string",
+ "description" : "content type of the payload",
+ "enum" : [
+ "application/json", "pickle", "x-msgpack", "text", "text/plain"
+ ]
+ },
+ "preencodedPayloadContentType" : {
+ "type": "string",
+ "description" : "content type of the pre-encoded payload"
+ },
+ "thingExecutionContext" : {
+ "type": "object",
+ "description" : "thing execution context while performing the operation",
+ "properties" : {
+ "fetchExecutionLogs" : {
+ "type" : "boolean"
+ }
+ }
+ }
+ },
+ "required": ["messageType", "messageID", "serverExecutionContext"]
+}
\ No newline at end of file
diff --git a/hololinked/core/zmq/response_message_header_schema.json b/hololinked/core/zmq/response_message_header_schema.json
new file mode 100644
index 00000000..bdf21746
--- /dev/null
+++ b/hololinked/core/zmq/response_message_header_schema.json
@@ -0,0 +1,29 @@
+{
+ "type": "object",
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "properties" : {
+ "messageID" : {
+ "type": "string",
+ "format": "uuid",
+ "description": "unique id of the message to correlate with the request"
+ },
+ "messageType" : {
+ "type": "string",
+ "description": "type of the message",
+ "enum": ["REPLY", "ERROR", "TIMEOUT",
+ "INVALID_MESSAGE", "EVENT_DISCONNECTED"]
+ },
+ "payloadContentType" : {
+ "type": "string",
+ "description" : "content type of the payload",
+ "enum" : [
+ "application/json", "pickle", "x-msgpack", "text", "text/plain"
+ ]
+ },
+ "preencodedPayloadContentType" : {
+ "type": "string",
+ "description" : "content type of the pre-encoded payload"
+ }
+ },
+ "required": ["messageType", "messageID"]
+}
\ No newline at end of file
diff --git a/hololinked/core/zmq/rpc_server.py b/hololinked/core/zmq/rpc_server.py
new file mode 100644
index 00000000..b49846c4
--- /dev/null
+++ b/hololinked/core/zmq/rpc_server.py
@@ -0,0 +1,840 @@
+import zmq
+import zmq.asyncio
+import sys
+import warnings
+import asyncio
+import typing
+import threading
+import logging
+import tracemalloc
+from collections import deque
+
+
+from ...exceptions import *
+from ...constants import ZMQ_TRANSPORTS
+from ...utils import format_exception_as_json, get_all_sub_things_recusively, get_current_async_loop, get_default_logger
+from ...config import global_config
+from ...serializers import Serializers
+from .message import EMPTY_BYTE, ERROR, REPLY, PreserializedData, RequestMessage, SerializableData
+from .brokers import AsyncZMQServer, BaseZMQServer, EventPublisher
+from ..thing import Thing
+from ..property import Property
+from ..properties import TypedDict
+from ..actions import BoundAction, action as remote_method
+from ..logger import ListHandler
+
+
+
+if global_config.TRACE_MALLOC:
+ tracemalloc.start()
+
+def set_event_loop_policy():
+ if sys.platform.lower().startswith('win'):
+ asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+
+ if global_config.USE_UVLOOP:
+ if sys.platform.lower() in ['linux', 'darwin', 'linux2']:
+ import uvloop
+ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+ else:
+ warnings.warn("uvloop not supported for windows, using default windows selector loop.", RuntimeWarning)
+
+set_event_loop_policy()
+
+
+Undefined = NotImplemented
+
+RemoteObject = Thing # reading convenience
+
+class RPCServer(BaseZMQServer):
+ """
+ The `RPCServer` implements a infinite loop where ZMQ sockets listen for messages, in any transport layer possible
+ (`INPROC`, `IPC` or `TCP`). Once requests are received, jobs are dispatched to the `Thing` instances which are being served,
+ with timeouts or any other execution requirements (called execution context). Within the jobs, the requested
+ operation information is made available which is extracted and executed by a `Thing` instance.
+ The results are then sent back to the client. Operations information include `Thing` ID, the property, action or
+ event to be executed (events are usually PUB-SUB and are largely handled by the `EventPublisher` directly),
+ what to do on them (`readProperty`, `invokeAction` etc.), the payload and the execution contexts (like timeouts).
+ This is structured as a JSON.
+
+ Jobs determine how to execute the operations on the `Thing` instance, whether in queued, async or threaded modes.
+    This is their main function. Queued mode is the default as it is assumed that multiple physical operations in the physical world are not
+ always practical. Jobs also help the `Thing` instance to retrieve operation information from a request object.
+
+ Default ZMQ transport layer is `INPROC`, but `IPC` or `TCP` can also be added simultaneously. The purpose of `INPROC`
+ being default is that, the `RPCServer` is the only server implementing the operations directly on the `Thing`
+ instances. All other protocols like HTTP, MQTT, CoAP etc. will be used to send requests to the `RPCServer` only
+ and do not directly operate on the `Thing` instances. Instead, the incoming requests in other protocols are converted
+ to the above stated "Operation Information" which are in JSON. `INPROC` is the fastest and most efficient way to communicate between
+ multiple independently running loops, whether the loop belongs to a specific protocol's request listener or
+ the `RPCServer` itself. The same `INPROC` messaging contract is also used for `IPC` and `TCP`, thus eliminating the
+ need to separately implement messaging contracts at different layers of communication.
+
+ Therefore, if a `Thing` instance is to be served by a well known protocol, say HTTP, the server behaves like HTTP-RPC.
+
+ [UML Diagram](http://localhost:8000/UML/PDF/RPCServer.pdf)
+ """
+
+ things = TypedDict(key_type=(str,), item_type=(Thing,), bounds=(0, 100), allow_None=True, default=None,
+ doc="list of Things which are being executed", remote=False) # type: typing.Dict[str, Thing]
+
+
+ def __init__(self, *,
+ id: str,
+ things: typing.List[Thing],
+ context: zmq.asyncio.Context | None = None,
+ transport: ZMQ_TRANSPORTS = ZMQ_TRANSPORTS.INPROC,
+ **kwargs: typing.Dict[str, typing.Any]
+ ) -> None:
+ """
+ Parameters
+ ----------
+ id: str
+ `id` of the server
+ things: List[Thing]
+ list of `Thing` instances to be served
+ context: Optional, zmq.asyncio.Context
+ ZeroMQ async Context object to use. All sockets except those created by event publisher share this context.
+ Automatically created when None is supplied.
+ transport: ZMQ_TRANSPORTS
+ transport layer to be used for the server, default is `INPROC`
+ **kwargs:
+ tcp_socket_address: str
+ address of the `TCP` socket, if not given, a random port is chosen
+ """
+ super().__init__(id=id, **kwargs)
+ self.things = dict()
+ for thing in things:
+ self.things[thing.id] = thing
+
+ if self.logger is None:
+ self.logger = get_default_logger('{}|{}|{}|{}'.format(self.__class__.__name__,
+ 'RPC', 'MIXED', self.id), kwargs.get('log_level', logging.INFO))
+ kwargs['logger'] = self.logger
+ # contexts and poller
+ self._run = False # flag to stop all the
+ self._terminate_context = context is None
+ self.context = context or zmq.asyncio.Context()
+
+ self.req_rep_server = AsyncZMQServer(
+ id=self.id,
+ context=self.context,
+ transport=transport,
+ poll_timeout=1000,
+ **kwargs
+ )
+ self.event_publisher = EventPublisher(
+ id=f'{self.id}/event-publisher',
+ context=self.context,
+ transport=transport,
+ **kwargs
+ )
+ self.schedulers = dict()
+
+ # setup scheduling requirements
+ for instance in self.things.values():
+ all_things = get_all_sub_things_recusively(instance)
+ for instance in all_things:
+ assert isinstance(instance, Thing), "instance must be of type Thing"
+ instance.rpc_server = self
+ for action in instance.actions.descriptors.values():
+ if action.execution_info.iscoroutine and not action.execution_info.synchronous:
+ self.schedulers[f'{instance.id}.{action.name}.invokeAction'] = AsyncScheduler
+ elif not action.execution_info.synchronous:
+ self.schedulers[f'{instance.id}.{action.name}.invokeAction'] = ThreadedScheduler
+ # else QueuedScheduler which is default
+ # properties need not dealt yet, but may be in future
+
+ schedulers: typing.Dict[str, "QueuedScheduler"]
+
+ def __post_init__(self):
+ super().__post_init__()
+ self.logger.info("Server with name '{}' can be started using run().".format(self.id))
+
+
+ @property
+ def is_running(self) -> bool:
+ """
+ Check if the server is running or not.
+ """
+ return self._run
+
+
+ async def recv_requests_and_dispatch_jobs(self, server: AsyncZMQServer) -> None:
+ """
+ Continuously receives messages from different clients and dispatches them as jobs according to the specific
+ requirements of a how an object (property/action/event) must be executed (queued/threaded/async).
+ Also handles messages that dont need separate jobs like `HANDSHAKE`, `EXIT`, timeouts etc.
+ """
+ eventloop = asyncio.get_event_loop()
+ while self._run:
+ try:
+ request_messages = await server.poll_requests()
+ # when stop poll is set, this will exit with an empty list
+ except BreakLoop:
+ break
+ except Exception as ex:
+ self.logger.error(f"exception occurred while polling for server '{server.id}' - {str(ex)}")
+ continue
+
+ for request_message in request_messages:
+ try:
+ # handle invokation timeout
+ invokation_timeout = request_message.server_execution_context.get("invokation_timeout", None)
+
+ ready_to_process_event = None
+ timeout_task = None
+ if invokation_timeout is not None:
+ ready_to_process_event = asyncio.Event()
+ timeout_task = asyncio.create_task(
+ self._process_timeouts(
+ request_message=request_message,
+ ready_to_process_event=ready_to_process_event,
+ timeout=invokation_timeout,
+ origin_server=server,
+ timeout_type='invokation'
+ )
+ )
+ eventloop.call_soon(lambda : timeout_task)
+
+ # check object level scheduling requirements and schedule the message
+ # append to messages list - message, event, timeout task, origin socket
+ job = (server, request_message, timeout_task, ready_to_process_event) # type: Scheduler.JobInvokationType
+ if request_message.qualified_operation in self.schedulers:
+ scheduler = self.schedulers[request_message.qualified_operation](self.things[request_message.thing_id], self)
+ else:
+ scheduler = self.schedulers[request_message.thing_id]
+ scheduler.dispatch_job(job)
+
+ except Exception as ex:
+ # handle invalid message
+ self.logger.error(f"exception occurred for message id '{request_message.id}' - {str(ex)}")
+ invalid_message_task = asyncio.create_task(
+ server._handle_invalid_message(
+ request_message=request_message,
+ exception=ex
+ )
+ )
+ eventloop.call_soon(lambda: invalid_message_task)
+ self.stop()
+ self.logger.info(f"stopped polling for server '{server.id}' {server.socket_address.split(':')[0].upper()}")
+
+
+ async def tunnel_message_to_things(self, scheduler: "Scheduler") -> None:
+ """
+        message tunneler between external sockets and internal inproc client
+ """
+ eventloop = get_current_async_loop()
+ while self._run and scheduler.run:
+ # wait for message first
+ if not scheduler.has_job:
+ await scheduler.wait_for_job()
+ # this means in next loop it wont be in this block as a message arrived
+ continue
+
+ # retrieve from messages list - message, execution context, event, timeout task, origin socket
+ origin_server, request_message, timeout_task, ready_to_process_event = scheduler.next_job
+ server_execution_context = request_message.server_execution_context
+
+ # handle invokation timeout
+ invokation_timed_out = True
+ if ready_to_process_event is not None:
+ ready_to_process_event.set() # releases timeout task
+ invokation_timed_out = await timeout_task
+ if ready_to_process_event is not None and invokation_timed_out:
+ # drop call to thing, timeout message was already sent in _process_timeouts()
+ continue
+
+ # handle execution through thing
+ scheduler.last_operation_request = scheduler.extract_operation_tuple_from_request(request_message)
+
+ # schedule an execution timeout
+ execution_timeout = server_execution_context.get("execution_timeout", None)
+ execution_completed_event = None
+ execution_timeout_task = None
+ execution_timed_out = True
+ if execution_timeout is not None:
+ execution_completed_event = asyncio.Event()
+ execution_timeout_task = asyncio.create_task(
+ self._process_timeouts(
+ request_message=request_message,
+ ready_to_process_event=execution_completed_event,
+ timeout=execution_timeout,
+ origin_server=origin_server,
+ timeout_type='execution'
+ )
+ )
+ eventloop.call_soon(lambda: execution_timeout_task)
+
+ # always wait for reply from thing, since this loop is asyncio task (& in its own thread in RPC server),
+ # timeouts always reach client without truly blocking by the GIL. If reply does not arrive, all other requests
+ # get invokation timeout.
+ # await eventloop.run_in_executor(None, scheduler.wait_for_reply)
+ await scheduler.wait_for_reply(eventloop)
+ # check if reply is never undefined, Undefined is a sensible placeholder for NotImplemented singleton
+ if scheduler.last_operation_reply is Undefined:
+ # this is a logic error, as the reply should never be undefined
+ await origin_server._handle_error_message(
+ request_message=request_message,
+ exception=RuntimeError("No reply from thing - logic error")
+ )
+ continue
+ payload, preserialized_payload, reply_message_type = scheduler.last_operation_reply
+ scheduler.reset_operation_reply()
+
+ # check if execution completed within time
+ if execution_completed_event is not None:
+ execution_completed_event.set() # releases timeout task
+ execution_timed_out = await execution_timeout_task
+ if execution_timeout_task is not None and execution_timed_out:
+ # drop reply to client as timeout was already sent
+ continue
+ if server_execution_context.get("oneway", False):
+ # drop reply if oneway
+ continue
+
+ # send reply to client
+ await origin_server.async_send_response_with_message_type(
+ request_message=request_message,
+ message_type=reply_message_type,
+ payload=payload,
+ preserialized_payload=preserialized_payload
+ )
+
+ scheduler.cleanup()
+ self.logger.info("stopped tunneling messages to things")
+
+
+ async def run_thing_instance(self, instance: Thing, scheduler: typing.Optional["Scheduler"] = None) -> None:
+ """
+ run a single `Thing` instance in an infinite loop by allowing the scheduler to schedule operations on it.
+
+ Parameters
+ ----------
+ instance: Thing
+ instance of the `Thing`
+ scheduler: Optional[Scheduler]
+ scheduler that schedules operations on the `Thing` instance, a default is always available.
+ """
+ self.logger.info("starting to run operations on thing {} of class {}".format(instance.id, instance.__class__.__name__))
+ if self.logger.level >= logging.ERROR:
+ # sleep added to resolve some issue with logging related IO bound tasks in asyncio - not really clear what it is.
+ # This loop crashes for log levels above ERROR without the following statement
+ await asyncio.sleep(0.001)
+ scheduler = scheduler or self.schedulers[instance.id]
+ eventloop = get_current_async_loop()
+
+ while self._run and scheduler.run:
+ # print("starting to serve thing {}".format(instance.id))
+ await scheduler.wait_for_operation(eventloop)
+ # await scheduler.wait_for_operation()
+ if scheduler.last_operation_request is Undefined:
+ instance.logger.warning("No operation request found in thing '{}'".format(instance.id))
+ continue
+
+ try:
+ # fetch operation_request which is a tuple of
+ # (thing_id, objekt, operation, payload, preserialized_payload, execution_context)
+ thing_id, objekt, operation, payload, preserialized_payload, execution_context = scheduler.last_operation_request
+
+ # deserializing the payload required to execute the operation
+ payload = payload.deserialize()
+ preserialized_payload = preserialized_payload.value
+ instance.logger.debug(f"thing {instance.id} with {thing_id} starting execution of operation {operation} on {objekt}")
+
+ # start activities related to thing execution context
+ fetch_execution_logs = execution_context.pop("fetch_execution_logs", False)
+ if fetch_execution_logs:
+ list_handler = ListHandler([])
+ list_handler.setLevel(logging.DEBUG)
+ list_handler.setFormatter(instance.logger.handlers[0].formatter)
+ instance.logger.addHandler(list_handler)
+
+ # execute the operation
+ return_value = await self.execute_operation(instance, objekt, operation, payload, preserialized_payload)
+
+ # handle return value
+ if isinstance(return_value, tuple) and len(return_value) == 2 and (
+ isinstance(return_value[1], bytes) or
+ isinstance(return_value[1], PreserializedData)
+ ):
+ if fetch_execution_logs:
+ return_value[0] = {
+ "return_value" : return_value[0],
+ "execution_logs" : list_handler.log_list
+ }
+ payload = SerializableData(return_value[0], Serializers.for_object(thing_id, instance.__class__.__name__, objekt))
+ if isinstance(return_value[1], bytes):
+ preserialized_payload = PreserializedData(return_value[1])
+ # elif isinstance(return_value, PreserializedData):
+ # if fetch_execution_logs:
+ # return_value = {
+ # "return_value" : return_value.value,
+ # "execution_logs" : list_handler.log_list
+ # }
+ # payload = SerializableData(return_value.value, content_type='application/json')
+ # preserialized_payload = return_value
+
+ elif isinstance(return_value, bytes):
+ payload = SerializableData(None, content_type='application/json')
+ preserialized_payload = PreserializedData(return_value)
+ else:
+ # complete thing execution context
+ if fetch_execution_logs:
+ return_value = {
+ "return_value" : return_value,
+ "execution_logs" : list_handler.log_list
+ }
+ payload = SerializableData(return_value, Serializers.for_object(thing_id, instance.__class__.__name__, objekt))
+ preserialized_payload = PreserializedData(EMPTY_BYTE, content_type='text/plain')
+ # set reply
+ scheduler.last_operation_reply = (payload, preserialized_payload, REPLY)
+ except BreakInnerLoop:
+ # exit the loop and stop the thing
+ instance.logger.info("Thing {} with instance name {} exiting event loop.".format(
+ instance.__class__.__name__, instance.id))
+ return_value = None
+ if fetch_execution_logs:
+ return_value = {
+ "return_value" : None,
+ "execution_logs" : list_handler.log_list
+ }
+ scheduler.last_operation_reply = (
+ SerializableData(return_value, content_type='application/json'),
+ PreserializedData(EMPTY_BYTE, content_type='text/plain'),
+ None
+ )
+ return
+ except Exception as ex:
+ # error occurred while executing the operation
+ instance.logger.error("Thing {} with ID {} produced error : {} - {}.".format(
+ instance.__class__.__name__, instance.id, type(ex), ex))
+ return_value = dict(exception=format_exception_as_json(ex))
+ if fetch_execution_logs:
+ return_value["execution_logs"] = list_handler.log_list
+ scheduler.last_operation_reply = (
+ SerializableData(return_value, content_type='application/json'),
+ PreserializedData(EMPTY_BYTE, content_type='text/plain'),
+ ERROR
+ )
+ finally:
+ # cleanup
+ if fetch_execution_logs:
+ instance.logger.removeHandler(list_handler)
+ instance.logger.debug("thing {} with instance name {} completed execution of operation {} on {}".format(
+ instance.__class__.__name__, instance.id, operation, objekt))
+ self.logger.info("stopped running thing {}".format(instance.id))
+
+
+ @classmethod
+ async def execute_operation(cls,
+ instance: Thing,
+ objekt: str,
+ operation: str,
+ payload: typing.Any,
+ preserialized_payload: bytes
+ ) -> typing.Any:
+ """
+ Execute a given operation on a thing instance.
+
+ Parameters
+ ----------
+ instance: Thing
+ instance of the thing
+ objekt: str
+ name of the property, action or event
+ operation: str
+ operation to be executed on the property, action or event
+ payload: Any
+ payload to be used for the operation
+ preserialized_payload: bytes
+ preserialized payload to be used for the operation
+ """
+ if operation == 'readProperty':
+ prop = instance.properties[objekt] # type: Property
+ return getattr(instance, prop.name)
+ elif operation == 'writeProperty':
+ prop = instance.properties[objekt] # type: Property
+ if preserialized_payload != EMPTY_BYTE:
+ prop_value = preserialized_payload
+ else:
+ prop_value = payload
+ return prop.external_set(instance, prop_value)
+ elif operation == 'deleteProperty':
+ prop = instance.properties[objekt] # type: Property
+ del prop # raises NotImplementedError when deletion is not implemented which is mostly the case
+ elif operation == 'invokeAction':
+ action = instance.actions[objekt] # type: BoundAction
+ if payload is None:
+ payload = dict()
+ args = payload.pop('__args__', tuple())
+ # payload then become kwargs
+ if preserialized_payload != EMPTY_BYTE:
+ args = (preserialized_payload,) + args
+ if action.execution_info.iscoroutine:
+ # the actual scheduling as a purely async task is done by the scheduler, not here,
+ # this will be a blocking call
+ return await action.external_call(*args, **payload)
+ return action.external_call(*args, **payload)
+ elif operation == 'readMultipleProperties' or operation == 'readAllProperties':
+ if objekt is None:
+ return instance._get_properties()
+ return instance._get_properties(names=objekt)
+ elif operation == 'writeMultipleProperties' or operation == 'writeAllProperties':
+ return instance._set_properties(payload)
+ raise NotImplementedError("Unimplemented execution path for Thing {} for operation {}".format(
+ instance.id, operation))
+
+
+ async def _process_timeouts(self,
+ request_message: RequestMessage,
+ ready_to_process_event: asyncio.Event,
+ origin_server: AsyncZMQServer,
+ timeout: float | int | None,
+ timeout_type : str
+ ) -> bool:
+ """
+        replies timeout to client if timeout occurred along with returning `True` to indicate that.
+ If timeout did not occur, the `ready_to_process_event` is set to indicate that the operation can be processed.
+ `False` is returned in this case.
+ """
+ try:
+ await asyncio.wait_for(ready_to_process_event.wait(), timeout)
+ return False
+ except TimeoutError:
+ await origin_server._handle_timeout(request_message, timeout_type)
+ return True
+
+
def run_zmq_request_listener(self):
    """
    Runs ZMQ's socket polling in an async loop. This method is blocking and is automatically called by `run()`
    method. Please dont call this method when the async loop is already running.
    """
    self.logger.info("starting external message listener thread")
    self._run = True
    eventloop = get_current_async_loop()
    # pick up tasks already scheduled on this loop (e.g. dispatched by a scheduler
    # before the loop started running) so they are driven alongside the listener
    existing_tasks = asyncio.all_tasks(eventloop)
    eventloop.run_until_complete(
        asyncio.gather(
            # one request listener plus one message tunnel per scheduler
            self.recv_requests_and_dispatch_jobs(self.req_rep_server),
            *[self.tunnel_message_to_things(scheduler) for scheduler in self.schedulers.values()],
            *existing_tasks
        )
    )
    self.logger.info("exiting external listener event loop {}".format(self.id))
    # the loop is closed here, so this server instance cannot be restarted afterwards
    eventloop.close()
+
+
def run_things(self, things: typing.List[Thing]):
    """
    Run loop that executes operations on `Thing` instances. This method is blocking and is called by `run()` method.

    Parameters
    ----------
    things: List[Thing]
        list of `Thing` instances to be executed
    """
    thing_executor_loop = get_current_async_loop()
    self.logger.info(f"starting thing executor loop in thread {threading.get_ident()} for {[obj.id for obj in things]}")
    # one run_thing_instance() coroutine per Thing, all driven by this thread's own loop
    thing_executor_loop.run_until_complete(
        asyncio.gather(*[self.run_thing_instance(instance) for instance in things])
    )
    self.logger.info(f"exiting event loop in thread {threading.get_ident()}")
    # the loop is closed, so this executor thread is single-use
    thing_executor_loop.close()
+
+
def run(self):
    """
    Start the server. This method is blocking.
    Creates job schedulers for each `Thing`, dispatches each `Thing` to its own thread and starts the ZMQ sockets
    polling loop. Call stop() (threadsafe) to stop the server.
    """
    self.logger.info(f"starting RPC server {self.id}")
    # default scheduling policy: one FIFO queue per Thing
    for thing in self.things.values():
        self.schedulers[thing.id] = QueuedScheduler(thing, self)
    threads = dict() # type: typing.Dict[int, threading.Thread]
    # each Thing gets a dedicated executor thread (keyed by thread ident for joining later)
    for thing in self.things.values():
        thread = threading.Thread(target=self.run_things, args=([thing],))
        thread.start()
        threads[thread.ident] = thread
    # blocks in this thread until stop() is called
    self.run_zmq_request_listener()
    for thread in threads.values():
        thread.join()
    self.logger.info(f"server stopped {self.id}")
+
+
def stop(self):
    """Stop the server. This method is threadsafe."""
    self._run = False
    # ends the ZMQ request-listening loop ...
    self.req_rep_server.stop_polling()
    # ... and wakes every scheduler so blocked executor threads can observe shutdown and exit
    for scheduler in self.schedulers.values():
        scheduler.cleanup()
+
+
def exit(self):
    """
    Stop the server and release its ZMQ sockets and context. Best-effort: failures while
    stopping individual components are logged and do not prevent the rest of the cleanup.
    """
    try:
        self.stop()
        if self.req_rep_server is not None:
            self.req_rep_server.exit()
        if self.event_publisher is not None:
            self.event_publisher.exit()
    except Exception as ex:
        # was a bare `except: pass`, which also swallowed SystemExit/KeyboardInterrupt and
        # hid every cleanup error; keep the best-effort semantics but record the cause
        self.logger.warning("exception while exiting server components of '{}': {}".format(self.id, ex))
    if self._terminate_context:
        # terminate the ZMQ context only when this server owns it
        self.context.term()
    self.logger.info("terminated context of socket '{}' of type '{}'".format(self.id, self.__class__))
+
+
def __hash__(self):
    # hash on the string form (see __str__) so the hash stays aligned with __eq__,
    # which also effectively compares by server id
    return hash(str(self))

def __eq__(self, other):
    # two RPC servers are considered equal when they share the same id
    if not isinstance(other, RPCServer):
        return False
    return self.id == other.id

def __str__(self):
    return f"RPCServer({self.id})"
+
+
+
class Scheduler:
    """
    Scheduler class to schedule the operations of a thing either in queued mode, or a one-shot mode in either
    async or threaded loops.

    [UML Diagram](http://localhost:8000/UML/PDF/RPCServer.pdf)

    [UML Diagram subclasses](http://localhost:8000/UML/PDF/Scheduler.pdf)
    """

    # (thing id, object name, operation, payload, preserialized payload, thing execution context)
    OperationRequest = typing.Tuple[str, str, str, SerializableData, PreserializedData, typing.Dict[str, typing.Any]]
    # (payload, preserialized payload, reply type) — presumably; confirm against producers of replies
    OperationReply = typing.Tuple[SerializableData, PreserializedData, str]
    JobInvokationType = typing.Tuple[AsyncZMQServer, RequestMessage, asyncio.Task, asyncio.Event]
    # [UML Diagram](http://localhost:8000/UML/PDF/RPCServer.pdf)
    # subclasses pick asyncio events (async scheduler) or threading events (queued/threaded schedulers)
    _operation_execution_complete_event: asyncio.Event | threading.Event
    _operation_execution_ready_event: asyncio.Event | threading.Event

    def __init__(self, instance: Thing, rpc_server: RPCServer) -> None:
        self.instance = instance # type: Thing
        self.rpc_server = rpc_server # type: RPCServer
        self.run = True # type: bool
        self._one_shot = False # type: bool
        self._last_operation_request = Undefined # type: Scheduler.OperationRequest
        self._last_operation_reply = Undefined # type: Scheduler.OperationReply
        self._job_queued_event = asyncio.Event() # type: asyncio.Event

    @property
    def last_operation_request(self) -> OperationRequest:
        return self._last_operation_request

    @last_operation_request.setter
    def last_operation_request(self, value: OperationRequest):
        # setting the request also signals the thing-executor that an operation is ready
        self._last_operation_request = value
        self._operation_execution_ready_event.set()

    def reset_operation_request(self) -> None:
        self._last_operation_request = Undefined

    @property
    def last_operation_reply(self) -> OperationReply:
        return self._last_operation_reply

    @last_operation_reply.setter
    def last_operation_reply(self, value: OperationReply):
        # consuming the request and publishing the reply happen in one step;
        # waiters on the completion event are woken afterwards
        self._last_operation_request = Undefined
        self._last_operation_reply = value
        self._operation_execution_complete_event.set()
        if self._one_shot:
            # one-shot schedulers stop their loop after a single operation
            self.run = False

    def reset_operation_reply(self) -> None:
        self._last_operation_reply = Undefined

    async def wait_for_job(self) -> None:
        # blocks until dispatch_job() signals that a job is available
        await self._job_queued_event.wait()
        self._job_queued_event.clear()

    async def wait_for_operation(self, eventloop: asyncio.AbstractEventLoop | None) -> None:
        # assert isinstance(self._operation_execution_ready_event, threading.Event), "not a threaded scheduler"
        if isinstance(self._operation_execution_ready_event, threading.Event):
            # threading.Event.wait blocks, so hand it to an executor to keep the loop responsive
            await eventloop.run_in_executor(None, self._operation_execution_ready_event.wait)
        else:
            await self._operation_execution_ready_event.wait()
        self._operation_execution_ready_event.clear()

    async def wait_for_reply(self, eventloop: asyncio.AbstractEventLoop | None) -> None:
        # same threading/async duality as wait_for_operation, for the completion side
        if isinstance(self._operation_execution_complete_event, threading.Event):
            await eventloop.run_in_executor(None, self._operation_execution_complete_event.wait)
        else:
            await self._operation_execution_complete_event.wait()
        self._operation_execution_complete_event.clear()

    @property
    def has_job(self) -> bool:
        raise NotImplementedError("has_job method must be implemented in the subclass")

    @property
    def next_job(self) -> JobInvokationType:
        raise NotImplementedError("next_job method must be implemented in the subclass")

    def dispatch_job(self, job: JobInvokationType) -> None:
        raise NotImplementedError("dispatch_job method must be implemented in the subclass")

    def cleanup(self):
        # release every waiter so loops blocked on these events can observe run == False and exit
        self.run = False
        self._job_queued_event.set()
        self._operation_execution_ready_event.set()
        self._operation_execution_complete_event.set()

    @classmethod
    def extract_operation_tuple_from_request(self, request_message: RequestMessage) -> OperationRequest:
        """thing execution info"""
        # NOTE(review): declared @classmethod but first parameter is named `self` — consider `cls`
        return (request_message.header['thingID'], request_message.header['objekt'], request_message.header['operation'],
            request_message.body[0], request_message.body[1], request_message.header['thingExecutionContext'])

    @classmethod
    def format_reply_tuple(self, return_value: typing.Any) -> OperationReply:
        # NOTE(review): stub — returns None instead of an OperationReply; confirm whether callers rely on it
        pass
+
+
+
class QueuedScheduler(Scheduler):
    """
    Scheduler class to schedule the operations of a thing in a queued loop.
    Jobs are processed one after another in FIFO order.
    """
    def __init__(self, instance: Thing, rpc_server: RPCServer) -> None:
        super().__init__(instance, rpc_server)
        self.queue = deque()
        self._one_shot = False
        # threading events: the executor runs in a different thread than the listener
        self._operation_execution_ready_event = threading.Event()
        self._operation_execution_complete_event = threading.Event()

    @property
    def has_job(self) -> bool:
        return len(self.queue) > 0

    @property
    def next_job(self) -> Scheduler.JobInvokationType:
        # note: pops — the returned job is removed from the queue
        return self.queue.popleft()

    def dispatch_job(self, job: Scheduler.JobInvokationType) -> None:
        """
        append a request message to the queue after ticking the invocation timeout clock

        Parameters
        ----------
        job: Tuple[RequestMessage, asyncio.Event, asyncio.Task, AsyncZMQServer]
            tuple of request message, event to indicate if request message can be executed, invocation timeout task
            and originating server of the request
        """
        self.queue.append(job)
        self._job_queued_event.set()

    def cleanup(self):
        # drop pending jobs before the base class releases all waiters
        self.queue.clear()
        return super().cleanup()
+
+
class AsyncScheduler(Scheduler):
    """
    Scheduler class to schedule the operations of a thing in an async loop.
    One-shot: handles a single job and then stops (see `_one_shot`).
    """
    def __init__(self, instance: Thing, rpc_server: RPCServer) -> None:
        super().__init__(instance, rpc_server)
        self._job = None
        self._one_shot = True
        # asyncio events: execution shares the event loop with the listener
        self._operation_execution_ready_event = asyncio.Event()
        self._operation_execution_complete_event = asyncio.Event()

    @property
    def has_job(self) -> bool:
        return self._job is not None

    @property
    def next_job(self) -> Scheduler.JobInvokationType:
        if self._job is None:
            raise RuntimeError("No job to execute")
        return self._job

    def dispatch_job(self, job: Scheduler.JobInvokationType) -> None:
        self._job = job
        eventloop = get_current_async_loop()
        # schedule the message tunnel and the thing executor as tasks on the running loop;
        # call_soon + create_task defers task creation until the loop is actually running
        eventloop.call_soon(lambda: asyncio.create_task(self.rpc_server.tunnel_message_to_things(self)))
        eventloop.call_soon(lambda: asyncio.create_task(self.rpc_server.run_thing_instance(self.instance, self)))
        self._job_queued_event.set()
+
+
class ThreadedScheduler(Scheduler):
    """
    Scheduler class to schedule the operations of a thing in a threaded loop.
    One-shot: handles a single job in a dedicated thread and then stops.
    """

    def __init__(self, instance: Thing, rpc_server: RPCServer) -> None:
        super().__init__(instance, rpc_server)
        self._job = None
        self._execution_thread = None
        self._one_shot = True
        # threading events: execution happens in a dedicated thread
        self._operation_execution_ready_event = threading.Event()
        self._operation_execution_complete_event = threading.Event()

    @property
    def has_job(self) -> bool:
        return self._job is not None

    @property
    def next_job(self) -> Scheduler.JobInvokationType:
        if self._job is None:
            raise RuntimeError("No job to execute")
        return self._job

    def dispatch_job(self, job: Scheduler.JobInvokationType) -> None:
        """store the job, start the message tunnel on the loop and run the executor in its own thread"""
        self._job = job
        eventloop = get_current_async_loop()
        eventloop.call_soon(lambda: asyncio.create_task(self.rpc_server.tunnel_message_to_things(self)))
        # asyncio.run gives the executor thread its own fresh event loop for the coroutine
        self._execution_thread = threading.Thread(
            target=asyncio.run,
            args=(self.rpc_server.run_thing_instance(self.instance, self),)
        )
        self._execution_thread.start()
        self._job_queued_event.set()
+
+
def prepare_rpc_server(
                    instance: Thing,
                    transports: ZMQ_TRANSPORTS,
                    context: zmq.asyncio.Context | None = None,
                    **kwargs
                ) -> None:
    """
    Create an RPC server (INPROC transport) or a full ZMQ server (other transports) for a `Thing`.
    NOTE(review): the constructed server is neither returned nor stored here — presumably it
    registers itself with the `Thing` during construction; confirm before relying on this.
    """
    # expose_eventloop: bool, False
    # expose the associated Eventloop which executes the object. This is generally useful for remotely
    # adding more objects to the same event loop.
    # dont specify http server as a kwarg, as the other method run_with_http_server has to be used
    if context is not None and not isinstance(context, zmq.asyncio.Context):
        raise TypeError("context must be an instance of zmq.asyncio.Context")
    context = context or zmq.asyncio.Context()

    if transports == 'INPROC' or transports == ZMQ_TRANSPORTS.INPROC:
        RPCServer(
            id=instance.id,
            things=[instance],
            context=context,
            logger=instance.logger
        )
    else:
        # deferred import, presumably to avoid a circular dependency with the server package
        from ...server.zmq import ZMQServer
        ZMQServer(
            id=instance.id,
            things=[instance],
            context=context,
            transports=transports,
            tcp_socket_address=kwargs.get('tcp_socket_address', None),
            logger=instance.logger
        )
+
+
# public API of this module; only the server class itself is exported
__all__ = [
    RPCServer.__name__
]
\ No newline at end of file
diff --git a/hololinked/server/exceptions.py b/hololinked/exceptions.py
similarity index 57%
rename from hololinked/server/exceptions.py
rename to hololinked/exceptions.py
index 96a460c6..7f979156 100644
--- a/hololinked/server/exceptions.py
+++ b/hololinked/exceptions.py
@@ -10,6 +10,18 @@ class BreakAllLoops(Exception):
"""
pass
+class BreakLoop(Exception):
+ """
+ raise and catch to exit a loop from within another function or method
+ """
+ pass
+
+class BreakFlow(Exception):
+ """
+ raised to break the flow of the program
+ """
+ pass
+
class StateMachineError(Exception):
"""
raise to show errors while calling actions or writing properties in wrong state
@@ -23,4 +35,11 @@ class DatabaseError(Exception):
-__all__ = ['BreakInnerLoop', 'BreakAllLoops', 'StateMachineError']
\ No newline at end of file
+__all__ = [
+ 'BreakInnerLoop',
+ 'BreakAllLoops',
+ 'BreakLoop',
+ 'BreakFlow',
+ 'StateMachineError',
+ 'DatabaseError'
+]
\ No newline at end of file
diff --git a/hololinked/param/parameterized.py b/hololinked/param/parameterized.py
index bebe7247..6f12f39b 100644
--- a/hololinked/param/parameterized.py
+++ b/hololinked/param/parameterized.py
@@ -1903,9 +1903,9 @@ class Foo(Parameterized):
see documentation for the 'logging' module.
"""
def __init__(self, **params):
- self.create_param_containers(**params)
+ self.create_param_container(**params)
- def create_param_containers(self, **params):
+ def create_param_container(self, **params):
self._param_container = InstanceParameters(self.__class__, self)
self._param_container._setup_parameters(**params)
diff --git a/hololinked/rpc/__init__.py b/hololinked/rpc/__init__.py
deleted file mode 100644
index 60a3fee4..00000000
--- a/hololinked/rpc/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""
-namespace for pure RPC applications to auto suggest the user. The following were the original names
-given until the WoT schema was being generated. The docs then got totally mixed up with two types
-of names and therefore the WoT naming was taken as final.
-"""
-import typing
-from enum import Enum
-from ..server import Thing as RemoteObject, action
-from ..server.constants import USE_OBJECT_NAME, HTTP_METHODS, JSON
-
-
-def remote_method(URL_path : str = USE_OBJECT_NAME, http_method : str = HTTP_METHODS.POST,
- state : typing.Optional[typing.Union[str, Enum]] = None, argument_schema : typing.Optional[JSON] = None,
- return_value_schema : typing.Optional[JSON] = None, **kwargs) -> typing.Callable:
- return action(URL_path=URL_path, http_method=http_method, state=state, argument_schema=argument_schema,
- return_value_schema=return_value_schema, **kwargs)
\ No newline at end of file
diff --git a/hololinked/schema_validators/__init__.py b/hololinked/schema_validators/__init__.py
new file mode 100644
index 00000000..98d5e0af
--- /dev/null
+++ b/hololinked/schema_validators/__init__.py
@@ -0,0 +1,2 @@
+from .validators import BaseSchemaValidator, JSONSchemaValidator, PydanticSchemaValidator
+from .json_schema import JSONSchema
\ No newline at end of file
diff --git a/hololinked/schema_validators/json_schema.py b/hololinked/schema_validators/json_schema.py
new file mode 100644
index 00000000..b9ffdbb2
--- /dev/null
+++ b/hololinked/schema_validators/json_schema.py
@@ -0,0 +1,82 @@
+
+
+
+
+import typing
+from ..constants import JSON
+
+
+
class JSONSchema:
    """type restrictor converting python types to JSON schema types"""

    # 'null' is included so registrations mirroring the default replacement for
    # type(None) (mapped to 'null' below) are accepted; it was previously missing
    # although the error message claimed it was allowed
    _allowed_types = ('string', 'object', 'array', 'number', 'integer', 'boolean', 'null', None)

    # python type -> JSON schema type name (or a full schema for special cases)
    _replacements = {
        int : 'integer',
        float : 'number',
        str : 'string',
        bool : 'boolean',
        dict : 'object',
        list : 'array',
        tuple : 'array',
        type(None) : 'null',
        Exception : {
            "type": "object",
            "properties": {
                "message": {"type": "string"},
                "type": {"type": "string"},
                "traceback": {"type": "array", "items": {"type": "string"}},
                "notes": {"type": ["string", "null"]}
            },
            "required": ["message", "type", "traceback"]
        }
    }

    # python type -> full JSON schema; only meaningful for 'array' and 'object' types
    _schemas = {

    }

    @classmethod
    def is_allowed_type(cls, type : typing.Any) -> bool:
        """True when a JSON schema replacement is registered for the given python type"""
        if type in JSONSchema._replacements.keys():
            return True
        return False

    @classmethod
    def is_supported(cls, typ: typing.Any) -> bool:
        """True when a full JSON schema is registered for the given python type"""
        if typ in JSONSchema._schemas.keys():
            return True
        return False

    @classmethod
    def get_type(cls, typ : typing.Any) -> str:
        """JSON schema type (or schema) for the given python type; raises TypeError when unregistered"""
        if not JSONSchema.is_allowed_type(typ):
            raise TypeError(f"Object for wot-td has invalid type for JSON conversion. Given type - {type(typ)}. " +
                        "Use JSONSchema.register_replacements on hololinked.wot.td.JSONSchema object to recognise the type.")
        return JSONSchema._replacements[typ]

    @classmethod
    def register_type_replacement(cls, type: typing.Any, json_schema_type: str,
                                schema: typing.Optional["JSON"] = None) -> None:
        """
        specify a python type as a JSON type.
        schema only supported for array and objects.
        """
        # first parameter renamed from `self` to `cls` — this is a classmethod;
        # the `type` parameter name shadows the builtin but is kept for keyword compatibility
        if json_schema_type in JSONSchema._allowed_types:
            JSONSchema._replacements[type] = json_schema_type
            if schema is not None:
                if json_schema_type not in ('array', 'object'):
                    raise ValueError(f"schemas support only for array and object JSON schema types, your specified type - {type}.")
                JSONSchema._schemas[type] = schema
        else:
            # original message listed 'string' twice and omitted 'null'
            raise TypeError(f"json schema replacement type must be one of allowed type - 'string', 'object', 'array', " +
                        f"'number', 'integer', 'boolean', 'null'. Given value {json_schema_type}")

    @classmethod
    def get(cls, typ : typing.Any):
        """schema for array and objects only supported"""
        if not JSONSchema.is_supported(typ):
            raise ValueError(f"Schema for {typ} not provided. register one with JSONSchema.register_type_replacement()")
        return JSONSchema._schemas[typ]
diff --git a/hololinked/server/schema_validators.py b/hololinked/schema_validators/validators.py
similarity index 52%
rename from hololinked/server/schema_validators.py
rename to hololinked/schema_validators/validators.py
index 7287f523..3b3c9f63 100644
--- a/hololinked/server/schema_validators.py
+++ b/hololinked/schema_validators/validators.py
@@ -1,19 +1,8 @@
-import typing
-from .constants import JSON
-
-class JSONSchemaError(Exception):
- """
- common error to be raised for JSON schema
- validation irrespective of internal validation used
- """
- pass
+import jsonschema
+from pydantic import BaseModel
-class JSONValidationError(Exception):
- """
- common error to be raised for JSON validation
- irrespective of internal validation used
- """
- pass
+from ..utils import pydantic_validate_args_kwargs, json_schema_merge_args_to_kwargs
+from ..constants import JSON
@@ -22,7 +11,7 @@ class BaseSchemaValidator: # type definition
Base class for all schema validators.
Serves as a type definition.
"""
- def __init__(self, schema : JSON) -> None:
+ def __init__(self, schema: JSON | BaseModel) -> None:
self.schema = schema
def validate(self, data) -> None:
@@ -30,6 +19,69 @@ def validate(self, data) -> None:
validate the data against the schema.
"""
raise NotImplementedError("validate method must be implemented by subclass")
+
+ def validate_method_call(self, args, kwargs) -> None:
+ """
+ validate the method call against the schema.
+ """
+ raise NotImplementedError("validate_method_call method must be implemented by subclass")
+
+
class JSONSchemaValidator(BaseSchemaValidator):
    """
    JSON schema validator according to standard python JSON schema.
    Somewhat slow, consider msgspec if possible.
    """

    def __init__(self, schema) -> None:
        # check_schema raises early when the schema itself is malformed
        jsonschema.Draft7Validator.check_schema(schema)
        super().__init__(schema)
        self.validator = jsonschema.Draft7Validator(schema)

    def validate(self, data) -> None:
        # raises jsonschema.ValidationError on failure
        self.validator.validate(data)

    def validate_method_call(self, args, kwargs) -> None:
        # positional args are folded into kwargs using the schema before validation
        if len(args) > 0:
            kwargs = json_schema_merge_args_to_kwargs(self.schema, args, kwargs)
        self.validate(kwargs)

    def json(self) -> JSON:
        """allows JSON (de-)serializable of the instance itself"""
        return self.schema

    def __get_state__(self):
        return self.schema

    def __set_state__(self, schema):
        # rebuild the validator from the raw schema on deserialization
        return JSONSchemaValidator(schema)
+
+
class PydanticSchemaValidator(BaseSchemaValidator):
    """
    JSON schema validator according to pydantic.
    """
    def __init__(self, schema: BaseModel) -> None:
        # NOTE(review): `model_validate`/`model_dump_json` are used on `schema`, which suggests
        # a pydantic model *class* (or instance) — confirm the expected kind of `schema`
        super().__init__(schema)
        self.validator = schema.model_validate

    def validate(self, data) -> None:
        # raises pydantic.ValidationError on failure
        self.validator(data)

    def validate_method_call(self, args, kwargs) -> None:
        pydantic_validate_args_kwargs(self.schema, args, kwargs)

    def json(self) -> JSON:
        """allows JSON (de-)serializable of the instance itself"""
        return self.schema.model_dump_json()

    def __get_state__(self):
        return self.json()

    def __set_state__(self, schema: JSON):
        # NOTE(review): BaseModel(**schema) instantiates the *base* pydantic model, not the
        # original model class, so the restored validator will not enforce the original
        # fields — confirm intended behaviour or rebuild the concrete model class here
        return PydanticSchemaValidator(BaseModel(**schema))
+
+
try:
@@ -49,12 +101,14 @@ def __init__(self, schema : JSON) -> None:
def validate(self, data) -> None:
"""validates and raises exception when failed directly to the caller"""
- try:
- self.validator(data)
- except fastjsonschema.JsonSchemaException as ex:
- raise JSONSchemaError(str(ex)) from None
+ self.validator(data)
+
+ def validate_method_call(self, args, kwargs) -> None:
+ if len(args) > 0:
+ kwargs = json_schema_merge_args_to_kwargs(self.schema, args, kwargs)
+ self.validate(kwargs)
- def json(self):
+ def json(self) -> JSON:
"""allows JSON (de-)serializable of the instance itself"""
return self.schema
@@ -68,42 +122,3 @@ def __set_state__(self, schema):
pass
-
-import jsonschema
-
-class JsonSchemaValidator(BaseSchemaValidator):
- """
- JSON schema validator according to standard python JSON schema.
- Somewhat slow, consider msgspec if possible.
- """
-
- def __init__(self, schema) -> None:
- jsonschema.Draft7Validator.check_schema(schema)
- super().__init__(schema)
- self.validator = jsonschema.Draft7Validator(schema)
-
- def validate(self, data) -> None:
- self.validator.validate(data)
-
- def json(self):
- """allows JSON (de-)serializable of the instance itself"""
- return self.schema
-
- def __get_state__(self):
- return self.schema
-
- def __set_state__(self, schema):
- return JsonSchemaValidator(schema)
-
-
-
-def _get_validator_from_user_options(option : typing.Optional[str] = None) -> BaseSchemaValidator:
- """
- returns a JSON schema validator based on user options
- """
- if option == "fastjsonschema":
- return FastJsonSchemaValidator
- elif option == "jsonschema" or not option:
- return JsonSchemaValidator
- else:
- raise ValueError(f"Unknown JSON schema validator option: {option}")
\ No newline at end of file
diff --git a/hololinked/serializers/__init__.py b/hololinked/serializers/__init__.py
new file mode 100644
index 00000000..6248162c
--- /dev/null
+++ b/hololinked/serializers/__init__.py
@@ -0,0 +1 @@
+from .serializers import *
diff --git a/hololinked/serializers/payloads.py b/hololinked/serializers/payloads.py
new file mode 100644
index 00000000..acf59e76
--- /dev/null
+++ b/hololinked/serializers/payloads.py
@@ -0,0 +1,49 @@
+import typing
+from dataclasses import dataclass
+
+from ..constants import byte_types
+from .serializers import Serializers, BaseSerializer
+
+
+
@dataclass
class SerializableData:
    """
    A container for data that can be serialized.
    The content type decides the serializer to be used.
    """
    # payload; bytes-like values are treated as already serialized
    value: typing.Any
    # explicit serializer; takes precedence over the content-type lookup when given
    serializer: BaseSerializer | None = None
    content_type: str = 'application/json'

    def serialize(self):
        """serialize the value"""
        # bytes-like values are assumed pre-serialized and passed through untouched
        if isinstance(self.value, byte_types):
            return self.value
        if self.serializer is not None:
            return self.serializer.dumps(self.value)
        serializer = Serializers.content_types.get(self.content_type, None)
        if serializer is not None:
            return serializer.dumps(self.value)
        raise ValueError(f"content type {self.content_type} not supported for serialization")

    def deserialize(self):
        """deserialize the value"""
        # non-bytes values are assumed already deserialized and passed through untouched
        if not isinstance(self.value, byte_types):
            return self.value
        if self.serializer is not None:
            return self.serializer.loads(self.value)
        serializer = Serializers.content_types.get(self.content_type, None)
        if serializer is not None:
            return serializer.loads(self.value)
        raise ValueError(f"content type {self.content_type} not supported for deserialization")
+
+
@dataclass
class PreserializedData:
    """
    Container for payloads that arrive already serialized, i.e. raw bytes that should
    be passed through untouched. The content type hints at the serializer that
    produced the bytes, defaulting to 'unknown' when no hint is available.
    """
    # raw, already-serialized payload
    value: bytes
    # hint about the producing serializer, if known
    content_type: str = 'unknown'
\ No newline at end of file
diff --git a/hololinked/serializers/serializers.py b/hololinked/serializers/serializers.py
new file mode 100644
index 00000000..76f28b32
--- /dev/null
+++ b/hololinked/serializers/serializers.py
@@ -0,0 +1,556 @@
+"""
+adopted from pyro - https://github.com/irmen/Pyro5 - see following license
+
+MIT License
+
+Copyright (c) Irmen de Jong
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+import inspect
+import array
+import datetime
+import uuid
+import decimal
+import typing
+import warnings
+from enum import Enum
+from collections import deque
+from pydantic import validate_call
+# serializers:
+import pickle
+import json as pythonjson
+from msgspec import json as msgspecjson, msgpack, Struct
+# default dytypes:
+try:
+ import numpy
+except ImportError:
+ pass
+
+from ..param.parameters import (TypeConstrainedList, TypeConstrainedDict, TypedKeyMappingsConstrainedDict,
+ ClassSelector, String, Parameter)
+from ..constants import JSONSerializable
+from ..utils import MappableSingleton, format_exception_as_json, issubklass
+
+
+
class BaseSerializer(object):
    """
    Base class for (de)serializer implementations. All serializers must inherit this class
    and overload dumps() and loads() to be usable by the ZMQ message brokers. Any serializer
    that returns bytes when serialized and a python object on deserialization will be accepted.
    Serialization and deserialization errors will be passed as invalid message type
    (see ZMQ messaging contract) from server side and a exception will be raised on the client.
    """

    def __init__(self) -> None:
        super().__init__()
        # underlying protocol module (json/pickle/msgpack/...); subclasses assign it
        self.type = None

    def loads(self, data) -> typing.Any:
        "method called by ZMQ message brokers to deserialize data"
        raise NotImplementedError("implement loads()/deserialization in subclass")

    def dumps(self, data) -> bytes:
        "method called by ZMQ message brokers to serialize data"
        raise NotImplementedError("implement dumps()/serialization in subclass")

    def convert_to_bytes(self, data) -> bytes:
        # normalise any of the three supported buffer types down to plain bytes
        if isinstance(data, bytes):
            return data
        if isinstance(data, (bytearray, memoryview)):
            return bytes(data)
        raise TypeError("serializer convert_to_bytes accepts only bytes, bytearray or memoryview")

    @property
    def content_type(self) -> str:
        raise NotImplementedError("serializer must implement a content type")
+
+
+
# dict_keys is not a public builtin name, so capture the view type here for isinstance checks
dict_keys = type(dict().keys())

class JSONSerializer(BaseSerializer):
    "(de)serializer that wraps the msgspec JSON serialization protocol, default serializer for all clients."

    # registry of python type -> callable producing a JSON-serializable replacement
    _type_replacements = {}

    def __init__(self) -> None:
        super().__init__()
        self.type = msgspecjson

    def loads(self, data : typing.Union[bytearray, memoryview, bytes]) -> JSONSerializable:
        "method called by ZMQ message brokers to deserialize data"
        return msgspecjson.decode(self.convert_to_bytes(data))

    def dumps(self, data) -> bytes:
        "method called by ZMQ message brokers to serialize data"
        return msgspecjson.encode(data, enc_hook=self.default)

    @classmethod
    def default(cls, obj) -> JSONSerializable:
        "method called if no serialization option was found."
        if hasattr(obj, 'json'):
            # alternative to type replacement
            return obj.json()
        if isinstance(obj, Struct):
            return obj
        if isinstance(obj, Enum):
            return obj.name
        if isinstance(obj, (set, dict_keys, deque, tuple)):
            # json module can't deal with sets so we make a tuple out of it
            return list(obj)
        if isinstance(obj, (TypeConstrainedDict, TypeConstrainedList, TypedKeyMappingsConstrainedDict)):
            return obj._inner # copy has been implemented with same signature for both types
        if isinstance(obj, uuid.UUID):
            return str(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        if isinstance(obj, decimal.Decimal):
            return str(obj)
        if isinstance(obj, Exception):
            return format_exception_as_json(obj)
        if isinstance(obj, array.array):
            if obj.typecode == 'c':
                # array.tostring() was removed in Python 3.9; tobytes() is the supported
                # spelling (the 'c' typecode itself is Python 2 legacy, kept from Pyro)
                return obj.tobytes()
            if obj.typecode == 'u':
                return obj.tounicode()
            return obj.tolist()
        if 'numpy' in globals() and isinstance(obj, numpy.ndarray):
            return obj.tolist()
        replacer = cls._type_replacements.get(type(obj), None)
        if replacer:
            return replacer(obj)
        raise TypeError("Given type cannot be converted to JSON : {}".format(type(obj)))

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function) -> None:
        "register custom serialization function for a particular type"
        if object_type is type or not inspect.isclass(object_type):
            raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
        cls._type_replacements[object_type] = replacement_function

    @property
    def content_type(self) -> str:
        return 'application/json'
+
+
class PythonBuiltinJSONSerializer(JSONSerializer):
    "(de)serializer that wraps the python builtin JSON serialization protocol."

    def __init__(self) -> None:
        super().__init__()
        self.type = pythonjson

    def loads(self, data : typing.Union[bytearray, memoryview, bytes]) -> typing.Any:
        "method called by ZMQ message brokers to deserialize data"
        return pythonjson.loads(self.convert_to_bytes(data))

    def dumps(self, data) -> bytes:
        "method called by ZMQ message brokers to serialize data"
        data = pythonjson.dumps(data, ensure_ascii=False, allow_nan=True, default=self.default)
        return data.encode("utf-8")

    def dump(self, data : typing.Dict[str, typing.Any], file_desc) -> None:
        "write JSON to file"
        pythonjson.dump(data, file_desc, ensure_ascii=False, allow_nan=True, default=self.default)

    def load(self, file_desc) -> JSONSerializable:
        "load JSON from file"
        # first parameter was misnamed `cls` although this is a plain instance method
        # (it binds to the instance, not the class); renamed for clarity — callers
        # invoking serializer.load(fd) are unaffected
        return pythonjson.load(file_desc)
+
+
+
class PickleSerializer(BaseSerializer):
    "(de)serializer that wraps the pickle serialization protocol, use with encryption for safety."

    def __init__(self) -> None:
        super().__init__()
        self.type = pickle

    def dumps(self, data) -> bytes:
        "method called by ZMQ message brokers to serialize data"
        return pickle.dumps(data)

    def loads(self, data) -> typing.Any:
        "method called by ZMQ message brokers to deserialize data"
        # SECURITY: pickle.loads executes arbitrary code when fed untrusted bytes —
        # use this serializer only on trusted/encrypted channels (see class docstring)
        return pickle.loads(self.convert_to_bytes(data))

    @property
    def content_type(self) -> str:
        return 'application/octet-stream'
+
+
+
class MsgpackSerializer(BaseSerializer):
    """
    (de)serializer that wraps the msgspec MessagePack serialization protocol, recommended serializer for ZMQ based
    high speed applications. Set an instance of this serializer to both ``Thing.zmq_serializer`` and
    ``hololinked.client.ObjectProxy``. Unfortunately, MessagePack is currently not supported for HTTP clients.
    """

    def __init__(self) -> None:
        super().__init__()
        self.type = msgpack

    def dumps(self, value) -> bytes:
        return msgpack.encode(value)

    def loads(self, value) -> typing.Any:
        return msgpack.decode(self.convert_to_bytes(value))

    @property
    def content_type(self) -> str:
        # NOTE(review): 'x-msgpack' is a bare subtype; the conventional media type is
        # 'application/msgpack' (or 'application/x-msgpack') — confirm what clients and
        # the Serializers.content_types registry expect before changing
        return 'x-msgpack'
+
+
+class TextSerializer(BaseSerializer):
+ """Converts string or string compatible types to bytes and vice versa"""
+
+ def __init__(self) -> None:
+ super().__init__()
+ self.type = None
+
+ def dumps(self, data) -> bytes:
+ return str(data).encode('utf-8')
+
+ def loads(self, data) -> typing.Any:
+ return data.decode('utf-8')
+
+ @property
+ def content_type(self) -> str:
+ return 'text/plain'
+
+
+
+
+try:
+ import serpent
+
+ class SerpentSerializer(BaseSerializer):
+ """(de)serializer that wraps the serpent serialization protocol."""
+
+ def __init__(self) -> None:
+ super().__init__()
+ self.type = serpent
+
+ def dumps(self, data) -> bytes:
+ "method called by ZMQ message brokers to serialize data"
+ return serpent.dumps(data, module_in_classname=True)
+
+ def loads(self, data) -> typing.Any:
+ "method called by ZMQ message brokers to deserialize data"
+ return serpent.loads(self.convert_to_bytes(data))
+
+ @classmethod
+ def register_type_replacement(cls, object_type, replacement_function) -> None:
+ "register custom serialization function for a particular type"
+ def custom_serializer(obj, serpent_serializer, outputstream, indentlevel):
+ replaced = replacement_function(obj)
+ if replaced is obj:
+ serpent_serializer.ser_default_class(replaced, outputstream, indentlevel)
+ else:
+ serpent_serializer._serialize(replaced, outputstream, indentlevel)
+
+ if object_type is type or not inspect.isclass(object_type):
+ raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
+ serpent.register_class(object_type, custom_serializer)
+
+ # __all__.append(SerpentSerializer.__name__)
+except ImportError:
+ SerpentSerializer = None
+
+
+
+
+class Serializers(metaclass=MappableSingleton):
+ """
+ A singleton class that holds all serializers and provides a registry for content types.
+ All members are class attributes.
+
+ - For property, the value is serialized using the serializer registered for the property.
+ - For action, the return value is serialized using the serializer registered for the action.
+ - For event, the payload is serialized using the serializer registered for the event.
+
+ Registration of serializer is not mandatory for any property, action or event.
+ The default serializer is `JSONSerializer`, which will be provided to any unregistered object.
+ """
+ json = ClassSelector(default=JSONSerializer(), class_=BaseSerializer, class_member=True,
+ doc="The default serializer for all properties, actions and events") # type: BaseSerializer
+ pickle = ClassSelector(default=PickleSerializer(), class_=BaseSerializer, class_member=True,
+ doc="pickle serializer, unsafe without encryption but useful for faster & flexible serialization of python specific types") # type: BaseSerializer
+ msgpack = ClassSelector(default=MsgpackSerializer(), class_=BaseSerializer, class_member=True,
+ doc="MessagePack serializer, efficient binary format that is both fast & interoperable between languages ") # type: BaseSerializer
+ text = ClassSelector(default=TextSerializer(), class_=BaseSerializer, class_member=True,
+ doc="Text serializer, converts string or string compatible types to bytes and vice versa") # type: BaseSerializer
+ default = ClassSelector(default=json.default, class_=BaseSerializer, class_member=True,
+ doc="The default serialization to be used") # type: BaseSerializer
+ default_content_type = String(default=default.default.content_type, class_member=True,
+ doc="The default content type for the default serializer") # type: str
+
+ content_types = Parameter(default={
+ 'application/json': json.default,
+ 'application/octet-stream': pickle.default,
+ 'x-msgpack': msgpack.default,
+ 'text/plain': text.default
+ }, doc="A dictionary of content types and their serializers",
+ readonly=True, class_member=True) # type: typing.Dict[str, BaseSerializer]
+ object_content_type_map = Parameter(default=dict(), class_member=True,
+ doc="A dictionary of content types for specific properties, actions and events",
+ readonly=True) # type: typing.Dict[str, typing.Dict[str, str]]
+ object_serializer_map = Parameter(default=dict(), class_member=True,
+ doc="A dictionary of serializer for specific properties, actions and events",
+ readonly=True) # type: typing.Dict[str, typing.Dict[str, BaseSerializer]]
+ protocol_serializer_map = Parameter(default=dict(), class_member=True,
+ doc="A dictionary of serializer for a specific protocol",
+ readonly=True) # type: typing.Dict[str, BaseSerializer]
+
+
+ @classmethod
+ def register(cls, serializer: BaseSerializer, name: str | None = None, override: bool = False) -> None:
+ """
+ Register a new serializer. It is recommended to implement a content type property/attribute for the serializer
+        to facilitate automatic deserialization on client side, otherwise deserialization is not guaranteed by this package.
+
+ Parameters
+ ----------
+ serializer: BaseSerializer
+ the serializer to register
+ name: str, optional
+ the name of the serializer to be accessible under the object namespace. If not provided, the name of the
+ serializer class is used.
+ override: bool, optional
+ whether to override the serializer if the content type is already registered, by default False & raises ValueError
+ for duplicate content type.
+
+ Raises
+ ------
+ ValueError
+ if the serializer content type is already registered
+ """
+ try:
+ if serializer.content_type in cls.content_types and not override:
+ raise ValueError("content type already registered : {}".format(serializer.content_type))
+ cls.content_types[serializer.content_type] = serializer
+ except NotImplementedError:
+ warnings.warn("serializer does not implement a content type", category=UserWarning)
+ cls[name or serializer.__name__] = serializer
+
+
+ @classmethod
+ def for_object(cls, thing_id: str, thing_cls: str, objekt: str) -> BaseSerializer:
+ """
+ Retrieve a serializer for a given property, action or event
+
+ Parameters
+ ----------
+ thing: str | typing.Any
+ the id of the Thing or the Thing that owns the property, action or event
+ objekt: str | Property | Action | Event
+ the name of the property, action or event
+
+ Returns
+ -------
+ BaseSerializer | JSONSerializer
+ the serializer for the property, action or event. If no serializer is found, the default JSONSerializer is
+ returned.
+ """
+ if len(cls.object_serializer_map) == 0 and len(cls.object_content_type_map) == 0:
+ return cls.default
+ for thing in [thing_id, thing_cls]: # first thing id, then thing cls
+ if thing in cls.object_serializer_map:
+ if objekt in cls.object_serializer_map[thing]:
+ return cls.object_serializer_map[thing][objekt]
+ if thing in cls.object_content_type_map:
+ if objekt in cls.object_content_type_map[thing]:
+ return cls.content_types[cls.object_content_type_map[thing][objekt]]
+ return cls.default # JSON is default serializer
+
+
+ # @validate_call
+ @classmethod
+ def register_content_type_for_object(cls, objekt: typing.Any, content_type: str) -> None:
+ """
+ Register content type for a property, action, event, or a `Thing` class to use a specific serializer.
+
+ Parameters
+ ----------
+ objekt: Property | Action | Event | Thing
+ the property, action or event. string is not accepted - use `register_content_type_for_object_by_name()` instead.
+ content_type: str
+ the content type for the value of the objekt or the serializer to be used
+
+ Raises
+ ------
+ ValueError
+ if the object is not a Property, Action or Event
+ """
+ if content_type not in cls.content_types:
+ raise ValueError("content type {} unsupported".format(content_type))
+ from ..core import Property, Action, Event, Thing
+ if not isinstance(objekt, (Property, Action, Event)) and not issubklass(objekt, Thing):
+ raise ValueError("object must be a Property, Action or Event, got : {}".format(type(objekt)))
+ if issubklass(objekt, Thing):
+ owner = objekt.__name__
+ elif not objekt.owner:
+ raise ValueError("object owner cannot be determined, cannot register content type: {}".format(objekt))
+ else:
+ owner = objekt.owner.__name__
+ if owner not in cls.object_content_type_map:
+ cls.object_content_type_map[owner] = dict()
+ if issubklass(objekt, Thing):
+ cls.object_content_type_map[owner] = content_type
+ else:
+ cls.object_content_type_map[owner][objekt.name] = content_type
+
+
+ # @validate_call
+ @classmethod
+ def register_content_type_for_object_per_thing_instance(cls, thing_id: str,
+ objekt: str | typing.Any, content_type: str) -> None:
+ """
+ Register an existing content type for a property, action or event to use a specific serializer. Other option is
+ to register a serializer directly, the effects are similar.
+
+ Parameters
+ ----------
+ thing_id: str
+ the id of the Thing that owns the property, action or event
+ objekt: str
+ the name of the property, action or event
+ content_type: str
+ the content type to be used
+ """
+ if not content_type in cls.content_types:
+ raise ValueError("content type {} unsupported".format(content_type))
+ from ..core import Property, Action, Event
+ if not isinstance(objekt, (Property, Action, Event, str)):
+ raise ValueError("object must be a Property, Action or Event, got : {}".format(type(objekt)))
+ if not isinstance(objekt, str):
+ objekt = objekt.name
+ if thing_id not in cls.object_content_type_map:
+ cls.object_content_type_map[thing_id] = dict()
+ cls.object_content_type_map[thing_id][objekt] = content_type
+
+
+ @classmethod
+ def register_content_type_for_thing_instance(cls, thing_id: str, content_type: str) -> None:
+ """
+ Register a content type for a specific Thing instance.
+
+ Parameters
+ ----------
+ thing_id: str
+ the id of the Thing
+ content_type: str
+ the content type to be used
+ """
+ if content_type not in cls.content_types:
+ raise ValueError("content type {} unsupported".format(content_type))
+ cls.object_content_type_map[thing_id] = content_type
+
+ # @validate_call
+ @classmethod
+ def register_for_object(cls, objekt: typing.Any, serializer: BaseSerializer) -> None:
+ """
+ Register (an existing) serializer for a property, action or event. Other option is to register a content type,
+ the effects are similar.
+
+ Parameters
+ ----------
+ objekt: str | Property | Action | Event
+ the property, action or event
+ serializer: BaseSerializer
+ the serializer to be used
+ """
+ if not isinstance(serializer, BaseSerializer):
+ raise ValueError("serializer must be an instance of BaseSerializer, given : {}".format(type(serializer)))
+ from ..core import Property, Action, Event, Thing
+ if not isinstance(objekt, (Property, Action, Event)) and not issubklass(objekt, Thing):
+ raise ValueError("object must be a Property, Action or Event, or Thing, got : {}".format(type(objekt)))
+ if issubklass(objekt, Thing):
+ owner = objekt.__name__
+ elif not objekt.owner:
+ raise ValueError("object owner cannot be determined : {}".format(objekt))
+ else:
+ owner = objekt.owner.__name__
+ if owner not in cls.object_serializer_map:
+ cls.object_serializer_map[owner] = dict()
+ if issubklass(objekt, Thing):
+ cls.object_serializer_map[owner] = serializer
+ else:
+ cls.object_serializer_map[owner][objekt.name] = serializer
+
+ @classmethod
+ def register_for_object_per_thing_instance(cls, thing_id: str, objekt: str, serializer: BaseSerializer) -> None:
+ """
+ Register a serializer for a property, action or event for a specific Thing instance.
+
+ Parameters
+ ----------
+ thing_id: str
+ the id of the Thing that owns the property, action or event
+ objekt: str
+ the name of the property, action or event
+ serializer: BaseSerializer
+ the serializer to be used
+ """
+ if thing_id not in cls.object_serializer_map:
+ cls.object_serializer_map[thing_id] = dict()
+ cls.object_serializer_map[thing_id][objekt] = serializer
+
+
+ @classmethod
+ def register_for_thing_instance(cls, thing_id: str, serializer: BaseSerializer) -> None:
+ """
+ Register a serializer for a specific Thing instance.
+
+ Parameters
+ ----------
+ thing_id: str
+ the id of the Thing
+ serializer: BaseSerializer
+ the serializer to be used
+ """
+ cls.object_serializer_map[thing_id] = serializer
+
+
+ @classmethod
+ def reset(cls) -> None:
+ """
+ Reset the serializer registry.
+ """
+ cls.object_content_type_map.clear()
+ cls.object_serializer_map.clear()
+ cls.protocol_serializer_map.clear()
+ cls.default = cls.json
+
+
+
+
+__all__ = [
+ JSONSerializer.__name__,
+ PickleSerializer.__name__,
+ MsgpackSerializer.__name__,
+ TextSerializer.__name__,
+ BaseSerializer.__name__,
+ Serializers.__name__
+]
\ No newline at end of file
diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py
deleted file mode 100644
index 4d56ae5b..00000000
--- a/hololinked/server/HTTPServer.py
+++ /dev/null
@@ -1,400 +0,0 @@
-import asyncio
-from dataclasses import dataclass
-import zmq
-import zmq.asyncio
-import logging
-import socket
-import ssl
-import typing
-from tornado import ioloop
-from tornado.web import Application
-from tornado.httpserver import HTTPServer as TornadoHTTP1Server
-from tornado.httpclient import AsyncHTTPClient, HTTPRequest
-
-# from tornado_http2.server import Server as TornadoHTTP2Server
-from ..param import Parameterized
-from ..param.parameters import (Integer, IPAddress, ClassSelector, Selector, TypedList, String)
-from .constants import ZMQ_PROTOCOLS, CommonRPC, HTTPServerTypes, ResourceTypes, ServerMessage
-from .utils import get_IP_from_interface, issubklass
-from .dataklasses import HTTPResource, ServerSentEvent
-from .utils import get_default_logger
-from .serializers import JSONSerializer
-from .database import ThingInformation
-from .zmq_message_brokers import AsyncZMQClient, MessageMappedZMQClientPool
-from .handlers import RPCHandler, BaseHandler, EventHandler, ThingsHandler, StopHandler
-from .schema_validators import BaseSchemaValidator, JsonSchemaValidator
-from .events import Event
-from .eventloop import EventLoop
-from .config import global_config
-
-
-
-
-@dataclass
-class InteractionAffordance:
- URL_path : str
- obj : Event # typing.Union[Property, Action, Event]
- http_methods : typing.Tuple[str, typing.Optional[str], typing.Optional[str]]
- handler : BaseHandler
- kwargs : dict
-
- def __eq__(self, other : "InteractionAffordance") -> bool:
- return self.obj == other.obj
-
-
-
-class HTTPServer(Parameterized):
- """
- HTTP(s) server to route requests to ``Thing``.
- """
-
- things = TypedList(item_type=str, default=None, allow_None=True,
- doc="instance name of the things to be served by the HTTP server." ) # type: typing.List[str]
- port = Integer(default=8080, bounds=(1, 65535),
- doc="the port at which the server should be run" ) # type: int
- address = IPAddress(default='0.0.0.0',
- doc="IP address") # type: str
- # protocol_version = Selector(objects=[1, 1.1, 2], default=2,
- # doc="for HTTP 2, SSL is mandatory. HTTP2 is recommended. \
- # When no SSL configurations are provided, defaults to 1.1" ) # type: float
- logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True,
- doc="logging.Logger" ) # type: logging.Logger
- log_level = Selector(objects=[logging.DEBUG, logging.INFO, logging.ERROR, logging.WARN,
- logging.CRITICAL, logging.ERROR],
- default=logging.INFO,
- doc="""alternative to logger, this creates an internal logger with the specified log level
- along with a IO stream handler.""" ) # type: int
- serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True,
- doc="""json serializer used by the server""" ) # type: JSONSerializer
- ssl_context = ClassSelector(class_=ssl.SSLContext, default=None, allow_None=True,
- doc="SSL context to provide encrypted communication") # type: typing.Optional[ssl.SSLContext]
- certfile = String(default=None, allow_None=True,
- doc="""alternative to SSL context, provide certificate file & key file to allow the server to
- create a SSL context""") # type: str
- keyfile = String(default=None, allow_None=True,
- doc="""alternative to SSL context, provide certificate file & key file to allow the server to
- create a SSL context""") # type: str
- allowed_clients = TypedList(item_type=str,
- doc="""Serves request and sets CORS only from these clients, other clients are rejected with 403.
- Unlike pure CORS, the server resource is not even executed if the client is not
- an allowed client. if None any client is served.""")
- host = String(default=None, allow_None=True,
- doc="Host Server to subscribe to coordinate starting sequence of things & web GUI" ) # type: str
- # network_interface = String(default='Ethernet',
- # doc="Currently there is no logic to detect the IP addresss (as externally visible) correctly, \
- # therefore please send the network interface name to retrieve the IP. If a DNS server is present, \
- # you may leave this field" ) # type: str
- request_handler = ClassSelector(default=RPCHandler, class_=RPCHandler, isinstance=False,
- doc="custom web request handler of your choice for property read-write & action execution" ) # type: typing.Union[BaseHandler, RPCHandler]
- event_handler = ClassSelector(default=EventHandler, class_=(EventHandler, BaseHandler), isinstance=False,
- doc="custom event handler of your choice for handling events") # type: typing.Union[BaseHandler, EventHandler]
- schema_validator = ClassSelector(class_=BaseSchemaValidator, default=JsonSchemaValidator, allow_None=True, isinstance=False,
- doc="""Validator for JSON schema. If not supplied, a default JSON schema validator is created.""") # type: BaseSchemaValidator
-
-
-
- def __init__(self, things : typing.List[str], *, port : int = 8080, address : str = '0.0.0.0',
- host : typing.Optional[str] = None, logger : typing.Optional[logging.Logger] = None, log_level : int = logging.INFO,
- serializer : typing.Optional[JSONSerializer] = None, ssl_context : typing.Optional[ssl.SSLContext] = None,
- schema_validator : typing.Optional[BaseSchemaValidator] = JsonSchemaValidator,
- certfile : str = None, keyfile : str = None,
- # protocol_version : int = 1, network_interface : str = 'Ethernet',
- allowed_clients : typing.Optional[typing.Union[str, typing.Iterable[str]]] = None,
- **kwargs) -> None:
- """
- Parameters
- ----------
- things: List[str]
- instance name of the things to be served as a list.
- port: int, default 8080
- the port at which the server should be run
- address: str, default 0.0.0.0
- IP address
- logger: logging.Logger, optional
- logging.Logger instance
- log_level: int
- alternative to logger, this creates an internal logger with the specified log level along with a IO stream handler.
- serializer: JSONSerializer, optional
- json serializer used by the server
- ssl_context: ssl.SSLContext
- SSL context to provide encrypted communication
- certfile: str
- alternative to SSL context, provide certificate file & key file to allow the server to create a SSL context
- keyfile: str
- alternative to SSL context, provide certificate file & key file to allow the server to create a SSL context
- allowed_clients: List[str]
- serves request and sets CORS only from these clients, other clients are reject with 403. Unlike pure CORS
- feature, the server resource is not even executed if the client is not an allowed client.
- **kwargs:
- rpc_handler: RPCHandler | BaseHandler, optional
- custom web request handler of your choice for property read-write & action execution
- event_handler: EventHandler | BaseHandler, optional
- custom event handler of your choice for handling events
- """
- super().__init__(
- things=things,
- port=port,
- address=address,
- host=host,
- logger=logger,
- log_level=log_level,
- serializer=serializer or JSONSerializer(),
- # protocol_version=1,
- schema_validator=schema_validator,
- certfile=certfile,
- keyfile=keyfile,
- ssl_context=ssl_context,
- # network_interface='Ethernet',# network_interface,
- request_handler=kwargs.get('request_handler', RPCHandler),
- event_handler=kwargs.get('event_handler', EventHandler),
- allowed_clients=allowed_clients if allowed_clients is not None else []
- )
- self._type = HTTPServerTypes.THING_SERVER
- self._lost_things = dict() # see update_router_with_thing
- self._zmq_protocol = ZMQ_PROTOCOLS.IPC
- self._zmq_inproc_socket_context = None
- self._zmq_inproc_event_context = None
- self._local_rules = dict() # type: typing.Dict[str, typing.List[InteractionAffordance]]
-
- @property
- def all_ok(self) -> bool:
- self._IP = f"{self.address}:{self.port}"
- if self.logger is None:
- self.logger = get_default_logger('{}|{}'.format(self.__class__.__name__,
- f"{self.address}:{self.port}"),
- self.log_level)
-
- if self._zmq_protocol == ZMQ_PROTOCOLS.INPROC and (self._zmq_inproc_socket_context is None or self._zmq_inproc_event_context is None):
- raise ValueError("Inproc socket context is not provided. Logic Error.")
-
- self.app = Application(handlers=[
- (r'/remote-objects', ThingsHandler, dict(request_handler=self.request_handler,
- event_handler=self.event_handler)),
- (r'/stop', StopHandler, dict(owner=self))
- ])
-
- self.zmq_client_pool = MessageMappedZMQClientPool(self.things, identity=self._IP,
- deserialize_server_messages=False, handshake=False,
- http_serializer=self.serializer,
- context=self._zmq_inproc_socket_context,
- protocol=self._zmq_protocol,
- logger=self.logger
- )
- # print("client pool context", self.zmq_client_pool.context)
- event_loop = EventLoop.get_async_loop() # sets async loop for a non-possessing thread as well
- self.update_router_with_things()
- event_loop.call_soon(lambda : asyncio.create_task(self.subscribe_to_host()))
- event_loop.call_soon(lambda : asyncio.create_task(self.zmq_client_pool.poll()) )
- for client in self.zmq_client_pool:
- event_loop.call_soon(lambda : asyncio.create_task(client._handshake(timeout=60000)))
-
- self.tornado_event_loop = None
- # set value based on what event loop we use, there is some difference
- # between the asyncio event loop and the tornado event loop
-
- # if self.protocol_version == 2:
- # raise NotImplementedError("Current HTTP2 is not implemented.")
- # self.tornado_instance = TornadoHTTP2Server(self.app, ssl_options=self.ssl_context)
- # else:
- self.tornado_instance = TornadoHTTP1Server(self.app, ssl_options=self.ssl_context)
- return True
-
-
- async def subscribe_to_host(self):
- if self.host is None:
- return
- client = AsyncHTTPClient()
- for i in range(300): # try for five minutes
- try:
- res = await client.fetch(HTTPRequest(
- url=f"{self.host}/subscribers",
- method='POST',
- body=JSONSerializer.dumps(dict(
- hostname=socket.gethostname(),
- IPAddress=get_IP_from_interface(self.network_interface),
- port=self.port,
- type=self._type,
- https=self.ssl_context is not None
- )),
- validate_cert=False,
- headers={"content-type" : "application/json"}
- ))
- except Exception as ex:
- self.logger.error(f"Could not subscribe to host {self.host}. error : {str(ex)}, error type : {type(ex)}.")
- if i >= 299:
- raise ex from None
- else:
- if res.code in [200, 201]:
- self.logger.info(f"subsribed successfully to host {self.host}")
- break
- elif i >= 299:
- raise RuntimeError(f"could not subsribe to host {self.host}. response {JSONSerializer.loads(res.body)}")
- await asyncio.sleep(1)
- # we lose the client anyway so we close it. if we decide to reuse the client, changes needed
- client.close()
-
-
- def listen(self) -> None:
- """
- Start HTTP server. This method is blocking, async event loops intending to schedule the HTTP server should instead use
- the inner tornado instance's (``HTTPServer.tornado_instance``) listen() method.
- """
- assert self.all_ok, 'HTTPServer all is not ok before starting' # Will always be True or cause some other exception
- self.tornado_event_loop = ioloop.IOLoop.current()
- self.tornado_instance.listen(port=self.port, address=self.address)
- self.logger.info(f'started webserver at {self._IP}, ready to receive requests.')
- self.tornado_event_loop.start()
-
-
- async def stop(self) -> None:
- """
- Stop the event loop & the HTTP server. This method is async and should be awaited, mostly within a request
- handler. The stop handler at the path '/stop' with POST request is already implemented.
- """
- self.tornado_instance.stop()
- self.zmq_client_pool.stop_polling()
- await self.tornado_instance.close_all_connections()
- if self.tornado_event_loop is not None:
- self.tornado_event_loop.stop()
-
-
- def update_router_with_things(self) -> None:
- """
- updates HTTP router with paths from ``Thing`` (s)
- """
- event_loop = EventLoop.get_async_loop() # sets async loop for a non-possessing thread as well
- for client in self.zmq_client_pool:
- event_loop.call_soon(lambda : asyncio.create_task(self.update_router_with_thing(client)))
-
-
-
- async def update_router_with_thing(self, client : AsyncZMQClient):
- if client.instance_name in self._lost_things:
- # Just to avoid duplication of this call as we proceed at single client level and not message mapped level
- return
- self.logger.info(f"attempting to update router with thing {client.instance_name}.")
- self._lost_things[client.instance_name] = client
- while True:
- try:
- await client.handshake_complete()
- resources = dict() # type: typing.Dict[str, HTTPResource]
- reply = (await client.async_execute(
- instruction=CommonRPC.http_resource_read(client.instance_name),
- raise_client_side_exception=True
- ))[ServerMessage.DATA]
- resources.update(reply)
-
- handlers = []
- for instruction, http_resource in resources.items():
- if http_resource["what"] in [ResourceTypes.PROPERTY, ResourceTypes.ACTION]:
- resource = HTTPResource(**http_resource)
- handlers.append((resource.fullpath, self.request_handler, dict(
- resource=resource,
- validator=self.schema_validator(resource.argument_schema) if global_config.validate_schema_on_client and resource.argument_schema else None,
- owner=self
- )))
- elif http_resource["what"] == ResourceTypes.EVENT:
- resource = ServerSentEvent(**http_resource)
- if resource.class_name in self._local_rules and any(ia.obj._obj_name == resource.obj_name for ia in self._local_rules[resource.class_name]):
- for ia in self._local_rules[resource.class_name]:
- if ia.obj._obj_name == resource.obj_name:
- handlers.append((f'/{client.instance_name}{ia.URL_path}', ia.handler, dict(resource=resource, validator=None,
- owner=self, **ia.kwargs)))
- else:
- handlers.append((instruction, self.event_handler, dict(
- resource=resource,
- validator=None,
- owner=self
- )))
- """
- for handler based tornado rule matcher, the Rule object has following
- signature
-
- def __init__(
- self,
- matcher: "Matcher",
- target: Any,
- target_kwargs: Optional[Dict[str, Any]] = None,
- name: Optional[str] = None,
- ) -> None:
-
- matcher - based on route
- target - handler
- target_kwargs - given to handler's initialize
- name - ...
-
- len == 2 tuple is route + handler
- len == 3 tuple is route + handler + target kwargs
-
- so we give (path, RPCHandler, {'resource' : HTTPResource})
-
- path is extracted from remote_method(URL_path='....')
- RPCHandler is the base handler of this package for RPC purposes
- resource goes into target kwargs as the HTTPResource generated by
- remote_method and RemoteParamater contains all the info given
- to make RPCHandler work
- """
- self.app.wildcard_router.add_rules(handlers)
- self.logger.info(f"updated router with thing {client.instance_name}.")
- break
- except Exception as ex:
- print("error", ex)
- self.logger.error(f"error while trying to update router with thing - {str(ex)}. " +
- "Trying again in 5 seconds")
- await asyncio.sleep(5)
-
- try:
- reply = (await client.async_execute(
- instruction=CommonRPC.object_info_read(client.instance_name),
- raise_client_side_exception=True
- ))[ServerMessage.DATA]
- object_info = ThingInformation(**reply)
- object_info.http_server ="{}://{}:{}".format("https" if self.ssl_context is not None else "http",
- socket.gethostname(), self.port)
-
- await client.async_execute(
- instruction=CommonRPC.object_info_write(client.instance_name),
- arguments=dict(value=object_info),
- raise_client_side_exception=True
- )
- self.logger.info(f"updated ThingInformation to {client.instance_name}")
- except Exception as ex:
- self.logger.error(f"error while trying to update thing with HTTP server details - {str(ex)}. " +
- "Trying again in 5 seconds")
- self.zmq_client_pool.poller.register(client.socket, zmq.POLLIN)
- self._lost_things.pop(client.instance_name)
-
-
- def add_event(self, URL_path : str, event : Event, handler : typing.Optional[BaseHandler] = None,
- **kwargs) -> None:
- """
- Add an event to be served by HTTP server
-
- Parameters
- ----------
- URL_path : str
- URL path to access the event
- event : Event
- Event to be served
- handler : BaseHandler, optional
- custom handler for the event
- kwargs : dict
- additional keyword arguments to be passed to the handler's __init__
- """
- if not isinstance(event, Event):
- raise TypeError("event should be of type Event")
- if not issubklass(handler, BaseHandler):
- raise TypeError("handler should be subclass of BaseHandler")
- if event.owner.__name__ not in self._local_rules:
- self._local_rules[event.owner.__name__] = []
- obj = InteractionAffordance(URL_path=URL_path, obj=event,
- http_methods=('GET',), handler=handler or self.event_handler,
- kwargs=kwargs)
- if obj not in self._local_rules[event.owner.__name__]:
- self._local_rules[event.owner.__name__].append(obj)
-
-
-
-__all__ = [
- HTTPServer.__name__
-]
\ No newline at end of file
diff --git a/hololinked/server/__init__.py b/hololinked/server/__init__.py
index 08e5508a..e69de29b 100644
--- a/hololinked/server/__init__.py
+++ b/hololinked/server/__init__.py
@@ -1,14 +0,0 @@
-# Order of import is reflected in this file to avoid circular imports
-from .constants import *
-from .serializers import *
-from .config import *
-from .zmq_message_brokers import *
-from .events import *
-from .action import *
-from .property import *
-from .database import *
-from .thing import *
-from .eventloop import *
-from .HTTPServer import *
-
-
diff --git a/hololinked/server/action.py b/hololinked/server/action.py
deleted file mode 100644
index 5d05bc87..00000000
--- a/hololinked/server/action.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import typing
-import jsonschema
-from enum import Enum
-from types import FunctionType, MethodType
-from inspect import iscoroutinefunction, getfullargspec
-
-from ..param.parameterized import ParameterizedFunction
-from .utils import issubklass, pep8_to_URL_path, isclassmethod
-from .dataklasses import ActionInfoValidator
-from .constants import USE_OBJECT_NAME, UNSPECIFIED, HTTP_METHODS, JSON
-from .config import global_config
-
-
-
-__action_kw_arguments__ = ['safe', 'idempotent', 'synchronous']
-
-def action(URL_path : str = USE_OBJECT_NAME, http_method : str = HTTP_METHODS.POST,
- state : typing.Optional[typing.Union[str, Enum]] = None, input_schema : typing.Optional[JSON] = None,
- output_schema : typing.Optional[JSON] = None, create_task : bool = False, **kwargs) -> typing.Callable:
- """
- Use this function as a decorate on your methods to make them accessible remotely. For WoT, an action affordance schema
- for the method is generated.
-
- Parameters
- ----------
- URL_path: str, optional
- The path of URL under which the object is accessible. defaults to name of the object.
- http_method: str, optional
- HTTP method (GET, POST, PUT etc.). defaults to POST.
- state: str | Tuple[str], optional
- state machine state under which the object can executed. When not provided,
- the action can be executed under any state.
- input_schema: JSON
- schema for arguments to validate them.
- output_schema: JSON
- schema for return value, currently only used to inform clients which is supposed to validate on its won.
- **kwargs:
- safe: bool
- indicate in thing description if action is safe to execute
- idempotent: bool
- indicate in thing description if action is idempotent (for example, allows HTTP client to cache return value)
- synchronous: bool
- indicate in thing description if action is synchronous (not long running)
- Returns
- -------
- Callable
- returns the callable object as it is
- """
-
- def inner(obj):
- original = obj
- if (not isinstance(obj, (FunctionType, MethodType)) and not isclassmethod(obj) and
- not issubklass(obj, ParameterizedFunction)):
- raise TypeError(f"target for action or is not a function/method. Given type {type(obj)}") from None
- if isclassmethod(obj):
- obj = obj.__func__
- if obj.__name__.startswith('__'):
- raise ValueError(f"dunder objects cannot become remote : {obj.__name__}")
- if hasattr(obj, '_remote_info') and not isinstance(obj._remote_info, ActionInfoValidator):
- raise NameError(
- "variable name '_remote_info' reserved for hololinked package. ",
- "Please do not assign this variable to any other object except hololinked.server.dataklasses.ActionInfoValidator."
- )
- else:
- obj._remote_info = ActionInfoValidator()
- obj_name = obj.__qualname__.split('.')
- if len(obj_name) > 1: # i.e. its a bound method, used by Thing
- if URL_path == USE_OBJECT_NAME:
- obj._remote_info.URL_path = f'/{pep8_to_URL_path(obj_name[1])}'
- else:
- if not URL_path.startswith('/'):
- raise ValueError(f"URL_path should start with '/', please add '/' before '{URL_path}'")
- obj._remote_info.URL_path = URL_path
- obj._remote_info.obj_name = obj_name[1]
- elif len(obj_name) == 1 and isinstance(obj, FunctionType): # normal unbound function - used by HTTPServer instance
- if URL_path is USE_OBJECT_NAME:
- obj._remote_info.URL_path = f'/{pep8_to_URL_path(obj_name[0])}'
- else:
- if not URL_path.startswith('/'):
- raise ValueError(f"URL_path should start with '/', please add '/' before '{URL_path}'")
- obj._remote_info.URL_path = URL_path
- obj._remote_info.obj_name = obj_name[0]
- else:
- raise RuntimeError(f"Undealt option for decorating {obj} or decorators wrongly used")
- if http_method is not UNSPECIFIED:
- if isinstance(http_method, str):
- obj._remote_info.http_method = (http_method,)
- else:
- obj._remote_info.http_method = http_method
- if state is not None:
- if isinstance(state, (Enum, str)):
- obj._remote_info.state = (state,)
- else:
- obj._remote_info.state = state
- if 'request' in getfullargspec(obj).kwonlyargs:
- obj._remote_info.request_as_argument = True
- obj._remote_info.isaction = True
- obj._remote_info.argument_schema = input_schema
- obj._remote_info.return_value_schema = output_schema
- obj._remote_info.obj = original
- obj._remote_info.create_task = create_task
- obj._remote_info.safe = kwargs.get('safe', False)
- obj._remote_info.idempotent = kwargs.get('idempotent', False)
- obj._remote_info.synchronous = kwargs.get('synchronous', False)
-
- if issubklass(obj, ParameterizedFunction):
- obj._remote_info.iscoroutine = iscoroutinefunction(obj.__call__)
- obj._remote_info.isparameterized = True
- else:
- obj._remote_info.iscoroutine = iscoroutinefunction(obj)
- obj._remote_info.isparameterized = False
- if global_config.validate_schemas and input_schema:
- jsonschema.Draft7Validator.check_schema(input_schema)
- if global_config.validate_schemas and output_schema:
- jsonschema.Draft7Validator.check_schema(output_schema)
-
- return original
- if callable(URL_path):
- raise TypeError("URL_path should be a string, not a function/method, did you decorate your action wrongly?")
- if any(key not in __action_kw_arguments__ for key in kwargs.keys()):
- raise ValueError("Only 'safe', 'idempotent', 'synchronous' are allowed as keyword arguments, " +
- f"unknown arguments found {kwargs.keys()}")
- return inner
-
-
-
-__all__ = [
- action.__name__
-]
-
-
diff --git a/hololinked/server/api_platforms.py b/hololinked/server/api_platforms.py
deleted file mode 100644
index ea5cba77..00000000
--- a/hololinked/server/api_platforms.py
+++ /dev/null
@@ -1,152 +0,0 @@
-from typing import Dict, List, Any, Union
-from dataclasses import dataclass, field, asdict
-
-from .constants import HTTP_METHODS
-from .serializers import JSONSerializer
-
-
-@dataclass
-class postman_collection:
- """
- Generate postman JSON of schema v2.1.0 (https://schema.postman.com/collection/json/v2.1.0/draft-04/collection.json)
- """
- info : "postman_collection_info"
- item : Union[List[Union["postman_item", "postman_itemgroup"]], Dict[str, Any]]
-
- def add_item(self, item : Union["postman_item", "postman_itemgroup"]) -> None:
- if isinstance(self.item, dict):
- raise ValueError("Please define item as a list before adding requests to item.")
- self.item.append(item)
-
- def json(self):
- return asdict(self)
-
- def save_json_file(self, filename = 'collection.json'):
- with open(filename, 'w') as file:
- JSONSerializer.generic_dump(self.json(), file)
-
- @classmethod
- def build(cls, instance, domain_prefix : str) -> Dict[str, Any]:
- from .thing import Thing
- from .dataklasses import HTTPResource, RemoteResource
- assert isinstance(instance, Thing) # type definition
- try:
- return instance._postman_collection
- except AttributeError:
- pass
- properties_folder = postman_itemgroup(name='properties')
- methods_folder = postman_itemgroup(name='methods')
- events_folder = postman_itemgroup(name='events')
-
- collection = postman_collection(
- info = postman_collection_info(
- name = instance.__class__.__name__,
- description = "API endpoints available for Thing",
- ),
- item = [
- properties_folder,
- methods_folder
- ]
- )
-
- for http_method, resource in instance.httpserver_resources.items():
- # i.e. this information is generated only on the httpserver accessible resrouces...
- for URL_path, httpserver_data in resource.items():
- if isinstance(httpserver_data, HTTPResource):
- scada_info : RemoteResource
- try:
- scada_info = instance.instance_resources[httpserver_data.instruction]
- except KeyError:
- property_path_without_RW = httpserver_data.instruction.rsplit('/', 1)[0]
- scada_info = instance.instance_resources[property_path_without_RW]
- item = postman_item(
- name = scada_info.obj_name,
- request = postman_http_request(
- description=scada_info.obj.__doc__,
- url=domain_prefix + URL_path,
- method=http_method,
- )
- )
- if scada_info.isproperty:
- properties_folder.add_item(item)
- elif scada_info.iscallable:
- methods_folder.add_item(item)
-
- instance._postman_collection = collection
- return collection
-
-
-@dataclass
-class postman_collection_info:
- """
- info field of postman collection
- """
- name : str
- description : str
- version : str = field(default="v2.1.0")
- schema : str = field(default="https://schema.getpostman.com/json/collection/v2.1.0/")
-
- def json(self):
- return asdict(self)
-
-@dataclass
-class postman_item:
- """
- item field of postman collection
- """
- name : str
- request : "postman_http_request"
- description : Union[str, None] = field(default=None)
-
- def json(self):
- return asdict(self)
-
-@dataclass
-class postman_http_request:
- """
- HTTP request item of postman collection
- """
- url : str
- header : Union[List[Dict[str, Any]], None] = field(default=None)
- body : Union[Dict[str, Any], None] = field(default=None)
- method : str = field(default=HTTP_METHODS.POST)
- description : Union[str, None] = field(default=None)
-
- def json(self):
- json_dict = asdict(self)
- if self.header is None:
- json_dict.pop("header", None)
- if self.body is None:
- json_dict.pop("body", None)
- return json_dict
-
-@dataclass
-class postman_itemgroup:
- """
- item group of postman collection
- """
- name : str
- item : Union[postman_item, "postman_itemgroup",
- List[Union["postman_item", "postman_itemgroup"]]] = field(default_factory=list)
- description : Union[str, None] = field(default=None)
-
- def add_item(self, item : Union["postman_item", "postman_itemgroup"]) -> None:
- if isinstance(self.item, dict):
- raise ValueError("Please define item as a list before adding requests to item.")
- if isinstance(self.item, list):
- self.item.append(item)
- else:
- raise ValueError(f"itemgroup must be list, not type {type(item)}")
-
- def json(self):
- return asdict(self)
-
-
-
-__all__ = [
- 'postman_collection',
- 'postman_collection_info',
- 'postman_item',
- 'postman_http_request',
- 'postman_itemgroup'
-]
\ No newline at end of file
diff --git a/hololinked/server/dataklasses.py b/hololinked/server/dataklasses.py
deleted file mode 100644
index 320ee9a1..00000000
--- a/hololinked/server/dataklasses.py
+++ /dev/null
@@ -1,581 +0,0 @@
-"""
-The following is a list of all dataclasses used to store information on the exposed
-resources on the network. These classese are generally not for consumption by the package-end-user.
-"""
-import typing
-import platform
-import inspect
-from enum import Enum
-from dataclasses import dataclass, asdict, field, fields
-from types import FunctionType, MethodType
-
-from ..param.parameters import String, Boolean, Tuple, TupleSelector, ClassSelector, Parameter
-from ..param.parameterized import ParameterizedMetaclass, ParameterizedFunction
-from .constants import JSON, USE_OBJECT_NAME, UNSPECIFIED, HTTP_METHODS, REGEX, ResourceTypes, http_methods
-from .utils import get_signature, getattr_without_descriptor_read, pep8_to_URL_path
-from .config import global_config
-from .schema_validators import BaseSchemaValidator
-
-
-class RemoteResourceInfoValidator:
- """
- A validator class for saving remote access related information on a resource. Currently callables (functions,
- methods and those with__call__) and class/instance property store this information as their own attribute under
- the variable ``_remote_info``. This is later split into information suitable for HTTP server, RPC client & ``EventLoop``.
-
- Attributes
- ----------
- URL_path : str, default - extracted object name
- the path in the URL under which the object is accesible.
- Must follow url-regex ('[\-a-zA-Z0-9@:%._\/\+~#=]{1,256}') requirement.
- If not specified, the name of object will be used. Underscores will be converted to dashes
- for PEP 8 names.
- http_method : str, default POST
- HTTP request method under which the object is accessible. GET, POST, PUT, DELETE or PATCH are supported.
- state : str, default None
- State machine state at which a callable will be executed or attribute/property can be
- written. Does not apply to read-only attributes/properties.
- obj_name : str, default - extracted object name
- the name of the object which will be supplied to the ``ObjectProxy`` class to populate
- its own namespace. For HTTP clients, HTTP method and URL path is important and for
- object proxies clients, the obj_name is important.
- iscoroutine : bool, default False
- whether the callable should be awaited
- isaction : bool, default False
- True for a method or function or callable
- isproperty : bool, default False
- True for a property
- """
- URL_path = String(default=USE_OBJECT_NAME,
- doc="the path in the URL under which the object is accesible.") # type: str
- http_method = TupleSelector(default=HTTP_METHODS.POST, objects=http_methods, accept_list=True,
- doc="HTTP request method under which the object is accessible. GET, POST, PUT, DELETE or PATCH are supported.") # typing.Tuple[str]
- state = Tuple(default=None, item_type=(Enum, str), allow_None=True, accept_list=True, accept_item=True,
- doc="State machine state at which a callable will be executed or attribute/property can be written.") # type: typing.Union[Enum, str]
- obj = ClassSelector(default=None, allow_None=True, class_=(FunctionType, MethodType, classmethod, Parameter, ParameterizedMetaclass), # Property will need circular import so we stick to base class Parameter
- doc="the unbound object like the unbound method")
- obj_name = String(default=USE_OBJECT_NAME,
- doc="the name of the object which will be supplied to the ``ObjectProxy`` class to populate its own namespace.") # type: str
- isaction = Boolean(default=False,
- doc="True for a method or function or callable") # type: bool
- isproperty = Boolean(default=False,
- doc="True for a property") # type: bool
-
- def __init__(self, **kwargs) -> None:
- """
- No full-scale checks for unknown keyword arguments as the class
- is used by the developer, so please try to be error-proof
- """
- if kwargs.get('URL_path', None) is not None:
- if not isinstance(kwargs['URL_path'], str):
- raise TypeError(f"URL path must be a string. Given type {type(kwargs['URL_path'])}")
- if kwargs["URL_path"] != USE_OBJECT_NAME and not kwargs["URL_path"].startswith('/'):
- raise ValueError(f"URL path must start with '/'. Given value {kwargs['URL_path']}")
- for key, value in kwargs.items():
- setattr(self, key, value)
-
- def to_dataclass(self, obj : typing.Any = None, bound_obj : typing.Any = None) -> "RemoteResource":
- """
- For a plain, faster and uncomplicated access, a dataclass in created & used by the
- event loop.
-
- Parameters
- ----------
- obj : Union[Property | Callable]
- property or method/action
-
- bound_obj : owner instance
- ``Thing`` instance
-
- Returns
- -------
- RemoteResource
- dataclass equivalent of this object
- """
- return RemoteResource(
- state=tuple(self.state) if self.state is not None else None,
- obj_name=self.obj_name, isaction=self.isaction,
- isproperty=self.isproperty, obj=obj, bound_obj=bound_obj,
- )
- # http method is manually always stored as a tuple
-
-
-class ActionInfoValidator(RemoteResourceInfoValidator):
- """
- request_as_argument : bool, default False
- if True, http/RPC request object will be passed as an argument to the callable.
- The user is warned to not use this generally.
- argument_schema: JSON, default None
- JSON schema validations for arguments of a callable. Assumption is therefore arguments will be JSON complaint.
- return_value_schema: JSON, default None
- schema for return value of a callable. Assumption is therefore return value will be JSON complaint.
- create_task: bool, default True
- default for async methods/actions
- safe: bool, default True
- metadata information whether the action is safe to execute
- idempotent: bool, default False
- metadata information whether the action is idempotent
- synchronous: bool, default True
- metadata information whether the action is synchronous
- """
- request_as_argument = Boolean(default=False,
- doc="if True, http/RPC request object will be passed as an argument to the callable.") # type: bool
- argument_schema = ClassSelector(default=None, allow_None=True, class_=dict,
- # due to schema validation, this has to be a dict, and not a special dict like TypedDict
- doc="JSON schema validations for arguments of a callable")
- return_value_schema = ClassSelector(default=None, allow_None=True, class_=dict,
- # due to schema validation, this has to be a dict, and not a special dict like TypedDict
- doc="schema for return value of a callable")
- create_task = Boolean(default=True,
- doc="should a coroutine be tasked or run in the same loop?") # type: bool
- iscoroutine = Boolean(default=False, # not sure if isFuture or isCoroutine is correct, something to fix later
- doc="whether the callable should be awaited") # type: bool
- safe = Boolean(default=True,
- doc="metadata information whether the action is safe to execute") # type: bool
- idempotent = Boolean(default=False,
- doc="metadata information whether the action is idempotent") # type: bool
- synchronous = Boolean(default=True,
- doc="metadata information whether the action is synchronous") # type: bool
- isparameterized = Boolean(default=False,
- doc="True for a parameterized function") # type: bool
-
-
- def to_dataclass(self, obj : typing.Any = None, bound_obj : typing.Any = None) -> "RemoteResource":
- return ActionResource(
- state=tuple(self.state) if self.state is not None else None,
- obj_name=self.obj_name, isaction=self.isaction, iscoroutine=self.iscoroutine,
- isproperty=self.isproperty, obj=obj, bound_obj=bound_obj,
- schema_validator=(bound_obj.schema_validator)(self.argument_schema) if not global_config.validate_schema_on_client and self.argument_schema else None,
- create_task=self.create_task, isparameterized=self.isparameterized
- )
-
-
-
-class SerializableDataclass:
- """
- Presents uniform serialization for serializers using getstate and setstate and json
- serialization.
- """
- def json(self):
- return asdict(self)
-
- def __getstate__(self):
- return self.json()
-
- def __setstate__(self, values : typing.Dict):
- for key, value in values.items():
- setattr(self, key, value)
-
-
-__dataclass_kwargs = dict(frozen=True)
-if float('.'.join(platform.python_version().split('.')[0:2])) >= 3.11:
- __dataclass_kwargs["slots"] = True
-
-@dataclass(**__dataclass_kwargs)
-class RemoteResource(SerializableDataclass):
- """
- This container class is used by the ``EventLoop`` methods (for example ``execute_once()``) to access resource
- metadata instead of directly using ``RemoteResourceInfoValidator``. Instances of this dataclass is stored under
- ``Thing.instance_resources`` dictionary for each property & method/action. Events use similar dataclass with
- metadata but with much less information.
-
- Attributes
- ----------
- state : str
- State machine state at which a callable will be executed or attribute/property can be
- written. Does not apply to read-only attributes/properties.
- obj_name : str, default - extracted object name
- the name of the object which will be supplied to the ``ObjectProxy`` class to populate
- its own namespace. For HTTP clients, HTTP method and URL path is important and for
- object proxies clients, the obj_name is important.
- isaction : bool
- True for a method or function or callable
- isproperty : bool
- True for a property
- obj : Union[Property | Callable]
- property or method/action
- bound_obj : owner instance
- ``Thing`` instance
- """
- state : typing.Optional[typing.Union[typing.Tuple, str]]
- obj_name : str
- isaction : bool
- isproperty : bool
- obj : typing.Any
- bound_obj : typing.Any
-
- def json(self):
- """
- return this object as a JSON serializable dictionary
- """
- # try:
- # return self._json # accessing dynamic attr from frozen object
- # except AttributeError: # always causes attribute error when slots are True
- json_dict = {}
- for field in fields(self):
- if field.name != 'obj' and field.name != 'bound_obj':
- json_dict[field.name] = getattr(self, field.name)
- # object.__setattr__(self, '_json', json_dict) # because object is frozen - used to work, but not now
- return json_dict
-
-
-@dataclass(**__dataclass_kwargs)
-class ActionResource(RemoteResource):
- """
- Attributes
- ----------
- iscoroutine : bool
- whether the callable should be awaited
- schema_validator : BaseSchemaValidator
- schema validator for the callable if to be validated server side
- """
- iscoroutine : bool
- schema_validator : typing.Optional[BaseSchemaValidator]
- create_task : bool
- isparameterized : bool
- # no need safe, idempotent, synchronous
-
-
-@dataclass
-class HTTPMethodInstructions(SerializableDataclass):
- """
- contains a map of unique strings that identifies the resource operation for each HTTP method, thus acting as
- instructions to be passed to the RPC server. The unique strings are generally made using the URL_path.
- """
- GET : typing.Optional[str] = field(default=None)
- POST : typing.Optional[str] = field(default=None)
- PUT : typing.Optional[str] = field(default=None)
- DELETE : typing.Optional[str] = field(default=None)
- PATCH : typing.Optional[str] = field(default=None)
-
- def __post_init__(self):
- self.supported_methods()
-
- def supported_methods(self): # can be a property
- try:
- return self._supported_methods
- except:
- self._supported_methods = []
- for method in ["GET", "POST", "PUT", "DELETE", "PATCH"]:
- if isinstance(self.__dict__[method], str):
- self._supported_methods.append(method)
- return self._supported_methods
-
- def __contains__(self, value):
- return value in self._supported_methods
-
-
-@dataclass
-class HTTPResource(SerializableDataclass):
- """
- Representation of the resource used by HTTP server for routing and passing information to RPC server on
- "what to do with which resource belonging to which thing? - read, write, execute?".
-
- Attributes
- ----------
-
- what : str
- is it a property, method/action or event?
- instance_name : str
- The ``instance_name`` of the thing which owns the resource. Used by HTTP server to inform
- the message brokers to send the instruction to the correct recipient thing.
- fullpath : str
- URL full path used for routing
- instructions : HTTPMethodInstructions
- unique string that identifies the resource operation for each HTTP method, generally made using the URL_path
- (qualified URL path {instance name}/{URL path}).
- argument_schema : JSON
- argument schema of the method/action for validation before passing over the instruction to the RPC server.
- request_as_argument: bool
- pass the request as a argument to the callable. For HTTP server ``tornado.web.HTTPServerRequest`` will be passed.
- """
- what : str
- class_name : str # just metadata
- instance_name : str
- obj_name : str
- fullpath : str
- instructions : HTTPMethodInstructions
- argument_schema : typing.Optional[JSON]
- request_as_argument : bool = field(default=False)
-
-
- def __init__(self, *, what : str, class_name : str, instance_name : str, obj_name : str, fullpath : str,
- request_as_argument : bool = False, argument_schema : typing.Optional[JSON] = None,
- **instructions) -> None:
- self.what = what
- self.class_name = class_name
- self.instance_name = instance_name
- self.obj_name = obj_name
- self.fullpath = fullpath
- self.request_as_argument = request_as_argument
- self.argument_schema = argument_schema
- if instructions.get('instructions', None):
- self.instructions = HTTPMethodInstructions(**instructions.get('instructions', None))
- else:
- self.instructions = HTTPMethodInstructions(**instructions)
-
-
-@dataclass
-class ZMQResource(SerializableDataclass):
- """
- Representation of resource used by RPC clients for mapping client method/action calls, property read/writes & events
- to a server resource. Used to dynamically populate the ``ObjectProxy``
-
- Attributes
- ----------
-
- what : str
- is it a property, method/action or event?
- instance_name : str
- The ``instance_name`` of the thing which owns the resource. Used by RPC client to inform
- message brokers to send the message to the correct recipient.
- instruction : str
- unique string that identifies the resource, generally made using the URL_path. Although URL path is a HTTP
- concept, it is still used as a unique identifier.
- name : str
- the name of the resource (__name__)
- qualname : str
- the qualified name of the resource (__qualname__)
- doc : str
- the docstring of the resource
- argument_schema : JSON
- argument schema of the method/action for validation before passing over the instruction to the RPC server.
- """
- what : str
- class_name : str # just metadata
- instance_name : str
- instruction : str
- obj_name : str
- qualname : str
- doc : typing.Optional[str]
- top_owner : bool
- argument_schema : typing.Optional[JSON]
- return_value_schema : typing.Optional[JSON]
- request_as_argument : bool = field(default=False)
-
- def __init__(self, *, what : str, class_name : str, instance_name : str, instruction : str, obj_name : str,
- qualname : str, doc : str, top_owner : bool, argument_schema : typing.Optional[JSON] = None,
- return_value_schema : typing.Optional[JSON] = None, request_as_argument : bool = False) -> None:
- self.what = what
- self.class_name = class_name
- self.instance_name = instance_name
- self.instruction = instruction
- self.obj_name = obj_name
- self.qualname = qualname
- self.doc = doc
- self.top_owner = top_owner
- self.argument_schema = argument_schema
- self.return_value_schema = return_value_schema
- self.request_as_argument = request_as_argument
-
- def get_dunder_attr(self, __dunder_name : str):
- name = __dunder_name.strip('_')
- name = 'obj_name' if name == 'name' else name
- return getattr(self, name)
-
-
-@dataclass
-class ServerSentEvent(SerializableDataclass):
- """
- event name and socket address of events to be consumed by clients.
-
- Attributes
- ----------
- name : str
- name of the event, must be unique
- obj_name: str
- name of the event variable used to populate the RPC client
- socket_address : str
- address of the socket
- unique_identifier: str
- unique ZMQ identifier used in PUB-SUB model
- what: str, default EVENT
- is it a property, method/action or event?
- """
- name : str = field(default=UNSPECIFIED)
- obj_name : str = field(default=UNSPECIFIED)
- class_name : str = field(default=UNSPECIFIED) # just metadata
- unique_identifier : str = field(default=UNSPECIFIED)
- serialization_specific : bool = field(default=False)
- socket_address : str = field(default=UNSPECIFIED)
- what : str = field(default=ResourceTypes.EVENT)
-
-
-def build_our_temp_TD(instance):
- """
- A temporary extension of TD used to build GUI of thing control panel.
- Will be later replaced by a more sophisticated TD builder which is compliant to the actual spec & its theory.
- """
- from .thing import Thing
-
- assert isinstance(instance, Thing), f"got invalid type {type(instance)}"
-
- our_TD = instance.get_thing_description(ignore_errors=True)
- our_TD["inheritance"] = [class_.__name__ for class_ in instance.__class__.mro()]
-
- for instruction, remote_info in instance.instance_resources.items():
- if remote_info.isaction and remote_info.obj_name in our_TD["actions"]:
- if isinstance(remote_info.obj, classmethod):
- our_TD["actions"][remote_info.obj_name]["type"] = 'classmethod'
- our_TD["actions"][remote_info.obj_name]["signature"] = get_signature(remote_info.obj)[0]
- elif remote_info.isproperty and remote_info.obj_name in our_TD["properties"]:
- our_TD["properties"][remote_info.obj_name].update(instance.__class__.properties.webgui_info(remote_info.obj)[remote_info.obj_name])
- return our_TD
-
-
-
-def get_organised_resources(instance):
- """
- organise the exposed attributes, actions and events into the dataclasses defined above
- so that the specific servers and event loop can use them.
- """
- from .thing import Thing
- from .events import Event, EventDispatcher
- from .property import Property
-
- assert isinstance(instance, Thing), f"got invalid type {type(instance)}"
-
- httpserver_resources = dict() # type: typing.Dict[str, HTTPResource]
- # The following dict will be given to the object proxy client
- zmq_resources = dict() # type: typing.Dict[str, ZMQResource]
- # The following dict will be used by the event loop
- instance_resources = dict() # type: typing.Dict[str, typing.Union[RemoteResource, ActionResource]]
- # create URL prefix
- if instance._owner is not None:
- instance._full_URL_path_prefix = f'{instance._owner._full_URL_path_prefix}/{instance.instance_name}'
- else:
- instance._full_URL_path_prefix = f'/{instance.instance_name}' # leading '/' was stripped at init
-
- # First add methods and callables
- # properties
- for prop in instance.parameters.descriptors.values():
- if isinstance(prop, Property) and hasattr(prop, '_remote_info') and prop._remote_info is not None:
- if not isinstance(prop._remote_info, RemoteResourceInfoValidator):
- raise TypeError("instance member {} has unknown sub-member '_remote_info' of type {}.".format(
- prop, type(prop._remote_info)))
- # above condition is just a gaurd in case somebody does some unpredictable patching activities
- remote_info = prop._remote_info
- fullpath = f"{instance._full_URL_path_prefix}{remote_info.URL_path}"
- read_http_method = write_http_method = delete_http_method = None
- if len(remote_info.http_method) == 1:
- read_http_method = remote_info.http_method[0]
- instructions = { read_http_method : f"{fullpath}/read" }
- elif len(remote_info.http_method) == 2:
- read_http_method, write_http_method = remote_info.http_method
- instructions = {
- read_http_method : f"{fullpath}/read",
- write_http_method : f"{fullpath}/write"
- }
- else:
- read_http_method, write_http_method, delete_http_method = remote_info.http_method
- instructions = {
- read_http_method : f"{fullpath}/read",
- write_http_method : f"{fullpath}/write",
- delete_http_method : f"{fullpath}/delete"
- }
-
- httpserver_resources[fullpath] = HTTPResource(
- what=ResourceTypes.PROPERTY,
- class_name=instance.__class__.__name__,
- instance_name=instance._owner.instance_name if instance._owner is not None else instance.instance_name,
- obj_name=remote_info.obj_name,
- fullpath=fullpath,
- **instructions
- )
- zmq_resources[fullpath] = ZMQResource(
- what=ResourceTypes.PROPERTY,
- class_name=instance.__class__.__name__,
- instance_name=instance._owner.instance_name if instance._owner is not None else instance.instance_name,
- instruction=fullpath,
- doc=prop.__doc__,
- obj_name=remote_info.obj_name,
- qualname=instance.__class__.__name__ + '.' + remote_info.obj_name,
- # qualname is not correct probably, does not respect inheritance
- top_owner=instance._owner is None,
- )
- data_cls = remote_info.to_dataclass(obj=prop, bound_obj=instance)
- instance_resources[f"{fullpath}/read"] = data_cls
- instance_resources[f"{fullpath}/write"] = data_cls
- instance_resources[f"{fullpath}/delete"] = data_cls
- if prop._observable:
- # There is no real philosophy behind this logic flow, we just set the missing information.
- assert isinstance(prop._observable_event_descriptor, Event), f"observable event not yet set for {prop.name}. logic error."
- evt_fullpath = f"{instance._full_URL_path_prefix}{prop._observable_event_descriptor.URL_path}"
- dispatcher = EventDispatcher(evt_fullpath)
- dispatcher._remote_info.class_name = instance.__class__.__name__
- dispatcher._remote_info.serialization_specific = instance.zmq_serializer != instance.http_serializer
- setattr(instance, prop._observable_event_descriptor._obj_name, dispatcher)
- # prop._observable_event_descriptor._remote_info.unique_identifier = evt_fullpath
- httpserver_resources[evt_fullpath] = dispatcher._remote_info
- zmq_resources[evt_fullpath] = dispatcher._remote_info
- # Methods
- for name, resource in inspect._getmembers(instance, lambda f : inspect.ismethod(f) or (
- hasattr(f, '_remote_info') and isinstance(f._remote_info, ActionInfoValidator)),
- getattr_without_descriptor_read):
- if hasattr(resource, '_remote_info'):
- if not isinstance(resource._remote_info, ActionInfoValidator):
- raise TypeError("instance member {} has unknown sub-member '_remote_info' of type {}.".format(
- resource, type(resource._remote_info)))
- remote_info = resource._remote_info
- # methods are already bound
- assert remote_info.isaction, ("remote info from inspect.ismethod is not a callable",
- "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report")
- fullpath = f"{instance._full_URL_path_prefix}{remote_info.URL_path}"
- instruction = f"{fullpath}/invoke-on-{remote_info.http_method[0]}"
- # needs to be cleaned up for multiple HTTP methods
- httpserver_resources[instruction] = HTTPResource(
- what=ResourceTypes.ACTION,
- class_name=instance.__class__.__name__,
- instance_name=instance._owner.instance_name if instance._owner is not None else instance.instance_name,
- obj_name=remote_info.obj_name,
- fullpath=fullpath,
- argument_schema=remote_info.argument_schema,
- request_as_argument=remote_info.request_as_argument,
- **{ http_method : instruction for http_method in remote_info.http_method },
- )
- zmq_resources[instruction] = ZMQResource(
- what=ResourceTypes.ACTION,
- class_name=instance.__class__.__name__,
- instance_name=instance._owner.instance_name if instance._owner is not None else instance.instance_name,
- instruction=instruction,
- obj_name=getattr(resource, '__name__'),
- qualname=getattr(resource, '__qualname__'),
- doc=getattr(resource, '__doc__'),
- top_owner=instance._owner is None,
- argument_schema=remote_info.argument_schema,
- return_value_schema=remote_info.return_value_schema,
- request_as_argument=remote_info.request_as_argument
- )
- instance_resources[instruction] = remote_info.to_dataclass(obj=resource, bound_obj=instance)
- # Events
- for name, resource in inspect._getmembers(instance, lambda o : isinstance(o, Event), getattr_without_descriptor_read):
- assert isinstance(resource, Event), ("thing event query from inspect.ismethod is not an Event",
- "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report")
- # above assertion is only a typing convenience
- fullpath = f"{instance._full_URL_path_prefix}{resource.URL_path}"
- # resource._remote_info.unique_identifier = fullpath
- dispatcher = EventDispatcher(fullpath)
- dispatcher._remote_info.class_name = instance.__class__.__name__
- dispatcher._remote_info.serialization_specific = instance.zmq_serializer != instance.http_serializer
- setattr(instance, name, dispatcher) # resource._remote_info.unique_identifier))
- httpserver_resources[fullpath] = dispatcher._remote_info
- zmq_resources[fullpath] = dispatcher._remote_info
- # Other objects
- for name, resource in inspect._getmembers(instance, lambda o : isinstance(o, Thing), getattr_without_descriptor_read):
- assert isinstance(resource, Thing), ("thing children query from inspect.ismethod is not a Thing",
- "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report")
- # above assertion is only a typing convenience
- if name == '_owner':
- # second condition allows sharing of Things without adding once again to the list of exposed resources
- # for example, a shared logger
- continue
- if resource._owner is None:
- resource._owner = instance
- resource._prepare_resources()
- httpserver_resources.update(resource.httpserver_resources)
- # zmq_resources.update(resource.zmq_resources)
- instance_resources.update(resource.instance_resources)
-
- # The above for-loops can be used only once, the division is only for readability
- # following are in _internal_fixed_attributes - allowed to set only once
- return zmq_resources, httpserver_resources, instance_resources
\ No newline at end of file
diff --git a/hololinked/server/eventloop.py b/hololinked/server/eventloop.py
deleted file mode 100644
index f1f04553..00000000
--- a/hololinked/server/eventloop.py
+++ /dev/null
@@ -1,404 +0,0 @@
-import sys
-import os
-import warnings
-import subprocess
-import asyncio
-import importlib
-import typing
-import threading
-import logging
-import tracemalloc
-from uuid import uuid4
-
-from .constants import HTTP_METHODS
-from .utils import format_exception_as_json
-from .config import global_config
-from .zmq_message_brokers import ServerTypes
-from .exceptions import *
-from .thing import Thing, ThingMeta
-from .property import Property
-from .properties import ClassSelector, TypedList, List, Boolean, TypedDict
-from .action import action as remote_method
-from .logger import ListHandler
-
-
-if global_config.TRACE_MALLOC:
- tracemalloc.start()
-
-
-def set_event_loop_policy():
- if sys.platform.lower().startswith('win'):
- asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
-
- if global_config.USE_UVLOOP:
- if sys.platform.lower() in ['linux', 'darwin', 'linux2']:
- import uvloop
- asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
- else:
- warnings.warn("uvloop not supported for windows, using default windows selector loop.", RuntimeWarning)
-
-set_event_loop_policy()
-
-
-class Consumer:
- """
- Container class for Thing to pass to eventloop for multiprocessing applications in case
- of rare needs.
- """
- object_cls = ClassSelector(default=None, allow_None=True, class_=Thing, isinstance=False,
- remote=False)
- args = List(default=None, allow_None=True, accept_tuple=True, remote=False)
- kwargs = TypedDict(default=None, allow_None=True, key_type=str, remote=False)
-
- def __init__(self, object_cls : typing.Type[Thing], args : typing.Tuple = tuple(), **kwargs) -> None:
- self.object_cls = object_cls
- self.args = args
- self.kwargs = kwargs
-
-
-
-RemoteObject = Thing # reading convenience
-
-class EventLoop(RemoteObject):
- """
- The EventLoop class implements a infinite loop where zmq ROUTER sockets listen for messages. Each consumer of the
- event loop (an instance of Thing) listen on their own ROUTER socket and execute methods or allow read and write
- of attributes upon receiving instructions. Socket listening is implemented in an async (asyncio) fashion.
- """
- server_type = ServerTypes.EVENTLOOP
-
- expose = Boolean(default=True, remote=False,
- doc="""set to False to use the object locally to avoid alloting network resources
- of your computer for this object""")
-
- things = TypedList(item_type=(Thing, Consumer), bounds=(0,100), allow_None=True, default=None,
- doc="list of Things which are being executed", remote=False) #type: typing.List[Thing]
-
- threaded = Boolean(default=False, remote=False,
- doc="set True to run each thing in its own thread")
-
-
- def __init__(self, *,
- instance_name : str,
- things : typing.Union[Thing, Consumer, typing.List[typing.Union[Thing, Consumer]]] = list(), # type: ignore - requires covariant types
- log_level : int = logging.INFO,
- **kwargs
- ) -> None:
- """
- Parameters
- ----------
- instance_name: str
- instance name of the event loop
- things: List[Thing]
- things to be run/served
- log_level: int
- log level of the event loop logger
- """
- super().__init__(instance_name=instance_name, things=things, log_level=log_level, **kwargs)
- things = [] # type: typing.List[Thing]
- if self.expose:
- things.append(self)
- if self.things is not None:
- for consumer in self.things:
- if isinstance(consumer, Thing):
- things.append(consumer)
- consumer.object_info.eventloop_instance_name = self.instance_name
- elif isinstance(consumer, Consumer):
- instance = consumer.object_cls(*consumer.args, **consumer.kwargs,
- eventloop_name=self.instance_name)
- things.append(instance)
- self.things = things # re-assign the instantiated objects as well
- self.uninstantiated_things = dict()
- self._message_listener_methods = []
-
- def __post_init__(self):
- super().__post_init__()
- self.logger.info("Event loop with name '{}' can be started using EventLoop.run().".format(self.instance_name))
-
-
- # example of overloading
- @remote_method()
- def exit(self):
- """
- Stops the event loop and all its things. Generally, this leads
- to exiting the program unless some code follows the ``run()`` method.
- """
- for thing in self.things:
- thing.exit()
- raise BreakAllLoops
-
-
- uninstantiated_things = TypedDict(default=None, allow_None=True, key_type=str,
- item_type=(Consumer, str), URL_path='/things/uninstantiated')
-
-
- @classmethod
- def _import_thing(cls, file_name : str, object_name : str):
- """
- import a thing specified by ``object_name`` from its
- script or module.
-
- Parameters
- ----------
- file_name : str
- file or module path
- object_name : str
- name of ``Thing`` class to be imported
- """
- module_name = file_name.split(os.sep)[-1]
- spec = importlib.util.spec_from_file_location(module_name, file_name)
- if spec is not None:
- module = importlib.util.module_from_spec(spec)
- spec.loader.exec_module(module)
- else:
- module = importlib.import_module(module_name, file_name.split(os.sep)[0])
- consumer = getattr(module, object_name)
- if issubclass(consumer, Thing):
- return consumer
- else:
- raise ValueError(f"object name {object_name} in {file_name} not a subclass of Thing.",
- f" Only subclasses are accepted (not even instances). Given object : {consumer}")
-
-
- @remote_method(URL_path='/things', http_method=HTTP_METHODS.POST)
- def import_thing(self, file_name : str, object_name : str):
- """
- import thing from the specified path and return the default
- properties to be supplied to instantiate the object.
- """
- consumer = self._import_thing(file_name, object_name) # type: ThingMeta
- id = uuid4()
- self.uninstantiated_things[id] = consumer
- return id
-
-
- @remote_method(URL_path='/things/instantiate',
- http_method=HTTP_METHODS.POST) # remember to pass schema with mandatory instance name
- def instantiate(self, id : str, kwargs : typing.Dict = {}):
- """
- Instantiate the thing that was imported with given arguments
- and add to the event loop
- """
- consumer = self.uninstantiated_things[id]
- instance = consumer(**kwargs, eventloop_name=self.instance_name) # type: Thing
- self.things.append(instance)
- rpc_server = instance.rpc_server
- self.request_listener_loop.call_soon(asyncio.create_task(lambda : rpc_server.poll()))
- self.request_listener_loop.call_soon(asyncio.create_task(lambda : rpc_server.tunnel_message_to_things()))
- if not self.threaded:
- self.thing_executor_loop.call_soon(asyncio.create_task(lambda : self.run_single_target(instance)))
- else:
- _thing_executor = threading.Thread(target=self.run_things_executor, args=([instance],))
- _thing_executor.start()
-
- def run(self):
- """
- start the eventloop
- """
- if not self.threaded:
- _thing_executor = threading.Thread(target=self.run_things_executor, args=(self.things,))
- _thing_executor.start()
- else:
- for thing in self.things:
- _thing_executor = threading.Thread(target=self.run_things_executor, args=([thing],))
- _thing_executor.start()
- self.run_external_message_listener()
- if not self.threaded:
- _thing_executor.join()
-
-
- @classmethod
- def get_async_loop(cls):
- """
- get or automatically create an asnyc loop for the current thread.
- """
- try:
- loop = asyncio.get_event_loop()
- except RuntimeError:
- loop = asyncio.new_event_loop()
- # set_event_loop_policy() - why not?
- asyncio.set_event_loop(loop)
- return loop
-
-
- def run_external_message_listener(self):
- """
- Runs ZMQ's sockets which are visible to clients.
- This method is automatically called by ``run()`` method.
- Please dont call this method when the async loop is already running.
- """
- self.request_listener_loop = self.get_async_loop()
- rpc_servers = [thing.rpc_server for thing in self.things]
- futures = []
- for rpc_server in rpc_servers:
- futures.append(rpc_server.poll())
- futures.append(rpc_server.tunnel_message_to_things())
- self.logger.info("starting external message listener thread")
- self.request_listener_loop.run_until_complete(asyncio.gather(*futures))
- pending_tasks = asyncio.all_tasks(self.request_listener_loop)
- self.request_listener_loop.run_until_complete(asyncio.gather(*pending_tasks))
- self.logger.info("exiting external listener event loop {}".format(self.instance_name))
- self.request_listener_loop.close()
-
-
- def run_things_executor(self, things):
- """
- Run ZMQ sockets which provide queued instructions to ``Thing``.
- This method is automatically called by ``run()`` method.
- Please dont call this method when the async loop is already running.
- """
- thing_executor_loop = self.get_async_loop()
- self.thing_executor_loop = thing_executor_loop # atomic assignment for thread safety
- self.logger.info(f"starting thing executor loop in thread {threading.get_ident()} for {[obj.instance_name for obj in things]}")
- thing_executor_loop.run_until_complete(
- asyncio.gather(*[self.run_single_target(instance) for instance in things])
- )
- self.logger.info(f"exiting event loop in thread {threading.get_ident()}")
- thing_executor_loop.close()
-
-
- @classmethod
- async def run_single_target(cls, instance : Thing) -> None:
- instance_name = instance.instance_name
- while True:
- instructions = await instance.message_broker.async_recv_instructions()
- for instruction in instructions:
- client, _, client_type, _, msg_id, _, instruction_str, arguments, context = instruction
- oneway = context.pop('oneway', False)
- fetch_execution_logs = context.pop("fetch_execution_logs", False)
- if fetch_execution_logs:
- list_handler = ListHandler([])
- list_handler.setLevel(logging.DEBUG)
- list_handler.setFormatter(instance.logger.handlers[0].formatter)
- instance.logger.addHandler(list_handler)
- try:
- instance.logger.debug(f"client {client} of client type {client_type} issued instruction " +
- f"{instruction_str} with message id {msg_id}. starting execution.")
- return_value = await cls.execute_once(instance_name, instance, instruction_str, arguments) #type: ignore
- if oneway:
- await instance.message_broker.async_send_reply_with_message_type(instruction, b'ONEWAY', None)
- continue
- if fetch_execution_logs:
- return_value = {
- "returnValue" : return_value,
- "execution_logs" : list_handler.log_list
- }
- await instance.message_broker.async_send_reply(instruction, return_value)
- # Also catches exception in sending messages like serialization error
- except (BreakInnerLoop, BreakAllLoops):
- instance.logger.info("Thing {} with instance name {} exiting event loop.".format(
- instance.__class__.__name__, instance_name))
- if oneway:
- await instance.message_broker.async_send_reply_with_message_type(instruction, b'ONEWAY', None)
- continue
- return_value = None
- if fetch_execution_logs:
- return_value = {
- "returnValue" : None,
- "execution_logs" : list_handler.log_list
- }
- await instance.message_broker.async_send_reply(instruction, return_value)
- return
- except Exception as ex:
- instance.logger.error("Thing {} with instance name {} produced error : {}.".format(
- instance.__class__.__name__, instance_name, ex))
- if oneway:
- await instance.message_broker.async_send_reply_with_message_type(instruction, b'ONEWAY', None)
- continue
- return_value = dict(exception= format_exception_as_json(ex))
- if fetch_execution_logs:
- return_value["execution_logs"] = list_handler.log_list
- await instance.message_broker.async_send_reply_with_message_type(instruction,
- b'EXCEPTION', return_value)
- finally:
- if fetch_execution_logs:
- instance.logger.removeHandler(list_handler)
-
- @classmethod
- async def execute_once(cls, instance_name : str, instance : Thing, instruction_str : str,
- arguments : typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]:
- resource = instance.instance_resources.get(instruction_str, None)
- if resource is None:
- raise AttributeError(f"unknown remote resource represented by instruction {instruction_str}")
- if resource.isaction:
- if resource.state is None or (hasattr(instance, 'state_machine') and
- instance.state_machine.current_state in resource.state):
- # Note that because we actually find the resource within __prepare_instance__, its already bound
- # and we dont have to separately bind it.
- if arguments is None:
- arguments = dict()
- args = arguments.pop('__args__', tuple())
- if len(args) == 0 and resource.schema_validator is not None:
- resource.schema_validator.validate(arguments)
-
- func = resource.obj
- if resource.iscoroutine:
- if resource.isparameterized:
- if len(args) > 0:
- raise RuntimeError("parameterized functions cannot have positional arguments")
- return await func(resource.bound_obj, *args, **arguments)
- return await func(*args, **arguments) # arguments then become kwargs
- else:
- if resource.isparameterized:
- if len(args) > 0:
- raise RuntimeError("parameterized functions cannot have positional arguments")
- return func(resource.bound_obj, *args, **arguments)
- return func(*args, **arguments) # arguments then become kwargs
- else:
- raise StateMachineError("Thing '{}' is in '{}' state, however command can be executed only in '{}' state".format(
- instance_name, instance.state, resource.state))
-
- elif resource.isproperty:
- action = instruction_str.split('/')[-1]
- prop = resource.obj # type: Property
- owner_inst = resource.bound_obj # type: Thing
- if action == "write":
- if resource.state is None or (hasattr(instance, 'state_machine') and
- instance.state_machine.current_state in resource.state):
- if isinstance(arguments, dict) and len(arguments) == 1 and 'value' in arguments:
- return prop.__set__(owner_inst, arguments['value'])
- return prop.__set__(owner_inst, arguments)
- else:
- raise StateMachineError("Thing {} is in `{}` state, however attribute can be written only in `{}` state".format(
- instance_name, instance.state_machine.current_state, resource.state))
- elif action == "read":
- return prop.__get__(owner_inst, type(owner_inst))
- elif action == "delete":
- if prop.fdel is not None:
- return prop.fdel() # this may not be correct yet
- raise NotImplementedError("This property does not support deletion")
- raise NotImplementedError("Unimplemented execution path for Thing {} for instruction {}".format(instance_name, instruction_str))
-
-
-def fork_empty_eventloop(instance_name : str, logfile : typing.Union[str, None] = None, python_command : str = 'python',
- condaenv : typing.Union[str, None] = None, prefix_command : typing.Union[str, None] = None):
- command_str = '{}{}{}-c "from hololinked.server import EventLoop; E = EventLoop({}); E.run();"'.format(
- f'{prefix_command} ' if prefix_command is not None else '',
- f'call conda activate {condaenv} && ' if condaenv is not None else '',
- f'{python_command} ',
- f"instance_name = '{instance_name}', logfile = '{logfile}'"
- )
- print(f"command to invoke : {command_str}")
- subprocess.Popen(
- command_str,
- shell = True
- )
-
-
-# class ForkedEventLoop:
-
-# def __init__(self, instance_name : str, things : Union[Thing, Consumer, List[Union[Thing, Consumer]]],
-# log_level : int = logging.INFO, **kwargs):
-# self.subprocess = Process(target = forked_eventloop, kwargs = dict(
-# instance_name = instance_name,
-# things = things,
-# log_level = log_level,
-# **kwargs
-# ))
-
-# def start(self):
-# self.Process.start()
-
-
-
-__all__ = ['EventLoop', 'Consumer', 'fork_empty_eventloop']
\ No newline at end of file
diff --git a/hololinked/server/events.py b/hololinked/server/events.py
deleted file mode 100644
index 787288fb..00000000
--- a/hololinked/server/events.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import typing
-import threading
-import jsonschema
-
-from ..param.parameterized import Parameterized, ParameterizedMetaclass
-from .constants import JSON
-from .utils import pep8_to_URL_path
-from .config import global_config
-from .zmq_message_brokers import EventPublisher
-from .dataklasses import ServerSentEvent
-from .security_definitions import BaseSecurityDefinition
-
-
-
-class Event:
- """
- Asynchronously push arbitrary messages to clients. Apart from default events created by the package (like state
- change event, observable properties etc.), events are supposed to be created at class level or at ``__init__``
- as a instance attribute, otherwise their publishing socket is unbound and will lead to ``AttributeError``.
-
- Parameters
- ----------
- name: str
- name of the event, specified name may contain dashes and can be used on client side to subscribe to this event.
- URL_path: str
- URL path of the event if a HTTP server is used. only GET HTTP methods are supported.
- doc: str
- docstring for the event
- schema: JSON
- schema of the event, if the event is JSON complaint. HTTP clients can validate the data with this schema. There
- is no validation on server side.
- """
- # security: Any
- # security necessary to access this event.
-
- __slots__ = ['friendly_name', '_internal_name', '_obj_name',
- 'doc', 'schema', 'URL_path', 'security', 'label', 'owner']
-
-
- def __init__(self, friendly_name : str, URL_path : typing.Optional[str] = None, doc : typing.Optional[str] = None,
- schema : typing.Optional[JSON] = None, # security : typing.Optional[BaseSecurityDefinition] = None,
- label : typing.Optional[str] = None) -> None:
- self.friendly_name = friendly_name
- self.doc = doc
- if global_config.validate_schemas and schema:
- jsonschema.Draft7Validator.check_schema(schema)
- self.schema = schema
- self.URL_path = URL_path or f'/{pep8_to_URL_path(friendly_name)}'
- # self.security = security
- self.label = label
-
-
- def __set_name__(self, owner : ParameterizedMetaclass, name : str) -> None:
- self._internal_name = f"{pep8_to_URL_path(name)}-dispatcher"
- self._obj_name = name
- self.owner = owner
-
- @typing.overload
- def __get__(self, obj, objtype) -> "EventDispatcher":
- ...
-
- def __get__(self, obj : ParameterizedMetaclass, objtype : typing.Optional[type] = None):
- try:
- if not obj:
- return self
- return obj.__dict__[self._internal_name]
- except KeyError:
- raise AttributeError("Event object not yet initialized, please dont access now." +
- " Access after Thing is running.")
-
- def __set__(self, obj : Parameterized, value : typing.Any) -> None:
- if isinstance(value, EventDispatcher):
- value._remote_info.name = self.friendly_name
- value._remote_info.obj_name = self._obj_name
- value._owner_inst = obj
- current_obj = obj.__dict__.get(self._internal_name, None) # type: typing.Optional[EventDispatcher]
- if current_obj and current_obj._publisher:
- current_obj._publisher.unregister(current_obj)
- obj.__dict__[self._internal_name] = value
- else:
- raise TypeError(f"Supply EventDispatcher object to event {self._obj_name}, not type {type(value)}.")
-
-
-
-class EventDispatcher:
- """
- The actual worker which pushes the event. The separation is necessary between ``Event`` and
- ``EventDispatcher`` to allow class level definitions of the ``Event``
- """
- def __init__(self, unique_identifier : str) -> None:
- self._unique_identifier = bytes(unique_identifier, encoding='utf-8')
- self._publisher = None
- self._remote_info = ServerSentEvent(unique_identifier=unique_identifier)
- self._owner_inst = None
-
- @property
- def publisher(self) -> "EventPublisher":
- """
- Event publishing PUB socket owning object.
- """
- return self._publisher
-
- @publisher.setter
- def publisher(self, value : "EventPublisher") -> None:
- if not self._publisher:
- self._publisher = value
- self._publisher.register(self)
- else:
- raise AttributeError("cannot reassign publisher attribute of event {}".format(self.name))
-
- def push(self, data : typing.Any = None, *, serialize : bool = True, **kwargs) -> None:
- """
- publish the event.
-
- Parameters
- ----------
- data: Any
- payload of the event
- serialize: bool, default True
- serialize the payload before pushing, set to False when supplying raw bytes
- **kwargs:
- zmq_clients: bool, default True
- pushes event to RPC clients, irrelevant if ``Thing`` uses only one type of serializer (refer to
- difference between zmq_serializer and http_serializer).
- http_clients: bool, default True
- pushed event to HTTP clients, irrelevant if ``Thing`` uses only one type of serializer (refer to
- difference between zmq_serializer and http_serializer).
- """
- self.publisher.publish(self._unique_identifier, data, zmq_clients=kwargs.get('zmq_clients', True),
- http_clients=kwargs.get('http_clients', True), serialize=serialize)
-
-
-
-
-class CriticalEvent(Event):
- """
- Push events to client and get acknowledgement for that
- """
-
- def __init__(self, name : str, URL_path : typing.Optional[str] = None) -> None:
- super().__init__(name, URL_path)
- self._synchronize_event = threading.Event()
-
- def receive_acknowledgement(self, timeout : typing.Union[float, int, None]) -> bool:
- """
- Receive acknowlegement for event receive. When the timeout argument is present and not None,
- it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof).
- """
- return self._synchronize_event.wait(timeout=timeout)
-
- def _set_acknowledgement(self):
- """
- Method to be called by RPC server when an acknowledgement is received. Not for user to be set.
- """
- self._synchronize_event.set()
-
-
-__all__ = [
- Event.__name__,
-]
\ No newline at end of file
diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py
deleted file mode 100644
index 9165382c..00000000
--- a/hololinked/server/handlers.py
+++ /dev/null
@@ -1,407 +0,0 @@
-import asyncio
-import zmq.asyncio
-import typing
-import uuid
-from tornado.web import RequestHandler, StaticFileHandler
-from tornado.iostream import StreamClosedError
-
-
-from .dataklasses import HTTPResource, ServerSentEvent
-from .utils import *
-from .zmq_message_brokers import AsyncEventConsumer, EventConsumer
-from .schema_validators import BaseSchemaValidator
-
-
-class BaseHandler(RequestHandler):
- """
- Base request handler for RPC operations
- """
-
- def initialize(self, resource : typing.Union[HTTPResource, ServerSentEvent], validator : BaseSchemaValidator,
- owner = None) -> None:
- """
- Parameters
- ----------
- resource: HTTPResource | ServerSentEvent
- resource representation of Thing's exposed object using a dataclass
- owner: HTTPServer
- owner ``hololinked.server.HTTPServer.HTTPServer`` instance
- """
- from .HTTPServer import HTTPServer
- assert isinstance(owner, HTTPServer)
- self.resource = resource
- self.schema_validator = validator
- self.owner = owner
- self.zmq_client_pool = self.owner.zmq_client_pool
- self.serializer = self.owner.serializer
- self.logger = self.owner.logger
- self.allowed_clients = self.owner.allowed_clients
-
- def set_headers(self) -> None:
- """
- override this to set custom headers without having to reimplement entire handler
- """
- raise NotImplementedError("implement set headers in child class to automatically call it" +
- " after directing the request to Thing")
-
- def get_execution_parameters(self) -> typing.Tuple[typing.Dict[str, typing.Any],
- typing.Dict[str, typing.Any], typing.Union[float, int, None]]:
- """
- merges all arguments to a single JSON body and retrieves execution context (like oneway calls, fetching executing
- logs) and timeouts
- """
- if len(self.request.body) > 0:
- arguments = self.serializer.loads(self.request.body)
- else:
- arguments = dict()
- if isinstance(arguments, dict):
- if len(self.request.query_arguments) >= 1:
- for key, value in self.request.query_arguments.items():
- if len(value) == 1:
- arguments[key] = self.serializer.loads(value[0])
- else:
- arguments[key] = [self.serializer.loads(val) for val in value]
- context = dict(fetch_execution_logs=arguments.pop('fetch_execution_logs', False))
- timeout = arguments.pop('timeout', None)
- if timeout is not None and timeout < 0:
- timeout = None
- if self.resource.request_as_argument:
- arguments['request'] = self.request
- return arguments, context, timeout
- return arguments, dict(), 5 # arguments, context is empty, 5 seconds invokation timeout, hardcoded needs to be fixed
-
- @property
- def has_access_control(self) -> bool:
- """
- Checks if a client is an allowed client. Requests from un-allowed clients are reject without execution. Custom
- web handlers can use this property to check if a client has access control on the server or ``Thing``.
- """
- if len(self.allowed_clients) == 0:
- self.set_header("Access-Control-Allow-Origin", "*")
- return True
- # For credential login, access control allow origin cannot be '*',
- # See: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#examples_of_access_control_scenarios
- origin = self.request.headers.get("Origin")
- if origin is not None and (origin in self.allowed_clients or origin + '/' in self.allowed_clients):
- self.set_header("Access-Control-Allow-Origin", origin)
- return True
- return False
-
- def set_access_control_allow_headers(self) -> None:
- """
- For credential login, access control allow headers cannot be a wildcard '*'.
- Some requests require exact list of allowed headers for the client to access the response.
- Use this method in set_headers() override if necessary.
- """
- headers = ", ".join(self.request.headers.keys())
- if self.request.headers.get("Access-Control-Request-Headers", None):
- headers += ", " + self.request.headers["Access-Control-Request-Headers"]
- self.set_header("Access-Control-Allow-Headers", headers)
-
-
-
-class RPCHandler(BaseHandler):
- """
- Handler for property read-write and method calls
- """
-
- async def get(self) -> None:
- """
- runs property or action if accessible by 'GET' method. Default for property reads.
- """
- await self.handle_through_thing('GET')
-
- async def post(self) -> None:
- """
- runs property or action if accessible by 'POST' method. Default for action execution.
- """
- await self.handle_through_thing('POST')
-
- async def patch(self) -> None:
- """
- runs property or action if accessible by 'PATCH' method.
- """
- await self.handle_through_thing('PATCH')
-
- async def put(self) -> None:
- """
- runs property or action if accessible by 'PUT' method. Default for property writes.
- """
- await self.handle_through_thing('PUT')
-
- async def delete(self) -> None:
- """
- runs property or action if accessible by 'DELETE' method. Default for property deletes.
- """
- await self.handle_through_thing('DELETE')
-
- def set_headers(self) -> None:
- """
- sets default headers for RPC (property read-write and action execution). The general headers are listed as follows:
-
- .. code-block:: yaml
-
- Content-Type: application/json
- Access-Control-Allow-Credentials: true
- Access-Control-Allow-Origin:
- """
- self.set_header("Content-Type" , "application/json")
- self.set_header("Access-Control-Allow-Credentials", "true")
-
- async def options(self) -> None:
- """
- Options for the resource. Main functionality is to inform the client is a specific HTTP method is supported by
- the property or the action (Access-Control-Allow-Methods).
- """
- if self.has_access_control:
- self.set_status(204)
- self.set_access_control_allow_headers()
- self.set_header("Access-Control-Allow-Credentials", "true")
- self.set_header("Access-Control-Allow-Methods", ', '.join(self.resource.instructions.supported_methods()))
- else:
- self.set_status(401, "forbidden")
- self.finish()
-
-
- async def handle_through_thing(self, http_method : str) -> None:
- """
- handles the actual RPC call, called by each of the HTTP methods with their name as the argument.
- """
- if not self.has_access_control:
- self.set_status(401, "forbidden")
- elif http_method not in self.resource.instructions:
- self.set_status(404, "not found")
- else:
- reply = None
- try:
- arguments, context, timeout = self.get_execution_parameters()
- if self.schema_validator is not None:
- self.schema_validator.validate(arguments)
- reply = await self.zmq_client_pool.async_execute(
- instance_name=self.resource.instance_name,
- instruction=self.resource.instructions.__dict__[http_method],
- arguments=arguments,
- context=context,
- raise_client_side_exception=False,
- invokation_timeout=timeout,
- execution_timeout=None,
- argument_schema=self.resource.argument_schema
- ) # type: ignore
- # message mapped client pool currently strips the data part from return message
- # and provides that as reply directly
- self.set_status(200, "ok")
- except ConnectionAbortedError as ex:
- self.set_status(503, str(ex))
- event_loop = asyncio.get_event_loop()
- event_loop.call_soon(lambda : asyncio.create_task(self.owner.update_router_with_thing(
- self.zmq_client_pool[self.resource.instance_name])))
- except ConnectionError as ex:
- await self.owner.update_router_with_thing(self.zmq_client_pool[self.resource.instance_name])
- await self.handle_through_thing(http_method) # reschedule
- return
- except Exception as ex:
- self.logger.error(f"error while scheduling RPC call - {str(ex)}")
- self.logger.debug(f"traceback - {ex.__traceback__}")
- self.set_status(500, "error while scheduling RPC call")
- reply = self.serializer.dumps({"exception" : format_exception_as_json(ex)})
- self.set_headers()
- if reply:
- self.write(reply)
- self.finish()
-
-
-
-class EventHandler(BaseHandler):
- """
- handles events emitted by ``Thing`` and tunnels them as HTTP SSE.
- """
- def initialize(self, resource, validator: BaseSchemaValidator, owner=None) -> None:
- super().initialize(resource, validator, owner)
- self.data_header = b'data: %s\n\n'
-
- def set_headers(self) -> None:
- """
- sets default headers for event handling. The general headers are listed as follows:
-
- .. code-block:: yaml
-
- Content-Type: text/event-stream
- Cache-Control: no-cache
- Connection: keep-alive
- Access-Control-Allow-Credentials: true
- Access-Control-Allow-Origin:
- """
- self.set_header("Content-Type", "text/event-stream")
- self.set_header("Cache-Control", "no-cache")
- self.set_header("Connection", "keep-alive")
- self.set_header("Access-Control-Allow-Credentials", "true")
-
- async def get(self):
- """
- events are support only with GET method.
- """
- if self.has_access_control:
- self.set_headers()
- await self.handle_datastream()
- else:
- self.set_status(401, "forbidden")
- self.finish()
-
- async def options(self):
- """
- options for the resource.
- """
- if self.has_access_control:
- self.set_status(204)
- self.set_access_control_allow_headers()
- self.set_header("Access-Control-Allow-Credentials", "true")
- self.set_header("Access-Control-Allow-Methods", 'GET')
- else:
- self.set_status(401, "forbidden")
- self.finish()
-
- def receive_blocking_event(self, event_consumer : EventConsumer):
- return event_consumer.receive(timeout=10000, deserialize=False)
-
- async def handle_datastream(self) -> None:
- """
- called by GET method and handles the event.
- """
- try:
- event_consumer_cls = EventConsumer if self.owner._zmq_inproc_event_context else AsyncEventConsumer
- # synchronous context with INPROC pub or asynchronous context with IPC or TCP pub, we handle both in async
- # fashion as HTTP server should be running purely sync(or normal) python method.
- event_consumer = event_consumer_cls(self.resource.unique_identifier, self.resource.socket_address,
- identity=f"{self.resource.unique_identifier}|HTTPEvent|{uuid.uuid4()}",
- logger=self.logger, http_serializer=self.serializer,
- context=self.owner._zmq_inproc_event_context if self.resource.socket_address.startswith('inproc') else None)
- event_loop = asyncio.get_event_loop()
- self.set_status(200)
- except Exception as ex:
- self.logger.error(f"error while subscribing to event - {str(ex)}")
- self.set_status(500, "could not subscribe to event source from thing")
- self.write(self.serializer.dumps({"exception" : format_exception_as_json(ex)}))
- return
-
- while True:
- try:
- if isinstance(event_consumer, AsyncEventConsumer):
- data = await event_consumer.receive(timeout=10000, deserialize=False)
- else:
- data = await event_loop.run_in_executor(None, self.receive_blocking_event, event_consumer)
- if data:
- # already JSON serialized
- self.write(self.data_header % data)
- await self.flush() # log after flushing just to be sure
- self.logger.debug(f"new data sent - {self.resource.name}")
- else:
- self.logger.debug(f"found no new data - {self.resource.name}")
- await self.flush() # heartbeat - raises StreamClosedError if client disconnects
- except StreamClosedError:
- break
- except Exception as ex:
- self.logger.error(f"error while pushing event - {str(ex)}")
- self.write(self.data_header % self.serializer.dumps(
- {"exception" : format_exception_as_json(ex)}))
- try:
- if isinstance(self.owner._zmq_inproc_event_context, zmq.asyncio.Context):
- event_consumer.exit()
- except Exception as ex:
- self.logger.error(f"error while closing event consumer - {str(ex)}" )
-
-
-class JPEGImageEventHandler(EventHandler):
- """
- handles events with images with image data header
- """
- def initialize(self, resource, validator: BaseSchemaValidator, owner = None) -> None:
- super().initialize(resource, validator, owner)
- self.data_header = b'data:image/jpeg;base64,%s\n\n'
-
-
-class PNGImageEventHandler(EventHandler):
- """
- handles events with images with image data header
- """
- def initialize(self, resource, validator: BaseSchemaValidator, owner = None) -> None:
- super().initialize(resource, validator, owner)
- self.data_header = b'data:image/png;base64,%s\n\n'
-
-
-
-class FileHandler(StaticFileHandler):
-
- @classmethod
- def get_absolute_path(cls, root: str, path: str) -> str:
- """
- Returns the absolute location of ``path`` relative to ``root``.
-
- ``root`` is the path configured for this `StaticFileHandler`
- (in most cases the ``static_path`` `Application` setting).
-
- This class method may be overridden in subclasses. By default
- it returns a filesystem path, but other strings may be used
- as long as they are unique and understood by the subclass's
- overridden `get_content`.
-
- .. versionadded:: 3.1
- """
- return root+path
-
-
-
-class ThingsHandler(BaseHandler):
- """
- add or remove things
- """
-
- async def get(self):
- self.set_status(404)
- self.finish()
-
- async def post(self):
- if not self.has_access_control:
- self.set_status(401, 'forbidden')
- else:
- try:
- instance_name = ""
- await self.zmq_client_pool.create_new(server_instance_name=instance_name)
- await self.owner.update_router_with_thing(self.zmq_client_pool[instance_name])
- self.set_status(204, "ok")
- except Exception as ex:
- self.set_status(500, str(ex))
- self.set_headers()
- self.finish()
-
- async def options(self):
- if self.has_access_control:
- self.set_status(204)
- self.set_access_control_allow_headers()
- self.set_header("Access-Control-Allow-Credentials", "true")
- self.set_header("Access-Control-Allow-Methods", 'GET, POST')
- else:
- self.set_status(401, "forbidden")
- self.finish()
-
-
-class StopHandler(BaseHandler):
- """Stops the tornado HTTP server"""
-
- def initialize(self, owner = None) -> None:
- from .HTTPServer import HTTPServer
- assert isinstance(owner, HTTPServer)
- self.owner = owner
- self.allowed_clients = self.owner.allowed_clients
-
- async def post(self):
- if not self.has_access_control:
- self.set_status(401, 'forbidden')
- else:
- try:
- # Stop the Tornado server
- asyncio.get_event_loop().call_soon(lambda : asyncio.create_task(self.owner.stop()))
- self.set_status(204, "ok")
- self.set_header("Access-Control-Allow-Credentials", "true")
- except Exception as ex:
- self.set_status(500, str(ex))
- self.finish()
\ No newline at end of file
diff --git a/hololinked/server/http/__init__.py b/hololinked/server/http/__init__.py
new file mode 100644
index 00000000..98ed26ba
--- /dev/null
+++ b/hololinked/server/http/__init__.py
@@ -0,0 +1,757 @@
+import asyncio
+import warnings
+import logging
+import socket
+import ssl
+import typing
+from tornado import ioloop
+from tornado.web import Application
+from tornado.httpserver import HTTPServer as TornadoHTTP1Server
+from tornado.httpclient import AsyncHTTPClient, HTTPRequest
+# from tornado_http2.server import Server as TornadoHTTP2Server
+
+from ...param import Parameterized
+from ...param.parameters import Integer, IPAddress, ClassSelector, Selector, TypedList, String
+from ...constants import HTTP_METHODS, ZMQ_TRANSPORTS, HTTPServerTypes, Operations
+from ...utils import complete_pending_tasks_in_current_loop, complete_pending_tasks_in_current_loop_async, forkable, get_IP_from_interface, get_current_async_loop, issubklass, pep8_to_dashed_name, get_default_logger, print_pending_tasks_in_current_loop, run_callable_somehow
+from ...serializers.serializers import JSONSerializer
+from ...schema_validators import BaseSchemaValidator, JSONSchemaValidator
+from ...core.property import Property
+from ...core.actions import Action
+from ...core.events import Event
+from ...core.thing import Thing, ThingMeta
+from ...td import ActionAffordance, EventAffordance, PropertyAffordance
+from ...core.zmq.brokers import AsyncZMQClient, MessageMappedZMQClientPool
+from .handlers import ActionHandler, PropertyHandler, BaseHandler, EventHandler, ThingsHandler, StopHandler
+
+
+
+class HTTPServer(Parameterized):
+ """
+ HTTP(s) server to route requests to `Thing`.
+ """
+
+ things = TypedList(item_type=(str, Thing), default=None, allow_None=True,
+ doc="instance name of the things to be served by the HTTP server." ) # type: typing.List[str]
+ port = Integer(default=8080, bounds=(1, 65535),
+ doc="the port at which the server should be run" ) # type: int
+ address = IPAddress(default='0.0.0.0',
+ doc="IP address") # type: str
+ # protocol_version = Selector(objects=[1, 1.1, 2], default=2,
+ # doc="for HTTP 2, SSL is mandatory. HTTP2 is recommended. \
+ # When no SSL configurations are provided, defaults to 1.1" ) # type: float
+ logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True,
+ doc="logging.Logger" ) # type: logging.Logger
+ log_level = Selector(
+ objects=[logging.DEBUG, logging.INFO, logging.ERROR, logging.WARN,
+ logging.CRITICAL, logging.ERROR],
+ default=logging.INFO,
+ doc="""alternative to logger, this creates an internal logger with the specified log level
+ along with a IO stream handler."""
+ ) # type: int
+ serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True,
+ doc="""json serializer used by the server""" ) # type: JSONSerializer
+ ssl_context = ClassSelector(class_=ssl.SSLContext, default=None, allow_None=True,
+ doc="SSL context to provide encrypted communication") # type: typing.Optional[ssl.SSLContext]
+ certfile = String(default=None, allow_None=True,
+ doc="""alternative to SSL context, provide certificate file & key file to allow the server to
+ create a SSL context""") # type: str
+ keyfile = String(default=None, allow_None=True,
+ doc="""alternative to SSL context, provide certificate file & key file to allow the server to
+ create a SSL context""") # type: str
+ allowed_clients = TypedList(item_type=str,
+ doc="""Serves request and sets CORS only from these clients, other clients are rejected with 403.
+ Unlike pure CORS, the server resource is not even executed if the client is not
+ an allowed client. if None any client is served.""")
+ host = String(default=None, allow_None=True,
+ doc="Host Server to subscribe to coordinate starting sequence of remote objects & web GUI" ) # type: str
+ # network_interface = String(default='Ethernet',
+ # doc="Currently there is no logic to detect the IP addresss (as externally visible) correctly, \
+ # therefore please send the network interface name to retrieve the IP. If a DNS server is present, \
+ # you may leave this field" ) # type: str
+ property_handler = ClassSelector(default=PropertyHandler, class_=(PropertyHandler, BaseHandler), isinstance=False,
+ doc="custom web request handler of your choice for property read-write & action execution" ) # type: typing.Union[BaseHandler, PropertyHandler]
+ action_handler = ClassSelector(default=ActionHandler, class_=(ActionHandler, BaseHandler), isinstance=False,
+ doc="custom web request handler of your choice for property read-write & action execution" ) # type: typing.Union[BaseHandler, ActionHandler]
+ event_handler = ClassSelector(default=EventHandler, class_=(EventHandler, BaseHandler), isinstance=False,
+ doc="custom event handler of your choice for handling events") # type: typing.Union[BaseHandler, EventHandler]
+ schema_validator = ClassSelector(class_=BaseSchemaValidator, default=JSONSchemaValidator, allow_None=True, isinstance=False,
+ doc="""Validator for JSON schema. If not supplied, a default JSON schema validator is created.""") # type: BaseSchemaValidator
+
+
+
+ def __init__(self,
+ things : typing.List[str] | typing.List[Thing] | typing.List[ThingMeta] | None = None,
+ *,
+ port: int = 8080,
+ address: str = '0.0.0.0',
+ # host: typing.Optional[str] = None,
+ logger: typing.Optional[logging.Logger] = None,
+ log_level: int = logging.INFO,
+ serializer: typing.Optional[JSONSerializer] = None,
+ ssl_context: typing.Optional[ssl.SSLContext] = None,
+ schema_validator: typing.Optional[BaseSchemaValidator] = JSONSchemaValidator,
+ certfile: str = None,
+ keyfile: str = None,
+ # protocol_version : int = 1, network_interface : str = 'Ethernet',
+ allowed_clients: typing.Optional[typing.Union[str, typing.Iterable[str]]] = None,
+ **kwargs
+ ) -> None:
+ """
+ Parameters
+ ----------
+ things: List[str]
+ instance name of the things to be served as a list.
+ port: int, default 8080
+ the port at which the server should be run
+ address: str, default 0.0.0.0
+ IP address
+ logger: logging.Logger, optional
+ logging.Logger instance
+ log_level: int
+ alternative to logger, this creates an internal logger with the specified log level along with a IO stream handler.
+ serializer: JSONSerializer, optional
+ json serializer used by the server
+ ssl_context: ssl.SSLContext
+ SSL context to provide encrypted communication
+ certfile: str
+ alternative to SSL context, provide certificate file & key file to allow the server to create a SSL context
+ keyfile: str
+ alternative to SSL context, provide certificate file & key file to allow the server to create a SSL context
+ allowed_clients: List[str]
+            serves request and sets CORS only from these clients, other clients are rejected with 403. Unlike pure CORS
+ feature, the server resource is not even executed if the client is not an allowed client.
+ **kwargs:
+ rpc_handler: RPCHandler | BaseHandler, optional
+ custom web request handler of your choice for property read-write & action execution
+ event_handler: EventHandler | BaseHandler, optional
+ custom event handler of your choice for handling events
+ """
+ super().__init__(
+ things=things,
+ port=port,
+ address=address,
+ # host=host,
+ logger=logger,
+ log_level=log_level,
+ serializer=serializer or JSONSerializer(),
+ # protocol_version=1,
+ schema_validator=schema_validator,
+ certfile=certfile,
+ keyfile=keyfile,
+ ssl_context=ssl_context,
+ # network_interface='Ethernet',# network_interface,
+ property_handler=kwargs.get('property_handler', PropertyHandler),
+ action_handler=kwargs.get('action_handler', ActionHandler),
+ event_handler=kwargs.get('event_handler', EventHandler),
+ allowed_clients=allowed_clients if allowed_clients is not None else []
+ )
+
+ self._IP = f"{self.address}:{self.port}"
+ if self.logger is None:
+ self.logger = get_default_logger('{}|{}'.format(self.__class__.__name__,
+ f"{self.address}:{self.port}"),
+ self.log_level)
+
+ self.tornado_instance = None
+ self.app = Application(handlers=[
+ (r'/things', ThingsHandler, dict(owner_inst=self)),
+ (r'/stop', StopHandler, dict(owner_inst=self))
+ ])
+ self.router = ApplicationRouter(self.app, self)
+ self._checked = False
+
+ self.zmq_client_pool = MessageMappedZMQClientPool(
+ id=self._IP,
+ server_ids=[],
+ client_ids=[],
+ handshake=False,
+ poll_timeout=100,
+ logger=self.logger
+ )
+ self._disconnected_things = dict()
+ self._registered_things = dict() # type: typing.Dict[typing.Type[ThingMeta], typing.List[str]]
+
+ self._zmq_inproc_socket_context = None
+ self._zmq_inproc_event_context = None
+
+ if self.things is not None:
+ self.add_things(*self.things)
+
+
+ @property
+ def all_ok(self) -> bool:
+ """
+ check if all the requirements are met before starting the server, auto invoked by listen().
+ """
+ if self._checked:
+ return True
+ # print("client pool context", self.zmq_client_pool.context)
+ event_loop = get_current_async_loop() # sets async loop for a non-possessing thread as well
+ # event_loop.call_soon(lambda : asyncio.create_task(self.update_router_with_things()))
+ event_loop.call_soon(lambda : asyncio.create_task(self.subscribe_to_host()))
+ event_loop.call_soon(lambda : asyncio.create_task(self.zmq_client_pool.poll_responses()) )
+ self.zmq_client_pool.handshake()
+
+ self.tornado_event_loop = None
+ # set value based on what event loop we use, there is some difference
+ # between the asyncio event loop and the tornado event loop
+
+ # if self.protocol_version == 2:
+ # raise NotImplementedError("Current HTTP2 is not implemented.")
+ # self.tornado_instance = TornadoHTTP2Server(self.app, ssl_options=self.ssl_context)
+ # else:
+ self.tornado_instance = TornadoHTTP1Server(self.app, ssl_options=self.ssl_context) # type: TornadoHTTP1Server
+ self._checked = True
+ return True
+
+
+ @forkable
+ def listen(self, forked: bool = False) -> None:
+ """
+ Start the HTTP server. This method is blocking. Async event loops intending to schedule the HTTP server should instead use
+ the inner tornado instance's (``HTTPServer.tornado_instance``) listen() method.
+ """
+ assert self.all_ok, 'HTTPServer all is not ok before starting' # Will always be True or cause some other exception
+ self.tornado_event_loop = ioloop.IOLoop.current()
+ self.tornado_instance.listen(port=self.port, address=self.address)
+ self.logger.info(f'started webserver at {self._IP}, ready to receive requests.')
+ self.tornado_event_loop.start()
+ if forked:
+ complete_pending_tasks_in_current_loop() # will reach here only when the server is stopped, so complete pending tasks
+
+
+ def stop(self, attempt_async_stop: bool = True) -> None:
+ """
+ Stop the HTTP server - unreliable, use async_stop() if possible.
+ A stop handler at the path '/stop' with POST method is already implemented that invokes this
+ method for the clients.
+ """
+ if attempt_async_stop:
+ run_callable_somehow(self.async_stop())
+ return
+ self.zmq_client_pool.stop_polling()
+ if not self.tornado_instance:
+ return
+ self.tornado_instance.stop()
+ run_callable_somehow(self.tornado_instance.close_all_connections())
+ if self.tornado_event_loop is not None:
+ self.tornado_event_loop.stop()
+ complete_pending_tasks_in_current_loop()
+ print_pending_tasks_in_current_loop()
+
+
+ async def async_stop(self) -> None:
+ """
+ Stop the HTTP server. A stop handler at the path '/stop' with POST method is already implemented that invokes this
+ method for the clients.
+ """
+ self.zmq_client_pool.stop_polling()
+ if not self.tornado_instance:
+ return
+ self.tornado_instance.stop()
+ await self.tornado_instance.close_all_connections()
+ if self.tornado_event_loop is not None:
+ self.tornado_event_loop.stop()
+ print_pending_tasks_in_current_loop()
+ # await complete_pending_tasks_in_current_loop_async()
+
+
+ def add_things(self, *things: Thing | ThingMeta | dict | str) -> None:
+ """
+ Add things to be served by the HTTP server
+
+ Parameters
+ ----------
+ *things: Thing | ThingMeta | dict | str
+            the thing instance(s) or thing class(es) to be served, or a map of address/ZMQ protocol to thing id, 
+ for example - {'tcp://my-pc:5555': 'my-thing-id', 'IPC' : 'my-thing-id-2'}
+ """
+ for thing in things:
+ if isinstance(thing, Thing):
+ self.router.add_thing_instance(thing)
+ elif isinstance(thing, ThingMeta):
+ warnings.warn(f"ThingMeta {thing} is not a thing instance, no need to add it to the server." +
+ f" Just supply a thing instance to the server. skipping...", category=UserWarning)
+ elif isinstance(thing, (dict, str)):
+ self.router.add_zmq_served_thing(thing)
+ elif issubklass(thing, ThingMeta):
+ raise TypeError(f"thing should be of type Thing, given type {type(thing)}")
+
+
+ def add_thing(self, thing: Thing | ThingMeta | dict | str) -> None:
+ """
+ Add thing to be served by the HTTP server
+
+ Parameters
+ ----------
+ thing: str | Thing | ThingMeta
+ id of the thing or the thing instance or thing class to be served
+ """
+ self.add_things(thing)
+
+
+ def register_id_for_thing(
+ self,
+ thing_cls: typing.Type[ThingMeta],
+ thing_id: str
+ ) -> None:
+ """register an expected thing id for a thing class"""
+ assert isinstance(thing_id, str), f"thing_id should be a string, given {type(thing_id)}"
+ if not self._registered_things.get(thing_cls, None):
+ self._registered_things[thing_cls] = []
+ if isinstance(thing_id, list):
+ self._registered_things[thing_cls].extend(thing_id)
+ else:
+ self._registered_things[thing_cls].append(thing_id)
+
+
+ def get_thing_from_id(self, id: str) -> typing.Type[ThingMeta] | None:
+ """get the thing id for a thing class"""
+ for thing_cls, thing_ids in self._registered_things.items():
+ if id in thing_ids:
+ return thing_cls
+ return None
+
+
+ def add_property(self,
+ URL_path: str,
+ property: Property | PropertyAffordance,
+ http_methods: typing.Tuple[str, typing.Optional[str], typing.Optional[str]] | None = ('GET', 'PUT', None),
+ handler: BaseHandler | PropertyHandler = PropertyHandler,
+ **kwargs
+ ) -> None:
+ """
+ Add a property to be accessible by HTTP
+
+ Parameters
+ ----------
+ URL_path: str
+ URL path to access the property
+ property: Property | PropertyAffordance
+ Property (object) to be served or its JSON representation
+ http_methods: Tuple[str, str, str]
+ tuple of http methods to be used for read, write and delete. Use None or omit HTTP method for
+ unsupported operations. For example - for readonly property use ('GET', None, None) or ('GET',)
+ handler: BaseHandler | PropertyHandler, optional
+ custom handler for the property, otherwise the default handler will be used
+ kwargs: dict
+ additional keyword arguments to be passed to the handler's __init__
+ """
+ if not isinstance(property, (Property, PropertyAffordance)):
+ raise TypeError(f"property should be of type Property, given type {type(property)}")
+ if not issubklass(handler, BaseHandler):
+ raise TypeError(f"handler should be subclass of BaseHandler, given type {type(handler)}")
+ if isinstance(property, Property):
+ property = property.to_affordance()
+ read_http_method = write_http_method = delete_http_method = None
+ http_methods = _comply_http_method(http_methods)
+ if len(http_methods) == 1:
+ read_http_method = http_methods[0]
+ elif len(http_methods) == 2:
+ read_http_method, write_http_method = http_methods
+ elif len(http_methods) == 3:
+ read_http_method, write_http_method, delete_http_method = http_methods
+ if read_http_method != 'GET':
+ raise ValueError("read method should be GET")
+ if write_http_method and write_http_method not in ['POST', 'PUT']:
+ raise ValueError("write method should be POST or PUT")
+ if delete_http_method and delete_http_method != 'DELETE':
+ raise ValueError("delete method should be DELETE")
+ kwargs['resource'] = property
+ kwargs['owner_inst'] = self
+ self.router.add_rule(
+ affordance=property,
+ URL_path=URL_path,
+ handler=handler,
+ kwargs=kwargs
+ )
+
+
+ def add_action(self,
+ URL_path: str,
+ action: Action | ActionAffordance,
+ http_method: str | None = 'POST',
+ handler: BaseHandler | ActionHandler = ActionHandler,
+ **kwargs
+ ) -> None:
+ """
+ Add an action to be accessible by HTTP
+
+ Parameters
+ ----------
+ URL_path: str
+ URL path to access the action
+ action: Action | ActionAffordance
+ Action (object) to be served or its JSON representation
+ http_method: str
+ http method to be used for the action
+ handler: BaseHandler | ActionHandler, optional
+ custom handler for the action
+ kwargs : dict
+ additional keyword arguments to be passed to the handler's __init__
+ """
+ if not isinstance(action, (Action, ActionAffordance)):
+ raise TypeError(f"Given action should be of type Action or ActionAffordance, given type {type(action)}")
+ if not issubklass(handler, BaseHandler):
+ raise TypeError(f"handler should be subclass of BaseHandler, given type {type(handler)}")
+ http_methods = _comply_http_method(http_method)
+ if len(http_methods) != 1:
+ raise ValueError("http_method should be a single HTTP method")
+ if isinstance(action, Action):
+ action = action.to_affordance() # type: ActionAffordance
+ # action._build_forms()
+ kwargs['resource'] = action
+ kwargs['owner_inst'] = self
+ self.router.add_rule(
+ affordance=action,
+ URL_path=URL_path,
+ handler=handler,
+ kwargs=kwargs
+ )
+
+
+ def add_event(self,
+ URL_path: str,
+ event: Event | EventAffordance,
+ handler: BaseHandler | EventHandler = EventHandler,
+ **kwargs
+ ) -> None:
+ """
+ Add an event to be accessible by HTTP server; only GET method is supported for events.
+
+ Parameters
+ ----------
+ URL_path: str
+ URL path to access the event
+ event: Event | EventAffordance
+ Event (object) to be served or its JSON representation
+ handler: BaseHandler | EventHandler, optional
+ custom handler for the event
+ kwargs: dict
+ additional keyword arguments to be passed to the handler's __init__
+ """
+ if not isinstance(event, (Event, EventAffordance)):
+ raise TypeError(f"event should be of type Event or EventAffordance, given type {type(event)}")
+ if not issubklass(handler, BaseHandler):
+ raise TypeError(f"handler should be subclass of BaseHandler, given type {type(handler)}")
+ if isinstance(event, Event):
+ event = event.to_affordance()
+ # event._build_forms()
+ kwargs['resource'] = event
+ kwargs['owner_inst'] = self
+ self.router.add_rule(
+ affordance=event,
+ URL_path=URL_path,
+ handler=handler,
+ kwargs=kwargs
+ )
+
+
+ async def subscribe_to_host(self):
+ if self.host is None:
+ return
+ client = AsyncHTTPClient()
+ for i in range(300): # try for five minutes
+ try:
+ res = await client.fetch(HTTPRequest(
+ url=f"{self.host}/subscribers",
+ method='POST',
+ body=JSONSerializer.dumps(dict(
+ hostname=socket.gethostname(),
+ IPAddress=get_IP_from_interface(self.network_interface),
+ port=self.port,
+ type=self._type,
+ https=self.ssl_context is not None
+ )),
+ validate_cert=False,
+ headers={"content-type" : "application/json"}
+ ))
+ except Exception as ex:
+ self.logger.error(f"Could not subscribe to host {self.host}. error : {str(ex)}, error type : {type(ex)}.")
+ if i >= 299:
+ raise ex from None
+ else:
+ if res.code in [200, 201]:
+                    self.logger.info(f"subscribed successfully to host {self.host}")
+ break
+ elif i >= 299:
+                    raise RuntimeError(f"could not subscribe to host {self.host}. response {JSONSerializer.loads(res.body)}")
+ await asyncio.sleep(1)
+ # we lose the client anyway so we close it. if we decide to reuse the client, changes needed
+ client.close()
+
+
+ def __hash__(self):
+ return hash(self._IP)
+
+ def __eq__(self, other):
+ if not isinstance(other, HTTPServer):
+ return False
+ return self._IP == other._IP
+
+ def __str__(self):
+ return f"{self.__class__.__name__}(address={self.address}, port={self.port})"
+
+
+
+class ApplicationRouter:
+ """
+ Covering implementation of the application router to add rules to the tornado application.
+ Not a real router, which is taken care of by the tornado application automatically.
+ """
+
+ def __init__(self, app: Application, server: HTTPServer) -> None:
+ self.app = app
+ self.server = server
+ self._pending_rules = []
+
+
+ def add_rule(self,
+ affordance: PropertyAffordance | ActionAffordance | EventAffordance,
+ URL_path: str,
+ handler: typing.Type[BaseHandler],
+ kwargs: dict,
+ ) -> None:
+ """
+ Add rules to the application router. This is a temporary method to add rules to the application router.
+ """
+ for rule in self.app.wildcard_router.rules:
+ if rule.matcher == URL_path:
+ warnings.warn(f"URL path {URL_path} already exists in the router -" +
+ f" replacing it for {affordance.what} {affordance.name}", category=UserWarning)
+ for rule in self._pending_rules:
+ if rule[0] == URL_path:
+ warnings.warn(f"URL path {URL_path} already exists in the pending rules -" +
+ f" replacing it for {affordance.what} {affordance.name}", category=UserWarning)
+ if getattr(affordance, 'thing_id', None) is not None:
+ if not URL_path.startswith(f'/{affordance.thing_id}'):
+ URL_path = f'/{affordance.thing_id}{URL_path}'
+ warnings.warn(f"URL path {URL_path} does not start with the thing id {affordance.thing_id},"
+ + f" adding it to the path, new path = {URL_path}. To disable this behavior, "
+ + f" please prepend the path with the thing id.")
+ self.app.wildcard_router.add_rules([(URL_path, handler, kwargs)])
+ else:
+ self._pending_rules.append((URL_path, handler, kwargs))
+ """
+ for handler based tornado rule matcher, the Rule object has following
+ signature
+
+ def __init__(
+ self,
+ matcher: "Matcher",
+ target: Any,
+ target_kwargs: Optional[Dict[str, Any]] = None,
+ name: Optional[str] = None,
+ ) -> None:
+
+ matcher - based on route
+ target - handler
+ target_kwargs - given to handler's initialize
+ name - ...
+
+ len == 2 tuple is route + handler
+ len == 3 tuple is route + handler + target kwargs
+
+ so we give (path, BaseHandler, {'resource' : PropertyAffordance, 'owner' : self})
+
+ path is extracted from interaction affordance name or given by the user
+ BaseHandler is the base handler of this package for interaction affordances
+ resource goes into target kwargs which is needed for the handler to work correctly
+ """
+
+
+ def _resolve_rules(self, id: str) -> None:
+ """
+ Process the pending rules and add them to the application router.
+ """
+ thing_cls = self.server.get_thing_from_id(id)
+ pending_rules = []
+ for rule in self._pending_rules:
+ if rule[2]["resource"].owner != thing_cls:
+ pending_rules.append(rule)
+ continue
+ URL_path, handler, kwargs = rule
+ URL_path = f'/{id}{URL_path}'
+ rule = (URL_path, handler, kwargs)
+ self.app.wildcard_router.add_rules([rule])
+ self._pending_rules = pending_rules
+
+
+ def __contains__(self, item: str | Property | Action | Event | PropertyAffordance | ActionAffordance | EventAffordance) -> bool:
+ """
+ Check if the item is in the application router.
+        Not exact for tornado's rules when a string is provided for the URL path, 
+ as you need to provide the Matcher object
+ """
+ if isinstance(item, str):
+ for rule in self.app.wildcard_router.rules:
+ if rule.matcher == item:
+ return True
+ for rule in self._pending_rules:
+ if rule[0] == item:
+ return True
+ elif isinstance(item, (Property, Action, Event)):
+ item = item.to_affordance()
+ if isinstance(item, (PropertyAffordance, ActionAffordance, EventAffordance)):
+ for rule in self._pending_rules:
+ if rule[2]["resource"] == item:
+ return True
+ return False
+
+
+ def add_thing_instance(self, thing: Thing | ThingMeta) -> None:
+ """
+ internal method to add a thing instance to be served by the HTTP server. Iterates through the
+ interaction affordances and adds a route for each property, action and event.
+ """
+ self.add_interaction_affordances(
+ [obj.to_affordance(thing) for obj in thing.properties.remote_objects.values()],
+ [obj.to_affordance(thing) for obj in thing.actions.descriptors.values()],
+ [obj.to_affordance(thing) for obj in thing.events.descriptors.values()],
+ )
+
+
+ def add_interaction_affordances(
+ self,
+ properties: typing.Iterable[PropertyAffordance],
+ actions: typing.Iterable[ActionAffordance],
+ events: typing.Iterable[EventAffordance],
+ ) -> None:
+ for property in properties:
+ if property in self:
+ continue
+ if property.thing_id is not None:
+ path = f'/{property.thing_id}/{pep8_to_dashed_name(property.name)}'
+ else:
+ path = f'/{pep8_to_dashed_name(property.name)}'
+ self.server.add_property(
+ URL_path=path,
+ property=property,
+ http_methods=('GET') if property.readOnly else ('GET', 'PUT'), # if prop.fdel is None else ('GET', 'PUT', 'DELETE'),
+ handler=self.server.property_handler
+ )
+ for action in actions:
+ if action in self:
+ continue
+ if action.thing_id is not None:
+ path = f'/{action.thing_id}/{pep8_to_dashed_name(action.name)}'
+ else:
+ path = f'/{pep8_to_dashed_name(action.name)}'
+ self.server.add_action(
+ URL_path=path,
+ action=action,
+ handler=self.server.action_handler
+ )
+ for event in events:
+ if event in self:
+ continue
+ if event.thing_id is not None:
+ path = f'/{event.thing_id}/{pep8_to_dashed_name(event.name)}'
+ else:
+ path = f'/{pep8_to_dashed_name(event.name)}'
+ self.server.add_event(
+ URL_path=path,
+ event=event,
+ handler=self.server.event_handler
+ )
+
+
+ def add_zmq_served_thing(self, *thing_ids: dict[str, typing.Any] | str) -> None:
+ """
+ Add a thing served by ZMQ server to the HTTP server. Mostly useful for INPROC transport which behaves like a local object.
+ Iterates through the interaction affordances and adds a route for each property, action and event.
+ """
+ async def update_server_with_TD(
+ thing_id: str,
+ transport: str = None,
+ socket_address: str = None
+ ) -> None:
+ try:
+ from ...client.zmq.consumed_interactions import ZMQAction
+ from ...core import Thing
+ client = AsyncZMQClient(
+ id=self.server._IP,
+ server_id=thing_id,
+ handshake=False,
+ transport=transport,
+ socket_address=socket_address,
+ context=self.server.zmq_client_pool.context,
+ poll_timeout=self.server.zmq_client_pool.poll_timeout,
+ logger=self.server.logger
+ )
+ client.handshake(timeout=10000)
+ await client.handshake_complete()
+ self.server.zmq_client_pool.register(client, thing_id)
+ assert isinstance(Thing.get_thing_model, Action)
+ FetchTDAffordance = Thing.get_thing_model.to_affordance()
+ FetchTDAffordance._thing_id = thing_id
+ FetchTD = ZMQAction(
+ resource=FetchTDAffordance,
+ sync_client=None,
+ async_client=client
+ )
+ TD = await FetchTD.async_call() # type: typing.Dict[str, typing.Any]
+ self.add_interaction_affordances(
+ [PropertyAffordance.from_TD(name, TD) for name in TD["properties"].keys()],
+ [ActionAffordance.from_TD(name, TD) for name in TD["actions"].keys()],
+ [EventAffordance.from_TD(name, TD) for name in TD["events"].keys()]
+ )
+ self._resolve_rules(thing_id)
+ except ConnectionError:
+ self.server.logger.warning(f"could not connect to {thing_id} using {transport or socket_address} transport")
+ except Exception as ex:
+ self.server.logger.error(f"could not connect to {thing_id} using {transport or socket_address} transport. error: {str(ex)}")
+
+ coroutines = []
+ for thing_id in thing_ids:
+ if isinstance(thing_id, str):
+ for transport in ['INPROC', 'IPC']:
+ coroutines.append(update_server_with_TD(thing_id, transport, None))
+ elif isinstance(thing_id, dict):
+ for socket_address_or_transport, thing_id in thing_id.items():
+ if socket_address_or_transport.startswith('tcp://'):
+ coroutines.append(update_server_with_TD(thing_id, None, socket_address_or_transport))
+ else:
+ coroutines.append(update_server_with_TD(thing_id, socket_address_or_transport, None))
+ get_current_async_loop().run_until_complete(asyncio.gather(*coroutines))
+
+
+ def print_rules(self) -> None:
+ """
+ Print the rules in the application router.
+ """
+ try:
+ from prettytable import PrettyTable
+ table = PrettyTable()
+ table.field_names = ["URL Path", "Handler", "Resource Name"]
+
+ for rule in self.app.wildcard_router.rules:
+ table.add_row([rule.matcher, rule.target.__name__, getattr(rule.target_kwargs.get("resource"), "name", "N/A")])
+ for rule in self._pending_rules:
+ table.add_row([rule[0], rule[1].__name__, rule[2]["resource"].name])
+ print(table)
+ except ImportError:
+ print("Application Router Rules:")
+ for rule in self.app.wildcard_router.rules:
+ print(rule)
+ for rule in self._pending_rules:
+ print(rule[0], rule[2]["resource"].name)
+
+
+
+def _comply_http_method(http_methods : typing.Any):
+ """comply the supplied HTTP method to the router to a tuple and check if the method is supported"""
+ if isinstance(http_methods, str):
+ http_methods = (http_methods,)
+ if not isinstance(http_methods, tuple):
+ raise TypeError("http_method should be a tuple")
+ for method in http_methods:
+ if method not in HTTP_METHODS.__members__.values() and not method is None:
+ raise ValueError(f"method {method} not supported")
+ return http_methods
+
+
+__all__ = [
+ HTTPServer.__name__
+]
\ No newline at end of file
diff --git a/hololinked/server/http/handler_map.py b/hololinked/server/http/handler_map.py
new file mode 100644
index 00000000..dcb196e0
--- /dev/null
+++ b/hololinked/server/http/handler_map.py
@@ -0,0 +1,21 @@
+from ...core.properties import (
+ String,
+ Bytes,
+ Integer,
+ Number,
+ Boolean,
+ List,
+ Tuple
+)
+from .handlers import PropertyHandler, JPEGImageHandler, PNGImageHandler
+
+
+handler_map = {
+ String: PropertyHandler,
+ Bytes: PropertyHandler,
+ Integer: PropertyHandler,
+ Number: PropertyHandler,
+ Boolean: PropertyHandler,
+ List: PropertyHandler,
+ Tuple: PropertyHandler,
+}
\ No newline at end of file
diff --git a/hololinked/server/http/handlers.py b/hololinked/server/http/handlers.py
new file mode 100644
index 00000000..8e75717b
--- /dev/null
+++ b/hololinked/server/http/handlers.py
@@ -0,0 +1,587 @@
+import typing
+import uuid
+import asyncio
+import zmq.asyncio
+from tornado.web import RequestHandler, StaticFileHandler
+from tornado.iostream import StreamClosedError
+
+from ...utils import *
+from ...config import global_config
+from ...core.zmq.brokers import AsyncEventConsumer, EventConsumer
+from ...core.zmq.message import EMPTY_BYTE, ResponseMessage
+from ...constants import Operations
+from ...schema_validators import BaseSchemaValidator
+from ...serializers.payloads import PreserializedData, SerializableData
+from ...td import InteractionAffordance, PropertyAffordance, ActionAffordance, EventAffordance
+
+
+
# Request bodies with these content types are forwarded as a deserializable payload;
# any other content type is forwarded untouched as a preserialized byte payload.
# NOTE(review): "application/text" is not a registered MIME type (the standard is
# "text/plain") - confirm clients actually send this exact value.
__default_supported_content_types__ = [
    "application/json",
    "application/x-msgpack",
    "application/text"
]
# octet-stream is not supported
+
+
class BaseHandler(RequestHandler):
    """
    Base request handler for running operations on the Thing. Subclasses implement
    the HTTP verb methods (get/post/put/delete) and header policies.
    """

    def initialize(self,
            resource: InteractionAffordance | PropertyAffordance | ActionAffordance | EventAffordance,
            owner_inst = None
        ) -> None:
        """
        Parameters
        ----------
        resource: InteractionAffordance | PropertyAffordance | ActionAffordance | EventAffordance
            JSON representation of Thing's exposed object using a dataclass that can quickly convert to a
            ZMQ Request object
        owner_inst: HTTPServer
            owning `hololinked.server.HTTPServer` instance
        """
        # imported here (not at module top) to avoid a circular import with the package __init__
        from . import HTTPServer
        assert isinstance(owner_inst, HTTPServer)
        self.resource = resource
        self.schema_validator = None # self.owner_inst.schema_validator # not supported yet
        self.owner_inst = owner_inst
        # convenience aliases copied from the owning server so verb methods avoid indirection
        self.zmq_client_pool = self.owner_inst.zmq_client_pool
        self.serializer = self.owner_inst.serializer
        self.logger = self.owner_inst.logger
        self.allowed_clients = self.owner_inst.allowed_clients

    @property
    def has_access_control(self) -> bool:
        """
        Checks if a client is an allowed client. Requests from un-allowed clients are rejected without
        execution. Custom web handlers can use this property to check if a client has access control
        on the server or ``Thing``. Also sets the ``Access-Control-Allow-Origin`` response header
        when access is granted.
        """
        if len(self.allowed_clients) == 0:
            # no restriction configured - allow any origin
            self.set_header("Access-Control-Allow-Origin", "*")
            return True
        # For credential login, access control allow origin cannot be '*',
        # See: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#examples_of_access_control_scenarios
        origin = self.request.headers.get("Origin")
        # the trailing-slash variant is also checked because browsers differ in how they report Origin
        if origin is not None and (origin in self.allowed_clients or origin + '/' in self.allowed_clients):
            self.set_header("Access-Control-Allow-Origin", origin)
            return True
        return False

    def set_access_control_allow_headers(self) -> None:
        """
        For credential login, access control allow headers cannot be a wildcard '*'.
        Some requests require exact list of allowed headers for the client to access the response.
        Use this method in set_headers() override if necessary.
        """
        # echo back every header the client sent plus any it pre-announced for the actual request
        headers = ", ".join(self.request.headers.keys())
        if self.request.headers.get("Access-Control-Request-Headers", None):
            headers += ", " + self.request.headers["Access-Control-Request-Headers"]
        self.set_header("Access-Control-Allow-Headers", headers)

    def set_headers(self) -> None:
        """
        override this to set custom headers without having to reimplement entire handler
        """
        raise NotImplementedError("implement set headers in child class to automatically call it" +
                " while directing the request to Thing")

    def get_execution_parameters(self) -> typing.Tuple[typing.Dict[str, typing.Any], typing.Dict[str, typing.Any]]:
        """
        merges all arguments to a single JSON body and retrieves execution context (like oneway calls, fetching executing
        logs) and timeouts.

        Returns
        -------
        tuple of (server execution context, thing execution context); both empty dicts
        when the request carries no query arguments.
        """
        arguments = dict()
        if len(self.request.query_arguments) >= 1:
            for key, value in self.request.query_arguments.items():
                # tornado always delivers query values as lists; unwrap singletons
                if len(value) == 1:
                    arguments[key] = self.serializer.loads(value[0])
                else:
                    arguments[key] = [self.serializer.loads(val) for val in value]
            # context keys are popped so they are not forwarded as operation arguments
            thing_execution_context = dict(fetch_execution_logs=arguments.pop('fetch_execution_logs', False))
            server_execution_context = dict(
                invokation_timeout=arguments.pop('invokation_timeout', None),
                execution_timeout=arguments.pop('execution_timeout', None),
                oneway=arguments.pop('oneway', False)
            )

            # if timeout is not None and timeout < 0:
            #     timeout = None # reinstate logic soon
            # if self.resource.request_as_argument:
            #     arguments['request'] = self.request # find some way to pass the request object to the thing
            return server_execution_context, thing_execution_context
        return dict(), dict()

    def get_payload(self) -> typing.Tuple[SerializableData, PreserializedData]:
        """
        retrieves the payload from the request body and classifies it as serializable or
        preserialized depending on the Content-Type header. Returns ``(None, None)`` after
        writing a 400 response when the body cannot be processed - callers must check for this.
        """
        payload = SerializableData(value=None)
        preserialized_payload = PreserializedData(value=b'')
        if self.request.body:
            try:
                if self.request.headers.get("Content-Type", None) in __default_supported_content_types__:
                    payload.value = self.request.body
                    payload.content_type = self.request.headers.get("Content-Type", None)
                else:
                    # unknown content type - forward the raw bytes untouched
                    preserialized_payload.value = self.request.body
                    preserialized_payload.content_type = self.request.headers.get("Content-Type", None)
            except Exception as ex:
                self.set_status(400, str(ex))
                self.write(self.serializer.dumps({"exception" : format_exception_as_json(ex)}))
                self.finish()
                return None, None
        return payload, preserialized_payload

    def get_response_payload(self, zmq_response: ResponseMessage) -> typing.Any:
        """
        extracts the payload (serialized and/or preserialized) from a ZMQ response message.
        Returns the preserialized bytes alone, the serialized payload alone, or a tuple of
        both when both are present.
        """
        if zmq_response is None:
            raise RuntimeError("No last response available. Did you make an operation?")
        payload = zmq_response.payload.value
        preserialized_payload = zmq_response.preserialized_payload.value
        if preserialized_payload != EMPTY_BYTE:
            if payload is None:
                return preserialized_payload
            return payload, preserialized_payload
        return payload

    async def get(self) -> None:
        """
        runs property or action if accessible by 'GET' method. Default for property reads.
        """
        raise NotImplementedError("implement GET request method in child handler class")

    async def post(self) -> None:
        """
        runs property or action if accessible by 'POST' method. Default for action execution.
        """
        raise NotImplementedError("implement POST request method in child handler class")

    async def put(self) -> None:
        """
        runs property or action if accessible by 'PUT' method. Default for property writes.
        """
        raise NotImplementedError("implement PUT request method in child handler class")

    async def delete(self) -> None:
        """
        runs property or action if accessible by 'DELETE' method. Default for property deletes
        (not a valid operation as per web of things semantics).
        """
        raise NotImplementedError("implement DELETE request method in child handler class")

    def is_method_allowed(self, method : str) -> bool:
        """
        checks if the method is allowed for the property.
        """
        raise NotImplementedError("implement is_method_allowed in child handler class")
+
+
+
class RPCHandler(BaseHandler):
    """
    Handler for property read-write and method calls
    """

    def set_headers(self) -> None:
        """
        sets default headers for RPC (property read-write and action execution). The general headers are listed as follows:

        .. code-block:: yaml

            Content-Type: application/json
            Access-Control-Allow-Credentials: true
            Access-Control-Allow-Origin:
        """
        self.set_header("Content-Type" , "application/json")
        self.set_header("Access-Control-Allow-Credentials", "true")

    async def options(self) -> None:
        """
        Options for the resource. Main functionality is to inform the client if a specific HTTP method is supported by
        the property or the action (Access-Control-Allow-Methods).
        """
        if self.has_access_control:
            self.set_status(204)
            self.set_access_control_allow_headers()
            self.set_header("Access-Control-Allow-Credentials", "true")
            self.set_header("Access-Control-Allow-Methods", ', '.join(self.resource.instructions.supported_methods()))
        else:
            self.set_status(401, "forbidden")
        self.finish()

    async def handle_through_thing(self, operation : str) -> None:
        """
        schedules the requested Thing operation over ZMQ and writes the reply to the HTTP client.

        Parameters
        ----------
        operation: str
            the WoT operation to perform (e.g. readProperty, writeProperty, invokeAction)
        """
        # response_payload is only set on success or explicit error serialization, so a
        # 503 path no longer writes the stale *request* payload back to the client
        response_payload = None
        try:
            server_execution_context, thing_execution_context = self.get_execution_parameters()
            payload, preserialized_payload = self.get_payload()
            if payload is None and preserialized_payload is None:
                # get_payload() already wrote a 400 response and finished the request
                return
            if self.schema_validator is not None and global_config.VALIDATE_SCHEMA_ON_CLIENT:
                self.schema_validator.validate(payload)
            response_message = await self.zmq_client_pool.async_execute(
                client_id=self.zmq_client_pool.get_client_id_from_thing_id(self.resource.thing_id),
                thing_id=self.resource.thing_id,
                objekt=self.resource.name,
                operation=operation,
                payload=payload,
                preserialized_payload=preserialized_payload,
                server_execution_context=server_execution_context,
                thing_execution_context=thing_execution_context
            )
            # message mapped client pool currently strips the data part from return message
            # and provides that as reply directly
            response_payload = self.get_response_payload(response_message)
            self.set_status(200, "ok")
        except ConnectionAbortedError as ex:
            # NOTE(review): reconnection logic (update_router_with_thing + reschedule) was
            # sketched here previously - reinstate once the client pool supports it
            self.set_status(503, str(ex))
        except Exception as ex:
            self.logger.error(f"error while scheduling RPC call - {str(ex)}")
            self.logger.debug(f"traceback - {ex.__traceback__}")
            self.set_status(500, "error while scheduling RPC call")
            response_payload = self.serializer.dumps({"exception" : format_exception_as_json(ex)})
        self.set_headers()
        if response_payload:
            self.write(response_payload)
        self.finish()
+
+
+
class PropertyHandler(RPCHandler):
    """Handler for property reads (GET), writes (POST/PUT) and deletes (DELETE)."""

    def is_method_allowed(self, method : str) -> bool:
        """
        checks if the method is allowed for the property - reads are rejected for
        write-only properties and writes are rejected for read-only properties.
        """
        if not self.has_access_control:
            self.set_status(401, "forbidden")
            return False
        # fix: 'and' binds tighter than 'or', so the previous expression
        # (method == 'POST' or method == 'PUT' and readOnly) rejected every POST
        # regardless of the property's readOnly flag
        if (method == 'GET' and self.resource.writeOnly) or (
                method in ('POST', 'PUT') and self.resource.readOnly):
            self.set_status(405, "method not allowed")
            self.finish()
            return False
        return True

    async def get(self) -> None:
        """
        reads the property value; allowed unless the property is write-only.
        """
        if not self.is_method_allowed('GET'):
            return
        await self.handle_through_thing(Operations.readProperty)

    async def post(self) -> None:
        """
        writes the property value; allowed unless the property is read-only.
        """
        if not self.is_method_allowed('POST'):
            return
        await self.handle_through_thing(Operations.writeProperty)

    async def put(self) -> None:
        """
        writes the property value; allowed unless the property is read-only.
        """
        if not self.is_method_allowed('PUT'):
            return
        await self.handle_through_thing(Operations.writeProperty)

    async def delete(self) -> None:
        """
        deletes the property (not a valid operation as per web of things semantics).
        """
        if not self.is_method_allowed('DELETE'):
            return
        await self.handle_through_thing(Operations.deleteProperty)
+
+
+
class JPEGImageHandler(PropertyHandler):
    """Property handler that serves raw JPEG bytes instead of JSON."""

    def set_headers(self) -> None:
        """
        sets response headers for JPEG image payloads:

        ```yaml
        Content-Type: image/jpeg
        Access-Control-Allow-Credentials: true
        Access-Control-Allow-Origin:
        ```
        """
        for header_name, header_value in (
            ("Content-Type", "image/jpeg"),
            ("Access-Control-Allow-Credentials", "true"),
        ):
            self.set_header(header_name, header_value)
+
+
class PNGImageHandler(PropertyHandler):
    """Property handler that serves raw PNG bytes instead of JSON."""

    def set_headers(self) -> None:
        """
        sets response headers for PNG image payloads:

        ```yaml
        Content-Type: image/png
        Access-Control-Allow-Credentials: true
        Access-Control-Allow-Origin:
        ```
        """
        for header_name, header_value in (
            ("Content-Type", "image/png"),
            ("Access-Control-Allow-Credentials", "true"),
        ):
            self.set_header(header_name, header_value)
+
+
+
class ActionHandler(RPCHandler):
    """Handler that invokes an action for any of the four common HTTP verbs."""

    def is_method_allowed(self, method : str) -> bool:
        """
        actions only require the client to pass access control; every verb is permitted.
        """
        if self.has_access_control:
            return True
        self.set_status(401, "forbidden")
        return False

    async def _invoke(self, method : str) -> None:
        # shared implementation: every verb triggers the same WoT invokeAction operation
        if not self.is_method_allowed(method):
            return
        await self.handle_through_thing(Operations.invokeAction)

    async def get(self) -> None:
        """invokes the action via GET"""
        await self._invoke('GET')

    async def post(self) -> None:
        """invokes the action via POST (the default verb for action execution)"""
        await self._invoke('POST')

    async def put(self) -> None:
        """invokes the action via PUT"""
        await self._invoke('PUT')

    async def delete(self) -> None:
        """invokes the action via DELETE"""
        await self._invoke('DELETE')
+
+
+
class EventHandler(BaseHandler):
    """
    handles events emitted by ``Thing`` and tunnels them as HTTP server-sent events (SSE).
    """
    def initialize(self, resource, validator: BaseSchemaValidator, owner_inst=None) -> None:
        """
        Parameters
        ----------
        resource: EventAffordance
            representation of the event to subscribe to
        validator: BaseSchemaValidator
            schema validator for event payloads (currently unused downstream)
        owner_inst: HTTPServer
            owning `hololinked.server.HTTPServer` instance
        """
        # fix: BaseHandler.initialize() accepts only (resource, owner_inst) - passing the
        # validator positionally raised a TypeError on every event subscription
        super().initialize(resource, owner_inst)
        self.schema_validator = validator
        self.data_header = b'data: %s\n\n'

    def set_headers(self) -> None:
        """
        sets default headers for event handling. The general headers are listed as follows:

        .. code-block:: yaml

            Content-Type: text/event-stream
            Cache-Control: no-cache
            Connection: keep-alive
            Access-Control-Allow-Credentials: true
            Access-Control-Allow-Origin:
        """
        self.set_header("Content-Type", "text/event-stream")
        self.set_header("Cache-Control", "no-cache")
        self.set_header("Connection", "keep-alive")
        self.set_header("Access-Control-Allow-Credentials", "true")

    async def get(self):
        """
        events are supported only with the GET method.
        """
        if self.has_access_control:
            self.set_headers()
            await self.handle_datastream()
        else:
            self.set_status(401, "forbidden")
        self.finish()

    async def options(self):
        """
        options for the resource - only GET is advertised.
        """
        if self.has_access_control:
            self.set_status(204)
            self.set_access_control_allow_headers()
            self.set_header("Access-Control-Allow-Credentials", "true")
            self.set_header("Access-Control-Allow-Methods", 'GET')
        else:
            self.set_status(401, "forbidden")
        self.finish()

    def receive_blocking_event(self, event_consumer : EventConsumer):
        """blocking receive, run inside a thread-pool executor for the synchronous consumer"""
        return event_consumer.receive(timeout=10000, deserialize=False)

    async def handle_datastream(self) -> None:
        """
        called by the GET method; subscribes to the event source and pushes each emission
        as an SSE frame until the client disconnects.
        """
        try:
            event_consumer_cls = EventConsumer if self.owner_inst._zmq_inproc_event_context is not None else AsyncEventConsumer
            # synchronous context with INPROC pub or asynchronous context with IPC or TCP pub, we handle both in async
            # fashion as HTTP server should be running purely sync(or normal) python method.
            event_consumer = event_consumer_cls(self.resource.unique_identifier, self.resource.socket_address,
                                identity=f"{self.resource.unique_identifier}|HTTPEvent|{uuid.uuid4()}",
                                logger=self.logger, http_serializer=self.serializer,
                                context=self.owner_inst._zmq_inproc_event_context if self.resource.socket_address.startswith('inproc') else None)
            event_loop = asyncio.get_event_loop()
            self.set_status(200)
        except Exception as ex:
            self.logger.error(f"error while subscribing to event - {str(ex)}")
            self.set_status(500, "could not subscribe to event source from thing")
            self.write(self.serializer.dumps({"exception" : format_exception_as_json(ex)}))
            return

        while True:
            try:
                if isinstance(event_consumer, AsyncEventConsumer):
                    data = await event_consumer.receive(timeout=10000, deserialize=False)
                else:
                    data = await event_loop.run_in_executor(None, self.receive_blocking_event, event_consumer)
                if data:
                    # already JSON serialized
                    self.write(self.data_header % data)
                    await self.flush() # log after flushing just to be sure
                    self.logger.debug(f"new data sent - {self.resource.name}")
                else:
                    self.logger.debug(f"found no new data - {self.resource.name}")
                    await self.flush() # heartbeat - raises StreamClosedError if client disconnects
            except StreamClosedError:
                break
            except Exception as ex:
                self.logger.error(f"error while pushing event - {str(ex)}")
                self.write(self.data_header % self.serializer.dumps(
                            {"exception" : format_exception_as_json(ex)}))
        try:
            # NOTE(review): the consumer is only exited for the inproc context - confirm
            # whether AsyncEventConsumer sockets also need explicit cleanup here
            if isinstance(self.owner_inst._zmq_inproc_event_context, zmq.asyncio.Context):
                event_consumer.exit()
        except Exception as ex:
            self.logger.error(f"error while closing event consumer - {str(ex)}" )
+
+
class JPEGImageEventHandler(EventHandler):
    """
    handles image events - SSE frames carry a base64 JPEG data-URI prefix instead
    of the plain ``data:`` header
    """
    def initialize(self, resource, validator: BaseSchemaValidator, owner_inst = None) -> None:
        # identical to EventHandler apart from the SSE frame prefix
        super().initialize(resource, validator, owner_inst)
        self.data_header = b'data:image/jpeg;base64,%s\n\n'
+
+
class PNGImageEventHandler(EventHandler):
    """
    handles image events - SSE frames carry a base64 PNG data-URI prefix instead
    of the plain ``data:`` header
    """
    def initialize(self, resource, validator: BaseSchemaValidator, owner_inst = None) -> None:
        # identical to EventHandler apart from the SSE frame prefix
        super().initialize(resource, validator, owner_inst)
        self.data_header = b'data:image/png;base64,%s\n\n'
+
+
+
class FileHandler(StaticFileHandler):
    """Static file handler whose root-relative path resolution is plain concatenation."""

    @classmethod
    def get_absolute_path(cls, root: str, path: str) -> str:
        """
        Return the location of ``path`` relative to ``root`` by simple concatenation.

        ``root`` is the path configured for this `StaticFileHandler` (in most cases
        the ``static_path`` `Application` setting). Unlike tornado's default
        implementation no filesystem normalization is performed, so ``root`` is
        expected to already carry any needed trailing separator.
        """
        # NOTE(review): concatenation performs no path-traversal checks - confirm that
        # validate_absolute_path still guards against '..' sequences for this handler
        return f"{root}{path}"
+
+
+
class ThingsHandler(BaseHandler):
    """
    add or remove things
    """

    def set_headers(self) -> None:
        """
        default headers for JSON responses. Required override: BaseHandler.set_headers()
        raises NotImplementedError, which previously crashed every POST to this endpoint.
        """
        self.set_header("Content-Type", "application/json")
        self.set_header("Access-Control-Allow-Credentials", "true")

    async def get(self):
        """listing things is not supported yet"""
        self.set_status(404)
        self.finish()

    async def post(self):
        """add a new thing to the server by creating a ZMQ client and registering its routes"""
        if not self.has_access_control:
            self.set_status(401, 'forbidden')
        else:
            try:
                # TODO(review): placeholder - the instance name should come from the request body
                instance_name = ""
                await self.zmq_client_pool.create_new(server_instance_name=instance_name)
                await self.owner_inst.update_router_with_thing(self.zmq_client_pool[instance_name])
                self.set_status(204, "ok")
            except Exception as ex:
                self.set_status(500, str(ex))
            self.set_headers()
        self.finish()

    async def options(self):
        """inform the client of the supported methods (GET, POST)"""
        if self.has_access_control:
            self.set_status(204)
            self.set_access_control_allow_headers()
            self.set_header("Access-Control-Allow-Credentials", "true")
            self.set_header("Access-Control-Allow-Methods", 'GET, POST')
        else:
            self.set_status(401, "forbidden")
        self.finish()
+
+
class StopHandler(BaseHandler):
    """Stops the tornado HTTP server"""

    def initialize(self, owner_inst = None) -> None:
        """register the owning server; unlike other handlers no resource is involved"""
        # imported here to avoid a circular import with the package __init__
        from . import HTTPServer
        assert isinstance(owner_inst, HTTPServer)
        self.owner_inst = owner_inst
        self.allowed_clients = self.owner_inst.allowed_clients

    async def post(self):
        """shut the server down; responds 204 on success, 401 for disallowed clients"""
        if self.has_access_control:
            try:
                # schedule the tornado server shutdown on the running loop
                run_callable_somehow(self.owner_inst.async_stop())
                self.set_status(204, "ok")
                self.set_header("Access-Control-Allow-Credentials", "true")
            except Exception as ex:
                self.set_status(500, str(ex))
        else:
            self.set_status(401, 'forbidden')
        self.finish()
\ No newline at end of file
diff --git a/hololinked/server/security_definitions.py b/hololinked/server/security_definitions.py
deleted file mode 100644
index c76ff660..00000000
--- a/hololinked/server/security_definitions.py
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-class BaseSecurityDefinition:
- """Type shield for all security definitions"""
diff --git a/hololinked/server/serializers.py b/hololinked/server/serializers.py
deleted file mode 100644
index 29bfd96a..00000000
--- a/hololinked/server/serializers.py
+++ /dev/null
@@ -1,273 +0,0 @@
-# adopted from pyro - https://github.com/irmen/Pyro5 - see following license
-"""
-MIT License
-
-Copyright (c) Irmen de Jong
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-"""
-import pickle
-from msgspec import json as msgspecjson, msgpack
-import json as pythonjson
-import inspect
-import array
-import datetime
-import uuid
-import decimal
-import typing
-import warnings
-from enum import Enum
-from collections import deque
-
-try:
- import numpy
-except ImportError:
- pass
-
-from ..param.parameters import TypeConstrainedList, TypeConstrainedDict, TypedKeyMappingsConstrainedDict
-from .constants import JSONSerializable, Serializers
-from .utils import format_exception_as_json
-
-
-
-class BaseSerializer(object):
- """
- Base class for (de)serializer implementations. All serializers must inherit this class
- and overload dumps() and loads() to be usable by the ZMQ message brokers. Any serializer
- that returns bytes when serialized and a python object on deserialization will be accepted.
- Serialization and deserialization errors will be passed as invalid message type
- (see ZMQ messaging contract) from server side and a exception will be raised on the client.
- """
-
- def __init__(self) -> None:
- super().__init__()
- self.type = None
-
- def loads(self, data) -> typing.Any:
- "method called by ZMQ message brokers to deserialize data"
- raise NotImplementedError("implement in subclass")
-
- def dumps(self, data) -> bytes:
- "method called by ZMQ message brokers to serialize data"
- raise NotImplementedError("implement in subclass")
-
- def convert_to_bytes(self, data) -> bytes:
- if isinstance(data, bytes):
- return data
- if isinstance(data, bytearray):
- return bytes(data)
- if isinstance(data, memoryview):
- return data.tobytes()
- raise TypeError("serializer convert_to_bytes accepts only bytes, bytearray or memoryview")
-
-
-dict_keys = type(dict().keys())
-
-class JSONSerializer(BaseSerializer):
- "(de)serializer that wraps the msgspec JSON serialization protocol, default serializer for all clients."
-
- _type_replacements = {}
-
- def __init__(self) -> None:
- super().__init__()
- self.type = msgspecjson
-
- def loads(self, data : typing.Union[bytearray, memoryview, bytes]) -> JSONSerializable:
- "method called by ZMQ message brokers to deserialize data"
- return msgspecjson.decode(self.convert_to_bytes(data))
-
- def dumps(self, data) -> bytes:
- "method called by ZMQ message brokers to serialize data"
- return msgspecjson.encode(data, enc_hook=self.default)
-
- @classmethod
- def default(cls, obj) -> JSONSerializable:
- "method called if no serialization option was found."
- if hasattr(obj, 'json'):
- # alternative to type replacement
- return obj.json()
- if isinstance(obj, Enum):
- return obj.name
- if isinstance(obj, (set, dict_keys, deque, tuple)):
- # json module can't deal with sets so we make a tuple out of it
- return list(obj)
- if isinstance(obj, (TypeConstrainedDict, TypeConstrainedList, TypedKeyMappingsConstrainedDict)):
- return obj._inner # copy has been implemented with same signature for both types
- if isinstance(obj, uuid.UUID):
- return str(obj)
- if isinstance(obj, (datetime.datetime, datetime.date)):
- return obj.isoformat()
- if isinstance(obj, decimal.Decimal):
- return str(obj)
- if isinstance(obj, Exception):
- return format_exception_as_json(obj)
- if isinstance(obj, array.array):
- if obj.typecode == 'c':
- return obj.tostring()
- if obj.typecode == 'u':
- return obj.tounicode()
- return obj.tolist()
- if 'numpy' in globals() and isinstance(obj, numpy.ndarray):
- return obj.tolist()
- replacer = cls._type_replacements.get(type(obj), None)
- if replacer:
- return replacer(obj)
- raise TypeError("Given type cannot be converted to JSON : {}".format(type(obj)))
-
- @classmethod
- def register_type_replacement(cls, object_type, replacement_function) -> None:
- "register custom serialization function for a particular type"
- if object_type is type or not inspect.isclass(object_type):
- raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
- cls._type_replacements[object_type] = replacement_function
-
-
-class PythonBuiltinJSONSerializer(JSONSerializer):
- "(de)serializer that wraps the python builtin JSON serialization protocol."
-
- def __init__(self) -> None:
- super().__init__()
- self.type = pythonjson
-
- def loads(self, data : typing.Union[bytearray, memoryview, bytes]) -> typing.Any:
- "method called by ZMQ message brokers to deserialize data"
- return pythonjson.loads(self.convert_to_bytes(data))
-
- def dumps(self, data) -> bytes:
- "method called by ZMQ message brokers to serialize data"
- data = pythonjson.dumps(data, ensure_ascii=False, allow_nan=True, default=self.default)
- return data.encode("utf-8")
-
- def dump(self, data : typing.Dict[str, typing.Any], file_desc) -> None:
- "write JSON to file"
- pythonjson.dump(data, file_desc, ensure_ascii=False, allow_nan=True, default=self.default)
-
- def load(cls, file_desc) -> JSONSerializable:
- "load JSON from file"
- return pythonjson.load(file_desc)
-
-
-class PickleSerializer(BaseSerializer):
- "(de)serializer that wraps the pickle serialization protocol, use with encryption for safety."
-
- def __init__(self) -> None:
- super().__init__()
- self.type = pickle
-
- def dumps(self, data) -> bytes:
- "method called by ZMQ message brokers to serialize data"
- return pickle.dumps(data)
-
- def loads(self, data) -> typing.Any:
- "method called by ZMQ message brokers to deserialize data"
- return pickle.loads(self.convert_to_bytes(data))
-
-
-
-class MsgpackSerializer(BaseSerializer):
- """
- (de)serializer that wraps the msgspec MessagePack serialization protocol, recommended serializer for ZMQ based
- high speed applications. Set an instance of this serializer to both ``Thing.zmq_serializer`` and
- ``hololinked.client.ObjectProxy``. Unfortunately, MessagePack is currently not supported for HTTP clients.
- """
-
- def __init__(self) -> None:
- super().__init__()
- self.type = msgpack
-
- def dumps(self, value) -> bytes:
- return msgpack.encode(value)
-
- def loads(self, value) -> typing.Any:
- return msgpack.decode(self.convert_to_bytes(value))
-
-serializers = {
- None : JSONSerializer,
- 'json' : JSONSerializer,
- 'pickle' : PickleSerializer,
- 'msgpack' : MsgpackSerializer
-}
-
-
-try:
- import serpent
-
- class SerpentSerializer(BaseSerializer):
- """(de)serializer that wraps the serpent serialization protocol."""
-
- def __init__(self) -> None:
- super().__init__()
- self.type = serpent
-
- def dumps(self, data) -> bytes:
- "method called by ZMQ message brokers to serialize data"
- return serpent.dumps(data, module_in_classname=True)
-
- def loads(self, data) -> typing.Any:
- "method called by ZMQ message brokers to deserialize data"
- return serpent.loads(self.convert_to_bytes(data))
-
- @classmethod
- def register_type_replacement(cls, object_type, replacement_function) -> None:
- "register custom serialization function for a particular type"
- def custom_serializer(obj, serpent_serializer, outputstream, indentlevel):
- replaced = replacement_function(obj)
- if replaced is obj:
- serpent_serializer.ser_default_class(replaced, outputstream, indentlevel)
- else:
- serpent_serializer._serialize(replaced, outputstream, indentlevel)
-
- if object_type is type or not inspect.isclass(object_type):
- raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
- serpent.register_class(object_type, custom_serializer)
-
- serializers['serpent'] = SerpentSerializer
-except ImportError:
- pass
-
-
-
-def _get_serializer_from_user_given_options(
- zmq_serializer : typing.Union[str, BaseSerializer],
- http_serializer : typing.Union[str, JSONSerializer]
- ) -> typing.Tuple[BaseSerializer, JSONSerializer]:
- """
- We give options to specify serializer as a string or an object,
- """
- if http_serializer in [None, 'json'] or isinstance(http_serializer, JSONSerializer):
- http_serializer = http_serializer if isinstance(http_serializer, JSONSerializer) else JSONSerializer()
- else:
- raise ValueError("invalid JSON serializer option : {}".format(http_serializer))
- # could also technically be TypeError
- if isinstance(zmq_serializer, BaseSerializer):
- zmq_serializer = zmq_serializer
- if isinstance(zmq_serializer, PickleSerializer) or zmq_serializer.type == pickle:
- warnings.warn("using pickle serializer which is unsafe, consider another like msgpack.", UserWarning)
- elif zmq_serializer == 'json' or zmq_serializer is None:
- zmq_serializer = http_serializer
- elif isinstance(zmq_serializer, str):
- zmq_serializer = serializers.get(zmq_serializer, JSONSerializer)()
- else:
- raise ValueError("invalid rpc serializer option : {}".format(zmq_serializer))
- return zmq_serializer, http_serializer
-
-
-
-__all__ = ['JSONSerializer', 'PickleSerializer', 'MsgpackSerializer',
- 'serializers', 'BaseSerializer']
\ No newline at end of file
diff --git a/hololinked/server/server.py b/hololinked/server/server.py
new file mode 100644
index 00000000..ec254ed4
--- /dev/null
+++ b/hololinked/server/server.py
@@ -0,0 +1,25 @@
+
+
+
+
class BaseProtocolServer:
    """
    Skeleton interface for protocol servers that expose Things.
    All methods are no-op placeholders to be implemented by concrete servers.
    """

    def add_thing(self, thing):
        """register a thing with this server - placeholder, no implementation yet"""
        pass

    def remove_thing(self, thing):
        """deregister a thing from this server - placeholder, no implementation yet"""
        pass

    def add_property(self, thing, prop):
        """register a property affordance of ``thing`` - placeholder, no implementation yet"""
        pass

    def add_action(self, thing, action):
        """register an action affordance of ``thing`` - placeholder, no implementation yet"""
        pass

    def add_event(self, thing, event):
        """register an event affordance of ``thing`` - placeholder, no implementation yet"""
        pass

    def build_thing_description(self, thing):
        """produce a thing description for ``thing`` - placeholder, no implementation yet"""
        pass
diff --git a/hololinked/server/td.py b/hololinked/server/td.py
deleted file mode 100644
index b9abfa03..00000000
--- a/hololinked/server/td.py
+++ /dev/null
@@ -1,869 +0,0 @@
-import typing, inspect
-from dataclasses import dataclass, field
-
-
-
-from .constants import JSON, JSONSerializable
-from .utils import getattr_without_descriptor_read
-from .dataklasses import ActionInfoValidator
-from .events import Event
-from .properties import *
-from .property import Property
-from .thing import Thing
-from .state_machine import StateMachine
-from .eventloop import EventLoop
-
-
-
-@dataclass
-class Schema:
- """
- Base dataclass for all WoT schema; Implements a custom asdict method which replaces dataclasses' asdict
- utility function
- """
-
- skip_keys = [] # override this to skip some dataclass attributes in the schema
-
- replacement_keys = {
- 'context' : '@context',
- 'htv_methodName' : 'htv:methodName'
- }
-
- def asdict(self):
- """dataclass fields as dictionary skip_keys and replacement_keys accounted"""
- schema = dict()
- for field, value in self.__dataclass_fields__.items():
- if getattr(self, field, NotImplemented) is NotImplemented or field in self.skip_keys:
- continue
- if field in self.replacement_keys:
- schema[self.replacement_keys[field]] = getattr(self, field)
- else:
- schema[field] = getattr(self, field)
- return schema
-
- @classmethod
- def format_doc(cls, doc : str):
- """strip tabs, newlines, whitespaces etc."""
- doc_as_list = doc.split('\n')
- final_doc = []
- for index, line in enumerate(doc_as_list):
- line = line.lstrip('\n').rstrip('\n')
- line = line.lstrip('\t').rstrip('\t')
- line = line.lstrip('\n').rstrip('\n')
- line = line.lstrip().rstrip()
- if index > 0:
- line = ' ' + line # add space to left in case of new line
- final_doc.append(line)
- final_doc = ''.join(final_doc)
- final_doc = final_doc.lstrip().rstrip()
- return final_doc
-
-
-
-class JSONSchema:
- """type restrictor converting python types to JSON schema types"""
-
- _allowed_types = ('string', 'object', 'array', 'number', 'integer', 'boolean', None)
-
- _replacements = {
- int : 'integer',
- float : 'number',
- str : 'string',
- bool : 'boolean',
- dict : 'object',
- list : 'array',
- tuple : 'array',
- type(None) : 'null',
- Exception : {
- "type": "object",
- "properties": {
- "message": {"type": "string"},
- "type": {"type": "string"},
- "traceback": {"type": "array", "items": {"type": "string"}},
- "notes": {"type": ["string", "null"]}
- },
- "required": ["message", "type", "traceback"]
- }
- }
-
- _schemas = {
-
- }
-
- @classmethod
- def is_allowed_type(cls, type : typing.Any) -> bool:
- if type in JSONSchema._replacements.keys():
- return True
- return False
-
- @classmethod
- def is_supported(cls, typ: typing.Any) -> bool:
- """"""
- if typ in JSONSchema._schemas.keys():
- return True
- return False
-
- @classmethod
- def get_type(cls, typ : typing.Any) -> str:
- if not JSONSchema.is_allowed_type(typ):
- raise TypeError(f"Object for wot-td has invalid type for JSON conversion. Given type - {type(typ)}. " +
- "Use JSONSchema.register_replacements on hololinked.wot.td.JSONSchema object to recognise the type.")
- return JSONSchema._replacements[typ]
-
- @classmethod
- def register_type_replacement(self, type : typing.Any, json_schema_type : str,
- schema : typing.Optional[typing.Dict[str, JSONSerializable]] = None) -> None:
- """
- specify a python type as a JSON type.
- schema only supported for array and objects.
- """
- if json_schema_type in JSONSchema._allowed_types:
- JSONSchema._replacements[type] = json_schema_type
- if schema is not None:
- if json_schema_type not in ('array', 'object'):
- raise ValueError(f"schemas support only for array and object JSON schema types, your specified type - {type}.")
- JSONSchema._schemas[type] = schema
- else:
- raise TypeError(f"json schema replacement type must be one of allowed type - 'string', 'object', 'array', 'string', " +
- f"'number', 'integer', 'boolean', 'null'. Given value {json_schema_type}")
-
- @classmethod
- def get(cls, typ : typing.Any):
- """schema for array and objects only supported"""
- if not JSONSchema.is_supported(typ):
- raise ValueError(f"Schema for {typ} not provided. register one with JSONSchema.register_type_replacement()")
- return JSONSchema._schemas[typ]
-
-
-
-@dataclass
-class InteractionAffordance(Schema):
- """
- implements schema common to all interaction affordances.
- concepts - https://www.w3.org/TR/wot-thing-description11/#interactionaffordance
- """
- title : str
- titles : typing.Optional[typing.Dict[str, str]]
- description : str
- descriptions : typing.Optional[typing.Dict[str, str]]
- forms : typing.List["Form"]
- # uri variables
-
- def __init__(self):
- super().__init__()
-
-
-
-@dataclass
-class DataSchema(Schema):
- """
- implementes Dataschema attributes.
- https://www.w3.org/TR/wot-thing-description11/#sec-data-schema-vocabulary-definition
- """
- title : str
- titles : typing.Optional[typing.Dict[str, str]]
- description : str
- descriptions : typing.Optional[typing.Dict[str, str]]
- const : bool
- default : typing.Optional[typing.Any]
- readOnly : bool
- writeOnly : bool # write only are to be considered actions with no return value
- format : typing.Optional[str]
- unit : typing.Optional[str]
- type : str
- oneOf : typing.Optional[typing.List[typing.Dict[str, JSONSerializable]]]
- enum : typing.Optional[typing.List[typing.Any]]
-
- def __init__(self):
- super().__init__()
-
- def build(self, property : Property, owner : Thing, authority : str) -> None:
- """generates the schema"""
- self.title = property.label or property.name
- if property.constant:
- self.const = property.constant
- if property.readonly:
- self.readOnly = property.readonly
- if property.fget is None:
- self.default = property.default
- if property.doc:
- self.description = Schema.format_doc(property.doc)
- if property.metadata and property.metadata.get("unit", None) is not None:
- self.unit = property.metadata["unit"]
- if property.allow_None:
- if not hasattr(self, 'oneOf'):
- self.oneOf = []
- if hasattr(self, 'type'):
- self.oneOf.append(dict(type=self.type))
- del self.type
- if not any(types["type"] == None for types in self.oneOf):
- self.oneOf.append(dict(type="null"))
-
-
-
-@dataclass
-class PropertyAffordance(InteractionAffordance, DataSchema):
- """
- creates property affordance schema from ``property`` descriptor object
- schema - https://www.w3.org/TR/wot-thing-description11/#propertyaffordance
- """
- observable : bool
-
- _custom_schema_generators = dict()
-
- def __init__(self):
- super().__init__()
-
- def build(self, property : Property, owner : Thing, authority : str) -> None:
- """generates the schema"""
- DataSchema.build(self, property, owner, authority)
-
- self.forms = []
- for index, method in enumerate(property._remote_info.http_method):
- form = Form()
- # index is the order for http methods for (get, set, delete), generally (GET, PUT, DELETE)
- if (index == 1 and property.readonly) or index >= 2:
- continue # delete property is not a part of WoT, we also mostly never use it, so ignore.
- elif index == 0:
- form.op = 'readproperty'
- elif index == 1:
- form.op = 'writeproperty'
- form.href = f"{authority}{owner._full_URL_path_prefix}{property._remote_info.URL_path}"
- form.htv_methodName = method.upper()
- form.contentType = "application/json"
- self.forms.append(form.asdict())
-
- if property._observable:
- self.observable = property._observable
- form = Form()
- form.op = 'observeproperty'
- form.href = f"{authority}{owner._full_URL_path_prefix}{property._observable_event_descriptor.URL_path}"
- form.htv_methodName = "GET"
- form.subprotocol = "sse"
- form.contentType = "application/json"
- self.forms.append(form.asdict())
-
-
- @classmethod
- def generate_schema(self, property : Property, owner : Thing, authority : str) -> typing.Dict[str, JSONSerializable]:
- if not isinstance(property, Property):
- raise TypeError(f"Property affordance schema can only be generated for Property. "
- f"Given type {type(property)}")
- if isinstance(property, (String, Filename, Foldername, Path)):
- schema = StringSchema()
- elif isinstance(property, (Number, Integer)):
- schema = NumberSchema()
- elif isinstance(property, Boolean):
- schema = BooleanSchema()
- elif isinstance(property, (List, TypedList, Tuple, TupleSelector)):
- schema = ArraySchema()
- elif isinstance(property, Selector):
- schema = EnumSchema()
- elif isinstance(property, (TypedDict, TypedKeyMappingsDict)):
- schema = ObjectSchema()
- elif isinstance(property, ClassSelector):
- schema = OneOfSchema()
- elif self._custom_schema_generators.get(property, NotImplemented) is not NotImplemented:
- schema = self._custom_schema_generators[property]()
- elif isinstance(property, Property) and property.model is not None:
- if isinstance(property.model, dict):
- data_schema = property.model
- else:
- from .td_pydantic_extensions import GenerateJsonSchemaWithoutDefaultTitles, type_to_dataschema
- data_schema = type_to_dataschema(property.model).model_dump(mode='json', exclude_none=True)
- schema = PropertyAffordance()
- schema.build(property=property, owner=owner, authority=authority)
- final_schema = schema.asdict()
- if schema.oneOf: # allow_None = True
- final_schema['oneOf'].append(data_schema)
- else:
- final_schema.update(data_schema)
- return final_schema
- else:
- raise TypeError(f"WoT schema generator for this descriptor/property is not implemented. name {property.name} & type {type(property)}")
- schema.build(property=property, owner=owner, authority=authority)
- return schema.asdict()
-
- @classmethod
- def register_descriptor(cls, descriptor : Property, schema_generator : "PropertyAffordance") -> None:
- if not isinstance(descriptor, Property):
- raise TypeError("custom schema generator can also be registered for Property." +
- f" Given type {type(descriptor)}")
- if not isinstance(schema_generator, PropertyAffordance):
- raise TypeError("schema generator for Property must be subclass of PropertyAfforance. " +
- f"Given type {type(schema_generator)}" )
- PropertyAffordance._custom_schema_generators[descriptor] = schema_generator
-
-
-@dataclass
-class BooleanSchema(PropertyAffordance):
- """
- boolean schema - https://www.w3.org/TR/wot-thing-description11/#booleanschema
- used by Boolean descriptor
- """
- def __init__(self):
- super().__init__()
-
- def build(self, property: Property, owner: Thing, authority: str) -> None:
- """generates the schema"""
- self.type = 'boolean'
- PropertyAffordance.build(self, property, owner, authority)
-
-
-@dataclass
-class StringSchema(PropertyAffordance):
- """
- string schema - https://www.w3.org/TR/wot-thing-description11/#stringschema
- used by String, Filename, Foldername, Path descriptors
- """
- pattern : typing.Optional[str]
-
- def __init__(self):
- super().__init__()
-
- def build(self, property: Property, owner: Thing, authority: str) -> None:
- """generates the schema"""
- self.type = 'string'
- PropertyAffordance.build(self, property, owner, authority)
- if isinstance(property, String):
- if property.regex is not None:
- self.pattern = property.regex
-
-
-@dataclass
-class NumberSchema(PropertyAffordance):
- """
- number schema - https://www.w3.org/TR/wot-thing-description11/#numberschema
- used by String, Filename, Foldername, Path descriptors
- """
- minimum : typing.Optional[typing.Union[int, float]]
- maximum : typing.Optional[typing.Union[int, float]]
- exclusiveMinimum : typing.Optional[bool]
- exclusiveMaximum : typing.Optional[bool]
- step : typing.Optional[typing.Union[int, float]]
-
- def __init__(self):
- super().__init__()
-
- def build(self, property: Property, owner: Thing, authority: str) -> None:
- """generates the schema"""
- if isinstance(property, Integer):
- self.type = 'integer'
- elif isinstance(property, Number): # dont change order - one is subclass of other
- self.type = 'number'
- PropertyAffordance.build(self, property, owner, authority)
- if property.bounds is not None:
- if isinstance(property.bounds[0], (int, float)): # i.e. value is not None which is allowed by param
- if not property.inclusive_bounds[0]:
- self.exclusiveMinimum = property.bounds[0]
- else:
- self.minimum = property.bounds[0]
- if isinstance(property.bounds[1], (int, float)):
- if not property.inclusive_bounds[1]:
- self.exclusiveMaximum = property.bounds[1]
- else:
- self.maximum = property.bounds[1]
- if property.step:
- self.multipleOf = property.step
-
-
-@dataclass
-class ArraySchema(PropertyAffordance):
- """
- array schema - https://www.w3.org/TR/wot-thing-description11/#arrayschema
- Used by List, Tuple, TypedList and TupleSelector
- """
-
- items : typing.Optional[typing.Dict[str, JSONSerializable]]
- minItems : typing.Optional[int]
- maxItems : typing.Optional[int]
-
- def __init__(self):
- super().__init__()
-
- def build(self, property: Property, owner: Thing, authority: str) -> None:
- """generates the schema"""
- self.type = 'array'
- PropertyAffordance.build(self, property, owner, authority)
- self.items = []
- if isinstance(property, (List, Tuple, TypedList)) and property.item_type is not None:
- if property.bounds:
- if property.bounds[0]:
- self.minItems = property.bounds[0]
- if property.bounds[1]:
- self.maxItems = property.bounds[1]
- if isinstance(property.item_type, (list, tuple)):
- for typ in property.item_type:
- self.items.append(dict(type=JSONSchema.get_type(typ)))
- elif property.item_type is not None:
- self.items.append(dict(type=JSONSchema.get_type(property.item_type)))
- elif isinstance(property, TupleSelector):
- objects = list(property.objects)
- for obj in objects:
- if any(types["type"] == JSONSchema._replacements.get(type(obj), None) for types in self.items):
- continue
- self.items.append(dict(type=JSONSchema.get_type(type(obj))))
- if len(self.items) == 0:
- del self.items
- elif len(self.items) > 1:
- self.items = dict(oneOf=self.items)
-
-
-@dataclass
-class ObjectSchema(PropertyAffordance):
- """
- object schema - https://www.w3.org/TR/wot-thing-description11/#objectschema
- Used by TypedDict
- """
- properties : typing.Optional[typing.Dict[str, JSONSerializable]]
- required : typing.Optional[typing.List[str]]
-
- def __init__(self):
- super().__init__()
-
- def build(self, property: Property, owner: Thing, authority: str) -> None:
- """generates the schema"""
- PropertyAffordance.build(self, property, owner, authority)
- properties = None
- required = None
- if hasattr(property, 'json_schema'):
- # Code will not reach here for now as have not implemented schema for typed dictionaries.
- properties = property.json_schema["properties"]
- if property.json_schema.get("required", NotImplemented) is not NotImplemented:
- required = property.json_schema["required"]
- if not property.allow_None:
- self.type = 'object'
- if properties:
- self.properties = properties
- if required:
- self.required = required
- else:
- schema = dict(type='object')
- if properties:
- schema['properties'] = properties
- if required:
- schema['required'] = required
- self.oneOf.append(schema)
-
-
-@dataclass
-class OneOfSchema(PropertyAffordance):
- """
- custom schema to deal with ClassSelector to fill oneOf field correctly
- https://www.w3.org/TR/wot-thing-description11/#dataschema
- """
- properties : typing.Optional[typing.Dict[str, JSONSerializable]]
- required : typing.Optional[typing.List[str]]
- items : typing.Optional[typing.Dict[str, JSONSerializable]]
- minItems : typing.Optional[int]
- maxItems : typing.Optional[int]
- # ClassSelector can technically have a JSON serializable as a class_
-
- def __init__(self):
- super().__init__()
-
- def build(self, property: Property, owner: Thing, authority: str) -> None:
- """generates the schema"""
- self.oneOf = []
- if isinstance(property, ClassSelector):
- if not property.isinstance:
- raise NotImplementedError("WoT TD for ClassSelector with isinstance set to True is not supported yet. " +
- "Consider user this property in a different way.")
- if isinstance(property.class_, (list, tuple)):
- objects = list(property.class_)
- else:
- objects = [property.class_]
- elif isinstance(property, Selector):
- objects = list(property.objects)
- else:
- raise TypeError(f"EnumSchema and OneOfSchema supported only for Selector and ClassSelector. Given Type - {property}")
- for obj in objects:
- if any(types["type"] == JSONSchema._replacements.get(type(obj), None) for types in self.oneOf):
- continue
- if isinstance(property, ClassSelector):
- if not JSONSchema.is_allowed_type(obj):
- raise TypeError(f"Object for wot-td has invalid type for JSON conversion. Given type - {obj}. " +
- "Use JSONSchema.register_replacements on hololinked.wot.td.JSONSchema object to recognise the type.")
- subschema = dict(type=JSONSchema.get_type(obj))
- if JSONSchema.is_supported(obj):
- subschema.update(JSONSchema.get(obj))
- self.oneOf.append(subschema)
- elif isinstance(property, Selector):
- if JSONSchema.get_type(type(obj)) == "null":
- continue
- self.oneOf.append(dict(type=JSONSchema.get_type(type(obj))))
- PropertyAffordance.build(self, property, owner, authority)
- self.cleanup()
-
- def cleanup(self):
- if len(self.oneOf) == 1:
- oneOf = self.oneOf[0]
- self.type = oneOf["type"]
- if oneOf["type"] == 'object':
- if oneOf.get("properties", NotImplemented) is not NotImplemented:
- self.properties = oneOf["properties"]
- if oneOf.get("required", NotImplemented) is not NotImplemented:
- self.required = oneOf["required"]
- elif oneOf["type"] == 'array':
- if oneOf.get("items", NotImplemented) is not NotImplemented:
- self.items = oneOf["items"]
- if oneOf.get("maxItems", NotImplemented) is not NotImplemented:
- self.minItems = oneOf["minItems"]
- if oneOf.get("maxItems", NotImplemented) is not NotImplemented:
- self.maxItems = oneOf["maxItems"]
- del self.oneOf
-
-
-@dataclass
-class EnumSchema(OneOfSchema):
- """
- custom schema to fill enum field of property affordance correctly
- https://www.w3.org/TR/wot-thing-description11/#dataschema
- """
- def __init__(self):
- super().__init__()
-
- def build(self, property: Property, owner: Thing, authority: str) -> None:
- """generates the schema"""
- assert isinstance(property, Selector), f"EnumSchema compatible property is only Selector, not {property.__class__}"
- self.enum = list(property.objects)
- OneOfSchema.build(self, property, owner, authority)
-
-
-@dataclass
-class Link(Schema):
- href : str
- anchor : typing.Optional[str]
- type : typing.Optional[str] = field(default='application/json')
- # rel : typing.Optional[str] = field(default='next')
-
- def __init__(self):
- super().__init__()
-
- def build(self, resource : Thing, owner : Thing, authority : str) -> None:
- self.href = f"{authority}{resource._full_URL_path_prefix}/resources/wot-td"
- self.anchor = f"{authority}{owner._full_URL_path_prefix}"
-
-
-
-@dataclass
-class ExpectedResponse(Schema):
- """
- Form property.
- schema - https://www.w3.org/TR/wot-thing-description11/#expectedresponse
- """
- contentType : str
-
- def __init__(self):
- super().__init__()
-
-
-@dataclass
-class AdditionalExpectedResponse(Schema):
- """
- Form field for additional responses which are different from the usual response.
- schema - https://www.w3.org/TR/wot-thing-description11/#additionalexpectedresponse
- """
- success : bool = field(default=False)
- contentType : str = field(default='application/json')
- schema : typing.Optional[JSON] = field(default='exception')
-
- def __init__(self):
- super().__init__()
-
-
-@dataclass
-class Form(Schema):
- """
- Form hypermedia.
- schema - https://www.w3.org/TR/wot-thing-description11/#form
- """
- href : str
- op : str
- htv_methodName : str
- contentType : typing.Optional[str]
- additionalResponses : typing.Optional[typing.List[AdditionalExpectedResponse]]
- contentEncoding : typing.Optional[str]
- security : typing.Optional[str]
- scopes : typing.Optional[str]
- response : typing.Optional[ExpectedResponse]
- subprotocol : typing.Optional[str]
-
- def __init__(self):
- super().__init__()
-
-
-@dataclass
-class ActionAffordance(InteractionAffordance):
- """
- creates action affordance schema from actions (or methods).
- schema - https://www.w3.org/TR/wot-thing-description11/#actionaffordance
- """
- input : typing.Dict[str, JSONSerializable]
- output : typing.Dict[str, JSONSerializable]
- safe : bool
- idempotent : bool
- synchronous : bool
-
- def __init__(self):
- super(InteractionAffordance, self).__init__()
-
- def build(self, action : typing.Callable, owner : Thing, authority : str) -> None:
- assert isinstance(action._remote_info, ActionInfoValidator)
- if action._remote_info.argument_schema:
- self.input = action._remote_info.argument_schema
- if action._remote_info.return_value_schema:
- self.output = action._remote_info.return_value_schema
- self.title = action.__name__
- if action.__doc__:
- self.description = self.format_doc(action.__doc__)
- if not (hasattr(owner, 'state_machine') and owner.state_machine is not None and
- owner.state_machine.has_object(action._remote_info.obj)) and action._remote_info.idempotent:
- self.idempotent = action._remote_info.idempotent
- if action._remote_info.synchronous:
- self.synchronous = action._remote_info.synchronous
- if action._remote_info.safe:
- self.safe = action._remote_info.safe
- self.forms = []
- for method in action._remote_info.http_method:
- form = Form()
- form.op = 'invokeaction'
- form.href = f'{authority}{owner._full_URL_path_prefix}{action._remote_info.URL_path}'
- form.htv_methodName = method.upper()
- form.contentType = 'application/json'
- # form.additionalResponses = [AdditionalExpectedResponse().asdict()]
- self.forms.append(form.asdict())
-
- @classmethod
- def generate_schema(cls, action : typing.Callable, owner : Thing, authority : str) -> typing.Dict[str, JSONSerializable]:
- schema = ActionAffordance()
- schema.build(action=action, owner=owner, authority=authority)
- return schema.asdict()
-
-
-@dataclass
-class EventAffordance(InteractionAffordance):
- """
- creates event affordance schema from events.
- schema - https://www.w3.org/TR/wot-thing-description11/#eventaffordance
- """
- subscription : str
- data : typing.Dict[str, JSONSerializable]
-
- def __init__(self):
- super().__init__()
-
- def build(self, event : Event, owner : Thing, authority : str) -> None:
- self.title = event.label or event._obj_name
- if event.doc:
- self.description = self.format_doc(event.doc)
- if event.schema:
- self.data = event.schema
-
- form = Form()
- form.op = "subscribeevent"
- form.href = f"{authority}{owner._full_URL_path_prefix}{event.URL_path}"
- form.htv_methodName = "GET"
- form.contentType = "text/plain"
- form.subprotocol = "sse"
- self.forms = [form.asdict()]
-
- @classmethod
- def generate_schema(cls, event : Event, owner : Thing, authority : str) -> typing.Dict[str, JSONSerializable]:
- schema = EventAffordance()
- schema.build(event=event, owner=owner, authority=authority)
- return schema.asdict()
-
-
-@dataclass
-class VersionInfo:
- """
- create version info.
- schema - https://www.w3.org/TR/wot-thing-description11/#versioninfo
- """
- instance : str
- model : str
-
-
-@dataclass
-class SecurityScheme(Schema):
- """
- create security scheme.
- schema - https://www.w3.org/TR/wot-thing-description11/#sec-security-vocabulary-definition
- """
- scheme: str
- description : str
- descriptions : typing.Optional[typing.Dict[str, str]]
- proxy : typing.Optional[str]
-
- def __init__(self):
- super().__init__()
-
- def build(self, name : str, instance):
- self.scheme = 'nosec'
- self.description = 'currently no security scheme supported - use cookie auth directly on hololinked.server.HTTPServer object'
- return { name : self.asdict() }
-
-
-
-@dataclass
-class ThingDescription(Schema):
- """
- generate Thing Description schema of W3 Web of Things standard.
- Refer standard - https://www.w3.org/TR/wot-thing-description11
- Refer schema - https://www.w3.org/TR/wot-thing-description11/#thing
- """
- context : typing.Union[typing.List[str], str, typing.Dict[str, str]]
- type : typing.Optional[typing.Union[str, typing.List[str]]]
- id : str
- title : str
- description : str
- version : typing.Optional[VersionInfo]
- created : typing.Optional[str]
- modified : typing.Optional[str]
- support : typing.Optional[str]
- base : typing.Optional[str]
- properties : typing.List[PropertyAffordance]
- actions : typing.List[ActionAffordance]
- events : typing.List[EventAffordance]
- links : typing.Optional[typing.List[Link]]
- forms : typing.Optional[typing.List[Form]]
- security : typing.Union[str, typing.List[str]]
- securityDefinitions : SecurityScheme
- schemaDefinitions : typing.Optional[typing.List[DataSchema]]
-
- skip_properties = ['expose', 'httpserver_resources', 'zmq_resources', 'gui_resources',
- 'events', 'thing_description', 'GUI', 'object_info' ]
-
- skip_actions = ['_set_properties', '_get_properties', '_add_property', '_get_properties_in_db',
- 'get_postman_collection', 'get_thing_description', 'get_our_temp_thing_description']
-
- # not the best code and logic, but works for now
-
- def __init__(self, instance : Thing, authority : typing.Optional[str] = None,
- allow_loose_schema : typing.Optional[bool] = False, ignore_errors : bool = False) -> None:
- super().__init__()
- self.instance = instance
- self.authority = authority
- self.allow_loose_schema = allow_loose_schema
- self.ignore_errors = ignore_errors
-
- def produce(self) -> typing.Dict[str, typing.Any]:
- self.context = "https://www.w3.org/2022/wot/td/v1.1"
- self.id = f"{self.authority}/{self.instance.instance_name}"
- self.title = self.instance.__class__.__name__
- self.description = Schema.format_doc(self.instance.__doc__) if self.instance.__doc__ else "no class doc provided"
- self.properties = dict()
- self.actions = dict()
- self.events = dict()
- self.forms = NotImplemented
- self.links = NotImplemented
-
- # self.schemaDefinitions = dict(exception=JSONSchema.get_type(Exception))
-
- self.add_interaction_affordances()
- self.add_top_level_forms()
- self.add_security_definitions()
-
- return self.asdict()
-
-
- def add_interaction_affordances(self):
- # properties and actions
- for resource in self.instance.instance_resources.values():
- try:
- if (resource.isproperty and resource.obj_name not in self.properties and
- resource.obj_name not in self.skip_properties and hasattr(resource.obj, "_remote_info") and
- resource.obj._remote_info is not None):
- if (resource.obj_name == 'state' and (not hasattr(self.instance, 'state_machine') or
- not isinstance(self.instance.state_machine, StateMachine))):
- continue
- if resource.obj_name not in self.instance.properties:
- continue
- self.properties[resource.obj_name] = PropertyAffordance.generate_schema(resource.obj,
- self.instance, self.authority)
-
- elif (resource.isaction and resource.obj_name not in self.actions and
- resource.obj_name not in self.skip_actions and hasattr(resource.obj, '_remote_info')):
-
- if resource.bound_obj != self.instance or (resource.obj_name == 'exit' and
- self.instance._owner is not None) or (not hasattr(resource.bound_obj, 'db_engine') and
- resource.obj_name == 'load_properties_from_DB'):
- continue
- self.actions[resource.obj_name] = ActionAffordance.generate_schema(resource.obj,
- self.instance, self.authority)
- except Exception as ex:
- if not self.ignore_errors:
- raise ex from None
- self.instance.logger.error(f"Error while generating schema for {resource.obj_name} - {ex}")
- # Events
- for name, resource in inspect._getmembers(self.instance, lambda o : isinstance(o, Event),
- getattr_without_descriptor_read):
- if not isinstance(resource, Event):
- continue
- if '/change-event' in resource.URL_path:
- continue
- try:
- self.events[name] = EventAffordance.generate_schema(resource, self.instance, self.authority)
- except Exception as ex:
- if not self.ignore_errors:
- raise ex from None
- self.instance.logger.error(f"Error while generating schema for {resource.obj_name} - {ex}")
- for name, resource in inspect._getmembers(self.instance, lambda o : isinstance(o, Thing), getattr_without_descriptor_read):
- if resource is self.instance or isinstance(resource, EventLoop):
- continue
- if self.links is None or self.links == NotImplemented:
- self.links = []
- link = Link()
- link.build(resource, self.instance, self.authority)
- self.links.append(link.asdict())
-
-
- def add_top_level_forms(self):
-
- self.forms = []
-
- properties_end_point = f"{self.authority}{self.instance._full_URL_path_prefix}/properties"
-
- readallproperties = Form()
- readallproperties.href = properties_end_point
- readallproperties.op = "readallproperties"
- readallproperties.htv_methodName = "GET"
- readallproperties.contentType = "application/json"
- # readallproperties.additionalResponses = [AdditionalExpectedResponse().asdict()]
- self.forms.append(readallproperties.asdict())
-
- writeallproperties = Form()
- writeallproperties.href = properties_end_point
- writeallproperties.op = "writeallproperties"
- writeallproperties.htv_methodName = "PUT"
- writeallproperties.contentType = "application/json"
- # writeallproperties.additionalResponses = [AdditionalExpectedResponse().asdict()]
- self.forms.append(writeallproperties.asdict())
-
- readmultipleproperties = Form()
- readmultipleproperties.href = properties_end_point
- readmultipleproperties.op = "readmultipleproperties"
- readmultipleproperties.htv_methodName = "GET"
- readmultipleproperties.contentType = "application/json"
- # readmultipleproperties.additionalResponses = [AdditionalExpectedResponse().asdict()]
- self.forms.append(readmultipleproperties.asdict())
-
- writemultipleproperties = Form()
- writemultipleproperties.href = properties_end_point
- writemultipleproperties.op = "writemultipleproperties"
- writemultipleproperties.htv_methodName = "PATCH"
- writemultipleproperties.contentType = "application/json"
- # writemultipleproperties.additionalResponses = [AdditionalExpectedResponse().asdict()]
- self.forms.append(writemultipleproperties.asdict())
-
- def add_security_definitions(self):
- self.security = 'unimplemented'
- self.securityDefinitions = SecurityScheme().build('unimplemented', self.instance)
-
-
-
-__all__ = [
- ThingDescription.__name__,
- JSONSchema.__name__
-]
\ No newline at end of file
diff --git a/hololinked/server/thing.py b/hololinked/server/thing.py
deleted file mode 100644
index a5338faa..00000000
--- a/hololinked/server/thing.py
+++ /dev/null
@@ -1,658 +0,0 @@
-import logging
-import inspect
-import os
-import re
-import ssl
-import typing
-import warnings
-import zmq
-import zmq.asyncio
-
-from ..param.parameterized import Parameterized, ParameterizedMetaclass, edit_constant as edit_constant_parameters
-from .constants import (JSON, LOGLEVEL, ZMQ_PROTOCOLS, HTTP_METHODS, JSONSerializable)
-from .database import ThingDB, ThingInformation
-from .serializers import _get_serializer_from_user_given_options, BaseSerializer, JSONSerializer
-from .schema_validators import BaseSchemaValidator, JsonSchemaValidator
-from .exceptions import BreakInnerLoop
-from .action import action
-from .dataklasses import HTTPResource, ZMQResource, build_our_temp_TD, get_organised_resources
-from .utils import get_a_filename_from_instance, get_default_logger, getattr_without_descriptor_read
-from .property import Property, ClassProperties
-from .properties import String, ClassSelector, Selector, TypedKeyMappingsConstrainedDict
-from .zmq_message_brokers import RPCServer, ServerTypes, EventPublisher
-from .state_machine import StateMachine
-from .events import Event
-from .json_storage import ThingJsonStorage
-
-
-
-
-class ThingMeta(ParameterizedMetaclass):
- """
- Metaclass for Thing, implements a ``__post_init__()`` call and instantiation of a container for properties' descriptor
- objects. During instantiation of ``Thing``, first serializers, loggers and database connection are created, after which
- the user ``__init__`` is called. In ``__post_init__()``, that runs after user's ``__init__()``, the exposed resources
- are segregated while accounting for any ``Event`` objects or instance specific properties created during init. Properties
- are also loaded from database at this time. One can overload ``__post_init__()`` for any operations that rely on properties
- values loaded from database.
- """
-
- @classmethod
- def __prepare__(cls, name, bases):
- return TypedKeyMappingsConstrainedDict({},
- type_mapping = dict(
- state_machine = (StateMachine, type(None)),
- instance_name = String,
- log_level = Selector,
- logger = ClassSelector,
- logfile = String,
- db_config_file = String,
- object_info = Property, # it should not be set by the user
- ),
- allow_unspecified_keys = True
- )
-
- def __new__(cls, __name, __bases, __dict : TypedKeyMappingsConstrainedDict):
- return super().__new__(cls, __name, __bases, __dict._inner)
-
- def __call__(mcls, *args, **kwargs):
- instance = super().__call__(*args, **kwargs)
-
- instance.__post_init__()
- return instance
-
- def _create_param_container(mcs, mcs_members : dict) -> None:
- """
- creates ``ClassProperties`` instead of ``param``'s own ``Parameters``
- as the default container for descriptors. All properties have definitions
- copied from ``param``.
- """
- mcs._param_container = ClassProperties(mcs, mcs_members)
-
- @property
- def properties(mcs) -> ClassProperties:
- """
- returns ``ClassProperties`` instance instead of ``param``'s own
- ``Parameters`` instance. See code of ``param``.
- """
- return mcs._param_container
-
-
-
-class Thing(Parameterized, metaclass=ThingMeta):
- """
- Subclass from here to expose python objects on the network (with HTTP/TCP) or to other processes (ZeroMQ)
- """
-
- __server_type__ = ServerTypes.THING # not a server, this needs to be removed.
-
- # local properties
- instance_name = String(default=None, regex=r'[A-Za-z]+[A-Za-z_0-9\-\/]*', constant=True, remote=False,
- doc="""Unique string identifier of the instance. This value is used for many operations,
- for example - creating zmq socket address, tables in databases, and to identify the instance
- in the HTTP Server - (http(s)://{domain and sub domain}/{instance name}).
- If creating a big system, instance names are recommended to be unique.""") # type: str
- logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, remote=False,
- doc="""logging.Logger instance to print log messages. Default
- logger with a IO-stream handler and network accessible handler is created
- if none supplied.""") # type: logging.Logger
- zmq_serializer = ClassSelector(class_=(BaseSerializer, str),
- allow_None=True, default='json', remote=False,
- doc="""Serializer used for exchanging messages with python RPC clients. Subclass the base serializer
- or one of the available serializers to implement your own serialization requirements; or, register
- type replacements. Default is JSON. Some serializers like MessagePack improve performance many times
- compared to JSON and can be useful for data intensive applications within python.""") # type: BaseSerializer
- http_serializer = ClassSelector(class_=(JSONSerializer, str), default=None, allow_None=True, remote=False,
- doc="""Serializer used for exchanging messages with a HTTP clients,
- subclass JSONSerializer to implement your own JSON serialization requirements; or,
- register type replacements. Other types of serializers are currently not allowed for HTTP clients.""") # type: JSONSerializer
- schema_validator = ClassSelector(class_=BaseSchemaValidator, default=JsonSchemaValidator, allow_None=True,
- remote=False, isinstance=False,
- doc="""Validator for JSON schema. If not supplied, a default JSON schema validator is created.""") # type: BaseSchemaValidator
-
- # remote properties
- state = String(default=None, allow_None=True, URL_path='/state', readonly=True, observable=True,
- fget=lambda self : self.state_machine.current_state if hasattr(self, 'state_machine') else None,
- doc="current state machine's state if state machine present, None indicates absence of state machine.") #type: typing.Optional[str]
- httpserver_resources = Property(readonly=True, URL_path='/resources/http-server',
- doc="object's resources exposed to HTTP client (through ``hololinked.server.HTTPServer.HTTPServer``)",
- fget=lambda self: self._httpserver_resources ) # type: typing.Dict[str, HTTPResource]
- zmq_resources = Property(readonly=True, URL_path='/resources/zmq-object-proxy',
- doc="object's resources exposed to RPC client, similar to HTTP resources but differs in details.",
- fget=lambda self: self._zmq_resources) # type: typing.Dict[str, ZMQResource]
- gui_resources = Property(readonly=True, URL_path='/resources/portal-app',
- doc="""object's data read by hololinked-portal GUI client, similar to http_resources but differs
- in details.""",
- fget=lambda self: build_our_temp_TD(self)) # type: typing.Dict[str, typing.Any]
- GUI = Property(default=None, allow_None=True, URL_path='/resources/web-gui', fget = lambda self : self._gui,
- doc="GUI specified here will become visible at GUI tab of hololinked-portal dashboard tool")
- object_info = Property(doc="contains information about this object like the class name, script location etc.",
- URL_path='/object-info') # type: ThingInformation
-
-
- def __init__(self, *, instance_name : str, logger : typing.Optional[logging.Logger] = None,
- serializer : typing.Optional[JSONSerializer] = None, **kwargs) -> None:
- """
- Parameters
- ----------
- instance_name: str
- Unique string identifier of the instance. This value is used for many operations,
- for example - creating zmq socket address, tables in databases, and to identify the instance in the HTTP Server -
- (http(s)://{domain and sub domain}/{instance name}).
- If creating a big system, instance names are recommended to be unique.
- logger: logging.Logger, optional
- logging.Logger instance to print log messages. Default logger with a IO-stream handler and network
- accessible handler is created if none supplied.
- serializer: JSONSerializer, optional
- custom JSON serializer. To use separate serializer for python RPC clients and cross-platform
- HTTP clients, use keyword arguments zmq_serializer and http_serializer and leave this argument at None.
- **kwargs:
- zmq_serializer: BaseSerializer | str, optional
- Serializer used for exchanging messages with python RPC clients. If string value is supplied,
- supported are 'msgpack', 'pickle', 'serpent', 'json'. Subclass the base serializer
- ``hololinked.server.serializer.BaseSerializer`` or one of the available serializers to implement your
- own serialization requirements; or, register type replacements. Default is JSON. Some serializers like
- MessagePack improve performance many times compared to JSON and can be useful for data intensive
- applications within python. The serializer supplied here must also be supplied to object proxy from
- ``hololinked.client``.
- http_serializer: JSONSerializer, optional
- serializer used for cross platform HTTP clients.
- use_default_db: bool, Default False
- if True, default SQLite database is created where properties can be stored and loaded. There is no need to supply
- any database credentials. This value can also be set as a class attribute, see docs.
- logger_remote_access: bool, Default True
- if False, network accessible handler is not attached to the logger. This value can also be set as a
- class attribute, see docs.
- schema_validator: BaseSchemaValidator, optional
- schema validator class for JSON schema validation, not supported by ZMQ clients.
- db_config_file: str, optional
- if not using a default database, supply a JSON configuration file to create a connection. Check documentaion
- of ``hololinked.server.database``.
- use_json_file: bool, Default False
- if True, a JSON file will be used as the property storage instead of a database. This value can also be
- set as a class attribute.
- json_filename: str, optional
- If using JSON storage, this filename is used to persist property values. If not provided, a default filename
- is generated based on the instance name.
-
- """
- if instance_name.startswith('/'):
- instance_name = instance_name[1:]
- # Type definitions
- self._owner : typing.Optional[Thing] = None
- self._internal_fixed_attributes : typing.List[str]
- self._full_URL_path_prefix : str
- self._gui = None # filler for a future feature
- self._event_publisher = None # type : typing.Optional[EventPublisher]
- self.rpc_server = None # type: typing.Optional[RPCServer]
- self.message_broker = None # type : typing.Optional[AsyncPollingZMQServer]
- # serializer
- if not isinstance(serializer, JSONSerializer) and serializer != 'json' and serializer is not None:
- raise TypeError("serializer key word argument must be JSONSerializer. If one wishes to use separate serializers " +
- "for python clients and HTTP clients, use zmq_serializer and http_serializer keyword arguments.")
- zmq_serializer = serializer or kwargs.pop('zmq_serializer', 'json')
- http_serializer = serializer if isinstance(serializer, JSONSerializer) else kwargs.pop('http_serializer', 'json')
- zmq_serializer, http_serializer = _get_serializer_from_user_given_options(
- zmq_serializer=zmq_serializer,
- http_serializer=http_serializer
- )
- super().__init__(instance_name=instance_name, logger=logger,
- zmq_serializer=zmq_serializer, http_serializer=http_serializer, **kwargs)
-
- self._prepare_logger(
- log_level=kwargs.get('log_level', None),
- log_file=kwargs.get('log_file', None),
- remote_access=kwargs.get('logger_remote_access', self.__class__.logger_remote_access if hasattr(
- self.__class__, 'logger_remote_access') else False)
- )
- self._prepare_state_machine()
-
- # choose storage type, if use_json_file is True - use JSON storage, else - use database
- if kwargs.get('use_json_file',
- self.__class__.use_json_file if hasattr(self.__class__, 'use_json_file') else False):
- self._prepare_json_storage(filename=kwargs.get('json_filename', f"{get_a_filename_from_instance(self, 'json')}"))
- else:
- self._prepare_DB(kwargs.get('use_default_db', False), kwargs.get('db_config_file', None))
-
-
- def __post_init__(self):
- self._prepare_resources()
- self.load_properties_from_DB()
- self.logger.info(f"initialialised Thing class {self.__class__.__name__} with instance name {self.instance_name}")
-
-
- def _prepare_resources(self):
- """
- this method analyses the members of the class which have '_remote_info' variable declared
- and extracts information necessary to make RPC functionality work.
- """
- # The following dict is to be given to the HTTP server
- self._zmq_resources, self._httpserver_resources, self.instance_resources = get_organised_resources(self)
-
-
- def _prepare_logger(self, log_level : int, log_file : str, remote_access : bool = False):
- from .logger import RemoteAccessHandler
- if self.logger is None:
- self.logger = get_default_logger(self.instance_name,
- logging.INFO if not log_level else log_level,
- None if not log_file else log_file)
- if remote_access:
- if not any(isinstance(handler, RemoteAccessHandler) for handler in self.logger.handlers):
- self._remote_access_loghandler = RemoteAccessHandler(instance_name='logger',
- maxlen=500, emit_interval=1, logger=self.logger)
- # thing has its own logger so we dont recreate one for
- # remote access handler
- self.logger.addHandler(self._remote_access_loghandler)
-
- if not isinstance(self, logging.Logger):
- for handler in self.logger.handlers:
- # if remote access is True or not, if a default handler is found make a variable for it anyway
- if isinstance(handler, RemoteAccessHandler):
- self._remote_access_loghandler = handler
-
-
- def _prepare_state_machine(self):
- if hasattr(self, 'state_machine'):
- self.state_machine._prepare(self)
- self.logger.debug("setup state machine")
-
-
- def _prepare_DB(self, default_db : bool = False, config_file : str = None):
- if not default_db and not config_file:
- self.object_info
- return
- # 1. create engine
- self.db_engine = ThingDB(instance=self, config_file=None if default_db else config_file,
- serializer=self.zmq_serializer) # type: ThingDB
- # 2. create an object metadata to be used by different types of clients
- object_info = self.db_engine.fetch_own_info()
- if object_info is not None:
- self._object_info = object_info
- # 3. enter properties to DB if not already present
- if self.object_info.class_name != self.__class__.__name__:
- raise ValueError("Fetched instance name and class name from database not matching with the ",
- "current Thing class/subclass. You might be reusing an instance name of another subclass ",
- "and did not remove the old data from database. Please clean the database using database tools to ",
- "start fresh.")
-
- def _prepare_json_storage(self, filename: str = None):
- if not filename:
- filename = f"{get_a_filename_from_instance(self, 'json')}"
- self.db_engine = ThingJsonStorage(filename=filename, instance=self)
-
-
- @object_info.getter
- def _get_object_info(self):
- if not hasattr(self, '_object_info'):
- self._object_info = ThingInformation(
- instance_name = self.instance_name,
- class_name = self.__class__.__name__,
- script = os.path.dirname(os.path.abspath(inspect.getfile(self.__class__))),
- http_server = "USER_MANAGED",
- kwargs = "USER_MANAGED",
- eventloop_instance_name = "USER_MANAGED",
- level = "USER_MANAGED",
- level_type = "USER_MANAGED"
- )
- return self._object_info
-
- @object_info.setter
- def _set_object_info(self, value):
- self._object_info = ThingInformation(**value)
- for name, thing in inspect._getmembers(self, lambda o: isinstance(o, Thing), getattr_without_descriptor_read):
- thing._object_info.http_server = self._object_info.http_server
-
-
- @property
- def properties(self) -> ClassProperties:
- """container for the property descriptors of the object."""
- return self.parameters
-
- @action(URL_path='/properties', http_method=HTTP_METHODS.GET)
- def _get_properties(self, **kwargs) -> typing.Dict[str, typing.Any]:
- """
- """
- skip_props = ["httpserver_resources", "zmq_resources", "gui_resources", "GUI", "object_info"]
- for prop_name in skip_props:
- if prop_name in kwargs:
- raise RuntimeError("GUI, httpserver resources, RPC resources , object info etc. cannot be queried" +
- " using multiple property fetch.")
- data = {}
- if len(kwargs) == 0:
- for name, prop in self.properties.descriptors.items():
- if name in skip_props or not isinstance(prop, Property):
- continue
- if prop._remote_info is None:
- continue
- data[name] = prop.__get__(self, type(self))
- elif 'names' in kwargs:
- names = kwargs.get('names')
- if not isinstance(names, (list, tuple, str)):
- raise TypeError(f"Specify properties to be fetched as a list, tuple or comma separated names. Givent type {type(names)}")
- if isinstance(names, str):
- names = names.split(',')
- for requested_prop in names:
- if not isinstance(requested_prop, str):
- raise TypeError(f"property name must be a string. Given type {type(requested_prop)}")
- if not isinstance(self.properties[requested_prop], Property) or self.properties[requested_prop]._remote_info is None:
- raise AttributeError("this property is not remote accessible")
- data[requested_prop] = self.properties[requested_prop].__get__(self, type(self))
- elif len(kwargs.keys()) != 0:
- for rename, requested_prop in kwargs.items():
- if not isinstance(self.properties[requested_prop], Property) or self.properties[requested_prop]._remote_info is None:
- raise AttributeError("this property is not remote accessible")
- data[rename] = self.properties[requested_prop].__get__(self, type(self))
- return data
-
- @action(URL_path='/properties', http_method=[HTTP_METHODS.PUT, HTTP_METHODS.PATCH])
- def _set_properties(self, **values : typing.Dict[str, typing.Any]) -> None:
- """
- set properties whose name is specified by keys of a dictionary
-
- Parameters
- ----------
- values: Dict[str, Any]
- dictionary of property names and its values
- """
- produced_error = False
- errors = ''
- for name, value in values.items():
- try:
- setattr(self, name, value)
- except Exception as ex:
- self.logger.error(f"could not set attribute {name} due to error {str(ex)}")
- errors += f'{name} : {str(ex)}\n'
- produced_error = True
- if produced_error:
- ex = RuntimeError("Some properties could not be set due to errors. " +
- "Check exception notes or server logs for more information.")
- ex.__notes__ = errors
- raise ex from None
-
- @action(URL_path='/properties/db', http_method=HTTP_METHODS.GET)
- def _get_properties_in_db(self) -> typing.Dict[str, JSONSerializable]:
- """
- get all properties in the database
-
- Returns
- -------
- Dict[str, JSONSerializable]
- dictionary of property names and their values
- """
- if not hasattr(self, 'db_engine'):
- return {}
- props = self.db_engine.get_all_properties()
- final_list = {}
- for name, prop in props.items():
- try:
- self.http_serializer.dumps(prop)
- final_list[name] = prop
- except Exception as ex:
- self.logger.error(f"could not serialize property {name} to JSON due to error {str(ex)}, skipping this property")
- return final_list
-
- @action(URL_path='/properties', http_method=HTTP_METHODS.POST)
- def _add_property(self, name : str, prop : JSON) -> None:
- """
- add a property to the object
-
- Parameters
- ----------
- name: str
- name of the property
- prop: Property
- property object
- """
- raise NotImplementedError("this method will be implemented properly in a future release")
- prop = Property(**prop)
- self.properties.add(name, prop)
- self._prepare_resources()
- # instruct the clients to fetch the new resources
-
- @property
- def event_publisher(self) -> EventPublisher:
- """
- event publishing PUB socket owning object, valid only after
- ``run()`` is called, otherwise raises AttributeError.
- """
- return self._event_publisher
-
- @event_publisher.setter
- def event_publisher(self, value : EventPublisher) -> None:
- if self._event_publisher is not None:
- if value is not self._event_publisher:
- raise AttributeError("Can set event publisher only once")
-
- def recusively_set_event_publisher(obj : Thing, publisher : EventPublisher) -> None:
- for name, evt in inspect._getmembers(obj, lambda o: isinstance(o, Event), getattr_without_descriptor_read):
- assert isinstance(evt, Event), "object is not an event"
- # above is type definition
- e = evt.__get__(obj, type(obj))
- e.publisher = publisher
- e._remote_info.socket_address = publisher.socket_address
- self.logger.info(f"registered event '{evt.friendly_name}' serving at PUB socket with address : {publisher.socket_address}")
- for name, subobj in inspect._getmembers(obj, lambda o: isinstance(o, Thing), getattr_without_descriptor_read):
- if name == '_owner':
- continue
- recusively_set_event_publisher(subobj, publisher)
- obj._event_publisher = publisher
-
- recusively_set_event_publisher(self, value)
-
-
- @action(URL_path='/properties/db-reload', http_method=HTTP_METHODS.POST)
- def load_properties_from_DB(self):
- """
- Load and apply property values which have ``db_init`` or ``db_persist``
- set to ``True`` from database
- """
- if not hasattr(self, 'db_engine'):
- return
- missing_properties = self.db_engine.create_missing_properties(self.__class__.properties.db_init_objects,
- get_missing_property_names=True)
- # 4. read db_init and db_persist objects
- with edit_constant_parameters(self):
- for db_prop, value in self.db_engine.get_all_properties().items():
- try:
- prop_desc = self.properties.descriptors[db_prop]
- if (prop_desc.db_init or prop_desc.db_persist) and db_prop not in missing_properties:
- setattr(self, db_prop, value) # type: ignore
- except Exception as ex:
- self.logger.error(f"could not set attribute {db_prop} due to error {str(ex)}")
-
-
- @action(URL_path='/resources/postman-collection', http_method=HTTP_METHODS.GET)
- def get_postman_collection(self, domain_prefix : str = None):
- """
- organised postman collection for this object
- """
- from .api_platforms import postman_collection
- return postman_collection.build(instance=self,
- domain_prefix=domain_prefix if domain_prefix is not None else self._object_info.http_server)
-
-
- @action(URL_path='/resources/wot-td', http_method=HTTP_METHODS.GET)
- def get_thing_description(self, authority : typing.Optional[str] = None, ignore_errors : bool = False):
- # allow_loose_schema : typing.Optional[bool] = False):
- """
- generate thing description schema of Web of Things https://www.w3.org/TR/wot-thing-description11/.
- one can use the node-wot as a client for the object with the generated schema
- (https://github.com/eclipse-thingweb/node-wot). Other WoT related tools based on TD will be compatible.
- Composed Things that are not the top level object is currently not supported.
-
- Parameters
- ----------
- authority: str, optional
- protocol with DNS or protocol with hostname+port, for example 'https://my-pc:8080' or
- 'http://my-pc:9090' or 'https://IT-given-domain-name'. If absent, a value will be automatically
- given using ``socket.gethostname()`` and the port at which the last HTTPServer (``hololinked.server.HTTPServer``)
- attached to this object was running.
- ignore_errors: bool, optional, Default False
- if True, offending interaction affordances will be removed from the schema. This is useful to build partial but working
- schema always.
- Returns
- -------
- hololinked.wot.td.ThingDescription
- represented as an object in python, gets automatically serialized to JSON when pushed out of the socket.
- """
- # allow_loose_schema: bool, optional, Default False
- # Experimental properties, actions or events for which schema was not given will be supplied with a suitable
- # value for node-wot to ignore validation or claim the accessed value for complaint with the schema.
- # In other words, schema validation will always pass.
- from .td import ThingDescription
- return ThingDescription(instance=self, authority=authority or self._object_info.http_server,
- allow_loose_schema=False, ignore_errors=ignore_errors).produce() #allow_loose_schema)
-
-
- @action(URL_path='/exit', http_method=HTTP_METHODS.POST)
- def exit(self) -> None:
- """
- Exit the object without killing the eventloop that runs this object. If Thing was
- started using the run() method, the eventloop is also killed. This method can
- only be called remotely.
- """
- if self.rpc_server is None:
- return
- if self._owner is None:
- self.rpc_server.stop_polling()
- raise BreakInnerLoop # stops the inner loop of the object
- else:
- warnings.warn("call exit on the top object, composed objects cannot exit the loop.", RuntimeWarning)
-
- @action()
- def ping(self) -> None:
- """ping the Thing to see if it is alive"""
- pass
-
- def run(self,
- zmq_protocols : typing.Union[typing.Sequence[ZMQ_PROTOCOLS],
- ZMQ_PROTOCOLS] = ZMQ_PROTOCOLS.IPC,
- # expose_eventloop : bool = False,
- **kwargs
- ) -> None:
- """
- Quick-start ``Thing`` server by creating a default eventloop & ZMQ servers. This
- method is blocking until exit() is called.
-
- Parameters
- ----------
- zmq_protocols: Sequence[ZMQ_PROTOCOLS] | ZMQ_Protocools, Default ZMQ_PROTOCOLS.IPC or "IPC"
- zmq transport layers at which the object is exposed.
- TCP - provides network access apart from HTTP - please supply a socket address additionally.
- IPC - inter process communication - connection can be made from other processes running
- locally within same computer. No client on the network will be able to contact the object using
- this transport. INPROC - one main python process spawns several threads in one of which the ``Thing``
- the running. The object can be contacted by a client on another thread but neither from other processes
- or the network. One may use more than one form of transport. All requests made will be anyway queued internally
- irrespective of origin.
-
- **kwargs
- tcp_socket_address: str, optional
- socket_address for TCP access, for example: tcp://0.0.0.0:61234
- context: zmq.asyncio.Context, optional
- zmq context to be used. If not supplied, a new context is created.
- For INPROC clients, you need to provide a context.
- """
- # expose_eventloop: bool, False
- # expose the associated Eventloop which executes the object. This is generally useful for remotely
- # adding more objects to the same event loop.
- # dont specify http server as a kwarg, as the other method run_with_http_server has to be used
- self._prepare_resources()
- context = kwargs.get('context', None)
- if context is not None and not isinstance(context, zmq.asyncio.Context):
- raise TypeError("context must be an instance of zmq.asyncio.Context")
- context = context or zmq.asyncio.Context()
-
- self.rpc_server = RPCServer(
- instance_name=self.instance_name,
- server_type=self.__server_type__.value,
- context=context,
- protocols=zmq_protocols,
- zmq_serializer=self.zmq_serializer,
- http_serializer=self.http_serializer,
- tcp_socket_address=kwargs.get('tcp_socket_address', None),
- logger=self.logger
- )
- self.message_broker = self.rpc_server.inner_inproc_server
- self.event_publisher = self.rpc_server.event_publisher
-
- from .eventloop import EventLoop
- self.event_loop = EventLoop(
- instance_name=f'{self.instance_name}/eventloop',
- things=[self],
- logger=self.logger,
- zmq_serializer=self.zmq_serializer,
- http_serializer=self.http_serializer,
- expose=False, # expose_eventloop
- )
-
- if kwargs.get('http_server', None):
- from .HTTPServer import HTTPServer
- httpserver = kwargs.pop('http_server')
- assert isinstance(httpserver, HTTPServer)
- httpserver._zmq_protocol = ZMQ_PROTOCOLS.INPROC
- httpserver._zmq_inproc_socket_context = context
- httpserver._zmq_inproc_event_context = self.event_publisher.context
- assert httpserver.all_ok
- httpserver.tornado_instance.listen(port=httpserver.port, address=httpserver.address)
- self.event_loop.run()
-
-
- def run_with_http_server(self, port : int = 8080, address : str = '0.0.0.0',
- # host : str = None,
- allowed_clients : typing.Union[str, typing.Iterable[str]] = None,
- ssl_context : ssl.SSLContext = None, # protocol_version : int = 1,
- # network_interface : str = 'Ethernet',
- **kwargs):
- """
- Quick-start ``Thing`` server by creating a default eventloop & servers. This
- method is fully blocking.
-
- Parameters
- ----------
- port: int
- the port at which the HTTP server should be run (unique)
- address: str
- set custom IP address, default is localhost (0.0.0.0)
- ssl_context: ssl.SSLContext | None
- use it for highly customized SSL context to provide encrypted communication
- allowed_clients
- serves request and sets CORS only from these clients, other clients are rejected with 403. Unlike pure CORS
- feature, the server resource is not even executed if the client is not an allowed client.
- **kwargs,
- certfile: str
- alternative to SSL context, provide certificate file & key file to allow the server to create a SSL connection on its own
- keyfile: str
- alternative to SSL context, provide certificate file & key file to allow the server to create a SSL connection on its own
- request_handler: RPCHandler
- custom web request handler of your choice
- event_handler: BaseHandler | EventHandler
- custom event handler of your choice for handling events
- """
- # network_interface: str
- # Currently there is no logic to detect the IP addresss (as externally visible) correctly, therefore please
- # send the network interface name to retrieve the IP. If a DNS server is present, you may leave this field
- # host: str
- # Host Server to subscribe to coordinate starting sequence of things & web GUI
-
- from .HTTPServer import HTTPServer
-
- http_server = HTTPServer(
- [self.instance_name], logger=self.logger, serializer=self.http_serializer,
- port=port, address=address, ssl_context=ssl_context,
- allowed_clients=allowed_clients, schema_validator=self.schema_validator,
- # network_interface=network_interface,
- **kwargs,
- )
-
- self.run(
- zmq_protocols=ZMQ_PROTOCOLS.INPROC,
- http_server=http_server,
- context=kwargs.get('context', None)
- ) # blocks until exit is called
-
- http_server.tornado_instance.stop()
-
-
-
-
diff --git a/hololinked/server/utils.py b/hololinked/server/utils.py
deleted file mode 100644
index a13092ed..00000000
--- a/hololinked/server/utils.py
+++ /dev/null
@@ -1,256 +0,0 @@
-import sys
-import logging
-import re
-import asyncio
-import inspect
-import typing
-import asyncio
-import types
-import traceback
-import typing
-import ifaddr
-
-
-
-def get_IP_from_interface(interface_name : str = 'Ethernet', adapter_name = None) -> str:
- """
- Get IP address of specified interface. Generally necessary when connected to the network
- through multiple adapters and a server binds to only one adapter at a time.
-
- Parameters
- ----------
- interface_name: str
- Ethernet, Wifi etc.
- adapter_name: optional, str
- name of the adapter if available
-
- Returns
- -------
- str:
- IP address of the interface
- """
- adapters = ifaddr.get_adapters(include_unconfigured=True)
- for adapter in adapters:
- if not adapter_name:
- for ip in adapter.ips:
- if interface_name == ip.nice_name:
- if ip.is_IPv4:
- return ip.ip
- elif adapter_name == adapter.nice_name:
- for ip in adapter.ips:
- if interface_name == ip.nice_name:
- if ip.is_IPv4:
- return ip.ip
- raise ValueError(f"interface name {interface_name} not found in system interfaces.")
-
-
-def format_exception_as_json(exc : Exception) -> typing.Dict[str, typing.Any]:
- """
- return exception as a JSON serializable dictionary
- """
- return {
- "message" : str(exc),
- "type" : repr(exc).split('(', 1)[0],
- "traceback" : traceback.format_exc().splitlines(),
- "notes" : exc.__notes__ if hasattr(exc, "__notes__") else None
- }
-
-
-def pep8_to_URL_path(word : str) -> str:
- """
- Make an underscored, lowercase form from the expression in the string.
- Example::
- >>> pep8_to_dashed_URL("device_type")
- 'device-type'
- """
- val = re.sub(r'_+', '-', word.lstrip('_').rstrip('_'))
- return val.replace(' ', '-')
-
-
-def get_default_logger(name : str, log_level : int = logging.INFO, log_file = None,
- format : str = '%(levelname)-8s - %(asctime)s:%(msecs)03d - %(name)s - %(message)s' ) -> logging.Logger:
- """
- the default logger used by most of hololinked package, when arguments are not modified.
- StreamHandler is always created, pass log_file for a FileHandler as well.
-
- Parameters
- ----------
- name: str
- name of logger
- log_level: int
- log level
- log_file: str
- valid path to file
- format: str
- log format
-
- Returns
- -------
- logging.Logger:
- created logger
- """
- logger = logging.getLogger(name)
- logger.setLevel(log_level)
- default_handler = logging.StreamHandler(sys.stdout)
- default_handler.setFormatter(logging.Formatter(format, datefmt='%Y-%m-%dT%H:%M:%S'))
- logger.addHandler(default_handler)
- if log_file:
- file_handler = logging.FileHandler(log_file)
- file_handler.setFormatter(logging.Formatter(format, datefmt='%Y-%m-%dT%H:%M:%S'))
- logger.addHandler(file_handler)
- return logger
-
-
-def run_coro_sync(coro : typing.Coroutine):
- """
- run coroutine synchronously
- """
- try:
- eventloop = asyncio.get_event_loop()
- except RuntimeError:
- eventloop = asyncio.new_event_loop()
- asyncio.set_event_loop(eventloop)
- if eventloop.is_running():
- raise RuntimeError(f"asyncio event loop is already running, cannot setup coroutine {coro.__name__} to run sync, please await it.")
- # not the same as RuntimeError catch above.
- else:
- return eventloop.run_until_complete(coro)
-
-
-def run_callable_somehow(method : typing.Union[typing.Callable, typing.Coroutine]) -> typing.Any:
- """
- run method if synchronous, or when async, either schedule a coroutine or run it until its complete
- """
- if not (asyncio.iscoroutinefunction(method) or asyncio.iscoroutine(method)):
- return method()
- try:
- eventloop = asyncio.get_event_loop()
- except RuntimeError:
- eventloop = asyncio.new_event_loop()
- asyncio.set_event_loop(eventloop)
- if asyncio.iscoroutinefunction(method):
- coro = method()
- else:
- coro = method
- if eventloop.is_running():
- # task = # check later if lambda is necessary
- eventloop.create_task(coro)
- else:
- # task = method
- return eventloop.run_until_complete(coro)
-
-
-def get_signature(callable : typing.Callable) -> typing.Tuple[typing.List[str], typing.List[type]]:
- """
- Retrieve the names and types of arguments based on annotations for the given callable.
-
- Parameters
- ----------
- callable: Callable
- function or method (not tested with __call__)
-
- Returns
- -------
- tuple: List[str], List[type]
- arguments name and types respectively
- """
- arg_names = []
- arg_types = []
-
- for param in inspect.signature(callable).parameters.values():
- arg_name = param.name
- arg_type = param.annotation if param.annotation != inspect.Parameter.empty else None
-
- arg_names.append(arg_name)
- arg_types.append(arg_type)
-
- return arg_names, arg_types
-
-
-def getattr_without_descriptor_read(instance, key):
- """
- supply to inspect._get_members (not inspect.get_members) to avoid calling
- __get__ on hardware attributes
- """
- if key in instance.__dict__:
- return instance.__dict__[key]
- mro = mro = (instance.__class__,) + inspect.getmro(instance.__class__)
- for base in mro:
- if key in base.__dict__:
- value = base.__dict__[key]
- if isinstance(value, types.FunctionType):
- method = getattr(instance, key, None)
- if isinstance(method, types.MethodType):
- return method
- return value
- # for descriptor, first try to find it in class dict or instance dict (for instance descriptors (per_instance_descriptor=True))
- # and then getattr from the instance. For descriptors/property, it will be mostly at above two levels.
- return getattr(instance, key, None) # we can deal with None where we use this getter, so dont raise AttributeError
-
-
-def isclassmethod(method):
- """https://stackoverflow.com/questions/19227724/check-if-a-function-uses-classmethod"""
- bound_to = getattr(method, '__self__', None)
- if not isinstance(bound_to, type):
- # must be bound to a class
- return False
- name = method.__name__
- for cls in bound_to.__mro__:
- descriptor = vars(cls).get(name)
- if descriptor is not None:
- return isinstance(descriptor, classmethod)
- return False
-
-
-def issubklass(obj, cls):
- """
- Safely check if `obj` is a subclass of `cls`.
-
- Parameters:
- obj: The object to check if it's a subclass.
- cls: The class (or tuple of classes) to compare against.
-
- Returns:
- bool: True if `obj` is a subclass of `cls`, False otherwise.
- """
- try:
- # Check if obj is a class or a tuple of classes
- if isinstance(obj, type):
- return issubclass(obj, cls)
- elif isinstance(obj, tuple):
- # Ensure all elements in the tuple are classes
- return all(isinstance(o, type) for o in obj) and issubclass(obj, cls)
- else:
- return False
- except TypeError:
- return False
-
-
-def get_a_filename_from_instance(thing: type, extension: str = 'json') -> str:
- class_name = thing.__class__.__name__
-
- # Remove invalid characters from the instance name
- safe_instance_name = re.sub(r'[<>:"/\\|?*\x00-\x1F]+', '_', thing.instance_name)
- # Collapse consecutive underscores into one
- safe_instance_name = re.sub(r'_+', '_', safe_instance_name)
- # Remove leading and trailing underscores
- safe_instance_name = safe_instance_name.strip('_')
-
- filename = f"{class_name}-{safe_instance_name or '_'}.{extension}"
- return filename
-
-
-__all__ = [
- get_IP_from_interface.__name__,
- format_exception_as_json.__name__,
- pep8_to_URL_path.__name__,
- get_default_logger.__name__,
- run_coro_sync.__name__,
- run_callable_somehow.__name__,
- get_signature.__name__,
- isclassmethod.__name__,
- issubklass.__name__
-]
-
-
-
diff --git a/hololinked/server/zmq.py b/hololinked/server/zmq.py
new file mode 100644
index 00000000..ffeaa329
--- /dev/null
+++ b/hololinked/server/zmq.py
@@ -0,0 +1,135 @@
+import asyncio
+import typing
+import zmq
+import zmq.asyncio
+
+from ..constants import ZMQ_TRANSPORTS
+from ..utils import get_current_async_loop
+from ..core.thing import Thing
+from ..core.zmq.brokers import AsyncEventConsumer, AsyncZMQServer, EventPublisher
+from ..core.zmq.rpc_server import RPCServer
+
+
+
+class ZMQServer(RPCServer):
+    """RPC server exposing registered things over externally visible ZMQ transports (TCP and/or IPC)."""
+    def __init__(self, *,
+            id: str,
+            things: typing.List["Thing"],
+            context: zmq.asyncio.Context | None = None,
+            transports: ZMQ_TRANSPORTS = ZMQ_TRANSPORTS.IPC,
+            **kwargs
+        ) -> None:
+        self.ipc_server = self.tcp_server = None
+        self.ipc_event_publisher = self.tcp_event_publisher = self.inproc_events_proxy = None
+        # pop the TCP-only option first so neither the parent nor the IPC sockets receive it
+        tcp_socket_address = kwargs.pop('tcp_socket_address', None)
+        super().__init__(id=id, things=things, context=context, **kwargs)
+
+        if isinstance(transports, str):
+            transports = [transports]
+        elif not isinstance(transports, list):
+            raise TypeError(f"unsupported transport type : {type(transports)}")
+
+        # initialise every externally visible protocol
+        if ZMQ_TRANSPORTS.TCP in transports or "TCP" in transports:
+            self.tcp_server = AsyncZMQServer(
+                id=self.id,
+                context=self.context,
+                transport=ZMQ_TRANSPORTS.TCP,
+                socket_address=tcp_socket_address,
+                **kwargs
+            )
+            # events use the next port; when no address was given, reuse the address the server bound to (assumes AsyncZMQServer sets `socket_address` - TODO confirm)
+            host, port = (tcp_socket_address or self.tcp_server.socket_address).rsplit(':', 1)
+            tcp_socket_address = f"{host}:{int(port) + 1}"
+            self.tcp_event_publisher = EventPublisher(
+                id=f'{self.id}/event-publisher',
+                context=self.context,
+                transport=ZMQ_TRANSPORTS.TCP,
+                socket_address=tcp_socket_address,
+                **kwargs
+            )
+        if ZMQ_TRANSPORTS.IPC in transports or "IPC" in transports:
+            self.ipc_server = AsyncZMQServer(
+                id=self.id,
+                context=self.context,
+                transport=ZMQ_TRANSPORTS.IPC,
+                **kwargs
+            )
+            self.ipc_event_publisher = EventPublisher(
+                id=f'{self.id}/event-publisher',
+                context=self.context,
+                transport=ZMQ_TRANSPORTS.IPC,
+                **kwargs
+            )
+        if self.ipc_event_publisher is not None or self.tcp_event_publisher is not None:
+            self.inproc_events_proxy = AsyncEventConsumer(
+                id=f'{self.id}/event-proxy',
+                event_unique_identifier="",
+                socket_address=self.event_publisher.socket_address,
+                context=self.context,
+                **kwargs
+            )
+
+
+    def run_zmq_request_listener(self) -> None:
+        # doc in parent class; one request-listener task per active transport
+        loop = get_current_async_loop()
+        for server in (self.ipc_server, self.tcp_server):
+            if server is not None:
+                loop.call_soon(lambda server=server: asyncio.create_task(
+                                        self.recv_requests_and_dispatch_jobs(server)))
+        if self.inproc_events_proxy is not None:
+            loop.call_soon(lambda : asyncio.create_task(self.tunnel_events_from_inproc()))
+        super().run_zmq_request_listener()
+
+
+    async def tunnel_events_from_inproc(self) -> None:
+        if not self.inproc_events_proxy:
+            return
+        self.inproc_events_proxy.subscribe()
+        while True:
+            try:
+                event = await self.inproc_events_proxy.receive(raise_interrupt_as_exception=True)
+                if self.ipc_event_publisher is not None:
+                    self.ipc_event_publisher.socket.send_multipart(event.byte_array)
+                # raw frames are forwarded untouched so IPC subscribers see the original event
+                if self.tcp_event_publisher is not None:
+                    self.tcp_event_publisher.socket.send_multipart(event.byte_array)
+                # NOTE: interrupt() surfaces here as ConnectionAbortedError to end the loop cleanly
+            except ConnectionAbortedError:
+                break
+            except Exception as e:
+                self.logger.error(f"error in tunneling events from inproc: {e}")
+                break
+
+
+    def stop(self) -> None:
+        """Interrupt polling on every active transport, then delegate to the parent."""
+        for server in (self.ipc_server, self.tcp_server):
+            if server is not None:
+                server.stop_polling()
+        if self.inproc_events_proxy is not None:
+            get_current_async_loop().call_soon(lambda : asyncio.create_task(self.inproc_events_proxy.interrupt()))
+        super().stop()
+
+
+    def exit(self) -> None:
+        """Stop polling, close all transport sockets and terminate the context if owned."""
+        try:
+            self.stop()
+            if self.ipc_server is not None:
+                self.ipc_server.exit()
+            if self.tcp_server is not None:
+                self.tcp_server.exit()
+            if self.req_rep_server is not None:
+                self.req_rep_server.exit()
+        except Exception as ex:
+            # best-effort cleanup: narrow catch & log instead of a bare silent swallow
+            self.logger.warning(f"could not cleanly exit ZMQ sockets - {ex}")
+        if self._terminate_context:
+            self.context.term()
+            self.logger.info("terminated context of socket '{}' of type '{}'".format(self.id, self.__class__))
+
+
diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py
deleted file mode 100644
index 2770233e..00000000
--- a/hololinked/server/zmq_message_brokers.py
+++ /dev/null
@@ -1,2511 +0,0 @@
-import builtins
-import os
-import threading
-import time
-import warnings
-import zmq
-import zmq.asyncio
-import asyncio
-import logging
-import typing
-from uuid import uuid4
-from collections import deque
-from enum import Enum
-from zmq.utils.monitor import parse_monitor_message
-
-from .utils import *
-from .config import global_config
-from .constants import JSON, ZMQ_PROTOCOLS, CommonRPC, ServerTypes, ZMQSocketType, ZMQ_EVENT_MAP
-from .serializers import BaseSerializer, JSONSerializer, _get_serializer_from_user_given_options
-
-
-
-# message types
-HANDSHAKE = b'HANDSHAKE'
-INVALID_MESSAGE = b'INVALID_MESSAGE'
-TIMEOUT = b'TIMEOUT'
-INSTRUCTION = b'INSTRUCTION'
-REPLY = b'REPLY'
-EXCEPTION = b'EXCEPTION'
-INTERRUPT = b'INTERRUPT'
-ONEWAY = b'ONEWAY'
-SERVER_DISCONNECTED = 'EVENT_DISCONNECTED'
-EXIT = b'EXIT'
-
-EVENT = b'EVENT'
-EVENT_SUBSCRIPTION = b'EVENT_SUBSCRIPTION'
-SUCCESS = b'SUCCESS'
-
-# empty data
-EMPTY_BYTE = b''
-EMPTY_DICT = {}
-
-# client types
-HTTP_SERVER = b'HTTP_SERVER'
-PROXY = b'PROXY'
-TUNNELER = b'TUNNELER' # message passer from inproc client to inrproc server within RPC
-
-
-"""
-Message indices
-
-client's message to server: |br|
-[address, bytes(), client type, message type, message id, timeout, instruction, arguments, execution_context] |br|
-[ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ] |br|
-
-[address, bytes(), server_type, message_type, message id, data]|br|
-[ 0 , 1 , 2 , 3 , 4 , 5 ]|br|
-"""
-# CM = Client Message
-CM_INDEX_ADDRESS = 0
-CM_INDEX_CLIENT_TYPE = 2
-CM_INDEX_MESSAGE_TYPE = 3
-CM_INDEX_MESSAGE_ID = 4
-CM_INDEX_TIMEOUT = 5
-CM_INDEX_INSTRUCTION = 6
-CM_INDEX_ARGUMENTS = 7
-CM_INDEX_EXECUTION_CONTEXT = 8
-
-# SM = Server Message
-SM_INDEX_ADDRESS = 0
-SM_INDEX_SERVER_TYPE = 2
-SM_INDEX_MESSAGE_TYPE = 3
-SM_INDEX_MESSAGE_ID = 4
-SM_INDEX_DATA = 5
-
-# Server types - currently useless metadata
-
-byte_types = (bytes, bytearray, memoryview)
-
-
-# Function to get the socket type name from the enum
-def get_socket_type_name(socket_type):
- try:
- return ZMQSocketType(socket_type).name
- except ValueError:
- return "UNKNOWN"
-
-
-
-class BaseZMQ:
- """
- Base class for all ZMQ message brokers. Implements socket creation, logger, serializer instantiation
- which is common to all server and client implementations. For HTTP clients, http_serializer is necessary and
- for RPC clients, any of the allowed serializer is possible.
-
- Parameters
- ----------
- instance_name: str
- instance name of the serving ``Thing``
- server_type: Enum
- metadata about the nature of the server
- http_serializer: hololinked.server.serializers.JSONSerializer
- serializer used to send message to HTTP Server
- zmq_serializer: any of hololinked.server.serializers.serializer, default serpent
- serializer used to send message to RPC clients
- logger: logging.Logger, Optional
- logger, on will be created while creating a socket automatically if None supplied
- """
- # init of this class must always take empty arguments due to inheritance structure
- def __init__(self) -> None:
- self.instance_name : str
- self.logger : logging.Logger
-
- def exit(self) -> None:
- """
- Cleanup method to terminate ZMQ sockets and contexts before quitting. Called by `__del__()`
- automatically. Each subclass server/client should implement their version of exiting if necessary.
- """
- if self.logger is None:
- self.logger = get_default_logger('{}|{}'.format(self.__class__.__name__, self.instance_name))
-
- def __del__(self) -> None:
- self.exit()
-
- def create_socket(self, *, identity : str, bind : bool, context : typing.Union[zmq.asyncio.Context, zmq.Context],
- protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC,
- socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None:
- """
- Create a socket with certain specifications. When successful, a logger is also created. Supported ZeroMQ protocols
- are TCP, IPC & INPROC. For IPC sockets, a file is created under TEMP_DIR of global configuration.
-
- Parameters
- ----------
- identity: str
- especially useful for clients to have a different ID other than the ``instance_name`` of the ``Thing``.
- For servers, supplying the ``instance_name`` is sufficient.
- bind: bool
- whether to bind (server) or connect (client)
- context: zmq.Context or zmq.asyncio.Context
- ZeroMQ Context object that creates the socket
- protocol: Enum
- TCP, IPC or INPROC. Message crafting/passing/routing is protocol invariant as suggested by ZeroMQ docs.
- socket_type: zmq.SocketType, default zmq.ROUTER
- Usually a ROUTER socket is implemented for both client-server and peer-to-peer communication
- **kwargs: dict
- socket_address: str
- applicable only for TCP socket to find the correct socket to connect.
- log_level: int
- logging.Logger log level
-
- Returns
- -------
- None
-
- Raises
- ------
- NotImplementedError
- if protocol other than TCP, IPC or INPROC is used
- RuntimeError
- if protocol is TCP, a socket connect from client side is requested but a socket address is not supplied
- """
- self.context = context
- self.identity = identity
- self.socket = self.context.socket(socket_type)
- self.socket.setsockopt_string(zmq.IDENTITY, identity)
- socket_address = kwargs.get('socket_address', None)
- if protocol == ZMQ_PROTOCOLS.IPC or protocol == "IPC":
- if socket_address is None:
- split_instance_name = self.instance_name.split('/')
- socket_dir = os.sep + os.sep.join(split_instance_name[:-1]) if len(split_instance_name) > 1 else ''
- directory = global_config.TEMP_DIR + socket_dir
- if not os.path.exists(directory):
- os.makedirs(directory)
- # re-compute for IPC because it looks for a file in a directory
- socket_address = "ipc://{}{}{}.ipc".format(directory, os.sep, split_instance_name[-1])
- if bind:
- self.socket.bind(socket_address)
- else:
- self.socket.connect(socket_address)
- elif protocol == ZMQ_PROTOCOLS.TCP or protocol == "TCP":
- if bind:
- if not socket_address:
- for i in range(global_config.TCP_SOCKET_SEARCH_START_PORT, global_config.TCP_SOCKET_SEARCH_END_PORT):
- socket_address = "tcp://0.0.0.0:{}".format(i)
- try:
- self.socket.bind(socket_address)
- break
- except zmq.error.ZMQError as ex:
- if not ex.strerror.startswith('Address in use'):
- raise ex from None
- else:
- self.socket.bind(socket_address)
- elif socket_address:
- self.socket.connect(socket_address)
- else:
- raise RuntimeError(f"Socket address not supplied for TCP connection to identity - {identity}")
- elif protocol == ZMQ_PROTOCOLS.INPROC or protocol == "INPROC":
- # inproc_instance_name = instance_name.replace('/', '_').replace('-', '_')
- if socket_address is None:
- socket_address = f'inproc://{self.instance_name}'
- if bind:
- self.socket.bind(socket_address)
- else:
- self.socket.connect(socket_address)
- else:
- raise NotImplementedError("protocols other than IPC, TCP & INPROC are not implemented now for {}".format(
- self.__class__) +
- f" Given protocol {protocol}.")
- self.socket_address = socket_address
- if not self.logger:
- self.logger = get_default_logger('{}|{}|{}|{}'.format(self.__class__.__name__,
- socket_type, protocol, identity), kwargs.get('log_level', logging.INFO))
- self.logger.info("created socket {} with address {} & identity {} and {}".format(get_socket_type_name(socket_type), socket_address,
- identity, "bound" if bind else "connected"))
-
-
-class BaseAsyncZMQ(BaseZMQ):
- """
- Base class for all async ZMQ servers and clients.
- """
- # init of this class must always take empty arguments due to inheritance structure
-
- def create_socket(self, *, identity : str, bind : bool = False, context : typing.Optional[zmq.asyncio.Context] = None,
- protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None:
- """
- Overloads ``create_socket()`` to create, bind/connect an async socket. A async context is created if none is supplied.
- """
- if context and not isinstance(context, zmq.asyncio.Context):
- raise TypeError("async ZMQ message broker accepts only async ZMQ context. supplied type {}".format(type(context)))
- context = context or zmq.asyncio.Context()
- super().create_socket(identity=identity, bind=bind, context=context, protocol=protocol,
- socket_type=socket_type, **kwargs)
-
-
-class BaseSyncZMQ(BaseZMQ):
- """
- Base class for all sync ZMQ servers and clients.
- """
- # init of this class must always take empty arguments due to inheritance structure
-
- def create_socket(self, *, identity : str, bind : bool = False, context : typing.Optional[zmq.Context] = None,
- protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None:
- """
- Overloads ``create_socket()`` to create, bind/connect a synchronous socket. A (synchronous) context is created
- if none is supplied.
- """
- if context:
- if not isinstance(context, zmq.Context):
- raise TypeError("sync ZMQ message broker accepts only sync ZMQ context. supplied type {}".format(type(context)))
- if isinstance(context, zmq.asyncio.Context):
- raise TypeError("sync ZMQ message broker accepts only sync ZMQ context. supplied type {}".format(type(context)))
- context = context or zmq.Context()
- super().create_socket(identity=identity, bind=bind, context=context, protocol=protocol,
- socket_type=socket_type, **kwargs)
-
-
-
-class BaseZMQServer(BaseZMQ):
- """
- Implements messaging contract as suggested by ZMQ, this is defined as
- as follows:
-
- client's message to server:
- ::
- [address, bytes(), client type, message type, messsage id,
- [ 0 , 1 , 2 , 3 , 4 ,
-
- timeout, instruction, arguments, execution context]
- 5 , 6 , 7 , 8 ]
-
- server's message to client:
- ::
- [address, bytes(), server_type, message_type, message id, data, encoded_data]
- [ 0 , 1 , 2 , 3 , 4 , 5 , 6 ]
-
- The messaging contract does not depend on sync or async implementation.
- """
- def __init__(self,
- instance_name : str,
- server_type : typing.Union[bytes, str],
- http_serializer : typing.Union[None, JSONSerializer] = None,
- zmq_serializer : typing.Union[str, BaseSerializer, None] = None,
- logger : typing.Optional[logging.Logger] = None,
- **kwargs
- ) -> None:
- super().__init__()
- self.zmq_serializer, self.http_serializer = _get_serializer_from_user_given_options(
- zmq_serializer=zmq_serializer,
- http_serializer=http_serializer
- )
- self.instance_name = instance_name
- self.server_type = server_type if isinstance(server_type, bytes) else bytes(server_type, encoding='utf-8')
- self.logger = logger
-
-
- def parse_client_message(self, message : typing.List[bytes]) -> typing.List[typing.Union[bytes, typing.Any]]:
- """
- deserializes important parts of the client's message, namely instruction, arguments, execution context
- based on the client type. For handshake messages, automatically handles handshake. In case of exceptions while
- deserializing, automatically sends an invalid message to client informing the nature of exception with the
- exception metadata.
-
- client's message to server:
- ::
- [address, bytes(), client type, message type, messsage id,
- [ 0 , 1 , 2 , 3 , 4 ,
-
- timeout, instruction, arguments, execution context]
- 5 , 6 , 7 , 8 ]
-
- Execution Context Definitions (typing.Dict[str, typing.Any] or JSON):
- - "oneway" - does not reply to client after executing the instruction
- - "fetch_execution_logs" - fetches logs that were accumulated while execution
-
- Parameters
- ----------
- message: List[bytes]
- message received from client
-
- Returns
- -------
- message: List[bytes | Any]
- message with instruction, arguments and execution context deserialized
-
- """
- try:
- message_type = message[CM_INDEX_MESSAGE_TYPE]
- if message_type == INSTRUCTION:
- client_type = message[CM_INDEX_CLIENT_TYPE]
- if client_type == PROXY:
- message[CM_INDEX_INSTRUCTION] = self.zmq_serializer.loads(message[CM_INDEX_INSTRUCTION]) # type: ignore
- message[CM_INDEX_ARGUMENTS] = self.zmq_serializer.loads(message[CM_INDEX_ARGUMENTS]) # type: ignore
- message[CM_INDEX_EXECUTION_CONTEXT] = self.zmq_serializer.loads(message[CM_INDEX_EXECUTION_CONTEXT]) # type: ignore
- elif client_type == HTTP_SERVER:
- message[CM_INDEX_INSTRUCTION] = self.http_serializer.loads(message[CM_INDEX_INSTRUCTION]) # type: ignore
- message[CM_INDEX_ARGUMENTS] = self.http_serializer.loads(message[CM_INDEX_ARGUMENTS]) # type: ignore
- message[CM_INDEX_EXECUTION_CONTEXT] = self.http_serializer.loads(message[CM_INDEX_EXECUTION_CONTEXT]) # type: ignore
- return message
- elif message_type == HANDSHAKE:
- self.handshake(message)
- except Exception as ex:
- self.handle_invalid_message(message, ex)
-
-
- def craft_reply_from_arguments(self, address : bytes, client_type: bytes, message_type : bytes,
- message_id : bytes = b'', data : typing.Any = None,
- pre_encoded_data : typing.Optional[bytes] = EMPTY_BYTE) -> typing.List[bytes]:
- """
- call this method to craft an arbitrary reply or message to the client using the method arguments.
-
- server's message to client:
- ::
- [address, bytes(), server_type, message_type, message id, data]
- [ 0 , 1 , 2 , 3 , 4 , 5 ]
-
- Parameters
- ----------
- address: bytes
- the ROUTER address of the client
- message_type: bytes
- type of the message, possible values are b'REPLY', b'HANDSHAKE' and b'TIMEOUT'
- message_id: bytes
- message id of the original client message for which the reply is being crafted
- data: Any
- serializable data
-
- Returns
- -------
- message: List[bytes]
- the crafted reply with information in the correct positions within the list
- """
- if client_type == HTTP_SERVER:
- data = self.http_serializer.dumps(data)
- elif client_type == PROXY:
- data = self.zmq_serializer.dumps(data)
-
- return [
- address,
- EMPTY_BYTE,
- self.server_type,
- message_type,
- message_id,
- data,
- pre_encoded_data
- ]
-
-
- def craft_reply_from_client_message(self, original_client_message : typing.List[bytes], data : typing.Any = None,
- pre_encoded_data : bytes = EMPTY_BYTE) -> typing.List[bytes]:
- """
- craft a reply with certain data automatically from an originating client message. The client's address, type required
- for serialization requirements, message id etc. are automatically created from the original message.
-
- server's message to client:
- ::
- [address, bytes(), server_type, message_type, message id, data]
- [ 0 , 1 , 2 , 3 , 4 , 5 ]
-
- Parameters
- ----------
- original_client_message: List[bytes]
- The message originated by the clieht for which the reply is being crafted
- data: Any
- serializable data
-
- Returns
- -------
- message: List[bytes]
- the crafted reply with information in the correct positions within the list
- """
- client_type = original_client_message[CM_INDEX_CLIENT_TYPE]
- if client_type == HTTP_SERVER:
- data = self.http_serializer.dumps(data)
- elif client_type == PROXY:
- data = self.zmq_serializer.dumps(data)
- else:
- raise ValueError(f"invalid client type given '{client_type}' for preparing message to send from " +
- f"'{self.identity}' of type {self.__class__}.")
- return [
- original_client_message[CM_INDEX_ADDRESS],
- EMPTY_BYTE,
- self.server_type,
- REPLY,
- original_client_message[CM_INDEX_MESSAGE_ID],
- data,
- pre_encoded_data
- ]
-
-
- def handshake(self, original_client_message : typing.List[bytes]) -> None:
- """
- pass a handshake message to client. Absolutely mandatory to ensure initial messages do not get lost
- because of ZMQ's very tiny but significant initial delay after creating socket.
-
- Parameters
- ----------
- address: bytes
- the address of the client to send the handshake
-
- Returns
- -------
- None
- """
- run_callable_somehow(self._handshake(original_client_message))
-
- def _handshake(self, original_client_message : typing.List[bytes]) -> None:
- raise NotImplementedError(f"handshake cannot be handled - implement _handshake in {self.__class__} to handshake.")
-
-
- def handle_invalid_message(self, original_client_message : typing.List[bytes], exception : Exception) -> None:
- """
- pass an invalid message to the client when an exception occurred while parsing the message from the client
- (``parse_client_message()``)
-
- Parameters
- ----------
- original_client_message: List[bytes]
- the client message parsing which the exception occurred
- exception: Exception
- exception object raised
-
- Returns
- -------
- None
- """
- run_callable_somehow(self._handle_invalid_message(original_client_message, exception))
-
- def _handle_invalid_message(self, message : typing.List[bytes], exception : Exception) -> None:
- raise NotImplementedError("invalid message cannot be handled" +
- f" - implement _handle_invalid_message in {self.__class__} to handle invalid messages.")
-
-
- def handle_timeout(self, original_client_message : typing.List[bytes]) -> None:
- """
- pass timeout message to the client when the instruction could not be executed within specified timeout
-
- Parameters
- ----------
- original_client_message: List[bytes]
- the client message which could not executed within the specified timeout. timeout value is
- generally specified within the execution context values.
-
- Returns
- -------
- None
- """
- run_callable_somehow(self._handle_timeout(original_client_message))
-
- def _handle_timeout(self, original_client_message : typing.List[bytes]) -> None:
- raise NotImplementedError("timeouts cannot be handled ",
- f"- implement _handle_timeout in {self.__class__} to handle timeout.")
-
-
-
-class BaseAsyncZMQServer(BaseZMQServer):
- """
- Common to all async ZMQ servers
- """
-
- async def _handshake(self, original_client_message : typing.List[bytes]) -> None:
- """
- Inner method that handles handshake. scheduled by ``handshake()`` method, signature same as ``handshake()``.
- """
- await self.socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS],
- original_client_message[CM_INDEX_CLIENT_TYPE], HANDSHAKE, original_client_message[CM_INDEX_MESSAGE_ID],
- EMPTY_BYTE))
- self.logger.info(f"sent handshake to client '{original_client_message[CM_INDEX_ADDRESS]}'")
-
-
- async def _handle_timeout(self, original_client_message : typing.List[bytes]) -> None:
- """
- Inner method that handles timeout. scheduled by ``handle_timeout()``, signature same as ``handle_timeout``.
- """
- await self.socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS],
- original_client_message[CM_INDEX_CLIENT_TYPE], TIMEOUT, original_client_message[CM_INDEX_MESSAGE_ID]))
- self.logger.info(f"sent timeout to client '{original_client_message[CM_INDEX_ADDRESS]}'")
-
-
- async def _handle_invalid_message(self, original_client_message : typing.List[bytes], exception : Exception) -> None:
- """
- Inner method that handles invalid messages. scheduled by ``handle_invalid_message()``,
- signature same as ``handle_invalid_message()``.
- """
- await self.socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS],
- original_client_message[CM_INDEX_CLIENT_TYPE], INVALID_MESSAGE,
- original_client_message[CM_INDEX_MESSAGE_ID]), exception)
- self.logger.info(f"sent exception message to client '{original_client_message[CM_INDEX_ADDRESS]}'." +
- f" exception - {str(exception)}")
-
-
-
-class AsyncZMQServer(BaseAsyncZMQServer, BaseAsyncZMQ):
- """
- Implements blocking (non-polled) but async receive instructions and send replies.
- """
-
- def __init__(self, *, instance_name : str, server_type : Enum, context : typing.Union[zmq.asyncio.Context, None] = None,
- protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None:
- BaseAsyncZMQServer.__init__(self, instance_name=instance_name, server_type=server_type, **kwargs)
- BaseAsyncZMQ.__init__(self)
- self.create_socket(identity=instance_name, bind=True, context=context, protocol=protocol,
- socket_type=socket_type, **kwargs)
- self._terminate_context = context == None # terminate if it was created by instance
-
-
- async def async_recv_instruction(self) -> typing.Any:
- """
- Receive one instruction in a blocking form. Async for multi-server paradigm, each server should schedule
- this method in the event loop explicitly. This is taken care by the ``Eventloop`` & ``RPCServer``.
-
- Returns
- -------
- instruction: List[bytes | Any]
- received instruction with important content (instruction, arguments, execution context) deserialized.
- """
- while True:
- instruction = self.parse_client_message(await self.socket.recv_multipart())
- if instruction:
- self.logger.debug(f"received instruction from client '{instruction[CM_INDEX_ADDRESS]}' with msg-ID {instruction[CM_INDEX_MESSAGE_ID]}")
- return instruction
-
-
- async def async_recv_instructions(self) -> typing.List[typing.Any]:
- """
- Receive all currently available instructions in blocking form. Async for multi-server paradigm, each server should schedule
- this method in the event loop explicitly. This is taken care by the ``Eventloop`` & ``RPCServer``.
-
- Returns
- -------
- instructions: List[List[bytes | Any]]
- list of received instructions with important content (instruction, arguments, execution context) deserialized.
- """
- instructions = [await self.async_recv_instruction()]
- while True:
- try:
- instruction = self.parse_client_message(await self.socket.recv_multipart(zmq.NOBLOCK))
- if instruction:
- self.logger.debug(f"received instruction from client '{instruction[CM_INDEX_ADDRESS]}' with msg-ID {instruction[CM_INDEX_MESSAGE_ID]}")
- instructions.append(instruction)
- except zmq.Again:
- break
- return instructions
-
-
- async def async_send_reply(self, original_client_message : typing.List[bytes], data : typing.Any) -> None:
- """
- Send reply for an instruction.
-
- Parameters
- ----------
- original_client_message: List[bytes]
- original message so that the reply can be properly crafted and routed
- data: Any
- serializable data to be sent as reply
-
- Returns
- -------
- None
- """
- await self.socket.send_multipart(self.craft_reply_from_client_message(original_client_message, data))
- self.logger.debug(f"sent reply to client '{original_client_message[CM_INDEX_ADDRESS]}' with msg-ID {original_client_message[CM_INDEX_MESSAGE_ID]}")
-
-
- async def async_send_reply_with_message_type(self, original_client_message : typing.List[bytes],
- message_type: bytes, data : typing.Any) -> None:
- """
- Send reply for an instruction.
-
- Parameters
- ----------
- original_client_message: List[bytes]
- original message so that the reply can be properly crafted and routed
- data: Any
- serializable data to be sent as reply
-
- Returns
- -------
- None
- """
- await self.socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS],
- original_client_message[CM_INDEX_CLIENT_TYPE], message_type,
- original_client_message[CM_INDEX_MESSAGE_ID], data))
- self.logger.debug(f"sent reply to client '{original_client_message[CM_INDEX_ADDRESS]}' with msg-ID {original_client_message[CM_INDEX_MESSAGE_ID]}")
-
-
- def exit(self) -> None:
- """
- closes socket and context, warns if any error occurs.
- """
- super().exit()
- try:
- self.socket.close(0)
- self.logger.info(f"terminated socket of server '{self.identity}' of type {self.__class__}")
- except Exception as ex:
- self.logger.warning("could not properly terminate socket or attempted to terminate an already terminated " +
- f" socket '{self.identity}' of type {self.__class__}. Exception message : {str(ex)}")
- try:
- if self._terminate_context:
- self.context.term()
- self.logger.info("terminated context of socket '{}' of type '{}'".format(self.identity, self.__class__))
- except Exception as ex:
- self.logger.warning("could not properly terminate context or attempted to terminate an already terminated " +
- f" context '{self.identity}'. Exception message : {str(ex)}")
-
-
-
-class AsyncPollingZMQServer(AsyncZMQServer):
- """
- Identical to AsyncZMQServer, except that instructions are received in non-blocking/polling form.
- This server can be stopped from server side by calling ``stop_polling()`` unlike ``AsyncZMQServer`` which
- cannot be stopped manually unless an instruction arrives.
-
- Parameters
- ----------
- instance_name: str
- ``instance_name`` of the Thing which the server serves
- server_type: str
- server type metadata - currently not useful/important
- context: Optional, zmq.asyncio.Context
- ZeroMQ Context object to use. All sockets share this context. Automatically created when None is supplied.
- socket_type : zmq.SocketType, default zmq.ROUTER
- socket type of ZMQ socket, default is ROUTER (enables address based routing of messages)
- protocol: Enum, default ZMQ_PROTOCOLS.IPC
- Use TCP for network access, IPC for multi-process applications, and INPROC for multi-threaded applications.
- poll_timeout: int, default 25
- time in milliseconds to poll the sockets specified under ``procotols``. Useful for calling ``stop_polling()``
- where the max delay to stop polling will be ``poll_timeout``
-
- **kwargs:
- http_serializer: hololinked.server.serializers.JSONSerializer
- serializer used to send message to HTTP Server
- zmq_serializer: any of hololinked.server.serializers.serializer, default serpent
- serializer used to send message to RPC clients
- """
-
- def __init__(self, *, instance_name : str, server_type : Enum, context : typing.Union[zmq.asyncio.Context, None] = None,
- socket_type : zmq.SocketType = zmq.ROUTER, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC,
- poll_timeout = 25, **kwargs) -> None:
- super().__init__(instance_name=instance_name, server_type=server_type, context=context,
- socket_type=socket_type, protocol=protocol, **kwargs)
- self.poller = zmq.asyncio.Poller()
- self.poller.register(self.socket, zmq.POLLIN)
- self.poll_timeout = poll_timeout
-
- @property
- def poll_timeout(self) -> int:
- """
- socket polling timeout in milliseconds greater than 0.
- """
- return self._poll_timeout
-
- @poll_timeout.setter
- def poll_timeout(self, value) -> None:
- if not isinstance(value, int) or value < 0:
- raise ValueError(f"polling period must be an integer greater than 0, not {value}. Value is considered in milliseconds.")
- self._poll_timeout = value
-
- async def poll_instructions(self) -> typing.List[typing.List[bytes]]:
- """
- poll for instructions with specified timeout (``poll_timeout``) and return if any instructions are available.
- This method blocks, so make sure other methods are scheduled which can stop polling.
-
- Returns
- -------
- instructions: List[List[bytes]]
- list of received instructions with important content (instruction, arguments, execution context) deserialized.
- """
- self.stop_poll = False
- instructions = []
- while not self.stop_poll:
- sockets = await self.poller.poll(self._poll_timeout) # type hints dont work in this line
- for socket, _ in sockets:
- while True:
- try:
- instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK))
- except zmq.Again:
- break
- else:
- if instruction:
- self.logger.debug(f"received instruction from client '{instruction[CM_INDEX_ADDRESS]}' with msg-ID {instruction[CM_INDEX_MESSAGE_ID]}")
- instructions.append(instruction)
- if len(instructions) > 0:
- break
- return instructions
-
- def stop_polling(self) -> None:
- """
- stop polling and unblock ``poll_instructions()`` method
- """
- self.stop_poll = True
-
- def exit(self) -> None:
- """
- unregister socket from poller and terminate socket and context.
- """
- try:
- BaseZMQ.exit(self)
- self.poller.unregister(self.socket)
- except Exception as ex:
- self.logger.warning(f"could not unregister socket {self.identity} from polling - {str(ex)}")
- return super().exit()
-
-
-
-class ZMQServerPool(BaseZMQServer):
- """
- Implements pool of async ZMQ servers (& their sockets)
- """
-
- def __init__(self, *, instance_names : typing.Union[typing.List[str], None] = None, **kwargs) -> None:
- self.context = zmq.asyncio.Context()
- self.poller = zmq.asyncio.Poller()
- self.pool = dict() # type: typing.Dict[str, typing.Union[AsyncZMQServer, AsyncPollingZMQServer]]
- if instance_names:
- for instance_name in instance_names:
- self.pool[instance_name] = AsyncZMQServer(instance_name=instance_name,
- server_type=ServerTypes.UNKNOWN_TYPE.value, context=self.context, **kwargs)
- for server in self.pool.values():
- self.poller.register(server.socket, zmq.POLLIN)
- super().__init__(instance_name="pool", server_type=ServerTypes.POOL.value, **kwargs)
- self.identity = "pool"
- if self.logger is None:
- self.logger = get_default_logger("pool|polling", kwargs.get('log_level',logging.INFO))
-
- def create_socket(self, *, identity : str, bind: bool, context : typing.Union[zmq.asyncio.Context, zmq.Context],
- protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None:
- raise NotImplementedError("create socket not supported by ZMQServerPool")
- # we override this method to prevent socket creation. instance_name set to pool is simply a filler
- return super().create_socket(identity=identity, bind=bind, context=context, protocol=protocol,
- socket_type=socket_type, **kwargs)
-
- def register_server(self, server : typing.Union[AsyncZMQServer, AsyncPollingZMQServer]) -> None:
- if not isinstance(server, (AsyncZMQServer, AsyncPollingZMQServer)):
- raise TypeError("registration possible for servers only subclass of AsyncZMQServer or AsyncPollingZMQServer." +
- f" Given type {type(server)}")
- self.pool[server.instance_name] = server
- self.poller.register(server.socket, zmq.POLLIN)
-
- def deregister_server(self, server : typing.Union[AsyncZMQServer, AsyncPollingZMQServer]) -> None:
- self.poller.unregister(server.socket)
- self.pool.pop(server.instance_name)
-
- @property
- def poll_timeout(self) -> int:
- """
- socket polling timeout in milliseconds greater than 0.
- """
- return self._poll_timeout
-
- @poll_timeout.setter
- def poll_timeout(self, value) -> None:
- if not isinstance(value, int) or value < 0:
- raise ValueError("polling period must be an integer greater than 0, not {}. Value is considered in milliseconds.".format(value))
- self._poll_timeout = value
-
- async def async_recv_instruction(self, instance_name : str) -> typing.List:
- """
- receive instruction for instance name
-
- Parameters
- ----------
- instance_name : str
- instance name of the ``Thing`` or in this case, the ZMQ server.
- """
- return await self.pool[instance_name].async_recv_instruction()
-
- async def async_recv_instructions(self, instance_name : str) -> typing.List[typing.List]:
- """
- receive all available instructions for instance name
-
- Parameters
- ----------
- instance_name : str
- instance name of the ``Thing`` or in this case, the ZMQ server.
- """
- return await self.pool[instance_name].async_recv_instructions()
-
- async def async_send_reply(self, *, instance_name : str, original_client_message : typing.List[bytes],
- data : typing.Any) -> None:
- """
- send reply for instance name
-
- Parameters
- ----------
- instance_name : str
- instance name of the ``Thing`` or in this case, the ZMQ server.
- original_client_message: List[bytes]
- instruction for which reply is being given
- data: Any
- data to be given as reply
- """
- await self.pool[instance_name].async_send_reply(original_client_message, data)
-
- async def poll(self) -> typing.List[typing.List[typing.Any]]:
- """
- Pool for instruction in the entire server pool. Map the instruction to the correct instance
- using the 0th index of the instruction.
- """
- self.stop_poll = False
- instructions = []
- while not self.stop_poll:
- sockets = await self.poller.poll(self._poll_timeout)
- for socket, _ in sockets:
- while True:
- try:
- instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK))
- except zmq.Again:
- break
- else:
- if instruction:
- self.logger.debug(f"received instruction from client '{instruction[CM_INDEX_ADDRESS]}' with msg-ID {instruction[CM_INDEX_MESSAGE_ID]}")
- instructions.append(instruction)
- return instructions
-
- def stop_polling(self) -> None:
- """
- stop polling method ``poll()``
- """
- self.stop_poll = True
-
- def __getitem__(self, key) -> typing.Union[AsyncZMQServer, AsyncPollingZMQServer]:
- return self.pool[key]
-
- def __iter__(self) -> typing.Iterator[str]:
- return self.pool.__iter__()
-
- def __contains__(self, name : str) -> bool:
- return name in self.pool.keys()
-
- def exit(self) -> None:
- for server in self.pool.values():
- try:
- self.poller.unregister(server.socket)
- server.exit()
- except Exception as ex:
- self.logger.warning(f"could not unregister poller and exit server {server.identity} - {str(ex)}")
- try:
- self.context.term()
- self.logger.info("context terminated for {}".format(self.__class__))
- except Exception as ex:
- self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context " +
- f"'{self.identity}'. Exception message : {str(ex)}")
-
-
-
-class RPCServer(BaseZMQServer):
- """
- Top level ZMQ RPC server used by ``Thing`` and ``Eventloop``.
-
- Parameters
- ----------
- instance_name: str
- ``instance_name`` of the Thing which the server serves
- server_type: str
- server type metadata - currently not useful/important
- context: Optional, zmq.asyncio.Context
- ZeroMQ Context object to use. All sockets share this context. Automatically created when None is supplied.
- protocols: List[str, Enum], default [ZMQ_PROTOCOLS.TCP, ZMQ_PROTOCOLS.IPC, ZMQ_PROTOCOLS.INPROC]
- all ZeroMQ sockets where instructions can be passed to the RPC server. Use TCP for network access,
- IPC for multi-process applications, and INPROC for multi-threaded applications. Use all for complete access.
- poll_timeout: int, default 25
- time in milliseconds to poll the sockets specified under ``procotols``. Useful for calling ``stop_polling()``
- where the max delay to stop polling will be ``poll_timeout``
- **kwargs:
- tcp_socket_address: str
- address of the TCP socket, if not given, a random port is chosen
- """
-
- def __init__(self, instance_name : str, *, server_type : Enum, context : typing.Union[zmq.asyncio.Context, None] = None,
- protocols : typing.Union[ZMQ_PROTOCOLS, str, typing.List[ZMQ_PROTOCOLS]] = ZMQ_PROTOCOLS.IPC,
- poll_timeout = 25, **kwargs) -> None:
- super().__init__(instance_name=instance_name, server_type=server_type, **kwargs)
-
- self.identity = f"{instance_name}/rpc-server"
- if isinstance(protocols, list):
- protocols = protocols
- elif isinstance(protocols, str):
- protocols = [protocols]
- else:
- raise TypeError(f"unsupported protocols type : {type(protocols)}")
- tcp_socket_address = kwargs.pop('tcp_socket_address', None)
- kwargs["http_serializer"] = self.http_serializer
- kwargs["zmq_serializer"] = self.zmq_serializer
- self.inproc_server = self.ipc_server = self.tcp_server = self.event_publisher = None
- event_publisher_protocol = None
- if self.logger is None:
- self.logger = get_default_logger('{}|{}|{}|{}'.format(self.__class__.__name__,
- 'RPC', 'MIXED', instance_name), kwargs.get('log_level', logging.INFO))
- # contexts and poller
- self.context = context or zmq.asyncio.Context()
- self.poller = zmq.asyncio.Poller()
- self.poll_timeout = poll_timeout
- # initialise every externally visible protocol
- if ZMQ_PROTOCOLS.TCP in protocols or "TCP" in protocols:
- self.tcp_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type,
- context=self.context, protocol=ZMQ_PROTOCOLS.TCP, poll_timeout=poll_timeout,
- socket_address=tcp_socket_address, **kwargs)
- self.poller.register(self.tcp_server.socket, zmq.POLLIN)
- event_publisher_protocol = ZMQ_PROTOCOLS.TCP
- if ZMQ_PROTOCOLS.IPC in protocols or "IPC" in protocols:
- self.ipc_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type,
- context=self.context, protocol=ZMQ_PROTOCOLS.IPC, poll_timeout=poll_timeout, **kwargs)
- self.poller.register(self.ipc_server.socket, zmq.POLLIN)
- event_publisher_protocol = ZMQ_PROTOCOLS.IPC if not event_publisher_protocol else event_publisher_protocol
- if ZMQ_PROTOCOLS.INPROC in protocols or "INPROC" in protocols:
- self.inproc_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type,
- context=self.context, protocol=ZMQ_PROTOCOLS.INPROC, poll_timeout=poll_timeout, **kwargs)
- self.poller.register(self.inproc_server.socket, zmq.POLLIN)
- event_publisher_protocol = ZMQ_PROTOCOLS.INPROC if not event_publisher_protocol else event_publisher_protocol
- self.event_publisher = EventPublisher(
- instance_name=instance_name + '-event-pub',
- protocol=event_publisher_protocol,
- zmq_serializer=self.zmq_serializer,
- http_serializer=self.http_serializer,
- logger=self.logger
- )
- # instruction serializing broker
- self.inner_inproc_client = AsyncZMQClient(
- server_instance_name=f'{instance_name}/inner',
- identity=f'{instance_name}/tunneler',
- client_type=TUNNELER,
- context=self.context,
- protocol=ZMQ_PROTOCOLS.INPROC,
- handshake=False, # handshake manually done later when event loop is run
- logger=self.logger
- )
- self.inner_inproc_server = AsyncZMQServer(
- instance_name=f'{self.instance_name}/inner', # hardcoded be very careful
- server_type=server_type,
- context=self.context,
- protocol=ZMQ_PROTOCOLS.INPROC,
- **kwargs
- )
- self._instructions = deque() # type: deque[typing.Tuple[typing.List[bytes], asyncio.Event, asyncio.Future, zmq.Socket]]
- self._instructions_event = asyncio.Event()
-
-
- async def handshake_complete(self):
- """
- handles inproc client's handshake with ``Thing``'s inproc server
- """
- await self.inner_inproc_client.handshake_complete()
-
-
- def prepare(self):
- """
- registers socket polling method and message tunnelling methods to the running
- asyncio event loop
- """
- eventloop = asyncio.get_event_loop()
- eventloop.call_soon(lambda : asyncio.create_task(self.poll()))
- eventloop.call_soon(lambda : asyncio.create_task(self.tunnel_message_to_things()))
-
-
- @property
- def poll_timeout(self) -> int:
- """
- socket polling timeout in milliseconds greater than 0.
- """
- return self._poll_timeout
-
- @poll_timeout.setter
- def poll_timeout(self, value) -> None:
- if not isinstance(value, int) or value < 0:
- raise ValueError(("polling period must be an integer greater than 0, not {}.",
- "Value is considered in milliseconds.".format(value)))
- self._poll_timeout = value
-
-
- def _get_timeout_from_instruction(self, message : typing.Tuple[bytes]) -> float:
- """
- Unlike ``parse_client_message()``, this method only retrieves the timeout parameter
- """
- client_type = message[CM_INDEX_CLIENT_TYPE]
- if client_type == PROXY:
- return self.zmq_serializer.loads(message[CM_INDEX_TIMEOUT])
- elif client_type == HTTP_SERVER:
- return self.http_serializer.loads(message[CM_INDEX_TIMEOUT])
-
-
- async def poll(self):
- """
- poll for instructions and append them to instructions list to pass them to ``Eventloop``/``Thing``'s inproc
- server using an inner inproc client. Registers the messages for timeout calculation.
- """
- self.stop_poll = False
- eventloop = asyncio.get_event_loop()
- self.inner_inproc_client.handshake()
- await self.inner_inproc_client.handshake_complete()
- if self.inproc_server:
- eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.inproc_server)))
- if self.ipc_server:
- eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.ipc_server)))
- if self.tcp_server:
- eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.tcp_server)))
-
-
- def stop_polling(self):
- """
- stop polling method ``poll()``
- """
- self.stop_poll = True
- self._instructions_event.set()
- if self.inproc_server is not None:
- def kill_inproc_server(instance_name, context, logger):
- # this function does not work when written fully async - reason is unknown
- try:
- event_loop = asyncio.get_event_loop()
- except RuntimeError:
- event_loop = asyncio.new_event_loop()
- asyncio.set_event_loop(event_loop)
- temp_inproc_client = AsyncZMQClient(server_instance_name=instance_name,
- identity=f'{self.instance_name}-inproc-killer',
- context=context, client_type=PROXY, protocol=ZMQ_PROTOCOLS.INPROC,
- logger=logger)
- event_loop.run_until_complete(temp_inproc_client.handshake_complete())
- event_loop.run_until_complete(temp_inproc_client.socket.send_multipart(temp_inproc_client.craft_empty_message_with_type(EXIT)))
- temp_inproc_client.exit()
- threading.Thread(target=kill_inproc_server, args=(self.instance_name, self.context, self.logger), daemon=True).start()
- if self.ipc_server is not None:
- temp_client = SyncZMQClient(server_instance_name=self.instance_name, identity=f'{self.instance_name}-ipc-killer',
- client_type=PROXY, protocol=ZMQ_PROTOCOLS.IPC, logger=self.logger)
- temp_client.socket.send_multipart(temp_client.craft_empty_message_with_type(EXIT))
- temp_client.exit()
- if self.tcp_server is not None:
- socket_address = self.tcp_server.socket_address
- if '/*:' in self.tcp_server.socket_address:
- socket_address = self.tcp_server.socket_address.replace('*', 'localhost')
- # print("TCP socket address", self.tcp_server.socket_address)
- temp_client = SyncZMQClient(server_instance_name=self.instance_name, identity=f'{self.instance_name}-tcp-killer',
- client_type=PROXY, protocol=ZMQ_PROTOCOLS.TCP, logger=self.logger,
- socket_address=socket_address)
- temp_client.socket.send_multipart(temp_client.craft_empty_message_with_type(EXIT))
- temp_client.exit()
-
-
- async def recv_instruction(self, server : AsyncZMQServer):
- eventloop = asyncio.get_event_loop()
- socket = server.socket
- while True:
- try:
- original_instruction = await socket.recv_multipart()
- if original_instruction[CM_INDEX_MESSAGE_TYPE] == HANDSHAKE:
- handshake_task = asyncio.create_task(self._handshake(original_instruction, socket))
- eventloop.call_soon(lambda : handshake_task)
- continue
- if original_instruction[CM_INDEX_MESSAGE_TYPE] == EXIT:
- break
- timeout = self._get_timeout_from_instruction(original_instruction)
- ready_to_process_event = None
- timeout_task = None
- if timeout is not None:
- ready_to_process_event = asyncio.Event()
- timeout_task = asyncio.create_task(self.process_timeouts(original_instruction,
- ready_to_process_event, timeout, socket))
- eventloop.call_soon(lambda : timeout_task)
- except Exception as ex:
- # handle invalid message
- self.logger.error(f"exception occurred for message id {original_instruction[CM_INDEX_MESSAGE_ID]} - {str(ex)}")
- invalid_message_task = asyncio.create_task(self._handle_invalid_message(original_instruction,
- ex, socket))
- eventloop.call_soon(lambda: invalid_message_task)
- else:
- self._instructions.append((original_instruction, ready_to_process_event,
- timeout_task, socket))
- self._instructions_event.set()
- self.logger.info(f"stopped polling for server '{server.identity}' {server.socket_address[0:3].upper() if server.socket_address[0:3] in ['ipc', 'tcp'] else 'INPROC'}")
-
-
- async def tunnel_message_to_things(self):
- """
- message tunneler between external sockets and interal inproc client
- """
- while not self.stop_poll:
- if len(self._instructions) > 0:
- message, ready_to_process_event, timeout_task, origin_socket = self._instructions.popleft()
- timeout = True
- if ready_to_process_event is not None:
- ready_to_process_event.set()
- timeout = await timeout_task
- if ready_to_process_event is None or not timeout:
- original_address = message[CM_INDEX_ADDRESS]
- message[CM_INDEX_ADDRESS] = self.inner_inproc_client.server_address # replace address
- await self.inner_inproc_client.socket.send_multipart(message)
- reply = await self.inner_inproc_client.socket.recv_multipart()
- reply[SM_INDEX_ADDRESS] = original_address
- if reply[SM_INDEX_MESSAGE_TYPE] != ONEWAY:
- await origin_socket.send_multipart(reply)
- else:
- await self._instructions_event.wait()
- self._instructions_event.clear()
- self.logger.info("stopped tunneling messages to things")
-
- async def process_timeouts(self, original_client_message : typing.List, ready_to_process_event : asyncio.Event,
- timeout : typing.Optional[float], origin_socket : zmq.Socket) -> bool:
- """
- replies timeout to client if timeout occured and prevents the instruction from being executed.
- """
- try:
- await asyncio.wait_for(ready_to_process_event.wait(), timeout)
- return False
- except TimeoutError:
- await origin_socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS],
- original_client_message[CM_INDEX_CLIENT_TYPE], TIMEOUT, original_client_message[CM_INDEX_MESSAGE_ID]))
- return True
-
- async def _handle_invalid_message(self, original_client_message: builtins.list[builtins.bytes],
- exception: builtins.Exception, originating_socket : zmq.Socket) -> None:
- await originating_socket.send_multipart(self.craft_reply_from_arguments(
- original_client_message[CM_INDEX_ADDRESS], original_client_message[CM_INDEX_CLIENT_TYPE],
- INVALID_MESSAGE, original_client_message[CM_INDEX_MESSAGE_ID], exception))
- self.logger.info(f"sent exception message to client '{original_client_message[CM_INDEX_ADDRESS]}'." +
- f" exception - {str(exception)}")
-
- async def _handshake(self, original_client_message: builtins.list[builtins.bytes],
- originating_socket : zmq.Socket) -> None:
- await originating_socket.send_multipart(self.craft_reply_from_arguments(
- original_client_message[CM_INDEX_ADDRESS],
- original_client_message[CM_INDEX_CLIENT_TYPE], HANDSHAKE, original_client_message[CM_INDEX_MESSAGE_ID],
- EMPTY_DICT))
- self.logger.info("sent handshake to client '{}'".format(original_client_message[CM_INDEX_ADDRESS]))
-
-
- def exit(self):
- self.stop_poll = True
- for socket in list(self.poller._map.keys()): # iterating over keys will cause dictionary size change during iteration
- try:
- self.poller.unregister(socket)
- except Exception as ex:
- self.logger.warning(f"could not unregister socket from polling - {str(ex)}") # does not give info about socket
- try:
- self.inproc_server.exit()
- self.ipc_server.exit()
- self.tcp_server.exit()
- self.inner_inproc_client.exit()
- except:
- pass
- self.context.term()
- self.logger.info("terminated context of socket '{}' of type '{}'".format(self.identity, self.__class__))
-
-
-
-class BaseZMQClient(BaseZMQ):
- """
- Base class for all ZMQ clients irrespective of sync and async.
-
- server's reply to client
- ::
-
- [address, bytes(), server type , message_type, message id, content or response or reply]
- [ 0 , 1 , 2 , 3 , 4 , 5 ]
-
- Parameters
- ----------
- server_instance_name: str
- The instance name of the server (or ``Thing``)
- client_type: str
- RPC or HTTP Server
- **kwargs:
- zmq_serializer: BaseSerializer
- custom implementation of RPC serializer if necessary
- http_serializer: JSONSerializer
- custom implementation of JSON serializer if necessary
- """
-
- def __init__(self, *,
- server_instance_name : str, client_type : bytes,
- server_type : typing.Union[bytes, str, Enum] = ServerTypes.UNKNOWN_TYPE,
- http_serializer : typing.Union[None, JSONSerializer] = None,
- zmq_serializer : typing.Union[str, BaseSerializer, None] = None,
- logger : typing.Optional[logging.Logger] = None,
- **kwargs
- ) -> None:
- if client_type in [PROXY, HTTP_SERVER, TUNNELER]:
- self.client_type = client_type
- else:
- raise ValueError("Invalid client type for {}. Given option {}.".format(self.__class__, client_type))
- if server_instance_name:
- self.server_address = bytes(server_instance_name, encoding='utf-8')
- self.instance_name = server_instance_name
- self.zmq_serializer, self.http_serializer = _get_serializer_from_user_given_options(
- zmq_serializer=zmq_serializer,
- http_serializer=http_serializer
- )
- if isinstance(server_type, bytes):
- self.server_type = server_type
- elif isinstance(server_type, Enum):
- self.server_type = server_type.value
- else:
- self.server_type = bytes(server_type, encoding='utf-8')
- self.logger = logger
- self._monitor_socket = None
- self._reply_cache = dict()
- super().__init__()
-
-
- def raise_local_exception(self, exception : typing.Dict[str, typing.Any]) -> None:
- """
- raises an exception on client side using an exception from server by mapping it to the correct one based on type.
-
- Parameters
- ----------
- exception: Dict[str, Any]
- exception dictionary made by server with following keys - type, message, traceback, notes
-
- """
- if isinstance(exception, Exception):
- raise exception from None
- exc = getattr(builtins, exception["type"], None)
- message = exception["message"]
- if exc is None:
- ex = Exception(message)
- else:
- ex = exc(message)
- exception["traceback"][0] = f"Server {exception['traceback'][0]}"
- ex.__notes__ = exception["traceback"][0:-1]
- raise ex from None
-
-
- def parse_server_message(self, message : typing.List[bytes], raise_client_side_exception : bool = False,
- deserialize : bool = True) -> typing.Any:
- """
- server's message to client:
-
- ::
- [address, bytes(), server type , message_type, message id, content or response or reply]
- [ 0 , 1 , 2 , 3 , 4 , 5 ]
-
- Parameters
- ----------
- message: List[bytes]
- message sent be server
- raise_client_side_exception: bool
- raises exception from server on client
-
- Raises
- ------
- NotImplementedError:
- if message type is not reply, handshake or invalid
- """
- if len(message) == 2: # socket monitor message, not our message
- try:
- if ZMQ_EVENT_MAP[parse_monitor_message(message)['event']] == SERVER_DISCONNECTED:
- raise ConnectionAbortedError(f"server disconnected for {self.instance_name}")
- return None # None should simply continue the message receive logic
- except RuntimeError as ex:
- raise RuntimeError(f'message received from monitor socket cannot be deserialized for {self.instance_name}') from None
- message_type = message[SM_INDEX_MESSAGE_TYPE]
- if message_type == REPLY:
- if deserialize:
- if self.client_type == HTTP_SERVER:
- message[SM_INDEX_DATA] = self.http_serializer.loads(message[SM_INDEX_DATA]) # type: ignore
- elif self.client_type == PROXY:
- message[SM_INDEX_DATA] = self.zmq_serializer.loads(message[SM_INDEX_DATA]) # type: ignore
- return message
- elif message_type == HANDSHAKE:
- self.logger.debug("""handshake messages arriving out of order are silently dropped as receiving this message
- means handshake was successful before. Received hanshake from {}""".format(message[0]))
- elif message_type == EXCEPTION or message_type == INVALID_MESSAGE:
- if self.client_type == HTTP_SERVER:
- message[SM_INDEX_DATA] = self.http_serializer.loads(message[SM_INDEX_DATA]) # type: ignore
- elif self.client_type == PROXY:
- message[SM_INDEX_DATA] = self.zmq_serializer.loads(message[SM_INDEX_DATA]) # type: ignore
- if not raise_client_side_exception:
- return message
- if message[SM_INDEX_DATA].get('exception', None) is not None:
- self.raise_local_exception(message[SM_INDEX_DATA]['exception'])
- else:
- raise NotImplementedError("message type {} received. No exception field found, exception field mandatory.".format(
- message_type))
- elif message_type == TIMEOUT:
- exception = TimeoutError("message timed out.")
- if raise_client_side_exception:
- raise exception from None
- message[SM_INDEX_DATA] = format_exception_as_json(exception)
- return message
- else:
- raise NotImplementedError("Unknown message type {} received. This message cannot be dealt.".format(message_type))
-
-
- def craft_instruction_from_arguments(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT,
- timeout : typing.Optional[float] = None, context : typing.Dict[str, typing.Any] = EMPTY_DICT) -> typing.List[bytes]:
- """
- message from client to server:
-
- ::
- [address, bytes(), client type, message type, message id, instruction, arguments]
- [ 0 , 1 , 2 , 3 , 4 , 5 , 6 ]
-
- """
- message_id = bytes(str(uuid4()), encoding='utf-8')
- if self.client_type == HTTP_SERVER:
- timeout = self.http_serializer.dumps(timeout) # type: bytes
- instruction = self.http_serializer.dumps(instruction) # type: bytes
- # TODO - following can be improved
- if arguments == b'':
- arguments = self.http_serializer.dumps({}) # type: bytes
- elif not isinstance(arguments, byte_types):
- arguments = self.http_serializer.dumps(arguments) # type: bytes
- context = self.http_serializer.dumps(context) # type: bytes
- elif self.client_type == PROXY:
- timeout = self.zmq_serializer.dumps(timeout) # type: bytes
- instruction = self.zmq_serializer.dumps(instruction) # type: bytes
- if not isinstance(arguments, byte_types):
- arguments = self.zmq_serializer.dumps(arguments) # type: bytes
- context = self.zmq_serializer.dumps(context)
-
- return [
- self.server_address,
- EMPTY_BYTE,
- self.client_type,
- INSTRUCTION,
- message_id,
- timeout,
- instruction,
- arguments,
- context
- ]
-
-
- def craft_empty_message_with_type(self, message_type : bytes = HANDSHAKE):
- """
- create handshake message for example
- """
- return [
- self.server_address,
- EMPTY_BYTE,
- self.client_type,
- message_type,
- EMPTY_BYTE,
- EMPTY_BYTE,
- EMPTY_BYTE,
- EMPTY_BYTE,
- EMPTY_BYTE
- ]
-
- def exit(self) -> None:
- BaseZMQ.exit(self)
- try:
- self.poller.unregister(self.socket)
- # TODO - there is some issue here while quitting
- # print("poller exception did not occur 1")
- if self._monitor_socket is not None:
- # print("poller exception did not occur 2")
- self.poller.unregister(self._monitor_socket)
- # print("poller exception did not occur 3")
- except Exception as ex:
- self.logger.warning(f"unable to deregister from poller - {str(ex)}")
-
- try:
- if self._monitor_socket is not None:
- self._monitor_socket.close(0)
- self.socket.close(0)
- self.logger.info("terminated socket of server '{}' of type '{}'".format(self.identity, self.__class__))
- except Exception as ex:
- self.logger.warning("could not properly terminate socket or attempted to terminate an already terminated " +
- f"socket '{self.identity}' of type '{self.__class__}'. Exception message : {str(ex)}")
- try:
- if self._terminate_context:
- self.context.term()
- self.logger.info("terminated context of socket '{}' of type '{}'".format(self.identity, self.__class__))
- except Exception as ex:
- self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context" +
- "'{}'. Exception message : {}".format(self.identity, str(ex)))
-
-
-
-class SyncZMQClient(BaseZMQClient, BaseSyncZMQ):
- """
- Synchronous ZMQ client that connect with sync or async server based on ZMQ protocol. Works like REQ-REP socket.
- Each request is blocking until response is received. Suitable for most purposes.
-
- Parameters
- ----------
- server_instance_name: str
- The instance name of the server (or ``Thing``)
- identity: str
- Unique identity of the client to receive messages from the server. Each client connecting to same server must
- still have unique ID.
- client_type: str
- RPC or HTTP Server
- handshake: bool
- when true, handshake with the server first before allowing first message and block until that handshake was
- accomplished.
- protocol: str | Enum, TCP, IPC or INPROC, default IPC
- protocol implemented by the server
- **kwargs:
- socket_address: str
- socket address for connecting to TCP server
- zmq_serializer:
- custom implementation of RPC serializer if necessary
- http_serializer:
- custom implementation of JSON serializer if necessary
- """
-
- def __init__(self, server_instance_name : str, identity : str, client_type = HTTP_SERVER,
- handshake : bool = True, protocol : str = ZMQ_PROTOCOLS.IPC,
- context : typing.Union[zmq.Context, None] = None,
- **kwargs) -> None:
- BaseZMQClient.__init__(self, server_instance_name=server_instance_name,
- client_type=client_type, **kwargs)
- BaseSyncZMQ.__init__(self)
- self.create_socket(identity=identity, context=context, protocol=protocol, **kwargs)
- self.poller = zmq.Poller()
- self.poller.register(self.socket, zmq.POLLIN)
- self._terminate_context = context == None
- self._client_queue = threading.RLock()
- # print("context on client", self.context)
- if handshake:
- self.handshake(kwargs.pop("handshake_timeout", 60000))
-
- def send_instruction(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT,
- invokation_timeout : typing.Optional[float] = None, execution_timeout : typing.Optional[float] = None,
- context : typing.Dict[str, typing.Any] = EMPTY_DICT,
- argument_schema : typing.Optional[JSON] = None) -> bytes:
- """
- send message to server.
-
- client's message to server:
- ::
-
- [address, bytes(), client type, message type, messsage id,
- [ 0 , 1 , 2 , 3 , 4 ,
-
- timeout, instruction, arguments, execution context]
- 5 , 6 , 7 , 8 ]
-
- Execution Context Definitions (typing.Dict[str, typing.Any] or JSON):
- - "plain_reply" - does not return state
- - "fetch_execution_logs" - fetches logs that were accumulated while execution
-
- Parameters
- ----------
- instruction: str
- unique str identifying a server side or ``Thing`` resource. These values corresponding
- to automatically extracted name from the object name or the URL_path prepended with the instance name.
- arguments: Dict[str, Any]
- if the instruction invokes a method, arguments of that method.
- context: Dict[str, Any]
- see execution context definitions
-
- Returns
- -------
- message id : bytes
- a byte representation of message id
- """
- message = self.craft_instruction_from_arguments(instruction, arguments, invokation_timeout, context)
- self.socket.send_multipart(message)
- self.logger.debug(f"sent instruction '{instruction}' to server '{self.instance_name}' with msg-id '{message[SM_INDEX_MESSAGE_ID]}'")
- return message[SM_INDEX_MESSAGE_ID]
-
- def recv_reply(self, message_id : bytes, timeout : typing.Optional[int] = None, raise_client_side_exception : bool = False,
- deserialize : bool = True) -> typing.List[typing.Union[bytes, typing.Dict[str, typing.Any]]]:
- """
- Receives reply from server. Messages are identified by message id, so call this method immediately after
- calling ``send_instruction()`` to avoid receiving messages out of order. Or, use other methods like
- ``execute()``, ``read_attribute()`` or ``write_attribute()``.
-
- Parameters
- ----------
- raise_client_side_exception: bool, default False
- if True, any exceptions raised during execution inside ``Thing`` instance will be raised on the client.
- See docs of ``raise_local_exception()`` for info on exception
- """
- while True:
- sockets = self.poller.poll(timeout)
- reply = None
- for socket, _ in sockets:
- try:
- message = socket.recv_multipart(zmq.NOBLOCK)
- reply = self.parse_server_message(message, raise_client_side_exception, deserialize) # type: ignore
- except zmq.Again:
- pass
- if reply:
- if message_id != reply[SM_INDEX_MESSAGE_ID]:
- self._reply_cache[message_id] = reply
- continue
- self.logger.debug("received reply with msg-id {}".format(reply[SM_INDEX_MESSAGE_ID]))
- return reply
- if timeout is not None:
- break # this should not break, technically an error, should be fixed when inventing better RPC contract
-
- def execute(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT,
- invokation_timeout : typing.Optional[float] = None, execution_timeout : typing.Optional[float] = None,
- context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception : bool = False,
- argument_schema : typing.Optional[JSON] = None,
- deserialize_reply : bool = True) -> typing.List[typing.Union[bytes, typing.Dict[str, typing.Any]]]:
- """
- send an instruction and receive the reply for it.
-
- Parameters
- ----------
- instruction: str
- unique str identifying a server side or ``Thing`` resource. These values corresponding
- to automatically extracted name from the object name or the URL_path prepended with the instance name.
- arguments: Dict[str, Any]
- if the instruction invokes a method, arguments of that method.
- context: Dict[str, Any]
- see execution context definitions
-
- Returns
- -------
- message id : bytes
- a byte representation of message id
- """
- acquire_timeout = -1 if invokation_timeout is None else invokation_timeout
- acquired = self._client_queue.acquire(timeout=acquire_timeout)
- if not acquired:
- raise TimeoutError("previous request still in progress")
- try:
- msg_id = self.send_instruction(instruction, arguments, invokation_timeout,
- execution_timeout, context, argument_schema)
- return self.recv_reply(msg_id, raise_client_side_exception=raise_client_side_exception, deserialize=deserialize_reply)
- finally:
- self._client_queue.release()
-
-
- def handshake(self, timeout : typing.Union[float, int] = 60000) -> None:
- """
- hanshake with server before sending first message
- """
- start_time = time.time_ns()
- while True:
- if timeout is not None and (time.time_ns() - start_time)/1e6 > timeout:
- raise ConnectionError(f"Unable to contact server '{self.instance_name}' from client '{self.identity}'")
- self.socket.send_multipart(self.craft_empty_message_with_type(HANDSHAKE))
- self.logger.info(f"sent Handshake to server '{self.instance_name}'")
- if self.poller.poll(500):
- try:
- message = self.socket.recv_multipart(zmq.NOBLOCK)
- except zmq.Again:
- pass
- else:
- if message[3] == HANDSHAKE: # type: ignore
- self.logger.info(f"client '{self.identity}' handshook with server '{self.instance_name}'")
- self.server_type = message[SM_INDEX_SERVER_TYPE]
- break
- else:
- raise ConnectionAbortedError(f"Handshake cannot be done with '{self.instance_name}'. Another message arrived before handshake complete.")
- else:
- self.logger.info('got no reply')
- self._monitor_socket = self.socket.get_monitor_socket()
- self.poller.register(self._monitor_socket, zmq.POLLIN)
- # sufficient to know when server dies only while receiving messages, not continuous polling
-
-
-
-class AsyncZMQClient(BaseZMQClient, BaseAsyncZMQ):
- """
- Asynchronous client to talk to a ZMQ server where the server is identified by the instance name. The identity
- of the client needs to be different from the server, unlike the ZMQ Server. The client will also perform handshakes
- if necessary.
- """
-
- def __init__(self, server_instance_name : str, identity : str, client_type = HTTP_SERVER,
- handshake : bool = True, protocol : str = "IPC", context : typing.Union[zmq.asyncio.Context, None] = None,
- **kwargs) -> None:
- BaseZMQClient.__init__(self, server_instance_name=server_instance_name, client_type=client_type, **kwargs)
- BaseAsyncZMQ.__init__(self)
- self.create_socket(context=context, identity=identity, protocol=protocol, **kwargs)
- self.poller = zmq.asyncio.Poller()
- self.poller.register(self.socket, zmq.POLLIN)
- self._terminate_context = context == None
- self._handshake_event = asyncio.Event()
- self._handshake_event.clear()
- self._client_queue = asyncio.Lock()
- if handshake:
- self.handshake(kwargs.pop("handshake_timeout", 60000))
-
- def handshake(self, timeout : typing.Optional[int] = 60000) -> None:
- """
- automatically called when handshake argument at init is True. When not automatically called, it is necessary
- to call this method before awaiting ``handshake_complete()``.
- """
- run_callable_somehow(self._handshake(timeout))
-
- async def _handshake(self, timeout : typing.Union[float, int] = 60000) -> None:
- """
- hanshake with server before sending first message
- """
- if self._monitor_socket is not None and self._monitor_socket in self.poller:
- self.poller.unregister(self._monitor_socket)
- self._handshake_event.clear()
- start_time = time.time_ns()
- while True:
- if timeout is not None and (time.time_ns() - start_time)/1e6 > timeout:
- raise ConnectionError(f"Unable to contact server '{self.instance_name}' from client '{self.identity}'")
- await self.socket.send_multipart(self.craft_empty_message_with_type(HANDSHAKE))
- self.logger.info(f"sent Handshake to server '{self.instance_name}'")
- if await self.poller.poll(500):
- try:
- message = await self.socket.recv_multipart(zmq.NOBLOCK)
- except zmq.Again:
- pass
- else:
- if message[3] == HANDSHAKE: # type: ignore
- self.logger.info(f"client '{self.identity}' handshook with server '{self.instance_name}'")
- self.server_type = message[SM_INDEX_SERVER_TYPE]
- break
- else:
- raise ConnectionAbortedError(f"Handshake cannot be done with '{self.instance_name}'. Another message arrived before handshake complete.")
- else:
- self.logger.info('got no reply')
- self._monitor_socket = self.socket.get_monitor_socket()
- self.poller.register(self._monitor_socket, zmq.POLLIN)
- self._handshake_event.set()
-
-
- async def handshake_complete(self):
- """
- wait for handshake to complete
- """
- await self._handshake_event.wait()
-
- async def async_send_instruction(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT,
- invokation_timeout : typing.Optional[float] = None, execution_timeout : typing.Optional[float] = None,
- context : typing.Dict[str, typing.Any] = EMPTY_DICT,
- argument_schema : typing.Optional[JSON] = None) -> bytes:
- """
- send message to server.
-
- client's message to server:
- ::
- [address, bytes(), client type, message type, messsage id,
- [ 0 , 1 , 2 , 3 , 4 ,
-
- timeout, instruction, arguments, execution context]
- 5 , 6 , 7 , 8 ]
-
- Execution Context Definitions (typing.Dict[str, typing.Any] or JSON):
- - "plain_reply" - does not return state
- - "fetch_execution_logs" - fetches logs that were accumulated while execution
-
- Parameters
- ----------
- instruction: str
- unique str identifying a server side or ``Thing`` resource. These values corresponding
- to automatically extracted name from the object name or the URL_path prepended with the instance name.
- arguments: Dict[str, Any]
- if the instruction invokes a method, arguments of that method.
- context: Dict[str, Any]
- see execution context definitions
-
- Returns
- -------
- message id : bytes
- a byte representation of message id
- """
- message = self.craft_instruction_from_arguments(instruction, arguments, invokation_timeout, context)
- await self.socket.send_multipart(message)
- self.logger.debug(f"sent instruction '{instruction}' to server '{self.instance_name}' with msg-id {message[SM_INDEX_MESSAGE_ID]}")
- return message[SM_INDEX_MESSAGE_ID]
-
- async def async_recv_reply(self, message_id : bytes, timeout : typing.Optional[int] = None,
- raise_client_side_exception : bool = False, deserialize : bool = True) -> typing.List[
- typing.Union[bytes, typing.Dict[str, typing.Any]]]:
- """
- Receives reply from server. Messages are identified by message id, so call this method immediately after
- calling ``send_instruction()`` to avoid receiving messages out of order. Or, use other methods like
- ``execute()``, ``read_attribute()`` or ``write_attribute()``.
-
- Parameters
- ----------
- raise_client_side_exception: bool, default False
- if True, any exceptions raised during execution inside ``Thing`` instance will be raised on the client.
- See docs of ``raise_local_exception()`` for info on exception
- """
- while True:
- sockets = await self.poller.poll(timeout)
- reply = None
- for socket, _ in sockets:
- try:
- message = await socket.recv_multipart(zmq.NOBLOCK)
- reply = self.parse_server_message(message, raise_client_side_exception, deserialize) # type: ignore
- except zmq.Again:
- pass
- if reply:
- if message_id != reply[SM_INDEX_MESSAGE_ID]:
- self._reply_cache[message_id] = reply
- continue
- self.logger.debug(f"received reply with message-id '{reply[SM_INDEX_MESSAGE_ID]}'")
- return reply
- if timeout is not None:
- break
-
- async def async_execute(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT,
- invokation_timeout : typing.Optional[float] = None, execution_timeout : typing.Optional[float] = None,
- context : typing.Dict[str, typing.Any] = EMPTY_DICT,
- raise_client_side_exception = False, argument_schema : typing.Optional[JSON] = None,
- deserialize_reply : bool = True) -> typing.List[typing.Union[bytes, typing.Dict[str, typing.Any]]]:
- """
- send an instruction and receive the reply for it.
-
- Parameters
- ----------
- instruction: str
- unique str identifying a server side or ``Thing`` resource. These values corresponding
- to automatically extracted name from the object name or the URL_path prepended with the instance name.
- arguments: Dict[str, Any]
- if the instruction invokes a method, arguments of that method.
- context: Dict[str, Any]
- see execution context definitions
-
- Returns
- -------
- message id : bytes
- a byte representation of message id
- """
- try:
- await asyncio.wait_for(self._client_queue.acquire(), timeout=invokation_timeout)
- except TimeoutError:
- raise TimeoutError("previous request still in progress") from None
- try:
- msg_id = await self.async_send_instruction(instruction, arguments, invokation_timeout, execution_timeout,
- context, argument_schema)
- return await self.async_recv_reply(msg_id, raise_client_side_exception=raise_client_side_exception,
- deserialize=deserialize_reply)
- finally:
- self._client_queue.release()
-
-
-
-class MessageMappedZMQClientPool(BaseZMQClient):
- """
- Pool of clients where message ID can track the replies irrespective of order of arrival.
- """
-
- def __init__(self, server_instance_names: typing.List[str], identity: str, client_type = HTTP_SERVER,
- handshake : bool = True, poll_timeout = 25, protocol : str = 'IPC',
- context : zmq.asyncio.Context = None, deserialize_server_messages : bool= True,
- **kwargs) -> None:
- super().__init__(server_instance_name='pool', client_type=client_type, **kwargs)
- self.identity = identity
- self.logger = kwargs.get('logger', get_default_logger('{}|{}'.format(identity, 'pooled'), logging.INFO))
- # this class does not call create_socket method
- self.context = context or zmq.asyncio.Context()
- self.pool = dict() # type: typing.Dict[str, AsyncZMQClient]
- self.poller = zmq.asyncio.Poller()
- for instance_name in server_instance_names:
- client = AsyncZMQClient(server_instance_name=instance_name,
- identity=identity, client_type=client_type, handshake=handshake, protocol=protocol,
- context=self.context, zmq_serializer=self.zmq_serializer, http_serializer=self.http_serializer,
- logger=self.logger)
- client._monitor_socket = client.socket.get_monitor_socket()
- self.poller.register(client._monitor_socket, zmq.POLLIN)
- self.pool[instance_name] = client
- # Both the client pool as well as the individual client get their serializers and client_types
- # This is required to implement pool level sending and receiving messages like polling of pool of sockets
- self.event_pool = AsyncioEventPool(len(server_instance_names))
- self.events_map = dict() # type: typing.Dict[bytes, asyncio.Event]
- self.message_map = dict()
- self.cancelled_messages = []
- self.poll_timeout = poll_timeout
- self.stop_poll = False
- self._deserialize_server_messages = deserialize_server_messages
-
- def create_new(self, server_instance_name : str, protocol : str = 'IPC') -> None:
- """
- Create new server with specified protocol. other arguments are taken from pool specifications.
-
- Parameters
- ----------
- instance_name: str
- instance name of server
- protocol: str
- protocol implemented by ZMQ server
- """
- if server_instance_name not in self.pool.keys():
- client = AsyncZMQClient(server_instance_name=server_instance_name,
- identity=self.identity, client_type=self.client_type, handshake=True, protocol=protocol,
- context=self.context, zmq_serializer=self.zmq_serializer, http_serializer=self.http_serializer,
- logger=self.logger)
- client._monitor_socket = client.socket.get_monitor_socket()
- self.poller.register(client._monitor_socket, zmq.POLLIN)
- self.pool[server_instance_name] = client
- else:
- raise ValueError(f"client for instance name '{server_instance_name}' already present in pool")
-
-
- @property
- def poll_timeout(self) -> int:
- """
- socket polling timeout in milliseconds greater than 0.
- """
- return self._poll_timeout
-
- @poll_timeout.setter
- def poll_timeout(self, value) -> None:
- if not isinstance(value, int) or value < 0:
- raise ValueError("polling period must be an integer greater than 0, not {}. Value is considered in milliseconds".format(value))
- self._poll_timeout = value
-
-
- async def poll(self) -> None:
- """
- Poll for replies from server. Since the client is message mapped, this method should be independently started
- in the event loop. Sending message and retrieving a message mapped is still carried out by other methods.
- """
- self.logger.info("client polling started for sockets for {}".format(list(self.pool.keys())))
- self.stop_poll = False
- event_loop = asyncio.get_event_loop()
- while not self.stop_poll:
- sockets = await self.poller.poll(self.poll_timeout) # type hints dont work in this line
- for socket, _ in sockets:
- while True:
- try:
- reply = self.parse_server_message(await socket.recv_multipart(zmq.NOBLOCK),
- deserialize=self._deserialize_server_messages)
- if not reply:
- continue
- except zmq.Again:
- # errors in handle_message should reach the client.
- break
- except ConnectionAbortedError:
- for client in self.pool.values():
- if client.socket.get_monitor_socket() == socket:
- self.poller.unregister(client.socket) # leave the monitor in the pool
- client.handshake(timeout=None)
- self.logger.error(f"{client.instance_name} disconnected." +
- " Unregistering from poller temporarily until server comes back.")
- break
- else:
- address, _, server_type, message_type, message_id, data, encoded_data = reply
- self.logger.debug(f"received reply from server '{address}' with message ID '{message_id}'")
- if message_id in self.cancelled_messages:
- self.cancelled_messages.remove(message_id)
- self.logger.debug(f"message_id '{message_id}' cancelled")
- continue
- event = self.events_map.get(message_id, None)
- if event:
- if len(encoded_data) > 0:
- self.message_map[message_id] = encoded_data
- else:
- self.message_map[message_id] = data
- event.set()
- else:
- if len(encoded_data) > 0:
- invalid_event_task = asyncio.create_task(self._resolve_reply(message_id, encoded_data))
- else:
- invalid_event_task = asyncio.create_task(self._resolve_reply(message_id, data))
- event_loop.call_soon(lambda: invalid_event_task)
-
-
- async def _resolve_reply(self, message_id : bytes, data : typing.Any) -> None:
- """
- This method is called when there is an asyncio Event not available for a message ID. This can happen only
- when the server replied before the client created a asyncio.Event object. check ``async_execute()`` for details.
-
- Parameters
- ----------
- message_id: bytes
- the message for which the event was not created
- data: bytes
- the data given by the server which needs to mapped to the message
- """
- max_number_of_retries = 100
- for i in range(max_number_of_retries):
- await asyncio.sleep(0.025)
- try:
- event = self.events_map[message_id]
- except KeyError:
- if message_id in self.cancelled_messages:
- # Only for safety, likely should never reach here
- self.cancelled_messages.remove(message_id)
- self.logger.debug(f'message_id {message_id} cancelled')
- return
- if i >= max_number_of_retries - 1:
- self.logger.error("unknown message id {} without corresponding event object".format(message_id))
- return
- else:
- self.message_map[message_id] = data
- event.set()
- break
-
- def assert_client_ready(self, client : AsyncZMQClient):
- if not client._handshake_event.is_set():
- raise ConnectionAbortedError(f"{client.instance_name} is currently not alive")
- if not client.socket in self.poller._map:
- raise ConnectionError("handshake complete, server is alive but client socket not yet ready to be polled." +
- "Application using MessageMappedClientPool should register the socket manually for polling." +
- "If using hololinked.server.HTTPServer, socket is waiting until HTTP Server updates its "
- "routing logic as the server has just now come alive, please try again soon.")
-
- async def async_send_instruction(self, instance_name : str, instruction : str,
- arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, invokation_timeout : typing.Optional[float] = 3,
- execution_timeout : typing.Optional[float] = None,
- context : typing.Dict[str, typing.Any] = EMPTY_DICT, argument_schema : typing.Optional[JSON] = None) -> bytes:
- """
- Send instruction to server with instance name. Replies are automatically polled & to be retrieved using
- ``async_recv_reply()``
-
- Parameters
- ----------
- instance_name: str
- instance name of the server
- instruction: str
- unique str identifying a server side or ``Thing`` resource. These values corresponding
- to automatically extracted name from the object name or the URL_path prepended with the instance name.
- arguments: Dict[str, Any]
- if the instruction invokes a method, arguments of that method.
- context: Dict[str, Any]
- see execution context definitions
-
- Returns
- -------
- message_id: bytes
- created message ID
- """
- self.assert_client_ready(self.pool[instance_name])
- message_id = await self.pool[instance_name].async_send_instruction(instruction=instruction, arguments=arguments,
- invokation_timeout=invokation_timeout, execution_timeout=execution_timeout,
- context=context, argument_schema=argument_schema)
- event = self.event_pool.pop()
- self.events_map[message_id] = event
- return message_id
-
- async def async_recv_reply(self, instance_name : str, message_id : bytes, raise_client_side_exception = False,
- timeout : typing.Optional[float] = None) -> typing.Dict[str, typing.Any]:
- """
- Receive reply for specified message ID.
-
- Parameters
- ----------
- message_id: bytes
- the message id for which reply needs to eb fetched
- raise_client_side_exceptions: bool, default False
- raise exceptions from server on client side
- timeout: float,
- client side timeout, not the same as timeout passed to server, recommended to be None in general cases.
- Server side timeouts ensure start of execution of instructions within specified timeouts and
- drops execution altogether if timeout occured. Client side timeouts only wait for message to come within
- the timeout, but do not gaurantee non-execution.
-
- Returns
- -------
- reply: dict, Any
- dictionary when plain reply is False, any value returned from execution on the server side if plain reply is
- True.
-
- Raises
- ------
- ValueError:
- if supplied message id is not valid
- TimeoutError:
- if timeout is not None and reply did not arrive
- """
- try:
- event = self.events_map[message_id]
- except KeyError:
- raise ValueError(f"message id {message_id} unknown.") from None
- while True:
- try:
- await asyncio.wait_for(event.wait(), timeout if (timeout and timeout > 0) else 5)
- # default 5 seconds because we want to check if server is also dead
- if event.is_set():
- break
- self.assert_client_ready(self.pool[instance_name])
- except TimeoutError:
- if timeout is None:
- continue
- self.cancelled_messages.append(message_id)
- self.logger.debug(f'message_id {message_id} added to list of cancelled messages')
- raise TimeoutError(f"Execution not completed within {timeout} seconds") from None
- self.events_map.pop(message_id)
- self.event_pool.completed(event)
- reply = self.message_map.pop(message_id)
- if raise_client_side_exception and reply.get('exception', None) is not None:
- self.raise_local_exception(reply['exception'])
- return reply
-
- async def async_execute(self, instance_name : str, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT,
- *, context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception = False,
- invokation_timeout : typing.Optional[float] = 5, execution_timeout : typing.Optional[float] = None,
- argument_schema : typing.Optional[JSON] = None) -> typing.Dict[str, typing.Any]:
- """
- sends message and receives reply.
-
- Parameters
- ----------
- instance_name: str
- instance name of the server
- instruction: str
- unique str identifying a server side or ``Thing`` resource. These values corresponding
- to automatically extracted name from the object name or the URL_path prepended with the instance name.
- arguments: Dict[str, Any]
- if the instruction invokes a method, arguments of that method.
- context: Dict[str, Any]
- see execution context definitions
- raise_client_side_exceptions: bool, default False
- raise exceptions from server on client side
- invokation_timeout: float, default 5
- server side timeout
- execution_timeout: float, default None
- client side timeout, not the same as timeout passed to server, recommended to be None in general cases.
- Server side timeouts ensure start of execution of instructions within specified timeouts and
- drops execution altogether if timeout occured. Client side timeouts only wait for message to come within
- the timeout, but do not gaurantee non-execution.
- """
- message_id = await self.async_send_instruction(instance_name=instance_name, instruction=instruction,
- arguments=arguments, invokation_timeout=invokation_timeout,
- execution_timeout=execution_timeout, context=context,
- argument_schema=argument_schema)
- return await self.async_recv_reply(instance_name=instance_name, message_id=message_id,
- raise_client_side_exception=raise_client_side_exception, timeout=None)
-
- def start_polling(self) -> None:
- """
- register the server message polling loop in the asyncio event loop.
- """
- event_loop = asyncio.get_event_loop()
- event_loop.call_soon(lambda: asyncio.create_task(self.poll()))
-
- def stop_polling(self):
- """
- stop polling for replies from server
- """
- self.stop_poll = True
-
- async def async_execute_in_all(self, instruction : str, instance_names : typing.Optional[typing.List[str]] = None,
- arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, context : typing.Dict[str, typing.Any] = EMPTY_DICT,
- invokation_timeout : typing.Optional[float] = 5, execution_timeout : typing.Optional[float] = None,
- ) -> typing.Dict[str, typing.Any]:
- """
- execute a specified instruction in all Thing including eventloops
- """
- if not instance_names:
- instance_names = self.pool.keys()
- gathered_replies = await asyncio.gather(*[
- self.async_execute(instance_name, instruction, arguments,
- context=context, raise_client_side_exception=False,
- invokation_timeout=invokation_timeout, execution_timeout=execution_timeout)
- for instance_name in instance_names])
- replies = dict()
- for instance_name, reply in zip(instance_names, gathered_replies):
- replies[instance_name] = reply
- return replies
-
- async def async_execute_in_all_things(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT,
- context : typing.Dict[str, typing.Any] = EMPTY_DICT,
- invokation_timeout : typing.Optional[float] = 5, execution_timeout : typing.Optional[float] = None,
- ) -> typing.Dict[str, typing.Any]: # raise_client_side_exception = False
- """
- execute the same instruction in all Things, eventloops are excluded.
- """
- return await self.async_execute_in_all(instruction=instruction,
- instance_names=[instance_name for instance_name, client in self.pool.items() if client.server_type == ServerTypes.THING],
- arguments=arguments, context=context, invokation_timeout=invokation_timeout,
- execution_timeout=execution_timeout)
-
- async def async_execute_in_all_eventloops(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT,
- context : typing.Dict[str, typing.Any] = EMPTY_DICT,
- invokation_timeout : typing.Optional[float] = 5, execution_timeout : typing.Optional[float] = None,
- ) -> typing.Dict[str, typing.Any]: # raise_client_side_exception = False
- """
- execute the same instruction in all eventloops.
- """
- return await self.async_execute_in_all(instruction=instruction,
- instance_names=[instance_name for instance_name, client in self.pool.items() if client.server_type == ServerTypes.EVENTLOOP],
- arguments=arguments, context=context, invokation_timeout=invokation_timeout,
- execution_timeout=execution_timeout)
-
- async def ping_all_servers(self):
- """
- ping all servers connected to the client pool, calls ping() on Thing
- """
- return await self.async_execute_in_all(instruction=CommonRPC.PING)
-
- def __contains__(self, name : str) -> bool:
- return name in self.pool
-
- def __getitem__(self, key) ->AsyncZMQClient:
- return self.pool[key]
-
- def __iter__(self) -> typing.Iterator[AsyncZMQClient]:
- return iter(self.pool.values())
-
- def exit(self) -> None:
- BaseZMQ.exit(self)
- for client in self.pool.values():
- self.poller.unregister(client.socket)
- self.poller.unregister(client.socket.get_monitor_socket())
- client.exit()
- self.logger.info("all client socket unregistered from pool for '{}'".format(self.__class__))
- try:
- self.context.term()
- self.logger.info("context terminated for '{}'".format(self.__class__))
- except Exception as ex:
- self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context" +
- "'{}'. Exception message : {}".format(self.identity, str(ex)))
-
-
-class AsyncioEventPool:
- """
- creates a pool of asyncio Events to be used as a synchronisation object for MessageMappedClientPool
-
- Parameters
- ----------
- initial_number_of_events: int
- initial pool size of events
- """
-
- def __init__(self, initial_number_of_events : int) -> None:
- self.pool = [asyncio.Event() for i in range(initial_number_of_events)]
- self.size = initial_number_of_events
-
- def pop(self) -> asyncio.Event:
- """
- pop an event, new one is created if nothing left in pool
- """
- try:
- event = self.pool.pop(0)
- except IndexError:
- self.size += 1
- event = asyncio.Event()
- event.clear()
- return event
-
- def completed(self, event : asyncio.Event) -> None:
- """
- put an event back into the pool
- """
- self.pool.append(event)
-
-
-
-class EventPublisher(BaseZMQServer, BaseSyncZMQ):
-
- def __init__(self, instance_name : str, protocol : str,
- context : typing.Union[zmq.Context, None] = None, **kwargs) -> None:
- super().__init__(instance_name=instance_name, server_type=ServerTypes.THING.value,
- **kwargs)
- self.create_socket(identity=f'{instance_name}/event-publisher', bind=True, context=context,
- protocol=protocol, socket_type=zmq.PUB, **kwargs)
- self.logger.info(f"created event publishing socket at {self.socket_address}")
- self.events = set() # type: typing.Set[EventDispatcher]
- self.event_ids = set() # type: typing.Set[bytes]
-
- def register(self, event : "EventDispatcher") -> None:
- """
- register event with a specific (unique) name
-
- Parameters
- ----------
- event: ``Event``
- ``Event`` object that needs to be registered. Events created at ``__init__()`` of Thing are
- automatically registered.
- """
- if event._unique_identifier in self.events and event not in self.events:
- raise AttributeError(f"event {event._name} already found in list of events, please use another name.")
- self.event_ids.add(event._unique_identifier)
- self.events.add(event)
-
- def unregister(self, event : "EventDispatcher") -> None:
- """
- unregister event with a specific (unique) name
-
- Parameters
- ----------
- event: ``Event``
- ``Event`` object that needs to be unregistered.
- """
- if event in self.events:
- self.events.remove(event)
- self.event_ids.remove(event._unique_identifier)
- else:
- warnings.warn(f"event {event._name} not found in list of events, please use another name.", UserWarning)
-
- def publish(self, unique_identifier : bytes, data : typing.Any, *, zmq_clients : bool = True,
- http_clients : bool = True, serialize : bool = True) -> None:
- """
- publish an event with given unique name.
-
- Parameters
- ----------
- unique_identifier: bytes
- unique identifier of the event
- data: Any
- payload of the event
- serialize: bool, default True
- serialize the payload before pushing, set to False when supplying raw bytes
- zmq_clients: bool, default True
- pushes event to RPC clients
- http_clients: bool, default True
- pushed event to HTTP clients
- """
- if unique_identifier in self.event_ids:
- if serialize:
- if isinstance(self.zmq_serializer , JSONSerializer):
- self.socket.send_multipart([unique_identifier, self.http_serializer.dumps(data)])
- return
- if zmq_clients:
- # TODO - event id should not any longer be unique
- self.socket.send_multipart([b'zmq-' + unique_identifier, self.zmq_serializer.dumps(data)])
- if http_clients:
- self.socket.send_multipart([unique_identifier, self.http_serializer.dumps(data)])
- elif not isinstance(self.zmq_serializer , JSONSerializer):
- if zmq_clients:
- self.socket.send_multipart([b'zmq-' + unique_identifier, data])
- if http_clients:
- self.socket.send_multipart([unique_identifier, data])
- else:
- self.socket.send_multipart([unique_identifier, data])
- else:
- raise AttributeError("event name {} not yet registered with socket {}".format(unique_identifier, self.socket_address))
-
- def exit(self):
- if not hasattr(self, 'logger'):
- self.logger = get_default_logger('{}|{}'.format(self.__class__.__name__, uuid4()))
- try:
- self.socket.close(0)
- self.logger.info("terminated event publishing socket with address '{}'".format(self.socket_address))
- except Exception as E:
- self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context at address '{}'. Exception message : {}".format(
- self.socket_address, str(E)))
- try:
- self.context.term()
- self.logger.info("terminated context of event publishing socket with address '{}'".format(self.socket_address))
- except Exception as E:
- self.logger.warning("could not properly terminate socket or attempted to terminate an already terminated socket of event publishing socket at address '{}'. Exception message : {}".format(
- self.socket_address, str(E)))
-
-
-
-class BaseEventConsumer(BaseZMQClient):
- """
- Consumes events published at PUB sockets using SUB socket.
-
- Parameters
- ----------
- unique_identifier: str
- identifier of the event registered at the PUB socket
- socket_address: str
- socket address of the event publisher (``EventPublisher``)
- identity: str
- unique identity for the consumer
- client_type: bytes
- b'HTTP_SERVER' or b'PROXY'
- **kwargs:
- protocol: str
- TCP, IPC or INPROC
- http_serializer: JSONSerializer
- json serializer instance for HTTP_SERVER client type
- zmq_serializer: BaseSerializer
- serializer for RPC clients
- server_instance_name: str
- instance name of the Thing publishing the event
- """
-
- def __init__(self, unique_identifier : str, socket_address : str,
- identity : str, client_type = b'HTTP_SERVER',
- **kwargs) -> None:
- self._terminate_context : bool
- protocol = socket_address.split('://', 1)[0].upper()
- super().__init__(server_instance_name=kwargs.get('server_instance_name', None),
- client_type=client_type, **kwargs)
- self.create_socket(identity=identity, bind=False, context=self.context,
- socket_address=socket_address, socket_type=zmq.SUB, protocol=protocol, **kwargs)
- self.unique_identifier = bytes(unique_identifier, encoding='utf-8')
- self.socket.setsockopt(zmq.SUBSCRIBE, self.unique_identifier)
- # pair sockets cannot be polled unforunately, so we use router
- self.interruptor = self.context.socket(zmq.PAIR)
- self.interruptor.setsockopt_string(zmq.IDENTITY, f'interrupting-server')
- self.interruptor.bind(f'inproc://{self.identity}/interruption')
- self.interrupting_peer = self.context.socket(zmq.PAIR)
- self.interrupting_peer.setsockopt_string(zmq.IDENTITY, f'interrupting-client')
- self.interrupting_peer.connect(f'inproc://{self.identity}/interruption')
- self.poller.register(self.socket, zmq.POLLIN)
- self.poller.register(self.interruptor, zmq.POLLIN)
-
-
- def exit(self):
- if not hasattr(self, 'logger'):
- self.logger = get_default_logger('{}|{}'.format(self.__class__.__name__, uuid4()))
- try:
- self.poller.unregister(self.socket)
- self.poller.unregister(self.interruptor)
- except Exception as E:
- self.logger.warning("could not properly terminate socket or attempted to terminate an already terminated socket of event consuming socket at address '{}'. Exception message : {}".format(
- self.socket_address, str(E)))
-
- try:
- self.socket.close(0)
- self.interruptor.close(0)
- self.interrupting_peer.close(0)
- self.logger.info("terminated event consuming socket with address '{}'".format(self.socket_address))
- except:
- self.logger.warning("could not terminate sockets")
-
- try:
- if self._terminate_context:
- self.context.term()
- self.logger.info("terminated context of event consuming socket with address '{}'".format(self.socket_address))
- except Exception as E:
- self.logger.warning("could not properly terminate context or attempted to terminate an already terminated context at address '{}'. Exception message : {}".format(
- self.socket_address, str(E)))
-
-
-
-class AsyncEventConsumer(BaseEventConsumer):
- """
- Listens to events published at PUB sockets using SUB socket, use in async loops.
-
- Parameters
- ----------
- unique_identifier: str
- identifier of the event registered at the PUB socket
- socket_address: str
- socket address of the event publisher (``EventPublisher``)
- identity: str
- unique identity for the consumer
- client_type: bytes
- b'HTTP_SERVER' or b'PROXY'
- **kwargs:
- protocol: str
- TCP, IPC or INPROC
- http_serializer: JSONSerializer
- json serializer instance for HTTP_SERVER client type
- zmq_serializer: BaseSerializer
- serializer for RPC clients
- server_instance_name: str
- instance name of the Thing publishing the event
- """
-
- def __init__(self, unique_identifier : str, socket_address : str, identity : str, client_type = b'HTTP_SERVER',
- context : typing.Optional[zmq.asyncio.Context] = None, **kwargs) -> None:
- self._terminate_context = context == None
- self.context = context or zmq.asyncio.Context()
- self.poller = zmq.asyncio.Poller()
- super().__init__(unique_identifier=unique_identifier, socket_address=socket_address,
- identity=identity, client_type=client_type, **kwargs)
-
- async def receive(self, timeout : typing.Optional[float] = None, deserialize = True) -> typing.Union[bytes, typing.Any]:
- """
- receive event with given timeout
-
- Parameters
- ----------
- timeout: float, int, None
- timeout in milliseconds, None for blocking
- deserialize: bool, default True
- deseriliaze the data, use False for HTTP server sent event to simply bypass
- """
- contents = None
- sockets = await self.poller.poll(timeout)
- if len(sockets) > 1:
- if socket[0] == self.interrupting_peer:
- sockets = [socket[0]]
- elif sockets[1] == self.interrupting_peer:
- sockets = [socket[1]]
- for socket, _ in sockets:
- try:
- _, contents = await socket.recv_multipart(zmq.NOBLOCK)
- except zmq.Again:
- pass
- if not deserialize or not contents:
- return contents
- if self.client_type == HTTP_SERVER:
- return self.http_serializer.loads(contents)
- elif self.client_type == PROXY:
- return self.zmq_serializer.loads(contents)
- else:
- raise ValueError("invalid client type")
-
- async def interrupt(self):
- """
- interrupts the event consumer and returns a 'INTERRUPT' string from the receive() method,
- generally should be used for exiting this object
- """
- if self.client_type == HTTP_SERVER:
- message = [self.http_serializer.dumps(f'{self.identity}/interrupting-server'),
- self.http_serializer.dumps("INTERRUPT")]
- elif self.client_type == PROXY:
- message = [self.zmq_serializer.dumps(f'{self.identity}/interrupting-server'),
- self.zmq_serializer.dumps("INTERRUPT")]
- await self.interrupting_peer.send_multipart(message)
-
-
-class EventConsumer(BaseEventConsumer):
- """
- Listens to events published at PUB sockets using SUB socket, listen in blocking fashion or use in threads.
-
- Parameters
- ----------
- unique_identifier: str
- identifier of the event registered at the PUB socket
- socket_address: str
- socket address of the event publisher (``EventPublisher``)
- identity: str
- unique identity for the consumer
- client_type: bytes
- b'HTTP_SERVER' or b'PROXY'
- **kwargs:
- protocol: str
- TCP, IPC or INPROC
- http_serializer: JSONSerializer
- json serializer instance for HTTP_SERVER client type
- zmq_serializer: BaseSerializer
- serializer for RPC clients
- server_instance_name: str
- instance name of the Thing publishing the event
- """
-
- def __init__(self, unique_identifier : str, socket_address : str, identity : str, client_type = b'HTTP_SERVER',
- context : typing.Optional[zmq.Context] = None, **kwargs) -> None:
- self._terminate_context = context == None
- self.context = context or zmq.Context()
- self.poller = zmq.Poller()
- super().__init__(unique_identifier=unique_identifier, socket_address=socket_address,
- identity=identity, client_type=client_type, **kwargs)
-
- def receive(self, timeout : typing.Optional[float] = None, deserialize = True) -> typing.Union[bytes, typing.Any]:
- """
- receive event with given timeout
-
- Parameters
- ----------
- timeout: float, int, None
- timeout in milliseconds, None for blocking
- deserialize: bool, default True
- deseriliaze the data, use False for HTTP server sent event to simply bypass
- """
- contents = None
- sockets = self.poller.poll(timeout) # typing.List[typing.Tuple[zmq.Socket, int]]
- if len(sockets) > 1:
- if socket[0] == self.interrupting_peer:
- sockets = [socket[0]]
- elif sockets[1] == self.interrupting_peer:
- sockets = [socket[1]]
- for socket, _ in sockets:
- try:
- _, contents = socket.recv_multipart(zmq.NOBLOCK)
- except zmq.Again:
- pass
- if not deserialize:
- return contents
- if self.client_type == HTTP_SERVER:
- return self.http_serializer.loads(contents)
- elif self.client_type == PROXY:
- return self.zmq_serializer.loads(contents)
- else:
- raise ValueError("invalid client type for event")
-
- def interrupt(self):
- """
- interrupts the event consumer and returns a 'INTERRUPT' string from the receive() method,
- generally should be used for exiting this object
- """
- if self.client_type == HTTP_SERVER:
- message = [self.http_serializer.dumps(f'{self.identity}/interrupting-server'),
- self.http_serializer.dumps("INTERRUPT")]
- elif self.client_type == PROXY:
- message = [self.zmq_serializer.dumps(f'{self.identity}/interrupting-server'),
- self.zmq_serializer.dumps("INTERRUPT")]
- self.interrupting_peer.send_multipart(message)
-
-
-
-from .events import EventDispatcher
-
-
-__all__ = [
- AsyncZMQServer.__name__,
- AsyncPollingZMQServer.__name__,
- ZMQServerPool.__name__,
- RPCServer.__name__,
- SyncZMQClient.__name__,
- AsyncZMQClient.__name__,
- MessageMappedZMQClientPool.__name__,
- AsyncEventConsumer.__name__,
- EventConsumer.__name__
-]
\ No newline at end of file
diff --git a/hololinked/storage/__init__.py b/hololinked/storage/__init__.py
new file mode 100644
index 00000000..55c2a934
--- /dev/null
+++ b/hololinked/storage/__init__.py
@@ -0,0 +1,15 @@
+from .database import *
+from .database import prepare_object_database
+from .json_storage import ThingJsonStorage
+from ..utils import get_a_filename_from_instance
+
+
+
+def prepare_object_storage(instance, **kwargs):
+
+ if kwargs.get('use_json_file', instance.__class__.use_json_file if hasattr(instance.__class__, 'use_json_file') else False):
+ filename = kwargs.get('json_filename', f"{get_a_filename_from_instance(instance, extension='json')}")
+ instance.db_engine = ThingJsonStorage(filename=filename, instance=instance)
+ else:
+ prepare_object_database(instance, kwargs.get('use_default_db', False), kwargs.get('db_config_file', None))
+
\ No newline at end of file
diff --git a/hololinked/server/database.py b/hololinked/storage/database.py
similarity index 88%
rename from hololinked/server/database.py
rename to hololinked/storage/database.py
index ec540d83..478a9f60 100644
--- a/hololinked/server/database.py
+++ b/hololinked/storage/database.py
@@ -10,11 +10,11 @@
from dataclasses import dataclass
from ..param import Parameterized
-from .constants import JSONSerializable
-from .config import global_config
-from .utils import pep8_to_URL_path
-from .serializers import PythonBuiltinJSONSerializer as JSONSerializer, BaseSerializer
-from .property import Property
+from ..constants import JSONSerializable
+from ..config import global_config
+from ..utils import pep8_to_dashed_name
+from ..serializers.serializers import PythonBuiltinJSONSerializer as JSONSerializer, BaseSerializer
+from ..core.property import Property
@@ -29,7 +29,7 @@ class SerializedProperty(MappedAsDataclass, ThingTableBase):
"""
__tablename__ = "properties"
- instance_name : Mapped[str] = mapped_column(String)
+ id : Mapped[str] = mapped_column(String)
name : Mapped[str] = mapped_column(String, primary_key=True)
serialized_value : Mapped[bytes] = mapped_column(LargeBinary)
@@ -37,22 +37,22 @@ class SerializedProperty(MappedAsDataclass, ThingTableBase):
class ThingInformation(MappedAsDataclass, ThingTableBase):
__tablename__ = "things"
- instance_name : Mapped[str] = mapped_column(String, primary_key=True)
+ id : Mapped[str] = mapped_column(String, primary_key=True)
class_name : Mapped[str] = mapped_column(String)
script : Mapped[str] = mapped_column(String)
kwargs : Mapped[JSONSerializable] = mapped_column(JSON)
- eventloop_instance_name : Mapped[str] = mapped_column(String)
+ eventloop_id : Mapped[str] = mapped_column(String)
http_server : Mapped[str] = mapped_column(String)
level : Mapped[int] = mapped_column(Integer)
level_type : Mapped[str] = mapped_column(String) # starting local to computer or global to system?
def json(self):
return {
- "instance_name" : self.instance_name,
+ "id" : self.id,
"class_name" : self.class_name,
"script" : self.script,
"kwargs" : self.kwargs,
- "eventloop_instance_name" : self.eventloop_instance_name,
+ "eventloop_id" : self.eventloop_id,
"http_server" : self.http_server,
"level" : self.level,
"level_type" : self.level_type
@@ -64,7 +64,7 @@ class DeserializedProperty: # not part of database
"""
Property with deserialized value
"""
- instance_name : str
+ id : str
name : str
value : typing.Any
@@ -78,7 +78,7 @@ class BaseDB:
def __init__(self, instance : Parameterized, serializer : typing.Optional[BaseSerializer] = None,
config_file : typing.Union[str, None] = None) -> None:
self.thing_instance = instance
- self.instance_name = instance.instance_name
+ self.id = instance.id
self.serializer = serializer
self.URL = self.create_URL(config_file)
self._batch_call_context = {}
@@ -106,7 +106,7 @@ def create_URL(self, config_file : str) -> str:
folder = self.get_temp_dir_for_class_name(self.thing_instance.__class__.__name__)
if not os.path.exists(folder):
os.makedirs(folder)
- return BaseDB.create_sqlite_URL(**dict(file=f'{folder}{os.sep}{self.instance_name}.db'))
+ return BaseDB.create_sqlite_URL(**dict(file=f'{folder}{os.sep}{self.id}.db'))
conf = BaseDB.load_conf(config_file)
if conf.get('server', None):
return BaseDB.create_postgres_URL(conf=conf)
@@ -118,7 +118,7 @@ def get_temp_dir_for_class_name(self, class_name : str) -> str:
"""
get temporary directory for database files
"""
- return f"{global_config.TEMP_DIR}{os.sep}databases{os.sep}{pep8_to_URL_path(class_name)}"
+ return f"{global_config.TEMP_DIR}{os.sep}databases{os.sep}{pep8_to_dashed_name(class_name)}"
@classmethod
def create_postgres_URL(cls, conf : str = None, database : typing.Optional[str] = None,
@@ -219,8 +219,8 @@ class ThingDB(BaseSyncDB):
Parameters
----------
- instance_name: str
- ``instance_name`` of the ``Thing``
+ id: str
+ ``id`` of the ``Thing``
serializer: BaseSerializer
serializer used by the ``Thing``. The serializer to use for serializing and deserializing data (for example
property serializing into database for storage).
@@ -239,7 +239,7 @@ def fetch_own_info(self): # -> ThingInformation:
if not inspect_database(self.engine).has_table("things"):
return
with self.sync_session() as session:
- stmt = select(ThingInformation).filter_by(instance_name=self.instance_name)
+ stmt = select(ThingInformation).filter_by(id=self.id)
data = session.execute(stmt)
data = data.scalars().all()
if len(data) == 0:
@@ -268,7 +268,7 @@ def get_property(self, property : typing.Union[str, Property], deserialized : bo
"""
with self.sync_session() as session:
name = property if isinstance(property, str) else property.name
- stmt = select(SerializedProperty).filter_by(instance_name=self.instance_name, name=name)
+ stmt = select(SerializedProperty).filter_by(id=self.id, name=name)
data = session.execute(stmt)
prop = data.scalars().all() # type: typing.Sequence[SerializedProperty]
if len(prop) == 0:
@@ -296,7 +296,7 @@ def set_property(self, property : typing.Union[str, Property], value : typing.An
return
with self.sync_session() as session:
name = property if isinstance(property, str) else property.name
- stmt = select(SerializedProperty).filter_by(instance_name=self.instance_name,
+ stmt = select(SerializedProperty).filter_by(id=self.id,
name=name)
data = session.execute(stmt)
prop = data.scalars().all()
@@ -307,7 +307,7 @@ def set_property(self, property : typing.Union[str, Property], value : typing.An
prop.serialized_value = self.serializer.dumps(value)
else:
prop = SerializedProperty(
- instance_name=self.instance_name,
+ id=self.id,
name=name,
serialized_value=self.serializer.dumps(getattr(self.thing_instance, name))
)
@@ -334,7 +334,7 @@ def get_properties(self, properties : typing.Dict[typing.Union[str, Property], t
names = []
for obj in properties.keys():
names.append(obj if isinstance(obj, str) else obj.name)
- stmt = select(SerializedProperty).filter_by(instance_name=self.instance_name).filter(
+ stmt = select(SerializedProperty).filter_by(id=self.id).filter(
SerializedProperty.name.in_(names))
data = session.execute(stmt)
unserialized_props = data.scalars().all()
@@ -362,7 +362,7 @@ def set_properties(self, properties : typing.Dict[typing.Union[str, Property], t
names = []
for obj in properties.keys():
names.append(obj if isinstance(obj, str) else obj.name)
- stmt = select(SerializedProperty).filter_by(instance_name=self.instance_name).filter(
+ stmt = select(SerializedProperty).filter_by(id=self.id).filter(
SerializedProperty.name.in_(names))
data = session.execute(stmt)
db_props = data.scalars().all()
@@ -376,7 +376,7 @@ def set_properties(self, properties : typing.Dict[typing.Union[str, Property], t
db_prop.serialized_value = self.serializer.dumps(value)
else:
prop = SerializedProperty(
- instance_name=self.instance_name,
+ id=self.id,
name=name,
serialized_value=self.serializer.dumps(value)
)
@@ -394,7 +394,7 @@ def get_all_properties(self, deserialized : bool = True) -> typing.Dict[str, typ
deserilize the properties if True
"""
with self.sync_session() as session:
- stmt = select(SerializedProperty).filter_by(instance_name=self.instance_name)
+ stmt = select(SerializedProperty).filter_by(id=self.id)
data = session.execute(stmt)
existing_props = data.scalars().all() # type: typing.Sequence[SerializedProperty]
if not deserialized:
@@ -427,7 +427,7 @@ def create_missing_properties(self, properties : typing.Dict[str, Property],
for name, new_prop in properties.items():
if name not in existing_props:
prop = SerializedProperty(
- instance_name=self.instance_name,
+ id=self.id,
name=new_prop.name,
serialized_value=self.serializer.dumps(getattr(self.thing_instance,
new_prop.name))
@@ -463,7 +463,29 @@ def __exit__(self, exc_type, exc_value, exc_tb) -> None:
pass
-
+def prepare_object_database(instance, default_db : bool = False, config_file : str = None):
+ if not default_db and not config_file:
+ return
+ # 1. create engine
+ instance.db_engine = ThingDB(instance=instance, config_file=None if default_db else config_file,
+ serializer=instance.zmq_serializer)
+ # 2. create an object metadata to be used by different types of clients
+ object_info = instance.db_engine.fetch_own_info()
+ if object_info is not None:
+ instance._object_info = object_info
+ # 3. enter properties to DB if not already present
+ if instance.object_info.class_name != instance.__class__.__name__:
+ raise ValueError("Fetched instance name and class name from database not matching with the ",
+ "current Thing class/subclass. You might be reusing an instance name of another subclass ",
+ "and did not remove the old data from database. Please clean the database using database tools to ",
+ "start fresh.")
+ instance.load_properties_from_DB()
+
+
+
+
+
+
__all__ = [
BaseAsyncDB.__name__,
BaseSyncDB.__name__,
diff --git a/hololinked/server/json_storage.py b/hololinked/storage/json_storage.py
similarity index 97%
rename from hololinked/server/json_storage.py
rename to hololinked/storage/json_storage.py
index a554d4e5..84cb3e93 100644
--- a/hololinked/server/json_storage.py
+++ b/hololinked/storage/json_storage.py
@@ -1,8 +1,8 @@
import os
import threading
from typing import Any, Dict, List, Optional, Union
-from .serializers import JSONSerializer
-from .property import Property
+from ..serializers import JSONSerializer
+from ..core.property import Property
from ..param import Parameterized
@@ -21,10 +21,10 @@ class ThingJsonStorage:
serializer : JSONSerializer, optional
Serializer used for encoding and decoding JSON data. Defaults to an instance of ``JSONSerializer``.
"""
- def __init__(self, filename: str, instance: Parameterized, serializer: Optional[Any]=None):
+ def __init__(self, filename: str, instance: Parameterized, serializer: Optional[Any] = None):
self.filename = filename
self.thing_instance = instance
- self.instance_name = instance.instance_name
+ self.id = instance.id
self._serializer = serializer or JSONSerializer()
self._lock = threading.RLock()
self._data = self._load()
diff --git a/hololinked/td/__init__.py b/hololinked/td/__init__.py
new file mode 100644
index 00000000..3aad8505
--- /dev/null
+++ b/hololinked/td/__init__.py
@@ -0,0 +1,4 @@
+from .interaction_affordance import (InteractionAffordance, PropertyAffordance,
+ ActionAffordance, EventAffordance)
+from .tm import ThingModel
+from .td import ThingDescription
diff --git a/hololinked/td/base.py b/hololinked/td/base.py
new file mode 100644
index 00000000..6aeced92
--- /dev/null
+++ b/hololinked/td/base.py
@@ -0,0 +1,56 @@
+import inspect
+from typing import ClassVar, Optional
+import typing
+from pydantic import BaseModel
+
+
+class Schema(BaseModel):
+ """
+ Base pydantic model for all WoT schema definitions; implements a custom json() serialization
+ method (replacing the asdict utility used by the earlier dataclass-based implementation)
+ """
+
+ skip_keys: ClassVar = [] # override this to skip some dataclass attributes in the schema
+
+ def json(self, indent: Optional[int] = None) -> dict[str, typing.Any]:
+ """Return the JSON representation of the schema"""
+ return self.model_dump(
+ mode="json",
+ by_alias=True,
+ exclude_unset=True,
+ exclude=[
+ "instance",
+ "skip_properties",
+ "skip_actions",
+ "skip_events",
+ "instance",
+ "ignore_errors",
+ "allow_loose_schema"
+ ]
+ )
+
+ @classmethod
+ def format_doc(cls, doc: str):
+ """strip tabs, newlines, whitespaces etc. to format the docstring nicely"""
+ # doc_as_list = doc.split('\n')
+ # final_doc = []
+ # for index, line in enumerate(doc_as_list):
+ # line = line.lstrip('\n').rstrip('\n')
+ # line = line.lstrip('\t').rstrip('\t')
+ # line = line.lstrip('\n').rstrip('\n')
+ # line = line.lstrip().rstrip()
+ # if index > 0:
+ # line = ' ' + line # add space to left in case of new line
+ # final_doc.append(line)
+ # final_doc = ''.join(final_doc)
+ doc = inspect.cleandoc(doc)
+ # Remove everything after "Parameters\n-----" if present (when using numpydoc)
+ marker = "Parameters\n-----"
+ idx = doc.find(marker)
+ if idx != -1:
+ doc = doc[:idx]
+ doc = doc.replace('\n', ' ')
+ doc = doc.replace('\t', ' ')
+ doc = doc.lstrip().rstrip()
+ return doc
+
diff --git a/hololinked/td/data_schema.py b/hololinked/td/data_schema.py
new file mode 100644
index 00000000..b1afe06e
--- /dev/null
+++ b/hololinked/td/data_schema.py
@@ -0,0 +1,446 @@
+from typing import Any, ClassVar, Optional
+from pydantic import BaseModel, Field, ConfigDict, RootModel
+
+from .base import Schema
+from .utils import get_summary
+from ..constants import JSON, JSONSerializable
+from ..schema_validators.json_schema import JSONSchema
+from ..core.properties import (String, Number, Integer, Boolean,
+ List, TypedList, Tuple, TupleSelector,
+ Selector, TypedDict, TypedKeyMappingsDict,
+ ClassSelector, Filename, Foldername, Path)
+from ..core import Property
+
+
+
+class DataSchema(Schema):
+ """
+ implements data schema
+
+ [Schema](https://www.w3.org/TR/wot-thing-description11/#sec-data-schema-vocabulary-definition)
+ [Supported Fields](https://www.w3.org/TR/wot-thing-description11/#data-schema-fields)
+ """
+ title: str = None
+ titles: Optional[dict[str, str]] = None
+ description: str = None
+ descriptions: Optional[dict[str, str]] = None
+ const: Optional[bool] = None
+ default: Optional[Any] = None
+ readOnly: Optional[bool] = None
+ writeOnly: Optional[bool] = None # write only can be considered as actions with no return value, so not used in this repository
+ format: Optional[str] = None
+ unit: Optional[str] = None
+ type: Optional[str] = None
+ oneOf: Optional[list[JSON]] = None
+
+ model_config = ConfigDict(extra="allow")
+ _custom_schema_generators: ClassVar = dict()
+
+    def __init__(self, **kwargs):
+        # fixed: the previous argless override broke pydantic keyword
+        # construction, e.g. DataSchema(**schema_dict) in type_to_dataschema;
+        # forwarding **kwargs is backward compatible with no-arg calls
+        super().__init__(**kwargs)
+
+ def ds_build_fields_from_property(self, property: Property) -> None:
+ """populates schema information from descriptor object"""
+ assert isinstance(property, Property), f"only Property is a subclass of dataschema, given type: {type(property)}"
+ self.title = get_summary(property.doc)
+ if property.constant:
+ self.const = property.constant
+ if property.readonly:
+ self.readOnly = property.readonly
+ if property.default is not None:
+ self.default = property.default
+ if property.doc:
+ self.description = Schema.format_doc(property.doc)
+ if self.title == self.description:
+ del self.title
+ if property.label is not None:
+ self.title = property.label
+ if property.metadata and property.metadata.get("unit", None) is not None:
+ self.unit = property.metadata["unit"]
+ if property.allow_None:
+ if not hasattr(self, 'oneOf') or self.oneOf is None:
+ self.oneOf = []
+ if hasattr(self, 'type') and self.type is not None:
+ self._move_own_type_to_oneOf()
+ if not any(types["type"] in [None, "null"] for types in self.oneOf):
+ self.oneOf.append(dict(type="null"))
+
+ # & _ds prefix is used to avoid name conflicts with PropertyAffordance class
+ # you dont know what you are building, whether the data schema or something else when viewed from property affordance
+ def ds_build_from_property(self, property: Property) -> None:
+ """
+ generates the schema specific to the type,
+ calls `ds_build_fields_from_property()` after choosing the right type
+ """
+ assert isinstance(property, Property)
+
+ if not isinstance(property, Property):
+ raise TypeError(f"Property affordance schema can only be generated for Property. "
+ f"Given type {type(property)}")
+ if isinstance(property, (String, Filename, Foldername, Path)):
+ data_schema = StringSchema()
+ elif isinstance(property, (Number, Integer)):
+ data_schema = NumberSchema()
+ elif isinstance(property, Boolean):
+ data_schema = BooleanSchema()
+ elif isinstance(property, (List, TypedList, Tuple, TupleSelector)):
+ data_schema = ArraySchema()
+ elif isinstance(property, Selector):
+ data_schema = EnumSchema()
+ elif isinstance(property, (TypedDict, TypedKeyMappingsDict)):
+ data_schema = ObjectSchema()
+ elif isinstance(property, ClassSelector):
+ data_schema = OneOfSchema()
+ elif self._custom_schema_generators.get(property, NotImplemented) is not NotImplemented:
+ data_schema = self._custom_schema_generators[property]()
+ elif isinstance(property, Property) and property.model is not None:
+ from .pydantic_extensions import GenerateJsonSchemaWithoutDefaultTitles, type_to_dataschema
+ base_data_schema = DataSchema()
+ base_data_schema.ds_build_from_property(property=property)
+ if isinstance(property.model, dict):
+ given_data_schema = property.model
+ elif isinstance(property.model, (BaseModel, RootModel)):
+ given_data_schema = type_to_dataschema(property.model).model_dump(mode='json', exclude_none=True)
+
+ if base_data_schema.oneOf: # allow_None = True
+ base_data_schema.oneOf.append(given_data_schema)
+ else:
+ for key, value in given_data_schema.items():
+ setattr(base_data_schema, key, value)
+ data_schema = base_data_schema
+
+ else:
+ raise TypeError(f"WoT schema generator for this descriptor/property is not implemented. name {property.name} & type {type(property)}")
+
+ data_schema.ds_build_fields_from_property(property)
+ for field_name in data_schema.model_dump(exclude_unset=True).keys():
+ field_value = getattr(data_schema, field_name, NotImplemented)
+ if field_value is not NotImplemented:
+ setattr(self, field_name, field_value)
+
+
+ def _move_own_type_to_oneOf(self):
+ """move type to oneOf"""
+ raise NotImplementedError("Implement this method in subclass for each data type")
+
+    def _model_to_dataschema(self):
+        # NOTE(review): the original was a dangling `def` with an empty body
+        # (a syntax error) followed by a stale copy of type_to_dataschema that
+        # referenced names never imported in this module (Union, TypeAdapter,
+        # ValidationError, JSONSerializer, jsonschema_to_dataschema).
+        # The maintained implementation lives in .pydantic_extensions and is
+        # already imported in ds_build_from_property above.
+        raise NotImplementedError(
+            "use hololinked.td.pydantic_extensions.type_to_dataschema instead"
+        )
+
+
+
+class BooleanSchema(DataSchema):
+ """
+ boolean schema - https://www.w3.org/TR/wot-thing-description11/#booleanschema
+ used by Boolean descriptor
+ """
+ def __init__(self):
+ super().__init__()
+
+ def ds_build_fields_from_property(self, property) -> None:
+ """generates the schema"""
+ self.type = 'boolean'
+ super().ds_build_fields_from_property(property)
+
+ def _move_own_type_to_oneOf(self):
+ if not hasattr(self, 'type') or self.type is None:
+ return
+ if not hasattr(self, 'oneOf') or self.oneOf is None:
+ self.oneOf = []
+ self.oneOf.append(dict(type=self.type))
+ del self.type
+
+
+class StringSchema(DataSchema):
+ """
+ string schema - https://www.w3.org/TR/wot-thing-description11/#stringschema
+ used by String, Filename, Foldername, Path descriptors
+ """
+ pattern : Optional[str] = None
+ minLength: Optional[int] = None
+ maxLength: Optional[int] = None
+
+ def __init__(self):
+ super().__init__()
+
+ def ds_build_fields_from_property(self, property) -> None:
+ """generates the schema"""
+ self.type = 'string'
+ super().ds_build_fields_from_property(property)
+ if isinstance(property, String):
+ if property.regex is not None:
+ self.pattern = property.regex
+
+ def _move_own_type_to_oneOf(self):
+ if not hasattr(self, 'type') or self.type is None:
+ return
+ if not hasattr(self, 'oneOf') or self.oneOf is None:
+ self.oneOf = []
+ schema = dict(type=self.type)
+ del self.type
+ if self.pattern is not None:
+ schema['pattern'] = self.pattern
+ del self.pattern
+ self.oneOf.append(schema)
+
+
+class NumberSchema(DataSchema):
+ """
+ number schema - https://www.w3.org/TR/wot-thing-description11/#numberschema
+ used by String, Filename, Foldername, Path descriptors
+ """
+ minimum: Optional[int | float] = None
+ maximum: Optional[int | float] = None
+ exclusiveMinimum: Optional[int | float] = None
+ exclusiveMaximum: Optional[int | float] = None
+ multipleOf: Optional[int | float] = None
+
+ def __init__(self):
+ super().__init__()
+
+ def ds_build_fields_from_property(self, property) -> None:
+ """generates the schema"""
+ if isinstance(property, Integer):
+ self.type = 'integer'
+ elif isinstance(property, Number): # dont change order - one is subclass of other
+ self.type = 'number'
+ super().ds_build_fields_from_property(property)
+ if property.bounds is not None:
+ if isinstance(property.bounds[0], (int, float)): # i.e. value is not None which is allowed by param
+ if not property.inclusive_bounds[0]:
+ self.exclusiveMinimum = property.bounds[0]
+ else:
+ self.minimum = property.bounds[0]
+ if isinstance(property.bounds[1], (int, float)):
+ if not property.inclusive_bounds[1]:
+ self.exclusiveMaximum = property.bounds[1]
+ else:
+ self.maximum = property.bounds[1]
+ if property.step:
+ self.multipleOf = property.step
+
+ def _move_own_type_to_oneOf(self):
+ if not hasattr(self, 'type') or self.type is None:
+ return
+ if not hasattr(self, 'oneOf') or self.oneOf is None:
+ self.oneOf = []
+ schema = dict(type=self.type)
+ del self.type
+ for attr in ['minimum', 'maximum', 'exclusiveMinimum', 'exclusiveMaximum', 'multipleOf']:
+ if hasattr(self, attr) and getattr(self, attr) is not None:
+ schema[attr] = getattr(self, attr)
+ delattr(self, attr)
+ self.oneOf.append(schema)
+
+
+class ArraySchema(DataSchema):
+ """
+ array schema - https://www.w3.org/TR/wot-thing-description11/#arrayschema
+ Used by list, Tuple, TypedList and TupleSelector
+ """
+
+ items: Optional[DataSchema | list[DataSchema] | JSON | JSONSerializable] = None
+ maxItems: Optional[int] = Field(None, ge=0)
+ minItems: Optional[int] = Field(None, ge=0)
+
+ def __init__(self):
+ super().__init__()
+
+ def ds_build_fields_from_property(self, property) -> None:
+ """generates the schema"""
+ self.type = 'array'
+ super().ds_build_fields_from_property(property)
+ self.items = []
+ if isinstance(property, (List, Tuple, TypedList)) and property.item_type is not None:
+ if property.bounds:
+ if property.bounds[0]:
+ self.minItems = property.bounds[0]
+ if property.bounds[1]:
+ self.maxItems = property.bounds[1]
+ if isinstance(property.item_type, (list, tuple)):
+ for typ in property.item_type:
+ self.items.append(dict(type=JSONSchema.get_type(typ)))
+ elif property.item_type is not None:
+ self.items.append(dict(type=JSONSchema.get_type(property.item_type)))
+ elif isinstance(property, TupleSelector):
+ objects = list(property.objects)
+ for obj in objects:
+ if any(types["type"] == JSONSchema._replacements.get(type(obj), None) for types in self.items):
+ continue
+ self.items.append(dict(type=JSONSchema.get_type(type(obj))))
+ if len(self.items) == 0:
+ del self.items
+ elif len(self.items) > 1:
+ self.items = dict(oneOf=self.items)
+
+ def _move_own_type_to_oneOf(self):
+ if not hasattr(self, 'type') or self.type is None:
+ return
+ if not hasattr(self, 'oneOf') or self.oneOf is None:
+ self.oneOf = []
+ schema = dict(type=self.type)
+ del self.type
+ for attr in ['items', 'maxItems', 'minItems']:
+ if hasattr(self, attr) and getattr(self, attr) is not None:
+ schema[attr] = getattr(self, attr)
+ delattr(self, attr)
+ self.oneOf.append(schema)
+
+
+class ObjectSchema(DataSchema):
+ """
+ object schema - https://www.w3.org/TR/wot-thing-description11/#objectschema
+ Used by TypedDict
+ """
+ properties: Optional[JSON] = None
+ required: Optional[list[str]] = None
+
+ def __init__(self):
+ super().__init__()
+
+ def ds_build_fields_from_property(self, property) -> None:
+ """generates the schema"""
+ super().ds_build_fields_from_property(property)
+ properties = None
+ required = None
+ if hasattr(property, 'json_schema'):
+ # Code will not reach here for now as have not implemented schema for typed dictionaries.
+ properties = property.json_schema["properties"]
+ if property.json_schema.get("required", NotImplemented) is not NotImplemented:
+ required = property.json_schema["required"]
+ if not property.allow_None:
+ self.type = 'object'
+ if properties:
+ self.properties = properties
+ if required:
+ self.required = required
+ else:
+ schema = dict(type='object')
+ if properties:
+ schema['properties'] = properties
+ if required:
+ schema['required'] = required
+ self.oneOf.append(schema)
+
+
+class OneOfSchema(DataSchema):
+    """
+    custom schema to deal with ClassSelector to fill oneOf field correctly
+    https://www.w3.org/TR/wot-thing-description11/#dataschema
+    """
+    properties: Optional[JSON] = None
+    required: Optional[list[str]] = None
+    items: Optional[JSON | JSONSerializable] = None
+    minItems: Optional[int] = None
+    maxItems: Optional[int] = None
+    # ClassSelector can technically have a JSON serializable as a class_
+
+    def __init__(self):
+        super().__init__()
+
+    def ds_build_fields_from_property(self, property) -> None:
+        """generates the schema - one subschema per allowed type, de-duplicated"""
+        self.oneOf = []
+        if isinstance(property, ClassSelector):
+            if not property.isinstance:
+                # fixed message: this branch fires when isinstance is False,
+                # not True as the old text claimed
+                raise NotImplementedError("WoT TD for ClassSelector with isinstance set to False is not supported yet. " +
+                                    "Consider using this property in a different way.")
+            if isinstance(property.class_, (list, tuple)):
+                objects = list(property.class_)
+            else:
+                objects = [property.class_]
+        elif isinstance(property, Selector):
+            objects = list(property.objects)
+        else:
+            raise TypeError(f"EnumSchema and OneOfSchema supported only for Selector and ClassSelector. Given Type - {property}")
+        for obj in objects:
+            # skip duplicates mapping to the same JSON type
+            if any(types["type"] == JSONSchema._replacements.get(type(obj), None) for types in self.oneOf):
+                continue
+            if isinstance(property, ClassSelector):
+                if not JSONSchema.is_allowed_type(obj):
+                    raise TypeError(f"Object for wot-td has invalid type for JSON conversion. Given type - {obj}. " +
+                                "Use JSONSchema.register_replacements on hololinked.wot.td.JSONSchema object to recognise the type.")
+                subschema = dict(type=JSONSchema.get_type(obj))
+                if JSONSchema.is_supported(obj):
+                    subschema.update(JSONSchema.get(obj))
+                self.oneOf.append(subschema)
+            elif isinstance(property, Selector):
+                if JSONSchema.get_type(type(obj)) == "null":
+                    continue
+                self.oneOf.append(dict(type=JSONSchema.get_type(type(obj))))
+        super().ds_build_fields_from_property(property)
+        self.cleanup()
+
+    def cleanup(self):
+        """flatten oneOf into plain fields when only one subschema remains"""
+        if len(self.oneOf) == 1:
+            oneOf = self.oneOf[0]
+            self.type = oneOf["type"]
+            if oneOf["type"] == 'object':
+                if oneOf.get("properties", NotImplemented) is not NotImplemented:
+                    self.properties = oneOf["properties"]
+                if oneOf.get("required", NotImplemented) is not NotImplemented:
+                    self.required = oneOf["required"]
+            elif oneOf["type"] == 'array':
+                if oneOf.get("items", NotImplemented) is not NotImplemented:
+                    self.items = oneOf["items"]
+                # fixed: previously checked "maxItems" before reading
+                # "minItems", raising KeyError / assigning the wrong bound
+                if oneOf.get("minItems", NotImplemented) is not NotImplemented:
+                    self.minItems = oneOf["minItems"]
+                if oneOf.get("maxItems", NotImplemented) is not NotImplemented:
+                    self.maxItems = oneOf["maxItems"]
+            del self.oneOf
+
+
+class EnumSchema(OneOfSchema):
+ """
+ custom schema to fill enum field of property affordance correctly
+ https://www.w3.org/TR/wot-thing-description11/#dataschema
+ """
+ enum: Optional[list[Any]] = None
+
+ def __init__(self):
+ super().__init__()
+
+ def ds_build_fields_from_property(self, property) -> None:
+ """generates the schema"""
+ assert isinstance(property, Selector), f"EnumSchema compatible property is only Selector, not {property.__class__}"
+ self.enum = list(property.objects)
+ super().ds_build_fields_from_property(property)
+
+
+
diff --git a/hololinked/td/forms.py b/hololinked/td/forms.py
new file mode 100644
index 00000000..de02bfd0
--- /dev/null
+++ b/hololinked/td/forms.py
@@ -0,0 +1,49 @@
+import typing
+from pydantic import Field
+from .base import Schema
+from ..constants import JSON
+
+
+
+class ExpectedResponse(Schema):
+ """
+ Form property.
+ schema - https://www.w3.org/TR/wot-thing-description11/#expectedresponse
+ """
+ contentType : str
+
+ def __init__(self):
+ super().__init__()
+
+
+class AdditionalExpectedResponse(Schema):
+ """
+ Form field for additional responses which are different from the usual response.
+ schema - https://www.w3.org/TR/wot-thing-description11/#additionalexpectedresponse
+ """
+ success: bool = Field(default=False)
+ contentType: str = Field(default='application/json')
+ response_schema: typing.Optional[JSON] = Field(default='exception', alias='schema')
+
+ def __init__(self):
+ super().__init__()
+
+
+class Form(Schema):
+ """
+ Form hypermedia.
+ schema - https://www.w3.org/TR/wot-thing-description11/#form
+ """
+ href : str
+ op : str
+ htv_methodName : str
+ contentType : typing.Optional[str]
+ additionalResponses : typing.Optional[typing.List[AdditionalExpectedResponse]]
+ contentEncoding : typing.Optional[str]
+ security : typing.Optional[str]
+ scopes : typing.Optional[str]
+ response : typing.Optional[ExpectedResponse]
+ subprotocol : typing.Optional[str]
+
+ def __init__(self):
+ super().__init__()
\ No newline at end of file
diff --git a/hololinked/td/interaction_affordance.py b/hololinked/td/interaction_affordance.py
new file mode 100644
index 00000000..5bb84fc2
--- /dev/null
+++ b/hololinked/td/interaction_affordance.py
@@ -0,0 +1,455 @@
+from enum import Enum
+import typing
+from typing import ClassVar, Optional
+from pydantic import ConfigDict
+
+from .base import Schema
+from .data_schema import DataSchema
+from .forms import Form
+from .utils import get_summary
+from ..constants import JSON, ResourceTypes
+from ..core.property import Property
+from ..core.actions import Action
+from ..core.events import Event
+from ..core.thing import Thing, ThingMeta
+
+
+
+class InteractionAffordance(Schema):
+ """
+ Implements schema information common to all interaction affordances.
+
+ [Specification Definitions](https://www.w3.org/TR/wot-thing-description11/#interactionaffordance)
+ [UML Diagram](https://docs.hololinked.dev/UML/PDF/InteractionAffordance.pdf)
+ [Supported Fields]()
+ """
+ title: Optional[str] = None
+ titles: Optional[typing.Dict[str, str]] = None
+ description: Optional[str] = None
+ descriptions: Optional[typing.Dict[str, str]] = None
+ forms: Optional[typing.List[Form]] = None
+ # uri variables
+
+ _custom_schema_generators: ClassVar = dict()
+ model_config = ConfigDict(extra="allow")
+
+ def __init__(self):
+ super().__init__()
+ self._name = None
+ self._objekt = None
+ self._thing_id = None
+ self._thing_cls = None
+ self._owner = None
+
+ @property
+ def what(self) -> Enum:
+ """Whether it is a property, action or event"""
+ raise NotImplementedError("Unknown interaction affordance - implement in subclass of InteractionAffordance")
+
+ @property
+ def owner(self) -> Thing:
+ """Owning `Thing` instance of the interaction affordance"""
+ return self._owner
+
+ @owner.setter
+ def owner(self, value):
+ if self._owner is not None:
+ raise ValueError(f"owner is already set for this {self.what.name.lower()} affordance, " +
+ "recreate the affordance to change owner")
+ if not isinstance(value, (Thing, ThingMeta)):
+ raise TypeError(f"owner must be instance of Thing, given type {type(value)}")
+ self._owner = value
+ if isinstance(value, Thing):
+ self._thing_cls = value.__class__
+ self._thing_id = value.id
+
+ @property
+ def objekt(self) -> Property | Action | Event:
+ """Object instance of the interaction affordance - `Property`, `Action` or `Event`"""
+ return self._objekt
+
+ @objekt.setter
+ def objekt(self, value: Property | Action | Event) -> None:
+ """Set the object instance of the interaction affordance - `Property`, `Action` or `Event`"""
+ if self._objekt is not None:
+ raise ValueError(f"object is already set for this {self.what.name.lower()} affordance, " +
+ "recreate the affordance to change objekt")
+ if not (
+ (self.__class__.__name__.startswith("Property") and isinstance(value, Property)) or
+ (self.__class__.__name__.startswith("Action") and isinstance(value, Action)) or
+ (self.__class__.__name__.startswith("Event") and isinstance(value, Event))
+ ):
+ if not isinstance(value, (Property, Action, Event)):
+ raise TypeError(f"objekt must be instance of Property, Action or Event, given type {type(value)}")
+ raise ValueError(f"provide only corresponding object for {self.__class__.__name__}, " +
+ f"given object {value.__class__.__name__}")
+ self._objekt = value
+ self._name = value.name
+
+ @property
+ def name(self) -> str:
+ """Name of the interaction affordance used as key in the TD"""
+ return self._name
+
+ @property
+ def thing_id(self) -> str:
+ """ID of the `Thing` instance owning the interaction affordance"""
+ return self._thing_id
+
+ @property
+ def thing_cls(self) -> ThingMeta:
+ """`Thing` class owning the interaction affordance"""
+ return self._thing_cls
+
+ def build(self, interaction: Property | Action | Event, owner: Thing) -> None:
+ """
+ populate the fields of the schema for the specific interaction affordance
+
+ Parameters
+ ----------
+ interaction: Property | Action | Event
+ interaction object for which the schema is to be built
+ owner: Thing
+ owner of the interaction affordance
+ """
+ raise NotImplementedError("build must be implemented in subclass of InteractionAffordance")
+
+ def build_forms(self, protocol: str, authority: str) -> None:
+ """
+        build the forms for the specific protocol for each supported operation
+
+ Parameters
+ ----------
+ protocol: str
+ protocol used for the interaction
+ authority: str
+ authority of the interaction
+ """
+ raise NotImplementedError("build_forms must be implemented in subclass of InteractionAffordance")
+
+ def retrieve_form(self, op: str, default: typing.Any = None) -> JSON:
+ """
+ retrieve form for a certain operation, return default if not found
+
+ Parameters
+ ----------
+ op: str
+ operation for which the form is to be retrieved
+ default: typing.Any, optional
+ default value to return if form is not found, by default None.
+ One can make use of a sensible default value for one's logic.
+
+ Returns
+ -------
+ Dict[str, typing.Any]
+ JSON representation of the form
+ """
+ if self.forms is None:
+ return default
+ for form in self.forms:
+ if form.op == op:
+ return form
+ return default
+
+ @classmethod
+ def generate(cls,
+ interaction: Property | Action | Event,
+ owner: Thing
+ ) -> typing.Union["PropertyAffordance", "ActionAffordance", "EventAffordance"]:
+ """
+ build the schema for the specific interaction affordance within the container object.
+ Use the `json()` method to get the JSON representation of the schema.
+
+ Note that this method is different from build() method as its supposed to be used as a classmethod.
+ Internally calls build(), however some additional steps are included.
+
+ Parameters
+ ----------
+ interaction: Property | Action | Event
+ interaction object for which the schema is to be built
+ owner: Thing
+ owner of the interaction affordance
+
+ Returns
+ -------
+ typing.Union[PropertyAffordance, ActionAffordance, EventAffordance]
+ """
+ raise NotImplementedError("generate_schema must be implemented in subclass of InteractionAffordance")
+
+ @classmethod
+ def from_TD(cls, name: str, TD: JSON) -> typing.Union["PropertyAffordance", "ActionAffordance", "EventAffordance"]:
+ """
+ populate the schema from the TD and return it as the container object
+
+ Parameters
+ ----------
+ name: str
+ name of the interaction affordance used as key in the TD
+ TD: JSON
+ Thing Description JSON dictionary (the entire one, not just the fragment of the affordance)
+
+ Returns
+ -------
+ typing.Union[PropertyAffordance, ActionAffordance, EventAffordance]
+ """
+ if cls == PropertyAffordance:
+ affordance_name = "properties"
+ elif cls == ActionAffordance:
+ affordance_name = "actions"
+ elif cls == EventAffordance:
+ affordance_name = "events"
+ else:
+ raise ValueError(f"unknown affordance type - {cls}, cannot create object from TD")
+ affordance_json = TD[affordance_name][name] # type: typing.Dict[str, JSON]
+ affordance = cls()
+ for field in cls.model_fields:
+ if field in affordance_json:
+ setattr(affordance, field, affordance_json[field])
+ affordance._name = name
+ affordance._thing_id = TD["id"]
+ return affordance
+
+ @classmethod
+ def register_descriptor(cls, descriptor: Property | Action | Event, schema_generator: "InteractionAffordance") -> None:
+ """register a custom schema generator for a descriptor"""
+ if not isinstance(descriptor, (Property, Action, Event)):
+ raise TypeError("custom schema generator can also be registered for Property." +
+ f" Given type {type(descriptor)}")
+ if not isinstance(schema_generator, InteractionAffordance):
+ raise TypeError("schema generator for Property must be subclass of PropertyAfforance. " +
+ f"Given type {type(schema_generator)}" )
+ InteractionAffordance._custom_schema_generators[descriptor] = schema_generator
+
+    def __hash__(self):
+        # fixed precedence: the conditional previously bound over the entire
+        # string expression, so the hash ignored `name` when thing_cls was
+        # unset and ignored `thing_id` when it was set
+        return hash((self.thing_id or "") + ("" if not self.thing_cls else self.thing_cls.__name__) + self.name)
+
+ def __str__(self):
+ if self.thing_cls:
+ return f"{self.__class__.__name__}({self.thing_cls.__name__}({self.thing_id}).{self.name})"
+ return f"{self.__class__.__name__}({self.name} of {self.thing_id})"
+
+ def __eq__(self, value):
+ if not isinstance(value, self.__class__):
+ return False
+ if self.thing_id is None or value.thing_id is None:
+ if self.owner is None or value.owner is None:
+ return False
+ if self.owner == value.owner and self.name == value.name:
+ return True
+ return False
+ return self.thing_id == value.thing_id and self.name == value.name
+
+
+
+class PropertyAffordance(InteractionAffordance, DataSchema):
+ """
+ Implements property affordance schema from `Property` descriptor object.
+
+ [Schema](https://www.w3.org/TR/wot-thing-description11/#propertyaffordance)
+ [UML Diagram](https://docs.hololinked.dev/UML/PDF/InteractionAffordance.pdf)
+ """
+ # [Supported Fields]()
+ observable: Optional[bool] = None
+
+ def __init__(self):
+ super().__init__()
+
+ @property
+ def what(self) -> Enum:
+ return ResourceTypes.PROPERTY
+
+ def build(self) -> None:
+ property = self.objekt
+ self.ds_build_from_property(property)
+ if property.observable: # does not disturb the default value of None, which will omit the field in the JSON
+ self.observable = property.observable
+
+ # def build_forms(self, authority: str) -> None:
+ # property = self.objekt
+ # self.forms = []
+ # for index, method in enumerate(property._remote_info.http_method):
+ # form = Form()
+ # # index is the order for http methods for (get, set, delete), generally (GET, PUT, DELETE)
+ # if (index == 1 and property.readonly) or index >= 2:
+ # continue # delete property is not a part of WoT, we also mostly never use it, so ignore.
+ # elif index == 0:
+ # form.op = 'readproperty'
+ # elif index == 1:
+ # form.op = 'writeproperty'
+ # form.href = f"{authority}{self.owner._qualified_id}{property._remote_info.URL_path}"
+ # form.htv_methodName = method.upper()
+ # form.contentType = "application/json"
+ # self.forms.append(form.asdict())
+
+ # if property._observable:
+ # self.observable = property._observable
+ # form = Form()
+ # form.op = 'observeproperty'
+ # form.href = f"{authority}{owner._full_URL_path_prefix}{property._observable_event_descriptor.URL_path}"
+ # form.htv_methodName = "GET"
+ # form.subprotocol = "sse"
+ # form.contentType = "text/plain"
+ # self.forms.append(form.asdict())
+
+ @classmethod
+ def generate(cls, property, owner = None):
+ assert isinstance(property, Property), f"property must be instance of Property, given type {type(property)}"
+ affordance = PropertyAffordance()
+ affordance.owner = owner
+ affordance.objekt = property
+ affordance.build()
+ return affordance
+
+
+
+class ActionAffordance(InteractionAffordance):
+ """
+ creates action affordance schema from actions (or methods).
+
+ [Schema](https://www.w3.org/TR/wot-thing-description11/#actionaffordance)
+ [UML Diagram](https://docs.hololinked.dev/UML/PDF/InteractionAffordance.pdf)
+ """
+ # [Supported Fields]()
+ input: JSON = None
+ output: JSON = None
+ safe: bool = None
+ idempotent: bool = None
+ synchronous: bool = None
+
+ def __init__(self):
+ super().__init__()
+
+ @property
+ def what(self):
+ return ResourceTypes.ACTION
+
+ def build(self) -> None:
+ action = self.objekt
+ assert isinstance(action, Action) # type definition
+ if action.obj.__doc__:
+ title = get_summary(action.obj.__doc__)
+ description = self.format_doc(action.obj.__doc__)
+ if title == description:
+ self.description = description
+ else:
+ self.title = title
+ self.description = description
+ if action.execution_info.argument_schema:
+ self.input = action.execution_info.argument_schema
+ if action.execution_info.return_value_schema:
+ self.output = action.execution_info.return_value_schema
+ if (
+ not (hasattr(self.owner, 'state_machine') and self.owner.state_machine is not None and
+ self.owner.state_machine.contains_object(action)) and
+ action.execution_info.idempotent
+ ):
+ self.idempotent = action.execution_info.idempotent
+ if action.execution_info.synchronous:
+ self.synchronous = action.execution_info.synchronous
+ if action.execution_info.safe:
+ self.safe = action.execution_info.safe
+
+ # def build_forms(self, protocol: str, authority : str, **protocol_metadata) -> None:
+ # self.forms = []
+ # for method in action.execution_info_validator.http_method:
+ # form = Form()
+ # form.op = 'invokeaction'
+ # form.href = f'{authority}/{self.owner.id}/{protocol_metadata.get("path", "")}/{action.name}'
+ # form.htv_methodName = method.upper()
+ # form.contentType = 'application/json'
+ # # form.additionalResponses = [AdditionalExpectedResponse().asdict()]
+ # self.forms.append(form.asdict())
+
+ @classmethod
+ def generate(cls, action: Action, owner, **kwargs) -> "ActionAffordance":
+ affordance = ActionAffordance()
+ affordance.owner = owner
+ affordance.objekt = action
+ affordance.build()
+ return affordance
+
+
+
+class EventAffordance(InteractionAffordance):
+    """
+    creates event affordance schema from events.
+
+    [Schema](https://www.w3.org/TR/wot-thing-description11/#eventaffordance)
+    [UML Diagram](https://docs.hololinked.dev/UML/PDF/InteractionAffordance.pdf)
+    """
+    # [Supported Fields]()
+    subscription: str = None
+    data: JSON = None
+
+    def __init__(self):
+        super().__init__()
+
+    @property
+    def what(self):
+        return ResourceTypes.EVENT
+
+    def build(self) -> None:
+        """populate title/description from the event doc and the data schema if given"""
+        event = self.objekt
+        assert isinstance(event, Event) # type definition
+        # fixed: guard previously tested event.__doc__ although the body
+        # reads event.doc - keep guard and usage consistent
+        if event.doc:
+            title = get_summary(event.doc)
+            description = self.format_doc(event.doc)
+            if title == description:
+                self.description = description
+            else:
+                self.title = title
+                self.description = description
+        if event.schema:
+            self.data = event.schema
+
+
+    # def build_forms(self, protocol, authority):
+    #     form = Form()
+    #     form.op = "subscribeevent"
+    #     form.href = f"{authority}{owner._full_URL_path_prefix}{event.URL_path}"
+    #     form.htv_methodName = "GET"
+    #     form.contentType = "text/plain"
+    #     form.subprotocol = "sse"
+    #     self.forms = [form.asdict()]
+
+    @classmethod
+    def generate(cls, event: Event, owner, **kwargs) -> "EventAffordance":
+        affordance = EventAffordance()
+        affordance.owner = owner
+        affordance.objekt = event
+        affordance.build()
+        return affordance
+
+
+# @dataclass(**__dataclass_kwargs)
+# class ZMQEvent(ZMQResource):
+# """
+# event name and socket address of events to be consumed by clients.
+
+# Attributes
+# ----------
+# name : str
+# name of the event, must be unique
+# obj_name: str
+# name of the event variable used to populate the ZMQ client
+# socket_address : str
+# address of the socket
+# unique_identifier: str
+# unique ZMQ identifier used in PUB-SUB model
+# what: str, default EVENT
+# is it a property, method/action or event?
+# """
+# friendly_name : str = field(default=UNSPECIFIED)
+# unique_identifier : str = field(default=UNSPECIFIED)
+# serialization_specific : bool = field(default=False)
+# socket_address : str = field(default=UNSPECIFIED)
+
+# def __init__(self, *, what : str, class_name : str, id : str, obj_name : str,
+# friendly_name : str, qualname : str, unique_identifier : str,
+# serialization_specific : bool = False, socket_address : str, doc : str) -> None:
+# super(ZMQEvent, self).__init__(what=what, class_name=class_name, id=id, obj_name=obj_name,
+# qualname=qualname, doc=doc, request_as_argument=False)
+# self.friendly_name = friendly_name
+# self.unique_identifier = unique_identifier
+# self.serialization_specific = serialization_specific
+# self.socket_address = socket_address
\ No newline at end of file
diff --git a/hololinked/td/metadata.py b/hololinked/td/metadata.py
new file mode 100644
index 00000000..429aa634
--- /dev/null
+++ b/hololinked/td/metadata.py
@@ -0,0 +1,20 @@
+import typing
+from pydantic import Field
+
+from .base import Schema
+
+
class Link(Schema):
    """
    create a web link.
    schema - https://www.w3.org/TR/wot-thing-description11/#link
    """
    # target IRI of the link
    href : str
    # context IRI from which the link originates
    anchor: typing.Optional[str]
    # link relation type (e.g. 'service-doc', 'icon')
    rel: typing.Optional[str]
    # media type hint for the target resource
    # NOTE(review): the Optional fields above have no default - under pydantic v2
    # they remain *required*; confirm whether `= None` defaults are intended.
    type: typing.Optional[str] = Field(default='application/json')
+
+
class VersionInfo(Schema):
    """
    create version info.
    schema - https://www.w3.org/TR/wot-thing-description11/#versioninfo
    """
    # version indicator of this Thing instance (mandatory per spec)
    instance: str
    # version indicator of the underlying Thing Model
    model: str
\ No newline at end of file
diff --git a/hololinked/server/td_pydantic_extensions.py b/hololinked/td/pydantic_extensions.py
similarity index 69%
rename from hololinked/server/td_pydantic_extensions.py
rename to hololinked/td/pydantic_extensions.py
index 5bc32951..d50a9411 100644
--- a/hololinked/server/td_pydantic_extensions.py
+++ b/hololinked/td/pydantic_extensions.py
@@ -5,7 +5,7 @@
from pydantic.json_schema import GenerateJsonSchema
from pydantic._internal._core_utils import is_core_schema, CoreSchemaOrField
from typing import Optional, Sequence, Union, Any, Mapping, List, Dict
-from .serializers import JSONSerializer
+from ..serializers.serializers import JSONSerializer
JSONSchema = dict[str, Any] # A type to represent JSONSchema
@@ -29,47 +29,6 @@ class Type(Enum):
null = "null"
-class DataSchema(BaseModel):
-
- field_type: Optional[TypeDeclaration] = Field(None, alias="@type")
- description: Optional[Description] = None
- title: Optional[Title] = None
- descriptions: Optional[Descriptions] = None
- titles: Optional[Titles] = None
- writeOnly: Optional[bool] = None
- readOnly: Optional[bool] = None
- oneOf: Optional[list[DataSchema]] = None
- unit: Optional[str] = None
- enum: Optional[list] = None
- # enum was `Field(None, min_length=1, unique_items=True)` but this failed with
- # generic models
- format: Optional[str] = None
- const: Optional[Any] = None
- default: Optional[Any] = None
- type: Optional[Type] = None
- # The fields below should be empty unless type==Type.array
- items: Optional[Union[DataSchema, List[DataSchema]]] = None
- maxItems: Optional[int] = Field(None, ge=0)
- minItems: Optional[int] = Field(None, ge=0)
- # The fields below should be empty unless type==Type.number or Type.integer
- minimum: Optional[Union[int, float]] = None
- maximum: Optional[Union[int, float]] = None
- exclusiveMinimum: Optional[Union[int, float]] = None
- exclusiveMaximum: Optional[Union[int, float]] = None
- multipleOf: Optional[Union[int, float]] = None
- # The fields below should be empty unless type==Type.object
- properties: Optional[Mapping[str, DataSchema]] = None
- required: Optional[list[str]] = None
- # The fields below should be empty unless type==Type.string
- minLength: Optional[int] = None
- maxLength: Optional[int] = None
- pattern: Optional[str] = None
- contentEncoding: Optional[str] = None
- contentMediaType: Optional[str] = None
-
- model_config = ConfigDict(extra="forbid")
-
-
def is_a_reference(d: JSONSchema) -> bool:
"""Return True if a JSONSchema dict is a reference
@@ -238,44 +197,6 @@ def jsonschema_to_dataschema(
return output
-def type_to_dataschema(t: Union[type, BaseModel], **kwargs) -> DataSchema:
- """Convert a Python type to a Thing Description DataSchema
-
- This makes use of pydantic's `schema_of` function to create a
- json schema, then applies some fixes to make a DataSchema
- as per the Thing Description (because Thing Description is
- almost but not quite compatible with JSONSchema).
-
- Additional keyword arguments are added to the DataSchema,
- and will override the fields generated from the type that
- is passed in. Typically you'll want to use this for the
- `title` field.
- """
- if isinstance(t, BaseModel):
- json_schema = t.model_json_schema()
- else:
- json_schema = TypeAdapter(t).json_schema()
- schema_dict = jsonschema_to_dataschema(json_schema)
- # Definitions of referenced ($ref) schemas are put in a
- # key called "definitions" or "$defs" by pydantic. We should delete this.
- # TODO: find a cleaner way to do this
- # This shouldn't be a severe problem: we will fail with a
- # validation error if other junk is left in the schema.
- for k in ["definitions", "$defs"]:
- if k in schema_dict:
- del schema_dict[k]
- schema_dict.update(kwargs)
- try:
- return DataSchema(**schema_dict)
- except ValidationError as ve:
- print(
- "Error while constructing DataSchema from the "
- "following dictionary:\n"
- + JSONSerializer().dumps(schema_dict, indent=2)
- + "Before conversion, the JSONSchema was:\n"
- + JSONSerializer().dumps(json_schema, indent=2)
- )
- raise ve
class GenerateJsonSchemaWithoutDefaultTitles(GenerateJsonSchema):
diff --git a/hololinked/td/security_definitions.py b/hololinked/td/security_definitions.py
new file mode 100644
index 00000000..9fce9594
--- /dev/null
+++ b/hololinked/td/security_definitions.py
@@ -0,0 +1,25 @@
+
+
+
+
+import typing
+from .base import Schema
+
+
class SecurityScheme(Schema):
    """
    create security scheme.
    schema - https://www.w3.org/TR/wot-thing-description11/#sec-security-vocabulary-definition
    """
    # security mechanism identifier, e.g. 'nosec', 'basic', 'bearer'
    scheme: str
    description : str
    # multi-language descriptions keyed by language tag
    descriptions : typing.Optional[typing.Dict[str, str]]
    # URI of a proxy server to use, if any
    proxy : typing.Optional[str]

    def __init__(self):
        # NOTE(review): overriding a pydantic-style __init__ without supplying field
        # values will fail validation if `scheme`/`description` are required by the
        # `Schema` base - confirm `Schema` tolerates this or that build() is always
        # called before the object is serialized.
        super().__init__()

    def build(self):
        # only 'nosec' is emitted today; authentication is delegated to the HTTP server
        self.scheme = 'nosec'
        self.description = 'currently no security scheme supported - use cookie auth directly on hololinked.server.HTTPServer object'
+
\ No newline at end of file
diff --git a/hololinked/td/td.py b/hololinked/td/td.py
new file mode 100644
index 00000000..e67aac17
--- /dev/null
+++ b/hololinked/td/td.py
@@ -0,0 +1,95 @@
+import typing
+
+
+from .base import *
+from .data_schema import *
+from .forms import *
+from .security_definitions import *
+from .metadata import *
+from .interaction_affordance import *
+from .tm import ThingModel
+
+
class ThingDescription(ThingModel):
    """
    generate Thing Description of W3C Web of Things standard.
    Refer standard - https://www.w3.org/TR/wot-thing-description11
    Refer schema - https://www.w3.org/TR/wot-thing-description11/#thing
    """
    # top-level navigation links (e.g. to sub-things)
    links : typing.Optional[typing.List[Link]]
    # top-level forms (readallproperties, writeallproperties, ...)
    forms : typing.Optional[typing.List[Form]]
    # active security scheme name(s), keys of securityDefinitions
    security : typing.Union[str, typing.List[str]]
    securityDefinitions : SecurityScheme
    # reusable data schemas referenced by forms
    schemaDefinitions : typing.Optional[typing.List[DataSchema]]


    def produce(self) -> "ThingDescription":
        # Currently only the ThingModel part is produced; forms, links and security
        # definitions are still TODO (see the commented-out code below).
        super().produce()
        # self.forms = NotImplemented
        # self.links = NotImplemented
        # self.schemaDefinitions = dict(exception=JSONSchema.get_type(Exception))
        # self.add_links()
        # self.add_top_level_forms()
        # self.add_security_definitions()
        return self
+
+# def add_links(self):
+# for name, resource in self.instance.sub_things.items():
+# if resource is self.instance: # or isinstance(resource, EventLoop):
+# continue
+# if self.links is None:
+# self.links = []
+# link = Link()
+# link.build(resource, self.instance, self.authority)
+# self.links.append(link.asdict())
+
+# def add_top_level_forms(self):
+
+# self.forms = []
+
+# properties_end_point = f"{self.authority}{self.instance._full_URL_path_prefix}/properties"
+
+# readallproperties = Form()
+# readallproperties.href = properties_end_point
+# readallproperties.op = "readallproperties"
+# readallproperties.htv_methodName = "GET"
+# readallproperties.contentType = "application/json"
+# # readallproperties.additionalResponses = [AdditionalExpectedResponse().asdict()]
+# self.forms.append(readallproperties.asdict())
+
+# writeallproperties = Form()
+# writeallproperties.href = properties_end_point
+# writeallproperties.op = "writeallproperties"
+# writeallproperties.htv_methodName = "PUT"
+# writeallproperties.contentType = "application/json"
+# # writeallproperties.additionalResponses = [AdditionalExpectedResponse().asdict()]
+# self.forms.append(writeallproperties.asdict())
+
+# readmultipleproperties = Form()
+# readmultipleproperties.href = properties_end_point
+# readmultipleproperties.op = "readmultipleproperties"
+# readmultipleproperties.htv_methodName = "GET"
+# readmultipleproperties.contentType = "application/json"
+# # readmultipleproperties.additionalResponses = [AdditionalExpectedResponse().asdict()]
+# self.forms.append(readmultipleproperties.asdict())
+
+# writemultipleproperties = Form()
+# writemultipleproperties.href = properties_end_point
+# writemultipleproperties.op = "writemultipleproperties"
+# writemultipleproperties.htv_methodName = "PATCH"
+# writemultipleproperties.contentType = "application/json"
+# # writemultipleproperties.additionalResponses = [AdditionalExpectedResponse().asdict()]
+# self.forms.append(writemultipleproperties.asdict())
+
+
+# def add_security_definitions(self):
+# self.security = 'unimplemented'
+# self.securityDefinitions = SecurityScheme().build('unimplemented', self.instance)
+
+
+# def json(self) -> JSON:
+# return self.asdict()
+
+
+
+
diff --git a/hololinked/td/tm.py b/hololinked/td/tm.py
new file mode 100644
index 00000000..3b89dae7
--- /dev/null
+++ b/hololinked/td/tm.py
@@ -0,0 +1,85 @@
+import typing
+from pydantic import Field, ConfigDict
+
+from .base import *
+from .data_schema import *
+from .forms import *
+from .security_definitions import *
+from .metadata import *
+from .interaction_affordance import *
+from ..core.state_machine import StateMachine
+
+
class ThingModel(Schema):
    """
    generate Thing Model of W3C Web of Things standard from a `Thing` instance
    by collecting its property, action and event affordances.
    """

    # JSON-LD @context; defaults to the TD 1.1 context IRI
    context: typing.List[str] | str | typing.Dict[str, str] = "https://www.w3.org/2022/wot/td/v1.1"
    # semantic @type annotation(s)
    type: typing.Optional[typing.Union[str, typing.List[str]]] = None
    # NOTE(review): `id` and `title` are annotated `str` but default to None -
    # pydantic v2 will reject None unless Schema relaxes validation; confirm.
    id: str = None
    title: str = None
    description: typing.Optional[str] = None
    version: typing.Optional[VersionInfo] = None
    created: typing.Optional[str] = None
    modified: typing.Optional[str] = None
    support: typing.Optional[str] = None
    base: typing.Optional[str] = None
    # interaction affordances, keyed by affordance name
    properties: typing.Dict[str, DataSchema] = Field(default_factory=dict)
    actions: typing.Dict[str, ActionAffordance] = Field(default_factory=dict)
    events: typing.Dict[str, EventAffordance] = Field(default_factory=dict)

    model_config = ConfigDict(extra="allow")

    def __init__(self,
                instance: "Thing",
                allow_loose_schema: typing.Optional[bool] = False,
                ignore_errors: bool = False
            ) -> None:
        """
        Parameters
        ----------
        instance: Thing
            the Thing whose affordances are described
        allow_loose_schema: bool, optional
            NOTE(review): stored but not read inside this class - confirm downstream use.
        ignore_errors: bool
            when True, schema-generation errors for individual affordances are
            logged and skipped instead of raised
        """
        # extra="allow" above is what lets these non-field attributes be set
        super().__init__()
        self.instance = instance
        self.allow_loose_schema = allow_loose_schema
        self.ignore_errors = ignore_errors


    def produce(self) -> "ThingModel":
        """create thing model"""
        self.id = self.instance.id
        self.title = self.instance.__class__.__name__
        if self.instance.__doc__:
            self.description = Schema.format_doc(self.instance.__doc__)
        self.properties = dict()
        self.actions = dict()
        self.events = dict()
        self.add_interaction_affordances()
        return self

    # not the best code and logic, but works for now
    # internal names that must never appear as affordances in the model
    skip_properties: typing.List[str] = ['expose', 'httpserver_resources', 'zmq_resources', 'gui_resources',
                        'events', 'thing_description', 'GUI', 'object_info' ]
    skip_actions: typing.List[str] = ['_set_properties', '_get_properties', '_add_property', '_get_properties_in_db',
                    'get_postman_collection', 'get_thing_description', 'get_our_temp_thing_description']
    skip_events: typing.List[str] = []


    def add_interaction_affordances(self):
        """add interaction affordances to thing model"""
        # iterate (target dict name, source items, affordance class, skip list) triplets
        for affordance, items, affordance_cls, skip_list in [
                ['properties', self.instance.properties.remote_objects.items(), PropertyAffordance, self.skip_properties],
                ['actions', self.instance.actions.descriptors.items(), ActionAffordance, self.skip_actions],
                ['events', self.instance.events.descriptors.items(), EventAffordance, self.skip_events],
            ]:
            for name, obj in items:
                if name in skip_list:
                    continue
                # 'state' is only meaningful when the Thing actually has a state machine
                if (
                    name == 'state' and affordance == 'properties' and
                    (not hasattr(self.instance, 'state_machine') or
                    not isinstance(self.instance.state_machine, StateMachine))
                ):
                    continue
                try:
                    affordance_dict = getattr(self, affordance)
                    affordance_dict[name] = affordance_cls.generate(obj, self.instance)
                except Exception as ex:
                    # re-raise unless the caller asked for best-effort generation
                    if not self.ignore_errors:
                        raise ex from None
                    self.instance.logger.error(f"Error while generating schema for {name} - {ex}")
+
\ No newline at end of file
diff --git a/hololinked/td/utils.py b/hololinked/td/utils.py
new file mode 100644
index 00000000..9cf3174e
--- /dev/null
+++ b/hololinked/td/utils.py
@@ -0,0 +1,25 @@
+from typing import Any, Optional
+from ..utils import pep8_to_dashed_name
+
def get_summary(docs: Any) -> Optional[str]:
    """Return the first line of a docstring.

    :param docs: docstring text (or any falsy value such as None or "")
    :returns: the first line of *docs* stripped of surrounding whitespace,
        or None when *docs* is falsy
    """
    # guard-first: empty/None docstrings yield no summary
    if not docs:
        return None
    # partition never raises; everything before the first newline is the summary
    return docs.partition("\n")[0].strip()
+
+
def get_zmq_unique_identifier_from_event_affordance(affordance: Any) -> Optional[str]:
    """Compose the unique PUB-SUB identifier for an event affordance.

    :param affordance: event affordance carrying `thing_id` and `name` attributes
    :returns: str: identifier of the form '<thing_id>/<dashed-event-name>'
    """
    dashed_event_name = pep8_to_dashed_name(affordance.name)
    return "{}/{}".format(affordance.thing_id, dashed_event_name)
+
diff --git a/hololinked/utils.py b/hololinked/utils.py
new file mode 100644
index 00000000..9cf7a397
--- /dev/null
+++ b/hololinked/utils.py
@@ -0,0 +1,593 @@
+import sys
+import logging
+import re
+import asyncio
+import inspect
+import typing
+import asyncio
+import types
+import traceback
+import typing
+import ifaddr
+import threading
+from functools import wraps
+from collections import OrderedDict
+from dataclasses import asdict
+from pydantic import BaseModel, ConfigDict, create_model, Field, RootModel
+from inspect import Parameter, signature
+
+
def get_IP_from_interface(interface_name : str = 'Ethernet', adapter_name = None) -> str:
    """
    Look up the IPv4 address of a named network interface. Useful when the host
    is connected through several adapters and a server binds to only one of them.

    Parameters
    ----------
    interface_name: str
        Ethernet, Wifi etc.
    adapter_name: optional, str
        restrict the search to this adapter when given

    Returns
    -------
    str:
        IP address of the interface

    Raises
    ------
    ValueError
        if no IPv4 address is found for the given interface name
    """
    for adapter in ifaddr.get_adapters(include_unconfigured=True):
        # when an adapter name was supplied, only look inside that adapter
        if adapter_name and adapter.nice_name != adapter_name:
            continue
        for ip in adapter.ips:
            if ip.nice_name == interface_name and ip.is_IPv4:
                return ip.ip
    raise ValueError(f"interface name {interface_name} not found in system interfaces.")
+
+
def format_exception_as_json(exc : Exception) -> typing.Dict[str, typing.Any]:
    """
    Represent an exception as a JSON-serializable dictionary with the keys
    'message', 'type', 'traceback' and 'notes'.
    """
    # repr(exc) looks like "ValueError('boom')" - keep only the class name
    exception_type = repr(exc).split('(', 1)[0]
    return {
        "message" : str(exc),
        "type" : exception_type,
        "traceback" : traceback.format_exc().splitlines(),
        "notes" : getattr(exc, "__notes__", None),
    }
+
+
def pep8_to_dashed_name(word : str) -> str:
    """
    Convert an underscored (pep8) name into its dashed form.
    Example::
        >>> pep8_to_dashed_name("device_type")
        'device-type'
    """
    trimmed = word.strip('_')                 # drop leading/trailing underscores
    dashed = re.sub(r'_+', '-', trimmed)      # collapse underscore runs into one dash
    return dashed.replace(' ', '-')           # spaces also become dashes
+
+
def get_default_logger(name : str, log_level : int = logging.INFO, log_file = None,
                format : str = '%(levelname)-8s - %(asctime)s:%(msecs)03d - %(name)s - %(message)s' ) -> logging.Logger:
    """
    the default logger used by most of hololinked package, when arguments are not modified.
    A stdout StreamHandler is always attached; pass `log_file` to also attach a FileHandler.

    Parameters
    ----------
    name: str
        name of logger
    log_level: int
        log level
    log_file: str
        valid path to file
    format: str
        log format

    Returns
    -------
    logging.Logger:
        created logger
    """
    logger = logging.getLogger(name)
    logger.setLevel(log_level)
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(logging.Formatter(format, datefmt='%Y-%m-%dT%H:%M:%S'))
    logger.addHandler(stream_handler)
    if log_file:
        file_handler = logging.FileHandler(log_file)
        file_handler.setFormatter(logging.Formatter(format, datefmt='%Y-%m-%dT%H:%M:%S'))
        logger.addHandler(file_handler)
    return logger
+
+
def run_coro_sync(coro : typing.Coroutine):
    """
    Execute the given coroutine synchronously on the current thread's event loop.
    Raises RuntimeError when the loop is already running (the coroutine must be
    awaited in that case).
    """
    try:
        loop = asyncio.get_event_loop()
    except RuntimeError:
        # no loop registered for this thread yet - create and install one
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    if not loop.is_running():
        return loop.run_until_complete(coro)
    # not the same as RuntimeError catch above.
    raise RuntimeError(f"asyncio event loop is already running, cannot setup coroutine {coro.__name__} to run sync, please await it.")
+
+
def run_callable_somehow(method : typing.Union[typing.Callable, typing.Coroutine]) -> typing.Any:
    """
    Invoke `method` whichever way fits: plain callables are called directly;
    coroutine functions/coroutines are either scheduled on a running loop
    (returning None) or run to completion (returning their result).
    """
    is_coro_func = asyncio.iscoroutinefunction(method)
    if not is_coro_func and not asyncio.iscoroutine(method):
        return method()
    loop = get_current_async_loop()
    coro = method() if is_coro_func else method
    if not loop.is_running():
        return loop.run_until_complete(coro)
    # loop already running: fire-and-forget as a task
    loop.create_task(coro)
+
+
def complete_pending_tasks_in_current_loop():
    """
    Run the current asyncio event loop until every task scheduled on it has finished.
    """
    loop = get_current_async_loop()
    pending = asyncio.all_tasks(loop)
    loop.run_until_complete(asyncio.gather(*pending))
+
async def complete_pending_tasks_in_current_loop_async():
    """
    Await completion of every task scheduled on the current asyncio event loop.
    """
    pending = asyncio.all_tasks(get_current_async_loop())
    await asyncio.gather(*pending)
+
def print_pending_tasks_in_current_loop():
    """
    Print every pending task of the current asyncio event loop, or a message
    when there are none.
    """
    pending = asyncio.all_tasks(get_current_async_loop())
    if not pending:
        print("No pending tasks in the current event loop.")
        return
    for pending_task in pending:
        # _state is a private Task attribute - used here for debugging output only
        print(f"Task: {pending_task}, Status: {pending_task._state}")
+
+
def get_signature(callable : typing.Callable) -> typing.Tuple[typing.List[str], typing.List[type]]:
    """
    Retrieve the names and annotated types of the arguments of the given callable.

    Parameters
    ----------
    callable: Callable
        function or method (not tested with __call__)

    Returns
    -------
    tuple: List[str], List[type]
        argument names and types respectively; the type is None when a
        parameter carries no annotation
    """
    names, annotations = [], []
    for parameter in inspect.signature(callable).parameters.values():
        names.append(parameter.name)
        if parameter.annotation is inspect.Parameter.empty:
            annotations.append(None)
        else:
            annotations.append(parameter.annotation)
    return names, annotations
+
+
def getattr_without_descriptor_read(instance, key):
    """
    supply to inspect._get_members (not inspect.get_members) to avoid calling
    __get__ on hardware attributes

    Looks `key` up in the instance dict first, then walks the MRO class dicts
    (returning the bound method when the class attribute is a plain function),
    and finally falls back to getattr. Returns None instead of raising
    AttributeError when the attribute is absent.
    """
    if key in instance.__dict__:
        return instance.__dict__[key]
    # bugfix: original had a duplicated assignment `mro = mro = (...)`
    mro = (instance.__class__,) + inspect.getmro(instance.__class__)
    for base in mro:
        if key in base.__dict__:
            value = base.__dict__[key]
            if isinstance(value, types.FunctionType):
                method = getattr(instance, key, None)
                if isinstance(method, types.MethodType):
                    return method
            return value
    # for descriptor, first try to find it in class dict or instance dict (for instance descriptors (per_instance_descriptor=True))
    # and then getattr from the instance. For descriptors/property, it will be mostly at above two levels.
    return getattr(instance, key, None) # we can deal with None where we use this getter, so dont raise AttributeError
+
+
def isclassmethod(method) -> bool:
    """
    Returns `True` if the method is a classmethod, `False` otherwise.
    https://stackoverflow.com/questions/19227724/check-if-a-function-uses-classmethod
    """
    if isinstance(method, classmethod):
        return True
    # a classmethod accessed through its class is bound to the class itself
    owner = getattr(method, '__self__', None)
    if not isinstance(owner, type):
        return False
    for klass in owner.__mro__:
        descriptor = vars(klass).get(method.__name__)
        if descriptor is None:
            continue
        return isinstance(descriptor, classmethod)
    return False
+
+
def has_async_def(method) -> bool:
    """
    Checks if `async def` appears in the method's source. Especially useful for
    class methods, where iscoroutinefunction alone is unreliable.
    https://github.com/python/cpython/issues/100224#issuecomment-2000895467

    Parameters
    ----------
    method: Callable
        function or method

    Returns
    -------
    bool
        True if `async def` is found in the method's source, False otherwise
    """
    source = inspect.getsource(method)
    pattern = r'^\s*async\s+def\s+' + re.escape(method.__name__) + r'\s*\('
    return re.search(pattern, source, re.MULTILINE) is not None
+
+
def issubklass(obj, cls) -> bool:
    """
    Safely check if `obj` is a subclass of `cls`.

    Parameters
    ----------
    obj: typing.Any
        The object to check if it's a subclass; may also be a tuple of classes,
        in which case every element must be a subclass of `cls`.
    cls: typing.Any
        The class (or tuple of classes) to compare against.

    Returns
    -------
    bool
        True if `obj` is a subclass of `cls`, False otherwise.
    """
    try:
        if isinstance(obj, type):
            return issubclass(obj, cls)
        if isinstance(obj, tuple):
            # bugfix: the original called issubclass(obj, cls) with the tuple as
            # the *first* argument, which raises TypeError (swallowed below), so
            # tuple inputs always returned False. Check each element instead.
            return len(obj) > 0 and all(isinstance(o, type) and issubclass(o, cls) for o in obj)
        return False
    except TypeError:
        return False
+
+
def get_a_filename_from_instance(thing: type, extension: str = 'json') -> str:
    """
    Build a filesystem-safe filename of the form
    '<ClassName>-<sanitized id>.<extension>' for a thing instance.
    """
    # replace characters invalid in filenames, collapse underscore runs, trim the ends
    sanitized = re.sub(r'[<>:"/\\|?*\x00-\x1F]+', '_', thing.id)
    sanitized = re.sub(r'_+', '_', sanitized).strip('_')
    # fall back to a single underscore if nothing of the id survived
    return f"{thing.__class__.__name__}-{sanitized or '_'}.{extension}"
+
+
def get_current_async_loop():
    """
    get or automatically create an asyncio event loop for the current thread.
    """
    try:
        return asyncio.get_event_loop()
    except RuntimeError:
        # no loop registered for this thread yet - create one and install it
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        return loop
+
+
class SerializableDataclass:
    """
    Presents uniform serialization for serializers using getstate and setstate and json
    serialization. Subclasses are expected to be dataclasses (asdict is used).
    """

    def json(self):
        # works because subclasses are dataclasses
        return asdict(self)

    def __getstate__(self):
        return self.json()

    def __setstate__(self, values : typing.Dict):
        for name in values:
            setattr(self, name, values[name])
+
+
class Singleton(type):
    """metaclass that enforces a single shared instance per class"""

    _instances = {}

    def __call__(cls, *args, **kwargs):
        # first instantiation wins; later calls return the cached instance
        instance = cls._instances.get(cls)
        if instance is None:
            instance = super(Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = instance
        return instance
+
+
class MappableSingleton(Singleton):
    """Singleton metaclass that additionally exposes dict-like access to class attributes"""

    def __setitem__(self, key, value) -> None:
        # item assignment on the class sets a class attribute
        setattr(self, key, value)

    def __getitem__(self, key) -> typing.Any:
        # item lookup reads a class attribute; raises AttributeError (not KeyError) when absent
        return getattr(self, key)

    def __contains__(self, key) -> bool:
        return hasattr(self, key)
+
+
def get_input_model_from_signature(
        func: typing.Callable,
        remove_first_positional_arg: bool = False,
        ignore: typing.Sequence[str] | None = None,
        model_for_empty_annotations: bool = False,
    ) -> type[BaseModel] | None:
    """
    Create a pydantic model for a function's signature.

    Parameters
    ----------
    func: Callable
        The function for which to create the pydantic model.
    remove_first_positional_arg: bool, optional
        Remove the first argument from the model (this is appropriate for methods,
        as the first argument, self, is baked in when it's called, but is present
        in the signature).
    ignore: Sequence[str], optional
        Ignore arguments that have the specified name. This is useful for e.g.
        dependencies that are injected by LabThings.
    model_for_empty_annotations: bool, optional
        If True, create a model even if there are no annotations.

    Returns
    -------
    Type[BaseModel] or None
        A pydantic model class describing the input parameters, or None if there are no parameters.
    """
    parameters = OrderedDict(signature(func).parameters)  # type: OrderedDict[str, Parameter]
    if len(parameters) == 0:
        return None

    # no annotations at all and caller didn't force model creation -> nothing to model
    if all(p.annotation is Parameter.empty for p in parameters.values()) and not model_for_empty_annotations:
        return None

    if remove_first_positional_arg:
        name, parameter = next(iter((parameters.items())))  # get the first parameter
        if parameter.kind in (Parameter.KEYWORD_ONLY, Parameter.VAR_KEYWORD):
            raise ValueError("Can't remove first positional argument: there is none.")
        del parameters[name]

    # fields is a dictionary of tuples of (type, default) that defines the input model
    type_hints = typing.get_type_hints(func, include_extras=True)
    fields = {}  # type: typing.Dict[str, typing.Tuple[type, typing.Any]]
    for name, p in parameters.items():
        if ignore and name in ignore:
            continue
        # *args/**kwargs are tagged with their Parameter kind via Annotated metadata,
        # so that validators downstream can recognise them
        if p.kind == Parameter.VAR_KEYWORD:
            p_type = typing.Annotated[typing.Dict[str, typing.Any] if p.annotation is Parameter.empty else type_hints[name], Parameter.VAR_KEYWORD]
            default = dict() if p.default is Parameter.empty else p.default
        elif p.kind == Parameter.VAR_POSITIONAL:
            p_type = typing.Annotated[typing.Tuple if p.annotation is Parameter.empty else type_hints[name], Parameter.VAR_POSITIONAL]
            default = tuple() if p.default is Parameter.empty else p.default
        else:
            # `type_hints` does more processing than p.annotation - but will
            # not have entries for missing annotations.
            p_type = typing.Any if p.annotation is Parameter.empty else type_hints[name]
            # pydantic uses `...` to represent missing defaults (i.e. required params)
            default = Field(...) if p.default is Parameter.empty else p.default
        fields[name] = (p_type, default)

    # If there are no fields, we don't want to return a model
    if len(fields) == 0:
        return None

    model = create_model(  # type: ignore[call-overload]
        f"{func.__name__}_input",
        **fields,
        __config__=ConfigDict(extra="forbid", strict=True)
    )
    return model
+
+
def pydantic_validate_args_kwargs(model: typing.Type[BaseModel], args: typing.Tuple = tuple(), kwargs: typing.Dict = dict()) -> None:
    """
    Validate positional and keyword arguments against the fields of the given pydantic model.

    Parameters
    ----------
    model: Type[BaseModel]
        The pydantic model class to validate against (typically produced by
        `get_input_model_from_signature`, whose Annotated metadata marks
        VAR_POSITIONAL/VAR_KEYWORD fields).
    args: tuple
        Positional arguments to validate.
    kwargs: dict
        Keyword arguments to validate.

    Returns
    -------
    None

    Raises
    ------
    ValueError
        If the arguments do not match the model's fields.
    ValidationError
        If the arguments are invalid
    """

    field_names = list(model.model_fields.keys())
    data = {}

    # Assign positional arguments to the corresponding fields
    for i, arg in enumerate(args):
        if i >= len(field_names):
            raise ValueError(f"Too many positional arguments. Expected at most {len(field_names)}.")
        field_name = field_names[i]
        # a field tagged VAR_POSITIONAL swallows all remaining positional args
        if Parameter.VAR_POSITIONAL in model.model_fields[field_name].metadata:
            if typing.get_origin(model.model_fields[field_name].annotation) is list:
                data[field_name] = list(args[i:])
            else:
                data[field_name] = args[i:]  # *args become end of positional arguments
            break
        elif field_name in data:
            raise ValueError(f"Multiple values for argument '{field_name}'.")
        data[field_name] = arg

    extra_kwargs = {}
    # Assign keyword arguments to the corresponding fields
    for key, value in kwargs.items():
        if key in data or key in extra_kwargs:  # Check for duplicate arguments
            raise ValueError(f"Multiple values for argument '{key}'.")
        if key in field_names:
            data[key] = value
        else:
            extra_kwargs[key] = value

    # unknown keyword args are routed into the (single) VAR_KEYWORD field, if any;
    # otherwise they are rejected once the scan reaches the last field
    # NOTE(review): when the model has zero fields this loop never runs and any
    # extra kwargs pass through silently to model_validate - confirm intended.
    if extra_kwargs:
        for i in range(len(field_names)):
            if Parameter.VAR_KEYWORD in model.model_fields[field_names[i]].metadata:
                data[field_names[i]] = extra_kwargs
                break
            elif i == len(field_names) - 1:
                raise ValueError(f"Unexpected keyword arguments: {', '.join(extra_kwargs.keys())}")
    # Validate and create the model instance
    model.model_validate(data)
+
+
def json_schema_merge_args_to_kwargs(schema: dict, args: typing.Tuple = tuple(), kwargs: typing.Dict = dict()) -> typing.Dict[str, typing.Any]:
    """
    Merge positional arguments into keyword arguments according to the schema.

    Parameters
    ----------
    schema: dict
        The JSON schema (of type 'object') whose property order maps positional
        arguments to names.
    args: tuple
        Positional arguments to merge.
    kwargs: dict
        Keyword arguments to merge.

    Returns
    -------
    dict
        The merged arguments as a dictionary, usually a JSON

    Raises
    ------
    ValueError
        for a non-object schema, too many positional arguments, or duplicated
        argument values
    """
    if schema['type'] != 'object':
        raise ValueError("Schema must be an object.")

    property_names = list(OrderedDict(schema['properties']).keys())
    merged = {}

    # positional args map onto properties in declaration order
    for index, value in enumerate(args):
        if index >= len(property_names):
            raise ValueError(f"Too many positional arguments. Expected at most {len(property_names)}.")
        name = property_names[index]
        if name in merged:
            raise ValueError(f"Multiple values for argument '{name}'.")
        merged[name] = value

    unknown = {}
    # keyword args fill known properties; anything else is collected separately
    for name, value in kwargs.items():
        if name in merged or name in unknown:  # duplicate check
            raise ValueError(f"Multiple values for argument '{name}'.")
        if name in property_names:
            merged[name] = value
        else:
            unknown[name] = value

    if unknown:
        merged.update(unknown)
    return merged
+
+
def get_return_type_from_signature(func: typing.Callable) -> "RootModel":
    """Determine the return type of a function.

    Returns `typing.Any` for an unannotated return; otherwise resolves the
    annotation (including forward references) and wraps it in a pydantic
    RootModel via `wrap_plain_types_in_rootmodel`.
    """
    sig = inspect.signature(func)
    if sig.return_annotation == inspect.Signature.empty:
        return typing.Any  # type: ignore[return-value]
    # We use `get_type_hints` rather than just `sig.return_annotation`
    # because it resolves forward references, etc.
    type_hints = typing.get_type_hints(func, include_extras=True)
    # bugfix: `from server.property import ...` was an absolute import of a
    # non-existent top-level `server` package; use a package-relative import.
    # TODO(review): confirm `property` still lives under `.server` after the
    # package restructuring (it may have moved to `.core`).
    from .server.property import wrap_plain_types_in_rootmodel
    return wrap_plain_types_in_rootmodel(type_hints["return"])
+
+
def get_all_sub_things_recusively(thing) -> typing.List:
    """Collect `thing` and, depth-first, every nested thing reachable through
    its `sub_things` mapping."""
    collected = [thing]
    for child in thing.sub_things.values():
        collected += get_all_sub_things_recusively(child)
    return collected
+
+
def forkable(func):
    """
    Decorator that lets a function optionally run in a background thread.
    Call the wrapped function with `forked=True` to start it in a new thread
    (the Thread object is returned); otherwise it runs inline and its result
    is returned.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # pop the control flag so it never reaches the wrapped function
        if not kwargs.pop('forked', False):
            return func(*args, **kwargs)
        worker = threading.Thread(target=func, args=args, kwargs=kwargs)
        worker.start()
        return worker
    return wrapper
+
+
# fix: several public helpers defined in this module were missing from __all__
# (async task helpers, has_async_def, filename/json-schema utilities and the
# singleton/serializable classes); adding names is backward compatible.
__all__ = [
    get_IP_from_interface.__name__,
    format_exception_as_json.__name__,
    pep8_to_dashed_name.__name__,
    get_default_logger.__name__,
    run_coro_sync.__name__,
    run_callable_somehow.__name__,
    complete_pending_tasks_in_current_loop.__name__,
    complete_pending_tasks_in_current_loop_async.__name__,
    print_pending_tasks_in_current_loop.__name__,
    get_signature.__name__,
    isclassmethod.__name__,
    has_async_def.__name__,
    issubklass.__name__,
    get_a_filename_from_instance.__name__,
    get_current_async_loop.__name__,
    get_input_model_from_signature.__name__,
    pydantic_validate_args_kwargs.__name__,
    json_schema_merge_args_to_kwargs.__name__,
    get_return_type_from_signature.__name__,
    getattr_without_descriptor_read.__name__,
    get_all_sub_things_recusively.__name__,
    forkable.__name__,
    SerializableDataclass.__name__,
    Singleton.__name__,
    MappableSingleton.__name__,
]
+
diff --git a/licenses/wotpy-LICENSE.txt b/licenses/wotpy-LICENSE.txt
new file mode 100644
index 00000000..5f5cbfb8
--- /dev/null
+++ b/licenses/wotpy-LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 CTIC Centro Tecnologico
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index c45cf03e..5d685b52 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "hololinked"
-version = "0.2.12"
+version = "0.3.0"
authors = [
{name = "Vignesh Vaidyanathan", email = "vignesh.vaidyanathan@hololinked.dev"},
]
@@ -46,7 +46,17 @@ Documentation = "https://hololinked.readthedocs.io/en/latest/index.html"
Repository = "https://github.com/hololinked-dev/hololinked" # Update with actual repository URL
[tool.setuptools]
-packages = ["hololinked", "hololinked.server", "hololinked.rpc", "hololinked.client", "hololinked.param"]
+packages = [
+ "hololinked",
+ "hololinked.server", "hololinked.server.http",
+ "hololinked.client", "hololinked.client.http", "hololinked.client.zmq",
+ "hololinked.param",
+ "hololinked.schema_validators",
+ "hololinked.serializers",
+ "hololinked.storage",
+ "hololinked.td",
+ "hololinked.core", "hololinked.core.zmq"
+]
[project.optional-dependencies]
dev = [
diff --git a/tests/README.md b/tests/README.md
new file mode 100644
index 00000000..022e540b
--- /dev/null
+++ b/tests/README.md
@@ -0,0 +1,37 @@
+# Test Suites
+
+This test suite contains both unit and integration tests, mixed together according to the requirements defined in each
+test case for how a certain feature should behave. Wherever possible, the tests progress from isolated
+parts of the code to the whole system. An approximate explanation of each file follows:
+
+1. `test_0_utils.py`: Tests for utility functions. These are functions that are used in multiple places in the code and can be used independently.
+2. `test_1_message.py`: Tests for the messaging contract of ZMQ, which implements a simple message passing protocol that conveys the exact information required to serve a `Thing` object. For each well standardized protocol, like HTTP, MQTT, etc., we extract the information that is required to run an operation on a `Thing` object, and wrap that information in a ZMQ message.
+3. `test_2_socket.py`: Tests for the socket creation and binding in ZMQ. We allow different types of sockets to be created although `INPROC` is mostly used.
+4. `test_3_serializers.py`: Tests for serializers used to send information on ZMQ socket.
+5. `test_4_thing_init.py`: Tests a `Thing` object whether it can be instantiated. Metaclass, Descriptor registries etc. are also tested here.
+6. `test_5_brokers.py`: Tests whether ZMQ request-reply patterns implemented on our simple protocol works as expected.
+7. `test_6_rpc_broker.py`: Tests whether the RPC broker, which is built on ZMQ message brokers, works as expected.
+8. `test_7_thing_run.py`: Tests whether a `Thing` object can be run using a ZMQ RPC server.
+9. `test_8_actions.py`: Tests all possibilities to execute actions of a `Thing` object.
+10. `test_9_property.py`: Tests all possibilities to execute properties of a `Thing` object.
+11. `test_10_events.py`: Tests all possibilities to execute events of a `Thing` object.
+
+The tests are written with `unittest` framework.
+
+### Running the tests
+
+To run the tests, just do:
+
+```bash
+python -m unittest
+```
+
+### More Documentation
+
+In each test file, there are more detailed explanations of the tests.
+
+To document a test:
+1. Create a class and subclass from `TestCase` which is in `utils.py` (in this folder).
+2. State all the tests the class will perform
+3. Within each test method, state the purpose of the test as docstring
+4. Then, state each requirement one by one within a comment ("# req 1.") and test them.
diff --git a/tests/test_rpc.py b/tests/not working - yet to be integrated/test_14_rpc.py
similarity index 100%
rename from tests/test_rpc.py
rename to tests/not working - yet to be integrated/test_14_rpc.py
diff --git a/tests/test_property.py b/tests/not working - yet to be integrated/test_8_exposed_properties.py
similarity index 66%
rename from tests/test_property.py
rename to tests/not working - yet to be integrated/test_8_exposed_properties.py
index 45e03a96..39fc985d 100644
--- a/tests/test_property.py
+++ b/tests/not working - yet to be integrated/test_8_exposed_properties.py
@@ -1,143 +1,12 @@
-import logging, unittest, time, os
-import tempfile
-import pydantic_core
-from pydantic import BaseModel
-from hololinked.client import ObjectProxy
-from hololinked.server import action, Thing, Property
-from hololinked.server.properties import Number, String, Selector, List, Integer
-from hololinked.server.database import BaseDB
-try:
- from .utils import TestCase, TestRunner
- from .things import start_thing_forked
-except ImportError:
- from utils import TestCase, TestRunner
- from things import start_thing_forked
-
-
-
-
-class TestThing(Thing):
-
- number_prop = Number(doc="A fully editable number property")
- string_prop = String(default='hello', regex='^[a-z]+',
- doc="A string property with a regex constraint to check value errors")
- int_prop = Integer(default=5, step=2, bounds=(0, 100),
- doc="An integer property with step and bounds constraints to check RW")
- selector_prop = Selector(objects=['a', 'b', 'c', 1], default='a',
- doc="A selector property to check RW")
- observable_list_prop = List(default=None, allow_None=True, observable=True,
- doc="An observable list property to check observable events on write operations")
- observable_readonly_prop = Number(default=0, readonly=True, observable=True,
- doc="An observable readonly property to check observable events on read operations")
- db_commit_number_prop = Number(default=0, db_commit=True,
- doc="A fully editable number property to check commits to db on write operations")
- db_init_int_prop = Integer(default=1, db_init=True,
- doc="An integer property to check initialization from db")
- db_persist_selector_prop = Selector(objects=['a', 'b', 'c', 1], default='a', db_persist=True,
- doc="A selector property to check persistence to db on write operations")
- non_remote_number_prop = Number(default=5, remote=False,
- doc="A non remote number property to check non-availability on client")
-
-
- class PydanticProp(BaseModel):
- foo : str
- bar : int
- foo_bar : float
-
-
- pydantic_prop = Property(default=None, allow_None=True, model=PydanticProp,
- doc="A property with a pydantic model to check RW")
-
- pydantic_simple_prop = Property(default=None, allow_None=True, model='int',
- doc="A property with a simple pydantic model to check RW")
-
- schema = {
- "type" : "string",
- "minLength" : 1,
- "maxLength" : 10,
- "pattern" : "^[a-z]+$"
- }
-
- json_schema_prop = Property(default=None, allow_None=True, model=schema,
- doc="A property with a json schema to check RW")
-
- @observable_readonly_prop.getter
- def get_observable_readonly_prop(self):
- if not hasattr(self, '_observable_readonly_prop'):
- self._observable_readonly_prop = 0
- self._observable_readonly_prop += 1
- return self._observable_readonly_prop
- @action()
- def print_props(self):
- print(f'number_prop: {self.number_prop}')
- print(f'string_prop: {self.string_prop}')
- print(f'int_prop: {self.int_prop}')
- print(f'selector_prop: {self.selector_prop}')
- print(f'observable_list_prop: {self.observable_list_prop}')
- print(f'observable_readonly_prop: {self.observable_readonly_prop}')
- print(f'db_commit_number_prop: {self.db_commit_number_prop}')
- print(f'db_init_int_prop: {self.db_init_int_prop}')
- print(f'db_persist_selctor_prop: {self.db_persist_selector_prop}')
- print(f'non_remote_number_prop: {self.non_remote_number_prop}')
+class TestThing:
-
-class TestProperty(TestCase):
-
- @classmethod
- def setUpClass(self):
- print("test property")
- self.thing_cls = TestThing
- start_thing_forked(self.thing_cls, instance_name='test-property',
- log_level=logging.WARN)
- self.thing_client = ObjectProxy('test-property') # type: TestThing
-
- @classmethod
- def tearDownClass(self):
- print("tear down test property")
- self.thing_client.exit()
-
-
- def test_1_client_api(self):
- """basic read write tests for properties involing the dot operator"""
- # Test read
- self.assertEqual(self.thing_client.number_prop, 0)
- # Test write
- self.thing_client.string_prop = 'world'
- self.assertEqual(self.thing_client.string_prop, 'world')
- # Test exception propagation to client
- with self.assertRaises(ValueError):
- self.thing_client.string_prop = 'WORLD'
- with self.assertRaises(TypeError):
- self.thing_client.int_prop = '5'
- # Test non remote prop (non-)availability on client
- with self.assertRaises(AttributeError):
- self.thing_client.non_remote_number_prop
-
-
- def test_2_RW_multiple_properties(self):
- # Test partial list of read write properties
- self.thing_client.write_multiple_properties(
- number_prop=15,
- string_prop='foobar'
- )
- self.assertEqual(self.thing_client.number_prop, 15)
- self.assertEqual(self.thing_client.string_prop, 'foobar')
- # check prop that was not set in multiple properties
- self.assertEqual(self.thing_client.int_prop, 5)
-
- self.thing_client.selector_prop = 'b'
- self.thing_client.number_prop = -15
- props = self.thing_client.read_multiple_properties(names=['selector_prop', 'int_prop',
- 'number_prop', 'string_prop'])
- self.assertEqual(props['selector_prop'], 'b')
- self.assertEqual(props['int_prop'], 5)
- self.assertEqual(props['number_prop'], -15)
- self.assertEqual(props['string_prop'], 'foobar')
+
+
def test_3_observability(self):
@@ -206,7 +75,7 @@ def test_4_db_operations(self):
self.assertTrue(not os.path.exists(file_path))
# test db commit property
- thing = TestThing(instance_name='test-db-operations', use_default_db=True, log_level=logging.WARN)
+ thing = TestThing(id='test-db-operations', use_default_db=True, log_level=logging.WARN)
self.assertEqual(thing.db_commit_number_prop, 0) # 0 is default just for reference
thing.db_commit_number_prop = 100
self.assertEqual(thing.db_commit_number_prop, 100)
@@ -227,7 +96,7 @@ def test_4_db_operations(self):
del thing
# delete thing and reload from database
- thing = TestThing(instance_name='test-db-operations', use_default_db=True, log_level=logging.WARN)
+ thing = TestThing(id='test-db-operations', use_default_db=True, log_level=logging.WARN)
self.assertEqual(thing.db_init_int_prop, TestThing.db_init_int_prop.default)
self.assertEqual(thing.db_persist_selector_prop, 'c')
self.assertNotEqual(thing.db_commit_number_prop, 100)
@@ -236,7 +105,7 @@ def test_4_db_operations(self):
# check db init prop with a different value in database apart from default
thing.db_engine.set_property('db_init_int_prop', 101)
del thing
- thing = TestThing(instance_name='test-db-operations', use_default_db=True, log_level=logging.WARN)
+ thing = TestThing(id='test-db-operations', use_default_db=True, log_level=logging.WARN)
self.assertEqual(thing.db_init_int_prop, 101)
@@ -282,7 +151,7 @@ def test_7_json_db_operations(self):
filename = tf.name
# test db commit property
- thing = TestThing(instance_name="test-db-operations", use_json_file=True,
+ thing = TestThing(id="test-db-operations", use_json_file=True,
json_filename=filename, log_level=logging.WARN)
self.assertEqual(thing.db_commit_number_prop, 0)
thing.db_commit_number_prop = 100
@@ -304,7 +173,7 @@ def test_7_json_db_operations(self):
del thing
# delete thing and reload from database
- thing = TestThing(instance_name="test-db-operations", use_json_file=True,
+ thing = TestThing(id="test-db-operations", use_json_file=True,
json_filename=filename, log_level=logging.WARN)
self.assertEqual(thing.db_init_int_prop, TestThing.db_init_int_prop.default)
self.assertEqual(thing.db_persist_selector_prop, 'c')
@@ -314,7 +183,7 @@ def test_7_json_db_operations(self):
# check db init prop with a different value in database apart from default
thing.db_engine.set_property('db_init_int_prop', 101)
del thing
- thing = TestThing(instance_name="test-db-operations", use_json_file=True,
+ thing = TestThing(id="test-db-operations", use_json_file=True,
json_filename=filename, log_level=logging.WARN)
self.assertEqual(thing.db_init_int_prop, 101)
@@ -388,8 +257,8 @@ def test_1_simple_class_property(self):
self.assertEqual(TestClassPropertyThing.simple_class_prop, 100)
# Test that instance-level access reflects class value
- instance1 = TestClassPropertyThing(instance_name='test1', log_level=logging.WARN)
- instance2 = TestClassPropertyThing(instance_name='test2', log_level=logging.WARN)
+ instance1 = TestClassPropertyThing(id='test1', log_level=logging.WARN)
+ instance2 = TestClassPropertyThing(id='test2', log_level=logging.WARN)
self.assertEqual(instance1.simple_class_prop, 100)
self.assertEqual(instance2.simple_class_prop, 100)
@@ -415,7 +284,7 @@ def test_2_managed_class_property(self):
self.assertEqual(TestClassPropertyThing.managed_class_prop, 50)
# Test instance-level validation
- instance = TestClassPropertyThing(instance_name='test3', log_level=logging.WARN)
+ instance = TestClassPropertyThing(id='test3', log_level=logging.WARN)
with self.assertRaises(ValueError):
instance.managed_class_prop = -20
@@ -437,7 +306,7 @@ def test_3_readonly_class_property(self):
TestClassPropertyThing.readonly_class_prop = "new-value"
# Test that setting raises an error at instance level
- instance = TestClassPropertyThing(instance_name='test4', log_level=logging.WARN)
+ instance = TestClassPropertyThing(id='test4', log_level=logging.WARN)
with self.assertRaises(ValueError):
instance.readonly_class_prop = "new-value"
@@ -455,7 +324,7 @@ def test_4_deletable_class_property(self):
self.assertEqual(TestClassPropertyThing.deletable_class_prop, 150)
# Test deletion
- instance = TestClassPropertyThing(instance_name='test5', log_level=logging.WARN)
+ instance = TestClassPropertyThing(id='test5', log_level=logging.WARN)
del TestClassPropertyThing.deletable_class_prop
self.assertEqual(TestClassPropertyThing.deletable_class_prop, 100) # Should return to default
self.assertEqual(instance.deletable_class_prop, 100)
@@ -469,7 +338,7 @@ def test_4_deletable_class_property(self):
def test_5_descriptor_access(self):
"""Test descriptor access for class properties"""
# Test direct access through descriptor
- instance = TestClassPropertyThing(instance_name='test6', log_level=logging.WARN)
+ instance = TestClassPropertyThing(id='test6', log_level=logging.WARN)
self.assertIsInstance(TestClassPropertyThing.not_a_class_prop, Number)
self.assertEqual(instance.not_a_class_prop, 43)
instance.not_a_class_prop = 50
diff --git a/tests/buggy/test_http_server.py b/tests/not working - yet to be integrated/test_http_server.py
similarity index 100%
rename from tests/buggy/test_http_server.py
rename to tests/not working - yet to be integrated/test_http_server.py
diff --git a/tests/not working - yet to be integrated/test_n_message_brokers.py b/tests/not working - yet to be integrated/test_n_message_brokers.py
new file mode 100644
index 00000000..c05945bd
--- /dev/null
+++ b/tests/not working - yet to be integrated/test_n_message_brokers.py
@@ -0,0 +1,435 @@
+import threading, asyncio, typing
+import logging, multiprocessing, unittest
+import zmq.asyncio
+from uuid import UUID
+from hololinked.server.protocols.zmq.brokers import (CM_INDEX_ADDRESS, CM_INDEX_CLIENT_TYPE, CM_INDEX_MESSAGE_TYPE,
+ CM_INDEX_MESSAGE_ID, CM_INDEX_SERVER_EXEC_CONTEXT, CM_INDEX_THING_ID, CM_INDEX_OPERATION,
+ CM_INDEX_OBJECT, CM_INDEX_ARGUMENTS, CM_INDEX_THING_EXEC_CONTEXT, CM_MESSAGE_LENGTH, EXCEPTION)
+from hololinked.server.protocols.zmq.brokers import (SM_INDEX_ADDRESS, SM_INDEX_MESSAGE_TYPE, SM_INDEX_MESSAGE_ID,
+ SM_INDEX_SERVER_TYPE, SM_INDEX_DATA, SM_INDEX_PRE_ENCODED_DATA, SM_MESSAGE_LENGTH)
+from hololinked.server.protocols.zmq.brokers import PROXY, REPLY, TIMEOUT, INVALID_MESSAGE, HANDSHAKE, EXIT, OPERATION
+from hololinked.server.protocols.zmq.brokers import AsyncZMQServer, SyncZMQClient
+from hololinked.server.protocols.zmq.brokers import default_server_execution_context
+from hololinked.utils import get_current_async_loop, get_default_logger
+from hololinked.server.dataklasses import ZMQAction, ZMQResource
+from hololinked.constants import ZMQ_PROTOCOLS, ResourceTypes, ServerTypes
+from hololinked.server.protocols.zmq.rpc_server import RPCServer
+from hololinked.client.proxy import _Action, _Property
+
+
+try:
+ from ..utils import TestCase, TestRunner
+ # from .things import TestThing, start_thing_forked
+except ImportError:
+ from utils import TestCase, TestRunner
+ # from things import TestThing, start_thing_forked
+
+
+
+
+def run_server(server : AsyncZMQServer, owner : "TestServerBroker", done_queue : multiprocessing.Queue) -> None:
+ event_loop = get_current_async_loop()
+ async def run():
+ while True:
+ messages = await server.async_recv_requests()
+ owner.last_server_message = messages[0]
+ for message in messages:
+ if message[CM_INDEX_MESSAGE_TYPE] == b'EXIT':
+ return
+ await asyncio.sleep(0.01)
+ event_loop.run_until_complete(run())
+ if done_queue:
+ done_queue.put(True)
+
+
+
+class TestServerBroker(TestCase):
+ """Tests Individual ZMQ Server"""
+
+ @classmethod
+ def setUpServer(self):
+ self.server_message_broker = AsyncZMQServer(
+ instance_name='test-message-broker',
+ server_type='RPC',
+ logger=self.logger
+ )
+ self._server_thread = threading.Thread(
+ target=run_server,
+ args=(self.server_message_broker, self, self.done_queue),
+ daemon=True
+ )
+ self._server_thread.start()
+
+
+ @classmethod
+ def setUpClient(self):
+ self.client_message_broker = SyncZMQClient(
+ server_instance_name='test-message-broker',
+ logger=self.logger,
+ identity='test-client',
+ client_type=PROXY, handshake=False
+ )
+
+ """
+ Base class: BaseZMQ, BaseAsyncZMQ, BaseSyncZMQ
+ Servers: BaseZMQServer, AsyncZMQServer, ZMQServerPool
+ Clients: BaseZMQClient, SyncZMQClient, AsyncZMQClient, MessageMappedZMQClientPool
+ """
+
+ @classmethod
+ def setUpClass(self):
+ print(f"test ZMQ Message Broker with {self.__name__}")
+ self.logger = get_default_logger('test-message-broker', logging.ERROR)
+ self.done_queue = multiprocessing.Queue()
+ self.last_server_message = None
+ self.setUpServer()
+ self.setUpClient()
+
+
+ @classmethod
+ def tearDownClass(self):
+ print("tear down test message broker")
+
+
+ def check_server_message(self, message):
+ """
+ Utility function to check types of indices within the message created by the server
+ """
+ self.assertEqual(len(message), SM_MESSAGE_LENGTH)
+ """
+ SM_INDEX_ADDRESS = 0, SM_INDEX_SERVER_TYPE = 2, SM_INDEX_MESSAGE_TYPE = 3, SM_INDEX_MESSAGE_ID = 4,
+ SM_INDEX_DATA = 5, SM_INDEX_PRE_ENCODED_DATA = 6,
+ """
+ for index, msg in enumerate(message):
+ if index <= 4 or index == 6:
+ self.assertIsInstance(msg, bytes)
+ if message[SM_INDEX_MESSAGE_TYPE] == INVALID_MESSAGE:
+ self.assertEqual(message[SM_INDEX_DATA]["type"], "Exception")
+ elif message[SM_INDEX_MESSAGE_TYPE] == HANDSHAKE:
+ self.assertEqual(message[SM_INDEX_DATA], b'null')
+ elif message[SM_INDEX_MESSAGE_TYPE] == EXCEPTION:
+ self.assertEqual(message[SM_INDEX_DATA]["type"], "Exception")
+
+
+ def check_client_message(self, message):
+ """
+ Utility function to check types of indices within the message created by the client
+ """
+ self.assertEqual(len(message), CM_MESSAGE_LENGTH)
+ """
+ CM_INDEX_ADDRESS = 0, CM_INDEX_CLIENT_TYPE = 2, CM_INDEX_MESSAGE_TYPE = 3, CM_INDEX_MESSAGE_ID = 4,
+ CM_INDEX_SERVER_EXEC_CONTEXT = 5, CM_INDEX_THING_ID = 7, CM_INDEX_OBJECT = 8, CM_INDEX_OPERATION = 9,
+ CM_INDEX_ARGUMENTS = 10, CM_INDEX_THING_EXEC_CONTEXT = 11
+ """
+ for index, msg in enumerate(message):
+ if index <= 4 or index == 9 or index == 7: # 0, 2, 3, 4, 7, 9
+ self.assertIsInstance(msg, bytes)
+ elif index >= 10 or index == 8: # 8, 10, 11
+ self.assertTrue(not isinstance(msg, bytes))
+ # 1 and 6 are empty bytes
+ # 5 - server execution context is deserialized only by RPC server
+
+
+ def test_1_handshake_complete(self):
+ """
+ Test handshake so that client can connect to server. Once client connects to server,
+ verify a ZMQ internal monitoring socket is available.
+ """
+ self.client_message_broker.handshake()
+ self.assertTrue(self.client_message_broker._monitor_socket is not None)
+ # both directions
+ # HANDSHAKE = b'HANDSHAKE' # 1 - find out if the server is alive
+
+
+ def test_2_message_contract_indices(self):
+ """
+ Test message composition for every composition way possible.
+ Before production release, this is to freeze the message contract.
+ """
+ # client to server
+ # OPERATION = b'OPERATION' # 2 - operation request from client to server
+ client_message1 = self.client_message_broker.craft_request_from_arguments(b'test-device',
+ b'someProp', b'readProperty')
+ # test message contract length
+ self.assertEqual(len(client_message1), CM_MESSAGE_LENGTH)
+ # check all are bytes encoded at least in a loose sense
+ for msg in client_message1:
+ self.assertTrue(isinstance(msg, bytes))
+ self.assertEqual(client_message1[CM_INDEX_ADDRESS],
+ bytes(self.server_message_broker.instance_name, encoding='utf-8'))
+ self.assertEqual(client_message1[1], b'')
+ self.assertEqual(client_message1[CM_INDEX_CLIENT_TYPE], PROXY)
+ self.assertEqual(client_message1[CM_INDEX_MESSAGE_TYPE], OPERATION)
+ self.assertIsInstance(UUID(client_message1[CM_INDEX_MESSAGE_ID].decode(), version=4), UUID)
+ self.assertEqual(self.client_message_broker.zmq_serializer.loads(client_message1[CM_INDEX_SERVER_EXEC_CONTEXT]),
+ default_server_execution_context)
+ self.assertEqual(client_message1[CM_INDEX_THING_ID], b'test-device')
+ self.assertEqual(client_message1[CM_INDEX_OPERATION], b'readProperty')
+ self.assertEqual(client_message1[CM_INDEX_OBJECT], b'someProp')
+ self.assertEqual(self.client_message_broker.zmq_serializer.loads(client_message1[CM_INDEX_ARGUMENTS]), dict())
+ self.assertEqual(self.client_message_broker.zmq_serializer.loads(client_message1[CM_INDEX_THING_EXEC_CONTEXT]),
+ dict())
+ """
+ CM_INDEX_ADDRESS = 0, CM_INDEX_CLIENT_TYPE = 2, CM_INDEX_MESSAGE_TYPE = 3, CM_INDEX_MESSAGE_ID = 4,
+ CM_INDEX_SERVER_EXEC_CONTEXT = 5, CM_INDEX_THING_ID = 7, CM_INDEX_OBJECT = 8, CM_INDEX_OPERATION = 9,
+ CM_INDEX_ARGUMENTS = 10, CM_INDEX_THING_EXEC_CONTEXT = 11
+ """
+
+ # test specific way of crafting messages
+ # client side - only other second method that generates message
+ # 3 - exit the server
+ client_message2 = self.client_message_broker.craft_empty_request_with_message_type(b'EXIT')
+ self.assertEqual(len(client_message2), CM_MESSAGE_LENGTH)
+ for msg in client_message2:
+ self.assertTrue(isinstance(msg, bytes))
+
+ # Server to client
+ # REPLY = b'REPLY' # 4 - response for operation
+ server_message1 = self.server_message_broker.craft_response_from_arguments(b'test-device',
+ PROXY, REPLY, client_message1[CM_INDEX_MESSAGE_ID])
+ self.assertEqual(len(server_message1), SM_MESSAGE_LENGTH)
+ for msg in server_message1:
+ self.assertTrue(isinstance(msg, bytes))
+ self.assertEqual(server_message1[SM_INDEX_ADDRESS], b'test-device')
+ self.assertEqual(server_message1[1], b'')
+ self.assertEqual(server_message1[SM_INDEX_SERVER_TYPE], b'RPC')
+ self.assertEqual(server_message1[SM_INDEX_MESSAGE_TYPE], REPLY)
+ self.assertIsInstance(UUID(server_message1[SM_INDEX_MESSAGE_ID].decode(), version=4), UUID)
+ self.assertEqual(server_message1[SM_INDEX_DATA], self.server_message_broker.zmq_serializer.dumps(None))
+ self.assertEqual(server_message1[SM_INDEX_PRE_ENCODED_DATA], b'')
+ """
+ SM_INDEX_ADDRESS = 0, SM_INDEX_SERVER_TYPE = 2, SM_INDEX_MESSAGE_TYPE = 3, SM_INDEX_MESSAGE_ID = 4,
+ SM_INDEX_DATA = 5, SM_INDEX_PRE_ENCODED_DATA = 6,
+ """
+ # server side - only other second method that generates message
+ server_message2 = self.server_message_broker.craft_response_from_client_message(client_message2)
+ self.assertEqual(len(server_message2), SM_MESSAGE_LENGTH)
+ self.assertEqual(server_message2[CM_INDEX_MESSAGE_TYPE], REPLY)
+ for msg in server_message2:
+ self.assertTrue(isinstance(msg, bytes))
+
+
+ def test_3_message_contract_types(self):
+ """
+ Once composition is checked, check different message types
+ """
+ # message types
+ client_message = self.client_message_broker.craft_request_from_arguments(b'test-device',
+ b'someProp', b'readProperty')
+
+ async def handle_message_types():
+ # server to client
+ # REPLY = b'REPLY' # 4 - response for operation
+ # TIMEOUT = b'TIMEOUT' # 5 - timeout message, operation could not be completed
+ # EXCEPTION = b'EXCEPTION' # 6 - exception occurred while executing operation
+ # 7 INVALID_MESSAGE = b'INVALID_MESSAGE' # 7 - invalid message
+ client_message[CM_INDEX_ADDRESS] = b'test-client'
+ await self.server_message_broker._handle_timeout(client_message) # 5
+ await self.server_message_broker._handle_invalid_message(client_message, Exception('test')) # 7
+ await self.server_message_broker._handshake(client_message) # 1
+ await self.server_message_broker.async_send_response(client_message) # 4
+ await self.server_message_broker.async_send_response_with_message_type(client_message, EXCEPTION,
+ Exception('test')) # 6
+
+ get_current_async_loop().run_until_complete(handle_message_types())
+
+ """
+ message types
+
+ both directions
+ HANDSHAKE = b'HANDSHAKE' # 1 - taken care by test_1...
+
+ client to server
+ OPERATION = b'OPERATION' 2 - taken care by test_2_... # operation request from client to server
+ EXIT = b'EXIT' # 3 - taken care by test_7... # exit the server
+
+ server to client
+ REPLY = b'REPLY' # 4 - response for operation
+ TIMEOUT = b'TIMEOUT' # 5 - timeout message, operation could not be completed
+ EXCEPTION = b'EXCEPTION' # 6 - exception occurred while executing operation
+ INVALID_MESSAGE = b'INVALID_MESSAGE' # 7 - invalid message
+ SERVER_DISCONNECTED = 'EVENT_DISCONNECTED' not yet tested # socket died - zmq's builtin event
+
+ peer to peer
+ INTERRUPT = b'INTERRUPT' not yet tested # interrupt a socket while polling
+ """
+
+ msg = self.client_message_broker.recv_response(client_message[CM_INDEX_MESSAGE_ID])
+ self.assertEqual(msg[CM_INDEX_MESSAGE_TYPE], TIMEOUT)
+ self.check_server_message(msg)
+
+ msg = self.client_message_broker.recv_response(client_message[CM_INDEX_MESSAGE_ID])
+ self.assertEqual(msg[CM_INDEX_MESSAGE_TYPE], INVALID_MESSAGE)
+ self.check_server_message(msg)
+
+ msg = self.client_message_broker.socket.recv_multipart() # handshake dont come as response
+ self.assertEqual(msg[CM_INDEX_MESSAGE_TYPE], HANDSHAKE)
+ self.check_server_message(msg)
+
+ msg = self.client_message_broker.recv_response(client_message[CM_INDEX_MESSAGE_ID])
+ self.assertEqual(msg[CM_INDEX_MESSAGE_TYPE], REPLY)
+ self.check_server_message(msg)
+
+ msg = self.client_message_broker.recv_response(client_message[CM_INDEX_MESSAGE_ID])
+ self.assertEqual(msg[CM_INDEX_MESSAGE_TYPE], EXCEPTION)
+ self.check_server_message(msg)
+
+ # exit checked separately at the end
+
+ def test_pending(self):
+ pass
+ # SERVER_DISCONNECTED = 'EVENT_DISCONNECTED' # socket died - zmq's builtin event
+ # # peer to peer
+ # INTERRUPT = b'INTERRUPT' # interrupt a socket while polling
+ # # first test the length
+
+
+ def test_4_abstractions(self):
+ """
+        Once message types are checked, operations need to be checked. But execution of operations on the server
+        is implemented by the event loop, so we skip that here. We check abstractions of message type and operation to a
+ higher level object, and said higher level object should send the message and message should have
+ been received by the server.
+ """
+ self._test_action_call_abstraction()
+ self._test_property_abstraction()
+
+
+ def _test_action_call_abstraction(self):
+ """
+ Higher level action object should be able to send messages to server
+ """
+ resource_info = ZMQResource(what=ResourceTypes.ACTION, class_name='TestThing', instance_name='test-thing',
+ obj_name='test_echo', qualname='TestThing.test_echo', doc="returns value as it is to the client",
+ request_as_argument=False)
+ action_abstractor = _Action(sync_client=self.client_message_broker, resource_info=resource_info,
+ invokation_timeout=5, execution_timeout=5, async_client=None, schema_validator=None)
+ action_abstractor.oneway() # because we dont have a thing running
+ self.client_message_broker.handshake() # force a response from server so that last_server_message is set
+ self.check_client_message(self.last_server_message) # last message received by server which is the client message
+
+
+ def _test_property_abstraction(self):
+ """
+ Higher level property object should be able to send messages to server
+ """
+ resource_info = ZMQResource(what=ResourceTypes.PROPERTY, class_name='TestThing', instance_name='test-thing',
+ obj_name='test_prop', qualname='TestThing.test_prop', doc="a random property",
+ request_as_argument=False)
+ property_abstractor = _Property(sync_client=self.client_message_broker, resource_info=resource_info,
+ invokation_timeout=5, execution_timeout=5, async_client=None)
+ property_abstractor.oneway_set(5) # because we dont have a thing running
+ self.client_message_broker.handshake() # force a response from server so that last_server_message is set
+ self.check_client_message(self.last_server_message) # last message received by server which is the client message
+
+
+ def test_6_message_broker_async(self):
+ """
+ Test if server can be started and stopped using builtin functions on the server side
+ """
+
+ async def verify_poll_stopped(self : "TestServerBroker"):
+ await self.server_message_broker.poll_requests()
+ self.server_message_broker.poll_timeout = 1000
+ await self.server_message_broker.poll_requests()
+ self.done_queue.put(True)
+
+ async def stop_poll(self : "TestServerBroker"):
+ await asyncio.sleep(0.1)
+ self.server_message_broker.stop_polling()
+ await asyncio.sleep(0.1)
+ self.server_message_broker.stop_polling()
+ # When the above two functions running, we dont send a message as the thread is
+ # also running
+ get_current_async_loop().run_until_complete(
+ asyncio.gather(*[verify_poll_stopped(self), stop_poll(self)])
+ )
+
+ self.assertTrue(self.done_queue.get())
+ self.assertEqual(self.server_message_broker.poll_timeout, 1000)
+
+
+ def test_7_exit(self):
+ """
+ Test if exit reaches to server
+ """
+ # EXIT = b'EXIT' # 7 - exit the server
+ client_message = self.client_message_broker.craft_empty_request_with_message_type(EXIT)
+ client_message[CM_INDEX_ADDRESS] = b'test-message-broker'
+ self.client_message_broker.socket.send_multipart(client_message)
+
+ self.assertTrue(self.done_queue.get())
+ self._server_thread.join()
+
+
+
+class TestRPCServer(TestCase):
+
+ @classmethod
+ def setUpClass(self):
+ print(f"test RPC Message Broker with {self.__name__}")
+ self.logger = get_default_logger('test-rpc-broker', logging.ERROR)
+ self.done_queue = multiprocessing.Queue()
+ self.start_rpc_server()
+ self.client_message_broker = SyncZMQClient(
+ server_instance_name='test-rpc-server',
+ identity='test-client',
+ client_type=PROXY,
+ log_level=logging.ERROR
+ )
+
+
+ @classmethod
+ def start_rpc_server(self):
+ context = zmq.asyncio.Context()
+ self.rpc_server = RPCServer(instance_name='test-rpc-server', logger=self.logger,
+ things=[], context=context)
+ self.inner_server = AsyncZMQServer(
+ instance_name=f'test-rpc-server/inner', # hardcoded be very careful
+ server_type=ServerTypes.THING,
+ context=context,
+ logger=self.logger,
+ protocol=ZMQ_PROTOCOLS.INPROC,
+
+ )
+ self._server_thread = threading.Thread(target=run_server, args=(self.inner_server, self, self.done_queue),
+ daemon=True)
+ self._server_thread.start()
+
+ self.rpc_server.run()
+
+
+ def test_7_exit(self):
+ """
+ Test if exit reaches to server
+ """
+ # # EXIT = b'EXIT' # exit the server
+ # client_message = self.client_message_broker.craft_request_from_arguments(b'test-device',
+ # b'readProperty', b'someProp')
+ # client_message[CM_INDEX_ADDRESS] = b'test-rpc-server/inner'
+ # client_message[CM_INDEX_MESSAGE_TYPE] = EXIT
+ # self.client_message_broker.socket.send_multipart(client_message)
+ # self.assertTrue(self.done_queue.get())
+ # self._server_thread.join()
+
+ @classmethod
+ def tearDownClass(self):
+ self.inner_server.exit()
+ self.rpc_server.exit()
+
+
+# resource_info = ZMQResource(what=ResourceTypes.ACTION, class_name='TestThing', instance_name='test-thing/inner',
+# obj_name='test_echo', qualname='TestThing.test_echo', doc="returns value as it is to the client",
+# request_as_argument=False)
+# action_abstractor = _Action(sync_client=client, resource_info=resource_info,
+# invokation_timeout=5, execution_timeout=5, async_client=None, schema_validator=None)
+# action_abstractor.oneway() # because we dont have a thing running
+# client.handshake() # force a response from server so that last_server_message is set
+# self.check_client_message(self.last_server_message)
+
+# self.assertEqual(self.last_server_message[CM_INDEX_THING_ID], b'test-client/inner')
+
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
\ No newline at end of file
diff --git a/tests/test_00_utils.py b/tests/test_00_utils.py
new file mode 100644
index 00000000..57e834f6
--- /dev/null
+++ b/tests/test_00_utils.py
@@ -0,0 +1,480 @@
+import unittest
+import typing
+from pydantic import BaseModel, ValidationError
+
+from hololinked.utils import (get_input_model_from_signature, issubklass, pydantic_validate_args_kwargs,
+ json_schema_merge_args_to_kwargs)
+try:
+ from .utils import TestCase, TestRunner
+except ImportError:
+ from utils import TestCase, TestRunner
+
+
+
+class TestUtils(TestCase):
+
+
+ def test_1_pydantic_function_signature_validation(self):
+
+ def func_without_args():
+ return 1
+ model = get_input_model_from_signature(func_without_args)
+ self.assertTrue(model is None)
+
+ """
+ Test Sequence:
+ 1. Create model from function signature
+ 2. Check model annotations
+ 3. Check model fields length
+ 4. Check model config (pydantic's model_config)
+        5. Validation with correct and wrong invocation of function
+ 6. Always check exception strings for ValueError
+ 7. Use ValidationError if pydantic is supposed to raise the Error
+ """
+
+ """
+ Signatures that we will validate:
+ 1. func_with_annotations(a: int, b: int) -> int:
+ 2. func_with_missing_annotations(a: int, b):
+ 3. func_with_no_annotations(a, b):
+ 4. func_with_kwargs(a: int, b: int, **kwargs):
+ 5. func_with_annotated_kwargs(a: int, b: int, **kwargs: typing.Dict[str, int]):
+ 6. func_with_args(*args):
+ 7. func_with_annotated_args(*args: typing.List[int]):
+ 8. func_with_args_and_kwargs(*args, **kwargs):
+ 9. func_with_annotated_args_and_kwargs(*args: typing.List[int], **kwargs: typing.Dict[str, int]):
+ 10. func_with_positional_only_args(a, b, /):
+ 11. func_with_keyword_only_args(*, a, b):
+ 12. func_with_positional_only_args_and_kwargs(a, *args, b, **kwargs):
+ """
+
+ ####################
+ ##### create model from function signature
+ # 1. func_with_annotations(a: int, b: int) -> int:
+ def func_with_annotations(a: int, b: int) -> int:
+ return a + b
+ model = get_input_model_from_signature(func_with_annotations)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['a'].annotation, int)
+ self.assertEqual(model.model_fields['b'].annotation, int)
+ self.assertEqual(len(model.model_fields), 2)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+ ##### validate correct usage
+ # For all the following cases, see block comment below the test case for details
+ # 1. correct usage with keyword arguments
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2})
+ # 2. incorrect argument types with keyword arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': '2'})
+ # 3. missing keyword arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1})
+ # 4. too many keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2, 'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ # 5. correct usage with positional arguments
+ pydantic_validate_args_kwargs(model, args=(1, 2))
+ # 6. incorrect argument types with positional arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=(1, '2'))
+ # 7. too many positional arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2, 3))
+ self.assertTrue(str(ex.exception).startswith("Too many positional arguments"))
+ # 8. missing positional arguments
+ with self.assertRaises(ValidationError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,))
+ # 9. correct usage with positional and keyword arguments
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': 2})
+ # 10. incorrect ordering with positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'a': 2})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ # 11. incorrect usage with both positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=('1', 2), kwargs={'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ ####################
+
+ # 1. correct usage with keyword arguments
+ # 2. incorrect argument types with keyword arguments
+ # 3. missing keyword arguments
+ # 4. too many keyword arguments
+ # 5. correct usage with positional arguments
+ # 6. incorrect argument types with positional arguments
+ # 7. too many positional arguments
+ # 8. missing positional arguments
+ # 9. correct usage with positional and keyword arguments
+ # 10. incorrect ordering with positional and keyword arguments
+ # 11. additional cases of incorrect usage falling under the same categories
+
+ ####################
+ ##### create model from function signature
+ # 2. func_with_missing_annotations(a: int, b):
+ def func_with_missing_annotations(a: int, b):
+ return a + b
+ model = get_input_model_from_signature(func_with_missing_annotations)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['a'].annotation, int)
+ self.assertEqual(model.model_fields['b'].annotation, typing.Any)
+ self.assertEqual(len(model.model_fields), 2)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+ ##### validate correct usage
+ # 1. correct usage with keyword arguments
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2})
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': '2'})
+ pydantic_validate_args_kwargs(model, kwargs={'a': 2, 'b': list()})
+ # 2. incorrect argument types with keyword arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': '1', 'b': '2'})
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': list(), 'b': dict()})
+ # 3. missing keyword arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1})
+ # 4. too many keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2, 'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ # 5. correct positional arguments
+ pydantic_validate_args_kwargs(model, args=(1, 2))
+ pydantic_validate_args_kwargs(model, args=(1, '2'))
+ pydantic_validate_args_kwargs(model, args=(2, list()))
+ # 6. incorrect argument types with positional arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=('1', '2'))
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=(list(), dict()))
+ # 7. too many positional arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,2,3))
+ self.assertTrue(str(ex.exception).startswith("Too many positional arguments"))
+ # 8. missing positional arguments
+ with self.assertRaises(ValidationError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,))
+ # 9. correct usage with positional and keyword arguments
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': 2})
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': '2'})
+ pydantic_validate_args_kwargs(model, args=(2,), kwargs={'b': list()})
+ # 10. incorrect ordering with positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'a': 2})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ # 11. incorrect usage with both positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=('1', 2), kwargs={'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ ####################
+
+ ####################
+ ##### create model from function signature
+ # 3. func_with_no_annotations(a, b):
+ def func_with_no_annotations(a, b):
+ return a + b
+ model = get_input_model_from_signature(func_with_no_annotations, model_for_empty_annotations=True)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['a'].annotation, typing.Any)
+ self.assertEqual(model.model_fields['b'].annotation, typing.Any)
+ self.assertEqual(len(model.model_fields), 2)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+ ##### validate correct usage
+ # 1. correct usage
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2})
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1.2, 'b': '2'})
+ pydantic_validate_args_kwargs(model, kwargs={'a': dict(), 'b': list()})
+ # 2. incorrect argument types
+ # typing.Any allows any type, so no ValidationError
+ # 3. missing keyword arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': list()})
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'b': dict()})
+ # 4. too many keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2, 'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ # 5. correct positional arguments
+ pydantic_validate_args_kwargs(model, args=(1, 2))
+ pydantic_validate_args_kwargs(model, args=(1, '2'))
+ pydantic_validate_args_kwargs(model, args=(dict(), list()))
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': 2})
+ # 6. incorrect argument types with positional arguments
+ # typing.Any allows any type, so no ValidationError
+ # 7. too many positional arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,2,3))
+ self.assertTrue(str(ex.exception).startswith("Too many positional arguments"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(dict(), list(), 3))
+ self.assertTrue(str(ex.exception).startswith("Too many positional arguments"))
+ # 8. missing positional arguments
+ with self.assertRaises(ValidationError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,))
+ with self.assertRaises(ValidationError) as ex:
+ pydantic_validate_args_kwargs(model, args=(dict(),))
+ # 9. correct usage with positional and keyword arguments
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': 2})
+ pydantic_validate_args_kwargs(model, args=(1.1,), kwargs={'b': '2'})
+ pydantic_validate_args_kwargs(model, args=(dict(),), kwargs={'b': list()})
+ # 10. incorrect ordering with positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'a': 2})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ # 11. incorrect usage with both positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=('1', 2), kwargs={'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+
+ model = get_input_model_from_signature(func_with_no_annotations)
+ self.assertTrue(model is None)
+
+ ####################
+ ##### create model from function signature
+ # 4. func_with_kwargs(a: int, b: int, **kwargs):
+ def func_with_kwargs(a: int, b: int, **kwargs):
+ return a + b
+ model = get_input_model_from_signature(func_with_kwargs)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['a'].annotation, int)
+ self.assertEqual(model.model_fields['b'].annotation, int)
+ self.assertEqual(len(model.model_fields), 3)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+ ##### validate correct usage
+ # 1. correct usage
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2})
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2, 'c': 3})
+ pydantic_validate_args_kwargs(model, args=(1,2), kwargs={'c': '3'})
+ # 2. incorrect argument types
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': '2'})
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': '2', 'c': '3'})
+ # 3. missing keyword arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1})
+ # 4. too many keyword arguments
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2, 'c': 3, 'd': 4}) # OK, not an error
+ # 5. correct positional arguments
+ pydantic_validate_args_kwargs(model, args=(1, 2))
+ # 6. incorrect argument types with positional arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=(1, '2'))
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=('1', 2))
+ # 7. too many positional arguments
+ with self.assertRaises(ValidationError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2, 3))
+ # 8. missing positional arguments
+ with self.assertRaises(ValidationError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,))
+ # 9. correct usage with positional and keyword arguments
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': 2})
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': 2, 'c': 3})
+ # 10. incorrect ordering with positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'a': 2})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'a': 3})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'b': 3})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ # 11. incorrect usage with both positional and keyword arguments
+ # any extra keyword argument is allowed
+
+ ####################
+ ##### create model from function signature
+ # 5. func_with_annotated_kwargs(a: int, b: int, **kwargs: typing.Dict[str, int]):
+ def func_with_annotated_kwargs(a: int, b: int, **kwargs: typing.Dict[str, int]):
+ return a + b
+ model = get_input_model_from_signature(func_with_annotated_kwargs)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['a'].annotation, int)
+ self.assertEqual(model.model_fields['b'].annotation, int)
+ self.assertEqual(model.model_fields['kwargs'].annotation, typing.Dict[str, int])
+ self.assertEqual(len(model.model_fields), 3)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+ # 1. correct usage
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2})
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2, 'c': 3})
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'c': 3})
+ # 2. incorrect argument types
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': '2'})
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2, 'c': '3'})
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1, 'b': 2, 'c': list()})
+ # 3. missing keyword arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, kwargs={'a': 1})
+ # 4. too many keyword arguments
+ # OK, not an error
+ # 5. correct positional arguments
+ pydantic_validate_args_kwargs(model, args=(1, 2))
+ # 6. incorrect argument types with positional arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=(1, '2'))
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=(dict(), 2))
+ # 7. too many positional arguments
+ with self.assertRaises(ValidationError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,2,3))
+ # 8. missing positional arguments
+ with self.assertRaises(ValidationError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,))
+ # 9. correct usage with positional and keyword arguments
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': 2})
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'b': 2, 'c': 3})
+ # 10. incorrect ordering with positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'a': 2})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'a': 3})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'b': 3})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'a': list(), 'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Multiple values for argument"))
+        # 11. incorrect usage with both positional and keyword arguments
+        # any extra keyword argument is allowed so long as it is of type int
+
+
+ # both the following are not allowed in python - its also illogical
+ # def func_with_double_args(*args1, *args2):
+ # """syntax error"""
+ # return
+ # def func_with_double_kwargs(**kwargs1, **kwargs2):
+ # """syntax error"""
+ # return
+
+ ####################
+ ##### create model from function signature
+ # 6. func_with_args(*args):
+ def func_with_args(*args):
+ return sum(args)
+ model = get_input_model_from_signature(func_with_args, model_for_empty_annotations=True)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['args'].annotation, typing.Tuple)
+ self.assertEqual(len(model.model_fields), 1)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+ # 1. correct usage
+ pydantic_validate_args_kwargs(model, args=(1, 2))
+ pydantic_validate_args_kwargs(model)
+        pydantic_validate_args_kwargs(model, args=(dict(),))  # trailing comma: one-element tuple, not a bare dict
+ # 2. incorrect argument types
+ # OK, since args is a tuple of any type
+ # 3. missing keyword arguments
+ # OK, since args is a tuple
+ # 4. too many keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, kwargs={'a' : 1})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ # 5. correct positional arguments
+ pydantic_validate_args_kwargs(model, args=(1, 2))
+ # 6. incorrect argument types with positional arguments
+ # OK, since args is a tuple of any type
+ # 7. too many positional arguments
+ # OK, since args is a tuple of any length
+ # 8. missing positional arguments
+ # OK, since args is a tuple of any length
+ # 9. correct usage with positional and keyword arguments
+ # no keyword arguments
+ # 10. incorrect ordering with positional and keyword arguments
+ # OK, since args is a tuple and not keywords, no multiple values
+ # 11. incorrect usage with both positional and keyword arguments
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1,), kwargs={'a': 2})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ with self.assertRaises(ValueError) as ex:
+ pydantic_validate_args_kwargs(model, args=(1, 2), kwargs={'c': 3})
+ self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+
+ model = get_input_model_from_signature(func_with_args)
+ self.assertTrue(model is None)
+ ####################
+
+ ####################
+ ##### create model from function signature
+ # 7. func_with_annotated_args(*args: typing.List[int]):
+ def func_with_annotated_args(*args: typing.List[int]):
+ return sum(args)
+ model = get_input_model_from_signature(func_with_annotated_args)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['args'].annotation, typing.List[int])
+ self.assertEqual(len(model.model_fields), 1)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+ # 1. correct usage with keyword arguments
+ # not possible, since args is a tuple
+ # 2. incorrect argument types with keyword arguments
+ # keyword arguments are not allowed
+ # 3. missing keyword arguments
+ # not possible
+ # 4. too many keyword arguments
+        with self.assertRaises(ValueError) as ex:  # bind 'as ex' so the assertion below checks THIS exception, not a stale one
+            pydantic_validate_args_kwargs(model, kwargs={'a' : 1})
+        self.assertTrue(str(ex.exception).startswith("Unexpected keyword arguments"))
+ # 5. correct usage with positional arguments
+ pydantic_validate_args_kwargs(model)
+ pydantic_validate_args_kwargs(model, args=(1, 2))
+ # 6. incorrect argument types with positional arguments
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=(1, '2'))
+ with self.assertRaises(ValidationError):
+ pydantic_validate_args_kwargs(model, args=(dict(),))
+ # 7. too many positional arguments
+ # OK, since args is a list of any length
+ # 8. missing positional arguments
+ # OK, since args is a list of any length
+ # 9. correct usage with positional and keyword arguments
+ # not possible
+ # 10. incorrect ordering with positional and keyword arguments
+ # OK, since args is a list and not keywords, no multiple values
+ # 11. incorrect usage with both positional and keyword arguments
+ # not possible
+
+
+ #####################
+ ##### create model from function signature
+ # 8. func_with_args_and_kwargs(*args, **kwargs):
+ def func_with_args_and_kwargs(*args, **kwargs):
+ return sum(args) + sum(kwargs.values())
+ # no model
+ model = get_input_model_from_signature(func_with_args_and_kwargs)
+ self.assertTrue(model is None)
+ # check model for empty annotations
+ model = get_input_model_from_signature(func_with_args_and_kwargs, model_for_empty_annotations=True)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['args'].annotation, typing.Tuple)
+ self.assertEqual(model.model_fields['kwargs'].annotation, typing.Dict[str, typing.Any])
+ self.assertEqual(len(model.model_fields), 2)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+
+
+ def func_with_annotated_args_and_kwargs(*args: typing.List[int], **kwargs: typing.Dict[str, int]):
+ return sum(args) + sum(kwargs.values())
+ model = get_input_model_from_signature(func_with_annotated_args_and_kwargs)
+ self.assertTrue(issubklass(model, BaseModel))
+ self.assertEqual(model.model_fields['args'].annotation, typing.List[int])
+ self.assertEqual(model.model_fields['kwargs'].annotation, typing.Dict[str, int])
+ self.assertEqual(len(model.model_fields), 2)
+ self.assertEqual(model.model_config['extra'], 'forbid')
+
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
diff --git a/tests/test_01_message.py b/tests/test_01_message.py
new file mode 100644
index 00000000..100f47a7
--- /dev/null
+++ b/tests/test_01_message.py
@@ -0,0 +1,217 @@
+import unittest
+from uuid import UUID, uuid4
+
+from hololinked.core.zmq.message import (EXIT, OPERATION, HANDSHAKE,
+ PreserializedData, SerializableData, RequestHeader, EventHeader, RequestMessage) # client to server
+from hololinked.core.zmq.message import (TIMEOUT, INVALID_MESSAGE, ERROR, REPLY,
+ ResponseMessage, ResponseHeader, EventMessage) # server to client
+from hololinked.serializers.serializers import Serializers
+
+try:
+ from .utils import TestCase, TestRunner
+except ImportError:
+ from utils import TestCase, TestRunner
+
+
+
+class MessageValidatorMixin(TestCase):
+ """A mixin class to validate request and response messages"""
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ self.server_id = 'test-server'
+ self.client_id = 'test-client'
+ self.thing_id = 'test-thing'
+
+
+ def validate_request_message(self, request_message: RequestMessage) -> None:
+ """call this method to validate request message"""
+
+ # req. 1. check message ID is a UUID
+ self.assertTrue(isinstance(request_message.id, UUID) or isinstance(UUID(request_message.id, version=4), UUID))
+ # req. 2. generated byte array must confine to predefined length (which is readonly & fixed)
+ self.assertEqual(len(request_message.byte_array), request_message.length)
+ # req. 3. receiver which must be the server ID
+ self.assertEqual(request_message.receiver_id, self.server_id)
+ # req. 4. sender_id is the client ID
+ self.assertEqual(request_message.sender_id, self.client_id)
+ # req. 5. all indices of byte array are bytes
+ for obj in request_message.byte_array:
+ self.assertIsInstance(obj, bytes)
+ # req. 6. check that header is correct type (RequestHeader dataclass/struct)
+ self.assertIsInstance(request_message.header, RequestHeader)
+ # req. 7 check that body is correct type (list of SerializableData and PreserializedData)
+ self.assertIsInstance(request_message.body, list)
+ self.assertEqual(len(request_message.body), 2)
+ self.assertIsInstance(request_message.body[0], SerializableData)
+ self.assertIsInstance(request_message.body[1], PreserializedData)
+
+
+ def validate_response_message(self, response_message: ResponseMessage) -> None:
+ """call this method to validate response message"""
+
+ # check message ID is a UUID
+ self.assertTrue(isinstance(response_message.id, UUID) or isinstance(UUID(response_message.id, version=4), UUID))
+ # check message length
+ self.assertEqual(len(response_message.byte_array), response_message.length)
+ # check receiver which must be the client
+ self.assertEqual(response_message.receiver_id, self.client_id)
+ # sender_id is not set before sending message on the socket
+ self.assertEqual(response_message.sender_id, self.server_id)
+ # check that all indices are bytes
+ for obj in response_message.byte_array:
+ self.assertIsInstance(obj, bytes)
+ # check that header is correct type
+ self.assertIsInstance(response_message.header, ResponseHeader)
+ # check that body is correct type
+ self.assertIsInstance(response_message.body, list)
+ self.assertEqual(len(response_message.body), 2)
+ self.assertIsInstance(response_message.body[0], SerializableData)
+ self.assertIsInstance(response_message.body[1], PreserializedData)
+
+
+ def validate_event_message(self, event_message: EventMessage) -> None:
+ """call this method to validate event message"""
+
+ # check message ID is a UUID
+ self.assertTrue(isinstance(event_message.id, UUID) or isinstance(UUID(event_message.id, version=4), UUID))
+ # check message length
+ self.assertEqual(len(event_message.byte_array), event_message.length)
+ # no receiver id for event message, only event id
+ self.assertIsInstance(event_message.event_id, str)
+ # sender_id is not set before sending message on the socket
+ self.assertEqual(event_message.sender_id, self.server_id)
+ # check that all indices are bytes
+ for obj in event_message.byte_array:
+ self.assertIsInstance(obj, bytes)
+ # check that header is correct type
+ self.assertIsInstance(event_message.header, EventHeader)
+ # check that body is correct type
+ self.assertIsInstance(event_message.body, list)
+ self.assertEqual(len(event_message.body), 2)
+ self.assertIsInstance(event_message.body[0], SerializableData)
+ self.assertIsInstance(event_message.body[1], PreserializedData)
+
+
+
+class TestMessagingContract(MessageValidatorMixin):
+ """Tests request and response messages"""
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ print(f"test message contract with {self.__name__}")
+
+
+ def test_1_request_message(self):
+ """test the request message"""
+
+ # request messages types are OPERATION, HANDSHAKE & EXIT
+ request_message = RequestMessage.craft_from_arguments(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ thing_id=self.thing_id,
+ objekt='some_prop',
+ operation='readProperty',
+ )
+ self.validate_request_message(request_message)
+ # check message type for the above craft_from_arguments method
+ self.assertEqual(request_message.type, OPERATION)
+
+ request_message = RequestMessage.craft_with_message_type(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ message_type=HANDSHAKE
+ )
+ self.validate_request_message(request_message)
+ # check message type for the above craft_with_message_type method
+ self.assertEqual(request_message.type, HANDSHAKE)
+
+ request_message = RequestMessage.craft_with_message_type(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ message_type=EXIT
+ )
+ self.validate_request_message(request_message)
+ # check message type for the above craft_with_message_type method
+ self.assertEqual(request_message.type, EXIT)
+
+
+ def test_2_response_message(self):
+ """test the response message"""
+
+ # response messages types are HANDSHAKE, TIMEOUT, INVALID_MESSAGE, ERROR and REPLY
+ response_message = ResponseMessage.craft_from_arguments(
+ receiver_id=self.client_id,
+ sender_id=self.server_id,
+ message_type=HANDSHAKE,
+ message_id=uuid4(),
+ )
+ self.validate_response_message(response_message)
+ # check message type for the above craft_with_message_type method
+ self.assertEqual(response_message.type, HANDSHAKE)
+
+ response_message = ResponseMessage.craft_from_arguments(
+ receiver_id=self.client_id,
+ sender_id=self.server_id,
+ message_type=TIMEOUT,
+ message_id=uuid4()
+ )
+ self.validate_response_message(response_message)
+ # check message type for the above craft_with_message_type method
+ self.assertEqual(response_message.type, TIMEOUT)
+
+ response_message = ResponseMessage.craft_from_arguments(
+ receiver_id=self.client_id,
+ sender_id=self.server_id,
+ message_type=INVALID_MESSAGE,
+ message_id=uuid4()
+ )
+ self.validate_response_message(response_message)
+ # check message type for the above craft_with_message_type method
+ self.assertEqual(response_message.type, INVALID_MESSAGE)
+
+ response_message = ResponseMessage.craft_from_arguments(
+ receiver_id=self.client_id,
+ sender_id=self.server_id,
+ message_type=ERROR,
+ message_id=uuid4(),
+ payload=SerializableData(Exception('test'))
+ )
+ self.validate_response_message(response_message)
+ self.assertEqual(response_message.type, ERROR)
+ self.assertIsInstance(Serializers.json.loads(response_message._bytes[2]), dict)
+
+ request_message = RequestMessage.craft_from_arguments(
+ sender_id=self.client_id,
+ receiver_id=self.server_id,
+ thing_id=self.thing_id,
+ objekt='some_prop',
+ operation='readProperty',
+ )
+ request_message._sender_id = self.client_id # will be done by craft_from_self
+ response_message = ResponseMessage.craft_reply_from_request(
+ request_message=request_message,
+ )
+ self.validate_response_message(response_message)
+ self.assertEqual(response_message.type, REPLY)
+ self.assertEqual(Serializers.json.loads(response_message._bytes[2]), None)
+ self.assertEqual(request_message.id, response_message.id)
+
+
+ def test_3_event_message(self):
+ """test the event message"""
+ # event messages types are HANDSHAKE, TIMEOUT, INVALID_MESSAGE, ERROR and REPLY
+ event_message = EventMessage.craft_from_arguments(
+ event_id='test-event',
+ sender_id=self.server_id,
+ payload=SerializableData('test'),
+ preserialized_payload=PreserializedData(b'test'),
+ )
+ self.validate_event_message(event_message)
+
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
diff --git a/tests/test_02_socket.py b/tests/test_02_socket.py
new file mode 100644
index 00000000..ab77fbc6
--- /dev/null
+++ b/tests/test_02_socket.py
@@ -0,0 +1,234 @@
+import unittest
+import zmq.asyncio
+
+from hololinked.core.zmq.brokers import BaseZMQ
+from hololinked.constants import ZMQ_TRANSPORTS
+
+try:
+ from .utils import TestCase, TestRunner
+except ImportError:
+ from utils import TestCase, TestRunner
+
+
+
+class TestSocket(TestCase):
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ print(f"test ZMQ socket creation with {self.__name__}")
+
+
+ def test_1_socket_creation_defaults(self):
+ """check the default settings of socket ceration, IPC socket which is a ROUTER and async"""
+ socket, socket_address = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=zmq.asyncio.Context()
+ )
+ self.assertIsInstance(socket, zmq.asyncio.Socket)
+ self.assertTrue(socket.getsockopt_string(zmq.IDENTITY) == 'test-server')
+ self.assertTrue(socket.socket_type == zmq.ROUTER)
+ self.assertTrue(socket_address.startswith('ipc://'))
+ self.assertTrue(socket_address.endswith('.ipc'))
+ socket.close()
+
+
+ def test_2_context_options(self):
+ """
+ Check that context and socket type are as expected.
+ Async context should be used for async socket and sync context for sync socket.
+ """
+ context = zmq.Context()
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context
+ )
+ self.assertTrue(isinstance(socket, zmq.Socket))
+ self.assertTrue(not isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+ context.term()
+
+ context = zmq.asyncio.Context()
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context
+ )
+ self.assertTrue(isinstance(socket, zmq.Socket))
+ self.assertTrue(isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+ context.term()
+
+
+ def test_3_transport_options(self):
+ """check only three transport options are supported"""
+ context = zmq.asyncio.Context()
+ socket, socket_address = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ transport='TCP',
+ socket_address='tcp://*:5555'
+ )
+ for sock_addr in [socket_address, socket.getsockopt_string(zmq.LAST_ENDPOINT)]:
+ self.assertTrue(sock_addr.startswith('tcp://'))
+ self.assertTrue(sock_addr.endswith(':5555'))
+ socket.close()
+
+ socket, socket_address = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ transport='IPC',
+ )
+
+ self.assertEqual(socket_address, socket.getsockopt_string(zmq.LAST_ENDPOINT))
+ self.assertTrue(socket_address.startswith('ipc://'))
+ self.assertTrue(socket_address.endswith('.ipc'))
+ socket.close()
+
+ socket, socket_address = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ transport='INPROC',
+ )
+ self.assertEqual(socket_address, socket.getsockopt_string(zmq.LAST_ENDPOINT))
+ self.assertTrue(socket_address.startswith('inproc://'))
+ self.assertTrue(socket_address.endswith('test-server'))
+ socket.close()
+ context.term()
+
+ # Specify transport as enum and do the same tests
+ context = zmq.Context()
+ socket, socket_address = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ transport=ZMQ_TRANSPORTS.INPROC,
+ )
+ self.assertTrue(socket_address.startswith('inproc://'))
+ self.assertTrue(socket_address.endswith('test-server'))
+ socket.close()
+
+ socket, socket_address = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ transport=ZMQ_TRANSPORTS.IPC,
+ )
+ self.assertTrue(socket_address.startswith('ipc://'))
+ self.assertTrue(socket_address.endswith('.ipc'))
+ socket.close()
+
+ socket, socket_address = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ transport=ZMQ_TRANSPORTS.TCP,
+ socket_address='tcp://*:5556'
+ )
+ self.assertTrue(socket_address.startswith('tcp://'))
+ self.assertTrue(socket_address.endswith(':5556'))
+ socket.close()
+ context.term()
+
+ # check that other transport options raise error
+ context = zmq.asyncio.Context()
+ self.assertRaises(NotImplementedError, lambda: BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ transport='PUB',
+ ))
+ context.term()
+
+
+ def test_4_socket_options(self):
+ """check that socket options are as expected"""
+ context = zmq.asyncio.Context()
+
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ socket_type=zmq.ROUTER
+ )
+ self.assertTrue(socket.socket_type == zmq.ROUTER)
+ self.assertTrue(socket.getsockopt_string(zmq.IDENTITY) == 'test-server')
+ self.assertTrue(isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ socket_type=zmq.DEALER
+ )
+ self.assertTrue(socket.socket_type == zmq.DEALER)
+ self.assertTrue(socket.getsockopt_string(zmq.IDENTITY) == 'test-server')
+ self.assertTrue(isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ socket_type=zmq.PUB
+ )
+ self.assertTrue(socket.socket_type == zmq.PUB)
+ self.assertTrue(socket.getsockopt_string(zmq.IDENTITY) == 'test-server')
+ self.assertTrue(isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ socket_type=zmq.SUB
+ )
+ self.assertTrue(socket.socket_type == zmq.SUB)
+ self.assertTrue(socket.getsockopt_string(zmq.IDENTITY) == 'test-server')
+ self.assertTrue(isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ socket_type=zmq.PAIR
+ )
+ self.assertTrue(socket.socket_type == zmq.PAIR)
+ self.assertTrue(socket.getsockopt_string(zmq.IDENTITY) == 'test-server')
+ self.assertTrue(isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+
+ zmq.asyncio.Context()
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ socket_type=zmq.PUSH
+ )
+ self.assertTrue(socket.socket_type == zmq.PUSH)
+ self.assertTrue(socket.getsockopt_string(zmq.IDENTITY) == 'test-server')
+ self.assertTrue(isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+
+ socket, _ = BaseZMQ.get_socket(
+ id='test-server',
+ node_type='server',
+ context=context,
+ socket_type=zmq.PULL
+ )
+ self.assertTrue(socket.socket_type == zmq.PULL)
+ self.assertTrue(socket.getsockopt_string(zmq.IDENTITY) == 'test-server')
+ self.assertTrue(isinstance(socket, zmq.asyncio.Socket))
+ socket.close()
+ context.term()
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
\ No newline at end of file
diff --git a/tests/test_03_serializers.py b/tests/test_03_serializers.py
new file mode 100644
index 00000000..56478178
--- /dev/null
+++ b/tests/test_03_serializers.py
@@ -0,0 +1,166 @@
+import unittest
+
+from hololinked.serializers import Serializers
+from hololinked.serializers.serializers import BaseSerializer
+
+try:
+ from .utils import TestRunner, TestCase
+ from .things import TestThing
+except ImportError:
+ from utils import TestRunner, TestCase
+ from things import TestThing
+
+
+class TestSerializer(TestCase):
+ """Test the Serializers class"""
+
+ # test register a new serializer with content type
+ class YAMLSerializer(BaseSerializer):
+ """just a dummy, does not really serialize to YAML"""
+ @property
+ def content_type(self):
+ return 'application/yaml'
+
+
+ def test_1_singleton(self):
+ """Test the singleton nature of the Serializers class."""
+
+ serializers = Serializers()
+ self.assertEqual(serializers, Serializers())
+ self.assertNotEqual(Serializers, Serializers())
+ self.assertIsInstance(serializers, Serializers)
+ # all are class attributes
+ self.assertEqual(serializers.json, Serializers.json)
+ self.assertEqual(serializers.pickle, Serializers.pickle)
+ self.assertEqual(serializers.msgpack, Serializers.msgpack)
+ self.assertEqual(serializers.content_types, Serializers.content_types)
+ self.assertEqual(serializers.object_content_type_map, Serializers.object_content_type_map)
+ self.assertEqual(serializers.object_serializer_map, Serializers.object_serializer_map)
+ self.assertEqual(serializers.protocol_serializer_map, Serializers.protocol_serializer_map)
+ # check existing serializers are all instances of BaseSerializer
+ for name, serializer in Serializers.content_types.items():
+ self.assertIsInstance(serializer, BaseSerializer)
+        # check default serializer, given that we know it's JSON at least for the current test
+ self.assertEqual(serializers.default, Serializers.json)
+ self.assertEqual(serializers.default, Serializers.default)
+ self.assertEqual(serializers.default, Serializers().json)
+ self.assertEqual(serializers.default, Serializers().default)
+        # check default content type, given that we know it's JSON at least for the current test
+ self.assertEqual(serializers.default_content_type, Serializers.json.content_type)
+ # change default to pickle and check if it is set correctly
+ # serializers.default = serializers.pickle
+ # self.assertEqual(serializers.default, Serializers.pickle)
+ # self.assertEqual(Serializers().default, Serializers.pickle)
+
+
+ def test_2_protocol_registration(self):
+ """i.e. test if a new serializer (protocol) can be registered"""
+
+ # get existing number of serializers
+ num_serializers = len(Serializers.content_types)
+
+ # test register a new serializer
+ base_serializer = BaseSerializer()
+ # register with name
+ self.assertWarns(UserWarning, Serializers.register, base_serializer, 'base')
+ # user warning because content type property is not defined
+ # above is same as Serializers.register(base_serializer, 'base')
+
+ # check if name became a class attribute and name can be accessed as an attribute
+ self.assertIn('base', Serializers)
+ self.assertEqual(Serializers.base, base_serializer)
+ self.assertEqual(Serializers().base, base_serializer)
+        # we don't support __getitem__ at instance level yet, so we cannot test assertIn
+
+ # since a content type is not set, it should not be in the content types
+ self.assertNotIn(base_serializer, Serializers.content_types.values())
+ # so the length of content types should be the same
+ self.assertEqual(len(Serializers.content_types), num_serializers)
+
+
+ # instantiate
+ yaml_serializer = self.YAMLSerializer()
+ # register with name
+ Serializers.register(yaml_serializer, 'yaml')
+ # check if name became a class attribute and name can be accessed as an attribute
+ self.assertIn('yaml', Serializers)
+ self.assertEqual(Serializers.yaml, yaml_serializer)
+ self.assertEqual(Serializers().yaml, yaml_serializer)
+        # we don't support __getitem__ at instance level yet
+
+ # since a content type is set, it should be in the content types
+ self.assertIn(yaml_serializer.content_type, Serializers.content_types.keys())
+ self.assertIn(yaml_serializer, Serializers.content_types.values())
+ # so the length of content types should have increased by 1
+ self.assertEqual(len(Serializers.content_types), num_serializers + 1)
+
+
+ def test_3_registration_for_objects(self):
+ """i.e. test if a new serializer can be registered for a specific property, action or event"""
+ Serializers.register_content_type_for_object(TestThing.base_property, 'application/octet-stream')
+ Serializers.register_content_type_for_object(TestThing.action_echo, 'x-msgpack')
+ Serializers.register_content_type_for_object(TestThing.test_event, 'application/yaml')
+
+ self.assertEqual(Serializers.for_object(None, 'TestThing', 'action_echo'), Serializers.msgpack)
+ self.assertEqual(Serializers.for_object(None, 'TestThing', 'base_property'), Serializers.pickle)
+ self.assertEqual(Serializers.for_object(None, 'TestThing', 'test_event'), Serializers.yaml)
+ self.assertEqual(Serializers.for_object(None, 'TestThing', 'test_unknown_property'), Serializers.default)
+
+
+ def test_4_registration_for_objects_by_name(self):
+
+ Serializers.register_content_type_for_object_per_thing_instance('test_thing', 'base_property',
+ 'application/yaml')
+ self.assertIsInstance(Serializers.for_object('test_thing', None, 'base_property'),
+ self.YAMLSerializer)
+
+
+ def test_5_registration_dict(self):
+ """test the dictionary where all serializers are stored"""
+ # depends on test 3
+ self.assertIn('test_thing', Serializers.object_content_type_map)
+ self.assertIn('base_property', Serializers.object_content_type_map['test_thing'])
+ self.assertEqual(Serializers.object_content_type_map['test_thing']['base_property'],
+ 'application/yaml')
+
+ self.assertIn('action_echo', Serializers.object_content_type_map['TestThing'])
+ self.assertEqual(Serializers.object_content_type_map['TestThing']['action_echo'],
+ 'x-msgpack')
+ self.assertIn('test_event', Serializers.object_content_type_map['TestThing'])
+ self.assertEqual(Serializers.object_content_type_map['TestThing']['test_event'],
+ 'application/yaml')
+
+
+ def test_6_retrieval(self):
+ # added in previous tests
+ self.assertIsInstance(Serializers.for_object('test_thing', None, 'base_property'), self.YAMLSerializer)
+ # unknown object should retrieve the default serializer
+ self.assertEqual(Serializers.for_object('test_thing', None, 'test_unknown_property'), Serializers.default)
+ # unknown thing should retrieve the default serializer
+ self.assertEqual(Serializers.for_object('test_unknown_thing', None, 'base_property'), Serializers.default)
+
+
+ def test_7_set_default(self):
+ """test setting the default serializer"""
+ # get existing default
+ old_default = Serializers.default
+ # set new default and check if default is set
+ Serializers.default = Serializers.yaml
+ self.assertEqual(Serializers.default, Serializers.yaml)
+ self.test_6_retrieval() # check if retrieval is consistent with default
+ # reset default and check if default is reset
+ Serializers.default = old_default
+ self.assertEqual(Serializers.default, old_default)
+        self.assertEqual(Serializers.default, Serializers.json) # because we know it's JSON
+
+
+ @classmethod
+ def tearDownClass(self):
+ Serializers.reset()
+ return super().tearDownClass()
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
+
+
diff --git a/tests/test_04_thing_init.py b/tests/test_04_thing_init.py
new file mode 100644
index 00000000..c8e67c8d
--- /dev/null
+++ b/tests/test_04_thing_init.py
@@ -0,0 +1,721 @@
+import typing
+import unittest
+import logging
+
+from hololinked.core.actions import BoundAction
+from hololinked.core.events import EventDispatcher
+from hololinked.core.zmq.brokers import EventPublisher
+from hololinked.core import Thing, ThingMeta, Action, Event, Property
+from hololinked.core.meta import DescriptorRegistry, PropertiesRegistry, ActionsRegistry, EventsRegistry
+from hololinked.core.zmq.rpc_server import RPCServer, prepare_rpc_server
+from hololinked.core.properties import Parameter
+from hololinked.core.state_machine import BoundFSM
+from hololinked.utils import get_default_logger
+from hololinked.core.logger import RemoteAccessHandler
+
+try:
+ from .things import OceanOpticsSpectrometer
+ from .utils import TestCase, TestRunner
+except ImportError:
+ from things import OceanOpticsSpectrometer
+ from utils import TestCase, TestRunner
+
+
+"""
+The tests in this file are for the initialization of the Thing class and its subclasses.
+1. Test Thing class
+2. Test Thing subclass
+3. Test ThingMeta metaclass
+4. Test ActionRegistry class
+5. Test EventRegistry class
+6. Test PropertiesRegistry class
+"""
+
+
+
+class TestThingInit(TestCase):
+ """Test Thing class which is the bread and butter of this package."""
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ print(f"test Thing instantiation with {self.__name__}")
+ self.thing_cls = Thing
+ # using a variable called thing_cls because same tests are repeated for different thing class
+
+ """
+ Test sequence is as follows:
+ 1. Test id requirements
+ 2. Test logger setup
+ 3. Test state and state_machine setup
+ 4. Test composition of subthings
+ 5. Test servers init
+ 6. Test thing model generation
+ """
+
+ def test_1_id(self):
+ """Test id property of Thing class"""
+ # req. 1. instance name must be a string and cannot be changed after set
+ thing = self.thing_cls(id="test_id", log_level=logging.WARN)
+ self.assertEqual(thing.id, "test_id")
+ with self.assertRaises(ValueError):
+ thing.id = "new_instance"
+ with self.assertRaises(NotImplementedError):
+ del thing.id
+ # req. 2. regex is r'[A-Za-z]+[A-Za-z_0-9\-\/]*', simple URI like
+ valid_ids = ["test_id", "A123", "valid_id-123", "another/valid-id"]
+ invalid_ids = ["123_invalid", "invalid id", "invalid@id", ""]
+ for valid_id in valid_ids:
+ thing.properties.descriptors["id"].validate_and_adapt(valid_id)
+ for invalid_id in invalid_ids:
+ with self.assertRaises(ValueError):
+ thing.properties.descriptors["id"].validate_and_adapt(invalid_id)
+
+
+ def test_2_logger(self):
+ """Test logger setup"""
+ # req. 1. logger must have remote access handler if remote_accessible_logger is True
+ logger = get_default_logger("test_logger", log_level=logging.WARN)
+ thing = self.thing_cls(id="test_remote_accessible_logger", logger=logger, remote_accessible_logger=True)
+ self.assertEqual(thing.logger, logger)
+ self.assertTrue(any(isinstance(handler, RemoteAccessHandler) for handler in thing.logger.handlers))
+ # Therefore also check the false condition
+ logger = get_default_logger("test_logger_2", log_level=logging.WARN)
+ thing = self.thing_cls(id="test_logger_without_remote_access", logger=logger, remote_accessible_logger=False)
+ self.assertFalse(any(isinstance(handler, RemoteAccessHandler) for handler in thing.logger.handlers))
+ # NOTE - logger is modifiable after instantiation
+
+ # req. 2. logger is created automatically if not provided
+ thing = self.thing_cls(id="test_logger_auto_creation", log_level=logging.WARN)
+ self.assertIsNotNone(thing.logger)
+ self.assertFalse(any(isinstance(handler, RemoteAccessHandler) for handler in thing.logger.handlers))
+ self.assertNotEqual(thing.logger, logger) # not the above logger that we used.
+ # remote accessible only when we ask for it
+ thing = self.thing_cls(id="test_logger_auto_creation_2", log_level=logging.WARN, remote_accessible_logger=True)
+ self.assertIsNotNone(thing.logger)
+ self.assertTrue(any(isinstance(handler, RemoteAccessHandler) for handler in thing.logger.handlers))
+ self.assertNotEqual(thing.logger, logger)
+
+
+ def test_3_state(self):
+ """Test state and state_machine setup"""
+ # req. 1. state property must be None when no state machine is present
+ thing1 = self.thing_cls(id="test_no_state_machine", log_level=logging.WARN)
+ self.assertIsNone(thing1.state)
+ self.assertIsNone(thing1.state_machine)
+ # detailed checks in another file
+
+
+ def test_4_subthings(self):
+ """Test composition"""
+ thing = self.thing_cls(
+ id="test_subthings", log_level=logging.WARN,
+ remote_accessible_logger=True
+ )
+ # req. 1. subthings must be a dictionary
+ self.assertIsInstance(thing.sub_things, dict)
+ self.assertEqual(len(thing.sub_things), 1) # logger
+        # req. 2. subthings are always recomputed when accessed (at least that's the way it is right now),
+ # so we can add new subthings anytime
+ thing.another_thing = OceanOpticsSpectrometer(id="another_thing", log_level=logging.WARN)
+ self.assertIsInstance(thing.sub_things, dict)
+ self.assertEqual(len(thing.sub_things), 2)
+ # req. 3. subthings must be instances of Thing and have the parent as owner
+ for name, subthing in thing.sub_things.items():
+ self.assertTrue(thing in subthing._owners)
+ self.assertIsInstance(subthing, Thing)
+ # req. 4. name of subthing must match name of the attribute
+ self.assertTrue(hasattr(thing, name))
+
+
+ def test_5_servers_init(self):
+ """Test if servers can be initialized/instantiated"""
+ # req. 1. rpc_server and event_publisher must be None when not run()
+ thing = self.thing_cls(id="test_servers_init", log_level=logging.ERROR)
+ self.assertIsNone(thing.rpc_server)
+ self.assertIsNone(thing.event_publisher)
+ # req. 2. rpc_server and event_publisher must be instances of their respective classes when run()
+ prepare_rpc_server(thing, 'IPC')
+ self.assertIsInstance(thing.rpc_server, RPCServer)
+ self.assertIsInstance(thing.event_publisher, EventPublisher)
+ # exit to quit nicely
+ thing.rpc_server.exit()
+ thing.event_publisher.exit()
+
+
+
+class TestOceanOpticsSpectrometer(TestThingInit):
+ """test Thing subclass example"""
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ self.thing_cls = OceanOpticsSpectrometer
+
+ # check docs of the parent class for the test sequence
+
+ def test_3_state(self):
+ """Test state and state_machine setup"""
+ thing1 = self.thing_cls(id="test_state_machine", log_level=logging.WARN)
+ # req. 1. state and state machine must be present because we create this subclass with a state machine
+ self.assertIsNotNone(thing1.state)
+ self.assertIsInstance(thing1.state_machine, BoundFSM)
+ # req. 2. state and state machine must be different for different instances
+ thing2 = self.thing_cls(id="test_state_machine_2", log_level=logging.WARN)
+ # first check if state machine exists
+ self.assertIsNotNone(thing2.state)
+ self.assertIsInstance(thing2.state_machine, BoundFSM)
+ # then check if they are different
+ self.assertNotEqual(thing1.state_machine, thing2.state_machine)
+ # until state is set, initial state is equal
+ self.assertEqual(thing1.state, thing2.state)
+ self.assertEqual(thing1.state_machine.initial_state, thing2.state_machine.initial_state)
+ # after state is set, they are different
+ thing1.state_machine.set_state(thing1.states.ALARM)
+ self.assertNotEqual(thing1.state, thing2.state)
+ self.assertNotEqual(thing1.state_machine, thing2.state_machine)
+ # initial state is still same
+ self.assertEqual(thing1.state_machine.initial_state, thing2.state_machine.initial_state)
+
+
+
+class TestMetaclass(TestCase):
+ """Test ThingMeta metaclass which instantiates a Thing (sub-)class"""
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ print(f"test ThingMeta with {self.__name__}")
+
+ """
+ Test sequence is as follows:
+ 1. Test metaclass of Thing class
+ 2. Test registry creation and access which is currently the main purpose of the metaclass
+ """
+
+ def test_1_metaclass(self):
+ """test metaclass of Thing class"""
+ # req. 1 metaclass must be ThingMeta of any Thing class
+ self.assertEqual(Thing.__class__, ThingMeta)
+ self.assertEqual(OceanOpticsSpectrometer.__class__, ThingMeta)
+ self.assertEqual(Thing.__class__, OceanOpticsSpectrometer.__class__)
+
+
+ def test_2_registry_creation(self):
+ """test registry creation and access which is currently the main purpose of the metaclass"""
+ # req. 1. registry attributes must be instances of their respective classes
+ self.assertIsInstance(Thing.properties, PropertiesRegistry)
+ self.assertIsInstance(Thing.actions, ActionsRegistry)
+ self.assertIsInstance(Thing.events, EventsRegistry)
+
+ # req. 2. new registries are not created on the fly and are same between accesses
+ self.assertEqual(Thing.properties, Thing.properties)
+ self.assertEqual(Thing.actions, Thing.actions)
+ self.assertEqual(Thing.events, Thing.events)
+ # This test is done as the implementation deviates from `param`
+
+ # req. 3. different subclasses have different registries
+ self.assertNotEqual(Thing.properties, OceanOpticsSpectrometer.properties)
+ self.assertNotEqual(Thing.actions, OceanOpticsSpectrometer.actions)
+ self.assertNotEqual(Thing.events, OceanOpticsSpectrometer.events)
+
+ # create instances for further tests
+ thing = Thing(id="test_registry_creation", log_level=logging.WARN)
+ spectrometer = OceanOpticsSpectrometer(id="test_registry_creation_2", log_level=logging.WARN)
+
+ # req. 4. registry attributes must be instances of their respective classes also for instances
+ self.assertIsInstance(thing.properties, PropertiesRegistry)
+ self.assertIsInstance(thing.actions, ActionsRegistry)
+ self.assertIsInstance(thing.events, EventsRegistry)
+
+ # req. 5. registries are not created on the fly and are same between accesses also for instances
+ self.assertEqual(thing.properties, thing.properties)
+ self.assertEqual(thing.actions, thing.actions)
+ self.assertEqual(thing.events, thing.events)
+
+ # req. 6. registries are not shared between instances
+ self.assertNotEqual(thing.properties, spectrometer.properties)
+ self.assertNotEqual(thing.actions, spectrometer.actions)
+ self.assertNotEqual(thing.events, spectrometer.events)
+
+ # req. 7. registries are not shared between instances and their classes
+ self.assertNotEqual(thing.properties, Thing.properties)
+ self.assertNotEqual(thing.actions, Thing.actions)
+ self.assertNotEqual(thing.events, Thing.events)
+ self.assertNotEqual(spectrometer.properties, OceanOpticsSpectrometer.properties)
+ self.assertNotEqual(spectrometer.actions, OceanOpticsSpectrometer.actions)
+ self.assertNotEqual(spectrometer.events, OceanOpticsSpectrometer.events)
+
+
+
+
+# Uncomment the following for type hints while coding registry tests,
+# comment it before testing, otherwise tests will fail due to overriding Thing object
+# class Thing(Thing):
+# class_registry: PropertiesRegistry | ActionsRegistry | EventsRegistry
+# instance_registry: PropertiesRegistry | ActionsRegistry | EventsRegistry | None
+# descriptor_object: type[Property | Action | Event]
+
+# class OceanOpticsSpectrometer(OceanOpticsSpectrometer):
+# class_registry: PropertiesRegistry | ActionsRegistry | EventsRegistry
+# instance_registry: PropertiesRegistry | ActionsRegistry | EventsRegistry | None
+# descriptor_object: type[Property | Action | Event]
+
+class TestRegistry(TestCase):
+
+ # Read the commented section above before proceeding to this test
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ self.setUpRegistryObjects()
+ self.setUpRegistryAttributes()
+ if self.is_abstract_test_class:
+ return
+ print(f"test {self.registry_cls.__name__} with {self.__name__}")
+
+ @classmethod
+ def setUpRegistryObjects(self):
+ self.registry_cls = None # type: DescriptorRegistry | None
+ self.registry_object = None # type: type[Property | Action | Event]
+
+ @property
+ def is_abstract_test_class(self):
+ # if self.registry_cls is None:
+ # print("registry_cls is None")
+ # if self.registry_object is None:
+ # print("registry_object is None")
+ return self.registry_cls is None or self.registry_object is None
+
+ @classmethod
+ def setUpRegistryAttributes(self):
+ if self.registry_cls is None or self.registry_object is None:
+ return
+
+ # create instances for further tests
+ self.thing = Thing(id=f"test_{self.registry_object.__name__}_registry", log_level=logging.WARN)
+ self.spectrometer = OceanOpticsSpectrometer(
+ id=f"test_{self.registry_object.__name__}_registry",
+ log_level=logging.WARN
+ )
+ if self.registry_cls == ActionsRegistry:
+ Thing.class_registry = Thing.actions
+ OceanOpticsSpectrometer.class_registry = OceanOpticsSpectrometer.actions
+ self.thing.instance_registry = self.thing.actions
+ self.spectrometer.instance_registry = self.spectrometer.actions
+ self.bound_object = BoundAction
+ elif self.registry_cls == PropertiesRegistry:
+ Thing.class_registry = Thing.properties
+ OceanOpticsSpectrometer.class_registry = OceanOpticsSpectrometer.properties
+ self.thing.instance_registry = self.thing.properties
+ self.spectrometer.instance_registry = self.spectrometer.properties
+ self.bound_object = typing.Any
+ elif self.registry_cls == EventsRegistry:
+ Thing.class_registry = Thing.events
+ OceanOpticsSpectrometer.class_registry = OceanOpticsSpectrometer.events
+ self.thing.instance_registry = self.thing.events
+ self.spectrometer.instance_registry = self.spectrometer.events
+ self.bound_object = EventDispatcher
+ else:
+ raise NotImplementedError("This registry class is not implemented")
+
+ """
+ Test action registry first because actions are the easiest to test.
+ 1. Test owner attribute
+ 2. Test descriptors access
+ 3. Test dunders
+ """
+
+ def test_1_owner(self):
+ """Test owner attribute of DescriptorRegistry"""
+ if self.is_abstract_test_class:
+ return
+ # See comment above TestRegistry class to enable type definitions
+ # req. 1. owner attribute must be the class itself when accessed as class attribute
+ self.assertEqual(Thing.class_registry.owner, Thing)
+ self.assertEqual(OceanOpticsSpectrometer.class_registry.owner, OceanOpticsSpectrometer)
+ # therefore owner instance must be None
+ self.assertIsNone(Thing.class_registry.owner_inst)
+ self.assertIsNone(OceanOpticsSpectrometer.class_registry.owner_inst)
+
+ # req. 2. owner attribute must be the instance for instance registries (i.e. when accessed as instance attribute)
+ self.assertEqual(self.thing.instance_registry.owner, self.thing)
+ self.assertEqual(self.spectrometer.instance_registry.owner, self.spectrometer)
+ self.assertEqual(self.thing.instance_registry.owner_cls, Thing)
+ self.assertEqual(self.spectrometer.instance_registry.owner_cls, OceanOpticsSpectrometer)
+
+ # req. 3. descriptor_object must be defined correctly and is a class
+ self.assertEqual(Thing.class_registry.descriptor_object, self.registry_object)
+ self.assertEqual(OceanOpticsSpectrometer.class_registry.descriptor_object, self.registry_object)
+ self.assertEqual(self.thing.instance_registry.descriptor_object, self.registry_object)
+ self.assertEqual(self.thing.instance_registry.descriptor_object, Thing.class_registry.descriptor_object)
+
+
+ def test_2_descriptors(self):
+ """Test descriptors access"""
+ if self.is_abstract_test_class:
+ return
+
+ # req. 1. descriptors are instances of the descriptor object - Property | Action | Event
+ for name, value in Thing.class_registry.descriptors.items():
+ self.assertIsInstance(value, self.registry_object)
+ self.assertIsInstance(name, str)
+ for name, value in OceanOpticsSpectrometer.class_registry.descriptors.items():
+ self.assertIsInstance(value, self.registry_object)
+ self.assertIsInstance(name, str)
+        # the subclass has more descriptors than the parent class because our example Thing OceanOpticsSpectrometer
+ # has defined its own actions, properties and events
+ self.assertTrue(len(OceanOpticsSpectrometer.class_registry.descriptors) > len(Thing.class_registry.descriptors))
+ # req. 2. either class level or instance level descriptors are same - not a strict requirement for different
+ # use cases, one can always add instance level descriptors
+ for name, value in self.thing.instance_registry.descriptors.items():
+ self.assertIsInstance(value, self.registry_object)
+ self.assertIsInstance(name, str)
+ for name, value in self.spectrometer.instance_registry.descriptors.items():
+ self.assertIsInstance(value, self.registry_object)
+ self.assertIsInstance(name, str)
+ # req. 3. because class level and instance level descriptors are same, they are equal
+ for (name, value), (name2, value2) in zip(Thing.class_registry.descriptors.items(), self.thing.instance_registry.descriptors.items()):
+ self.assertEqual(name, name2)
+ self.assertEqual(value, value2)
+ for (name, value), (name2, value2) in zip(OceanOpticsSpectrometer.class_registry.descriptors.items(), self.spectrometer.instance_registry.descriptors.items()):
+ self.assertEqual(name, name2)
+ self.assertEqual(value, value2)
+ # req. 4. descriptors can be cleared
+ self.assertTrue(hasattr(self.thing.instance_registry, f'_{self.thing.instance_registry._qualified_prefix}_{self.registry_cls.__name__.lower()}'))
+ self.thing.instance_registry.clear()
+ self.assertTrue(not hasattr(self.thing.instance_registry, f'_{self.thing.instance_registry._qualified_prefix}_{self.registry_cls.__name__.lower()}'))
+ # clearing again any number of times should not raise error
+ self.thing.instance_registry.clear()
+ self.thing.instance_registry.clear()
+ self.assertTrue(not hasattr(self.thing.instance_registry, f'_{self.thing.instance_registry._qualified_prefix}_{self.registry_cls.__name__.lower()}'))
+
+
+ def test_3_dunders(self):
+ """Test dunders of DescriptorRegistry"""
+ if self.is_abstract_test_class:
+ return
+
+ # req. 1. __getitem__ must return the descriptor object
+ for name, value in Thing.class_registry.descriptors.items():
+ self.assertEqual(Thing.class_registry[name], value)
+ # req. 2. __contains__ must return True if the descriptor is present
+ self.assertIn(value, Thing.class_registry)
+ self.assertIn(name, Thing.class_registry.descriptors.keys())
+
+ # req. 3. __iter__ must return an iterator over the descriptors dictionary
+ # which in turn iterates over the keys
+ self.assertTrue(all(isinstance(descriptor_name, str) for descriptor_name in Thing.class_registry))
+ self.assertTrue(all(isinstance(descriptor_name, str) for descriptor_name in OceanOpticsSpectrometer.class_registry))
+ # __iter__ can also be casted as other iterators like lists
+ thing_descriptors = list(self.thing.instance_registry)
+ spectrometer_descriptors = list(self.spectrometer.instance_registry)
+ self.assertIsInstance(thing_descriptors, list)
+ self.assertIsInstance(spectrometer_descriptors, list)
+ self.assertTrue(all(isinstance(descriptor_name, str) for descriptor_name in thing_descriptors))
+ self.assertTrue(all(isinstance(descriptor_name, str) for descriptor_name in spectrometer_descriptors))
+
+ # req. 4. __len__ must return the number of descriptors
+ self.assertTrue(len(Thing.class_registry) == len(Thing.class_registry.descriptors))
+ self.assertTrue(len(OceanOpticsSpectrometer.class_registry) == len(OceanOpticsSpectrometer.class_registry.descriptors))
+ self.assertTrue(len(self.thing.instance_registry) == len(self.thing.instance_registry.descriptors))
+ self.assertTrue(len(self.spectrometer.instance_registry) == len(self.spectrometer.instance_registry.descriptors))
+ self.assertTrue(len(self.thing.instance_registry) == len(Thing.class_registry))
+ self.assertTrue(len(self.spectrometer.instance_registry) == len(OceanOpticsSpectrometer.class_registry))
+
+ # req. 5. registries have their unique hashes
+ # NOTE - not sure if this is really a useful feature or just plain stupid
+ # The requirement was to be able to generate unique hashes for each registry, e.g. foodict[Thing.class_registry] = 1
+ foodict = {Thing.class_registry: 1, OceanOpticsSpectrometer.class_registry: 2, self.thing.instance_registry: 3, self.spectrometer.instance_registry: 4}
+ self.assertEqual(foodict[Thing.class_registry], 1)
+ self.assertEqual(foodict[OceanOpticsSpectrometer.class_registry], 2)
+ self.assertEqual(foodict[self.thing.instance_registry], 3)
+ self.assertEqual(foodict[self.spectrometer.instance_registry], 4)
+
+ # __dir__ not yet tested
+ # __str__ will not be tested
+
+
+ def test_4_bound_objects(self):
+ """Test bound objects returned from descriptor access"""
+ if self.is_abstract_test_class:
+ return
+ if self.registry_object not in [Property, Parameter, Action]:
+ # Events work a little differently, may need to be tested separately or refactored to same implementation
+ return
+
+ # req. 1. number of bound objects must be equal to number of descriptors
+ # for example, number of bound actions must be equal to number of actions
+ self.assertEqual(len(self.thing.instance_registry), len(self.thing.instance_registry.descriptors))
+ self.assertEqual(len(self.spectrometer.instance_registry), len(self.spectrometer.instance_registry.descriptors))
+
+ # req. 2. bound objects must be instances of the expected bound type
+ for name, value in self.thing.instance_registry.values.items():
+ if self.bound_object != typing.Any:
+ self.assertIsInstance(value, self.bound_object)
+ self.assertIsInstance(name, str)
+ for name, value in self.spectrometer.instance_registry.values.items():
+ if self.bound_object != typing.Any:
+ self.assertIsInstance(value, self.bound_object)
+ self.assertIsInstance(name, str)
+
+
+class TestActionRegistry(TestRegistry):
+ """Test ActionRegistry class"""
+
+ @classmethod
+ def setUpRegistryObjects(self):
+ self.registry_cls = ActionsRegistry
+ self.registry_object = Action
+
+
+class TestEventRegistry(TestRegistry):
+
+ @classmethod
+ def setUpRegistryObjects(self):
+ self.registry_cls = EventsRegistry
+ self.registry_object = Event
+
+
+ def test_2_descriptors(self):
+ if self.is_abstract_test_class:
+ return
+
+ super().test_2_descriptors()
+
+ # req. 5. observables and change events are also descriptors
+ for name, value in self.thing.events.observables.items():
+ self.assertIsInstance(value, Property)
+ self.assertIsInstance(name, str)
+ for name, value in self.thing.events.change_events.items():
+ self.assertIsInstance(value, Event)
+ self.assertIsInstance(name, str)
+ # req. 6. descriptors can be cleared
+ self.assertTrue(hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}'))
+ self.assertTrue(hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}_change_events'))
+ self.assertTrue(hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}_observables'))
+ self.thing.events.clear()
+ self.assertTrue(not hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}'))
+ self.assertTrue(not hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}_change_events'))
+ self.assertTrue(not hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}_observables'))
+ self.thing.events.clear()
+ self.thing.events.clear()
+ self.assertTrue(not hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}'))
+ self.assertTrue(not hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}_change_events'))
+ self.assertTrue(not hasattr(self.thing.events, f'_{self.thing.events._qualified_prefix}_{EventsRegistry.__name__.lower()}_observables'))
+
+
+class TestPropertiesRegistry(TestRegistry):
+
+ @classmethod
+ def setUpRegistryObjects(self):
+ self.registry_cls = PropertiesRegistry
+ self.registry_object = Parameter
+
+
+ def test_2_descriptors(self):
+ if self.is_abstract_test_class:
+ return
+
+ super().test_2_descriptors()
+
+ # req. 5. parameters that are subclass of Property are usually remote objects
+ for name, value in self.thing.properties.remote_objects.items():
+ self.assertIsInstance(value, Property)
+ self.assertIsInstance(name, str)
+ for name, value in self.spectrometer.properties.remote_objects.items():
+ self.assertIsInstance(value, Property)
+ self.assertIsInstance(name, str)
+ # req. 6. db_objects, db_init_objects, db_persisting_objects, db_commit_objects are also descriptors
+ for name, value in self.thing.properties.db_objects.items():
+ self.assertIsInstance(value, Property)
+ self.assertIsInstance(name, str)
+ self.assertTrue(value.db_init or value.db_persist or value.db_commit)
+ for name, value in self.thing.properties.db_init_objects.items():
+ self.assertIsInstance(value, Property)
+ self.assertIsInstance(name, str)
+ self.assertTrue(value.db_init or value.db_persist)
+ self.assertFalse(value.db_commit)
+ for name, value in self.thing.properties.db_commit_objects.items():
+ self.assertIsInstance(value, Property)
+ self.assertIsInstance(name, str)
+ self.assertTrue(value.db_commit or value.db_persist)
+ self.assertFalse(value.db_init)
+ for name, value in self.thing.properties.db_persisting_objects.items():
+ self.assertIsInstance(value, Property)
+ self.assertIsInstance(name, str)
+ self.assertTrue(value.db_persist)
+ self.assertFalse(value.db_init) # in user given cases, this could be true, this is not strict requirement
+ self.assertFalse(value.db_commit) # in user given cases, this could be true, this is not strict requirement
+
+ # req. 7. descriptors can be cleared
+ self.assertTrue(hasattr(self.thing.properties, f'_{self.thing.properties._qualified_prefix}_{PropertiesRegistry.__name__.lower()}'))
+ self.thing.properties.clear()
+ self.assertTrue(not hasattr(self.thing.properties, f'_{self.thing.properties._qualified_prefix}_{PropertiesRegistry.__name__.lower()}'))
+ self.thing.properties.clear()
+ self.thing.properties.clear()
+ self.assertTrue(not hasattr(self.thing.properties, f'_{self.thing.properties._qualified_prefix}_{PropertiesRegistry.__name__.lower()}'))
+
+
+ def test_5_bulk_read_write(self):
+ """Test bulk read and write operations for properties"""
+
+ # req. 1. test read in bulk for readAllProperties
+ prop_values = self.spectrometer.properties.get()
+ # read value is a dictionary
+ self.assertIsInstance(prop_values, dict)
+ self.assertTrue(len(prop_values) > 0)
+ # all properties are read at instance level and get only reads remote objects
+ self.assertTrue(len(prop_values) == len(self.spectrometer.properties.remote_objects))
+ # read values are not descriptors themselves
+ for name, value in prop_values.items():
+ self.assertIsInstance(name, str)
+ self.assertNotIsInstance(value, Parameter) # descriptor has been read
+
+ # req. 2. properties can be read with new names
+ prop_values = self.spectrometer.properties.get(integration_time='integrationTime', state='State', trigger_mode='triggerMode')
+ self.assertIsInstance(prop_values, dict)
+ self.assertTrue(len(prop_values) == 3)
+ for name, value in prop_values.items():
+ self.assertIsInstance(name, str)
+ self.assertTrue(name in ['integrationTime', 'triggerMode', 'State'])
+ self.assertNotIsInstance(value, Parameter)
+
+ # req. 3. read in bulk for readMultipleProperties
+ prop_values = self.spectrometer.properties.get(names=['integration_time', 'trigger_mode', 'state', 'last_intensity'])
+ # read value is a dictionary
+ self.assertIsInstance(prop_values, dict)
+ self.assertTrue(len(prop_values) == 4)
+ # read values are not descriptors themselves
+ for name, value in prop_values.items():
+ self.assertIsInstance(name, str)
+ self.assertTrue(name in ['integration_time', 'trigger_mode', 'state', 'last_intensity'])
+ self.assertNotIsInstance(value, Parameter)
+
+ # req. 4. read a property that is not present raises AttributeError
+ with self.assertRaises(AttributeError) as ex:
+ prop_values = self.spectrometer.properties.get(names=['integration_time', 'trigger_mode', 'non_existent_property', 'last_intensity'])
+ self.assertTrue("property non_existent_property does not exist" in str(ex.exception))
+
+ # req. 5. write in bulk
+ prop_values = self.spectrometer.properties.get()
+ self.spectrometer.properties.set(
+ integration_time=10,
+ trigger_mode=1
+ )
+ self.assertNotEqual(prop_values['integration_time'], self.spectrometer.integration_time)
+ self.assertNotEqual(prop_values['trigger_mode'], self.spectrometer.trigger_mode)
+
+ # req. 6. writing a non existent property raises RuntimeError
+ with self.assertRaises(RuntimeError) as ex:
+ self.spectrometer.properties.set(
+ integration_time=120,
+ trigger_mode=2,
+ non_existent_property=10
+ )
+ self.assertTrue("Some properties could not be set due to errors" in str(ex.exception))
+ self.assertTrue("non_existent_property" in str(ex.exception.__notes__))
+ # but those that exist will still be written
+ self.assertEqual(self.spectrometer.integration_time, 120)
+ self.assertEqual(self.spectrometer.trigger_mode, 2)
+
+
+ def test_6_db_properties(self):
+ """Test db operations for properties"""
+
+ # req. 1. db operations are supported only at instance level
+ with self.assertRaises(AttributeError) as ex:
+ Thing.properties.load_from_DB()
+ self.assertTrue("database operations are only supported at instance level" in str(ex.exception))
+ with self.assertRaises(AttributeError) as ex:
+ Thing.properties.get_from_DB()
+ self.assertTrue("database operations are only supported at instance level" in str(ex.exception))
+
+
+
+def load_tests(loader, tests, pattern):
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestThingInit))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestOceanOpticsSpectrometer))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestMetaclass))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestActionRegistry))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestPropertiesRegistry))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestEventRegistry))
+ return suite
+
+if __name__ == '__main__':
+ runner = TestRunner()
+ runner.run(load_tests(unittest.TestLoader(), None, None))
+
+
+"""
+# Summary of tests and requirements:
+
+TestThing class:
+1. Test id requirements:
+ - Instance name must be a string and cannot be changed after set.
+ - Valid and invalid IDs based on regex (r'[A-Za-z]+[A-Za-z_0-9\-\/]*').
+2. Test logger setup:
+ - Logger must have remote access handler if remote_accessible_logger is True.
+ - Logger is created automatically if not provided.
+3. Test state and state_machine setup:
+ - State property must be None when no state machine is present.
+4. Test composition of subthings:
+ - Subthings must be a dictionary.
+ - Subthings are recomputed when accessed.
+ - Subthings must be instances of Thing and have the parent as owner.
+ - Name of subthing must match name of the attribute.
+5. Test servers init:
+ - rpc_server and event_publisher must be None when not run().
+ - rpc_server and event_publisher must be instances of their respective classes when run().
+6. Test thing model generation:
+ - Basic test to ensure nothing is fundamentally wrong.
+
+TestOceanOpticsSpectrometer class:
+1. Test state and state_machine setup:
+ - State and state machine must be present because subclass has a state machine.
+ - State and state machine must be different for different instances.
+
+TestMetaclass class:
+1. Test metaclass of Thing class:
+ - Metaclass must be ThingMeta for any Thing class.
+2. Test registry creation and access:
+ - Registry attributes must be instances of their respective classes.
+ - New registries are not created on the fly and are same between accesses.
+ - Different subclasses have different registries.
+ - Registry attributes must be instances of their respective classes also for instances.
+ - Registries are not created on the fly and are same between accesses also for instances.
+ - Registries are not shared between instances.
+ - Registries are not shared between instances and their classes.
+
+TestRegistry class:
+1. Test owner attribute:
+ - Owner attribute must be the class itself when accessed as class attribute.
+ - Owner attribute must be the instance for instance registries.
+ - Descriptor_object must be defined correctly and is a class.
+2. Test descriptors access:
+ - Descriptors are instances of the descriptor object.
+ - Class level or instance level descriptors are same.
+ - Descriptors can be cleared.
+3. Test dunders:
+ - __getitem__ must return the descriptor object.
+ - __contains__ must return True if the descriptor is present.
+ - __iter__ must return an iterator over the descriptors dictionary.
+ - __len__ must return the number of descriptors.
+ - Registries have their unique hashes.
+4. Test bound objects:
+ - Number of bound objects must be equal to number of descriptors.
+ - Bound objects must be instances of bound instances.
+
+TestActionRegistry class:
+- Inherits tests from TestRegistry.
+
+TestEventRegistry class:
+- Inherits tests from TestRegistry.
+- Observables and change events are also descriptors.
+
+TestPropertiesRegistry class:
+- Inherits tests from TestRegistry.
+- Parameters that are subclass of Property are usually remote objects.
+- DB operations are supported only at instance level.
+"""
diff --git a/tests/test_05_brokers.py b/tests/test_05_brokers.py
new file mode 100644
index 00000000..5b319b5a
--- /dev/null
+++ b/tests/test_05_brokers.py
@@ -0,0 +1,520 @@
+import threading, asyncio
+import logging, multiprocessing, unittest
+
+from hololinked.core.zmq.message import (ERROR, EXIT, OPERATION, HANDSHAKE, REPLY,
+ PreserializedData, RequestHeader, RequestMessage, SerializableData) # client to server
+from hololinked.core.zmq.message import (TIMEOUT, INVALID_MESSAGE, ERROR,
+ ResponseMessage, ResponseHeader) # server to client
+from hololinked.core.zmq.brokers import AsyncZMQServer, MessageMappedZMQClientPool, SyncZMQClient, AsyncZMQClient
+from hololinked.utils import get_current_async_loop, get_default_logger
+
+try:
+ from .utils import TestRunner
+ from .test_01_message import MessageValidatorMixin
+ from .things.starter import run_zmq_server
+ from .things import TestThing
+except ImportError:
+ from utils import TestRunner
+ from test_01_message import MessageValidatorMixin
+ from things.starter import run_zmq_server
+ from things import TestThing
+
+
+
+class TestBrokerMixin(MessageValidatorMixin):
+ """Tests Individual ZMQ Server"""
+
+ @classmethod
+ def setUpServer(self):
+ self.server = AsyncZMQServer(
+ id=self.server_id,
+ logger=self.logger
+ )
+ """
+ Base class: BaseZMQ, BaseAsyncZMQ, BaseSyncZMQ
+ Servers: BaseZMQServer, AsyncZMQServer, ZMQServerPool
+ Clients: BaseZMQClient, SyncZMQClient, AsyncZMQClient, MessageMappedZMQClientPool
+ """
+
+ @classmethod
+ def setUpClient(self):
+ self.sync_client = None
+ self.async_client = None
+
+ @classmethod
+ def setUpThing(self):
+ self.thing = TestThing(
+ id=self.thing_id,
+ logger=self.logger,
+ remote_accessible_logger=True
+ )
+
+ @classmethod
+ def startServer(self):
+ self._server_thread = threading.Thread(
+ target=run_zmq_server,
+ args=(self.server, self, self.done_queue),
+ daemon=True
+ )
+ self._server_thread.start()
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ print(f"test ZMQ message brokers {self.__name__}")
+ self.logger = get_default_logger('test-message-broker', logging.ERROR)
+ self.done_queue = multiprocessing.Queue()
+ self.last_server_message = None
+ self.setUpThing()
+ self.setUpServer()
+ self.setUpClient()
+ self.startServer()
+
+
+
+class TestBasicServerAndClient(TestBrokerMixin):
+
+ @classmethod
+ def setUpClient(self):
+ super().setUpClient()
+ self.sync_client = SyncZMQClient(
+ id=self.client_id,
+ server_id=self.server_id,
+ logger=self.logger,
+ handshake=False
+ )
+ self.client = self.sync_client
+
+
+ def test_1_handshake_complete(self):
+ """
+ Test handshake so that client can connect to server. Once client connects to server,
+ verify a ZMQ internal monitoring socket is available.
+ """
+ self.client.handshake()
+ self.assertTrue(self.client._monitor_socket is not None)
+ # both directions
+ # HANDSHAKE = 'HANDSHAKE' # 1 - find out if the server is alive
+
+
+ def test_2_message_contract_types(self):
+ """
+ Once composition is checked, check different message types
+ """
+ # message types
+ request_message = RequestMessage.craft_from_arguments(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ thing_id=self.thing_id,
+ objekt='some_prop',
+ operation='readProperty'
+ )
+
+ async def handle_message_types_server():
+ # server to client
+ # REPLY = b'REPLY' # 4 - response for operation
+ # TIMEOUT = b'TIMEOUT' # 5 - timeout message, operation could not be completed
+ # EXCEPTION = b'EXCEPTION' # 6 - exception occurred while executing operation
+ # INVALID_MESSAGE = b'INVALID_MESSAGE' # 7 - invalid message
+ await self.server._handle_timeout(request_message, timeout_type='execution') # 5
+ await self.server._handle_invalid_message(request_message, SerializableData(Exception('test'))) # 7
+ await self.server._handshake(request_message) # 1
+ await self.server._handle_error_message(request_message, Exception('test')) # 6
+ await self.server.async_send_response(request_message) # 4
+ await self.server.async_send_response_with_message_type(request_message, ERROR,
+ SerializableData(Exception('test'))) # 6
+
+ get_current_async_loop().run_until_complete(handle_message_types_server())
+
+ """
+ message types
+
+ both directions
+ HANDSHAKE = b'HANDSHAKE' # 1 - taken care by test_1...
+
+ client to server
+ OPERATION = b'OPERATION' 2 - taken care by test_2_... # operation request from client to server
+ EXIT = b'EXIT' # 3 - taken care by test_7... # exit the server
+
+ server to client
+ REPLY = b'REPLY' # 4 - response for operation
+ TIMEOUT = b'TIMEOUT' # 5 - timeout message, operation could not be completed
+ EXCEPTION = b'EXCEPTION' # 6 - exception occurred while executing operation
+ INVALID_MESSAGE = b'INVALID_MESSAGE' # 7 - invalid message
+ SERVER_DISCONNECTED = 'EVENT_DISCONNECTED' not yet tested # socket died - zmq's builtin event
+
+ peer to peer
+ INTERRUPT = b'INTERRUPT' not yet tested # interrupt a socket while polling
+ """
+
+ msg = self.client.recv_response(request_message.id)
+ self.assertEqual(msg.type, TIMEOUT)
+ self.validate_response_message(msg)
+
+ msg = self.client.recv_response(request_message.id)
+ self.assertEqual(msg.type, INVALID_MESSAGE)
+ self.validate_response_message(msg)
+
+ msg = self.client.socket.recv_multipart() # handshake dont come as response
+ response_message = ResponseMessage(msg)
+ self.assertEqual(response_message.type, HANDSHAKE)
+ self.validate_response_message(response_message)
+
+ msg = self.client.recv_response(request_message.id)
+ self.assertEqual(msg.type, ERROR)
+ self.validate_response_message(msg)
+
+ msg = self.client.recv_response(request_message.id)
+ self.assertEqual(msg.type, REPLY)
+ self.validate_response_message(msg)
+
+ msg = self.client.recv_response(request_message.id)
+ # custom crafted explicitly to be ERROR
+ self.assertEqual(msg.type, ERROR)
+ self.validate_response_message(msg)
+
+ self.client.handshake()
+
+
+ def test_3_verify_polling(self):
+ """
+ Test if polling may be stopped and started again
+ """
+ async def verify_poll_stopped(self: TestBasicServerAndClient) -> None:
+ await self.server.poll_requests()
+ self.server.poll_timeout = 1000
+ await self.server.poll_requests()
+ self.done_queue.put(True)
+
+ async def stop_poll(self: TestBasicServerAndClient) -> None:
+ await asyncio.sleep(0.1)
+ self.server.stop_polling()
+ await asyncio.sleep(0.1)
+ self.server.stop_polling()
+ # While the above two functions are running,
+ # we don't send a message as the thread is also running
+ get_current_async_loop().run_until_complete(
+ asyncio.gather(*[verify_poll_stopped(self), stop_poll(self)])
+ )
+
+ self.assertTrue(self.done_queue.get())
+ self.assertEqual(self.server.poll_timeout, 1000)
+ self.client.handshake()
+
+
+ def test_4_exit(self):
+ """
+ Test if exit reaches to server
+ """
+ # EXIT = b'EXIT' # 3 - exit the server
+ request_message = RequestMessage.craft_with_message_type(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ message_type=EXIT
+ )
+ self.client.socket.send_multipart(request_message.byte_array)
+ self.assertTrue(self.done_queue.get())
+ self._server_thread.join()
+
+
+ # def test_5_server_disconnected(self):
+
+ # self.server.exit() # exit causes the server socket to send a ZMQ builtin termination message to the client
+ # # we need to complete all the tasks before we can exit other some loosely hanging tasks (which will anyway complete
+ # # before the script quits) has invalid sockets because of the exit
+
+ # # SERVER_DISCONNECTED = 'EVENT_DISCONNECTED' # socket died - zmq's builtin event
+ # with self.assertRaises(ConnectionAbortedError) as ex:
+ # self.client.recv_response(message_id=b'not-necessary')
+ # self.assertTrue(str(ex.exception).startswith(f"server disconnected for {self.client_id}"))
+
+ # peer to peer
+ # INTERRUPT = b'INTERRUPT' # interrupt a socket while polling
+ # first test the length
+
+
+
+class TestAsyncZMQClient(TestBrokerMixin):
+
+ @classmethod
+ def setUpClient(self):
+ self.async_client = AsyncZMQClient(
+ id=self.client_id,
+ server_id=self.server_id,
+ logger=self.logger,
+ handshake=False
+ )
+ self.client = self.async_client
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ self.client = self.async_client
+
+
+ def test_1_handshake_complete(self):
+ """
+ Test handshake so that client can connect to server. Once client connects to server,
+ verify a ZMQ internal monitoring socket is available.
+ """
+ async def test():
+ self.client.handshake()
+ await self.client.handshake_complete()
+ self.assertTrue(self.client._monitor_socket is not None)
+ get_current_async_loop().run_until_complete(test())
+ # both directions
+ # HANDSHAKE = 'HANDSHAKE' # 1 - find out if the server is alive
+
+
+ def test_2_message_contract_types(self):
+ """
+ Once composition is checked, check different message types
+ """
+ # message types
+ request_message = RequestMessage.craft_from_arguments(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ thing_id=self.thing_id,
+ objekt='some_prop',
+ operation='readProperty'
+ )
+
+ async def handle_message_types_server():
+ # server to client
+ # REPLY = b'REPLY' # 4 - response for operation
+ # TIMEOUT = b'TIMEOUT' # 5 - timeout message, operation could not be completed
+ # EXCEPTION = b'EXCEPTION' # 6 - exception occurred while executing operation
+ # INVALID_MESSAGE = b'INVALID_MESSAGE' # 7 - invalid message
+ await self.server._handle_timeout(request_message, timeout_type='invokation') # 5
+ await self.server._handle_invalid_message(request_message, SerializableData(Exception('test')))
+ await self.server._handshake(request_message)
+ await self.server._handle_error_message(request_message, Exception('test'))
+ await self.server.async_send_response(request_message)
+ await self.server.async_send_response_with_message_type(request_message, ERROR,
+ SerializableData(Exception('test')))
+
+ async def handle_message_types_client():
+ """
+ message types
+ both directions
+ HANDSHAKE = b'HANDSHAKE' # 1 - taken care by test_1...
+
+ client to server
+ OPERATION = b'OPERATION' 2 - taken care by test_2_... # operation request from client to server
+ EXIT = b'EXIT' # 3 - taken care by test_7... # exit the server
+
+ server to client
+ REPLY = b'REPLY' # 4 - response for operation
+ TIMEOUT = b'TIMEOUT' # 5 - timeout message, operation could not be completed
+ EXCEPTION = b'EXCEPTION' # 6 - exception occurred while executing operation
+ INVALID_MESSAGE = b'INVALID_MESSAGE' # 7 - invalid message
+ SERVER_DISCONNECTED = 'EVENT_DISCONNECTED' not yet tested # socket died - zmq's builtin event
+
+ peer to peer
+ INTERRUPT = b'INTERRUPT' not yet tested # interrupt a socket while polling
+ """
+ msg = await self.client.async_recv_response(request_message.id)
+ self.assertEqual(msg.type, TIMEOUT)
+ self.validate_response_message(msg)
+
+ msg = await self.client.async_recv_response(request_message.id)
+ self.assertEqual(msg.type, INVALID_MESSAGE)
+ self.validate_response_message(msg)
+
+ msg = await self.client.socket.recv_multipart() # handshake don't come as response
+ response_message = ResponseMessage(msg)
+ self.assertEqual(response_message.type, HANDSHAKE)
+ self.validate_response_message(response_message)
+
+ msg = await self.client.async_recv_response(request_message.id)
+ self.assertEqual(msg.type, ERROR)
+ self.validate_response_message(msg)
+
+ msg = await self.client.async_recv_response(request_message.id)
+ self.assertEqual(msg.type, REPLY)
+ self.validate_response_message(msg)
+
+ msg = await self.client.async_recv_response(request_message.id)
+ self.assertEqual(msg.type, ERROR)
+ self.validate_response_message(msg)
+
+ # exit checked separately at the end
+ get_current_async_loop().run_until_complete(
+ asyncio.gather(*[
+ handle_message_types_server(),
+ handle_message_types_client()
+ ])
+ )
+
+
+ def test_3_exit(self):
+ """
+ Test if exit reaches to server
+ """
+ # EXIT = b'EXIT' # 3 - exit the server
+ request_message = RequestMessage.craft_with_message_type(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ message_type=EXIT
+ )
+ self.client.socket.send_multipart(request_message.byte_array)
+ self.assertTrue(self.done_queue.get())
+ self._server_thread.join()
+
+
+
+class TestMessageMappedClientPool(TestBrokerMixin):
+
+ @classmethod
+ def setUpClient(self):
+ self.client = MessageMappedZMQClientPool(
+ id='client-pool',
+ client_ids=[self.client_id],
+ server_ids=[self.server_id],
+ logger=self.logger,
+ handshake=False
+ )
+
+
+ def test_1_handshake_complete(self):
+ """
+ Test handshake so that client can connect to server. Once client connects to server,
+ verify a ZMQ internal monitoring socket is available.
+ """
+ async def test():
+ self.client.handshake()
+ await self.client.handshake_complete()
+ for client in self.client.pool.values():
+ self.assertTrue(client._monitor_socket is not None)
+ get_current_async_loop().run_until_complete(test())
+ # both directions
+ # HANDSHAKE = 'HANDSHAKE' # 1 - find out if the server is alive
+
+
+ def test_2_message_contract_types(self):
+ """
+ Once composition is checked, check different message types
+ """
+ # message types
+ request_message = RequestMessage.craft_from_arguments(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ thing_id=self.thing_id,
+ objekt='some_prop',
+ operation='readProperty'
+ )
+
+ async def handle_message_types():
+ """
+ message types
+ both directions
+ HANDSHAKE = b'HANDSHAKE' # 1 - taken care by test_1...
+
+ client to server
+ OPERATION = b'OPERATION' 2 - taken care by test_2_... # operation request from client to server
+ EXIT = b'EXIT' # 3 - taken care by test_7... # exit the server
+
+ server to client
+ REPLY = b'REPLY' # 4 - response for operation
+ TIMEOUT = b'TIMEOUT' # 5 - timeout message, operation could not be completed
+ EXCEPTION = b'EXCEPTION' # 6 - exception occurred while executing operation
+ INVALID_MESSAGE = b'INVALID_MESSAGE' # 7 - invalid message
+ SERVER_DISCONNECTED = 'EVENT_DISCONNECTED' not yet tested # socket died - zmq's builtin event
+
+ peer to peer
+ INTERRUPT = b'INTERRUPT' not yet tested # interrupt a socket while polling
+ """
+ self.client.start_polling()
+
+ self.client.events_map[request_message.id] = self.client.event_pool.pop()
+ await self.server._handle_timeout(request_message, timeout_type='invokation') # 5
+ msg = await self.client.async_recv_response(self.client_id, request_message.id)
+ self.assertEqual(msg.type, TIMEOUT)
+ self.validate_response_message(msg)
+
+ self.client.events_map[request_message.id] = self.client.event_pool.pop()
+ await self.server._handle_invalid_message(request_message, SerializableData(Exception('test')))
+ msg = await self.client.async_recv_response(self.client_id, request_message.id)
+ self.assertEqual(msg.type, INVALID_MESSAGE)
+ self.validate_response_message(msg)
+
+ self.client.events_map[request_message.id] = self.client.event_pool.pop()
+ await self.server._handshake(request_message)
+ msg = await self.client.pool[self.client_id].socket.recv_multipart() # handshake don't come as response
+ response_message = ResponseMessage(msg)
+ self.assertEqual(response_message.type, HANDSHAKE)
+ self.validate_response_message(response_message)
+
+ self.client.events_map[request_message.id] = self.client.event_pool.pop()
+ await self.server.async_send_response(request_message)
+ msg = await self.client.async_recv_response(self.client_id, request_message.id)
+ self.assertEqual(msg.type, REPLY)
+ self.validate_response_message(msg)
+
+ self.client.events_map[request_message.id] = self.client.event_pool.pop()
+ await self.server.async_send_response_with_message_type(request_message, ERROR,
+ SerializableData(Exception('test')))
+ msg = await self.client.async_recv_response(self.client_id, request_message.id)
+ self.assertEqual(msg.type, ERROR)
+ self.validate_response_message(msg)
+
+ self.client.stop_polling()
+
+ # exit checked separately at the end
+ get_current_async_loop().run_until_complete(
+ asyncio.gather(*[
+ handle_message_types()
+ ])
+ )
+
+
+ def test_3_verify_polling(self):
+ """
+ Test if polling may be stopped and started again
+ """
+ async def verify_poll_stopped(self: "TestMessageMappedClientPool") -> None:
+ await self.client.poll_responses()
+ self.client.poll_timeout = 1000
+ await self.client.poll_responses()
+ self.done_queue.put(True)
+
+ async def stop_poll(self: "TestMessageMappedClientPool") -> None:
+ await asyncio.sleep(0.1)
+ self.client.stop_polling()
+ await asyncio.sleep(0.1)
+ self.client.stop_polling()
+ # While the above two functions are running,
+ # we don't send a message as the thread is also running
+ get_current_async_loop().run_until_complete(
+ asyncio.gather(*[verify_poll_stopped(self), stop_poll(self)])
+ )
+ self.assertTrue(self.done_queue.get())
+ self.assertEqual(self.client.poll_timeout, 1000)
+
+
+ def test_4_exit(self):
+ """
+ Test if exit reaches to server
+ """
+ # EXIT = b'EXIT' # 3 - exit the server
+ request_message = RequestMessage.craft_with_message_type(
+ receiver_id=self.server_id,
+ sender_id=self.client_id,
+ message_type=EXIT
+ )
+ self.client[self.client_id].socket.send_multipart(request_message.byte_array)
+ self.assertTrue(self.done_queue.get())
+ self._server_thread.join()
+
+
+
+def load_tests(loader, tests, pattern):
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestBasicServerAndClient))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestAsyncZMQClient))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestMessageMappedClientPool))
+ return suite
+
+if __name__ == '__main__':
+ runner = TestRunner()
+ runner.run(load_tests(unittest.TestLoader(), None, None))
diff --git a/tests/test_06_actions.py b/tests/test_06_actions.py
new file mode 100644
index 00000000..a41ff74e
--- /dev/null
+++ b/tests/test_06_actions.py
@@ -0,0 +1,428 @@
+import asyncio
+import unittest
+import logging
+
+from hololinked.utils import isclassmethod
+from hololinked.core.actions import Action, BoundAction, BoundSyncAction, BoundAsyncAction
+from hololinked.core.dataklasses import ActionInfoValidator
+from hololinked.core.thing import Thing, action
+from hololinked.td.interaction_affordance import ActionAffordance
+from hololinked.schema_validators import JSONSchemaValidator
+
+try:
+ from .utils import TestCase, TestRunner
+ from .things import TestThing
+ from .things.test_thing import replace_methods_with_actions
+except ImportError:
+ from utils import TestCase, TestRunner
+ from things import TestThing
+ from things.test_thing import replace_methods_with_actions
+
+
+class TestAction(TestCase):
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ print(f"test action with {self.__name__}")
+
+
+ def test_1_allowed_actions(self):
+ """Test if methods can be decorated with action"""
+ # 1. instance method can be decorated with action
+ self.assertEqual(TestThing.action_echo, action()(TestThing.action_echo.obj)) # already predecorated as action
+ # 2. classmethod can be decorated with action
+ self.assertEqual(Action(TestThing.action_echo_with_classmethod),
+ action()(TestThing.action_echo_with_classmethod))
+ self.assertTrue(isclassmethod(TestThing.action_echo_with_classmethod))
+ # 3. async methods can be decorated with action
+ self.assertEqual(Action(TestThing.action_echo_async),
+ action()(TestThing.action_echo_async))
+ # 4. async classmethods can be decorated with action
+ self.assertEqual(Action(TestThing.action_echo_async_with_classmethod),
+ action()(TestThing.action_echo_async_with_classmethod))
+ self.assertTrue(isclassmethod(TestThing.action_echo_async_with_classmethod))
+ # 5. parameterized function can be decorated with action
+ self.assertEqual(Action(TestThing.parameterized_action),
+ action(safe=True)(TestThing.parameterized_action))
+ self.assertEqual(Action(TestThing.parameterized_action_without_call),
+ action(idempotent=True)(TestThing.parameterized_action_without_call))
+ self.assertEqual(Action(TestThing.parameterized_action_async),
+ action(synchronous=True)(TestThing.parameterized_action_async))
+ # 6. actions with input and output schema
+ self.assertEqual(Action(TestThing.json_schema_validated_action),
+ action(input_schema={'val1': 'integer', 'val2': 'string', 'val3': 'object', 'val4': 'array'},
+ output_schema={'val1': 'int', 'val3': 'dict'})(TestThing.json_schema_validated_action))
+ self.assertEqual(Action(TestThing.pydantic_validated_action),
+ action()(TestThing.pydantic_validated_action))
+
+
+ def test_2_bound_method(self):
+ """Test if methods decorated with action are correctly bound"""
+ thing = TestThing(id='test-action', log_level=logging.ERROR)
+ replace_methods_with_actions(thing_cls=TestThing)
+
+ # 1. instance method can be decorated with action
+ self.assertIsInstance(thing.action_echo, BoundAction)
+ self.assertIsInstance(thing.action_echo, BoundSyncAction)
+ self.assertNotIsInstance(thing.action_echo, BoundAsyncAction)
+ self.assertIsInstance(TestThing.action_echo, Action)
+ self.assertNotIsInstance(TestThing.action_echo, BoundAction)
+ # associated attributes of BoundAction
+ assert isinstance(thing.action_echo, BoundAction) # type definition
+ self.assertEqual(thing.action_echo.name, 'action_echo')
+ self.assertEqual(thing.action_echo.owner_inst, thing)
+ self.assertEqual(thing.action_echo.owner, TestThing)
+ self.assertEqual(thing.action_echo.execution_info, TestThing.action_echo.execution_info)
+ self.assertEqual(str(thing.action_echo),
+ f"")
+ self.assertNotEqual(thing.action_echo, TestThing.action_echo)
+ self.assertEqual(thing.action_echo.bound_obj, thing)
+
+ # 2. classmethod can be decorated with action
+ self.assertIsInstance(thing.action_echo_with_classmethod, BoundAction)
+ self.assertIsInstance(thing.action_echo_with_classmethod, BoundSyncAction)
+ self.assertNotIsInstance(thing.action_echo_with_classmethod, BoundAsyncAction)
+ self.assertIsInstance(TestThing.action_echo_with_classmethod, BoundAction)
+ self.assertIsInstance(TestThing.action_echo_with_classmethod, BoundSyncAction)
+ self.assertNotIsInstance(TestThing.action_echo_with_classmethod, Action)
+ # associated attributes of BoundAction
+ assert isinstance(thing.action_echo_with_classmethod, BoundAction)
+ self.assertEqual(thing.action_echo_with_classmethod.name, 'action_echo_with_classmethod')
+ self.assertEqual(thing.action_echo_with_classmethod.owner_inst, thing)
+ self.assertEqual(thing.action_echo_with_classmethod.owner, TestThing)
+ self.assertEqual(thing.action_echo_with_classmethod.execution_info, TestThing.action_echo_with_classmethod.execution_info)
+ self.assertEqual(str(thing.action_echo_with_classmethod),
+ f"")
+ self.assertEqual(thing.action_echo_with_classmethod, TestThing.action_echo_with_classmethod)
+ self.assertEqual(thing.action_echo_with_classmethod.bound_obj, TestThing)
+
+ # 3. async methods can be decorated with action
+ self.assertIsInstance(thing.action_echo_async, BoundAction)
+ self.assertNotIsInstance(thing.action_echo_async, BoundSyncAction)
+ self.assertIsInstance(thing.action_echo_async, BoundAsyncAction)
+ self.assertIsInstance(TestThing.action_echo_async, Action)
+ self.assertNotIsInstance(TestThing.action_echo_async, BoundAction)
+ # associated attributes of BoundAction
+ assert isinstance(thing.action_echo_async, BoundAction)
+ self.assertEqual(thing.action_echo_async.name, 'action_echo_async')
+ self.assertEqual(thing.action_echo_async.owner_inst, thing)
+ self.assertEqual(thing.action_echo_async.owner, TestThing)
+ self.assertEqual(thing.action_echo_async.execution_info, TestThing.action_echo_async.execution_info)
+ self.assertEqual(str(thing.action_echo_async),
+ f"")
+ self.assertNotEqual(thing.action_echo_async, TestThing.action_echo_async)
+ self.assertEqual(thing.action_echo_async.bound_obj, thing)
+
+ # 4. async classmethods can be decorated with action
+ self.assertIsInstance(thing.action_echo_async_with_classmethod, BoundAction)
+ self.assertNotIsInstance(thing.action_echo_async_with_classmethod, BoundSyncAction)
+ self.assertIsInstance(thing.action_echo_async_with_classmethod, BoundAsyncAction)
+ self.assertIsInstance(TestThing.action_echo_async_with_classmethod, BoundAction)
+ self.assertIsInstance(TestThing.action_echo_async_with_classmethod, BoundAsyncAction)
+ self.assertNotIsInstance(TestThing.action_echo_async_with_classmethod, Action)
+ # associated attributes of BoundAction
+ assert isinstance(thing.action_echo_async_with_classmethod, BoundAction)
+ self.assertEqual(thing.action_echo_async_with_classmethod.name, 'action_echo_async_with_classmethod')
+ self.assertEqual(thing.action_echo_async_with_classmethod.owner_inst, thing)
+ self.assertEqual(thing.action_echo_async_with_classmethod.owner, TestThing)
+ self.assertEqual(thing.action_echo_async_with_classmethod.execution_info, TestThing.action_echo_async_with_classmethod.execution_info)
+ self.assertEqual(str(thing.action_echo_async_with_classmethod),
+ f"")
+ self.assertEqual(thing.action_echo_async_with_classmethod, TestThing.action_echo_async_with_classmethod)
+ self.assertEqual(thing.action_echo_async_with_classmethod.bound_obj, TestThing)
+
+ # 5. parameterized function can be decorated with action
+ self.assertIsInstance(thing.parameterized_action, BoundAction)
+ self.assertIsInstance(thing.parameterized_action, BoundSyncAction)
+ self.assertNotIsInstance(thing.parameterized_action, BoundAsyncAction)
+ self.assertIsInstance(TestThing.parameterized_action, Action)
+ self.assertNotIsInstance(TestThing.parameterized_action, BoundAction)
+ # associated attributes of BoundAction
+ assert isinstance(thing.parameterized_action, BoundAction)
+ self.assertEqual(thing.parameterized_action.name, 'parameterized_action')
+ self.assertEqual(thing.parameterized_action.owner_inst, thing)
+ self.assertEqual(thing.parameterized_action.owner, TestThing)
+ self.assertEqual(thing.parameterized_action.execution_info, TestThing.parameterized_action.execution_info)
+ self.assertEqual(str(thing.parameterized_action),
+ f"")
+ self.assertNotEqual(thing.parameterized_action, TestThing.parameterized_action)
+ self.assertEqual(thing.parameterized_action.bound_obj, thing)
+
+ # 6. parameterized function can be decorated with action
+ self.assertIsInstance(thing.parameterized_action_without_call, BoundAction)
+ self.assertIsInstance(thing.parameterized_action_without_call, BoundSyncAction)
+ self.assertNotIsInstance(thing.parameterized_action_without_call, BoundAsyncAction)
+ self.assertIsInstance(TestThing.parameterized_action_without_call, Action)
+ self.assertNotIsInstance(TestThing.parameterized_action_without_call, BoundAction)
+ # associated attributes of BoundAction
+ assert isinstance(thing.parameterized_action_without_call, BoundAction)
+ self.assertEqual(thing.parameterized_action_without_call.name, 'parameterized_action_without_call')
+ self.assertEqual(thing.parameterized_action_without_call.owner_inst, thing)
+ self.assertEqual(thing.parameterized_action_without_call.owner, TestThing)
+ self.assertEqual(thing.parameterized_action_without_call.execution_info, TestThing.parameterized_action_without_call.execution_info)
+ self.assertEqual(str(thing.parameterized_action_without_call),
+ f"")
+ self.assertNotEqual(thing.parameterized_action_without_call, TestThing.parameterized_action_without_call)
+ self.assertEqual(thing.parameterized_action_without_call.bound_obj, thing)
+
+ # 7. parameterized function can be decorated with action
+ self.assertIsInstance(thing.parameterized_action_async, BoundAction)
+ self.assertNotIsInstance(thing.parameterized_action_async, BoundSyncAction)
+ self.assertIsInstance(thing.parameterized_action_async, BoundAsyncAction)
+ self.assertIsInstance(TestThing.parameterized_action_async, Action)
+ self.assertNotIsInstance(TestThing.parameterized_action_async, BoundAction)
+ # associated attributes of BoundAction
+ assert isinstance(thing.parameterized_action_async, BoundAction)
+ self.assertEqual(thing.parameterized_action_async.name, 'parameterized_action_async')
+ self.assertEqual(thing.parameterized_action_async.owner_inst, thing)
+ self.assertEqual(thing.parameterized_action_async.owner, TestThing)
+ self.assertEqual(thing.parameterized_action_async.execution_info, TestThing.parameterized_action_async.execution_info)
+ self.assertEqual(str(thing.parameterized_action_async),
+ f"")
+ self.assertNotEqual(thing.parameterized_action_async, TestThing.parameterized_action_async)
+ self.assertEqual(thing.parameterized_action_async.bound_obj, thing)
+
+ # 8. actions with input and output schema
+ self.assertIsInstance(thing.json_schema_validated_action, BoundAction)
+ self.assertIsInstance(thing.json_schema_validated_action, BoundSyncAction)
+ self.assertNotIsInstance(thing.json_schema_validated_action, BoundAsyncAction)
+ self.assertIsInstance(TestThing.json_schema_validated_action, Action)
+ self.assertNotIsInstance(TestThing.json_schema_validated_action, BoundAction)
+ # associated attributes of BoundAction
+ assert isinstance(thing.json_schema_validated_action, BoundAction)
+ self.assertEqual(thing.json_schema_validated_action.name, 'json_schema_validated_action')
+ self.assertEqual(thing.json_schema_validated_action.owner_inst, thing)
+ self.assertEqual(thing.json_schema_validated_action.owner, TestThing)
+ self.assertEqual(thing.json_schema_validated_action.execution_info, TestThing.json_schema_validated_action.execution_info)
+ self.assertEqual(str(thing.json_schema_validated_action),
+ f"")
+ self.assertNotEqual(thing.json_schema_validated_action, TestThing.json_schema_validated_action)
+ self.assertEqual(thing.json_schema_validated_action.bound_obj, thing)
+
+
+ def test_3_remote_info(self):
+ """Test if the validator is working correctly, on which the logic of the action is based"""
+        # basic check if the remote_info is correct, although this test is not necessary, not recommended and
+        # not particularly useful
+ remote_info = TestThing.action_echo.execution_info
+ self.assertIsInstance(remote_info, ActionInfoValidator)
+ assert isinstance(remote_info, ActionInfoValidator) # type definition
+ self.assertTrue(remote_info.isaction)
+ self.assertFalse(remote_info.isproperty)
+ self.assertFalse(remote_info.isparameterized)
+ self.assertFalse(remote_info.iscoroutine)
+ self.assertFalse(remote_info.safe)
+ self.assertFalse(remote_info.idempotent)
+ self.assertTrue(remote_info.synchronous)
+
+ remote_info = TestThing.action_echo_async.execution_info
+ self.assertIsInstance(remote_info, ActionInfoValidator)
+ assert isinstance(remote_info, ActionInfoValidator) # type definition
+ self.assertTrue(remote_info.isaction)
+ self.assertTrue(remote_info.iscoroutine)
+ self.assertFalse(remote_info.isproperty)
+ self.assertFalse(remote_info.isparameterized)
+ self.assertFalse(remote_info.safe)
+ self.assertFalse(remote_info.idempotent)
+ self.assertTrue(remote_info.synchronous)
+
+ remote_info = TestThing.action_echo_with_classmethod.execution_info
+ self.assertIsInstance(remote_info, ActionInfoValidator)
+ assert isinstance(remote_info, ActionInfoValidator) # type definition
+ self.assertTrue(remote_info.isaction)
+ self.assertFalse(remote_info.iscoroutine)
+ self.assertFalse(remote_info.isproperty)
+ self.assertFalse(remote_info.isparameterized)
+ self.assertFalse(remote_info.safe)
+ self.assertFalse(remote_info.idempotent)
+ self.assertTrue(remote_info.synchronous)
+
+ remote_info = TestThing.parameterized_action.execution_info
+ self.assertIsInstance(remote_info, ActionInfoValidator)
+ assert isinstance(remote_info, ActionInfoValidator)
+ self.assertTrue(remote_info.isaction)
+ self.assertFalse(remote_info.iscoroutine)
+ self.assertFalse(remote_info.isproperty)
+ self.assertTrue(remote_info.isparameterized)
+ self.assertTrue(remote_info.safe)
+ self.assertFalse(remote_info.idempotent)
+ self.assertTrue(remote_info.synchronous)
+
+ remote_info = TestThing.parameterized_action_without_call.execution_info
+ self.assertIsInstance(remote_info, ActionInfoValidator)
+ assert isinstance(remote_info, ActionInfoValidator)
+ self.assertTrue(remote_info.isaction)
+ self.assertFalse(remote_info.iscoroutine)
+ self.assertFalse(remote_info.isproperty)
+ self.assertTrue(remote_info.isparameterized)
+ self.assertFalse(remote_info.safe)
+ self.assertTrue(remote_info.idempotent)
+ self.assertTrue(remote_info.synchronous)
+
+ remote_info = TestThing.parameterized_action_async.execution_info
+ self.assertIsInstance(remote_info, ActionInfoValidator)
+ assert isinstance(remote_info, ActionInfoValidator)
+ self.assertTrue(remote_info.isaction)
+ self.assertTrue(remote_info.iscoroutine)
+ self.assertFalse(remote_info.isproperty)
+ self.assertTrue(remote_info.isparameterized)
+ self.assertFalse(remote_info.safe)
+ self.assertFalse(remote_info.idempotent)
+ self.assertTrue(remote_info.synchronous)
+
+ remote_info = TestThing.json_schema_validated_action.execution_info
+ self.assertIsInstance(remote_info, ActionInfoValidator)
+ assert isinstance(remote_info, ActionInfoValidator)
+ self.assertTrue(remote_info.isaction)
+ self.assertFalse(remote_info.iscoroutine)
+ self.assertFalse(remote_info.isproperty)
+ self.assertFalse(remote_info.isparameterized)
+ self.assertFalse(remote_info.safe)
+ self.assertFalse(remote_info.idempotent)
+ self.assertTrue(remote_info.synchronous)
+ self.assertIsInstance(remote_info.schema_validator, JSONSchemaValidator)
+
+
+ def test_4_api_and_invalid_actions(self):
+ """Test if action prevents invalid objects from being named as actions and raises neat errors"""
+        # don't allow the action decorator to be used without '()' on a method
+ with self.assertRaises(TypeError) as ex:
+ action(TestThing.incorrectly_decorated_method)
+ self.assertTrue(str(ex.exception).startswith("input schema should be a JSON or pydantic BaseModel, not a function/method, did you decorate your action wrongly?"))
+
+ # dunder methods cannot be decorated with action
+ with self.assertRaises(ValueError) as ex:
+ action()(TestThing.__internal__)
+ self.assertTrue(str(ex.exception).startswith("dunder objects cannot become remote"))
+
+ # only functions and methods can be decorated with action
+ for obj in [TestThing, str, 1, 1.0, 'Str', True, None, object(), type, property]:
+ with self.assertRaises(TypeError) as ex:
+ action()(obj) # not an action
+ self.assertTrue(str(ex.exception).startswith("target for action or is not a function/method."))
+
+ with self.assertRaises(ValueError) as ex:
+ action(safe=True, some_kw=1)
+ self.assertTrue(str(ex.exception).startswith("Only 'safe', 'idempotent', 'synchronous' are allowed"))
+
+
+ def test_5_thing_cls_actions(self):
+ """Test class and instance level action access"""
+ thing = TestThing(id='test-action', log_level=logging.ERROR)
+ # class level
+ for name, action in TestThing.actions.descriptors.items():
+ self.assertIsInstance(action, Action)
+ for name in replace_methods_with_actions._exposed_actions:
+ self.assertTrue(name in TestThing.actions)
+ # instance level
+ for name, action in thing.actions.values.items():
+ self.assertIsInstance(action, BoundAction)
+ for name in replace_methods_with_actions._exposed_actions:
+ self.assertTrue(name in thing.actions)
+ # cannot call an instance bound action at class level
+ self.assertRaises(NotImplementedError, lambda: TestThing.action_echo(thing, 1))
+ # but can call instance bound action with instance
+ self.assertEqual(1, thing.action_echo(1))
+ # can also call classmethods as usual
+ self.assertEqual(2, TestThing.action_echo_with_classmethod(2))
+ self.assertEqual(3, thing.action_echo_with_classmethod(3))
+ # async methods behave similarly
+ self.assertEqual(4, asyncio.run(thing.action_echo_async(4)))
+ self.assertEqual(5, asyncio.run(TestThing.action_echo_async_with_classmethod(5)))
+ self.assertRaises(NotImplementedError, lambda: asyncio.run(TestThing.action_echo(7)))
+ # parameterized actions behave similarly
+ self.assertEqual(('test-action', 1, 'hello1', 1.1), thing.parameterized_action(1, 'hello1', 1.1))
+ self.assertEqual(('test-action', 2, 'hello2', 'foo2'), asyncio.run(thing.parameterized_action_async(2, 'hello2', 'foo2')))
+ self.assertRaises(NotImplementedError, lambda: TestThing.parameterized_action(3, 'hello3', 5))
+ self.assertRaises(NotImplementedError, lambda: asyncio.run(TestThing.parameterized_action_async(4, 'hello4', 5)))
+
+
+ def test_6_action_affordance(self):
+ """Test if action affordance is correctly created"""
+ thing = TestThing(id='test-action', log_level=logging.ERROR)
+
+ assert isinstance(thing.action_echo, BoundAction) # type definition
+ affordance = thing.action_echo.to_affordance()
+ self.assertIsInstance(affordance, ActionAffordance)
+ self.assertIsNone(affordance.idempotent) # by default, not idempotent
+        self.assertTrue(affordance.synchronous) # by default, synchronous
+ self.assertIsNone(affordance.safe) # by default, not safe
+ self.assertIsNone(affordance.input) # no input schema
+ self.assertIsNone(affordance.output) # no output schema
+ self.assertIsNone(affordance.description) # no doc
+
+ assert isinstance(thing.action_echo_with_classmethod, BoundAction) # type definition
+ affordance = thing.action_echo_with_classmethod.to_affordance()
+ self.assertIsInstance(affordance, ActionAffordance)
+ self.assertIsNone(affordance.idempotent) # by default, not idempotent
+ self.assertTrue(affordance.synchronous) # by default, synchronous
+ self.assertIsNone(affordance.safe) # by default, not safe
+ self.assertIsNone(affordance.input) # no input schema
+ self.assertIsNone(affordance.output) # no output schema
+ self.assertIsNone(affordance.description) # no doc
+
+ assert isinstance(thing.action_echo_async, BoundAction) # type definition
+ affordance = thing.action_echo_async.to_affordance()
+ self.assertIsInstance(affordance, ActionAffordance)
+ self.assertIsNone(affordance.idempotent) # by default, not idempotent
+ self.assertTrue(affordance.synchronous) # by default, synchronous
+ self.assertIsNone(affordance.safe) # by default, not safe
+ self.assertIsNone(affordance.input) # no input schema
+ self.assertIsNone(affordance.output) # no output schema
+ self.assertIsNone(affordance.description) # no doc
+
+ assert isinstance(thing.action_echo_async_with_classmethod, BoundAction) # type definition
+ affordance = thing.action_echo_async_with_classmethod.to_affordance()
+ self.assertIsInstance(affordance, ActionAffordance)
+ self.assertIsNone(affordance.idempotent) # by default, not idempotent
+ self.assertTrue(affordance.synchronous) # by default, synchronous
+ self.assertIsNone(affordance.safe) # by default, not safe
+ self.assertIsNone(affordance.input) # no input schema
+ self.assertIsNone(affordance.output) # no output schema
+ self.assertIsNone(affordance.description) # no doc
+
+ assert isinstance(thing.parameterized_action, BoundAction) # type definition
+ affordance = thing.parameterized_action.to_affordance()
+ self.assertIsInstance(affordance, ActionAffordance)
+ self.assertIsNone(affordance.idempotent)
+ self.assertTrue(affordance.synchronous)
+ self.assertTrue(affordance.safe)
+ self.assertIsNone(affordance.input)
+ self.assertIsNone(affordance.output)
+ self.assertIsNone(affordance.description)
+
+ assert isinstance(thing.parameterized_action_without_call, BoundAction) # type definition
+ affordance = thing.parameterized_action_without_call.to_affordance()
+ self.assertIsInstance(affordance, ActionAffordance)
+ self.assertTrue(affordance.idempotent) # by default, not idempotent
+ self.assertTrue(affordance.synchronous) # by default, synchronous
+ self.assertIsNone(affordance.safe) # by default, not safe
+ self.assertIsNone(affordance.input) # no input schema
+ self.assertIsNone(affordance.output) # no output schema
+ self.assertIsNone(affordance.description) # no doc
+
+ assert isinstance(thing.parameterized_action_async, BoundAction) # type definition
+ affordance = thing.parameterized_action_async.to_affordance()
+ self.assertIsInstance(affordance, ActionAffordance)
+ self.assertIsNone(affordance.idempotent) # by default, not idempotent
+        self.assertTrue(affordance.synchronous) # by default, synchronous
+ self.assertIsNone(affordance.safe) # by default, not safe
+ self.assertIsNone(affordance.input) # no input schema
+ self.assertIsNone(affordance.output) # no output schema
+ self.assertIsNone(affordance.description) # no doc
+
+ assert isinstance(thing.json_schema_validated_action, BoundAction) # type definition
+ affordance = thing.json_schema_validated_action.to_affordance()
+ self.assertIsInstance(affordance, ActionAffordance)
+ self.assertIsNone(affordance.idempotent) # by default, not idempotent
+        self.assertTrue(affordance.synchronous) # by default, synchronous
+ self.assertIsNone(affordance.safe) # by default, not safe
+ self.assertIsInstance(affordance.input, dict)
+ self.assertIsInstance(affordance.output, dict)
+ self.assertIsNone(affordance.description) # no doc
+
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
\ No newline at end of file
diff --git a/tests/test_07_properties.py b/tests/test_07_properties.py
new file mode 100644
index 00000000..208a8959
--- /dev/null
+++ b/tests/test_07_properties.py
@@ -0,0 +1,181 @@
+import logging, unittest
+
+from hololinked.core.properties import Number
+
+try:
+ from .utils import TestCase, TestRunner
+ from .things import TestThing
+except ImportError:
+ from utils import TestCase, TestRunner
+ from things import TestThing
+
+
+
+class TestProperty(TestCase):
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ print(f"test property with {self.__name__}")
+
+ @classmethod
+ def tearDownClass(self):
+ print("\ntear down test property")
+ # self.thing_client.exit()
+
+
+ def test_1_simple_class_property(self):
+ """Test basic class property functionality"""
+ # Test class-level access
+ self.assertEqual(TestThing.simple_class_prop, 42)
+ TestThing.simple_class_prop = 100
+ self.assertEqual(TestThing.simple_class_prop, 100)
+
+ # Test that instance-level access reflects class value
+ instance1 = TestThing(id='test1', log_level=logging.WARN)
+ instance2 = TestThing(id='test2', log_level=logging.WARN)
+ self.assertEqual(instance1.simple_class_prop, 100)
+ self.assertEqual(instance2.simple_class_prop, 100)
+
+ # Test that instance-level changes affect class value
+ instance1.simple_class_prop = 200
+ self.assertEqual(TestThing.simple_class_prop, 200)
+ self.assertEqual(instance2.simple_class_prop, 200)
+
+
+ def test_2_managed_class_property(self):
+ """Test class property with custom getter/setter"""
+ # Test initial value
+ self.assertEqual(TestThing.managed_class_prop, 0)
+ # Test valid value assignment
+ TestThing.managed_class_prop= 50
+ self.assertEqual(TestThing.managed_class_prop, 50)
+ # Test validation in setter
+ with self.assertRaises(ValueError):
+ TestThing.managed_class_prop = -10
+ # Verify value wasn't changed after failed assignment
+ self.assertEqual(TestThing.managed_class_prop, 50)
+ # Test instance-level validation
+ instance = TestThing(id='test3', log_level=logging.WARN)
+ with self.assertRaises(ValueError):
+ instance.managed_class_prop = -20
+ # Test that instance-level access reflects class value
+ self.assertEqual(instance.managed_class_prop, 50)
+        # Test that instance-level changes affect the class value
+ instance.managed_class_prop = 100
+ self.assertEqual(TestThing.managed_class_prop, 100)
+ self.assertEqual(instance.managed_class_prop, 100)
+
+
+ def test_3_readonly_class_property(self):
+ """Test read-only class property behavior"""
+ # Test reading the value
+ self.assertEqual(TestThing.readonly_class_prop, "read-only-value")
+
+ # Test that setting raises an error at class level
+ with self.assertRaises(ValueError):
+ TestThing.readonly_class_prop = "new-value"
+
+ # Test that setting raises an error at instance level
+ instance = TestThing(id='test4', log_level=logging.WARN)
+ with self.assertRaises(ValueError):
+ instance.readonly_class_prop = "new-value"
+
+ # Verify value remains unchanged
+ self.assertEqual(TestThing.readonly_class_prop, "read-only-value")
+ self.assertEqual(instance.readonly_class_prop, "read-only-value")
+
+
+ def test_4_deletable_class_property(self):
+ """Test class property deletion"""
+ # Test initial value
+ self.assertEqual(TestThing.deletable_class_prop, 100)
+
+ # Test setting new value
+ TestThing.deletable_class_prop = 150
+ self.assertEqual(TestThing.deletable_class_prop, 150)
+
+ # Test deletion
+ instance = TestThing(id='test5', log_level=logging.WARN)
+ del TestThing.deletable_class_prop
+ self.assertEqual(TestThing.deletable_class_prop, 100) # Should return to default
+ self.assertEqual(instance.deletable_class_prop, 100)
+
+ # Test instance-level deletion
+ instance.deletable_class_prop = 200
+ self.assertEqual(TestThing.deletable_class_prop, 200)
+ del instance.deletable_class_prop
+ self.assertEqual(TestThing.deletable_class_prop, 100) # Should return to default
+
+
+ def test_5_descriptor_access(self):
+ """Test descriptor access for class properties"""
+ # Test direct access through descriptor
+ instance = TestThing(id='test6', log_level=logging.WARN)
+ self.assertIsInstance(TestThing.not_a_class_prop, Number)
+ self.assertEqual(instance.not_a_class_prop, 43)
+ instance.not_a_class_prop = 50
+ self.assertEqual(instance.not_a_class_prop, 50)
+
+ del instance.not_a_class_prop
+ # deleter deletes only an internal instance variable
+ self.assertTrue(hasattr(TestThing, 'not_a_class_prop'))
+ self.assertEqual(instance.not_a_class_prop, 43)
+
+ del TestThing.not_a_class_prop
+ # descriptor itself is deleted
+ self.assertFalse(hasattr(TestThing, 'not_a_class_prop'))
+ self.assertFalse(hasattr(instance, 'not_a_class_prop'))
+ with self.assertRaises(AttributeError):
+ instance.not_a_class_prop
+
+
+
+
+# def test_7_json_db_operations(self):
+# with tempfile.NamedTemporaryFile(delete=False) as tf:
+# filename = tf.name
+
+# # test db commit property
+# thing = TestThing(id="test-db-operations", use_json_file=True,
+# json_filename=filename, log_level=logging.WARN)
+# self.assertEqual(thing.db_commit_number_prop, 0)
+# thing.db_commit_number_prop = 100
+# self.assertEqual(thing.db_commit_number_prop, 100)
+# self.assertEqual(thing.db_engine.get_property('db_commit_number_prop'), 100)
+
+# # test db persist property
+# self.assertEqual(thing.db_persist_selector_prop, 'a')
+# thing.db_persist_selector_prop = 'c'
+# self.assertEqual(thing.db_persist_selector_prop, 'c')
+# self.assertEqual(thing.db_engine.get_property('db_persist_selector_prop'), 'c')
+
+# # test db init property
+# self.assertEqual(thing.db_init_int_prop, 1)
+# thing.db_init_int_prop = 50
+# self.assertEqual(thing.db_init_int_prop, 50)
+# self.assertNotEqual(thing.db_engine.get_property('db_init_int_prop'), 50)
+# self.assertEqual(thing.db_engine.get_property('db_init_int_prop'), TestThing.db_init_int_prop.default)
+# del thing
+
+# # delete thing and reload from database
+# thing = TestThing(id="test-db-operations", use_json_file=True,
+# json_filename=filename, log_level=logging.WARN)
+# self.assertEqual(thing.db_init_int_prop, TestThing.db_init_int_prop.default)
+# self.assertEqual(thing.db_persist_selector_prop, 'c')
+# self.assertNotEqual(thing.db_commit_number_prop, 100)
+# self.assertEqual(thing.db_commit_number_prop, TestThing.db_commit_number_prop.default)
+
+# # check db init prop with a different value in database apart from default
+# thing.db_engine.set_property('db_init_int_prop', 101)
+# del thing
+# thing = TestThing(id="test-db-operations", use_json_file=True,
+# json_filename=filename, log_level=logging.WARN)
+# self.assertEqual(thing.db_init_int_prop, 101)
+
+# os.remove(filename)
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
+
\ No newline at end of file
diff --git a/tests/test_08_events.py b/tests/test_08_events.py
new file mode 100644
index 00000000..f651f25e
--- /dev/null
+++ b/tests/test_08_events.py
@@ -0,0 +1,102 @@
+import asyncio
+import unittest
+import logging
+
+
+from hololinked.core.events import Event, EventDispatcher
+from hololinked.core.zmq.brokers import EventPublisher
+from hololinked.td.interaction_affordance import EventAffordance
+from hololinked.schema_validators import JSONSchemaValidator
+
+try:
+ from .utils import TestCase, TestRunner
+ from .things import TestThing
+except ImportError:
+ from utils import TestCase, TestRunner
+ from things import TestThing
+
+
+
+class TestEvents(TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ print(f"test events with {cls.__name__}")
+
+ @classmethod
+ def tearDownClass(cls):
+ print("\ntear down test events")
+
+ def _test_dispatcher(self, descriptor: Event, dispatcher: EventDispatcher, thing: TestThing):
+ """pass the event descriptor and the dispatcher to test the dispatcher"""
+ self.assertIsInstance(dispatcher, EventDispatcher) # instance access returns dispatcher
+ self.assertTrue(dispatcher._owner_inst is thing) # dispatcher has the owner instance
+ self.assertTrue(
+ (thing.rpc_server and thing.rpc_server.event_publisher and isinstance(dispatcher.publisher, EventPublisher)) # publisher is set
+ or dispatcher.publisher is None # publisher is not set if no rpc_server
+ )
+ self.assertEqual(
+ dispatcher._unique_identifier,
+ f'{thing._qualified_id}/{descriptor._internal_name}'
+ )
+
+
+ def test_1_pure_events(self):
+ """Test basic event functionality"""
+
+ # 1. Test class-level access to event descriptor
+ self.assertIsInstance(TestThing.test_event, Event) # class access returns descriptor
+        # self.assertFalse(TestThing.test_event._observable) # not an observable property
+
+ # 2. Test instance-level access to event dispatcher which is returned by the descriptor
+ thing = TestThing(id='test-event', log_level=logging.WARN)
+ self._test_dispatcher(TestThing.test_event, thing.test_event, thing) # test dispatcher returned by descriptor
+
+ # 3. Event with JSON schema has schema variable set
+
+
+ def test_2_observable_events(self):
+ """Test observable event (of properties) functionality"""
+
+ # 1. observable properties have an event descriptor associated with them as a reference
+ self.assertIsInstance(TestThing.observable_list_prop._observable_event_descriptor, Event)
+ self.assertIsInstance(TestThing.state._observable_event_descriptor, Event)
+ self.assertIsInstance(TestThing.observable_readonly_prop._observable_event_descriptor, Event)
+
+ # 2. observable descriptors have been assigned as an attribute of the owning class
+ self.assertTrue(hasattr(TestThing, TestThing.observable_list_prop._observable_event_descriptor.name))
+ self.assertTrue(hasattr(TestThing, TestThing.state._observable_event_descriptor.name))
+ self.assertTrue(hasattr(TestThing, TestThing.observable_readonly_prop._observable_event_descriptor.name))
+
+ # 3. accessing those descriptors returns the event dispatcher
+ thing = TestThing(id='test-event', log_level=logging.WARN)
+ self._test_dispatcher(
+ TestThing.observable_list_prop._observable_event_descriptor,
+ getattr(thing, TestThing.observable_list_prop._observable_event_descriptor.name, None),
+ thing
+ ) # test dispatcher returned by descriptor
+ self._test_dispatcher(
+ TestThing.state._observable_event_descriptor,
+ getattr(thing, TestThing.state._observable_event_descriptor.name, None),
+ thing
+ )
+ self._test_dispatcher(
+ TestThing.observable_readonly_prop._observable_event_descriptor,
+ getattr(thing, TestThing.observable_readonly_prop._observable_event_descriptor.name, None),
+ thing
+ )
+
+
+ def test_3_event_affordance(self):
+ """Test event affordance generation"""
+
+ # 1. Test event affordance generation
+ thing = TestThing(id='test-event', log_level=logging.WARN)
+ event = TestThing.test_event.to_affordance(thing)
+ self.assertIsInstance(event, EventAffordance)
+
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
\ No newline at end of file
diff --git a/tests/test_09_rpc_broker.py b/tests/test_09_rpc_broker.py
new file mode 100644
index 00000000..e25d121a
--- /dev/null
+++ b/tests/test_09_rpc_broker.py
@@ -0,0 +1,998 @@
+import asyncio
+import threading
+import typing
+import unittest
+import zmq.asyncio
+import jsonschema
+import logging
+import random
+import time
+
+from hololinked.core.actions import BoundAction
+from hololinked.core.property import Property
+from hololinked.core.thing import Thing
+from hololinked.core.zmq.brokers import AsyncEventConsumer, AsyncZMQClient, EventConsumer, EventPublisher, SyncZMQClient
+from hololinked.core.zmq.message import EXIT, RequestMessage
+from hololinked.core.zmq.rpc_server import RPCServer
+from hololinked.server.zmq import ZMQServer
+from hololinked.td.utils import get_zmq_unique_identifier_from_event_affordance
+from hololinked.utils import get_all_sub_things_recusively, get_current_async_loop
+from hololinked.td import ActionAffordance, PropertyAffordance, EventAffordance
+from hololinked.client.zmq.consumed_interactions import ZMQAction, ZMQProperty, ZMQEvent
+
+try:
+ from .test_05_brokers import TestBrokerMixin
+ from .test_06_actions import replace_methods_with_actions
+ from .utils import TestRunner, TestCase
+ from .things import run_thing_with_zmq_server_forked, test_thing_TD, TestThing
+except ImportError:
+ from tests.test_05_brokers import TestBrokerMixin
+ from tests.test_06_actions import replace_methods_with_actions
+ from utils import TestRunner, TestCase
+ from things import run_thing_with_zmq_server_forked, test_thing_TD, TestThing
+
+
+
+data_structures = [
+ {"key": "value"},
+ [1, 2, 3],
+ "string",
+ 42,
+ 3.14,
+ True,
+ None,
+ {"nested": {"key": "value"}},
+ [{"list": "of"}, {"dicts": "here"}],
+ {"complex": {"nested": {"list": [1, 2, 3]}, "mixed": [1, "two", 3.0, None]}},
+ {"array": [1, 2, 3]}
+] # to use for testing
+
+
+
+class InteractionAffordanceMixin(TestBrokerMixin):
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ self.setUpActions()
+ self.setUpProperties()
+ self.setUpEvents()
+
+ @classmethod
+ def setUpActions(self):
+ self.action_echo = ZMQAction(
+ resource=ActionAffordance.from_TD('action_echo', test_thing_TD),
+ sync_client=self.sync_client,
+ async_client=self.async_client,
+ invokation_timeout=5,
+ execution_timeout=5,
+ schema_validator=None
+ )
+
+ self.action_get_serialized_data = ZMQAction(
+ resource=ActionAffordance.from_TD('get_serialized_data', test_thing_TD),
+ sync_client=self.sync_client,
+ async_client=self.async_client,
+ invokation_timeout=5,
+ execution_timeout=5,
+ schema_validator=None
+ )
+
+ self.action_sleep = ZMQAction(
+ resource=ActionAffordance.from_TD('sleep', test_thing_TD),
+ sync_client=self.sync_client,
+ async_client=self.async_client,
+ invokation_timeout=5,
+ execution_timeout=5,
+ schema_validator=None
+ )
+
+ self.action_get_mixed_content_data = ZMQAction(
+ resource= ActionAffordance.from_TD('get_mixed_content_data', test_thing_TD),
+ sync_client=self.sync_client,
+ async_client=self.async_client,
+ invokation_timeout=5,
+ execution_timeout=5,
+ schema_validator=None
+ )
+ self.action_push_events = ZMQAction(
+ resource=ActionAffordance.from_TD('push_events', test_thing_TD),
+ sync_client=self.sync_client,
+ async_client=self.async_client,
+ invokation_timeout=5,
+ execution_timeout=5,
+ schema_validator=None
+ )
+
+ @classmethod
+ def setUpProperties(self):
+ self.base_property = ZMQProperty(
+ resource=PropertyAffordance.from_TD('base_property', test_thing_TD),
+ sync_client=self.sync_client,
+ async_client=self.async_client,
+ invokation_timeout=5,
+ execution_timeout=5,
+ schema_validator=None
+ )
+ self.total_number_of_events = ZMQProperty(
+ resource=PropertyAffordance.from_TD('total_number_of_events', test_thing_TD),
+ sync_client=self.sync_client,
+ async_client=self.async_client,
+ invokation_timeout=5,
+ execution_timeout=5,
+ schema_validator=None
+ )
+
+ @classmethod
+ def setUpEvents(self):
+ self.test_event = ZMQEvent(
+ resource=EventAffordance.from_TD('test_event', test_thing_TD),
+ sync_zmq_client=None
+ )
+
+
+
+class TestRPCServerMixin(InteractionAffordanceMixin):
+
+ @classmethod
+ def setUpThing(self):
+ replace_methods_with_actions(TestThing)
+ super().setUpThing()
+
+ @classmethod
+ def setUpServer(self):
+ self.server = RPCServer(
+ id=self.server_id,
+ things=[self.thing],
+ logger=self.logger,
+ context=self.context
+ )
+
+ @classmethod
+ def setUpClient(self):
+ self.async_client = AsyncZMQClient(
+ id=self.client_id,
+ server_id=self.server_id,
+ logger=self.logger,
+ context=self.context,
+ handshake=False,
+ transport='INPROC'
+ )
+ self.sync_client = SyncZMQClient(
+ id=self.client_id+'-sync',
+ server_id=self.server_id,
+ logger=self.logger,
+ context=self.context,
+ handshake=False,
+ transport='INPROC'
+ )
+
+ @classmethod
+ def startServer(self):
+ self._server_thread = threading.Thread(
+ target=self.server.run,
+ daemon=False # to test exit daemon must be False
+ )
+ self._server_thread.start()
+
+ @classmethod
+ def setUpClass(self):
+ self.context = zmq.asyncio.Context()
+ super().setUpClass()
+ print(f"test ZMQ RPC Server {self.__name__}")
+
+
+
+class TestInprocRPCServer(TestRPCServerMixin):
+
+    def test_1_creation_defaults(self):
+        """test server configuration defaults"""
+        self.assertTrue(self.server.req_rep_server.socket_address.startswith('inproc://'))
+        self.assertTrue(self.server.event_publisher.socket_address.startswith('inproc://'))
+        # assertEqual, not assertTrue(a, b): assertTrue's second argument is only a failure message
+        self.assertEqual(self.thing.rpc_server, self.server)
+        self.assertEqual(self.thing.event_publisher, self.server.event_publisher)
+
+
+ def test_2_handshake(self):
+ """test handshake mechanisms"""
+ self.sync_client.handshake()
+ async def async_handshake():
+ self.async_client.handshake()
+ await self.async_client.handshake_complete()
+ get_current_async_loop().run_until_complete(async_handshake())
+
+
+    def test_3_action_abstractions(self):
+        """test if action can be invoked by a client"""
+
+        async def test_basic_operations():
+            """Test if action can be invoked by a client in basic request/response way, oneway and no block"""
+            nonlocal self
+            await self.action_echo.async_call('value')
+            self.action_echo.oneway(5)
+            noblock_msg_id = self.action_echo.noblock(10)
+            self.assertEqual(self.action_echo.last_return_value, 'value')
+            # test the responses for no block call, so read the socket - but, this is usually abstracted in a higher level API
+            response = self.action_echo._sync_zmq_client.recv_response(noblock_msg_id)
+            self.action_echo._last_zmq_response = response
+            self.assertEqual(self.action_echo.last_return_value, 10)
+            self.assertEqual(self.action_echo(2), 2)
+
+        get_current_async_loop().run_until_complete(test_basic_operations())
+        self.sync_client.handshake()
+
+        async def test_operations_thorough():
+            # run through the module-level list of JSON serializable data structures
+            nonlocal self
+            global data_structures
+
+            msg_ids = [None for i in range(len(data_structures))]
+            last_call_type = None
+            # Randomize calls to self.action_echo
+            for index, data in enumerate(data_structures):
+                call_type = random.choice(["async_call", "plain_call", "oneway", "noblock"])
+                if call_type == "async_call":
+                    result = await self.action_echo.async_call(data)
+                    self.assertEqual(result, data)
+                elif call_type == "plain_call":
+                    result = self.action_echo(data)
+                    self.assertEqual(result, data)
+                elif call_type == "oneway":
+                    self.action_echo.oneway(data)
+                    self.assertNotEqual(data, self.action_echo.last_return_value)
+                elif call_type == "noblock":
+                    msg_ids[index] = self.action_echo.noblock(data)
+                    self.assertNotEqual(data, self.action_echo.last_return_value)
+
+                # a noblock call leaves its response on the socket; read it back on the next iteration
+                if last_call_type == "noblock":
+                    response = self.action_echo._sync_zmq_client.recv_response(msg_ids[index-1])
+                    self.action_echo._last_zmq_response = response
+                    self.assertEqual(self.action_echo.last_return_value, data_structures[index-1])
+
+                last_call_type = call_type
+
+        get_current_async_loop().run_until_complete(test_operations_thorough())
+        self.sync_client.handshake()
+
+
+    def test_4_property_abstractions(self):
+        """Test if property can be invoked by a client"""
+
+        def test_basic_operations():
+            nonlocal self
+            self.base_property.set(100)
+            self.assertEqual(self.base_property.get(), 100)
+            self.base_property.oneway_set(200)
+            self.assertEqual(self.base_property.get(), 200)
+
+        async def test_async_property_abstractions():
+            nonlocal self
+            await self.base_property.async_set(300)
+            self.assertEqual(self.base_property.get(), 300)
+            await self.base_property.async_set(0)
+            self.assertEqual(await self.base_property.async_get(), 0)
+
+        get_current_async_loop().run_until_complete(test_async_property_abstractions())
+
+        test_basic_operations()
+        self.sync_client.handshake()
+
+        async def test_operations_thorough():
+            # run through the module-level list of JSON serializable data structures
+            nonlocal self
+            global data_structures
+
+            msg_ids = [None for i in range(len(data_structures))]
+            last_call_type = None
+            # Randomize calls to self.base_property
+            for index, data in enumerate(data_structures):
+                call_type = random.choice(["async_set", "set", "oneway_set", "noblock_get"])
+                if call_type == "async_set":
+                    self.assertIsNone(await self.base_property.async_set(data))
+                    self.assertEqual(await self.base_property.async_get(), data)
+                elif call_type == "set":
+                    self.assertIsNone(self.base_property.set(data))
+                    self.assertEqual(self.base_property.get(), data)
+                elif call_type == "oneway_set":
+                    self.assertIsNone(self.base_property.oneway_set(data))
+                    self.assertNotEqual(data, self.base_property.last_read_value)
+                    self.assertEqual(data, self.base_property.get())
+                    # for one way calls as well, get() will return the latest value
+                elif call_type == "noblock_get":
+                    msg_ids[index] = self.base_property.noblock_get()
+                    self.assertNotEqual(data, self.base_property.last_read_value)
+
+                # "noblock_get", not "noblock" - the latter never occurs in the choices above
+                if last_call_type == "noblock_get":
+                    response = self.base_property._sync_zmq_client.recv_response(msg_ids[index-1])
+                    self.base_property._last_zmq_response = response
+                    self.assertEqual(self.base_property.last_read_value, data_structures[index-1])
+
+                last_call_type = call_type
+
+        get_current_async_loop().run_until_complete(test_operations_thorough())
+        self.sync_client.handshake()
+
+
+ def test_5_thing_execution_context(self):
+ """test if thing execution context is used correctly"""
+ old_thing_execution_context = self.action_echo._thing_execution_context
+ # Only fetch_execution_logs currently supported
+ self.action_echo._thing_execution_context = dict(fetch_execution_logs=True)
+ get_current_async_loop().run_until_complete(self.action_echo.async_call('value'))
+ self.assertIsInstance(self.action_echo.last_return_value, dict)
+ self.assertTrue('execution_logs' in self.action_echo.last_return_value.keys())
+ self.assertTrue('return_value' in self.action_echo.last_return_value.keys())
+ self.assertTrue(len(self.action_echo.last_return_value) == 2)
+ self.assertFalse(self.action_echo.last_return_value == 'value') # because its a dict now
+ self.assertIsInstance(self.action_echo.last_return_value['execution_logs'], list)
+ self.assertTrue(self.action_echo.last_return_value['return_value'] == 'value')
+ self.action_echo._thing_execution_context = old_thing_execution_context
+
+
+ def test_6_server_execution_context(self):
+ """test if server execution context is used correctly"""
+ async def test_execution_timeout():
+ try:
+ await self.action_sleep.async_call()
+ except Exception as ex:
+ self.assertIsInstance(ex, TimeoutError)
+ self.assertIn('Execution timeout occured', str(ex))
+ else:
+ self.assertTrue(False) # fail the test if reached here
+ get_current_async_loop().run_until_complete(test_execution_timeout())
+
+ async def test_invokation_timeout():
+ try:
+ old_timeout = self.action_sleep._invokation_timeout
+ self.action_sleep._invokation_timeout = 0.1 # reduce the value to test timeout
+ await self.action_sleep.async_call()
+ except Exception as ex:
+ self.assertIsInstance(ex, TimeoutError)
+ self.assertIn('Invokation timeout occured', str(ex))
+ else:
+ self.assertTrue(False) # fail the test if reached here
+ finally:
+ self.action_sleep._invokation_timeout = old_timeout
+
+ get_current_async_loop().run_until_complete(test_invokation_timeout())
+
+
+ def test_7_binary_payloads(self):
+ """test if binary payloads are handled correctly"""
+ self.assertEqual(self.action_get_mixed_content_data(), ('foobar', b'foobar'))
+ self.assertEqual(self.action_get_serialized_data(), b'foobar')
+
+ async def async_call():
+ await self.action_get_mixed_content_data.async_call()
+ return self.action_get_mixed_content_data.last_return_value
+ result = get_current_async_loop().run_until_complete(async_call())
+ self.assertEqual(result, ('foobar', b'foobar'))
+
+ async def async_call():
+ await self.action_get_serialized_data.async_call()
+ return self.action_get_serialized_data.last_return_value
+ result = get_current_async_loop().run_until_complete(async_call())
+ self.assertEqual(result, b'foobar')
+
+
+ def test_8_stop(self):
+ """test if server can be stopped"""
+ self.server.stop()
+
+
+
+class TestRPCServer(TestInprocRPCServer):
+
+ @classmethod
+ def setUpServer(self):
+ self.server = ZMQServer(
+ id=self.server_id,
+ things=[self.thing],
+ logger=self.logger,
+ context=self.context,
+ transports=['INPROC', 'IPC', 'TCP'],
+ tcp_socket_address='tcp://*:59000'
+ )
+
+
+ @classmethod
+ def setUpClient(self):
+ super().setUpClient()
+ self.sync_ipc_client = SyncZMQClient(
+ id=self.client_id+"-sync",
+ server_id=self.server_id,
+ logger=self.logger,
+ handshake=False,
+ transport='IPC'
+ )
+ self.sync_tcp_client = SyncZMQClient(
+ id=self.client_id+"-sync",
+ server_id=self.server_id,
+ logger=self.logger,
+ handshake=False,
+ transport='TCP',
+ socket_address='tcp://localhost:59000'
+ )
+ self.async_ipc_client = AsyncZMQClient(
+ id=self.client_id+"-async",
+ server_id=self.server_id,
+ logger=self.logger,
+ handshake=False,
+ transport='IPC'
+ )
+ self.async_tcp_client = AsyncZMQClient(
+ id=self.client_id+"-async",
+ server_id=self.server_id,
+ logger=self.logger,
+ handshake=False,
+ transport='TCP',
+ socket_address='tcp://localhost:59000'
+ )
+
+
+ def test_1_creation_defaults(self):
+ super().test_1_creation_defaults()
+ # check socket creation defaults
+ self.assertTrue(self.server.ipc_server.socket_address.startswith('ipc://'))
+ self.assertTrue(self.server.tcp_server.socket_address.startswith('tcp://'))
+ self.assertTrue(self.server.tcp_server.socket_address.endswith(':59000'))
+
+
+ def test_2_handshake(self):
+ super().test_2_handshake()
+ self.sync_ipc_client.handshake()
+ self.sync_tcp_client.handshake()
+ async def async_handshake():
+ self.async_ipc_client.handshake()
+ await self.async_ipc_client.handshake_complete()
+ self.async_tcp_client.handshake()
+ await self.async_tcp_client.handshake_complete()
+ get_current_async_loop().run_until_complete(async_handshake())
+
+
+ def test_3_action_abstractions(self):
+ old_sync_client = self.action_echo._sync_zmq_client
+ old_async_client = self.action_echo._async_zmq_client
+ for clients in [(self.sync_tcp_client, self.async_tcp_client), (self.sync_ipc_client, self.async_ipc_client)]:
+ self.action_echo._sync_zmq_client, self.action_echo._async_zmq_client = clients
+ super().test_3_action_abstractions()
+ self.action_echo._sync_zmq_client = old_sync_client
+ self.action_echo._async_zmq_client = old_async_client
+
+
+ def test_4_property_abstractions(self):
+ old_sync_client = self.base_property._sync_zmq_client
+ old_async_client = self.base_property._async_zmq_client
+ for clients in [(self.sync_tcp_client, self.async_tcp_client), (self.sync_ipc_client, self.async_ipc_client)]:
+ self.base_property._sync_zmq_client, self.base_property._async_zmq_client = clients
+ super().test_4_property_abstractions()
+ self.base_property._sync_zmq_client = old_sync_client
+ self.base_property._async_zmq_client = old_async_client
+
+
+ def test_5_thing_execution_context(self):
+ old_sync_client = self.action_echo._sync_zmq_client
+ old_async_client = self.action_echo._async_zmq_client
+ for clients in [(self.sync_tcp_client, self.async_tcp_client), (self.sync_ipc_client, self.async_ipc_client)]:
+ self.action_echo._sync_zmq_client, self.action_echo._async_zmq_client = clients
+ super().test_5_thing_execution_context()
+ self.action_echo._sync_zmq_client = old_sync_client
+ self.action_echo._async_zmq_client = old_async_client
+
+
+ def test_6_server_execution_context(self):
+ old_sync_client = self.action_sleep._sync_zmq_client
+ old_async_client = self.action_sleep._async_zmq_client
+ for clients in [(self.sync_tcp_client, self.async_tcp_client), (self.sync_ipc_client, self.async_ipc_client)]:
+ self.action_sleep._sync_zmq_client, self.action_sleep._async_zmq_client = clients
+ super().test_6_server_execution_context()
+ self.action_sleep._sync_zmq_client = old_sync_client
+ self.action_sleep._async_zmq_client = old_async_client
+
+
+ def test_7_binary_payloads(self):
+ for clients in [(self.sync_tcp_client, self.async_tcp_client), (self.sync_ipc_client, self.async_ipc_client)]:
+ for action in [self.action_get_serialized_data, self.action_get_mixed_content_data]:
+ action._sync_zmq_client, action._async_zmq_client = clients
+ super().test_7_binary_payloads()
+
+
+
+class TestExposedActions(InteractionAffordanceMixin):
+
+ @classmethod
+ def setUpClient(self):
+ super().setUpClient()
+ self.server_id = 'test-action'
+ self.sync_client = SyncZMQClient(
+ id=self.client_id,
+ server_id=self.server_id,
+ logger=self.logger,
+ handshake=False
+ )
+ self.client = self.sync_client
+
+
+ def test_1_exposed_actions(self):
+ """
+ Now that actions can be invoked by a client, test different types of actions
+ and their behaviors
+ """
+ run_thing_with_zmq_server_forked(
+ thing_cls=TestThing,
+ id='test-action',
+ log_level=logging.ERROR+10,
+ done_queue=self.done_queue,
+ prerun_callback=replace_methods_with_actions,
+ )
+ thing = TestThing(id='test-action', log_level=logging.ERROR)
+ self.sync_client.handshake()
+
+ # thing_client = ObjectProxy('test-action', log_level=logging.ERROR) # type: TestThing
+ assert isinstance(thing.action_echo, BoundAction) # type definition
+ action_echo = ZMQAction(
+ resource=thing.action_echo.to_affordance(),
+ sync_client=self.client
+ )
+ self.assertEqual(action_echo(1), 1)
+
+ assert isinstance(thing.action_echo_with_classmethod, BoundAction) # type definition
+ action_echo_with_classmethod = ZMQAction(
+ resource=thing.action_echo_with_classmethod.to_affordance(),
+ sync_client=self.client
+ )
+ self.assertEqual(action_echo_with_classmethod(2), 2)
+
+ assert isinstance(thing.action_echo_async, BoundAction) # type definition
+ action_echo_async = ZMQAction(
+ resource=thing.action_echo_async.to_affordance(),
+ sync_client=self.client
+ )
+ self.assertEqual(action_echo_async("string"), "string")
+
+ assert isinstance(thing.action_echo_async_with_classmethod, BoundAction) # type definition
+ action_echo_async_with_classmethod = ZMQAction(
+ resource=thing.action_echo_async_with_classmethod.to_affordance(),
+ sync_client=self.client
+ )
+ self.assertEqual(action_echo_async_with_classmethod([1, 2]), [1, 2])
+
+ assert isinstance(thing.parameterized_action, BoundAction) # type definition
+ parameterized_action = ZMQAction(
+ resource=thing.parameterized_action.to_affordance(),
+ sync_client=self.client
+ )
+ self.assertEqual(parameterized_action(arg1=1, arg2='hello', arg3=5), ['test-action', 1, 'hello', 5])
+
+ assert isinstance(thing.parameterized_action_async, BoundAction) # type definition
+ parameterized_action_async = ZMQAction(
+ resource=thing.parameterized_action_async.to_affordance(),
+ sync_client=self.client
+ )
+ self.assertEqual(parameterized_action_async(arg1=2.5, arg2='hello', arg3='foo'), ['test-action', 2.5, 'hello', 'foo'])
+
+ assert isinstance(thing.parameterized_action_without_call, BoundAction) # type definition
+ parameterized_action_without_call = ZMQAction(
+ resource=thing.parameterized_action_without_call.to_affordance(),
+ sync_client=self.client
+ )
+ with self.assertRaises(NotImplementedError) as ex:
+ parameterized_action_without_call(arg1=2, arg2='hello', arg3=5)
+ self.assertTrue(str(ex.exception).startswith("Subclasses must implement __call__"))
+
+
+ def test_2_schema_validation(self):
+ """Test if schema validation is working correctly"""
+ self._test_2_json_schema_validation()
+ self._test_2_pydantic_validation()
+
+
+ def _test_2_json_schema_validation(self):
+
+ thing = TestThing(id='test-action', log_level=logging.ERROR)
+ self.sync_client.handshake()
+
+ # JSON schema validation
+ assert isinstance(thing.json_schema_validated_action, BoundAction) # type definition
+ action_affordance = thing.json_schema_validated_action.to_affordance()
+ json_schema_validated_action = ZMQAction(
+ resource=action_affordance,
+ sync_client=self.client
+ )
+ # data with invalid schema
+ with self.assertRaises(Exception) as ex1:
+ json_schema_validated_action(val1='1', val2='hello', val3={'field' : 'value'}, val4=[])
+ self.assertTrue(str(ex1.exception).startswith("'1' is not of type 'integer'"))
+ with self.assertRaises(Exception) as ex2:
+ json_schema_validated_action('1', val2='hello', val3={'field' : 'value'}, val4=[])
+ self.assertTrue(str(ex2.exception).startswith("'1' is not of type 'integer'"))
+ with self.assertRaises(Exception) as ex3:
+ json_schema_validated_action(1, 2, val3={'field' : 'value'}, val4=[])
+ self.assertTrue(str(ex3.exception).startswith("2 is not of type 'string'"))
+ with self.assertRaises(Exception) as ex4:
+ json_schema_validated_action(1, 'hello', val3='field', val4=[])
+ self.assertTrue(str(ex4.exception).startswith("'field' is not of type 'object'"))
+ with self.assertRaises(Exception) as ex5:
+ json_schema_validated_action(1, 'hello', val3={'field' : 'value'}, val4='[]')
+ self.assertTrue(str(ex5.exception).startswith("'[]' is not of type 'array'"))
+ # data with valid schema
+ return_value = json_schema_validated_action(val1=1, val2='hello', val3={'field' : 'value'}, val4=[])
+ self.assertEqual(return_value, {'val1': 1, 'val3': {'field': 'value'}})
+ jsonschema.Draft7Validator(action_affordance.output).validate(return_value)
+
+
+ def _test_2_pydantic_validation(self):
+
+ thing = TestThing(id='test-action', log_level=logging.ERROR)
+ self.sync_client.handshake()
+
+ # Pydantic schema validation
+ assert isinstance(thing.pydantic_validated_action, BoundAction) # type definition
+ action_affordance = thing.pydantic_validated_action.to_affordance()
+ pydantic_validated_action = ZMQAction(
+ resource=action_affordance,
+ sync_client=self.client
+ )
+ # data with invalid schema
+ with self.assertRaises(Exception) as ex1:
+ pydantic_validated_action(val1='1', val2='hello', val3={'field' : 'value'}, val4=[])
+ self.assertTrue(
+ "validation error for pydantic_validated_action_input" in str(ex1.exception) and
+ 'val1' in str(ex1.exception) and 'val2' not in str(ex1.exception) and 'val3' not in str(ex1.exception) and
+ 'val4' not in str(ex1.exception)
+ ) # {obj.name}_input is the pydantic model name
+ with self.assertRaises(Exception) as ex2:
+ pydantic_validated_action('1', val2='hello', val3={'field' : 'value'}, val4=[])
+ self.assertTrue(
+ "validation error for pydantic_validated_action_input" in str(ex2.exception) and
+ 'val1' in str(ex2.exception) and 'val2' not in str(ex2.exception) and 'val3' not in str(ex2.exception) and
+ 'val4' not in str(ex2.exception)
+ )
+ with self.assertRaises(Exception) as ex3:
+ pydantic_validated_action(1, 2, val3={'field' : 'value'}, val4=[])
+ self.assertTrue(
+ "validation error for pydantic_validated_action_input" in str(ex3.exception) and
+ 'val1' not in str(ex3.exception) and 'val2' in str(ex3.exception) and 'val3' not in str(ex3.exception) and
+ 'val4' not in str(ex3.exception)
+ )
+ with self.assertRaises(Exception) as ex4:
+ pydantic_validated_action(1, 'hello', val3='field', val4=[])
+ self.assertTrue(
+ "validation error for pydantic_validated_action_input" in str(ex4.exception) and
+ 'val1' not in str(ex4.exception) and 'val2' not in str(ex4.exception) and 'val3' in str(ex4.exception) and
+ 'val4' not in str(ex4.exception)
+ )
+ with self.assertRaises(Exception) as ex5:
+ pydantic_validated_action(1, 'hello', val3={'field' : 'value'}, val4='[]')
+ self.assertTrue(
+ "validation error for pydantic_validated_action_input" in str(ex5.exception) and
+ 'val1' not in str(ex5.exception) and 'val2' not in str(ex5.exception) and 'val3' not in str(ex5.exception) and
+ 'val4' in str(ex5.exception)
+ )
+ # data with valid schema
+ return_value = pydantic_validated_action(val1=1, val2='hello', val3={'field' : 'value'}, val4=[])
+ self.assertEqual(return_value, {'val2': 'hello', 'val4': []})
+
+
+ def test_3_exit(self):
+ """Exit the server"""
+ exit_message = RequestMessage.craft_with_message_type(
+ sender_id='test-action-client',
+ receiver_id='test-action',
+ message_type=EXIT
+ )
+ self.sync_client.socket.send_multipart(exit_message.byte_array)
+ self.assertEqual(self.done_queue.get(), 'test-action')
+
+
+
+class TestExposedProperties(InteractionAffordanceMixin):
+
+ @classmethod
+ def setUpClient(self):
+ super().setUpClient()
+ self.server_id = 'test-property'
+ self.sync_client = SyncZMQClient(
+ id=self.client_id,
+ server_id=self.server_id,
+ logger=self.logger,
+ handshake=False
+ )
+ self.client = self.sync_client
+
+
+ def test_1_property_abstractions(self):
+
+ run_thing_with_zmq_server_forked(
+ thing_cls=TestThing,
+ id=self.server_id,
+ log_level=logging.ERROR+10,
+ done_queue=self.done_queue,
+ )
+ thing = TestThing(id=self.server_id, log_level=logging.ERROR)
+ self.sync_client.handshake()
+
+ descriptor = thing.properties['number_prop']
+ assert isinstance(descriptor, Property) # type definition
+ number_prop = ZMQProperty(
+ resource=descriptor.to_affordance(thing),
+ sync_client=self.client
+ )
+ self.assertEqual(number_prop.get(), descriptor.default)
+ number_prop.set(100)
+ self.assertEqual(number_prop.get(), 100)
+ number_prop.oneway_set(200)
+ self.assertEqual(number_prop.get(), 200)
+
+ async def test_6_async_property_abstractions(self: "TestThing"):
+ nonlocal number_prop
+ async_client = AsyncZMQClient(
+ id='test-property-async-client',
+ server_id=self.server_id,
+ log_level=logging.ERROR,
+ handshake=False
+ )
+ number_prop._async_zmq_client = async_client
+ async_client.handshake()
+ await async_client.handshake_complete()
+ await number_prop.async_set(300)
+ self.assertEqual(number_prop.get(), 300)
+ await number_prop.async_set(0)
+ self.assertEqual(await number_prop.async_get(), 0)
+
+ get_current_async_loop().run_until_complete(test_6_async_property_abstractions(self))
+
+
+ def test_2_exit(self):
+ exit_message = RequestMessage.craft_with_message_type(
+ sender_id='test-property-client',
+ receiver_id=self.server_id,
+ message_type=EXIT
+ )
+ self.sync_client.socket.send_multipart(exit_message.byte_array)
+ self.assertEqual(self.done_queue.get(), self.server_id)
+
+
+
+class TestExposedEvents(TestRPCServerMixin):
+
+ @classmethod
+ def setUpServer(self):
+ self.server = ZMQServer(
+ id=self.server_id,
+ things=[self.thing],
+ logger=self.logger,
+ context=self.context,
+ transports=['INPROC', 'IPC', 'TCP'],
+ tcp_socket_address='tcp://*:59005'
+ )
+
+ @classmethod
+ def setUpEvents(self):
+ self.event_names = ['test_event', 'test_binary_payload_event', 'test_event_with_json_schema']
+ for event_name in self.event_names:
+ event_affordance = EventAffordance.from_TD(event_name, test_thing_TD)
+ sync_event_client = EventConsumer(
+ id=f"{event_affordance.thing_id}|{event_affordance.name}|sync",
+ event_unique_identifier=get_zmq_unique_identifier_from_event_affordance(event_affordance),
+ socket_address=self.server.event_publisher.socket_address,
+ logger=self.logger,
+ context=self.context
+ )
+ async_event_client = AsyncEventConsumer(
+ id=f"{event_affordance.thing_id}|{event_affordance.name}|async",
+ event_unique_identifier=get_zmq_unique_identifier_from_event_affordance(event_affordance),
+ socket_address=self.server.event_publisher.socket_address,
+ logger=self.logger,
+ context=self.context
+ )
+ event = ZMQEvent(
+ resource=event_affordance,
+ sync_zmq_client=sync_event_client,
+ async_zmq_client=async_event_client,
+ )
+ setattr(self, event_name, event)
+
+
+    def test_1_creation_defaults(self):
+        """test server configuration defaults"""
+        all_things = get_all_sub_things_recusively(self.thing)
+        self.assertTrue(len(all_things) > 1) # run the test only if there are sub things
+        for thing in all_things:
+            assert isinstance(thing, Thing)
+            for name, event in thing.events.values.items():
+                # assertEqual, not assertTrue(a, b): assertTrue's second argument is only a failure message
+                self.assertEqual(event.publisher, self.server.event_publisher)
+                self.assertIsInstance(event._unique_identifier, str)
+
+
+ def test_2_sync_client_event_stream(self):
+ """test if event can be streamed by a synchronous threaded client"""
+ def test_events(event_name: str, expected_data: typing.Any) -> None:
+ event_client = getattr(self, event_name) # type: ZMQEvent
+ event_client._default_scheduling_mode = 'sync'
+ self.assertEqual(
+ get_zmq_unique_identifier_from_event_affordance(event_client._resource),
+ getattr(self.thing, event_client._resource.name)._unique_identifier # type: EventDispatcher
+ )
+ # self.assertEqual(
+ # event_client._sync_zmq_client.socket_address,
+ # self.server.event_publisher.socket_address
+ # )
+ attempts = 100
+ results = []
+ def cb(value):
+ nonlocal results
+ results.append(value)
+ if event_client._callbacks:
+ event_client._callbacks.clear()
+ event_client.subscribe(cb)
+ time.sleep(5) # calm down for event publisher to connect fully as there is no handshake for events
+ self.action_push_events(event_name=event_name, total_number_of_events=attempts)
+
+ for i in range(attempts):
+ if len(results) == attempts:
+ break
+ time.sleep(0.1)
+ self.assertTrue(len(results) >= attempts)
+ self.assertEqual(results, [expected_data]*len(results))
+ event_client.unsubscribe(cb)
+
+ for name, data in zip(
+ self.event_names,
+ ['test data', b'test data',
+ {
+ 'val1': 1,
+ 'val2': 'test',
+ 'val3': {'key': 'value'},
+ 'val4': [1, 2, 3]
+ }
+ ]
+ ):
+ test_events(name, data)
+
+
+ def test_3_async_client_event_stream(self):
+ """test if event can be streamed by an asynchronous client in an async loop"""
+ async def test_events(event_name: str, expected_data: typing.Any) -> None:
+ event_client = getattr(self, event_name) # type: ZMQEvent
+ event_client._default_scheduling_mode = 'async'
+ self.assertEqual(
+ get_zmq_unique_identifier_from_event_affordance(event_client._resource),
+ getattr(self.thing, event_client._resource.name)._unique_identifier # type: EventDispatcher
+ )
+ # self.assertEqual(
+ # event_client._async_zmq_client.socket_address,
+ # self.server.event_publisher.socket_address
+ # )
+ attempts = 100
+ results = []
+ def cb(value):
+ nonlocal results
+ # print("event callback", value)
+ results.append(value)
+ if event_client._callbacks:
+ event_client._callbacks.clear()
+ event_client.subscribe(cb)
+ time.sleep(5) # calm down for event publisher to connect fully as there is no handshake for events
+ self.action_push_events(event_name=event_name, total_number_of_events=attempts)
+
+ for i in range(attempts):
+ if len(results) == attempts:
+ break
+ await asyncio.sleep(0.1)
+ self.assertTrue(len(results) >= attempts)
+ # since we are pushing events in multiple protocols, sometimes the event from the previous test is
+ # still lingering on the socket. So the captured event must be at least the number of attempts.
+ self.assertEqual(results, [expected_data]*len(results))
+ event_client.unsubscribe(cb)
+
+ for name, data in zip(
+ self.event_names,
+ [
+ 'test data',
+ b'test data',
+ {
+ 'val1': 1,
+ 'val2': 'test',
+ 'val3': {'key': 'value'},
+ 'val4': [1, 2, 3]
+ }
+ ]
+ ):
+ get_current_async_loop().run_until_complete(test_events(name, data))
+
+
+ def test_4_other_transports(self):
+ for publisher in [self.server.ipc_event_publisher, self.server.tcp_event_publisher]:
+ self.assertIsInstance(publisher, EventPublisher)
+ self.assertTrue(publisher.socket_address.startswith('tcp://') or publisher.socket_address.startswith('ipc://'))
+ for event_name in self.event_names:
+ event_affordance = EventAffordance.from_TD(event_name, test_thing_TD)
+ event = getattr(self, event_name) # type: ZMQEvent
+ sync_event_client = EventConsumer(
+ id=f"{event_affordance.thing_id}|{event_affordance.name}|sync",
+ event_unique_identifier=get_zmq_unique_identifier_from_event_affordance(event_affordance),
+ socket_address=publisher.socket_address.replace('*', 'localhost'),
+ logger=self.logger
+ )
+ async_event_client = AsyncEventConsumer(
+ id=f"{event_affordance.thing_id}|{event_affordance.name}|async",
+ event_unique_identifier=get_zmq_unique_identifier_from_event_affordance(event_affordance),
+ socket_address=publisher.socket_address.replace('*', 'localhost'),
+ logger=self.logger
+ )
+ event._sync_zmq_client = sync_event_client
+ event._async_zmq_client = async_event_client
+ self.test_2_sync_client_event_stream()
+ self.test_3_async_client_event_stream()
+
+
+ def test_5_exit(self):
+ self.server.stop()
+
+
+
+class TestThingRunRPCServer(TestBrokerMixin):
+ """Finally check if the thing can be run with a ZMQ server"""
+
+ @classmethod
+ def setUpThing(self):
+ self.thing = TestThing(
+ id=self.thing_id,
+ logger=self.logger,
+ remote_accessible_logger=True
+ )
+
+ @classmethod
+ def startServer(self):
+ self.thing.run_with_zmq_server(forked=True)
+ self.server = self.thing.rpc_server
+ self.sync_client = SyncZMQClient(
+ id=self.client_id,
+ server_id=self.thing_id,
+ logger=self.logger,
+ handshake=False,
+ context=self.thing.rpc_server.context,
+ transport='INPROC'
+ )
+ self.async_client = AsyncZMQClient(
+ id=self.client_id+'async',
+ server_id=self.thing_id,
+ logger=self.logger,
+ handshake=False,
+ context=self.thing.rpc_server.context,
+ transport='INPROC'
+ )
+ time.sleep(2)
+
+ def test_1_setup_zmq_server(self):
+ self.assertIsInstance(self.thing.rpc_server, ZMQServer)
+ self.assertIsInstance(self.thing.event_publisher, EventPublisher)
+
+ def test_2_handshake(self):
+ self.sync_client.handshake()
+ self.async_client.handshake()
+ get_current_async_loop().run_until_complete(self.async_client.handshake_complete())
+
+ def test_3_stop(self):
+ self.thing.rpc_server.stop()
+
+
+
+def load_tests(loader, tests, pattern):
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestInprocRPCServer))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestRPCServer))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestExposedActions))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestExposedProperties))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestExposedEvents))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestThingRunRPCServer))
+ return suite
+
+if __name__ == '__main__':
+ runner = TestRunner()
+ runner.run(load_tests(unittest.TestLoader(), None, None))
\ No newline at end of file
diff --git a/tests/test_11_thing_description.py b/tests/test_11_thing_description.py
new file mode 100644
index 00000000..d27db05c
--- /dev/null
+++ b/tests/test_11_thing_description.py
@@ -0,0 +1,240 @@
+import logging
+import unittest
+from pydantic import BaseModel
+from hololinked.constants import ResourceTypes
+from hololinked.td.data_schema import (DataSchema, NumberSchema, StringSchema, BooleanSchema, ObjectSchema,
+ ArraySchema, EnumSchema)
+from hololinked.td.interaction_affordance import (PropertyAffordance, InteractionAffordance,
+ ActionAffordance, EventAffordance)
+from hololinked.core.properties import Property, Number, String, Boolean
+
+try:
+ from .things import OceanOpticsSpectrometer, TestThing
+ from .utils import TestCase, TestRunner
+except ImportError:
+ from things import OceanOpticsSpectrometer, TestThing
+ from utils import TestCase, TestRunner
+
+
+
+
+class TestInteractionAffordance(TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.thing = OceanOpticsSpectrometer(id='test-thing', log_level=logging.ERROR)
+ print(f"Test Interaction Affordance with {cls.__name__}")
+
+ def test_1_associated_objects(self):
+ affordance = PropertyAffordance()
+ affordance.objekt = OceanOpticsSpectrometer.integration_time
+ affordance.owner = self.thing
+ # req. 1. internal test for multiple inheritance of pydantic models as there are many classes to track
+ self.assertIsInstance(affordance, BaseModel)
+ self.assertIsInstance(affordance, DataSchema)
+ self.assertIsInstance(affordance, InteractionAffordance)
+ self.assertTrue(affordance.what, ResourceTypes.PROPERTY)
+ # req. 2. owner must be a Thing
+ self.assertEqual(affordance.owner, self.thing)
+ # req. 3. when owner is set, thing id & thing class is also set
+ self.assertEqual(affordance.thing_id, self.thing.id)
+ self.assertEqual(affordance.thing_cls, self.thing.__class__)
+ # req. 4. objekt must be a Property, since we use a property affordance here
+ self.assertIsInstance(affordance.objekt, Property)
+ # req. 5. objekt must be a property of the owner thing
+ # --- not enforced yet
+ # req. 6. when objekt is set, property name is also set
+ self.assertEqual(affordance.name, OceanOpticsSpectrometer.integration_time.name)
+
+ # test the opposite
+ affordance = PropertyAffordance()
+ # req. 7. accessing any of unset objects should raise an error
+ self.assertTrue(affordance.owner is None)
+ self.assertTrue(affordance.objekt is None)
+ self.assertTrue(affordance.name is None)
+ self.assertTrue(affordance.thing_id is None)
+ self.assertTrue(affordance.thing_cls is None)
+
+ # req. 8. Only the corresponding object can be set for each affordance type
+ affordance = ActionAffordance()
+ with self.assertRaises(ValueError) as ex:
+ affordance.objekt = OceanOpticsSpectrometer.integration_time
+ with self.assertRaises(TypeError) as ex:
+ affordance.objekt = 5
+ self.assertIn("objekt must be instance of Property, Action or Event, given type", str(ex.exception))
+ affordance.objekt = OceanOpticsSpectrometer.connect
+ self.assertTrue(affordance.what, ResourceTypes.ACTION)
+
+ affordance = EventAffordance()
+ with self.assertRaises(ValueError) as ex:
+ affordance.objekt = OceanOpticsSpectrometer.integration_time
+ with self.assertRaises(TypeError) as ex:
+ affordance.objekt = 5
+ self.assertIn("objekt must be instance of Property, Action or Event, given type", str(ex.exception))
+ affordance.objekt = OceanOpticsSpectrometer.intensity_measurement_event
+ self.assertTrue(affordance.what, ResourceTypes.EVENT)
+
+ affordance = PropertyAffordance()
+ with self.assertRaises(ValueError) as ex:
+ affordance.objekt = OceanOpticsSpectrometer.connect
+ with self.assertRaises(TypeError) as ex:
+ affordance.objekt = 5
+ self.assertIn("objekt must be instance of Property, Action or Event, given type", str(ex.exception))
+ affordance.objekt = OceanOpticsSpectrometer.integration_time
+
+
+
+class TestDataSchema(TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.thing = OceanOpticsSpectrometer(id='test-thing', log_level=logging.ERROR)
+ print(f"Test Data Schema with {cls.__name__}")
+
+ """
+ OceanOpticsSpectrometer.trigger_mode # selector
+ OceanOpticsSpectrometer.integration_time # number
+ OceanOpticsSpectrometer.serial_number # string
+ OceanOpticsSpectrometer.nonlinearity_correction # boolean
+ OceanOpticsSpectrometer.custom_background_intensity # typed list float, int
+ OceanOpticsSpectrometer.wavelengths # list float int
+ """
+
+ def test_2_number_schema(self):
+
+ # test implicit generation before actual testing
+ schema = OceanOpticsSpectrometer.integration_time.to_affordance(owner_inst=self.thing)
+ self.assertIsInstance(schema, PropertyAffordance)
+ self.assertEqual(schema.type, 'number')
+
+ integration_time = Number(bounds=(1, 1000), default=100, crop_to_bounds=True, step=1,
+ doc="integration time in milliseconds", metadata=dict(unit="ms"))
+ integration_time.__set_name__(OceanOpticsSpectrometer, 'integration_time')
+ # req. 1. Schema can be created
+ schema = integration_time.to_affordance(owner_inst=self.thing)
+ # print(schema.json(indent=4))
+ self.assertIsInstance(schema, PropertyAffordance)
+ self.assertEqual(schema.type, 'number')
+ # req. 2. Test number schema specific attributes
+ # minimum, maximum, multipleOf
+ self.assertEqual(schema.minimum, integration_time.bounds[0])
+ self.assertEqual(schema.maximum, integration_time.bounds[1])
+ self.assertEqual(schema.multipleOf, integration_time.step)
+ self.assertRaises(AttributeError, lambda: schema.exclusiveMinimum)
+ self.assertRaises(AttributeError, lambda: schema.exclusiveMaximum)
+ # exclusiveMinimum, exclusiveMaximum
+ integration_time.inclusive_bounds = (False, False)
+ integration_time.step = None
+ schema = integration_time.to_affordance(owner_inst=self.thing)
+ self.assertEqual(schema.exclusiveMinimum, integration_time.bounds[0])
+ self.assertEqual(schema.exclusiveMaximum, integration_time.bounds[1])
+ self.assertRaises(AttributeError, lambda: schema.minimum)
+ self.assertRaises(AttributeError, lambda: schema.maximum)
+ self.assertRaises(AttributeError, lambda: schema.multipleOf)
+ # req. 3. oneOf for allow_None to be True
+ integration_time.allow_None = True
+ schema = integration_time.to_affordance(owner_inst=self.thing)
+ self.assertTrue(any(subtype["type"] == 'null' for subtype in schema.oneOf))
+ self.assertTrue(any(subtype["type"] == 'number' for subtype in schema.oneOf))
+ self.assertTrue(len(schema.oneOf), 2)
+ self.assertTrue(not hasattr(schema, "type") or schema.type is None)
+ # print(schema.json(indent=4))
+ # Test some standard data schema values
+ self.assertEqual(schema.default, integration_time.default)
+ self.assertEqual(schema.unit, integration_time.metadata['unit'])
+
+
+ def test_3_string_schema(self):
+
+ # test implicit generation before actual testing
+ schema = OceanOpticsSpectrometer.status.to_affordance(owner_inst=self.thing)
+ self.assertIsInstance(schema, PropertyAffordance)
+
+ status = String(regex=r'^[a-zA-Z0-9]{1,10}$', default='IDLE', doc="status of the spectrometer")
+ status.__set_name__(OceanOpticsSpectrometer, 'status')
+ # req. 1. Schema can be created from the string property
+ schema = status.to_affordance(owner_inst=self.thing)
+ # print(schema.json(indent=4))
+ self.assertIsInstance(schema, PropertyAffordance)
+ self.assertEqual(schema.type, 'string')
+ # req. 2. Test string schema specific attributes
+ self.assertEqual(schema.pattern, status.regex)
+ # req. 3. oneOf for allow_None to be True
+ status.allow_None = True
+ schema = status.to_affordance(owner_inst=self.thing)
+ self.assertTrue(any(subtype["type"] == 'null' for subtype in schema.oneOf))
+ self.assertTrue(any(subtype["type"] == 'string' for subtype in schema.oneOf))
+ self.assertTrue(len(schema.oneOf), 2)
+ self.assertTrue(not hasattr(schema, "type") or schema.type is None)
+ # print(schema.json(indent=4))
+ # Test some standard data schema values
+ self.assertEqual(schema.default, status.default)
+
+
+ def test_4_boolean_schema(self):
+
+ # req. 1. Schema can be created from the boolean property and is a boolean schema based property affordance
+ schema = OceanOpticsSpectrometer.nonlinearity_correction.to_affordance(owner_inst=self.thing)
+ self.assertIsInstance(schema, PropertyAffordance)
+
+ nonlinearity_correction = Boolean(default=True, doc="nonlinearity correction enabled")
+ nonlinearity_correction.__set_name__(OceanOpticsSpectrometer, 'nonlinearity_correction')
+ schema = nonlinearity_correction.to_affordance(owner_inst=self.thing)
+ # print(schema.json(indent=4))
+ self.assertIsInstance(schema, PropertyAffordance)
+ self.assertEqual(schema.type, 'boolean')
+ # req. 2. Test boolean schema specific attributes
+ # None exists for boolean schema
+ # req. 3. oneOf for allow_None to be True
+ nonlinearity_correction.allow_None = True
+ schema = nonlinearity_correction.to_affordance(owner_inst=self.thing)
+ self.assertTrue(any(subtype["type"] == 'null' for subtype in schema.oneOf))
+ self.assertTrue(any(subtype["type"] == 'boolean' for subtype in schema.oneOf))
+ self.assertTrue(len(schema.oneOf), 2)
+ self.assertTrue(not hasattr(schema, "type") or schema.type is None)
+ # print(schema.json(indent=4))
+ # Test some standard data schema values
+ self.assertEqual(schema.default, nonlinearity_correction.default)
+
+
+ # def test_5_array_schema(self):
+
+ # # req. 1. Schema can be created from the array property and is a array schema based property affordance
+ # schema = OceanOpticsSpectrometer.wavelengths.to_affordance(owner_inst=self.thing)
+ # self.assertIsInstance(schema, BaseModel)
+ # self.assertIsInstance(schema, DataSchema)
+ # self.assertIsInstance(schema, PropertyAffordance)
+ # # print(schema.json(indent=4))
+ # self.assertEqual(schema.type, 'array')
+ # # req. 2. Test array schema specific attributes
+ # self.assertEqual(schema.items.type, 'number')
+ # # req. 3. Test some standard data schema values
+ # # self.assertEqual(schema.default, OceanOpticsSpectrometer.custom_background_intensity.default)
+
+
+ # def test_6_enum_schema(self):
+ # pass
+
+
+
+class TestThingDescription(TestCase):
+
+ def test_1_thing_model_generation(self):
+
+ thing = TestThing(id="test-thing-model", log_level=logging.ERROR+10)
+ self.assertIsInstance(thing.get_thing_model(ignore_errors=True).json(), dict)
+ # print(json.dumps(thing.get_thing_model(ignore_errors=True).json(), indent=4))
+ # self.assertIsInstance(thing.get_our_thing_model().json(), dict)
+
+ # start_thing_forked(self.thing_cls, id='test-gui-resource-generation', log_level=logging.WARN)
+ # thing_client = ObjectProxy('test-gui-resource-generation')
+ # self.assertIsInstance(thing_client.get_our_temp_thing_description(), dict)
+ # thing_client.exit()
+
+
+
+
+if __name__ == '__main__':
+ unittest.main(testRunner=TestRunner())
\ No newline at end of file
diff --git a/tests/test_12_protocols_zmq_object_proxy.py b/tests/test_12_protocols_zmq_object_proxy.py
new file mode 100644
index 00000000..b26a9115
--- /dev/null
+++ b/tests/test_12_protocols_zmq_object_proxy.py
@@ -0,0 +1,224 @@
+# only client side tests, server tests already carried out
+import time, unittest, logging
+from hololinked.client.factory import ClientFactory
+from hololinked.client.proxy import ObjectProxy
+from hololinked.utils import complete_pending_tasks_in_current_loop_async
+
+try:
+ from .things import OceanOpticsSpectrometer, TestThing
+ from .utils import TestCase, TestRunner, fake, AsyncTestCase
+except ImportError:
+ from things import OceanOpticsSpectrometer, TestThing
+ from utils import TestCase, TestRunner, fake, AsyncTestCase
+
+
+
+class TestZMQObjectProxyClient(TestCase):
+ """Test the zmq object proxy client"""
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ self.setUpThing()
+
+ @classmethod
+ def setUpThing(self):
+ """Set up the thing for the zmq object proxy client"""
+ self.thing = TestThing(id="test-thing", log_level=logging.ERROR+10)
+ self.thing.run_with_zmq_server(forked=True)
+ self.thing_model = self.thing.get_thing_model(ignore_errors=True).json()
+
+
+ def test_01_creation_and_handshake(self):
+ """Test the creation and handshake of the zmq object proxy client"""
+ thing = ClientFactory.zmq("test-thing", "test-thing", "IPC")
+ self.assertIsInstance(thing, ObjectProxy)
+ self.assertTrue(
+ len(thing.properties) + len(thing.actions) + len(thing.events) >=
+ len(self.thing_model["properties"]) + len(self.thing_model["actions"]) + len(self.thing_model["events"])
+ )
+
+
+ def test_02_invoke_action(self):
+ """Test the invocation of an action on the zmq object proxy client"""
+ thing = ClientFactory.zmq("test-thing", "test-thing", "IPC")
+ self.assertIsInstance(thing, ObjectProxy)
+ # Test invoke_action method with reply
+ self.assertEqual(thing.invoke_action("action_echo", fake.text(max_nb_chars=100)), fake.last)
+ self.assertEqual(thing.invoke_action("action_echo", fake.sentence()), fake.last)
+ self.assertEqual(thing.invoke_action("action_echo", fake.json()), fake.last)
+ # Test invoke_action with dot notation
+ self.assertEqual(thing.action_echo(fake.chrome()), fake.last)
+ self.assertEqual(thing.action_echo(fake.sha256()), fake.last)
+ self.assertEqual(thing.action_echo(fake.address()), fake.last)
+ # Test invoke_action with no reply
+ self.assertEqual(thing.invoke_action("set_non_remote_number_prop", fake.random_number(), oneway=True), None)
+ self.assertEqual(thing.get_non_remote_number_prop(), fake.last)
+ # Test invoke_action in non blocking mode
+ noblock_payload = fake.pylist(20, value_types=[int, float, str, bool])
+ noblock_msg_id = thing.invoke_action("action_echo", noblock_payload, noblock=True)
+ self.assertIsInstance(noblock_msg_id, str)
+ self.assertEqual(thing.invoke_action("action_echo", fake.pylist(20, value_types=[int, float, str, bool])), fake.last)
+ self.assertEqual(thing.invoke_action("action_echo", fake.pylist(10, value_types=[int, float, str, bool])), fake.last)
+ self.assertEqual(thing.read_reply(noblock_msg_id), noblock_payload)
+
+
+ def test_03_rwd_properties(self):
+ """Test the read, write and delete of properties on the zmq object proxy client"""
+ thing = ClientFactory.zmq("test-thing", "test-thing", "IPC")
+ self.assertIsInstance(thing, ObjectProxy)
+ # Test read_property method
+ self.assertIsInstance(thing.read_property("number_prop"), (int, float))
+ self.assertIsInstance(thing.read_property("string_prop"), str)
+ self.assertIn(thing.read_property("selector_prop"), TestThing.selector_prop.objects)
+ # Test write_property method
+ thing.write_property("number_prop", fake.random_number())
+ self.assertEqual(thing.read_property("number_prop"), fake.last)
+ thing.write_property("selector_prop", TestThing.selector_prop.objects[fake.random_int(0, len(TestThing.selector_prop.objects)-1)])
+ self.assertEqual(thing.read_property("selector_prop"), TestThing.selector_prop.objects[fake.last])
+ thing.write_property("observable_list_prop", fake.pylist(25, value_types=[int, float, str, bool]))
+ self.assertEqual(thing.read_property("observable_list_prop"), fake.last)
+ # Test read property through dot notation attribute access
+ self.assertIsInstance(thing.number_prop, (int, float))
+ self.assertIsInstance(thing.string_prop, str)
+ self.assertIn(thing.selector_prop, TestThing.selector_prop.objects)
+ # Test write property through dot notation attribute access
+ thing.number_prop = fake.random_number()
+ self.assertEqual(thing.number_prop, fake.last)
+ thing.selector_prop = TestThing.selector_prop.objects[fake.random_int(0, len(TestThing.selector_prop.objects)-1)]
+ self.assertEqual(thing.selector_prop, TestThing.selector_prop.objects[fake.last])
+ thing.observable_list_prop = fake.pylist(25, value_types=[int, float, str, bool])
+ self.assertEqual(thing.observable_list_prop, fake.last)
+ # Test one way write property
+ thing.write_property("number_prop", fake.random_number(), oneway=True)
+ self.assertEqual(thing.read_property("number_prop"), fake.last)
+ thing.write_property("selector_prop", TestThing.selector_prop.objects[fake.random_int(0, len(TestThing.selector_prop.objects)-1)], oneway=True)
+ self.assertEqual(thing.read_property("selector_prop"), TestThing.selector_prop.objects[fake.last])
+ thing.write_property("observable_list_prop", fake.pylist(25, value_types=[int, float, str, bool]), oneway=True)
+ self.assertEqual(thing.read_property("observable_list_prop"), fake.last)
+ # Test noblock read property
+ noblock_msg_id = thing.read_property("number_prop", noblock=True)
+ self.assertIsInstance(noblock_msg_id, str)
+ self.assertIn(thing.read_property("selector_prop"), TestThing.selector_prop.objects)
+ self.assertIsInstance(thing.read_property("string_prop"), str)
+ self.assertEqual(thing.read_reply(noblock_msg_id), thing.number_prop)
+ # Test noblock write property
+ noblock_msg_id = thing.write_property("number_prop", fake.random_number(), noblock=True)
+ self.assertIsInstance(noblock_msg_id, str)
+ self.assertEqual(thing.read_property("number_prop"), fake.last) # noblock worked
+ self.assertEqual(thing.read_reply(noblock_msg_id), None)
+ # Test exception propagation to client
+ thing.string_prop = 'world'
+ self.assertEqual(thing.string_prop, 'world')
+ with self.assertRaises(ValueError):
+ thing.string_prop = 'WORLD'
+ with self.assertRaises(TypeError):
+ thing.int_prop = '5'
+ # Test non remote prop (non-)availability on client
+ with self.assertRaises(AttributeError):
+ thing.non_remote_number_prop
+
+
+ def test_04_RW_multiple_properties(self):
+
+ # TD is not well defined for this yet, although both client and server separately work.
+ # Test partial list of read write properties
+ thing = ClientFactory.zmq("test-thing", "test-thing", "IPC")
+ self.assertIsInstance(thing, ObjectProxy)
+ # Test read_multiple_properties method
+ thing.write_multiple_properties(
+ number_prop=15,
+ string_prop='foobar'
+ )
+ self.assertEqual(thing.number_prop, 15)
+ self.assertEqual(thing.string_prop, 'foobar')
+ # check prop that was not set in multiple properties
+
+ thing.int_prop = 5
+ thing.selector_prop = 'b'
+ thing.number_prop = -15 # simply override
+ props = thing.read_multiple_properties(
+ names=[
+ 'selector_prop',
+ 'int_prop',
+ 'number_prop',
+ 'string_prop'
+ ]
+ )
+ self.assertEqual(props['selector_prop'], 'b')
+ self.assertEqual(props['int_prop'], 5)
+ self.assertEqual(props['number_prop'], -15)
+ self.assertEqual(props['string_prop'], 'foobar')
+
+
+ def test_05_stop(self):
+ """Test the stop of the zmq object proxy client"""
+ self.thing.rpc_server.stop()
+
+
+
+class TestZMQObjectProxyClientAsync(AsyncTestCase):
+
+ @classmethod
+ def setUpClass(self):
+ super().setUpClass()
+ self.setUpThing()
+
+ @classmethod
+ def setUpThing(self):
+ """Set up the thing for the zmq object proxy client"""
+ self.thing = TestThing(id="test-thing", log_level=logging.ERROR+10)
+ self.thing.run_with_zmq_server(forked=True)
+ self.thing_model = self.thing.get_thing_model(ignore_errors=True).json()
+
+
+ async def test_01_creation_and_handshake(self):
+ """Test the creation and handshake of the zmq object proxy client"""
+ thing = ClientFactory.zmq("test-thing", "test-thing", "IPC")
+ self.assertIsInstance(thing, ObjectProxy)
+ self.assertTrue(
+ len(thing.properties) + len(thing.actions) + len(thing.events) >=
+ len(self.thing_model["properties"]) + len(self.thing_model["actions"]) + len(self.thing_model["events"])
+ )
+
+ async def test_02_invoke_action(self):
+ thing = ClientFactory.zmq("test-thing", "test-thing", "IPC")
+ self.assertIsInstance(thing, ObjectProxy)
+ self.assertEqual(await thing.async_invoke_action("action_echo", fake.text(max_nb_chars=100)), fake.last)
+ self.assertEqual(await thing.async_invoke_action("action_echo", fake.sentence()), fake.last)
+ self.assertEqual(await thing.async_invoke_action("action_echo", fake.json()), fake.last)
+
+
+ async def test_03_rwd_properties(self):
+ """Test the read, write and delete of properties on the zmq object proxy client"""
+ thing = ClientFactory.zmq("test-thing", "test-thing", "IPC")
+ self.assertIsInstance(thing, ObjectProxy)
+ # Test read_property method
+ self.assertIsInstance(await thing.async_read_property("number_prop"), (int, float))
+ self.assertIsInstance(await thing.async_read_property("string_prop"), str)
+ self.assertIn(await thing.async_read_property("selector_prop"), TestThing.selector_prop.objects)
+ # Test write_property method
+ await thing.async_write_property("number_prop", fake.random_number())
+ self.assertEqual(await thing.async_read_property("number_prop"), fake.last)
+ await thing.async_write_property("selector_prop", TestThing.selector_prop.objects[fake.random_int(0, len(TestThing.selector_prop.objects)-1)])
+ self.assertEqual(await thing.async_read_property("selector_prop"), TestThing.selector_prop.objects[fake.last])
+ await thing.async_write_property("observable_list_prop", fake.pylist(25, value_types=[int, float, str, bool]))
+ self.assertEqual(await thing.async_read_property("observable_list_prop"), fake.last)
+
+ # await complete_pending_tasks_in_current_loop_async()
+
+
+
+def load_tests(loader, tests, pattern):
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestZMQObjectProxyClient))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestZMQObjectProxyClientAsync))
+ return suite
+
+if __name__ == '__main__':
+ runner = TestRunner()
+ runner.run(load_tests(unittest.TestLoader(), None, None))
+
+
+
+
+
diff --git a/tests/test_13_protocols_http.py b/tests/test_13_protocols_http.py
new file mode 100644
index 00000000..350cb2a7
--- /dev/null
+++ b/tests/test_13_protocols_http.py
@@ -0,0 +1,238 @@
+from types import SimpleNamespace
+import unittest, time, logging, requests
+from hololinked.constants import ZMQ_TRANSPORTS
+from hololinked.core.meta import ThingMeta
+from hololinked.server.http import HTTPServer
+from hololinked.core.zmq.rpc_server import RPCServer # sets loop policy, TODO: move somewhere else
+from hololinked.utils import get_current_async_loop, issubklass, print_pending_tasks_in_current_loop
+
+try:
+ from .things import OceanOpticsSpectrometer, TestThing
+ from .utils import TestCase, TestRunner
+except ImportError:
+ from things import OceanOpticsSpectrometer, TestThing
+ from utils import TestCase, TestRunner
+
+
+
+class TestHTTPServer(TestCase):
+
+
+ def test_1_init_run_and_stop(self):
+ """Test basic init, run and stop of the HTTP server."""
+
+ # init, run and stop synchronously
+ server = HTTPServer(log_level=logging.ERROR+10)
+ self.assertTrue(server.all_ok)
+ server.listen(forked=True)
+ time.sleep(3)
+ server.stop()
+ time.sleep(2)
+
+ # init, run and stop asynchronously
+ server.listen(forked=True)
+ time.sleep(3)
+ get_current_async_loop().run_until_complete(server.async_stop())
+ time.sleep(2)
+
+ server.listen(forked=True)
+ time.sleep(3)
+ response = requests.post('http://localhost:8080/stop')
+ self.assertIn(response.status_code, [200, 201, 202, 204])
+ time.sleep(2)
+
+ def notest_2_add_interaction_affordance(self):
+ """Test adding an interaction affordance to the HTTP server."""
+
+ # init, run and stop synchronously
+ server = HTTPServer(log_level=logging.ERROR+10)
+ self.assertTrue(server.all_ok)
+
+ # add an interaction affordance
+ server.add_property('/max-intensity', OceanOpticsSpectrometer.max_intensity)
+ server.add_action('/connect', OceanOpticsSpectrometer.connect)
+ server.add_event('/intensity/event', OceanOpticsSpectrometer.intensity_measurement_event)
+
+ self.assertIn('/max-intensity', server.router)
+ self.assertIn('/connect', server.router)
+ self.assertIn('/intensity/event', server.router)
+
+ # replacing interaction affordances on an existing URL path causes a warning
+ self.assertWarns(
+ UserWarning,
+ server.add_property,
+ '/max-intensity', OceanOpticsSpectrometer.last_intensity
+ )
+ self.assertWarns(
+ UserWarning,
+ server.add_action,
+ '/connect', OceanOpticsSpectrometer.disconnect
+ )
+ self.assertWarns(
+ UserWarning,
+ server.add_event,
+ '/intensity/event', OceanOpticsSpectrometer.intensity_measurement_event
+ )
+
+
+ def notest_3_add_thing(self):
+ """Test adding a thing to the HTTP server."""
+
+ # init, run and stop synchronously
+ # self.assertTrue(server.all_ok)
+ # server.listen(forked=True)
+
+ # add a thing, both class and instance
+ for thing in [
+ OceanOpticsSpectrometer(id='test-spectrometer', log_level=logging.ERROR+10),
+ # TestThing(id='test-thing', log_level=logging.ERROR+10)
+ ]:
+ server = HTTPServer(log_level=logging.ERROR+10)
+ old_number_of_rules = len(server.app.wildcard_router.rules) + len(server.router._pending_rules)
+ server.add_thing(thing)
+ self.assertTrue(
+ len(server.app.wildcard_router.rules) + len(server.router._pending_rules) - old_number_of_rules >=
+ len(thing.properties.remote_objects) + len(thing.actions) + len(thing.events)
+ )
+ # server.router.print_rules()
+
+ for thing_meta in [OceanOpticsSpectrometer, TestThing]:
+ self.assertWarns(
+ UserWarning,
+ server.add_thing,
+ thing_meta
+ )
+
+ # don't overwrite already given routes
+ for thing in [
+ OceanOpticsSpectrometer(id='test-spectrometer', log_level=logging.ERROR+10),
+ # TestThing(id='test-thing', log_level=logging.ERROR+10)
+ ]:
+ server = HTTPServer(log_level=logging.ERROR+10)
+ old_number_of_rules = len(server.app.wildcard_router.rules) + len(server.router._pending_rules)
+ server.add_property('/max-intensity/custom', OceanOpticsSpectrometer.max_intensity)
+ server.add_action('/connect/custom', OceanOpticsSpectrometer.connect)
+ server.add_event('/intensity/event/custom', OceanOpticsSpectrometer.intensity_measurement_event)
+ server.add_thing(thing)
+ self.assertIn('/max-intensity/custom', server.router)
+ self.assertIn('/connect/custom', server.router)
+ self.assertIn('/intensity/event/custom', server.router)
+ self.assertTrue(
+ len(server.app.wildcard_router.rules) + len(server.router._pending_rules) - old_number_of_rules >=
+ len(thing.properties.remote_objects) + len(thing.actions) + len(thing.events)
+ )
+
+
+ def notest_4_add_thing_over_zmq_server(self):
+ """extension of previous two tests to complete adding a thing running over a zmq server"""
+ server = HTTPServer(log_level=logging.ERROR+10)
+ old_number_of_rules = len(server.app.wildcard_router.rules) + len(server.router._pending_rules)
+
+ thing = OceanOpticsSpectrometer(id='test-spectrometer', log_level=logging.ERROR+10)
+ thing.run_with_zmq_server(ZMQ_TRANSPORTS.INPROC, forked=True)
+
+ while thing.rpc_server is None:
+ time.sleep(0.01)
+ server.zmq_client_pool.context = thing.rpc_server.context
+ server.add_property('/max-intensity/custom', OceanOpticsSpectrometer.max_intensity)
+ server.add_action('/connect/custom', OceanOpticsSpectrometer.connect)
+ server.add_event('/intensity/event/custom', OceanOpticsSpectrometer.intensity_measurement_event)
+ server.register_id_for_thing(OceanOpticsSpectrometer, 'test-spectrometer')
+ server.add_thing({"INPROC": thing.id})
+
+ self.assertTrue(
+ len(server.app.wildcard_router.rules) + len(server.router._pending_rules) - old_number_of_rules >=
+ len(thing.properties.remote_objects) + len(thing.actions) + len(thing.events)
+ )
+
+ fake_request = SimpleNamespace(path='/test-spectrometer/max-intensity/custom')
+ self.assertTrue(any([rule.matcher.match(fake_request) is not None for rule in server.app.wildcard_router.rules]))
+ fake_request = SimpleNamespace(path='/non-existing-path-that-i-know-will-not-match')
+ self.assertFalse(any([rule.matcher.match(fake_request) is not None for rule in server.app.wildcard_router.rules]))
+ fake_request = SimpleNamespace(path='/test-spectrometer/connect/custom')
+ self.assertTrue(any([rule.matcher.match(fake_request) is not None for rule in server.app.wildcard_router.rules]))
+ fake_request = SimpleNamespace(path='/test-spectrometer/intensity/event/custom')
+ self.assertTrue(any([rule.matcher.match(fake_request) is not None for rule in server.app.wildcard_router.rules]))
+
+ # server.router.print_rules()
+ thing.rpc_server.stop()
+
+
+ def test_5_http_handler_functionalities(self):
+ for thing, port in zip([
+ OceanOpticsSpectrometer(
+ id='test-spectrometer',
+ serial_number='simulation',
+ log_level=logging.ERROR+10,
+ )
+ # TestThing(id='test-thing', log_level=logging.ERROR+10)
+ ], [
+ 8085
+ ]):
+ thing.run_with_http_server(forked=True, port=port)
+ time.sleep(3) # TODO: add a way to check if the server is running
+
+ session = requests.Session()
+ for (method, path, body) in [
+ ('get', '/test-spectrometer/max-intensity', 16384),
+ ('get', '/test-spectrometer/serial-number', 'simulation'),
+ ('get', '/test-spectrometer/integration-time', 1000),
+ ('post', '/test-spectrometer/disconnect', None),
+ ('post', '/test-spectrometer/connect', None)
+ ]:
+ if method == 'get':
+ response = session.get(f'http://localhost:{port}{path}')
+ elif method == 'post':
+ response = session.post(f'http://localhost:{port}{path}')
+ self.assertTrue(response.status_code in [200, 201, 202, 204])
+ if body:
+ self.assertTrue(response.json() == body)
+
+
+ for (method, path, body) in [
+ ('post', '/test-spectrometer/exit', None),
+ ('post', '/stop', None)
+ ]:
+ if method == 'get':
+ response = session.get(f'http://localhost:{port}{path}')
+ elif method == 'post':
+ response = session.post(f'http://localhost:{port}{path}')
+
+
+ def notest_5_run_thing_with_http_server(self):
+ """Test running a thing with an HTTP server."""
+
+ # add a thing, both class and instance
+ for thing, port in zip([
+ OceanOpticsSpectrometer(id='test-spectrometer-stop', log_level=logging.ERROR+10),
+ # TestThing(id='test-thing', log_level=logging.ERROR+10)
+ ], [
+ 8090
+ ]):
+ thing.run_with_http_server(forked=True, port=port)
+ time.sleep(3)
+ response = requests.post(f'http://localhost:{port}/stop')
+ self.assertTrue(response.status_code in [200, 201, 202, 204])
+
+ for thing, port in zip([
+ OceanOpticsSpectrometer(id='test-spectrometer', log_level=logging.ERROR+10),
+ # TestThing(id='test-thing', log_level=logging.ERROR+10)
+ ], [
+ 8091
+ ]):
+ thing.run_with_http_server(forked=True, port=port)
+ time.sleep(3)
+ response = requests.post(f'http://localhost:{port}/stop')
+ self.assertTrue(response.status_code in [200, 201, 202, 204])
+
+
+
+
+def load_tests(loader, tests, pattern):
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestHTTPServer))
+ return suite
+
+if __name__ == '__main__':
+ runner = TestRunner()
+ runner.run(load_tests(unittest.TestLoader(), None, None))
\ No newline at end of file
diff --git a/tests/test_action.py b/tests/test_action.py
deleted file mode 100644
index 0eb300c8..00000000
--- a/tests/test_action.py
+++ /dev/null
@@ -1,262 +0,0 @@
-import asyncio
-import typing
-import unittest
-import logging
-import multiprocessing
-from hololinked.server.dataklasses import ActionInfoValidator
-from hololinked.server.thing import Thing, action
-from hololinked.server.utils import isclassmethod
-from hololinked.param import ParameterizedFunction
-from hololinked.client import ObjectProxy
-from hololinked.server.properties import Number, String, ClassSelector
-try:
- from .utils import TestCase, TestRunner
- from .things import TestThing
-except ImportError:
- from utils import TestCase, TestRunner
- from things import start_thing_forked
-
-
-
-class TestThing(Thing):
-
- def action_echo(self, value):
- return value
-
- @classmethod
- def action_echo_with_classmethod(self, value):
- return value
-
- async def action_echo_async(self, value):
- await asyncio.sleep(0.1)
- return value
-
- @classmethod
- async def action_echo_async_with_classmethod(self, value):
- await asyncio.sleep(0.1)
- return value
-
- class typed_action(ParameterizedFunction):
-
- arg1 = Number(bounds=(0, 10), step=0.5, default=5, crop_to_bounds=True,
- doc='arg1 description')
- arg2 = String(default='hello', doc='arg2 description', regex='[a-z]+')
- arg3 = ClassSelector(class_=(int, float, str),
- default=5, doc='arg3 description')
-
- def __call__(self, instance, arg1, arg2, arg3):
- return instance.instance_name, arg1, arg2, arg3
-
-
- class typed_action_without_call(ParameterizedFunction):
-
- arg1 = Number(bounds=(0, 10), step=0.5, default=5, crop_to_bounds=True,
- doc='arg1 description')
- arg2 = String(default='hello', doc='arg2 description', regex='[a-z]+')
- arg3 = ClassSelector(class_=(int, float, str),
- default=5, doc='arg3 description')
-
-
- class typed_action_async(ParameterizedFunction):
-
- arg1 = Number(bounds=(0, 10), step=0.5, default=5, crop_to_bounds=True,
- doc='arg1 description')
- arg2 = String(default='hello', doc='arg2 description', regex='[a-z]+')
- arg3 = ClassSelector(class_=(int, float, str),
- default=5, doc='arg3 description')
-
- async def __call__(self, instance, arg1, arg2, arg3):
- await asyncio.sleep(0.1)
- return instance.instance_name, arg1, arg2, arg3
-
-
- def __internal__(self, value):
- return value
-
- def incorrectly_decorated_method(self, value):
- return value
-
- def not_an_action(self, value):
- return value
-
- async def not_an_async_action(self, value):
- await asyncio.sleep(0.1)
- return value
-
-
-
-class TestAction(TestCase):
-
- @classmethod
- def setUpClass(self):
- print("test action")
- self.thing_cls = TestThing
-
- @classmethod
- def tearDownClass(self) -> None:
- print("tear down test action")
-
- def test_1_allowed_actions(self):
- # instance method can be decorated with action
- self.assertEqual(self.thing_cls.action_echo, action()(self.thing_cls.action_echo))
- # classmethod can be decorated with action
- self.assertEqual(self.thing_cls.action_echo_with_classmethod,
- action()(self.thing_cls.action_echo_with_classmethod))
- self.assertTrue(isclassmethod(self.thing_cls.action_echo_with_classmethod))
- # async methods can be decorated with action
- self.assertEqual(self.thing_cls.action_echo_async,
- action()(self.thing_cls.action_echo_async))
- # async classmethods can be decorated with action
- self.assertEqual(self.thing_cls.action_echo_async_with_classmethod,
- action()(self.thing_cls.action_echo_async_with_classmethod))
- self.assertTrue(isclassmethod(self.thing_cls.action_echo_async_with_classmethod))
- # parameterized function can be decorated with action
- self.assertEqual(self.thing_cls.typed_action, action(safe=True)(self.thing_cls.typed_action))
- self.assertEqual(self.thing_cls.typed_action_without_call, action(idempotent=True)(self.thing_cls.typed_action_without_call))
- self.assertEqual(self.thing_cls.typed_action_async, action(synchronous=True)(self.thing_cls.typed_action_async))
-
-
- def test_2_remote_info(self):
- # basic check if the remote_info is correct, although this test is not necessary, not recommended and
- # neither particularly useful
- remote_info = self.thing_cls.action_echo._remote_info
- self.assertIsInstance(remote_info, ActionInfoValidator)
- assert isinstance(remote_info, ActionInfoValidator) # type definition
- self.assertTrue(remote_info.isaction)
- self.assertFalse(remote_info.isproperty)
- self.assertFalse(remote_info.isparameterized)
- self.assertFalse(remote_info.iscoroutine)
- self.assertFalse(remote_info.safe)
- self.assertFalse(remote_info.idempotent)
- self.assertFalse(remote_info.synchronous)
-
- remote_info = self.thing_cls.action_echo_async._remote_info
- self.assertIsInstance(remote_info, ActionInfoValidator)
- assert isinstance(remote_info, ActionInfoValidator) # type definition
- self.assertTrue(remote_info.isaction)
- self.assertTrue(remote_info.iscoroutine)
- self.assertFalse(remote_info.isproperty)
- self.assertFalse(remote_info.isparameterized)
- self.assertFalse(remote_info.safe)
- self.assertFalse(remote_info.idempotent)
- self.assertFalse(remote_info.synchronous)
-
- remote_info = self.thing_cls.action_echo_with_classmethod._remote_info
- self.assertIsInstance(remote_info, ActionInfoValidator)
- assert isinstance(remote_info, ActionInfoValidator) # type definition
- self.assertTrue(remote_info.isaction)
- self.assertFalse(remote_info.iscoroutine)
- self.assertFalse(remote_info.isproperty)
- self.assertFalse(remote_info.isparameterized)
- self.assertFalse(remote_info.safe)
- self.assertFalse(remote_info.idempotent)
- self.assertFalse(remote_info.synchronous)
-
- remote_info = self.thing_cls.typed_action._remote_info
- self.assertIsInstance(remote_info, ActionInfoValidator)
- assert isinstance(remote_info, ActionInfoValidator)
- self.assertTrue(remote_info.isaction)
- self.assertFalse(remote_info.iscoroutine)
- self.assertFalse(remote_info.isproperty)
- self.assertTrue(remote_info.isparameterized)
- self.assertTrue(remote_info.safe)
- self.assertFalse(remote_info.idempotent)
- self.assertFalse(remote_info.synchronous)
-
- remote_info = self.thing_cls.typed_action_without_call._remote_info
- self.assertIsInstance(remote_info, ActionInfoValidator)
- assert isinstance(remote_info, ActionInfoValidator)
- self.assertTrue(remote_info.isaction)
- self.assertFalse(remote_info.iscoroutine)
- self.assertFalse(remote_info.isproperty)
- self.assertTrue(remote_info.isparameterized)
- self.assertFalse(remote_info.safe)
- self.assertTrue(remote_info.idempotent)
- self.assertFalse(remote_info.synchronous)
-
- remote_info = self.thing_cls.typed_action_async._remote_info
- self.assertIsInstance(remote_info, ActionInfoValidator)
- assert isinstance(remote_info, ActionInfoValidator)
- self.assertTrue(remote_info.isaction)
- self.assertTrue(remote_info.iscoroutine)
- self.assertFalse(remote_info.isproperty)
- self.assertTrue(remote_info.isparameterized)
- self.assertFalse(remote_info.safe)
- self.assertFalse(remote_info.idempotent)
- self.assertTrue(remote_info.synchronous)
-
-
- def test_3_api_and_invalid_actions(self):
- # done allow action decorator to be terminated without '()' on a method
- with self.assertRaises(TypeError) as ex:
- action(self.thing_cls.incorrectly_decorated_method)
- self.assertTrue(str(ex.exception).startswith("URL_path should be a string, not a function/method, did you decorate"))
-
- # dunder methods cannot be decorated with action
- with self.assertRaises(ValueError) as ex:
- action()(self.thing_cls.__internal__)
- self.assertTrue(str(ex.exception).startswith("dunder objects cannot become remote"))
-
- # only functions and methods can be decorated with action
- for obj in [self.thing_cls, str, 1, 1.0, 'Str', True, None, object(), type, property]:
- with self.assertRaises(TypeError) as ex:
- action()(obj) # not an action
- self.assertTrue(str(ex.exception).startswith("target for action or is not a function/method."))
-
- with self.assertRaises(ValueError) as ex:
- action(safe=True, some_kw=1)
- self.assertTrue(str(ex.exception).startswith("Only 'safe', 'idempotent', 'synchronous' are allowed"))
-
-
- def test_4_exposed_actions(self):
- self.assertTrue(hasattr(self.thing_cls.action_echo, '_remote_info'))
- done_queue = multiprocessing.Queue()
- start_thing_forked(self.thing_cls, instance_name='test-action', done_queue=done_queue,
- log_level=logging.ERROR+10, prerun_callback=expose_actions)
-
- thing_client = ObjectProxy('test-action', log_level=logging.ERROR) # type: TestThing
-
- self.assertTrue(thing_client.action_echo(1) == 1)
- self.assertTrue(thing_client.action_echo_async("string") == "string")
- self.assertTrue(thing_client.typed_action(arg1=1, arg2='hello', arg3=5) == ['test-action', 1, 'hello', 5])
- self.assertTrue(thing_client.typed_action_async(arg1=2.5, arg2='hello', arg3='foo') == ['test-action', 2.5, 'hello', 'foo'])
-
- with self.assertRaises(NotImplementedError) as ex:
- thing_client.typed_action_without_call(arg1=1, arg2='hello', arg3=5),
- self.assertTrue(str(ex.exception).startswith("Subclasses must implement __call__"))
-
- with self.assertRaises(AttributeError) as ex:
- thing_client.__internal__(1)
- self.assertTrue(str(ex.exception).startswith("'ObjectProxy' object has no attribute '__internal__'"))
-
- with self.assertRaises(AttributeError) as ex:
- thing_client.not_an_action("foo")
- self.assertTrue(str(ex.exception).startswith("'ObjectProxy' object has no attribute 'not_an_action'"))
-
- with self.assertRaises(AttributeError) as ex:
- thing_client.not_an_async_action(1)
- self.assertTrue(str(ex.exception).startswith("'ObjectProxy' object has no attribute 'not_an_async_action'"))
-
- thing_client.exit()
-
- self.assertTrue(done_queue.get() == 'test-action')
-
-
-
-def expose_actions(thing_cls):
- action()(thing_cls.action_echo)
- # classmethod can be decorated with action
- action()(thing_cls.action_echo_with_classmethod)
- # async methods can be decorated with action
- action()(thing_cls.action_echo_async)
- # async classmethods can be decorated with action
- action()(thing_cls.action_echo_async_with_classmethod)
- # parameterized function can be decorated with action
- action(safe=True)(thing_cls.typed_action)
- action(idempotent=True)(thing_cls.typed_action_without_call)
- action(synchronous=True)(thing_cls.typed_action_async)
-
-
-
-if __name__ == '__main__':
- unittest.main(testRunner=TestRunner())
\ No newline at end of file
diff --git a/tests/test_events.py b/tests/test_events.py
deleted file mode 100644
index 59956d1e..00000000
--- a/tests/test_events.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import logging, threading, time
-import unittest
-from hololinked.client import ObjectProxy
-from hololinked.server import Thing, action, Event
-from hololinked.server.properties import Number
-try:
- from .utils import TestCase, TestRunner
- from .things import TestThing
-except ImportError:
- from utils import TestCase, TestRunner
- from things import start_thing_forked
-
-
-
-class TestThing(Thing):
-
- total_number_of_events = Number(default=1, bounds=(1, None),
- doc="Total number of events pushed")
-
- test_event = Event(friendly_name="test-event", doc="A test event",
- URL_path='/test-event')
-
- @action()
- def push_events(self):
- threading.Thread(target=self._push_worker).start()
-
- def _push_worker(self):
- for i in range(100):
- self.test_event.push('test data')
- time.sleep(0.01) # 10ms
-
-
-
-class TestEvent(TestCase):
-
- @classmethod
- def setUpClass(self):
- print("test event")
- self.thing_cls = TestThing
- start_thing_forked(self.thing_cls, instance_name='test-event',
- log_level=logging.WARN)
- self.thing_client = ObjectProxy('test-event') # type: TestThing
-
- @classmethod
- def tearDownClass(self):
- print("tear down test event")
- self.thing_client.exit()
-
-
- def test_1_event(self):
- attempts = 100
- self.thing_client.total_number_of_events = attempts
-
- results = []
- def cb(value):
- results.append(value)
-
- self.thing_client.test_event.subscribe(cb)
- time.sleep(3)
- # Calm down for event publisher to connect fully as there is no handshake for events
- self.thing_client.push_events()
-
- for i in range(attempts):
- if len(results) == attempts:
- break
- time.sleep(0.1)
-
- self.assertEqual(len(results), attempts)
- self.assertEqual(results, ['test data']*attempts)
- self.thing_client.test_event.unsubscribe(cb)
-
-
-
-if __name__ == '__main__':
- unittest.main(testRunner=TestRunner())
\ No newline at end of file
diff --git a/tests/test_thing_init.py b/tests/test_thing_init.py
deleted file mode 100644
index 8f5f83a4..00000000
--- a/tests/test_thing_init.py
+++ /dev/null
@@ -1,204 +0,0 @@
-import unittest
-import logging
-import warnings
-
-from hololinked.server import Thing
-from hololinked.server.schema_validators import JsonSchemaValidator, BaseSchemaValidator
-from hololinked.server.serializers import JSONSerializer, PickleSerializer, MsgpackSerializer
-from hololinked.server.utils import get_default_logger
-from hololinked.server.logger import RemoteAccessHandler
-from hololinked.client import ObjectProxy
-try:
- from .things import OceanOpticsSpectrometer, start_thing_forked
- from .utils import TestCase
-except ImportError:
- from things import OceanOpticsSpectrometer, start_thing_forked
- from utils import TestCase
-
-
-class TestThing(TestCase):
- """Test Thing class from hololinked.server.thing module."""
-
- @classmethod
- def setUpClass(self):
- print("test Thing init")
- self.thing_cls = Thing
-
- @classmethod
- def tearDownClass(self) -> None:
- print("tear down test Thing init")
-
- def test_1_instance_name(self):
- # instance name must be a string and cannot be changed after set
- thing = self.thing_cls(instance_name="test_instance_name", log_level=logging.WARN)
- self.assertEqual(thing.instance_name, "test_instance_name")
- with self.assertRaises(ValueError):
- thing.instance_name = "new_instance"
- with self.assertRaises(NotImplementedError):
- del thing.instance_name
-
-
- def test_2_logger(self):
- # logger must have remote access handler if logger_remote_access is True
- logger = get_default_logger("test_logger", log_level=logging.WARN)
- thing = self.thing_cls(instance_name="test_logger_remote_access", logger=logger, logger_remote_access=True)
- self.assertEqual(thing.logger, logger)
- self.assertTrue(any(isinstance(handler, RemoteAccessHandler) for handler in thing.logger.handlers))
-
- # Therefore also check the false condition
- logger = get_default_logger("test_logger_2", log_level=logging.WARN)
- thing = self.thing_cls(instance_name="test_logger_without_remote_access", logger=logger, logger_remote_access=False)
- self.assertFalse(any(isinstance(handler, RemoteAccessHandler) for handler in thing.logger.handlers))
- # NOTE - logger is modifiable after instantiation
- # What if user gives his own remote access handler?
-
-
- def test_3_JSON_serializer(self):
- # req 1 - if serializer is not provided, default is JSONSerializer and http and zmq serializers are same
- thing = self.thing_cls(instance_name="test_serializer_when_not_provided", log_level=logging.WARN)
- self.assertIsInstance(thing.zmq_serializer, JSONSerializer)
- self.assertEqual(thing.http_serializer, thing.zmq_serializer)
-
- # req 2 - similarly, serializer keyword argument creates same serialitzer for both zmq and http transports
- serializer = JSONSerializer()
- thing = self.thing_cls(instance_name="test_common_serializer", serializer=serializer, log_level=logging.WARN)
- self.assertEqual(thing.zmq_serializer, serializer)
- self.assertEqual(thing.http_serializer, serializer)
-
- # req 3 - serializer keyword argument must be JSONSerializer only, because this keyword should
- # what is common to both zmq and http
- with self.assertRaises(TypeError) as ex:
- serializer = PickleSerializer()
- thing = self.thing_cls(instance_name="test_common_serializer_nonJSON", serializer=serializer, log_level=logging.WARN)
- self.assertTrue(str(ex), "serializer key word argument must be JSONSerializer")
-
- # req 4 - zmq_serializer and http_serializer is differently instantiated if zmq_serializer and http_serializer
- # keyword arguments are provided, albeit the same serializer type
- serializer = JSONSerializer()
- thing = self.thing_cls(instance_name="test_common_serializer", zmq_serializer=serializer, log_level=logging.WARN)
- self.assertEqual(thing.zmq_serializer, serializer)
- self.assertNotEqual(thing.http_serializer, serializer) # OR, same as line below
- self.assertNotEqual(thing.http_serializer, thing.zmq_serializer)
- self.assertIsInstance(thing.http_serializer, JSONSerializer)
-
-
- def test_4_other_serializers(self):
- # req 1 - http_serializer cannot be anything except than JSON
- with self.assertRaises(ValueError) as ex:
- # currenty this has written this as ValueError although TypeError is more appropriate
- serializer = PickleSerializer()
- thing = self.thing_cls(instance_name="test_http_serializer_nonJSON", http_serializer=serializer,
- log_level=logging.WARN)
- self.assertTrue(str(ex), "invalid JSON serializer option")
- # test the same with MsgpackSerializer
- with self.assertRaises(ValueError) as ex:
- # currenty this has written this as ValueError although TypeError is more appropriate
- serializer = MsgpackSerializer()
- thing = self.thing_cls(instance_name="test_http_serializer_nonJSON", http_serializer=serializer,
- log_level=logging.WARN)
- self.assertTrue(str(ex), "invalid JSON serializer option")
-
- # req 2 - http_serializer and zmq_serializer can be different
- warnings.filterwarnings("ignore", category=UserWarning)
- http_serializer = JSONSerializer()
- zmq_serializer = PickleSerializer()
- thing = self.thing_cls(instance_name="test_different_serializers_1", http_serializer=http_serializer,
- zmq_serializer=zmq_serializer, log_level=logging.WARN)
- self.assertNotEqual(thing.http_serializer, thing.zmq_serializer)
- self.assertEqual(thing.http_serializer, http_serializer)
- self.assertEqual(thing.zmq_serializer, zmq_serializer)
- warnings.resetwarnings()
-
- # try the same with MsgpackSerializer
- http_serializer = JSONSerializer()
- zmq_serializer = MsgpackSerializer()
- thing = self.thing_cls(instance_name="test_different_serializers_2", http_serializer=http_serializer,
- zmq_serializer=zmq_serializer, log_level=logging.WARN)
- self.assertNotEqual(thing.http_serializer, thing.zmq_serializer)
- self.assertEqual(thing.http_serializer, http_serializer)
- self.assertEqual(thing.zmq_serializer, zmq_serializer)
-
- # req 3 - pickle serializer should raise warning
- http_serializer = JSONSerializer()
- zmq_serializer = PickleSerializer()
- with self.assertWarns(expected_warning=UserWarning):
- thing = self.thing_cls(instance_name="test_pickle_serializer_warning", http_serializer=http_serializer,
- zmq_serializer=zmq_serializer, log_level=logging.WARN)
-
-
- def test_5_schema_validator(self):
- # schema_validator must be a class or subclass of BaseValidator
- validator = JsonSchemaValidator(schema=True)
- with self.assertRaises(ValueError):
- thing = self.thing_cls(instance_name="test_schema_validator_with_instance", schema_validator=validator)
-
- validator = JsonSchemaValidator
- thing = self.thing_cls(instance_name="test_schema_validator_with_subclass", schema_validator=validator,
- log_level=logging.WARN)
- self.assertEqual(thing.schema_validator, validator)
-
- validator = BaseSchemaValidator
- thing = self.thing_cls(instance_name="test_schema_validator_with_subclass", schema_validator=validator,
- log_level=logging.WARN)
- self.assertEqual(thing.schema_validator, validator)
-
-
- def test_6_state(self):
- # state property must be None when no state machine is present
- thing = self.thing_cls(instance_name="test_no_state_machine", log_level=logging.WARN)
- self.assertIsNone(thing.state)
- self.assertFalse(hasattr(thing, 'state_machine'))
- # detailed tests should be in another file
-
-
- def test_7_servers_init(self):
- # rpc_server, message_broker and event_publisher must be None when not run()
- thing = self.thing_cls(instance_name="test_servers_init", log_level=logging.WARN)
- self.assertIsNone(thing.rpc_server)
- self.assertIsNone(thing.message_broker)
- self.assertIsNone(thing.event_publisher)
-
-
- def test_8_resource_generation(self):
- # basic test only to make sure nothing is fundamentally wrong
- thing = self.thing_cls(instance_name="test_servers_init", log_level=logging.WARN)
- # thing._prepare_resources()
- self.assertIsInstance(thing.get_thing_description(), dict)
- self.assertIsInstance(thing.httpserver_resources, dict)
- self.assertIsInstance(thing.zmq_resources, dict)
-
- start_thing_forked(self.thing_cls, instance_name='test-gui-resource-generation', log_level=logging.WARN)
- thing_client = ObjectProxy('test-gui-resource-generation')
- self.assertIsInstance(thing_client.gui_resources, dict)
- thing_client.exit()
-
-
-
-class TestOceanOpticsSpectrometer(TestThing):
-
- @classmethod
- def setUpClass(self):
- print("test OceanOpticsSpectrometer init")
- self.thing_cls = OceanOpticsSpectrometer
-
- @classmethod
- def tearDownClass(self) -> None:
- print("tear down test OceanOpticsSpectrometer init")
-
- def test_6_state(self):
- # req 1 - state property must be None when no state machine is present
- thing = self.thing_cls(instance_name="test_state_machine", log_level=logging.WARN)
- self.assertIsNotNone(thing.state)
- self.assertTrue(hasattr(thing, 'state_machine'))
- # detailed tests should be in another file
-
-
-
-if __name__ == '__main__':
- try:
- from utils import TestRunner
- except ImportError:
- from .utils import TestRunner
-
- unittest.main(testRunner=TestRunner())
-
diff --git a/tests/test_thing_run.py b/tests/test_thing_run.py
deleted file mode 100644
index d778350a..00000000
--- a/tests/test_thing_run.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import threading
-import typing
-import unittest
-import multiprocessing
-import logging
-import zmq.asyncio
-
-from hololinked.server import Thing
-from hololinked.client import ObjectProxy
-from hololinked.server.eventloop import EventLoop
-try:
- from .things import TestThing, OceanOpticsSpectrometer
- from .utils import TestCase
-except ImportError:
- from things import TestThing, OceanOpticsSpectrometer
- from utils import TestCase
-
-
-class TestThingRun(TestCase):
-
- @classmethod
- def setUpClass(self):
- print("test Thing run")
- self.thing_cls = Thing
-
- @classmethod
- def tearDownClass(self):
- # Code to clean up any resources or configurations after each test case
- print("tear down test Thing run")
-
- def test_thing_run_and_exit(self):
- # should be able to start and end with exactly the specified protocols
- done_queue = multiprocessing.Queue()
- multiprocessing.Process(target=start_thing, args=('test-run', ), kwargs=dict(done_queue=done_queue),
- daemon=True).start()
- thing_client = ObjectProxy('test-run', log_level=logging.WARN) # type: Thing
- self.assertEqual(thing_client.get_protocols(), ['IPC'])
- thing_client.exit()
- self.assertEqual(done_queue.get(), 'test-run')
-
- done_queue = multiprocessing.Queue()
- multiprocessing.Process(target=start_thing, args=('test-run-2', ['IPC', 'INPROC'],),
- kwargs=dict(done_queue=done_queue), daemon=True).start()
- thing_client = ObjectProxy('test-run-2', log_level=logging.WARN) # type: Thing
- self.assertEqual(thing_client.get_protocols(), ['INPROC', 'IPC']) # order should reflect get_protocols() action
- thing_client.exit()
- self.assertEqual(done_queue.get(), 'test-run-2')
-
- done_queue = multiprocessing.Queue()
- multiprocessing.Process(target=start_thing, args=('test-run-3', ['IPC', 'INPROC', 'TCP'], 'tcp://*:59000'),
- kwargs=dict(done_queue=done_queue), daemon=True).start()
- thing_client = ObjectProxy('test-run-3', log_level=logging.WARN) # type: Thing
- self.assertEqual(thing_client.get_protocols(), ['INPROC', 'IPC', 'TCP'])
- thing_client.exit()
- self.assertEqual(done_queue.get(), 'test-run-3')
-
-
- # def test_thing_run_and_exit_with_httpserver(self):
- # EventLoop.get_async_loop() # creates the event loop if absent
- # context = zmq.asyncio.Context()
- # T = threading.Thread(target=start_thing_with_http_server, args=('test-run-4', context), daemon=True)
- # T.start()
- # # difficult case, currently not supported - https://github.com/zeromq/pyzmq/issues/1354
- # thing_client = ObjectProxy('test-run-4', log_level=logging.WARN, context=context) # type: Thing
- # self.assertEqual(thing_client.get_protocols(), ['INPROC'])
- # thing_client.exit()
- # T.join()
-
-
-class TestOceanOpticsSpectrometer(TestThing):
-
- @classmethod
- def setUpClass(self):
- self.thing_cls = OceanOpticsSpectrometer
-
-
-
-
-def start_thing(instance_name : str, protocols : typing.List[str] = ['IPC'], tcp_socket_address : str = None,
- done_queue : typing.Optional[multiprocessing.Queue] = None) -> None:
- thing = TestThing(instance_name=instance_name) #, log_level=logging.WARN)
- thing.run(zmq_protocols=protocols, tcp_socket_address=tcp_socket_address)
- if done_queue is not None:
- done_queue.put(instance_name)
-
-
-def start_thing_with_http_server(instance_name : str, context : zmq.asyncio.Context) -> None:
- EventLoop.get_async_loop() # creates the event loop if absent
- thing = TestThing(instance_name=instance_name)# , log_level=logging.WARN)
- thing.run_with_http_server(context=context)
-
-
-if __name__ == '__main__':
- try:
- from utils import TestRunner
- except ImportError:
- from .utils import TestRunner
- unittest.main(testRunner=TestRunner())
diff --git a/tests/things/__init__.py b/tests/things/__init__.py
index f6b62c1f..fa61194b 100644
--- a/tests/things/__init__.py
+++ b/tests/things/__init__.py
@@ -1,4 +1,4 @@
-from .test_thing import TestThing
+from .test_thing import TestThing, test_thing_TD
from .spectrometer import OceanOpticsSpectrometer
-from .starter import start_thing_forked
+from .starter import run_thing_with_zmq_server_forked
diff --git a/tests/things/spectrometer.py b/tests/things/spectrometer.py
index 22c9cc2e..681d4967 100644
--- a/tests/things/spectrometer.py
+++ b/tests/things/spectrometer.py
@@ -6,12 +6,13 @@
from dataclasses import dataclass
-from hololinked.server import Thing, Property, action, Event
-from hololinked.server.properties import (String, Integer, Number, List, Boolean,
+from hololinked.core import Thing, Property, action, Event
+from hololinked.core.properties import (String, Integer, Number, List, Boolean,
Selector, ClassSelector, TypedList)
-from hololinked.server import HTTP_METHODS, StateMachine
-from hololinked.server import JSONSerializer
-from hololinked.server.td import JSONSchema
+from hololinked.core.state_machine import StateMachine
+from hololinked.serializers import JSONSerializer
+from hololinked.schema_validators import JSONSchema
+from hololinked.server.http import HTTPServer
@dataclass
@@ -73,25 +74,26 @@ class OceanOpticsSpectrometer(Thing):
states = States
- status = String(URL_path='/status', readonly=True, fget=lambda self: self._status,
+ status = String(readonly=True, fget=lambda self: self._status,
doc="descriptive status of current operation") # type: str
- serial_number = String(default=None, allow_None=True, URL_path='/serial-number',
- doc="serial number of the spectrometer to connect/or connected")# type: str
+ serial_number = String(default=None, allow_None=True,
+ doc="serial number of the spectrometer to connect/or connected")# type: str
last_intensity = ClassSelector(default=None, allow_None=True, class_=Intensity,
- URL_path='/intensity', doc="last measurement intensity (in arbitrary units)") # type: Intensity
+ doc="last measurement intensity (in arbitrary units)") # type: Intensity
- intensity_measurement_event = Event(friendly_name='intensity-measurement-event', URL_path='/intensity/measurement-event',
+ intensity_measurement_event = Event(
doc="event generated on measurement of intensity, max 30 per second even if measurement is faster.",
schema=Intensity.schema)
reference_intensity = ClassSelector(default=None, allow_None=True, class_=Intensity,
- URL_path="/intensity/reference", doc="reference intensity to overlap in background") # type: Intensity
+ doc="reference intensity to overlap in background") # type: Intensity
- def __init__(self, instance_name : str, serial_number : typing.Optional[str] = None, **kwargs) -> None:
- super().__init__(instance_name=instance_name, serial_number=serial_number, **kwargs)
+ def __init__(self, id: str, serial_number: typing.Optional[str] = None, **kwargs) -> None:
+ super().__init__(id=id, serial_number=serial_number, **kwargs)
+ self.set_status("disconnected")
if serial_number is not None:
self.connect()
self._acquisition_thread = None
@@ -103,7 +105,7 @@ def set_status(self, *args) -> None:
else:
self._status = ' '.join(args)
- @action(URL_path='/connect', http_method=HTTP_METHODS.POST, input_schema=connect_args)
+ @action(input_schema=connect_args)
def connect(self, serial_number : str = None, trigger_mode : int = None, integration_time : float = None) -> None:
if serial_number is not None:
self.serial_number = serial_number
@@ -125,32 +127,33 @@ def connect(self, serial_number : str = None, trigger_mode : int = None, integra
self.logger.debug(f"opened device with serial number {self.serial_number} with model {self.model}")
self.set_status("ready to start acquisition")
- model = String(default=None, URL_path='/model', allow_None=True, readonly=True,
+ model = String(default=None, allow_None=True, readonly=True,
doc="model of the connected spectrometer",
fget=lambda self: self._model if self.state_machine.current_state != self.states.DISCONNECTED else None
) # type: str
- wavelengths = List(default=None, allow_None=True, item_type=(float, int), readonly=True,
- URL_path='/supported-wavelengths', doc="wavelength bins of measurement",
+ wavelengths = List(default=[], item_type=(float, int), readonly=True, allow_None=False,
+ # this is only for testing, be careful
+ doc="wavelength bins of measurement",
fget=lambda self: self._wavelengths if self.state_machine.current_state != self.states.DISCONNECTED else None,
) # type: typing.List[typing.Union[float, int]]
- pixel_count = Integer(default=None, allow_None=True, URL_path='/pixel-count', readonly=True,
+ pixel_count = Integer(default=None, allow_None=True, readonly=True,
doc="number of points in wavelength",
fget=lambda self: self._pixel_count if self.state_machine.current_state != self.states.DISCONNECTED else None
) # type: int
- max_intensity = Number(readonly=True, URL_path="/intensity/max-allowed",
+ max_intensity = Number(readonly=True,
doc="""the maximum intensity that can be returned by the spectrometer in (a.u.).
It's possible that the spectrometer saturates already at lower values.""",
fget=lambda self: self._max_intensity if self.state_machine.current_state != self.states.DISCONNECTED else None
) # type: float
- @action(URL_path='/disconnect', http_method=HTTP_METHODS.POST)
+ @action()
def disconnect(self):
self.state_machine.current_state = self.states.DISCONNECTED
- trigger_mode = Selector(objects=[0, 1, 2, 3, 4], default=0, URL_path='/trigger-mode', observable=True,
+ trigger_mode = Selector(objects=[0, 1, 2, 3, 4], default=0, observable=True,
doc="""0 = normal/free running, 1 = Software trigger, 2 = Ext. Trigger Level,
3 = Ext. Trigger Synchro/ Shutter mode, 4 = Ext. Trigger Edge""") # type: int
@@ -163,11 +166,11 @@ def get_trigger_mode(self):
try:
return self._trigger_mode
except:
- return self.properties["trigger_mode"].default
+ return OceanOpticsSpectrometer.properties["trigger_mode"].default
integration_time = Number(default=1000, bounds=(0.001, None), crop_to_bounds=True,
- URL_path='/integration-time', observable=True,
+ observable=True,
doc="integration time of measurement in milliseconds") # type: float
@integration_time.setter
@@ -179,25 +182,23 @@ def get_integration_time(self) -> float:
try:
return self._integration_time
except:
- return self.properties["integration_time"].default
+ return OceanOpticsSpectrometer.properties["integration_time"].default
background_correction = Selector(objects=['AUTO', 'CUSTOM', None], default=None, allow_None=True,
- URL_path='/background-correction',
doc="set True for Seabreeze internal black level correction") # type: typing.Optional[str]
- custom_background_intensity = TypedList(item_type=(float, int),
- URL_path='/background-correction/user-defined-intensity') # type: typing.List[typing.Union[float, int]]
+ custom_background_intensity = TypedList(item_type=(float, int)) # type: typing.List[typing.Union[float, int]]
- nonlinearity_correction = Boolean(default=False, URL_path='/nonlinearity-correction',
+ nonlinearity_correction = Boolean(default=False,
doc="automatic correction of non linearity in detector CCD") # type: bool
- @action(URL_path='/acquisition/start', http_method=HTTP_METHODS.POST)
+ @action()
def start_acquisition(self) -> None:
self.stop_acquisition() # Just a shield
self._acquisition_thread = threading.Thread(target=self.measure)
self._acquisition_thread.start()
- @action(URL_path='/acquisition/stop', http_method=HTTP_METHODS.POST)
+ @action()
def stop_acquisition(self) -> None:
if self._acquisition_thread is not None:
self.logger.debug(f"stopping acquisition thread with thread-ID {self._acquisition_thread.ident}")
@@ -260,14 +261,14 @@ def measure(self, max_count = None):
self.set_status(f'error during acquisition - {str(ex)}, {type(ex)}')
self.state_machine.current_state = self.states.FAULT
- @action(URL_path='/acquisition/single', http_method=HTTP_METHODS.POST)
+ @action()
def start_acquisition_single(self):
self.stop_acquisition() # Just a shield
self._acquisition_thread = threading.Thread(target=self.measure, args=(1,))
self._acquisition_thread.start()
self.logger.info("data event will be pushed once acquisition is complete.")
- @action(URL_path='/reset-fault', http_method=HTTP_METHODS.POST)
+ @action()
def reset_fault(self):
self.state_machine.set_state(self.states.ON)
@@ -286,4 +287,20 @@ def test_echo(self, value):
FAULT=[stop_acquisition, reset_fault]
)
- logger_remote_access = True
\ No newline at end of file
+ logger_remote_access = True
+
+def run_zmq_server():
+ thing = OceanOpticsSpectrometer(id='test_spectrometer')
+ thing.run_with_zmq_server()
+
+
+def run_http_server():
+ thing = OceanOpticsSpectrometer(id='test_spectrometer')
+ server = HTTPServer()
+ server.add_things(thing)
+ server.listen()
+
+
+if __name__ == '__main__':
+ run_zmq_server()
+ # run_http_server()
diff --git a/tests/things/starter.py b/tests/things/starter.py
index d29156a6..4b6887c8 100644
--- a/tests/things/starter.py
+++ b/tests/things/starter.py
@@ -1,104 +1,152 @@
+import asyncio
import typing, multiprocessing, threading, logging, queue
-from hololinked.server import HTTPServer, ThingMeta, Thing
+from hololinked.exceptions import BreakLoop
+from hololinked.core.zmq.brokers import AsyncZMQServer
+from hololinked.core.zmq.message import EXIT
+from hololinked.core import ThingMeta, Thing
+from hololinked.utils import get_current_async_loop
-def run_thing(
- thing_cls : ThingMeta,
- instance_name : str,
- protocols : typing.List[str] = ['IPC'],
- tcp_socket_address : str = None,
- done_queue : typing.Optional[multiprocessing.Queue] = None,
- log_level : int = logging.WARN,
- prerun_callback : typing.Optional[typing.Callable] = None
+def run_thing_with_zmq_server(
+ thing_cls: ThingMeta,
+ id: str,
+ protocols: typing.List[str] = ['IPC'],
+ tcp_socket_address: str = None,
+ done_queue: typing.Optional[multiprocessing.Queue] = None,
+ log_level: int = logging.WARN,
+ prerun_callback: typing.Optional[typing.Callable] = None
) -> None:
if prerun_callback:
prerun_callback(thing_cls)
- thing = thing_cls(instance_name=instance_name, log_level=log_level) # type: Thing
- thing.run(zmq_protocols=protocols, tcp_socket_address=tcp_socket_address)
+ thing = thing_cls(id=id, log_level=log_level) # type: Thing
+ thing.run_with_zmq_server(
+ zmq_protocols=protocols,
+ tcp_socket_address=tcp_socket_address
+ )
if done_queue is not None:
- done_queue.put(instance_name)
+ done_queue.put(id)
def run_thing_with_http_server(
- thing_cls : ThingMeta,
- instance_name : str,
- done_queue : queue.Queue = None,
- log_level : int = logging.WARN,
- prerun_callback : typing.Optional[typing.Callable] = None
+ thing_cls: ThingMeta,
+ id: str,
+ done_queue: queue.Queue = None,
+ log_level: int = logging.WARN,
+ prerun_callback: typing.Optional[typing.Callable] = None
) -> None:
if prerun_callback:
prerun_callback(thing_cls)
- thing = thing_cls(instance_name=instance_name, log_level=log_level) # type: Thing
+ thing = thing_cls(id=id, log_level=log_level) # type: Thing
thing.run_with_http_server()
if done_queue is not None:
- done_queue.put(instance_name)
+ done_queue.put(id)
-def start_http_server(instance_name : str) -> None:
- H = HTTPServer([instance_name], log_level=logging.WARN)
+def start_http_server(id : str) -> None:
+ H = HTTPServer([id], log_level=logging.WARN)
H.listen()
-def start_thing_forked(
- thing_cls : ThingMeta,
- instance_name : str,
- protocols : typing.List[str] = ['IPC'],
- tcp_socket_address : str = None,
- done_queue : typing.Optional[multiprocessing.Queue] = None,
- log_level : int = logging.WARN,
- prerun_callback : typing.Optional[typing.Callable] = None,
- as_process : bool = True,
- http_server : bool = False
-):
+def run_thing_with_zmq_server_forked(
+ thing_cls: ThingMeta,
+ id: str,
+ log_level: int = logging.WARN,
+ protocols: typing.List[str] = ['IPC'],
+ tcp_socket_address: str = None,
+ prerun_callback: typing.Optional[typing.Callable] = None,
+ as_process: bool = True,
+ done_queue: typing.Optional[multiprocessing.Queue] = None,
+) -> typing.Union[multiprocessing.Process, threading.Thread]:
+ """
+    Run a Thing in a ZMQ server by forking from the main process or thread.
+
+ Parameters:
+ -----------
+ thing_cls: ThingMeta
+ The class of the Thing to be run.
+ id: str
+ The id of the Thing to be run.
+ log_level: int
+ The log level to be used for the Thing. Default is logging.WARN.
+ protocols: list of str
+ The ZMQ protocols to be used for the Thing. Default is ['IPC'].
+ tcp_socket_address: str
+ The TCP socket address to be used for the Thing. Default is None.
+ prerun_callback: callable
+ A callback function to be called before running the Thing. Default is None.
+ as_process: bool
+ Whether to run the Thing in a separate process or thread. Default is True (as process).
+ done_queue: multiprocessing.Queue
+ A queue to be used for communication between processes. Default is None.
+ """
+
if as_process:
P = multiprocessing.Process(
- target=run_thing,
+ target=run_thing_with_zmq_server,
kwargs=dict(
thing_cls=thing_cls,
- instance_name=instance_name,
+ id=id,
protocols=protocols,
tcp_socket_address=tcp_socket_address,
done_queue=done_queue,
log_level=log_level,
prerun_callback=prerun_callback
- ), daemon=True
+ ),
+ daemon=True
)
P.start()
- if not http_server:
- return P
- multiprocessing.Process(
- target=start_http_server,
- args=(instance_name,),
- daemon=True
- ).start()
- return P
+ # if not http_server:
+ # return P
+ # multiprocessing.Process(
+ # target=start_http_server,
+ # args=(id,),
+ # daemon=True
+ # ).start()
+ # return P
else:
- if http_server:
- T = threading.Thread(
- target=run_thing_with_http_server,
- kwargs=dict(
- thing_cls=thing_cls,
- instance_name=instance_name,
- done_queue=done_queue,
- log_level=log_level,
- prerun_callback=prerun_callback
- )
- )
- else:
- T = threading.Thread(
- target=run_thing,
- kwargs=dict(
- thing_cls=thing_cls,
- instance_name=instance_name,
- protocols=protocols,
- tcp_socket_address=tcp_socket_address,
- done_queue=done_queue,
- log_level=log_level,
- prerun_callback=prerun_callback
- ), daemon=True
- )
+ # if http_server:
+ # T = threading.Thread(
+ # target=run_thing_with_http_server,
+ # kwargs=dict(
+ # thing_cls=thing_cls,
+ # id=id,
+ # done_queue=done_queue,
+ # log_level=log_level,
+ # prerun_callback=prerun_callback
+ # )
+ # )
+ # else:
+ T = threading.Thread(
+ target=run_thing_with_zmq_server,
+ kwargs=dict(
+ thing_cls=thing_cls,
+ id=id,
+ protocols=protocols,
+ tcp_socket_address=tcp_socket_address,
+ done_queue=done_queue,
+ log_level=log_level,
+ prerun_callback=prerun_callback
+ ), daemon=True
+ )
T.start()
return T
-
\ No newline at end of file
+
+def run_zmq_server(server: AsyncZMQServer, owner, done_queue: multiprocessing.Queue) -> None:
+ event_loop = get_current_async_loop()
+ async def run():
+ while True:
+ try:
+ messages = await server.async_recv_requests()
+ owner.last_server_message = messages[0]
+ for message in messages:
+ if message.type == EXIT:
+ return
+ await asyncio.sleep(0.01)
+ except BreakLoop:
+ break
+ event_loop.run_until_complete(run())
+ event_loop.run_until_complete(asyncio.gather(*asyncio.all_tasks(event_loop)))
+ if done_queue:
+ done_queue.put(True)
\ No newline at end of file
diff --git a/tests/things/test_thing.py b/tests/things/test_thing.py
index ce248d76..6a454b37 100644
--- a/tests/things/test_thing.py
+++ b/tests/things/test_thing.py
@@ -1,19 +1,485 @@
-from hololinked.server import Thing, action
+import asyncio, threading, time, logging, unittest, os
+import typing
+from pydantic import BaseModel
+
+from hololinked.core import Thing, action, Property, Event
+from hololinked.core.properties import Number, String, Selector, List, Integer, ClassSelector
+from hololinked.core.actions import Action, BoundAction, BoundSyncAction, BoundAsyncAction
+from hololinked.param import ParameterizedFunction
+from hololinked.core.dataklasses import ActionInfoValidator
+from hololinked.utils import isclassmethod
class TestThing(Thing):
@action()
- def get_protocols(self):
- protocols = []
- if self.rpc_server.inproc_server is not None and self.rpc_server.inproc_server.socket_address.startswith('inproc://'):
- protocols.append('INPROC')
+ def get_transports(self):
+ transports = []
+ if self.rpc_server.req_rep_server is not None and self.rpc_server.req_rep_server.socket_address.startswith('inproc://'):
+ transports.append('INPROC')
if self.rpc_server.ipc_server is not None and self.rpc_server.ipc_server.socket_address.startswith('ipc://'):
- protocols.append('IPC')
+ transports.append('IPC')
if self.rpc_server.tcp_server is not None and self.rpc_server.tcp_server.socket_address.startswith('tcp://'):
- protocols.append('TCP')
- return protocols
+ transports.append('TCP')
+ return transports
+
+ @action()
+ def action_echo(self, value):
+ # print("action_echo called with value: ", value)
+ return value
+
+ @classmethod
+ def action_echo_with_classmethod(self, value):
+ return value
+
+ async def action_echo_async(self, value):
+ await asyncio.sleep(0.1)
+ return value
+
+ @classmethod
+ async def action_echo_async_with_classmethod(self, value):
+ await asyncio.sleep(0.1)
+ return value
+
+ class parameterized_action(ParameterizedFunction):
+
+ arg1 = Number(bounds=(0, 10), step=0.5, default=5, crop_to_bounds=True,
+ doc='arg1 description')
+ arg2 = String(default='hello', doc='arg2 description', regex='[a-z]+')
+ arg3 = ClassSelector(class_=(int, float, str),
+ default=5, doc='arg3 description')
+
+ def __call__(self, instance, arg1, arg2, arg3):
+ return instance.id, arg1, arg2, arg3
+
+ class parameterized_action_without_call(ParameterizedFunction):
+
+ arg1 = Number(bounds=(0, 10), step=0.5, default=5, crop_to_bounds=True,
+ doc='arg1 description')
+ arg2 = String(default='hello', doc='arg2 description', regex='[a-z]+')
+ arg3 = ClassSelector(class_=(int, float, str),
+ default=5, doc='arg3 description')
+ class parameterized_action_async(ParameterizedFunction):
+
+ arg1 = Number(bounds=(0, 10), step=0.5, default=5, crop_to_bounds=True,
+ doc='arg1 description')
+ arg2 = String(default='hello', doc='arg2 description', regex='[a-z]+')
+ arg3 = ClassSelector(class_=(int, float, str),
+ default=5, doc='arg3 description')
+
+ async def __call__(self, instance, arg1, arg2, arg3):
+ await asyncio.sleep(0.1)
+ return instance.id, arg1, arg2, arg3
+
+ def __internal__(self, value):
+ return value
+
+ def incorrectly_decorated_method(self, value):
+ return value
+
+ def not_an_action(self, value):
+ return value
+
+ async def not_an_async_action(self, value):
+ await asyncio.sleep(0.1)
+ return value
+
+ def json_schema_validated_action(self, val1: int, val2: str, val3: dict, val4: list):
+ return {
+ 'val1': val1,
+ 'val3': val3
+ }
+
+ def pydantic_validated_action(self, val1: int, val2: str, val3: dict, val4: list) -> typing.Dict[str, typing.Union[int, dict]]:
+ return {
+ 'val2': val2,
+ 'val4': val4
+ }
+
+ @action()
+ def get_serialized_data(self):
+ return b'foobar'
+
@action()
- def test_echo(self, value):
- return value
\ No newline at end of file
+ def get_mixed_content_data(self):
+ return 'foobar', b'foobar'
+
+ @action()
+ def sleep(self):
+ time.sleep(10)
+
+ #----------- Properties --------------
+
+ base_property = Property(default=None, allow_None=True,
+ doc='a base Property class')
+ number_prop = Number(doc="A fully editable number property",
+ default=1)
+ string_prop = String(default='hello', regex='^[a-z]+',
+ doc="A string property with a regex constraint to check value errors")
+ int_prop = Integer(default=5, step=2, bounds=(0, 100),
+ doc="An integer property with step and bounds constraints to check RW")
+ selector_prop = Selector(objects=['a', 'b', 'c', 1], default='a',
+ doc="A selector property to check RW")
+ observable_list_prop = List(default=None, allow_None=True, observable=True,
+ doc="An observable list property to check observable events on write operations")
+ observable_readonly_prop = Number(default=0, readonly=True, observable=True,
+ doc="An observable readonly property to check observable events on read operations")
+ db_commit_number_prop = Number(default=0, db_commit=True,
+ doc="A fully editable number property to check commits to db on write operations")
+ db_init_int_prop = Integer(default=1, db_init=True,
+ doc="An integer property to check initialization from db")
+ db_persist_selector_prop = Selector(objects=['a', 'b', 'c', 1], default='a', db_persist=True,
+ doc="A selector property to check persistence to db on write operations")
+ non_remote_number_prop = Number(default=5, remote=False,
+ doc="A non remote number property to check non-availability on client")
+ sleeping_prop = Number(default=0, observable=True, readonly=True,
+ doc="A property that sleeps for 10 seconds on read operations")
+
+ @sleeping_prop.getter
+ def get_sleeping_prop(self):
+ time.sleep(10)
+ try:
+ return self._sleeping_prop
+ except AttributeError:
+ return 42
+
+ @sleeping_prop.setter
+ def set_sleeping_prop(self, value):
+ time.sleep(10)
+ self._sleeping_prop = value
+
+ @action()
+ def set_non_remote_number_prop(self, value):
+ if value < 0:
+ raise ValueError("Value must be non-negative")
+ self.non_remote_number_prop = value
+
+ @action()
+ def get_non_remote_number_prop(self):
+ return self.non_remote_number_prop
+
+ #----------- Pydantic and JSON schema properties --------------
+
+ class PydanticProp(BaseModel):
+ foo : str
+ bar : int
+ foo_bar : float
+
+ pydantic_prop = Property(default=None, allow_None=True, model=PydanticProp,
+ doc="A property with a pydantic model to check RW")
+
+ pydantic_simple_prop = Property(default=None, allow_None=True, model='int',
+ doc="A property with a simple pydantic model to check RW")
+
+ schema = {
+ "type" : "string",
+ "minLength" : 1,
+ "maxLength" : 10,
+ "pattern" : "^[a-z]+$"
+ }
+
+ json_schema_prop = Property(default=None, allow_None=True, model=schema,
+ doc="A property with a json schema to check RW")
+
+ @observable_readonly_prop.getter
+ def get_observable_readonly_prop(self):
+ if not hasattr(self, '_observable_readonly_prop'):
+ self._observable_readonly_prop = 0
+ self._observable_readonly_prop += 1
+ return self._observable_readonly_prop
+
+ #----------- Class properties --------------
+
+ simple_class_prop = Number(class_member=True, default=42,
+ doc='simple class property with default value')
+
+ managed_class_prop = Number(class_member=True,
+ doc='(managed) class property with custom getter/setter')
+
+ @managed_class_prop.getter
+ def get_managed_class_prop(cls):
+ return getattr(cls, '_managed_value', 0)
+
+ @managed_class_prop.setter
+ def set_managed_class_prop(cls, value):
+ if value < 0:
+ raise ValueError("Value must be non-negative")
+ cls._managed_value = value
+
+ readonly_class_prop = String(class_member=True, readonly=True,
+ doc='read-only class property')
+
+ @readonly_class_prop.getter
+ def get_readonly_class_prop(cls):
+ return "read-only-value"
+
+ deletable_class_prop = Number(class_member=True, default=100,
+ doc='deletable class property with custom deleter')
+
+ @deletable_class_prop.getter
+ def get_deletable_class_prop(cls):
+ return getattr(cls, '_deletable_value', 100)
+
+ @deletable_class_prop.setter
+ def set_deletable_class_prop(cls, value):
+ cls._deletable_value = value
+
+ @deletable_class_prop.deleter
+ def del_deletable_class_prop(cls):
+ if hasattr(cls, '_deletable_value'):
+ del cls._deletable_value
+
+ not_a_class_prop = Number(class_member=False, default=43,
+ doc="test property with class_member=False")
+
+ @not_a_class_prop.getter
+ def get_not_a_class_prop(self):
+ return getattr(self, '_not_a_class_value', 43)
+
+ @not_a_class_prop.setter
+ def set_not_a_class_prop(self, value):
+ self._not_a_class_value = value
+
+ @not_a_class_prop.deleter
+ def del_not_a_class_prop(self):
+ if hasattr(self, '_not_a_class_value'):
+ del self._not_a_class_value
+
+ @action()
+ def print_props(self):
+ print(f'number_prop: {self.number_prop}')
+ print(f'string_prop: {self.string_prop}')
+ print(f'int_prop: {self.int_prop}')
+ print(f'selector_prop: {self.selector_prop}')
+ print(f'observable_list_prop: {self.observable_list_prop}')
+ print(f'observable_readonly_prop: {self.observable_readonly_prop}')
+ print(f'db_commit_number_prop: {self.db_commit_number_prop}')
+ print(f'db_init_int_prop: {self.db_init_int_prop}')
+ print(f'db_persist_selctor_prop: {self.db_persist_selector_prop}')
+ print(f'non_remote_number_prop: {self.non_remote_number_prop}')
+
+
+ #----------- Events --------------
+
+ test_event = Event(doc='test event with arbitrary payload')
+
+ total_number_of_events = Number(default=100, bounds=(1, None),
+ doc="Total number of events pushed")
+
+ @action()
+ def push_events(self, event_name: str = 'test_event', total_number_of_events: int = 100):
+ if event_name not in self.events:
+ raise ValueError(f"Event {event_name} is not a valid event")
+ threading.Thread(target=self._push_worker, args=(event_name, total_number_of_events)).start()
+
+ def _push_worker(self, event_name: str = 'test_event', total_number_of_events: int = 100):
+ for i in range(total_number_of_events):
+ event_descriptor = self.events.descriptors[event_name]
+ if event_descriptor == self.__class__.test_event:
+ # print(f"pushing event {event_name} with value {i}")
+ self.test_event.push('test data')
+ elif event_descriptor == self.__class__.test_binary_payload_event:
+ # print(f"pushing event {event_name} with value {i}")
+ self.test_binary_payload_event.push(b'test data')
+ elif event_descriptor == self.__class__.test_mixed_content_payload_event:
+ # print(f"pushing event {event_name} with value {i}")
+ self.test_mixed_content_payload_event.push(('test data', b'test data'))
+ elif event_descriptor == self.__class__.test_event_with_json_schema:
+ # print(f"pushing event {event_name} with value {i}")
+ self.test_event_with_json_schema.push({
+ 'val1': 1,
+ 'val2': 'test',
+ 'val3': {'key': 'value'},
+ 'val4': [1, 2, 3]
+ })
+ elif event_descriptor == self.test_event_with_pydantic_schema:
+ self.test_event_with_pydantic_schema.push({
+ 'val1': 1,
+ 'val2': 'test',
+ 'val3': {'key': 'value'},
+ 'val4': [1, 2, 3]
+ })
+ time.sleep(0.01) # 10ms
+
+ test_binary_payload_event = Event(doc='test event with binary payload')
+
+ test_mixed_content_payload_event = Event(doc='test event with mixed content payload')
+
+ test_event_with_json_schema = Event(doc='test event with schema validation')
+
+ test_event_with_pydantic_schema = Event(doc='test event with pydantic schema validation')
+
+
+def replace_methods_with_actions(thing_cls: typing.Type[TestThing]) -> None:
+ exposed_actions = []
+ if not isinstance(thing_cls.action_echo, (Action, BoundAction)):
+ thing_cls.action_echo = action()(thing_cls.action_echo)
+ thing_cls.action_echo.__set_name__(thing_cls, 'action_echo')
+ exposed_actions.append('action_echo')
+
+ if not isinstance(thing_cls.action_echo_with_classmethod, (Action, BoundAction)):
+ # classmethod can be decorated with action
+ thing_cls.action_echo_with_classmethod = action()(thing_cls.action_echo_with_classmethod)
+ # BoundAction already, cannot call __set_name__ on it, at least at the time of writing
+ exposed_actions.append('action_echo_with_classmethod')
+
+ if not isinstance(thing_cls.action_echo_async, (Action, BoundAction)):
+ # async methods can be decorated with action
+ thing_cls.action_echo_async = action()(thing_cls.action_echo_async)
+ thing_cls.action_echo_async.__set_name__(thing_cls, 'action_echo_async')
+ exposed_actions.append('action_echo_async')
+
+ if not isinstance(thing_cls.action_echo_async_with_classmethod, (Action, BoundAction)):
+ # async classmethods can be decorated with action
+ thing_cls.action_echo_async_with_classmethod = action()(thing_cls.action_echo_async_with_classmethod)
+ # BoundAction already, cannot call __set_name__ on it, at least at the time of writing
+ exposed_actions.append('action_echo_async_with_classmethod')
+
+ if not isinstance(thing_cls.parameterized_action, (Action, BoundAction)):
+ # parameterized function can be decorated with action
+ thing_cls.parameterized_action = action(safe=True)(thing_cls.parameterized_action)
+ thing_cls.parameterized_action.__set_name__(thing_cls, 'parameterized_action')
+ exposed_actions.append('parameterized_action')
+
+ if not isinstance(thing_cls.parameterized_action_without_call, (Action, BoundAction)):
+ thing_cls.parameterized_action_without_call = action(idempotent=True)(thing_cls.parameterized_action_without_call)
+ thing_cls.parameterized_action_without_call.__set_name__(thing_cls, 'parameterized_action_without_call')
+ exposed_actions.append('parameterized_action_without_call')
+
+ if not isinstance(thing_cls.parameterized_action_async, (Action, BoundAction)):
+ thing_cls.parameterized_action_async = action(synchronous=True)(thing_cls.parameterized_action_async)
+ thing_cls.parameterized_action_async.__set_name__(thing_cls, 'parameterized_action_async')
+ exposed_actions.append('parameterized_action_async')
+
+ if not isinstance(thing_cls.json_schema_validated_action, (Action, BoundAction)):
+ # schema validated actions
+ thing_cls.json_schema_validated_action = action(
+ input_schema={
+ 'type': 'object',
+ 'properties': {
+ 'val1': {'type': 'integer'},
+ 'val2': {'type': 'string'},
+ 'val3': {'type': 'object'},
+ 'val4': {'type': 'array'}
+ }
+ },
+ output_schema={
+ 'type': 'object',
+ 'properties': {
+ 'val1': {'type': 'integer'},
+ 'val3': {'type': 'object'}
+ }
+ }
+ )(thing_cls.json_schema_validated_action)
+ thing_cls.json_schema_validated_action.__set_name__(thing_cls, 'json_schema_validated_action')
+ exposed_actions.append('json_schema_validated_action')
+
+ if not isinstance(thing_cls.pydantic_validated_action, (Action, BoundAction)):
+ thing_cls.pydantic_validated_action = action()(thing_cls.pydantic_validated_action)
+ thing_cls.pydantic_validated_action.__set_name__(thing_cls, 'pydantic_validated_action')
+ exposed_actions.append('pydantic_validated_action')
+
+ replace_methods_with_actions._exposed_actions = exposed_actions
+
+
+
+test_thing_TD = {
+ 'title' : 'TestThing',
+ 'id': 'test-thing',
+ 'actions' : {
+ 'get_transports': {
+ 'title' : 'get_transports',
+ 'description' : 'returns available transports'
+ },
+ 'action_echo': {
+ 'title' : 'action_echo',
+ 'description' : 'returns value as it is to the client'
+ },
+ 'get_serialized_data': {
+ 'title' : 'get_serialized_data',
+ 'description' : 'returns serialized data',
+ },
+ 'get_mixed_content_data': {
+ 'title' : 'get_mixed_content_data',
+ 'description' : 'returns mixed content data',
+ },
+ 'sleep': {
+ 'title' : 'sleep',
+ 'description' : 'sleeps for 10 seconds',
+ },
+ 'push_events': {
+ 'title' : 'push_events',
+ 'description' : 'pushes events',
+ }
+ },
+ 'properties' : {
+ 'base_property': {
+ 'title' : 'base_property',
+ 'description' : 'test property',
+ 'default' : None
+ },
+ 'number_prop': {
+ 'title' : 'number_prop',
+ 'description' : 'A fully editable number property',
+ 'default' : 0
+ },
+ 'string_prop': {
+ 'title' : 'string_prop',
+ 'description' : 'A string property with a regex constraint to check value errors',
+ 'default' : 'hello',
+ 'regex' : '^[a-z]+$'
+ },
+ 'total_number_of_events': {
+ 'title' : 'total_number_of_events',
+ 'description' : 'Total number of events pushed',
+ 'default' : 100,
+ 'minimum' : 1
+ }
+ },
+ 'events' : {
+ 'test_event': {
+ 'title' : 'test_event',
+ 'description' : 'test event'
+ },
+ 'test_binary_payload_event': {
+ 'title' : 'test_binary_payload_event',
+ 'description' : 'test event with binary payload'
+ },
+ 'test_mixed_content_payload_event': {
+ 'title' : 'test_mixed_content_payload_event',
+ 'description' : 'test event with mixed content payload'
+ },
+ 'test_event_with_json_schema': {
+ 'title' : 'test_event_with_json_schema',
+ 'description' : 'test event with schema validation',
+ 'data' : {
+ 'val1': {
+ 'type': 'integer',
+ 'description': 'integer value'
+ },
+ 'val2': {
+ 'type': 'string',
+ 'description': 'string value'
+ },
+ 'val3': {
+ 'type': 'object',
+ 'description': 'object value'
+ },
+ 'val4': {
+ 'type': 'array',
+ 'description': 'array value'
+ }
+ }
+ },
+ 'test_event_with_pydantic_schema': {
+ 'title' : 'test_event_with_pydantic_schema',
+ 'description' : 'test event with pydantic schema validation'
+ }
+ },
+}
+
+
+if __name__ == '__main__':
+ T = TestThing(id='test-thing')
+ T.run()
\ No newline at end of file
diff --git a/tests/utils.py b/tests/utils.py
index 5aec5dba..94be0b78 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,9 +1,16 @@
+import asyncio
import threading
+import typing
import unittest
+from faker import Faker
+
+from hololinked.utils import get_current_async_loop
class TestResult(unittest.TextTestResult):
+ """Custom test result class to format the output of test results."""
+
def addSuccess(self, test):
super().addSuccess(test)
self.stream.write(f' {test} ✔')
@@ -19,20 +26,78 @@ def addError(self, test, err):
self.stream.write(f' {test} ❌ Error')
self.stream.flush()
+
class TestRunner(unittest.TextTestRunner):
+ """Custom test runner class to use the custom test result class."""
resultclass = TestResult
class TestCase(unittest.TestCase):
+    """Custom test case class to print some extra spaces and info about the tests carried out"""
+
+ @classmethod
+ def setUpClass(self):
+ print(f"----------------------------------------------------------------------")
+
+ @classmethod
+ def tearDownClass(self):
+ print(f"\n\ntear down {self.__name__}")
def setUp(self):
- print() # dont concatenate with results printed by unit test
+ print() # add gaps between results printed by unit test
-def print_lingering_threads(exclude_daemon=True):
+class AsyncTestCase(unittest.IsolatedAsyncioTestCase):
+    """Custom async test case class to print some extra spaces and info about the tests carried out"""
+
+ @classmethod
+ def setUpClass(self):
+ print(f"----------------------------------------------------------------------")
+
+ async def asyncSetUp(self):
+ loop = asyncio.get_running_loop()
+ loop.set_debug(False)
+
+ @classmethod
+ def tearDownClass(self):
+ print(f"\n\ntear down {self.__name__}")
+
+ def setUp(self):
+ print() # add gaps between results printed by unit test
+
+
+
+
+
+def print_lingering_threads(exclude_daemon: bool = True):
+ """
+ debugging helper function that prints the names and IDs of all alive threads,
+ excluding daemon threads if specified.
+ """
alive_threads = threading.enumerate()
if exclude_daemon:
alive_threads = [t for t in alive_threads if not t.daemon]
for thread in alive_threads:
print(f"Thread Name: {thread.name}, Thread ID: {thread.ident}, Is Alive: {thread.is_alive()}")
+
+
+class TrackingFaker:
+ """A wrapper around Faker to track the last generated value."""
+
+ def __init__(self, *args, **kwargs):
+ self.gen = Faker(*args, **kwargs)
+ self.last = None
+
+ def __getattr__(self, name) -> typing.Any:
+ orig = getattr(self.gen, name)
+ if callable(orig):
+ def wrapped(*args, **kwargs):
+ result = orig(*args, **kwargs)
+ self.last = result
+ return result
+ return wrapped
+ return orig
+
+fake = TrackingFaker() # type: Faker
+