From 8eb1437edb068021c27b46d3d7bedddc935f9ebd Mon Sep 17 00:00:00 2001 From: olivakar Date: Mon, 21 Mar 2022 21:27:28 -0800 Subject: [PATCH 01/20] feat(azure-iot-device) : Client cert issuance for dps cert management (#971) --- azure-iot-device/azure/iot/device/constant.py | 2 +- .../abstract_provisioning_device_client.py | 14 +++ .../aio/async_provisioning_device_client.py | 6 +- .../models/registration_result.py | 9 +- .../provisioning/pipeline/mqtt_pipeline.py | 8 +- .../pipeline/pipeline_ops_provisioning.py | 7 +- .../pipeline/pipeline_stages_provisioning.py | 14 ++- .../provisioning_device_client.py | 6 +- ...sion_symmetric_key_client_cert_issuance.py | 87 +++++++++++++++++++ scripts/dps_cert_mgmt/device_api_tokengen.py | 39 +++++++++ scripts/dps_cert_mgmt/service_api_tokengen.py | 30 +++++++ .../test_async_provisioning_device_client.py | 53 +++++++++-- .../models/test_registration_result.py | 5 ++ .../pipeline/test_mqtt_pipeline.py | 17 ++++ .../test_pipeline_ops_provisioning.py | 12 +++ .../test_pipeline_stages_provisioning.py | 84 ++++++++++++------ .../provisioning/shared_client_fixtures.py | 8 +- .../unit/provisioning/shared_client_tests.py | 8 ++ .../test_sync_provisioning_device_client.py | 60 +++++++++++-- 19 files changed, 411 insertions(+), 58 deletions(-) create mode 100644 azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py create mode 100644 scripts/dps_cert_mgmt/device_api_tokengen.py create mode 100644 scripts/dps_cert_mgmt/service_api_tokengen.py diff --git a/azure-iot-device/azure/iot/device/constant.py b/azure-iot-device/azure/iot/device/constant.py index dc69935bc..964a37cc7 100644 --- a/azure-iot-device/azure/iot/device/constant.py +++ b/azure-iot-device/azure/iot/device/constant.py @@ -10,7 +10,7 @@ IOTHUB_IDENTIFIER = "azure-iot-device-iothub-py" PROVISIONING_IDENTIFIER = "azure-iot-device-provisioning-py" IOTHUB_API_VERSION = "2019-10-01" -PROVISIONING_API_VERSION = "2019-03-31" +PROVISIONING_API_VERSION = "2021-11-01-preview" SECURITY_MESSAGE_INTERFACE_ID = "urn:azureiot:Security:SecurityAgent:1" TELEMETRY_MESSAGE_SIZE_LIMIT = 262144 # The max keep alive is determined by the load balancer currently. diff --git a/azure-iot-device/azure/iot/device/provisioning/abstract_provisioning_device_client.py b/azure-iot-device/azure/iot/device/provisioning/abstract_provisioning_device_client.py index 152dea1bd..1c18d6455 100644 --- a/azure-iot-device/azure/iot/device/provisioning/abstract_provisioning_device_client.py +++ b/azure-iot-device/azure/iot/device/provisioning/abstract_provisioning_device_client.py @@ -84,6 +84,7 @@ def __init__(self, pipeline): """ self._pipeline = pipeline self._provisioning_payload = None + self._client_csr = None self._pipeline.on_background_exception = handle_exceptions.handle_background_exception @@ -244,6 +245,19 @@ def provisioning_payload(self, provisioning_payload): """ self._provisioning_payload = provisioning_payload + @property + def client_csr(self): + return self._client_csr + + @client_csr.setter + def client_csr(self, csr): + """ + Set the certificate signing request for device client certificate. + The certificate will be used later for authentication after provisioning. 
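+
+        A minimal usage sketch (illustrative only; assumes ``csr_text`` already holds the
+        PEM text of a certificate signing request, e.g. one generated with OpenSSL):
+
+            provisioning_device_client.client_csr = csr_text
+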
+ :param csr: The certificate signing request + """ + self._client_csr = csr + def log_on_register_complete(result=None): # This could be a failed/successful registration result from DPS diff --git a/azure-iot-device/azure/iot/device/provisioning/aio/async_provisioning_device_client.py b/azure-iot-device/azure/iot/device/provisioning/aio/async_provisioning_device_client.py index acd79c533..d9d4d0cc7 100644 --- a/azure-iot-device/azure/iot/device/provisioning/aio/async_provisioning_device_client.py +++ b/azure-iot-device/azure/iot/device/provisioning/aio/async_provisioning_device_client.py @@ -79,7 +79,11 @@ async def register(self): register_async = async_adapter.emulate_async(self._pipeline.register) register_complete = async_adapter.AwaitableCallback(return_arg_name="result") - await register_async(payload=self._provisioning_payload, callback=register_complete) + await register_async( + payload=self._provisioning_payload, + callback=register_complete, + client_csr=self.client_csr, + ) result = await handle_result(register_complete) log_on_register_complete(result) diff --git a/azure-iot-device/azure/iot/device/provisioning/models/registration_result.py b/azure-iot-device/azure/iot/device/provisioning/models/registration_result.py index 560d76720..ad6995a52 100644 --- a/azure-iot-device/azure/iot/device/provisioning/models/registration_result.py +++ b/azure-iot-device/azure/iot/device/provisioning/models/registration_result.py @@ -66,6 +66,7 @@ def __init__( last_update_date_time=None, etag=None, payload=None, + client_cert=None, ): """ :param device_id: Desired device id for the provisioned device @@ -75,7 +76,8 @@ def __init__( :param created_date_time: Registration create date time (in UTC). :param last_update_date_time: Last updated date time (in UTC). :param etag: The entity tag associated with the resource. - :param payload: The payload with which hub is responding + :param payload: The payload with which hub is responding. + :param client_cert: Client certificate issued to the device in PEM format. """ self._device_id = device_id self._assigned_hub = assigned_hub @@ -84,6 +86,7 @@ def __init__( self._last_update_date_time = last_update_date_time self._etag = etag self._response_payload = payload + self._issued_client_certificate = client_cert @property def device_id(self): @@ -113,6 +116,10 @@ def etag(self): def response_payload(self): return json.dumps(self._response_payload, default=lambda o: o.__dict__, sort_keys=True) + @property + def issued_client_certificate(self): + return self._issued_client_certificate + def __str__(self): return "\n".join( [self.device_id, self.assigned_hub, self.sub_status, self.response_payload] diff --git a/azure-iot-device/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py b/azure-iot-device/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py index e410cd2c1..c50f5f6ca 100644 --- a/azure-iot-device/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py +++ b/azure-iot-device/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py @@ -240,11 +240,12 @@ def pipeline_callback(op, error): ) ) - def register(self, payload=None, callback=None): + def register(self, payload=None, callback=None, client_csr=None): """ Register to the device provisioning service. :param payload: Payload that can be sent with the registration request. :param callback: callback which is called when the registration is done. + :param client_csr: The certificate signing request sent to device provisioning service. 
:raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.PipelineNotRunning` if the pipeline has already been shut down @@ -274,6 +275,9 @@ def on_complete(op, error): self._pipeline.run_op( pipeline_ops_provisioning.RegisterOperation( - request_payload=payload, registration_id=self._registration_id, callback=on_complete + request_payload=payload, + registration_id=self._registration_id, + callback=on_complete, + client_csr=client_csr, ) ) diff --git a/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py b/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py index 2a670cc0e..b0faaf838 100644 --- a/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py +++ b/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py @@ -14,7 +14,9 @@ class RegisterOperation(PipelineOperation): This operation is in the group of DPS operations because it is very specific to the DPS client. """ - def __init__(self, request_payload, registration_id, callback, registration_result=None): + def __init__( + self, request_payload, registration_id, callback, registration_result=None, client_csr=None + ): """ Initializer for RegisterOperation objects. @@ -23,6 +25,8 @@ def __init__(self, request_payload, registration_id, callback, registration_resu :param Function callback: The function that gets called when this operation is complete or has failed. The callback function must accept A PipelineOperation object which indicates the specific operation which has completed or failed. + :param client_csr: Certificate signing request sent to device provisioning service in order to issue a + client certificate which will be used for authentication to iot hub post provisioning. 
""" super().__init__(callback=callback) self.request_payload = request_payload @@ -31,6 +35,7 @@ def __init__(self, request_payload, registration_id, callback, registration_resu self.retry_after_timer = None self.polling_timer = None self.provisioning_timeout_timer = None + self.client_csr = client_csr class PollStatusOperation(PipelineOperation): diff --git a/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py b/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py index 2f739d74b..45aa1cda6 100644 --- a/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py +++ b/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py @@ -60,6 +60,7 @@ def _form_complete_result(operation_id, decoded_response, status): last_update_date_time=decoded_state.get("lastUpdatedDateTimeUtc", None), etag=decoded_state.get("etag", None), payload=decoded_state.get("payload", None), + client_cert=decoded_state.get("issuedClientCertificate", None), ) registration_result = RegistrationResult( @@ -431,13 +432,21 @@ def do_query_after_interval(): registration_payload = DeviceRegistrationPayload( registration_id=initial_register_op.registration_id, custom_payload=initial_register_op.request_payload, + client_csr=initial_register_op.client_csr, ) + json_request = registration_payload.get_json_string() + logger.debug( + "{}({}): Sending json payload {} to provisioning".format( + self.name, op.name, json_request + ) + ) + self.send_op_down( pipeline_ops_base.RequestAndResponseOperation( request_type=constant.REGISTER, method="PUT", resource_location="/", - request_body=registration_payload.get_json_string(), + request_body=json_request, callback=on_registration_response, ) ) @@ -451,11 +460,12 @@ class DeviceRegistrationPayload(object): The class representing the payload that needs to be sent to the service. 
""" - def __init__(self, registration_id, custom_payload=None): + def __init__(self, registration_id, custom_payload=None, client_csr=None): # This is not a convention to name variables in python but the # DPS service spec needs the name to be exact for it to work self.registrationId = registration_id self.payload = custom_payload + self.clientCertificateCsr = client_csr def get_json_string(self): return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True) diff --git a/azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py b/azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py index 479f9e9ec..85a12164d 100644 --- a/azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py +++ b/azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py @@ -79,7 +79,11 @@ def register(self): # Register register_complete = EventedCallback(return_arg_name="result") - self._pipeline.register(payload=self._provisioning_payload, callback=register_complete) + self._pipeline.register( + payload=self._provisioning_payload, + callback=register_complete, + client_csr=self.client_csr, + ) result = handle_result(register_complete) log_on_register_complete(result) diff --git a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py new file mode 100644 index 000000000..3f8eed8ec --- /dev/null +++ b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py @@ -0,0 +1,87 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import asyncio +from azure.iot.device.aio import ProvisioningDeviceClient +import os +from azure.iot.device.aio import IoTHubDeviceClient +from azure.iot.device import Message +import uuid +from azure.iot.device import X509 + +messages_to_send = 10 +provisioning_host = os.getenv("PROVISIONING_HOST") +id_scope = os.getenv("PROVISIONING_IDSCOPE") +registration_id = os.getenv("PROVISIONING_REGISTRATION_ID") +symmetric_key = os.getenv("PROVISIONING_SYMMETRIC_KEY") + +csr_file = os.getenv("CSR_FILE") +key_file = os.getenv("X509_KEY_FILE") +issued_cert_file = os.getenv("X509_CERT_FILE") + + +async def main(): + provisioning_device_client = ProvisioningDeviceClient.create_from_symmetric_key( + provisioning_host=provisioning_host, + registration_id=registration_id, + id_scope=id_scope, + symmetric_key=symmetric_key, # authenticate for DPS + ) + with open(csr_file, "r") as csr: + csr_data = csr.read() + # set the CSR on the client + provisioning_device_client.client_csr = str(csr_data) + + registration_result = await provisioning_device_client.register() + + print("The complete registration result is") + print(registration_result.registration_state) + + with open(issued_cert_file, "w") as out_ca_pem: + # Write the issued certificate on the file. 
+ cert_data = registration_result.registration_state.issued_client_certificate + out_ca_pem.write(cert_data) + + if registration_result.status == "assigned": + print("Will send telemetry from the provisioned device") + + x509 = X509( + cert_file=issued_cert_file, + key_file=key_file, + pass_phrase=os.getenv("PASS_PHRASE"), + ) + + device_client = IoTHubDeviceClient.create_from_x509_certificate( + hostname=registration_result.registration_state.assigned_hub, + device_id=registration_result.registration_state.device_id, + x509=x509, + ) + # Connect the client. + await device_client.connect() + + async def send_test_message(i): + print("sending message #" + str(i)) + msg = Message("test wind speed " + str(i)) + msg.message_id = uuid.uuid4() + await device_client.send_message(msg) + print("done sending message #" + str(i)) + + # send `messages_to_send` messages in parallel + await asyncio.gather(*[send_test_message(i) for i in range(1, messages_to_send + 1)]) + + # finally, disconnect + await device_client.disconnect() + else: + print("Can not send telemetry from the provisioned device") + + +if __name__ == "__main__": + asyncio.run(main()) + + # If using Python 3.6 or below, use the following code instead of asyncio.run(main()): + # loop = asyncio.get_event_loop() + # loop.run_until_complete(main()) + # loop.close() diff --git a/scripts/dps_cert_mgmt/device_api_tokengen.py b/scripts/dps_cert_mgmt/device_api_tokengen.py new file mode 100644 index 000000000..6ba468cd6 --- /dev/null +++ b/scripts/dps_cert_mgmt/device_api_tokengen.py @@ -0,0 +1,39 @@ +from base64 import b64encode, b64decode +from hashlib import sha256 +from time import time +from urllib import parse +from hmac import HMAC + +# For Device API token, paste your DPS ID Scope here. +# This can be found in the DPS 'Overview' blade +id_scope = "" + +# For Device API token, paste the registration ID of the enrollment here. +# This can be found in the DPS -> Manage enrollments -> Individual enrollments -> +registration_id = "" + +# For Device API token, paste the primary or secondary key belonging to the individual enrollment. +# This can be found in the DPS -> Manage enrollments -> Individual enrollments -> +# under 'Primary key' or 'Secondary key' +# If using a symmetric key-based enrollment group, you'll need to first generate a device symmetric +# key using the enrollment group key. Use the enrollment group primary or secondary key to compute +# an HMAC-SHA256 of the registration ID for the device. The result is then converted into Base64 +# format to obtain the derived device key. 
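+# As an illustrative sketch (assuming `group_key` holds the enrollment group primary or
+# secondary key and `registration_id` is the device's registration ID):
+#
+#   device_key = b64encode(
+#       HMAC(b64decode(group_key), registration_id.encode("utf-8"), sha256).digest()
+#   ).decode("utf-8")
+#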
To view code examples, see +# https://docs.microsoft.com/en-us/azure/iot-dps/how-to-legacy-device-symm-key +key = "" + +uri = id_scope + "/registrations/" + registration_id +policy_name = "registration" +expiry = 3600 + +ttl = time() + expiry +sign_key = "%s\n%d" % ((parse.quote_plus(uri)), int(ttl)) +# print(sign_key) +signature = b64encode(HMAC(b64decode(key), sign_key.encode("utf-8"), sha256).digest()) + +rawtoken = {"sr": uri, "sig": signature, "se": str(int(ttl))} + +if policy_name is not None: + rawtoken["skn"] = policy_name + +print("SharedAccessSignature " + parse.urlencode(rawtoken)) diff --git a/scripts/dps_cert_mgmt/service_api_tokengen.py b/scripts/dps_cert_mgmt/service_api_tokengen.py new file mode 100644 index 000000000..193f92ca5 --- /dev/null +++ b/scripts/dps_cert_mgmt/service_api_tokengen.py @@ -0,0 +1,30 @@ +from base64 import b64encode, b64decode +from hashlib import sha256 +from time import time +from urllib import parse +from hmac import HMAC + +# For Service API token, uri is your DPS Service endpoint +# This can be found in the DPS 'Overview' blade +# e.g mydps.azure-devices-provisioning.net +uri = "" + +# For Service API token, paste the primary or secondary key belonging to the provisioningservice owner. +# This can be found in the DPS 'Shared access policies' blade +# under 'Primary key' or 'Secondary key' +key = "" + +policy_name = "provisioningserviceowner" +expiry = 3600 + +ttl = time() + expiry +sign_key = "%s\n%d" % ((parse.quote_plus(uri)), int(ttl)) +# print(sign_key) +signature = b64encode(HMAC(b64decode(key), sign_key.encode("utf-8"), sha256).digest()) + +rawtoken = {"sr": uri, "sig": signature, "se": str(int(ttl))} + +if policy_name is not None: + rawtoken["skn"] = policy_name + +print("SharedAccessSignature " + parse.urlencode(rawtoken)) diff --git a/tests/unit/provisioning/aio/test_async_provisioning_device_client.py b/tests/unit/provisioning/aio/test_async_provisioning_device_client.py index 2b1106a52..55d542480 100644 --- a/tests/unit/provisioning/aio/test_async_provisioning_device_client.py +++ b/tests/unit/provisioning/aio/test_async_provisioning_device_client.py @@ -69,7 +69,7 @@ async def test_enables_provisioning_only_if_not_already_enabled( self, mocker, provisioning_pipeline, registration_result ): # Override callback to pass successful result - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=registration_result) mocker.patch.object( @@ -93,7 +93,7 @@ def register_complete_success_callback(payload, callback): async def test_register_calls_pipeline_register( self, provisioning_pipeline, mocker, registration_result ): - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=registration_result) mocker.patch.object( @@ -110,7 +110,7 @@ async def test_shutdown_upon_success(self, mocker, provisioning_pipeline, regist # success result registration_result._status = "assigned" - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=registration_result) mocker.patch.object( @@ -129,7 +129,7 @@ async def test_no_shutdown_upon_fail(self, mocker, provisioning_pipeline, regist # fail result registration_result._status = "not assigned" - def register_complete_fail_callback(payload, callback): + def register_complete_fail_callback(payload, callback, client_csr): 
callback(result=registration_result) mocker.patch.object( @@ -171,7 +171,7 @@ async def test_waits_for_pipeline_op_completions_on_success( assert provisioning_pipeline.shutdown.call_count == 1 # Callbacks sent to pipeline as expected assert provisioning_pipeline.register.call_args == mocker.call( - payload=mocker.ANY, callback=cb_mock_register + payload=mocker.ANY, callback=cb_mock_register, client_csr=None ) assert provisioning_pipeline.shutdown.call_args == mocker.call(callback=cb_mock_shutdown) # Callback completions were waited upon as expected @@ -208,7 +208,7 @@ async def test_waits_for_pipeline_op_completion_on_failure( assert provisioning_pipeline.shutdown.call_count == 0 # Callbacks sent to pipeline as expected assert provisioning_pipeline.register.call_args == mocker.call( - payload=mocker.ANY, callback=cb_mock_register + payload=mocker.ANY, callback=cb_mock_register, client_csr=None ) # Callback completions were waited upon as expected assert cb_mock_register.completion.call_count == 1 @@ -220,7 +220,7 @@ async def test_verifies_registration_result_returned( ): result = registration_result - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=result) mocker.patch.object( @@ -229,6 +229,8 @@ def register_complete_success_callback(payload, callback): client = ProvisioningDeviceClient(provisioning_pipeline) result_returned = await client.register() + print("print(result_returned)") + print(result_returned) assert result_returned == result @pytest.mark.it( @@ -270,7 +272,7 @@ async def test_raises_error_on_register_pipeline_op_error( ): error = pipeline_error() - def register_complete_failure_callback(payload, callback): + def register_complete_failure_callback(payload, callback, client_csr): callback(result=None, error=error) mocker.patch.object( @@ -303,7 +305,7 @@ async def test_raises_error_on_shutdown_pipeline_op_error( error = pipeline_error() - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=registration_result) def shutdown_failure_callback(callback): @@ -363,3 +365,36 @@ async def test_get_payload(self, mocker, payload_input): client = ProvisioningDeviceClient(provisioning_pipeline) client.provisioning_payload = payload_input assert client.provisioning_payload == payload_input + + +@pytest.mark.describe("ProvisioningDeviceClient (Async) - .set_client_csr()") +class TestClientCsr(object): + @pytest.mark.it("Sets the csr on the client csr attribute") + @pytest.mark.parametrize( + "csr_input", + [ + pytest.param("Client Certificate Request", id="String input"), + pytest.param(None, id="None input"), + ], + ) + def test_set_csr(self, mocker, csr_input): + provisioning_pipeline = mocker.MagicMock() + + client = ProvisioningDeviceClient(provisioning_pipeline) + client.client_csr = csr_input + assert client._client_csr == csr_input + + @pytest.mark.it("Gets the csr from the client csr property") + @pytest.mark.parametrize( + "csr_input", + [ + pytest.param("Client Certificate Request", id="String input"), + pytest.param(None, id="None input"), + ], + ) + def test_get_csr(self, mocker, csr_input): + provisioning_pipeline = mocker.MagicMock() + + client = ProvisioningDeviceClient(provisioning_pipeline) + client.client_csr = csr_input + assert client.client_csr == csr_input diff --git a/tests/unit/provisioning/models/test_registration_result.py 
b/tests/unit/provisioning/models/test_registration_result.py index aba1bb4be..153c34f07 100644 --- a/tests/unit/provisioning/models/test_registration_result.py +++ b/tests/unit/provisioning/models/test_registration_result.py @@ -25,6 +25,7 @@ fake_last_update_dttm = datetime.datetime(2020, 10, 17) fake_etag = "SomeEtag" fake_payload = "this is a fake payload" +fake_issued_certificate = "IssuedCertificateToConnectToHub" @pytest.mark.describe("RegistrationResult") @@ -85,6 +86,10 @@ def test_some_properties_of_result_are_not_settable(self, input_setter_code): "registration_state.last_update_date_time = datetime.datetime(3000, 10, 17)", id="Last Update Date Time", ), + pytest.param( + 'registration_state.issued_client_certificate = "some_issued_cert"', + id="Issued Client Certificate", + ), ], ) @pytest.mark.it("Has `RegistrationState` with properties that do not have setter") diff --git a/tests/unit/provisioning/pipeline/test_mqtt_pipeline.py b/tests/unit/provisioning/pipeline/test_mqtt_pipeline.py index 09085ff1e..075050f15 100644 --- a/tests/unit/provisioning/pipeline/test_mqtt_pipeline.py +++ b/tests/unit/provisioning/pipeline/test_mqtt_pipeline.py @@ -8,6 +8,8 @@ import logging from azure.iot.device.common.models import X509 from azure.iot.device.provisioning.pipeline.mqtt_pipeline import MQTTPipeline +from tests.common.pipeline import helpers +import json from azure.iot.device.provisioning.pipeline import constant as dps_constants from azure.iot.device.provisioning.pipeline import ( pipeline_stages_provisioning, @@ -385,6 +387,21 @@ def test_sets_empty_payload(self, pipeline, mocker): op = pipeline._pipeline.run_op.call_args[0][0] assert op.request_payload is None + @pytest.mark.it("passes the client_csr parameter as client_csr on the RegistrationRequest") + def test_sets_csr_payload(self, pipeline, mocker): + cb = mocker.MagicMock() + fake_client_csr = "fake_client_csr" + pipeline.register(client_csr=fake_client_csr, callback=cb) + op = pipeline._pipeline.run_op.call_args[0][0] + assert op.client_csr is fake_client_csr + + @pytest.mark.it("sets client_csr on the RegistrationRequest to None if no csr is provided") + def test_sets_empty_csr(self, pipeline, mocker): + cb = mocker.MagicMock() + pipeline.register(callback=cb) + op = pipeline._pipeline.run_op.call_args[0][0] + assert op.client_csr is None + @pytest.mark.it( "Triggers the callback upon successful completion of the RegisterOperation, passing the registration result in the result parameter" ) diff --git a/tests/unit/provisioning/pipeline/test_pipeline_ops_provisioning.py b/tests/unit/provisioning/pipeline/test_pipeline_ops_provisioning.py index 875484e24..567e34019 100644 --- a/tests/unit/provisioning/pipeline/test_pipeline_ops_provisioning.py +++ b/tests/unit/provisioning/pipeline/test_pipeline_ops_provisioning.py @@ -59,6 +59,18 @@ def test_provisioning_timeout_timer(self, cls_type, init_kwargs): op = cls_type(**init_kwargs) assert op.provisioning_timeout_timer is None + @pytest.mark.it("Initializes 'client_csr' attribute to with provided csr") + @pytest.mark.parametrize( + "csr_input", + [ + pytest.param("Client Certificate Request", id="Some input"), + pytest.param(None, id="None input"), + ], + ) + def test_client_csr(self, cls_type, init_kwargs, csr_input): + op = cls_type(**init_kwargs, client_csr=csr_input) + assert op.client_csr is csr_input + pipeline_ops_test.add_operation_tests( test_module=this_module, diff --git a/tests/unit/provisioning/pipeline/test_pipeline_stages_provisioning.py 
b/tests/unit/provisioning/pipeline/test_pipeline_stages_provisioning.py index ac7b187b1..8b6e63eb4 100644 --- a/tests/unit/provisioning/pipeline/test_pipeline_stages_provisioning.py +++ b/tests/unit/provisioning/pipeline/test_pipeline_stages_provisioning.py @@ -13,10 +13,13 @@ pipeline_ops_provisioning, ) from azure.iot.device.common.pipeline import pipeline_ops_base -from tests.unit.common.pipeline import pipeline_stage_test +from tests.common.pipeline import pipeline_stage_test from azure.iot.device.exceptions import ServiceError - -from tests.unit.common.pipeline.helpers import StageRunOpTestBase +from azure.iot.device.provisioning.models.registration_result import ( + RegistrationResult, + RegistrationState, +) +from tests.common.pipeline.helpers import StageRunOpTestBase from azure.iot.device import exceptions from azure.iot.device.provisioning.pipeline import constant @@ -44,6 +47,8 @@ fake_x509_cert_file = "fake_cert_file" fake_x509_cert_key_file = "fake_cert_key_file" fake_pass_phrase = "fake_pass_phrase" +fake_csr = "fake_client_csr" +fake_issued_x509_cert = "fake_x509_cert" class FakeRegistrationResult(object): @@ -57,11 +62,12 @@ def __str__(self): class FakeRegistrationState(object): - def __init__(self, payload): + def __init__(self, payload, cert): self.deviceId = fake_device_id self.assignedHub = fake_assigned_hub self.payload = payload self.substatus = fake_sub_status + self.issued_client_certificate = cert def __str__(self): return "\n".join( @@ -72,8 +78,8 @@ def get_payload_string(self): return json.dumps(self.payload, default=lambda o: o.__dict__, sort_keys=True) -def create_registration_result(fake_payload, status): - state = FakeRegistrationState(payload=fake_payload) +def create_registration_result(payload, status, issued_x509_cert): + state = FakeRegistrationState(payload=payload, cert=issued_x509_cert) return FakeRegistrationResult(fake_operation_id, status, state) @@ -130,10 +136,17 @@ class TestRegistrationStageWithRegisterOperation(StageRunOpTestBase, Registratio def request_payload(self, request): return request.param + @pytest.fixture(params=[None, fake_csr], ids=["empty csr", "some csr"]) + def request_client_csr(self, request): + return request.param + @pytest.fixture - def op(self, stage, mocker, request_payload): + def op(self, stage, mocker, request_payload, request_client_csr): op = pipeline_ops_provisioning.RegisterOperation( - request_payload, fake_registration_id, callback=mocker.MagicMock() + request_payload, + fake_registration_id, + callback=mocker.MagicMock(), + client_csr=request_client_csr, ) yield op @@ -146,9 +159,11 @@ def op(self, stage, mocker, request_payload): op.provisioning_timeout_timer.cancel() @pytest.fixture - def request_body(self, request_payload): - return '{{"payload": {json_payload}, "registrationId": "{reg_id}"}}'.format( - reg_id=fake_registration_id, json_payload=json.dumps(request_payload) + def request_body(self, request_payload, request_client_csr): + return '{{"clientCertificateCsr": {json_csr}, "payload": {json_payload}, "registrationId": "{reg_id}"}}'.format( + reg_id=fake_registration_id, + json_payload=json.dumps(request_payload), + json_csr=json.dumps(request_client_csr), ) @pytest.mark.it( @@ -188,10 +203,17 @@ class TestRegistrationStageWithRegisterOperationCompleted(RegistrationStageConfi def request_payload(self, request): return request.param + @pytest.fixture(params=[None, fake_csr], ids=["empty csr", "some csr"]) + def request_client_csr(self, request): + return request.param + @pytest.fixture - def 
send_registration_op(self, mocker, request_payload): + def send_registration_op(self, mocker, request_payload, request_client_csr): op = pipeline_ops_provisioning.RegisterOperation( - request_payload, fake_registration_id, callback=mocker.MagicMock() + request_payload, + fake_registration_id, + callback=mocker.MagicMock(), + client_csr=request_client_csr, ) yield op @@ -223,9 +245,11 @@ def request_and_response_op(self, stage): return op @pytest.fixture - def request_body(self, request_payload): - return '{{"payload": {json_payload}, "registrationId": "{reg_id}"}}'.format( - reg_id=fake_registration_id, json_payload=json.dumps(request_payload) + def request_body(self, request_payload, request_client_csr): + return '{{"clientCertificateCsr": {json_csr}, "payload": {json_payload}, "registrationId": "{reg_id}"}}'.format( + reg_id=fake_registration_id, + json_payload=json.dumps(request_payload), + json_csr=json.dumps(request_client_csr), ) @pytest.mark.it( @@ -307,7 +331,9 @@ def test_request_and_response_op_completed_success_with_bad_code( def test_request_and_response_op_completed_success_with_status_assigned( self, stage, request_payload, send_registration_op, request_and_response_op ): - registration_result = create_registration_result(request_payload, "assigned") + registration_result = create_registration_result( + request_payload, "assigned", fake_issued_x509_cert + ) assert not send_registration_op.completed assert not request_and_response_op.completed @@ -332,7 +358,7 @@ def test_request_and_response_op_completed_success_with_status_assigned( def test_request_and_response_op_completed_success_with_status_failed( self, stage, request_payload, send_registration_op, request_and_response_op ): - registration_result = create_registration_result(request_payload, "failed") + registration_result = create_registration_result(request_payload, "failed", None) assert not send_registration_op.completed assert not request_and_response_op.completed @@ -358,7 +384,7 @@ def test_request_and_response_op_completed_success_with_status_failed( def test_request_and_response_op_completed_success_with_unknown_status( self, stage, request_payload, send_registration_op, request_and_response_op ): - registration_result = create_registration_result(request_payload, "some_status") + registration_result = create_registration_result(request_payload, "some_status", None) assert not send_registration_op.completed assert not request_and_response_op.completed @@ -387,7 +413,7 @@ def test_spawns_another_op_request_and_response_op_completed_success_with_status ) mocker.spy(send_registration_op, "spawn_worker_op") - registration_result = create_registration_result(request_payload, "assigning") + registration_result = create_registration_result(request_payload, "assigning", None) assert not send_registration_op.completed assert not request_and_response_op.completed @@ -441,7 +467,9 @@ def cls_type(self): @pytest.fixture def op(self, stage, mocker, request_payload): op = pipeline_ops_provisioning.RegisterOperation( - request_payload, fake_registration_id, callback=mocker.MagicMock() + request_payload, + fake_registration_id, + callback=mocker.MagicMock(), ) yield op @@ -455,7 +483,7 @@ def op(self, stage, mocker, request_payload): @pytest.fixture def request_body(self, request_payload): - return '{{"payload": {json_payload}, "registrationId": "{reg_id}"}}'.format( + return '{{"clientCertificateCsr": null, "payload": {json_payload}, "registrationId": "{reg_id}"}}'.format( reg_id=fake_registration_id, 
json_payload=json.dumps(request_payload) ) @@ -476,7 +504,7 @@ def test_stage_retries_op_if_next_stage_responds_with_status_code_greater_than_4 next_op.status_code = 430 next_op.retry_after = "1" - registration_result = create_registration_result(request_payload, "some_status") + registration_result = create_registration_result(request_payload, "some_status", None) next_op.response_body = get_registration_result_as_bytes(registration_result) next_op.complete() @@ -776,7 +804,7 @@ def test_request_and_response_op_completed_success_with_bad_code( def test_request_and_response_op_completed_success_with_status_assigned( self, stage, send_query_op, request_and_response_op ): - registration_result = create_registration_result(" ", "assigned") + registration_result = create_registration_result(" ", "assigned", fake_issued_x509_cert) assert not send_query_op.completed assert not request_and_response_op.completed @@ -801,7 +829,7 @@ def test_request_and_response_op_completed_success_with_status_assigned( def test_request_and_response_op_completed_success_with_status_failed( self, stage, send_query_op, request_and_response_op ): - registration_result = create_registration_result(" ", "failed") + registration_result = create_registration_result(" ", "failed", None) assert not send_query_op.completed assert not request_and_response_op.completed @@ -827,7 +855,7 @@ def test_request_and_response_op_completed_success_with_status_failed( def test_request_and_response_op_completed_success_with_unknown_status( self, stage, send_query_op, request_and_response_op ): - registration_result = create_registration_result(" ", "some_status") + registration_result = create_registration_result(" ", "quidditching", None) assert not send_query_op.completed assert not request_and_response_op.completed @@ -884,7 +912,7 @@ def test_stage_retries_op_if_next_stage_responds_with_status_code_greater_than_4 next_op.status_code = 430 next_op.retry_after = "1" - registration_result = create_registration_result(" ", "some_status") + registration_result = create_registration_result(" ", "flying", None) next_op.response_body = get_registration_result_as_bytes(registration_result) next_op.complete() @@ -918,7 +946,7 @@ def test_stage_retries_op_if_next_stage_responds_with_status_assigning(self, moc next_op.status_code = 228 next_op.retry_after = "1" - registration_result = create_registration_result(" ", "assigning") + registration_result = create_registration_result(" ", "assigning", None) next_op.response_body = get_registration_result_as_bytes(registration_result) next_op.complete() diff --git a/tests/unit/provisioning/shared_client_fixtures.py b/tests/unit/provisioning/shared_client_fixtures.py index 9c53d5d18..cf8a210bb 100644 --- a/tests/unit/provisioning/shared_client_fixtures.py +++ b/tests/unit/provisioning/shared_client_fixtures.py @@ -20,7 +20,7 @@ fake_operation_id = "fake_operation_id" fake_device_id = "MyDevice" fake_assigned_hub = "MyIoTHub" - +fake_issued_client_cert = "fake_client_x509_cert" """Pipeline fixtures""" @@ -51,7 +51,7 @@ def disconnect(self, callback): def enable_responses(self, callback): callback() - def register(self, payload, callback): + def register(self, payload, callback, client_csr): callback(result={}) @@ -60,7 +60,9 @@ def register(self, payload, callback): @pytest.fixture def registration_result(): - registration_state = RegistrationState(fake_device_id, fake_assigned_hub, fake_sub_status) + registration_state = RegistrationState( + fake_device_id, fake_assigned_hub, fake_sub_status, 
client_cert=fake_issued_client_cert + ) return RegistrationResult(fake_operation_id, fake_status, registration_state) diff --git a/tests/unit/provisioning/shared_client_tests.py b/tests/unit/provisioning/shared_client_tests.py index fb5f5a606..9bcfad4cc 100644 --- a/tests/unit/provisioning/shared_client_tests.py +++ b/tests/unit/provisioning/shared_client_tests.py @@ -51,6 +51,14 @@ def test_payload(self, client_class, provisioning_pipeline): assert client._provisioning_payload is None + @pytest.mark.it( + "Instantiates with the initial value of the '_client_csr' attribute set to None" + ) + def test_csr(self, client_class, provisioning_pipeline): + client = client_class(provisioning_pipeline) + + assert client._client_csr is None + class SharedProvisioningClientCreateMethodUserOptionTests(object): @pytest.mark.it( diff --git a/tests/unit/provisioning/test_sync_provisioning_device_client.py b/tests/unit/provisioning/test_sync_provisioning_device_client.py index f80f9e5d6..0d252d1c2 100644 --- a/tests/unit/provisioning/test_sync_provisioning_device_client.py +++ b/tests/unit/provisioning/test_sync_provisioning_device_client.py @@ -58,7 +58,7 @@ def test_enables_provisioning_only_if_not_already_enabled( self, mocker, provisioning_pipeline, registration_result ): # Override callback to pass successful result - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=registration_result) mocker.patch.object( @@ -83,7 +83,7 @@ def register_complete_success_callback(payload, callback): def test_register_calls_pipeline_register( self, provisioning_pipeline, mocker, registration_result ): - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=registration_result) mocker.patch.object( @@ -100,7 +100,7 @@ def test_shutdown_upon_success(self, mocker, provisioning_pipeline, registration # success result registration_result._status = "assigned" - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=registration_result) mocker.patch.object( @@ -119,7 +119,7 @@ def test_no_shutdown_upon_fail(self, mocker, provisioning_pipeline, registration # fail result registration_result._status = "not assigned" - def register_complete_fail_callback(payload, callback): + def register_complete_fail_callback(payload, callback, client_csr): callback(result=registration_result) mocker.patch.object( @@ -157,7 +157,9 @@ def test_waits_for_pipeline_op_completions_on_success( assert provisioning_pipeline.shutdown.call_count == 1 # Callbacks sent to pipeline as expected assert provisioning_pipeline.register.call_args == mocker.call( - payload=mocker.ANY, callback=cb_mock_register + payload=mocker.ANY, + callback=cb_mock_register, + client_csr=None, ) assert provisioning_pipeline.shutdown.call_args == mocker.call(callback=cb_mock_shutdown) # Callback completions were waited upon as expected @@ -190,7 +192,9 @@ def test_waits_for_pipeline_op_completion_on_failure( assert provisioning_pipeline.shutdown.call_count == 0 # Callbacks sent to pipeline as expected assert provisioning_pipeline.register.call_args == mocker.call( - payload=mocker.ANY, callback=cb_mock_register + payload=mocker.ANY, + callback=cb_mock_register, + client_csr=None, ) # Callback completions were waited upon as expected assert cb_mock_register.wait_for_completion.call_count == 1 @@ 
-202,7 +206,11 @@ def test_verifies_registration_result_returned( ): result = registration_result - def register_complete_success_callback(payload, callback): + def register_complete_success_callback( + payload, + callback, + client_csr=None, + ): callback(result=result) mocker.patch.object( @@ -211,6 +219,7 @@ def register_complete_success_callback(payload, callback): client = ProvisioningDeviceClient(provisioning_pipeline) result_returned = client.register() + assert result_returned == result @pytest.mark.it( @@ -252,7 +261,7 @@ def test_raises_error_on_register_pipeline_op_error( ): error = pipeline_error() - def register_complete_failure_callback(payload, callback): + def register_complete_failure_callback(payload, callback, client_csr): callback(result=None, error=error) mocker.patch.object( @@ -284,7 +293,7 @@ def test_raises_error_on_shutdown_pipeline_op_error( error = pipeline_error() - def register_complete_success_callback(payload, callback): + def register_complete_success_callback(payload, callback, client_csr): callback(result=registration_result) def shutdown_failure_callback(callback): @@ -344,3 +353,36 @@ def test_get_payload(self, mocker, payload_input): client = ProvisioningDeviceClient(provisioning_pipeline) client.provisioning_payload = payload_input assert client.provisioning_payload == payload_input + + +@pytest.mark.describe("ProvisioningDeviceClient (Sync) - .set_client_csr()") +class TestClientCsr(object): + @pytest.mark.it("Sets the csr on the client csr attribute") + @pytest.mark.parametrize( + "csr_input", + [ + pytest.param("Client Certificate Request", id="String input"), + pytest.param(None, id="None input"), + ], + ) + def test_set_csr(self, mocker, csr_input): + provisioning_pipeline = mocker.MagicMock() + + client = ProvisioningDeviceClient(provisioning_pipeline) + client.client_csr = csr_input + assert client._client_csr == csr_input + + @pytest.mark.it("Gets the csr from the client csr property") + @pytest.mark.parametrize( + "csr_input", + [ + pytest.param("Client Certificate Request", id="String input"), + pytest.param(None, id="None input"), + ], + ) + def test_get_csr(self, mocker, csr_input): + provisioning_pipeline = mocker.MagicMock() + + client = ProvisioningDeviceClient(provisioning_pipeline) + client.client_csr = csr_input + assert client.client_csr == csr_input From c880df0de6ae42e7cf44a82d22b6e82b05de4303 Mon Sep 17 00:00:00 2001 From: olivakar Date: Fri, 15 Apr 2022 13:46:35 -0700 Subject: [PATCH 02/20] feat (azure-iot-device) : Trust Bundle (#987) --- .../models/registration_result.py | 7 ++ .../pipeline/pipeline_stages_provisioning.py | 1 + .../dps_certificate_management.md | 104 ++++++++++++++++++ ...client_cert_issuance_send_message_x509.py} | 5 +- ...y_trust_bundle_and_client_cert_issuance.py | 92 ++++++++++++++++ ...ion_symmetric_key_trust_bundle_issuance.py | 82 ++++++++++++++ 6 files changed, 288 insertions(+), 3 deletions(-) create mode 100644 azure-iot-device/samples/dps-cert-mgmt/dps_certificate_management.md rename azure-iot-device/samples/dps-cert-mgmt/{provision_symmetric_key_client_cert_issuance.py => provision_symmetric_key_client_cert_issuance_send_message_x509.py} (94%) create mode 100644 azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_and_client_cert_issuance.py create mode 100644 azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_issuance.py diff --git a/azure-iot-device/azure/iot/device/provisioning/models/registration_result.py 
b/azure-iot-device/azure/iot/device/provisioning/models/registration_result.py index ad6995a52..e5f93084b 100644 --- a/azure-iot-device/azure/iot/device/provisioning/models/registration_result.py +++ b/azure-iot-device/azure/iot/device/provisioning/models/registration_result.py @@ -67,6 +67,7 @@ def __init__( etag=None, payload=None, client_cert=None, + trust_bundle=None, ): """ :param device_id: Desired device id for the provisioned device @@ -78,6 +79,7 @@ def __init__( :param etag: The entity tag associated with the resource. :param payload: The payload with which hub is responding. :param client_cert: Client certificate issued to the device in PEM format. + :param trust_bundle: The trust bundle returned from the service. """ self._device_id = device_id self._assigned_hub = assigned_hub @@ -87,6 +89,7 @@ def __init__( self._etag = etag self._response_payload = payload self._issued_client_certificate = client_cert + self._trust_bundle = trust_bundle @property def device_id(self): @@ -120,6 +123,10 @@ def response_payload(self): def issued_client_certificate(self): return self._issued_client_certificate + @property + def trust_bundle(self): + return self._trust_bundle + def __str__(self): return "\n".join( [self.device_id, self.assigned_hub, self.sub_status, self.response_payload] diff --git a/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py b/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py index 45aa1cda6..f8dcbc115 100644 --- a/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py +++ b/azure-iot-device/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py @@ -61,6 +61,7 @@ def _form_complete_result(operation_id, decoded_response, status): etag=decoded_state.get("etag", None), payload=decoded_state.get("payload", None), client_cert=decoded_state.get("issuedClientCertificate", None), + trust_bundle=decoded_state.get("trustBundle", None), ) registration_result = RegistrationResult( diff --git a/azure-iot-device/samples/dps-cert-mgmt/dps_certificate_management.md b/azure-iot-device/samples/dps-cert-mgmt/dps_certificate_management.md new file mode 100644 index 000000000..5c8a6e7da --- /dev/null +++ b/azure-iot-device/samples/dps-cert-mgmt/dps_certificate_management.md @@ -0,0 +1,104 @@ +# DPS CERTIFICATE MANAGEMENT + +In a nutshell, IoT Device can request a client certificate from a CA through DPS. +* Connect DPS to the private CA hosted by one of our CA partners. +* IoT device sends a CSR to DPS +* DPS forwards it to the CA for signing and returns an X.509 client certificate to device. +* Device uses certificate to authenticate with IoT Hub. + +## Steps for making it work in preview (api-version=2021-11-01-preview) + +### Prerequisite Steps for DPS , enrollment and CA + +These steps must be done for any scenario inside DPS Cert Management. + +* A new DPS instance must created in __West Central US__. +* Create an Enrollment Group or Individual Enrollment in this DPS instance. + * Choose to use symmetric key, TPM key or X.509 for this enrollment. + * Link an IoT Hub to the enrollment. +* Create an internal-use test account with one of Microsoft CA partners by emailing _iotcerts@microsoft.com_ + * Additionally, request them to add the DPS Service Endpoint to the allow list. `What allow list?` +* Once the CA account is created there are 2 pieces of that will be provided. + * `api_key` - DigiCert API key AND `profile_id` - DigiCert Client Cert Profile ID. 
+* Use the DPS Service API to associate the CA object above with the DPS instance.
+  ```bash
+  curl -k -L -i -X PUT https://<dps_service_endpoint>/certificateAuthorities/<ca_name>?api-version=2021-11-01-preview -H 'Authorization: <service_api_sas_token>' -H 'Content-Type: application/json' -H 'Content-Encoding: utf-8' -d'{"certificateAuthorityType":"DigiCertCertificateAuthority","apiKey":"<api_key>","profileName":"<profile_id>"}'
+  ```
+  where,
+  * `dps_service_endpoint` - available in the overview blade of the DPS details.
+  * `ca_name` - a user-chosen friendly name (e.g. myca1).
+  * `service_api_sas_token` - generated using the shared access policy `provisioningserviceowner`.
+  * `api_key` and `profile_id` - obtained before.
+* Query the Service API for the individual or group enrollment and save the response to a file called enrollment.json.
+_NOTE: This is the preferred way, so that updating the enrollment only requires modifying the enrollment.json file._
+  ```bash
+  curl -X GET -H "Content-Type: application/json" -H "Content-Encoding: utf-8" -H "Authorization: <service_api_sas_token>" https://<dps_service_endpoint>/enrollments/<registration_id>?api-version=2021-11-01-preview > enrollment.json
+  ```
+
+### Client Certificate Issuance
+* All prerequisite steps must be done before following the rest.
+* Use the DPS Service API to connect the CA to the __individual__ enrollment or group enrollment.
+  * First, update the enrollment.json file to add the following
+    ```
+    "clientCertificateIssuancePolicy": {"certificateAuthorityName": "<ca_name>"}
+    ```
+    where,
+    * `ca_name` - the friendly name that was assigned to the CA created in the previous step (e.g. myca1).
+  * Then, update the enrollment information:
+    ```bash
+    curl -k -L -i -X PUT -H "Content-Type: application/json" -H "Content-Encoding: utf-8" -H "Authorization: <service_api_sas_token>" https://<dps_service_endpoint>/enrollments/<registration_id>?api-version=2021-11-01-preview -H "If-Match: <etag>" -d @enrollment.json
+    ```
+    where,
+    * `dps_service_endpoint` - available in the overview blade of the DPS details.
+    * `registration_id` - your individual enrollment registration ID (e.g. mydevice1).
+    * `service_api_sas_token` - The DPS Service API shared access token generated previously.
+
+* If a group enrollment was created, then a similar command must be performed for the group.
+* Generate an ECC P-256 keypair using OpenSSL as follows:
+  ```bash
+  openssl ecparam -genkey -name prime256v1 -out ec256-key-pair.key
+  ```
+* Generate a CSR using OpenSSL. Replace the CN with the registration ID of the device. __Important: DPS has character set restrictions for registration ID.__
+Note: The same CSR can be reused and sent to DPS multiple times.
+  ```bash
+  openssl req -new -key ec256-key-pair.key -out ecc256.csr -subj '/CN=<registration_id>'
+  ```
+* Run the [sample](provision_symmetric_key_client_cert_issuance_send_message_x509.py) for DPS. Use the file path of the generated CSR for the environment variable `CSR_FILE` and
+the file path of the key file for the environment variable `X509_KEY_FILE`.
+
+### Trust bundle issuance
+* All prerequisite steps must be done before following the rest.
+* Use the DPS Service API to create a trust bundle
+  ```bash
+  curl -v -X PUT -H 'Authorization: <service_api_sas_token>' -H 'Content-Type: application/json' https://<dps_service_endpoint>/trustBundles/<trust_bundle_name>?api-version=2021-11-01-preview -d @sample-put-trustbundle-payload.json
+  ```
+  where,
+  * `dps_service_endpoint` - available in the overview blade.
+  * `trust_bundle_name` - the name you want to assign to your trust bundle (e.g. mytrustbundle1).
+  * `service_api_sas_token` - The DPS Service API shared access token generated previously.
+ * `sample-put-trustbundle-payload.json` - A file that contains the content for trust bundle as shown below + + ```json + { + "certificates": [ + { + "certificate": "-----BEGIN CERTIFICATE-----\r\nsome content\r\nsome content\r\nsome content\r\n-----END CERTIFICATE-----\r\n" + } + ] + } + ``` +* Use DPS Service API to connect the trust bundle or update the __individual__ enrollment with the trust bundle. + ```bash + curl -k -L -i -X PUT -H "Content-Type: application/json" -H "Content-Encoding: utf-8" -H "Authorization: " https:///enrollments/?api-version=2021-11-01-preview -H "If-Match: " -d @enrollment.json + ``` + where, + * `dps_service_endpoint` - available in overview blade. + * `registration_id` – Is your individual enrollment registration id (e.g. mydevice1). + * `etag` - The etag of the individual enrollment. + * `service_api_sas_token` - The DPS Service API shared access token generated previously. + * `enrollment.json` - A file that contains all the enrollment details that was retrieved. Update this file and the following details must be present tp update the individual enrollment + ``` + "clientCertificateIssuancePolicy":{"certificateAuthorityName":""}, + "trustBundleId":"" + ``` +* Run [sample](provision_symmetric_key_trust_bundle_issuance.py) for DPS. \ No newline at end of file diff --git a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance_send_message_x509.py similarity index 94% rename from azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py rename to azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance_send_message_x509.py index 3f8eed8ec..1bbd7c1a3 100644 --- a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py +++ b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance_send_message_x509.py @@ -32,7 +32,7 @@ async def main(): ) with open(csr_file, "r") as csr: csr_data = csr.read() - # set the CSR on the client + # Set the CSR on the client to send it to DPS provisioning_device_client.client_csr = str(csr_data) registration_result = await provisioning_device_client.register() @@ -41,7 +41,7 @@ async def main(): print(registration_result.registration_state) with open(issued_cert_file, "w") as out_ca_pem: - # Write the issued certificate on the file. + # Write the issued certificate on the file. This forms the certificate portion of the X509 object. cert_data = registration_result.registration_state.issued_client_certificate out_ca_pem.write(cert_data) @@ -51,7 +51,6 @@ async def main(): x509 = X509( cert_file=issued_cert_file, key_file=key_file, - pass_phrase=os.getenv("PASS_PHRASE"), ) device_client = IoTHubDeviceClient.create_from_x509_certificate( diff --git a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_and_client_cert_issuance.py b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_and_client_cert_issuance.py new file mode 100644 index 000000000..7f9b43a94 --- /dev/null +++ b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_and_client_cert_issuance.py @@ -0,0 +1,92 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import asyncio +from azure.iot.device.aio import ProvisioningDeviceClient +import os +from azure.iot.device.aio import IoTHubDeviceClient +from azure.iot.device import Message +import uuid +from azure.iot.device import X509 + +messages_to_send = 10 +provisioning_host = os.getenv("PROVISIONING_HOST") +id_scope = os.getenv("PROVISIONING_IDSCOPE") +registration_id = os.getenv("PROVISIONING_REGISTRATION_ID") +symmetric_key = os.getenv("PROVISIONING_SYMMETRIC_KEY") + +csr_file = os.getenv("CSR_FILE") +key_file = os.getenv("X509_KEY_FILE") +issued_cert_file = os.getenv("X509_CERT_FILE") + + +async def main(): + provisioning_device_client = ProvisioningDeviceClient.create_from_symmetric_key( + provisioning_host=provisioning_host, + registration_id=registration_id, + id_scope=id_scope, + symmetric_key=symmetric_key, # authenticate for DPS + ) + # The trust bundle feature is orthogonal, so we do not need to send CSR. + # But it can be sent optionally to get a client cert back to connect to IoTHub later. + with open(csr_file, "r") as csr: + csr_data = csr.read() + # set the CSR on the client + provisioning_device_client.client_csr = str(csr_data) + + registration_result = await provisioning_device_client.register() + + print("The complete registration result is") + print(registration_result.registration_state) + + print("the trust bundles") + trust_bundle = registration_result.registration_state.trust_bundle + print(trust_bundle) + + with open(issued_cert_file, "w") as out_ca_pem: + # Write the issued certificate on the file. This forms the certificate portion of the X509 object. + cert_data = registration_result.registration_state.issued_client_certificate + out_ca_pem.write(cert_data) + + if registration_result.status == "assigned": + print("Will send telemetry from the provisioned device") + x509 = X509( + cert_file=issued_cert_file, + key_file=key_file, + ) + + device_client = IoTHubDeviceClient.create_from_x509_certificate( + hostname=registration_result.registration_state.assigned_hub, + device_id=registration_result.registration_state.device_id, + x509=x509, + ) + + # Connect the client. + await device_client.connect() + + async def send_test_message(i): + print("sending message #" + str(i)) + msg = Message("test wind speed " + str(i)) + msg.message_id = uuid.uuid4() + await device_client.send_message(msg) + print("done sending message #" + str(i)) + + # send `messages_to_send` messages in parallel + await asyncio.gather(*[send_test_message(i) for i in range(1, messages_to_send + 1)]) + + # finally, disconnect + await device_client.disconnect() + else: + print("Can not send telemetry from the provisioned device") + + +if __name__ == "__main__": + asyncio.run(main()) + + # If using Python 3.6 or below, use the following code instead of asyncio.run(main()): + # loop = asyncio.get_event_loop() + # loop.run_until_complete(main()) + # loop.close() diff --git a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_issuance.py b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_issuance.py new file mode 100644 index 000000000..507e578b5 --- /dev/null +++ b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_issuance.py @@ -0,0 +1,82 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import asyncio +from azure.iot.device.aio import ProvisioningDeviceClient +import os + +messages_to_send = 10 +provisioning_host = os.getenv("PROVISIONING_HOST") +id_scope = os.getenv("PROVISIONING_IDSCOPE") +registration_id = os.getenv("PROVISIONING_REGISTRATION_ID") +symmetric_key = os.getenv("PROVISIONING_SYMMETRIC_KEY") + + +async def main(): + provisioning_device_client = ProvisioningDeviceClient.create_from_symmetric_key( + provisioning_host=provisioning_host, + registration_id=registration_id, + id_scope=id_scope, + symmetric_key=symmetric_key, # authenticate for DPS + ) + # The trust bundle feature is orthogonal, we do not need to send CSR. + registration_result = await provisioning_device_client.register() + + print("The complete registration result is") + print(registration_result.registration_state) + + trust_bundle = registration_result.registration_state.trust_bundle + if not trust_bundle: + print("Trust bundle is empty") + else: + etag = trust_bundle.get("etag", None) + # If the TrustBundle is updated, the application needs to update the Trusted Root. + # Old etag and current etag can be compared to arrive at this decision. + # New certificates in the bundle should be added to the correct store. + # Certificates previously installed but not present in the bundle should be removed. + if etag: + print("New trust bundle version.") + + certificates = trust_bundle.get("certificates", None) + if not certificates: + print("Unexpected trust bundle response") + else: + count_certs = len(certificates) + print("Trust bundle has {number} number of certificates".format(number=count_certs)) + for i in range(0, count_certs): + certificate = certificates[i] + if not certificate: + print("Unable to parse certificate") + else: + cert_content = certificate.get("certificate", None) + if not cert_content: + print("Certificate has NO content") + else: + self_signed = False + metadata = certificate.get("metadata", None) + subject = metadata.get("subjectName", None) + issuer = metadata.get("issuerName", None) + if not subject or not issuer: + print("Invalid CA certificate") + elif subject == issuer: + # If the TrustBundle certificate is a CA root, it should be installed within the + # Trusted Root store. 
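+                            # (A root is detected here by the certificate's subject name matching its
+                            # issuer name, i.e. a self-signed certificate.)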
+ self_signed = True + print("It is a self-signed = {}, certificate".format(self_signed)) + else: + print("It is a NOT a self-signed = {}, certificate".format(self_signed)) + print("Subject = {}".format(subject)) + print("Issuer = {}".format(issuer)) + print("Content of PEM = {}".format(cert_content)) + + +if __name__ == "__main__": + asyncio.run(main()) + + # If using Python 3.6 or below, use the following code instead of asyncio.run(main()): + # loop = asyncio.get_event_loop() + # loop.run_until_complete(main()) + # loop.close() From f8cb4033838d2ede9bf767c9f99c3c225203fce1 Mon Sep 17 00:00:00 2001 From: olivakar Date: Thu, 28 Apr 2022 16:02:41 -0700 Subject: [PATCH 03/20] Create a new yaml file for dps cert mgmt Create a new yaml file for dps cert mgmt to make it run on a new pipeline --- cert-mgmt-dps.yaml | 54 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 cert-mgmt-dps.yaml diff --git a/cert-mgmt-dps.yaml b/cert-mgmt-dps.yaml new file mode 100644 index 000000000..e953c592c --- /dev/null +++ b/cert-mgmt-dps.yaml @@ -0,0 +1,54 @@ +resources: +- repo: self +#Multi-configuration and multi-agent job options are not exported to YAML. Configure these options using documentation guidance: https://docs.microsoft.com/vsts/pipelines/process/phases +jobs: + +- job: 'Test' + pool: + vmImage: 'Ubuntu 20.04' + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.7' + architecture: 'x64' + + - script: 'python env_setup.py --no_dev' + displayName: 'Prepare environment (install packages + dev dependencies + test dependencies + tools)' + + - script: 'python -m pip install cryptography' + displayName: 'Install pyca/cryptography for X509 functionality' + + - script: | + cd $(Agent.WorkFolder) + cd .. 
+ touch .rnd + displayName: 'create RANDFILE file (needed to store seed data) separately due to openssl version issues in the pipeline' + + - script: | + cd $(Build.SourcesDirectory)/azure_provisioning_e2e/tests + pytest test_*.py --junitxml=junit/dps-e2e-test-results.xml + displayName: 'Run Specified E2E Test with env variables' + + env: + IOTHUB_CONNECTION_STRING: $(DPSCERT-MAC-IOTHUB-CONNECTION-STRING) + IOTHUB_EVENTHUB_CONNECTION_STRING: $(DPSCERT-MAC-IOTHUB-EVENTHUB-CONNECTION-STRING) + IOTHUB_CA_ROOT_CERT: $(DPSCERT-MAC-IOTHUB-CA-ROOT-CERT) + IOTHUB_CA_ROOT_CERT_KEY: $(DPSCERT-MAC-IOTHUB-CA-ROOT-CERT-KEY) + STORAGE_CONNECTION_STRING: $(DPSCERT-MAC-STORAGE-CONNECTION-STRING) + + PROVISIONING_DEVICE_ENDPOINT: $(DPSCERT-MAC-DPS-DEVICE-ENDPOINT) + PROVISIONING_SERVICE_CONNECTION_STRING: $(DPSCERT-MAC-DPS-CONNECTION-STRING) + PROVISIONING_DEVICE_IDSCOPE: $(DPSCERT-MAC-DPS-ID-SCOPE) + + PROVISIONING_ROOT_CERT: $(DPSCERT-MAC-IOT-PROVISIONING-ROOT-CERT) + PROVISIONING_ROOT_CERT_KEY: $(DPSCERT-MAC-IOT-PROVISIONING-ROOT-CERT-KEY) + PROVISIONING_ROOT_PASSWORD: $(DPSCERT-MAC-ROOT-CERT-PASSWORD) + PYTHONUNBUFFERED: True + + - task: PublishTestResults@2 + displayName: 'Publish Test Results' + condition: always() + inputs: + testResultsFiles: '**/dps-cert-mgmt-e2e-test-*.xml' + testRunTitle: 'Publish test results for Python $(python.version)' From 44de9e217a622b7ea8842b24ee3bf49e3a4044e6 Mon Sep 17 00:00:00 2001 From: olivakar Date: Thu, 28 Apr 2022 16:05:58 -0700 Subject: [PATCH 04/20] Create a new yaml --- vsts/dps-e2e-cert-mgmt.yaml | 54 +++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 vsts/dps-e2e-cert-mgmt.yaml diff --git a/vsts/dps-e2e-cert-mgmt.yaml b/vsts/dps-e2e-cert-mgmt.yaml new file mode 100644 index 000000000..e953c592c --- /dev/null +++ b/vsts/dps-e2e-cert-mgmt.yaml @@ -0,0 +1,54 @@ +resources: +- repo: self +#Multi-configuration and multi-agent job options are not exported to YAML. Configure these options using documentation guidance: https://docs.microsoft.com/vsts/pipelines/process/phases +jobs: + +- job: 'Test' + pool: + vmImage: 'Ubuntu 20.04' + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.7' + architecture: 'x64' + + - script: 'python env_setup.py --no_dev' + displayName: 'Prepare environment (install packages + dev dependencies + test dependencies + tools)' + + - script: 'python -m pip install cryptography' + displayName: 'Install pyca/cryptography for X509 functionality' + + - script: | + cd $(Agent.WorkFolder) + cd .. 
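+      # Assumption: on hosted agents the parent of $(Agent.WorkFolder) is the agent home directory, where openssl expects its .rnd seed file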
+ touch .rnd + displayName: 'create RANDFILE file (needed to store seed data) separately due to openssl version issues in the pipeline' + + - script: | + cd $(Build.SourcesDirectory)/azure_provisioning_e2e/tests + pytest test_*.py --junitxml=junit/dps-e2e-test-results.xml + displayName: 'Run Specified E2E Test with env variables' + + env: + IOTHUB_CONNECTION_STRING: $(DPSCERT-MAC-IOTHUB-CONNECTION-STRING) + IOTHUB_EVENTHUB_CONNECTION_STRING: $(DPSCERT-MAC-IOTHUB-EVENTHUB-CONNECTION-STRING) + IOTHUB_CA_ROOT_CERT: $(DPSCERT-MAC-IOTHUB-CA-ROOT-CERT) + IOTHUB_CA_ROOT_CERT_KEY: $(DPSCERT-MAC-IOTHUB-CA-ROOT-CERT-KEY) + STORAGE_CONNECTION_STRING: $(DPSCERT-MAC-STORAGE-CONNECTION-STRING) + + PROVISIONING_DEVICE_ENDPOINT: $(DPSCERT-MAC-DPS-DEVICE-ENDPOINT) + PROVISIONING_SERVICE_CONNECTION_STRING: $(DPSCERT-MAC-DPS-CONNECTION-STRING) + PROVISIONING_DEVICE_IDSCOPE: $(DPSCERT-MAC-DPS-ID-SCOPE) + + PROVISIONING_ROOT_CERT: $(DPSCERT-MAC-IOT-PROVISIONING-ROOT-CERT) + PROVISIONING_ROOT_CERT_KEY: $(DPSCERT-MAC-IOT-PROVISIONING-ROOT-CERT-KEY) + PROVISIONING_ROOT_PASSWORD: $(DPSCERT-MAC-ROOT-CERT-PASSWORD) + PYTHONUNBUFFERED: True + + - task: PublishTestResults@2 + displayName: 'Publish Test Results' + condition: always() + inputs: + testResultsFiles: '**/dps-cert-mgmt-e2e-test-*.xml' + testRunTitle: 'Publish test results for Python $(python.version)' From 5c34581f2d185c335381e1aa226d6e5d6888863e Mon Sep 17 00:00:00 2001 From: olivakar Date: Thu, 28 Apr 2022 16:07:43 -0700 Subject: [PATCH 05/20] Delete cert-mgmt-dps.yaml --- cert-mgmt-dps.yaml | 54 ---------------------------------------------- 1 file changed, 54 deletions(-) delete mode 100644 cert-mgmt-dps.yaml diff --git a/cert-mgmt-dps.yaml b/cert-mgmt-dps.yaml deleted file mode 100644 index e953c592c..000000000 --- a/cert-mgmt-dps.yaml +++ /dev/null @@ -1,54 +0,0 @@ -resources: -- repo: self -#Multi-configuration and multi-agent job options are not exported to YAML. Configure these options using documentation guidance: https://docs.microsoft.com/vsts/pipelines/process/phases -jobs: - -- job: 'Test' - pool: - vmImage: 'Ubuntu 20.04' - - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.7' - architecture: 'x64' - - - script: 'python env_setup.py --no_dev' - displayName: 'Prepare environment (install packages + dev dependencies + test dependencies + tools)' - - - script: 'python -m pip install cryptography' - displayName: 'Install pyca/cryptography for X509 functionality' - - - script: | - cd $(Agent.WorkFolder) - cd .. 
- touch .rnd - displayName: 'create RANDFILE file (needed to store seed data) separately due to openssl version issues in the pipeline' - - - script: | - cd $(Build.SourcesDirectory)/azure_provisioning_e2e/tests - pytest test_*.py --junitxml=junit/dps-e2e-test-results.xml - displayName: 'Run Specified E2E Test with env variables' - - env: - IOTHUB_CONNECTION_STRING: $(DPSCERT-MAC-IOTHUB-CONNECTION-STRING) - IOTHUB_EVENTHUB_CONNECTION_STRING: $(DPSCERT-MAC-IOTHUB-EVENTHUB-CONNECTION-STRING) - IOTHUB_CA_ROOT_CERT: $(DPSCERT-MAC-IOTHUB-CA-ROOT-CERT) - IOTHUB_CA_ROOT_CERT_KEY: $(DPSCERT-MAC-IOTHUB-CA-ROOT-CERT-KEY) - STORAGE_CONNECTION_STRING: $(DPSCERT-MAC-STORAGE-CONNECTION-STRING) - - PROVISIONING_DEVICE_ENDPOINT: $(DPSCERT-MAC-DPS-DEVICE-ENDPOINT) - PROVISIONING_SERVICE_CONNECTION_STRING: $(DPSCERT-MAC-DPS-CONNECTION-STRING) - PROVISIONING_DEVICE_IDSCOPE: $(DPSCERT-MAC-DPS-ID-SCOPE) - - PROVISIONING_ROOT_CERT: $(DPSCERT-MAC-IOT-PROVISIONING-ROOT-CERT) - PROVISIONING_ROOT_CERT_KEY: $(DPSCERT-MAC-IOT-PROVISIONING-ROOT-CERT-KEY) - PROVISIONING_ROOT_PASSWORD: $(DPSCERT-MAC-ROOT-CERT-PASSWORD) - PYTHONUNBUFFERED: True - - - task: PublishTestResults@2 - displayName: 'Publish Test Results' - condition: always() - inputs: - testResultsFiles: '**/dps-cert-mgmt-e2e-test-*.xml' - testRunTitle: 'Publish test results for Python $(python.version)' From 0650fd335d55fa924e61b1c3fa3fb8736c729172 Mon Sep 17 00:00:00 2001 From: olivakar Date: Thu, 12 May 2022 13:18:04 -0700 Subject: [PATCH 06/20] test(azure-iot-device) : Add client cert issuance tests (#995) --- .../abstract_provisioning_device_client.py | 6 +- .../aio/async_provisioning_device_client.py | 2 +- .../provisioning_device_client.py | 2 +- ..._client_cert_issuance_send_message_x509.py | 2 +- ...y_trust_bundle_and_client_cert_issuance.py | 2 +- .../tests/test_async_symmetric_enrollments.py | 103 +++++++++++++++++- .../tests/test_sync_symmetric_enrollments.py | 101 ++++++++++++++++- .../test_async_provisioning_device_client.py | 6 +- .../test_sync_provisioning_device_client.py | 6 +- vsts/dps-e2e-cert-mgmt.yaml | 6 +- 10 files changed, 213 insertions(+), 23 deletions(-) diff --git a/azure-iot-device/azure/iot/device/provisioning/abstract_provisioning_device_client.py b/azure-iot-device/azure/iot/device/provisioning/abstract_provisioning_device_client.py index 1c18d6455..32c7a78be 100644 --- a/azure-iot-device/azure/iot/device/provisioning/abstract_provisioning_device_client.py +++ b/azure-iot-device/azure/iot/device/provisioning/abstract_provisioning_device_client.py @@ -246,11 +246,11 @@ def provisioning_payload(self, provisioning_payload): self._provisioning_payload = provisioning_payload @property - def client_csr(self): + def client_certificate_signing_request(self): return self._client_csr - @client_csr.setter - def client_csr(self, csr): + @client_certificate_signing_request.setter + def client_certificate_signing_request(self, csr): """ Set the certificate signing request for device client certificate. The certificate will be used later for authentication after provisioning. 
diff --git a/azure-iot-device/azure/iot/device/provisioning/aio/async_provisioning_device_client.py b/azure-iot-device/azure/iot/device/provisioning/aio/async_provisioning_device_client.py index d9d4d0cc7..4a95cc4eb 100644 --- a/azure-iot-device/azure/iot/device/provisioning/aio/async_provisioning_device_client.py +++ b/azure-iot-device/azure/iot/device/provisioning/aio/async_provisioning_device_client.py @@ -82,7 +82,7 @@ async def register(self): await register_async( payload=self._provisioning_payload, callback=register_complete, - client_csr=self.client_csr, + client_csr=self.client_certificate_signing_request, ) result = await handle_result(register_complete) diff --git a/azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py b/azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py index 85a12164d..fe2cacf1f 100644 --- a/azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py +++ b/azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py @@ -82,7 +82,7 @@ def register(self): self._pipeline.register( payload=self._provisioning_payload, callback=register_complete, - client_csr=self.client_csr, + client_csr=self.client_certificate_signing_request, ) result = handle_result(register_complete) diff --git a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance_send_message_x509.py b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance_send_message_x509.py index 1bbd7c1a3..799480452 100644 --- a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance_send_message_x509.py +++ b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance_send_message_x509.py @@ -33,7 +33,7 @@ async def main(): with open(csr_file, "r") as csr: csr_data = csr.read() # Set the CSR on the client to send it to DPS - provisioning_device_client.client_csr = str(csr_data) + provisioning_device_client.client_certificate_signing_request = str(csr_data) registration_result = await provisioning_device_client.register() diff --git a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_and_client_cert_issuance.py b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_and_client_cert_issuance.py index 7f9b43a94..7d046be21 100644 --- a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_and_client_cert_issuance.py +++ b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_trust_bundle_and_client_cert_issuance.py @@ -35,7 +35,7 @@ async def main(): with open(csr_file, "r") as csr: csr_data = csr.read() # set the CSR on the client - provisioning_device_client.client_csr = str(csr_data) + provisioning_device_client.client_certificate_signing_request = str(csr_data) registration_result = await provisioning_device_client.register() diff --git a/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py index 3657db15b..b295b3c95 100644 --- a/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py @@ -6,12 +6,18 @@ from provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device.aio import ProvisioningDeviceClient +from azure.iot.device.aio import IoTHubDeviceClient +from azure.iot.device import X509 from provisioningserviceclient import 
ProvisioningServiceClient, IndividualEnrollment from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy import pytest import logging import os import uuid +from scripts.create_x509_chain_crypto import ( + create_private_key, + create_csr, +) pytestmark = pytest.mark.asyncio logging.basicConfig(level=logging.DEBUG) @@ -26,6 +32,10 @@ service_client = ProvisioningServiceClient.create_from_connection_string(conn_str) device_registry_helper = Helper(os.getenv("IOTHUB_CONNECTION_STRING")) linked_iot_hub = connection_string_to_hostname(os.getenv("IOTHUB_CONNECTION_STRING")) +# TODO Delete this line. This is a pre created variable in key vault now. +symmetric_key_for_cert_management = os.getenv("DPS_CERT_ISSUANCE_SYM_KEY_AIO") + +logger = logging.getLogger(__name__) @pytest.mark.it( @@ -78,6 +88,79 @@ async def test_device_register_with_device_id_for_a_symmetric_key_individual_enr service_client.delete_individual_enrollment_by_param(registration_id) +@pytest.mark.it( + "A device requests a client cert by sending a certificate signing request " + "while being provisioned to the linked IoTHub with the device_id equal to the registration_id" + "of the individual enrollment that has been created with a symmetric key authentication" +) +@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) +async def test_device_register_with_client_cert_issuance_for_a_symmetric_key_individual_enrollment( + protocol +): + key_file = "key.pem" + csr_file = "request.pem" + issued_cert_file = "cert.pem" + + try: + # TODO Uncomment lines when service releases. Can not create enrollment record now as it + # TODO involves manual steps to associate the enrollment with Client and Server Profiles. + # individual_enrollment_record = create_individual_enrollment( + # "e2e-dps-avis" + str(uuid.uuid4()) + # ) + + # registration_id = individual_enrollment_record.registration_id + # symmetric_key = individual_enrollment_record.attestation.symmetric_key.primary_key + + registration_id = "e2e-dps-avis" + symmetric_key = symmetric_key_for_cert_management + + logger.debug("the symmetric key for e2e-dps-avis") + logger.debug(symmetric_key_for_cert_management) + private_key = create_private_key(key_file) + create_csr(private_key, csr_file, registration_id) + + registration_result = await result_from_register( + registration_id, symmetric_key, protocol, csr_file=csr_file + ) + + assert_device_provisioned( + device_id=registration_id, registration_result=registration_result, client_cert=True + ) + with open(issued_cert_file, "w") as out_ca_pem: + # Write the issued certificate on the file. This forms the certificate portion of the X509 object. + cert_data = registration_result.registration_state.issued_client_certificate + out_ca_pem.write(cert_data) + + x509 = X509( + cert_file=issued_cert_file, + key_file=key_file, + ) + + device_client = IoTHubDeviceClient.create_from_x509_certificate( + hostname=registration_result.registration_state.assigned_hub, + device_id=registration_result.registration_state.device_id, + x509=x509, + ) + # Connect the client. + await device_client.connect() + # Assert that this X509 was able to connect. + assert device_client.connected + await device_client.disconnect() + + # TODO Uncomment this line. Right now do not delete the enrollment as it is not created on the fly. + # device_registry_helper.try_delete_device(registration_id) + finally: + # TODO Uncomment this line. Right now do not delete the enrollment as it is not created on the fly. 
+ # TODO This is a previously created enrollment record. + # service_client.delete_individual_enrollment_by_param(registration_id) + if os.path.exists(key_file): + os.remove(key_file) + if os.path.exists(csr_file): + os.remove(csr_file) + if os.path.exists(issued_cert_file): + os.remove(issued_cert_file) + + def create_individual_enrollment(registration_id, device_id=None): """ Create an individual enrollment record using the service client @@ -98,11 +181,12 @@ def create_individual_enrollment(registration_id, device_id=None): return service_client.create_or_update(individual_provisioning_model) -def assert_device_provisioned(device_id, registration_result): +def assert_device_provisioned(device_id, registration_result, client_cert=False): """ Assert that the device has been provisioned correctly to iothub from the registration result as well as from the device registry :param device_id: The device id :param registration_result: The registration result + :param client_cert: Boolean expecting client cert to be issued """ assert registration_result.status == "assigned" assert registration_result.registration_state.device_id == device_id @@ -110,12 +194,17 @@ def assert_device_provisioned(device_id, registration_result): device = device_registry_helper.get_device(device_id) assert device is not None - assert device.authentication.type == "sas" + if client_cert: + assert device.authentication.type == "selfSigned" + else: + assert device.authentication.type == "sas" assert device.device_id == device_id + if client_cert: + assert registration_result.registration_state.issued_client_certificate is not None + -# TODO Eventually should return result after the APi changes -async def result_from_register(registration_id, symmetric_key, protocol): +async def result_from_register(registration_id, symmetric_key, protocol, csr_file=None): # We have this mapping because the pytest logs look better with "mqtt" and "mqttws" # instead of just "True" and "False". protocol_boolean_mapping = {"mqtt": False, "mqttws": True} @@ -126,5 +215,9 @@ async def result_from_register(registration_id, symmetric_key, protocol): symmetric_key=symmetric_key, websockets=protocol_boolean_mapping[protocol], ) - + if csr_file: + with open(csr_file, "r") as csr: + csr_data = csr.read() + # Set the CSR on the client to send it to DPS + provisioning_device_client.client_certificate_signing_request = str(csr_data) return await provisioning_device_client.register() diff --git a/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py index 29fb0d168..d8592bba7 100644 --- a/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py @@ -4,7 +4,7 @@ # license information. 
# -------------------------------------------------------------------------- -from provisioning_e2e.service_helper import Helper, connection_string_to_hostname +from azure_provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device import ProvisioningDeviceClient from provisioningserviceclient import ProvisioningServiceClient, IndividualEnrollment from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy @@ -12,6 +12,11 @@ import logging import os import uuid +from scripts.create_x509_chain_crypto import ( + create_private_key, + create_csr, +) +from azure.iot.device import IoTHubDeviceClient logging.basicConfig(level=logging.DEBUG) @@ -22,6 +27,10 @@ ) device_registry_helper = Helper(os.getenv("IOTHUB_CONNECTION_STRING")) linked_iot_hub = connection_string_to_hostname(os.getenv("IOTHUB_CONNECTION_STRING")) +# TODO Delete this line. This is a pre created variable in key vault now. +symmetric_key_for_cert_management = os.getenv("DPS_CERT_ISSUANCE_SYM_KEY_SYNC") + +logger = logging.getLogger(__name__) @pytest.mark.it( @@ -71,6 +80,80 @@ def test_device_register_with_device_id_for_a_symmetric_key_individual_enrollmen service_client.delete_individual_enrollment_by_param(registration_id) +@pytest.mark.it( + "A device requests a client cert by sending a certificate signing request " + "while being provisioned to the linked IoTHub with the device_id equal to the registration_id" + "of the individual enrollment that has been created with a symmetric key authentication" +) +@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) +def test_device_register_with_client_cert_issuance_for_a_symmetric_key_individual_enrollment( + protocol +): + key_file = "key.pem" + csr_file = "request.pem" + issued_cert_file = "cert.pem" + try: + # individual_enrollment_record = create_individual_enrollment( + # "e2e-dps-avis" + str(uuid.uuid4()) + # ) + # + # registration_id = individual_enrollment_record.registration_id + # symmetric_key = individual_enrollment_record.attestation.symmetric_key.primary_key + + registration_id = "e2e-dps-ventus" + symmetric_key = symmetric_key_for_cert_management + logger.debug("the symmetric key for e2e-dps-ventus") + logger.debug(symmetric_key_for_cert_management) + + key_file = "key.pem" + csr_file = "request.pem" + issued_cert_file = "cert.pem" + + private_key = create_private_key(key_file) + create_csr(private_key, csr_file, registration_id) + + registration_result = result_from_register( + registration_id, symmetric_key, protocol, csr_file=csr_file + ) + + assert_device_provisioned( + device_id=registration_id, registration_result=registration_result, client_cert=True + ) + with open(issued_cert_file, "w") as out_ca_pem: + # Write the issued certificate on the file. This forms the certificate portion of the X509 object. + cert_data = registration_result.registration_state.issued_client_certificate + out_ca_pem.write(cert_data) + + x509 = X509( + cert_file=issued_cert_file, + key_file=key_file, + ) + + device_client = IoTHubDeviceClient.create_from_x509_certificate( + hostname=registration_result.registration_state.assigned_hub, + device_id=registration_result.registration_state.device_id, + x509=x509, + ) + # Connect the client. + device_client.connect() + # Assert that this X509 was able to connect. + assert device_client.connected + device_client.disconnect() + + # TODO Uncomment this line. Right now do not delete the enrollment as it is not created on the fly. 
+ # device_registry_helper.try_delete_device(registration_id) + finally: + # TODO Uncomment this line. Right now do not delete the enrollment as it is not created on the fly. + # TODO This is a previously created enrollment record. + # service_client.delete_individual_enrollment_by_param(registration_id) + if os.path.exists(key_file): + os.remove(key_file) + if os.path.exists(csr_file): + os.remove(csr_file) + if os.path.exists(issued_cert_file): + os.remove(issued_cert_file) + + def create_individual_enrollment(registration_id, device_id=None): """ Create an individual enrollment record using the service client @@ -91,7 +174,7 @@ def create_individual_enrollment(registration_id, device_id=None): return service_client.create_or_update(individual_provisioning_model) -def assert_device_provisioned(device_id, registration_result): +def assert_device_provisioned(device_id, registration_result, client_cert=False): """ Assert that the device has been provisioned correctly to iothub from the registration result as well as from the device registry :param device_id: The device id @@ -103,11 +186,16 @@ def assert_device_provisioned(device_id, registration_result): device = device_registry_helper.get_device(device_id) assert device is not None - assert device.authentication.type == "sas" + if client_cert: + assert device.authentication.type == "selfSigned" + else: + assert device.authentication.type == "sas" assert device.device_id == device_id + if client_cert: + assert registration_result.registration_state.issued_client_certificate is not None -def result_from_register(registration_id, symmetric_key, protocol): +def result_from_register(registration_id, symmetric_key, protocol, csr_file=None): protocol_boolean_mapping = {"mqtt": False, "mqttws": True} provisioning_device_client = ProvisioningDeviceClient.create_from_symmetric_key( provisioning_host=PROVISIONING_HOST, @@ -117,4 +205,9 @@ def result_from_register(registration_id, symmetric_key, protocol): websockets=protocol_boolean_mapping[protocol], ) + if csr_file: + with open(csr_file, "r") as csr: + csr_data = csr.read() + # Set the CSR on the client to send it to DPS + provisioning_device_client.client_certificate_signing_request = str(csr_data) return provisioning_device_client.register() diff --git a/tests/unit/provisioning/aio/test_async_provisioning_device_client.py b/tests/unit/provisioning/aio/test_async_provisioning_device_client.py index 55d542480..745c06afe 100644 --- a/tests/unit/provisioning/aio/test_async_provisioning_device_client.py +++ b/tests/unit/provisioning/aio/test_async_provisioning_device_client.py @@ -381,7 +381,7 @@ def test_set_csr(self, mocker, csr_input): provisioning_pipeline = mocker.MagicMock() client = ProvisioningDeviceClient(provisioning_pipeline) - client.client_csr = csr_input + client.client_certificate_signing_request = csr_input assert client._client_csr == csr_input @pytest.mark.it("Gets the csr from the client csr property") @@ -396,5 +396,5 @@ def test_get_csr(self, mocker, csr_input): provisioning_pipeline = mocker.MagicMock() client = ProvisioningDeviceClient(provisioning_pipeline) - client.client_csr = csr_input - assert client.client_csr == csr_input + client.client_certificate_signing_request = csr_input + assert client.client_certificate_signing_request == csr_input diff --git a/tests/unit/provisioning/test_sync_provisioning_device_client.py b/tests/unit/provisioning/test_sync_provisioning_device_client.py index 0d252d1c2..3902a3273 100644 --- 
a/tests/unit/provisioning/test_sync_provisioning_device_client.py +++ b/tests/unit/provisioning/test_sync_provisioning_device_client.py @@ -369,7 +369,7 @@ def test_set_csr(self, mocker, csr_input): provisioning_pipeline = mocker.MagicMock() client = ProvisioningDeviceClient(provisioning_pipeline) - client.client_csr = csr_input + client.client_certificate_signing_request = csr_input assert client._client_csr == csr_input @pytest.mark.it("Gets the csr from the client csr property") @@ -384,5 +384,5 @@ def test_get_csr(self, mocker, csr_input): provisioning_pipeline = mocker.MagicMock() client = ProvisioningDeviceClient(provisioning_pipeline) - client.client_csr = csr_input - assert client.client_csr == csr_input + client.client_certificate_signing_request = csr_input + assert client.client_certificate_signing_request == csr_input diff --git a/vsts/dps-e2e-cert-mgmt.yaml b/vsts/dps-e2e-cert-mgmt.yaml index e953c592c..05bcc75c8 100644 --- a/vsts/dps-e2e-cert-mgmt.yaml +++ b/vsts/dps-e2e-cert-mgmt.yaml @@ -27,7 +27,7 @@ jobs: - script: | cd $(Build.SourcesDirectory)/azure_provisioning_e2e/tests - pytest test_*.py --junitxml=junit/dps-e2e-test-results.xml + pytest test_*.py --junitxml=junit/dps-cert-mgmt-e2e-test-results.xml displayName: 'Run Specified E2E Test with env variables' env: @@ -46,6 +46,10 @@ jobs: PROVISIONING_ROOT_PASSWORD: $(DPSCERT-MAC-ROOT-CERT-PASSWORD) PYTHONUNBUFFERED: True + # Extra variable manually created + DPS_CERT_ISSUANCE_SYM_KEY_AIO: $(DPSCERT-MAC-DPS-CLIENT-CERT-ISSUE-SYM-KEY-ASYNC) + DPS_CERT_ISSUANCE_SYM_KEY_SYNC: $(DPSCERT-MAC-DPS-CLIENT-CERT-ISSUE-SYM-KEY-SYNC) + - task: PublishTestResults@2 displayName: 'Publish Test Results' condition: always() From e209edfb89f29dc057300c54cd9d1307a83af658 Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 10:52:13 -0700 Subject: [PATCH 07/20] change yaml --- vsts/dps-e2e-cert-mgmt.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vsts/dps-e2e-cert-mgmt.yaml b/vsts/dps-e2e-cert-mgmt.yaml index 05bcc75c8..03bb32917 100644 --- a/vsts/dps-e2e-cert-mgmt.yaml +++ b/vsts/dps-e2e-cert-mgmt.yaml @@ -13,7 +13,7 @@ jobs: versionSpec: '3.7' architecture: 'x64' - - script: 'python env_setup.py --no_dev' + - script: 'python scripts/env_setup.py --no_dev' displayName: 'Prepare environment (install packages + dev dependencies + test dependencies + tools)' - script: 'python -m pip install cryptography' From 89eda56396fbc8252caf079ebc510f2bfa4b83d7 Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 10:53:32 -0700 Subject: [PATCH 08/20] change yaml --- vsts/dps-e2e-cert-mgmt.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vsts/dps-e2e-cert-mgmt.yaml b/vsts/dps-e2e-cert-mgmt.yaml index 03bb32917..b1277d669 100644 --- a/vsts/dps-e2e-cert-mgmt.yaml +++ b/vsts/dps-e2e-cert-mgmt.yaml @@ -13,7 +13,7 @@ jobs: versionSpec: '3.7' architecture: 'x64' - - script: 'python scripts/env_setup.py --no_dev' + - script: 'python scripts/env_setup.py --no_dev:qq' displayName: 'Prepare environment (install packages + dev dependencies + test dependencies + tools)' - script: 'python -m pip install cryptography' From defeeaaf8d3d9f83c9549b2e1cc9f1303552f975 Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 11:02:02 -0700 Subject: [PATCH 09/20] change yaml --- vsts/dps-e2e-cert-mgmt.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vsts/dps-e2e-cert-mgmt.yaml b/vsts/dps-e2e-cert-mgmt.yaml index b1277d669..03bb32917 100644 --- a/vsts/dps-e2e-cert-mgmt.yaml +++ 
b/vsts/dps-e2e-cert-mgmt.yaml @@ -13,7 +13,7 @@ jobs: versionSpec: '3.7' architecture: 'x64' - - script: 'python scripts/env_setup.py --no_dev:qq' + - script: 'python scripts/env_setup.py --no_dev' displayName: 'Prepare environment (install packages + dev dependencies + test dependencies + tools)' - script: 'python -m pip install cryptography' From e0695f19b24e6d40e99c939889ab3a32e849f6e3 Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 11:07:20 -0700 Subject: [PATCH 10/20] change e22 command --- vsts/dps-e2e-cert-mgmt.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vsts/dps-e2e-cert-mgmt.yaml b/vsts/dps-e2e-cert-mgmt.yaml index 03bb32917..2cd657cc5 100644 --- a/vsts/dps-e2e-cert-mgmt.yaml +++ b/vsts/dps-e2e-cert-mgmt.yaml @@ -26,7 +26,7 @@ jobs: displayName: 'create RANDFILE file (needed to store seed data) separately due to openssl version issues in the pipeline' - script: | - cd $(Build.SourcesDirectory)/azure_provisioning_e2e/tests + cd $(Build.SourcesDirectory)/tests/e2e/provisioning_e2e pytest test_*.py --junitxml=junit/dps-cert-mgmt-e2e-test-results.xml displayName: 'Run Specified E2E Test with env variables' From bbb7e0ab524b549eee2fdaa191cac2658679e0a4 Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 11:11:13 -0700 Subject: [PATCH 11/20] change e2e command --- vsts/dps-e2e-cert-mgmt.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vsts/dps-e2e-cert-mgmt.yaml b/vsts/dps-e2e-cert-mgmt.yaml index 2cd657cc5..d5dbc61a3 100644 --- a/vsts/dps-e2e-cert-mgmt.yaml +++ b/vsts/dps-e2e-cert-mgmt.yaml @@ -27,7 +27,7 @@ jobs: - script: | cd $(Build.SourcesDirectory)/tests/e2e/provisioning_e2e - pytest test_*.py --junitxml=junit/dps-cert-mgmt-e2e-test-results.xml + pytest --junitxml=junit/dps-cert-mgmt-e2e-test-results.xml displayName: 'Run Specified E2E Test with env variables' env: From 1a83b269653fe3fb9de955593a1851dbfb91f74d Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 11:57:04 -0700 Subject: [PATCH 12/20] change paths --- .../tests/test_async_symmetric_enrollments.py | 7 ++++--- .../tests/test_sync_symmetric_enrollments.py | 6 ++++-- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py index b295b3c95..2074c520f 100644 --- a/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py @@ -7,14 +7,15 @@ from provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device.aio import ProvisioningDeviceClient from azure.iot.device.aio import IoTHubDeviceClient -from azure.iot.device import X509 +from azure.iot.device.common import X509 from provisioningserviceclient import ProvisioningServiceClient, IndividualEnrollment from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy import pytest import logging import os import uuid -from scripts.create_x509_chain_crypto import ( +from . 
import path_adjust # noqa: F401 +from create_x509_chain_crypto import ( create_private_key, create_csr, ) @@ -95,7 +96,7 @@ async def test_device_register_with_device_id_for_a_symmetric_key_individual_enr ) @pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) async def test_device_register_with_client_cert_issuance_for_a_symmetric_key_individual_enrollment( - protocol + protocol, ): key_file = "key.pem" csr_file = "request.pem" diff --git a/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py index d8592bba7..f7c4ca639 100644 --- a/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py @@ -6,13 +6,15 @@ from azure_provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device import ProvisioningDeviceClient +from azure.iot.device.common import X509 from provisioningserviceclient import ProvisioningServiceClient, IndividualEnrollment from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy import pytest import logging import os import uuid -from scripts.create_x509_chain_crypto import ( +from . import path_adjust # noqa: F401 +from create_x509_chain_crypto import ( create_private_key, create_csr, ) @@ -87,7 +89,7 @@ def test_device_register_with_device_id_for_a_symmetric_key_individual_enrollmen ) @pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) def test_device_register_with_client_cert_issuance_for_a_symmetric_key_individual_enrollment( - protocol + protocol, ): key_file = "key.pem" csr_file = "request.pem" From 956db25d3a921f7457e9ea3d7f0552375fcaa21d Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 12:28:17 -0700 Subject: [PATCH 13/20] change imports --- .../provisioning_e2e/tests/test_sync_symmetric_enrollments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py index f7c4ca639..b5be87919 100644 --- a/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py @@ -4,7 +4,7 @@ # license information. # -------------------------------------------------------------------------- -from azure_provisioning_e2e.service_helper import Helper, connection_string_to_hostname +from provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device import ProvisioningDeviceClient from azure.iot.device.common import X509 from provisioningserviceclient import ProvisioningServiceClient, IndividualEnrollment From 1779f4462522f366895f9848ade5e2a797fbf623 Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 16:47:36 -0700 Subject: [PATCH 14/20] check what happens with sleep --- .../tests/test_async_certificate_enrollments.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py index 1b8ea4ab3..b13d9d527 100644 --- a/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - +import asyncio from provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device.aio import ProvisioningDeviceClient @@ -98,6 +98,7 @@ async def test_device_register_with_device_id_for_a_x509_individual_enrollment(p assert device_id != registration_id assert_device_provisioned(device_id=device_id, registration_result=registration_result) + await asyncio.sleep(10) device_registry_helper.try_delete_device(device_id) finally: service_client.delete_individual_enrollment_by_param(registration_id) From e93c0739e294242ef6f1f7c76f90cb65176e5122 Mon Sep 17 00:00:00 2001 From: Oliva Kar Date: Tue, 23 Aug 2022 17:27:25 -0700 Subject: [PATCH 15/20] sleep works --- .../provisioning_e2e/tests/test_async_certificate_enrollments.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py index b13d9d527..1f3e97c63 100644 --- a/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py @@ -98,6 +98,7 @@ async def test_device_register_with_device_id_for_a_x509_individual_enrollment(p assert device_id != registration_id assert_device_provisioned(device_id=device_id, registration_result=registration_result) + # TODO Remove weird fix : not sure why the delete of the device results in connection time out await asyncio.sleep(10) device_registry_helper.try_delete_device(device_id) finally: From 116ca5751203d6c0f0d9402891e9d8e65ebda4db Mon Sep 17 00:00:00 2001 From: olivakar Date: Tue, 6 Sep 2022 14:48:46 -0700 Subject: [PATCH 16/20] rebased on main --- .../aedes/node_modules/.bin/uuid | 15 + .../aedes/node_modules/.bin/uuid.cmd | 7 + .../aedes-packet/.github/workflows/ci.yml | 26 + .../aedes/node_modules/aedes-packet/LICENSE | 22 + .../aedes/node_modules/aedes-packet/README.md | 23 + .../node_modules/aedes-packet/package.json | 90 + .../node_modules/aedes-packet/packet.d.ts | 19 + .../aedes/node_modules/aedes-packet/packet.js | 21 + .../aedes/node_modules/aedes-packet/test.js | 125 + .../aedes-packet/test/types/index.ts | 21 + .../aedes-packet/test/types/tsconfig.json | 11 + .../.github/workflows/ci.yml | 50 + .../node_modules/aedes-persistence/LICENSE | 22 + .../node_modules/aedes-persistence/README.md | 304 ++ .../aedes-persistence/abstract.js | 1430 ++++++++ .../aedes-persistence/package.json | 120 + .../aedes-persistence/persistence.js | 356 ++ .../node_modules/aedes-persistence/test.js | 10 + .../aedes-persistence/types/index.d.ts | 270 ++ .../aedes-persistence/types/index.test-d.ts | 168 + .../.github/workflows/ci.yml | 43 + .../aedes-protocol-decoder/LICENSE | 21 + .../aedes-protocol-decoder/README.md | 61 + .../aedes-protocol-decoder/example.js | 174 + .../aedes-protocol-decoder/index.js | 5 + .../aedes-protocol-decoder/package.json | 117 + .../aedes-protocol-decoder/test.js | 482 +++ .../aedes-protocol-decoder/types/index.d.ts | 11 + .../aedes/node_modules/aedes/.coveralls.yml | 1 + .../aedes/.github/workflows/ci.yml | 51 + .../node_modules/aedes/CODE_OF_CONDUCT.md | 84 + .../aedes/node_modules/aedes/LICENSE | 22 + .../aedes/node_modules/aedes/README.md | 312 ++ .../aedes/node_modules/aedes/aedes.d.ts | 135 + .../aedes/node_modules/aedes/aedes.js | 351 ++ .../node_modules/aedes/benchmarks/bombing.js | 34 + .../aedes/benchmarks/bombingQoS1.js | 36 + 
.../node_modules/aedes/benchmarks/pingpong.js | 53 + .../node_modules/aedes/benchmarks/server.js | 25 + .../aedes/benchmarks/throughputCounter.js | 23 + .../aedes/benchmarks/throughputCounterQoS1.js | 33 + .../aedes/node_modules/aedes/docs/Aedes.md | 419 +++ .../aedes/node_modules/aedes/docs/Client.md | 156 + .../aedes/node_modules/aedes/docs/Examples.md | 67 + .../aedes/node_modules/aedes/example.js | 44 + .../aedes/examples/clusters/index.js | 75 + .../aedes/examples/clusters/package.json | 16 + .../aedes/examples/proxy/index.js | 175 + .../aedes/examples/proxy/package.json | 17 + .../aedes/node_modules/aedes/package.json | 164 + .../aedes/node_modules/aedes/test/auth.js | 1099 +++++++ .../aedes/node_modules/aedes/test/basic.js | 815 +++++ .../node_modules/aedes/test/client-pub-sub.js | 942 ++++++ .../aedes/test/close_socket_by_other_party.js | 175 + .../aedes/node_modules/aedes/test/connect.js | 1071 ++++++ .../aedes/node_modules/aedes/test/events.js | 182 ++ .../aedes/node_modules/aedes/test/helper.js | 119 + .../node_modules/aedes/test/keep-alive.js | 92 + .../aedes/node_modules/aedes/test/meta.js | 327 ++ .../node_modules/aedes/test/not-blocking.js | 209 ++ .../aedes/node_modules/aedes/test/qos1.js | 831 +++++ .../aedes/node_modules/aedes/test/qos2.js | 654 ++++ .../aedes/node_modules/aedes/test/regr-21.js | 34 + .../aedes/node_modules/aedes/test/retain.js | 646 ++++ .../aedes/node_modules/aedes/test/topics.js | 294 ++ .../node_modules/aedes/test/types/index.ts | 141 + .../aedes/test/types/tsconfig.json | 11 + .../aedes/node_modules/aedes/test/will.js | 527 +++ .../aedes/node_modules/base64-js/LICENSE | 21 + .../aedes/node_modules/base64-js/README.md | 34 + .../node_modules/base64-js/base64js.min.js | 1 + .../aedes/node_modules/base64-js/index.d.ts | 3 + .../aedes/node_modules/base64-js/index.js | 150 + .../aedes/node_modules/base64-js/package.json | 75 + .../aedes/node_modules/bl/.travis.yml | 17 + .../aedes/node_modules/bl/BufferList.js | 396 +++ .../aedes/node_modules/bl/LICENSE.md | 13 + .../aedes/node_modules/bl/README.md | 247 ++ .../aedes/node_modules/bl/bl.js | 84 + .../aedes/node_modules/bl/package.json | 65 + .../aedes/node_modules/bl/test/convert.js | 21 + .../aedes/node_modules/bl/test/indexOf.js | 492 +++ .../node_modules/bl/test/isBufferList.js | 32 + .../aedes/node_modules/bl/test/test.js | 869 +++++ .../aedes/node_modules/buffer/AUTHORS.md | 70 + .../aedes/node_modules/buffer/LICENSE | 21 + .../aedes/node_modules/buffer/README.md | 410 +++ .../aedes/node_modules/buffer/index.d.ts | 186 ++ .../aedes/node_modules/buffer/index.js | 1817 +++++++++++ .../aedes/node_modules/buffer/package.json | 127 + .../bulk-write-stream/.travis.yml | 5 + .../node_modules/bulk-write-stream/LICENSE | 21 + .../node_modules/bulk-write-stream/README.md | 43 + .../node_modules/bulk-write-stream/index.js | 70 + .../bulk-write-stream/package.json | 55 + .../node_modules/bulk-write-stream/test.js | 102 + .../aedes/node_modules/core-util-is/LICENSE | 19 + .../aedes/node_modules/core-util-is/README.md | 3 + .../node_modules/core-util-is/package.json | 68 + .../aedes/node_modules/debug/LICENSE | 20 + .../aedes/node_modules/debug/README.md | 481 +++ .../aedes/node_modules/debug/package.json | 101 + .../aedes/node_modules/debug/src/browser.js | 269 ++ .../aedes/node_modules/debug/src/common.js | 274 ++ .../aedes/node_modules/debug/src/index.js | 10 + .../aedes/node_modules/debug/src/node.js | 263 ++ .../aedes/node_modules/end-of-stream/LICENSE | 21 + .../node_modules/end-of-stream/README.md | 54 + 
.../aedes/node_modules/end-of-stream/index.js | 94 + .../node_modules/end-of-stream/package.json | 65 + .../aedes/node_modules/fastfall/.npmignore | 27 + .../aedes/node_modules/fastfall/.travis.yml | 16 + .../aedes/node_modules/fastfall/LICENSE | 22 + .../aedes/node_modules/fastfall/README.md | 232 ++ .../aedes/node_modules/fastfall/bench.js | 81 + .../aedes/node_modules/fastfall/example.js | 20 + .../aedes/node_modules/fastfall/fall.js | 150 + .../aedes/node_modules/fastfall/package.json | 82 + .../aedes/node_modules/fastfall/test.js | 193 ++ .../fastparallel/.github/dependabot.yml | 11 + .../fastparallel/.github/workflows/ci.yml | 27 + .../aedes/node_modules/fastparallel/LICENSE | 14 + .../aedes/node_modules/fastparallel/README.md | 124 + .../aedes/node_modules/fastparallel/bench.js | 120 + .../node_modules/fastparallel/bench_long.js | 41 + .../node_modules/fastparallel/example.js | 40 + .../node_modules/fastparallel/package.json | 77 + .../node_modules/fastparallel/parallel.js | 199 ++ .../aedes/node_modules/fastparallel/test.js | 465 +++ .../fastseries/.github/workflows/ci.yml | 43 + .../aedes/node_modules/fastseries/LICENSE | 14 + .../aedes/node_modules/fastseries/README.md | 115 + .../aedes/node_modules/fastseries/bench.js | 94 + .../aedes/node_modules/fastseries/example.js | 45 + .../node_modules/fastseries/package.json | 72 + .../aedes/node_modules/fastseries/series.js | 155 + .../aedes/node_modules/fastseries/test.js | 368 +++ .../aedes/node_modules/forwarded/HISTORY.md | 16 + .../aedes/node_modules/forwarded/LICENSE | 22 + .../aedes/node_modules/forwarded/README.md | 57 + .../aedes/node_modules/forwarded/index.js | 76 + .../aedes/node_modules/forwarded/package.json | 78 + .../aedes/node_modules/from2/.travis.yml | 8 + .../aedes/node_modules/from2/LICENSE.md | 21 + .../aedes/node_modules/from2/README.md | 70 + .../aedes/node_modules/from2/index.js | 103 + .../node_modules/readable-stream/.travis.yml | 34 + .../readable-stream/CONTRIBUTING.md | 38 + .../readable-stream/GOVERNANCE.md | 136 + .../node_modules/readable-stream/LICENSE | 47 + .../node_modules/readable-stream/README.md | 58 + .../doc/wg-meetings/2015-01-30.md | 60 + .../readable-stream/duplex-browser.js | 1 + .../node_modules/readable-stream/duplex.js | 1 + .../node_modules/readable-stream/package.json | 81 + .../readable-stream/passthrough.js | 1 + .../readable-stream/readable-browser.js | 7 + .../node_modules/readable-stream/readable.js | 19 + .../node_modules/readable-stream/transform.js | 1 + .../readable-stream/writable-browser.js | 1 + .../node_modules/readable-stream/writable.js | 8 + .../from2/node_modules/safe-buffer/LICENSE | 21 + .../from2/node_modules/safe-buffer/README.md | 584 ++++ .../from2/node_modules/safe-buffer/index.d.ts | 187 ++ .../from2/node_modules/safe-buffer/index.js | 62 + .../node_modules/safe-buffer/package.json | 63 + .../node_modules/string_decoder/.travis.yml | 50 + .../from2/node_modules/string_decoder/LICENSE | 48 + .../node_modules/string_decoder/README.md | 47 + .../node_modules/string_decoder/package.json | 59 + .../aedes/node_modules/from2/package.json | 75 + .../aedes/node_modules/from2/test.js | 123 + .../aedes/node_modules/ieee754/LICENSE | 11 + .../aedes/node_modules/ieee754/README.md | 51 + .../aedes/node_modules/ieee754/index.d.ts | 10 + .../aedes/node_modules/ieee754/index.js | 85 + .../aedes/node_modules/ieee754/package.json | 84 + .../aedes/node_modules/inherits/LICENSE | 16 + .../aedes/node_modules/inherits/README.md | 42 + .../aedes/node_modules/inherits/inherits.js | 9 + 
.../node_modules/inherits/inherits_browser.js | 27 + .../aedes/node_modules/inherits/package.json | 65 + .../aedes/node_modules/isarray/.npmignore | 1 + .../aedes/node_modules/isarray/.travis.yml | 4 + .../aedes/node_modules/isarray/Makefile | 6 + .../aedes/node_modules/isarray/README.md | 60 + .../aedes/node_modules/isarray/component.json | 19 + .../aedes/node_modules/isarray/index.js | 5 + .../aedes/node_modules/isarray/package.json | 73 + .../aedes/node_modules/isarray/test.js | 20 + .../mqemitter/.github/dependabot.yml | 11 + .../mqemitter/.github/workflows/ci.yml | 27 + .../aedes/node_modules/mqemitter/.nycrc | 11 + .../node_modules/mqemitter/CODE_OF_CONDUCT.md | 84 + .../aedes/node_modules/mqemitter/LICENSE | 15 + .../aedes/node_modules/mqemitter/README.md | 190 ++ .../node_modules/mqemitter/abstractTest.js | 544 ++++ .../aedes/node_modules/mqemitter/bench.js | 40 + .../aedes/node_modules/mqemitter/example.js | 61 + .../aedes/node_modules/mqemitter/mqemitter.js | 128 + .../aedes/node_modules/mqemitter/package.json | 101 + .../aedes/node_modules/mqemitter/test/test.js | 130 + .../mqemitter/test/types/index.ts | 46 + .../mqemitter/test/types/tsconfig.json | 11 + .../mqemitter/types/mqemitter.d.ts | 24 + .../mqemitter/types/mqemitter.test-d.ts | 32 + .../mqtt-packet/.github/workflows/ci.yml | 27 + .../node_modules/mqtt-packet/CONTRIBUTING.md | 27 + .../aedes/node_modules/mqtt-packet/LICENSE.md | 13 + .../aedes/node_modules/mqtt-packet/README.md | 491 +++ .../mqtt-packet/benchmarks/generate.js | 26 + .../mqtt-packet/benchmarks/generateNet.js | 51 + .../mqtt-packet/benchmarks/parse.js | 20 + .../mqtt-packet/benchmarks/writeToStream.js | 49 + .../node_modules/mqtt-packet/constants.js | 187 ++ .../node_modules/mqtt-packet/generate.js | 52 + .../aedes/node_modules/mqtt-packet/mqtt.js | 3 + .../aedes/node_modules/mqtt-packet/numbers.js | 58 + .../node_modules/mqtt-packet/package.json | 91 + .../aedes/node_modules/mqtt-packet/packet.js | 13 + .../aedes/node_modules/mqtt-packet/parser.js | 716 ++++ .../aedes/node_modules/mqtt-packet/test.js | 2866 +++++++++++++++++ .../node_modules/mqtt-packet/testRandom.js | 86 + .../node_modules/mqtt-packet/types/index.d.ts | 255 ++ .../node_modules/mqtt-packet/writeToStream.js | 1117 +++++++ .../aedes/node_modules/ms/index.js | 162 + .../aedes/node_modules/ms/license.md | 21 + .../aedes/node_modules/ms/package.json | 69 + .../aedes/node_modules/ms/readme.md | 60 + .../aedes/node_modules/nanoid/CHANGELOG.md | 156 + .../aedes/node_modules/nanoid/LICENSE | 20 + .../aedes/node_modules/nanoid/README.md | 397 +++ .../nanoid/async/format.browser.js | 43 + .../aedes/node_modules/nanoid/async/format.js | 72 + .../node_modules/nanoid/async/generate.js | 24 + .../nanoid/async/index.browser.js | 37 + .../aedes/node_modules/nanoid/async/index.js | 37 + .../nanoid/async/random.browser.js | 8 + .../aedes/node_modules/nanoid/async/random.js | 31 + .../node_modules/nanoid/async/random.rn.js | 14 + .../node_modules/nanoid/format.browser.js | 40 + .../aedes/node_modules/nanoid/format.js | 67 + .../aedes/node_modules/nanoid/generate.js | 22 + .../node_modules/nanoid/index.browser.js | 54 + .../aedes/node_modules/nanoid/index.js | 34 + .../nanoid/non-secure/generate.js | 26 + .../node_modules/nanoid/non-secure/index.js | 42 + .../aedes/node_modules/nanoid/package.json | 68 + .../node_modules/nanoid/random.browser.js | 5 + .../aedes/node_modules/nanoid/random.js | 19 + .../aedes/node_modules/nanoid/url.js | 27 + .../aedes/node_modules/once/LICENSE | 15 + 
.../aedes/node_modules/once/README.md | 79 + .../aedes/node_modules/once/once.js | 42 + .../aedes/node_modules/once/package.json | 66 + .../process-nextick-args/index.js | 45 + .../process-nextick-args/license.md | 19 + .../process-nextick-args/package.json | 51 + .../process-nextick-args/readme.md | 18 + .../node_modules/proxy-protocol-js/LICENSE | 8 + .../node_modules/proxy-protocol-js/README.md | 125 + .../proxy-protocol-js/example/building/v1.js | 13 + .../proxy-protocol-js/example/building/v2.js | 16 + .../proxy-protocol-js/example/parsing/v1.js | 11 + .../proxy-protocol-js/example/parsing/v2.js | 16 + .../proxy-protocol-js/package.json | 87 + .../aedes/node_modules/qlobber/Gruntfile.js | 74 + .../aedes/node_modules/qlobber/LICENCE | 19 + .../aedes/node_modules/qlobber/README.md | 516 +++ .../node_modules/qlobber/aedes/qlobber-sub.js | 140 + .../aedes/node_modules/qlobber/index.js | 3 + .../aedes/node_modules/qlobber/package.json | 79 + .../readable-stream/CONTRIBUTING.md | 38 + .../readable-stream/GOVERNANCE.md | 136 + .../node_modules/readable-stream/LICENSE | 47 + .../node_modules/readable-stream/README.md | 106 + .../readable-stream/errors-browser.js | 127 + .../node_modules/readable-stream/errors.js | 116 + .../readable-stream/experimentalWarning.js | 17 + .../node_modules/readable-stream/package.json | 99 + .../readable-stream/readable-browser.js | 9 + .../node_modules/readable-stream/readable.js | 16 + .../aedes/node_modules/retimer/.travis.yml | 6 + .../aedes/node_modules/retimer/LICENSE | 22 + .../aedes/node_modules/retimer/README.md | 61 + .../aedes/node_modules/retimer/bench.js | 65 + .../aedes/node_modules/retimer/package.json | 68 + .../aedes/node_modules/retimer/retimer.js | 63 + .../aedes/node_modules/retimer/test.js | 100 + .../node_modules/retimer/time-browser.js | 5 + .../aedes/node_modules/retimer/time.js | 6 + .../aedes/node_modules/reusify/.coveralls.yml | 1 + .../aedes/node_modules/reusify/.travis.yml | 28 + .../aedes/node_modules/reusify/LICENSE | 22 + .../aedes/node_modules/reusify/README.md | 145 + .../benchmarks/createNoCodeFunction.js | 30 + .../node_modules/reusify/benchmarks/fib.js | 13 + .../reusify/benchmarks/reuseNoCodeFunction.js | 38 + .../aedes/node_modules/reusify/package.json | 75 + .../aedes/node_modules/reusify/reusify.js | 33 + .../aedes/node_modules/reusify/test.js | 66 + .../aedes/node_modules/safe-buffer/LICENSE | 21 + .../aedes/node_modules/safe-buffer/README.md | 584 ++++ .../aedes/node_modules/safe-buffer/index.d.ts | 187 ++ .../aedes/node_modules/safe-buffer/index.js | 65 + .../node_modules/safe-buffer/package.json | 76 + .../aedes/node_modules/shortid/CHANGELOG.md | 32 + .../aedes/node_modules/shortid/LICENSE | 34 + .../aedes/node_modules/shortid/README.md | 252 ++ .../aedes/node_modules/shortid/index.js | 2 + .../aedes/node_modules/shortid/package.json | 62 + .../aedes/node_modules/string_decoder/LICENSE | 48 + .../node_modules/string_decoder/README.md | 47 + .../node_modules/string_decoder/package.json | 62 + .../node_modules/util-deprecate/History.md | 16 + .../aedes/node_modules/util-deprecate/LICENSE | 24 + .../node_modules/util-deprecate/README.md | 53 + .../node_modules/util-deprecate/browser.js | 67 + .../aedes/node_modules/util-deprecate/node.js | 6 + .../node_modules/util-deprecate/package.json | 57 + .../aedes/node_modules/uuid/CHANGELOG.md | 229 ++ .../aedes/node_modules/uuid/CONTRIBUTING.md | 18 + .../aedes/node_modules/uuid/LICENSE.md | 9 + .../aedes/node_modules/uuid/README.md | 505 +++ 
.../aedes/node_modules/uuid/package.json | 164 + .../aedes/node_modules/uuid/wrapper.mjs | 10 + .../aedes/node_modules/wrappy/LICENSE | 15 + .../aedes/node_modules/wrappy/README.md | 36 + .../aedes/node_modules/wrappy/package.json | 58 + .../aedes/node_modules/wrappy/wrappy.js | 33 + .../aedes/node_modules/xtend/.jshintrc | 30 + .../aedes/node_modules/xtend/LICENSE | 20 + .../aedes/node_modules/xtend/README.md | 32 + .../aedes/node_modules/xtend/immutable.js | 19 + .../aedes/node_modules/xtend/mutable.js | 17 + .../aedes/node_modules/xtend/package.json | 86 + .../aedes/node_modules/xtend/test.js | 103 + .../meantimerecovery/aedes/package-lock.json | 305 ++ 338 files changed, 42915 insertions(+) create mode 100644 sdklab/meantimerecovery/aedes/node_modules/.bin/uuid create mode 100644 sdklab/meantimerecovery/aedes/node_modules/.bin/uuid.cmd create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/.github/workflows/ci.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/packet.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/packet.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test/types/index.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test/types/tsconfig.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/.github/workflows/ci.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/abstract.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/persistence.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/types/index.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/types/index.test-d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/.github/workflows/ci.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/example.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/types/index.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/.coveralls.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/.github/workflows/ci.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/CODE_OF_CONDUCT.md 
create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/aedes.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/aedes.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/bombing.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/bombingQoS1.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/pingpong.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/server.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/throughputCounter.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/throughputCounterQoS1.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Aedes.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Client.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Examples.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/example.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/examples/clusters/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/examples/clusters/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/examples/proxy/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/examples/proxy/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/auth.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/basic.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/client-pub-sub.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/close_socket_by_other_party.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/connect.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/events.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/helper.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/keep-alive.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/meta.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/not-blocking.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/qos1.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/qos2.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/regr-21.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/retain.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/topics.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/types/index.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/types/tsconfig.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/aedes/test/will.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/base64-js/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/base64-js/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/base64-js/base64js.min.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/base64-js/index.d.ts create mode 
100644 sdklab/meantimerecovery/aedes/node_modules/base64-js/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/base64-js/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/BufferList.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/LICENSE.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/bl.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/test/convert.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/test/indexOf.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/test/isBufferList.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bl/test/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/buffer/AUTHORS.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/buffer/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/buffer/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/buffer/index.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/buffer/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/buffer/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/core-util-is/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/core-util-is/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/core-util-is/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/debug/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/debug/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/debug/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/debug/src/browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/debug/src/common.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/debug/src/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/debug/src/node.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/end-of-stream/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/end-of-stream/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/end-of-stream/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/end-of-stream/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastfall/.npmignore create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastfall/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastfall/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastfall/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastfall/bench.js create mode 100644 
sdklab/meantimerecovery/aedes/node_modules/fastfall/example.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastfall/fall.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastfall/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastfall/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/.github/dependabot.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/.github/workflows/ci.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/bench.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/bench_long.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/example.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/parallel.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastparallel/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastseries/.github/workflows/ci.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastseries/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastseries/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastseries/bench.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastseries/example.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastseries/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastseries/series.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/fastseries/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/forwarded/HISTORY.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/forwarded/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/forwarded/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/forwarded/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/forwarded/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/LICENSE.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/CONTRIBUTING.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/GOVERNANCE.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/duplex-browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/duplex.js create mode 100644 
sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/passthrough.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/readable-browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/readable.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/transform.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/writable-browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/writable.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/index.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/from2/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ieee754/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ieee754/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ieee754/index.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ieee754/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ieee754/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/inherits/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/inherits/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/inherits/inherits.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/inherits/inherits_browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/inherits/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/isarray/.npmignore create mode 100644 sdklab/meantimerecovery/aedes/node_modules/isarray/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/isarray/Makefile create mode 100644 sdklab/meantimerecovery/aedes/node_modules/isarray/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/isarray/component.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/isarray/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/isarray/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/isarray/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/.github/dependabot.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/.github/workflows/ci.yml create mode 100644 
sdklab/meantimerecovery/aedes/node_modules/mqemitter/.nycrc create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/CODE_OF_CONDUCT.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/abstractTest.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/bench.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/example.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/mqemitter.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/types/index.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/types/tsconfig.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/types/mqemitter.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqemitter/types/mqemitter.test-d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/.github/workflows/ci.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/CONTRIBUTING.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/LICENSE.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/generate.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/generateNet.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/parse.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/writeToStream.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/constants.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/generate.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/mqtt.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/numbers.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/packet.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/parser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/testRandom.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/types/index.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/writeToStream.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ms/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ms/license.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ms/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/ms/readme.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/CHANGELOG.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/async/format.browser.js create mode 100644 
sdklab/meantimerecovery/aedes/node_modules/nanoid/async/format.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/async/generate.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/async/index.browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/async/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.rn.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/format.browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/format.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/generate.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/index.browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/non-secure/generate.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/non-secure/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/random.browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/random.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/nanoid/url.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/once/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/once/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/once/once.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/once/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/license.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/readme.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/building/v1.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/building/v2.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/parsing/v1.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/parsing/v2.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/qlobber/Gruntfile.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/qlobber/LICENCE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/qlobber/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/qlobber/aedes/qlobber-sub.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/qlobber/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/qlobber/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/CONTRIBUTING.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/GOVERNANCE.md 
create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/errors-browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/errors.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/experimentalWarning.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/readable-browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/readable-stream/readable.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/bench.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/retimer.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/time-browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/retimer/time.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/.coveralls.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/.travis.yml create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/createNoCodeFunction.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/fib.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/reuseNoCodeFunction.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/reusify.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/reusify/test.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/safe-buffer/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/safe-buffer/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/safe-buffer/index.d.ts create mode 100644 sdklab/meantimerecovery/aedes/node_modules/safe-buffer/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/safe-buffer/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/shortid/CHANGELOG.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/shortid/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/shortid/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/shortid/index.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/shortid/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/string_decoder/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/string_decoder/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/string_decoder/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/util-deprecate/History.md create mode 100644 
sdklab/meantimerecovery/aedes/node_modules/util-deprecate/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/util-deprecate/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/util-deprecate/browser.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/util-deprecate/node.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/util-deprecate/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/uuid/CHANGELOG.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/uuid/CONTRIBUTING.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/uuid/LICENSE.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/uuid/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/uuid/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/uuid/wrapper.mjs create mode 100644 sdklab/meantimerecovery/aedes/node_modules/wrappy/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/wrappy/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/wrappy/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/wrappy/wrappy.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/xtend/.jshintrc create mode 100644 sdklab/meantimerecovery/aedes/node_modules/xtend/LICENSE create mode 100644 sdklab/meantimerecovery/aedes/node_modules/xtend/README.md create mode 100644 sdklab/meantimerecovery/aedes/node_modules/xtend/immutable.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/xtend/mutable.js create mode 100644 sdklab/meantimerecovery/aedes/node_modules/xtend/package.json create mode 100644 sdklab/meantimerecovery/aedes/node_modules/xtend/test.js create mode 100644 sdklab/meantimerecovery/aedes/package-lock.json diff --git a/sdklab/meantimerecovery/aedes/node_modules/.bin/uuid b/sdklab/meantimerecovery/aedes/node_modules/.bin/uuid new file mode 100644 index 000000000..feb22b596 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/.bin/uuid @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../uuid/dist/bin/uuid" "$@" + ret=$? +else + node "$basedir/../uuid/dist/bin/uuid" "$@" + ret=$? 
+fi +exit $ret diff --git a/sdklab/meantimerecovery/aedes/node_modules/.bin/uuid.cmd b/sdklab/meantimerecovery/aedes/node_modules/.bin/uuid.cmd new file mode 100644 index 000000000..f549fa267 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/.bin/uuid.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\uuid\dist\bin\uuid" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\uuid\dist\bin\uuid" %* +) \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/.github/workflows/ci.yml b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/.github/workflows/ci.yml new file mode 100644 index 000000000..3b30be3e4 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/.github/workflows/ci.yml @@ -0,0 +1,26 @@ +name: ci + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: [8.x, 10.x, 12.x, 13.x] + + steps: + - uses: actions/checkout@v1 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test:ci diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/LICENSE new file mode 100644 index 000000000..865027349 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) Aedes Contributors +Copyright (c) 2015-2020 Matteo Collina, http://matteocollina.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/README.md b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/README.md new file mode 100644 index 000000000..cad12e672 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/README.md @@ -0,0 +1,23 @@ + +# aedes-packet + +![ci](https://github.com/moscajs/aedes-packet/workflows/ci/badge.svg) +[![Known Vulnerabilities](https://snyk.io/test/github/moscajs/aedes-packet/badge.svg)](https://snyk.io/test/github/moscajs/aedes-packet) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/)\ +[![Dependencies Status](https://david-dm.org/moscajs/aedes-packet/status.svg)](https://david-dm.org/moscajs/aedes-packet) +[![devDependencies Status](https://david-dm.org/moscajs/aedes-packet/dev-status.svg)](https://david-dm.org/moscajs/aedes-packet?type=dev)\ +[![NPM version](https://img.shields.io/npm/v/aedes-packet.svg?style=flat)](https://www.npmjs.com/package/aedes-packet) +[![NPM downloads](https://img.shields.io/npm/dm/aedes-packet.svg?style=flat)](https://www.npmjs.com/package/aedes-packet) + +Basic data structure for packets in [Aedes](https://www.npmjs.com/aedes), packaged up for perf and reusability between modules. + +See [./packet.js](./packet.js) and [./test.js](./test.js) for +documentation and usage. + +## Collaborators + +* [__Gnought__](https://github.com/gnought) + +## License + +MIT diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/package.json b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/package.json new file mode 100644 index 000000000..7e7a9164a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/package.json @@ -0,0 +1,90 @@ +{ + "_from": "aedes-packet@^2.3.1", + "_id": "aedes-packet@2.3.1", + "_inBundle": false, + "_integrity": "sha512-LqBd57uc2rui2RbjycW17dylglejG26mM4ewVXGNDnVp/SUHFVEgm7d1HTmYrnSkSCNoHti042qgcTwv/F+BtQ==", + "_location": "/aedes-packet", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aedes-packet@^2.3.1", + "name": "aedes-packet", + "escapedName": "aedes-packet", + "rawSpec": "^2.3.1", + "saveSpec": null, + "fetchSpec": "^2.3.1" + }, + "_requiredBy": [ + "/aedes", + "/aedes-persistence" + ], + "_resolved": "https://registry.npmjs.org/aedes-packet/-/aedes-packet-2.3.1.tgz", + "_shasum": "87916fb25618c63a286ee7cee260062fb7471d6d", + "_spec": "aedes-packet@^2.3.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": "https://github.com/moscajs/aedes-packet/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Gnought", + "url": "https://github.com/gnought" + } + ], + "dependencies": { + "mqtt-packet": "^6.3.0" + }, + "deprecated": false, + "description": "Basic data structure for packets in Aedes ", + "devDependencies": { + "@types/node": "^12.12.27", + "@typescript-eslint/eslint-plugin": "^2.19.0", + "@typescript-eslint/parser": "^2.19.0", + "faucet": "0.0.1", + "license-checker": "^25.0.1", + "markdownlint-cli": "^0.22.0", + "pre-commit": "^1.2.2", + "snazzy": "^8.0.0", + "standard": "^14.3.1", + "tap": "^14.10.6" + }, + "engines": { + "node": ">=8" + }, + "homepage": "https://github.com/moscajs/aedes-packet#readme", + 
"keywords": [ + "mqtt", + "packet", + "broker", + "aedes" + ], + "license": "MIT", + "main": "packet.js", + "name": "aedes-packet", + "pre-commit": [ + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/moscajs/aedes-packet.git" + }, + "scripts": { + "license-checker": "license-checker --production --onlyAllow='MIT;ISC;BSD-3-Clause;BSD-2-Clause'", + "lint": "npm run lint:standard && npm run lint:typescript && npm run lint:markdown", + "lint:markdown": "markdownlint README.md", + "lint:standard": "standard --verbose | snazzy", + "lint:typescript": "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin test/types/*.ts packet.d.ts", + "test": "npm run lint && npm run unit && npm run typescript", + "test:ci": "npm run test", + "typescript": "tsc --project ./test/types/tsconfig.json", + "unit": "tap --no-esm -J test.js" + }, + "types": "packet.d.ts", + "version": "2.3.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/packet.d.ts b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/packet.d.ts new file mode 100644 index 000000000..a5f5b2de3 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/packet.d.ts @@ -0,0 +1,19 @@ +/* eslint no-unused-vars: 0 */ +/* eslint no-undef: 0 */ +/* eslint space-infix-ops: 0 */ + +/// + +import { Packet as IPacket } from 'mqtt-packet' + +declare namespace aedes { + + type AedesPacket = IPacket & { + brokerId: string + brokerCounter: number + } + + function Packet(object?: AedesPacket) : aedes.AedesPacket +} + +export = aedes diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/packet.js b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/packet.js new file mode 100644 index 000000000..219b7ca4d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/packet.js @@ -0,0 +1,21 @@ +'use strict' + +function Packet (original, broker) { + this.cmd = original.cmd || 'publish' + this.brokerId = original.brokerId || (broker && broker.id) + this.brokerCounter = original.brokerCounter || (broker ? (++broker.counter) : 0) + this.topic = original.topic + this.payload = original.payload || Buffer.alloc(0) + this.qos = original.qos || 0 + this.retain = original.retain || false + this.dup = original.dup || false + // [MQTT-2.3.1-5] + if (this.qos > 0 || this.cmd !== 'publish') { + // [MQTT-2.3.1-1] + // This is packet identifier uniquely identifies a message as it flows between + // client and broker. 
It is only relevant for QoS levels greater than 0 + this.messageId = undefined + } +} + +module.exports = Packet diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test.js b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test.js new file mode 100644 index 000000000..a15c63b7e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test.js @@ -0,0 +1,125 @@ +'use strict' + +var { test } = require('tap') +var Packet = require('./') + +test('Packet defaults - PUBLISH, QoS 0', function (t) { + var instance = new Packet({}) + t.equal(instance.cmd, 'publish') + t.equal(instance.brokerId, undefined) + t.equal(instance.brokerCounter, 0) + t.equal(instance.topic, undefined) + t.deepEqual(instance.payload, Buffer.alloc(0)) + t.equal(instance.qos, 0) + t.equal(instance.dup, false) + t.equal(instance.retain, false) + t.notOk(Object.prototype.hasOwnProperty.call(instance, 'messageId')) + t.end() +}) + +test('Packet defaults - PUBREL, QoS 0', function (t) { + var instance = new Packet({ cmd: 'pubrel' }) + t.equal(instance.cmd, 'pubrel') + t.equal(instance.brokerId, undefined) + t.equal(instance.brokerCounter, 0) + t.equal(instance.topic, undefined) + t.deepEqual(instance.payload, Buffer.alloc(0)) + t.equal(instance.qos, 0) + t.equal(instance.dup, false) + t.equal(instance.retain, false) + t.ok(Object.prototype.hasOwnProperty.call(instance, 'messageId')) + t.equal(instance.messageId, undefined) + t.end() +}) + +test('Packet defaults - PUBLISH, QoS 1', function (t) { + var instance = new Packet({ qos: 1 }) + t.equal(instance.cmd, 'publish') + t.equal(instance.brokerId, undefined) + t.equal(instance.brokerCounter, 0) + t.equal(instance.topic, undefined) + t.deepEqual(instance.payload, Buffer.alloc(0)) + t.equal(instance.qos, 1) + t.equal(instance.dup, false) + t.equal(instance.retain, false) + t.ok(Object.prototype.hasOwnProperty.call(instance, 'messageId')) + t.equal(instance.messageId, undefined) + t.end() +}) + +test('Packet defaults - PUBLISH, dup=true', function (t) { + var instance = new Packet({ dup: true }) + t.equal(instance.cmd, 'publish') + t.equal(instance.brokerId, undefined) + t.equal(instance.brokerCounter, 0) + t.equal(instance.topic, undefined) + t.deepEqual(instance.payload, Buffer.alloc(0)) + t.equal(instance.qos, 0) + t.equal(instance.dup, true) + t.equal(instance.retain, false) + t.equal(instance.messageId, undefined) + t.end() +}) + +test('Packet copies over most data', function (t) { + var original = { + cmd: 'pubrel', + brokerId: 'A56c', + brokerCounter: 42, + topic: 'hello', + payload: 'world', + qos: 2, + dup: true, + retain: true, + messageId: 24 + } + var instance = new Packet(original) + var expected = { + cmd: 'pubrel', + brokerId: 'A56c', + brokerCounter: 42, + topic: 'hello', + payload: 'world', + qos: 2, + dup: true, + retain: true + } + + t.ok(Object.prototype.hasOwnProperty.call(instance, 'messageId')) + t.equal(instance.messageId, undefined) + delete instance.messageId + t.deepEqual(instance, expected) + t.end() +}) + +test('Packet fills in broker data', function (t) { + var broker = { + id: 'A56c', + counter: 41 + } + var original = { + cmd: 'pubrel', + topic: 'hello', + payload: 'world', + qos: 2, + retain: true, + messageId: 24 + } + var instance = new Packet(original, broker) + var expected = { + cmd: 'pubrel', + brokerId: 'A56c', + brokerCounter: 42, + topic: 'hello', + payload: 'world', + qos: 2, + dup: false, + retain: true + } + + t.ok(Object.prototype.hasOwnProperty.call(instance, 'messageId')) + 
t.equal(instance.messageId, undefined) + delete instance.messageId + t.deepEqual(instance, expected) + t.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test/types/index.ts b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test/types/index.ts new file mode 100644 index 000000000..ac736ca03 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test/types/index.ts @@ -0,0 +1,21 @@ +/* eslint no-unused-vars: 0 */ +/* eslint no-undef: 0 */ + +import { Packet } from '../../packet' + +var p = Packet() +p = Packet({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + dup: false, + retain: false, + brokerId: 'afds8f', + brokerCounter: 10 +}) +p = Packet({ + cmd: 'pingresp', + brokerId: 'ab7d9', + brokerCounter: 3 +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test/types/tsconfig.json b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test/types/tsconfig.json new file mode 100644 index 000000000..791484233 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-packet/test/types/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es6", + "noEmit": true, + "strict": true + }, + "files": [ + "./index.ts" + ] +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/.github/workflows/ci.yml b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/.github/workflows/ci.yml new file mode 100644 index 000000000..217d75719 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/.github/workflows/ci.yml @@ -0,0 +1,50 @@ +name: ci + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: [10.x, 12.x, 13.x] + + steps: + - uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Check licenses + run: | + npm run license-checker + + - name: Run tests + run: | + npm run test:ci + + - name: Coveralls Parallel + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.github_token }} + parallel: true + + coverage: + needs: test + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + parallel-finished: true diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/LICENSE new file mode 100644 index 000000000..865027349 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) Aedes Contributors +Copyright (c) 2015-2020 Matteo Collina, http://matteocollina.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/README.md b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/README.md new file mode 100644 index 000000000..104fb3992 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/README.md @@ -0,0 +1,304 @@ +# aedes-persistence +![](https://github.com/moscajs/aedes-persistence/workflows/ci/badge.svg) +[![Dependencies Status](https://david-dm.org/moscajs/aedes-persistence/status.svg)](https://david-dm.org/moscajs/aedes-persistence) +[![devDependencies Status](https://david-dm.org/moscajs/aedes-persistence/dev-status.svg)](https://david-dm.org/moscajs/aedes-persistence?type=dev) +
+[![Known Vulnerabilities](https://snyk.io/test/github/moscajs/aedes-persistence/badge.svg)](https://snyk.io/test/github/moscajs/aedes-persistence) +[![Coverage Status](https://coveralls.io/repos/moscajs/aedes-persistence/badge.svg?branch=master&service=github)](https://coveralls.io/github/moscajs/aedes-persistence?branch=master) +[![NPM version](https://img.shields.io/npm/v/aedes-persistence.svg?style=flat)](https://www.npmjs.com/package/aedes-persistence) +[![NPM downloads](https://img.shields.io/npm/dm/aedes-persistence.svg?style=flat)](https://www.npmjs.com/package/aedes-persistence) + +The spec for an [Aedes](http://npm.im/aedes) persistence, with abstract +tests and a fast in-memory implementation. + +* [Install](#install) +* [API](#api) +* [Implement another persistence](#implement) +* [License](#license) + + +## Install +To install aedes-persistence, simply use npm: + +``` +npm install aedes-persistence --save +``` + + +## API + + * persistence() + * instance.storeRetained() + * instance.createRetainedStream() + * instance.createRetainedStreamCombi() + * instance.addSubscriptions() + * instance.removeSubscriptions() + * instance.subscriptionsByClient() + * instance.countOffline() + * instance.subscriptionsByTopic() + * instance.cleanSubscriptions() + * instance.outgoingEnqueue() + * instance.outgoingEnqueueCombi() + * instance.outgoingUpdate() + * instance.outgoingClearMessageId() + * instance.outgoingStream() + * instance.incomingStorePacket() + * instance.incomingGetPacket() + * instance.incomingDelPacket() + * instance.putWill() + * instance.getWill() + * instance.delWill() + * instance.streamWill() + * instance.getClientList() + * instance.destroy() + +------------------------------------------------------- + +### persistence([opts]) + +Creates a new instance of a persistence, that is already ready to +operate. The default implementation is in-memory only. + +------------------------------------------------------- + +### instance.storeRetained(packet, callback(err)) + +Store a retained message, calls the callback when it was saved. + +------------------------------------------------------- + +### instance.createRetainedStream(pattern) + +Return a stream that will load all retained messages matching the given +pattern (according to the MQTT spec) asynchronously. Deprecated. + +------------------------------------------------------- + +### instance.createRetainedStreamCombi(patterns) + +Return a stream that will load all retained messages matching given +patterns (according to the MQTT spec) asynchronously. + +------------------------------------------------------- + +### instance.addSubscriptions(client, subscriptions, callback(err, client)) + +Add the given offline subscriptions for the given +[Client](https://github.com/moscajs/aedes#client). The client __must__ +have connected with `clean: false`, as this is not checked here. +This is called when a client issue a SUBSCRIBE packet. + +`subscriptions` is in the same format of the `subscribe` property in the +[SUBSCRIBE](https://github.com/mqttjs/mqtt-packet#subscribe) packet: + +```js +[{ + topic: 'hello/world', + qos: 1, +}, { + topic: 'hello/#', + qos: 2, +}] +``` + +------------------------------------------------------- + +### instance.removeSubscriptions(client, subscriptions, callback(err, client)) + +The inverse of [`addSubscriptions`](#addSubscriptions) but subscriptions is an array of topic names. 
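+
+The add/remove pair can be exercised together; the following is a minimal sketch against the default in-memory implementation (the `client` object and topic names are illustrative):
+
+```js
+var persistence = require('aedes-persistence')
+
+var instance = persistence() // default in-memory implementation
+var client = { id: 'client-1' }
+
+// store two offline subscriptions for the client...
+instance.addSubscriptions(client, [
+  { topic: 'hello/world', qos: 1 },
+  { topic: 'hello/#', qos: 2 }
+], function (err) {
+  if (err) throw err
+  // ...then drop one again; note removeSubscriptions takes topic names only
+  instance.removeSubscriptions(client, ['hello/world'], function (err) {
+    if (err) throw err
+  })
+})
+```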
+ +------------------------------------------------------- + +### instance.subscriptionsByClient(client, callback(err, subscriptions, client)) + +Returns all the offline subscriptions for the given client. Called when +a client with `clean: false` connects to restore its subscriptions. + +`subscriptions` is in the same format of the `subscribe` property in the +[SUBSCRIBE](https://github.com/mqttjs/mqtt-packet#subscribe) packet: + +```js +[{ + topic: 'hello/world', + qos: 1, +}, { + topic: 'hello/#', + qos: 2, +}] +``` + +------------------------------------------------------- + +### instance.countOffline(cb(err, numOfSubscriptions, numOfClients)) + +Returns the number of offline subscriptions and the number of offline +clients. + +------------------------------------------------------- + +### instance.subscriptionsByTopic(pattern, callback(err, subscriptions)) + +Returns all the offline subscriptions matching the given pattern. Called when +a PUBLISH with `qos: 1` or `qos: 2` is received. + +The subscriptions are in the format: + +```js +{ + clientId: client.id, + topic: sub.topic, + qos: sub.qos +} +``` + +------------------------------------------------------- + +### instance.cleanSubscriptions(client, callback(err, client)) + +Removes all offline subscriptions for a given client. + +------------------------------------------------------- + +### instance.outgoingEnqueue(subscription, packet, callback(err)) + +Enqueue a potentially offline delivery. `subscription` is one of the +objects returned by [`subscriptionsByTopic`](#subscriptionsByTopic). Deprecated. + +------------------------------------------------------- + +### instance.outgoingEnqueueCombi(subscriptions, packet, callback(err)) + +Enqueue a potentially offline delivery. `subscriptions` is the whole subscriptions +objects returned by [`subscriptionsByTopic`](#subscriptionsByTopic). + +------------------------------------------------------- + +### instance.outgoingUpdate(client, packet, callback(err)) + +Called before a (potentially) offline packet is delivered, the caller +should update the `packet.messageId` before updating. + +------------------------------------------------------- + +### instance.outgoingClearMessageId(client, packet, callback(err, packet)) + +Removes a packet with the given `messageId` (passing a PUBACK is ok) +from the persistence. Passes back original packet to the callback. + +------------------------------------------------------- + +### instance.outgoingStream(client) + +Return a stream that will load all offline messages for the given client asynchronously. + +------------------------------------------------------- + +### instance.incomingStorePacket(client, packet, cb(err, packet)) + +Store an incoming packet for the given client. Used for QoS 2. + +------------------------------------------------------- + +### instance.incomingGetPacket(client, packet, cb(err, packet)) + +Retrieve an incoming packet with the same `messageId` for the given client. Used for QoS 2. + +------------------------------------------------------- + +### instance.incomingDelPacket(client, packet, cb(err, packet)) + +Deletes incoming packet with the same `messageId` for the given client. Used for QoS 2. + +------------------------------------------------------- + +### instance.putWill(client, packet, cb(err)) + +Stores the will of a client. Used to support multi-broker environments +and to not lose wills in case of a crash. 
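+
+A rough sketch of storing a will with `putWill` (this assumes `instance.broker` has already been assigned, since the persisted will is tagged with the broker id; the client id and topic are illustrative):
+
+```js
+var will = {
+  topic: 'devices/client-1/status',
+  payload: Buffer.from('offline'),
+  qos: 1,
+  retain: true
+}
+
+// persist the will so another broker can deliver it if this one crashes
+instance.putWill({ id: 'client-1' }, will, function (err) {
+  if (err) throw err
+})
+```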
+ +------------------------------------------------------- + +### instance.getWill(client, packet, cb(err)) + +Retrieves the will of a client. Used to support multi-broker environments +and to not lose wills in case of a crash. + +------------------------------------------------------- + +### instance.delWill(client, packet, cb(err)) + +Removes the will of a client. Used to support multi-broker environments +and to not lose wills in case of a crash. + +------------------------------------------------------- + +### instance.streamWill(brokers) + +Streams all the wills for the given brokers. The brokers are in the +format: + +```js +{ + mybroker: { + brokerId: 'mybroker' + } +} +``` + +------------------------------------------------------- + +### instance.getClientList(topic) + +Returns a stream which has all the clientIds subscribed to the +specified topic + + +### instance.destroy(cb(err)) + +Destroy current persistence. Use callback `cb(err)` to catch errors if any + + +## Implement another persistence + +A persistence needs to pass all tests defined in +[./abstract.js](./abstract.js). You can import and use that test suite +in the following manner: + +```js +var test = require('tape').test +var myperst = require('./') +var abs = require('aedes-persistence/abstract') + +abs({ + test: test, + persistence: myperst +}) +``` + +If you require some async stuff before returning, a callback is also +supported: + +```js +var test = require('tape').test +var myperst = require('./') +var abs = require('aedes-persistence/abstract') +var clean = require('./clean') // invented module + +abs({ + test: test, + buildEmitter: require('mymqemitter'), // optional + persistence: function build (cb) { + clean(function (err) { + cb(err, myperst()) + }) + } +}) +``` + +## Collaborators + +* [__Gnought__](https://github.com/gnought) + +## License + +MIT diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/abstract.js b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/abstract.js new file mode 100644 index 000000000..2039b21a4 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/abstract.js @@ -0,0 +1,1430 @@ +'use strict' + +const concat = require('concat-stream') +const pump = require('pump') +const through = require('through2') +const Packet = require('aedes-packet') + +function abstractPersistence (opts) { + const test = opts.test + var _persistence = opts.persistence + const waitForReady = opts.waitForReady + + // requiring it here so it will not error for modules + // not using the default emitter + const buildEmitter = opts.buildEmitter || require('mqemitter') + + if (_persistence.length === 0) { + _persistence = function asyncify (cb) { + cb(null, opts.persistence()) + } + } + + function persistence (cb) { + const mq = buildEmitter() + const broker = { + id: 'broker-42', + mq, + publish: mq.emit.bind(mq), + subscribe: mq.on.bind(mq), + unsubscribe: mq.removeListener.bind(mq), + counter: 0 + } + + _persistence(function (err, instance) { + if (instance) { + // Wait for ready event, if applicable, to ensure the persistence isn't + // destroyed while it's still being set up. + // https://github.com/mcollina/aedes-persistence-redis/issues/41 + if (waitForReady) { + // We have to listen to 'ready' before setting broker because that + // can result in 'ready' being emitted. 
+ instance.on('ready', function () { + instance.removeListener('error', cb) + cb(null, instance) + }) + instance.on('error', cb) + } + instance.broker = broker + if (waitForReady) { + // 'ready' event will call back. + return + } + } + cb(err, instance) + }) + } + + function storeRetained (instance, opts, cb) { + opts = opts || {} + + const packet = { + cmd: 'publish', + id: instance.broker.id, + topic: opts.topic || 'hello/world', + payload: opts.payload || Buffer.from('muahah'), + qos: 0, + retain: true + } + + instance.storeRetained(packet, function (err) { + cb(err, packet) + }) + } + + function matchRetainedWithPattern (t, pattern, opts) { + persistence(function (err, instance) { + if (err) { throw err } + + storeRetained(instance, opts, function (err, packet) { + t.notOk(err, 'no error') + var stream + if (Array.isArray(pattern)) { + stream = instance.createRetainedStreamCombi(pattern) + } else { + stream = instance.createRetainedStream(pattern) + } + + stream.pipe(concat(function (list) { + t.deepEqual(list, [packet], 'must return the packet') + instance.destroy(t.end.bind(t)) + })) + }) + }) + } + + function testInstance (title, cb) { + test(title, function (t) { + persistence(function (err, instance) { + if (err) { throw err } + cb(t, instance) + }) + }) + } + + function testPacket (t, packet, expected) { + if (packet.messageId === null) packet.messageId = undefined + t.equal(packet.messageId, undefined, 'should have an unassigned messageId in queue') + t.deepLooseEqual(packet, expected, 'must return the packet') + } + + test('store and look up retained messages', function (t) { + matchRetainedWithPattern(t, 'hello/world') + }) + + test('look up retained messages with a # pattern', function (t) { + matchRetainedWithPattern(t, '#') + }) + + test('look up retained messages with a hello/world/# pattern', function (t) { + matchRetainedWithPattern(t, 'hello/world/#') + }) + + test('look up retained messages with a + pattern', function (t) { + matchRetainedWithPattern(t, 'hello/+') + }) + + test('look up retained messages with multiple patterns', function (t) { + matchRetainedWithPattern(t, ['hello/+', 'other/hello']) + }) + + testInstance('store multiple retained messages in order', function (t, instance) { + const totalMessages = 1000 + var done = 0 + + const retained = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: true + } + + function checkIndex (index) { + const packet = new Packet(retained, instance.broker) + + instance.storeRetained(packet, function (err) { + t.notOk(err, 'no error') + t.equal(packet.brokerCounter, index + 1, 'packet stored in order') + if (++done === totalMessages) { + instance.destroy(t.end.bind(t)) + } + }) + } + + for (let i = 0; i < totalMessages; i++) { + checkIndex(i) + } + }) + + testInstance('remove retained message', function (t, instance) { + storeRetained(instance, {}, function (err, packet) { + t.notOk(err, 'no error') + storeRetained(instance, { + payload: Buffer.alloc(0) + }, function (err) { + t.notOk(err, 'no error') + + const stream = instance.createRetainedStream('#') + + stream.pipe(concat(function (list) { + t.deepEqual(list, [], 'must return an empty list') + instance.destroy(t.end.bind(t)) + })) + }) + }) + }) + + testInstance('storing twice a retained message should keep only the last', function (t, instance) { + storeRetained(instance, {}, function (err, packet) { + t.notOk(err, 'no error') + storeRetained(instance, { + payload: Buffer.from('ahah') + }, function (err, packet) { + t.notOk(err, 
'no error') + + const stream = instance.createRetainedStream('#') + + stream.pipe(concat(function (list) { + t.deepEqual(list, [packet], 'must return the last packet') + instance.destroy(t.end.bind(t)) + })) + }) + }) + }) + + testInstance('Create a new packet while storing a retained message', function (t, instance) { + const packet = { + cmd: 'publish', + id: instance.broker.id, + topic: opts.topic || 'hello/world', + payload: opts.payload || Buffer.from('muahah'), + qos: 0, + retain: true + } + const newPacket = Object.assign({}, packet) + + instance.storeRetained(packet, function (err) { + t.notOk(err, 'no error') + // packet reference change to check if a new packet is stored always + packet.retain = false + const stream = instance.createRetainedStream('#') + + stream.pipe(concat(function (list) { + t.deepEqual(list, [newPacket], 'must return the last packet') + instance.destroy(t.end.bind(t)) + })) + }) + }) + + testInstance('store and look up subscriptions by client', function (t, instance) { + const client = { id: 'abcde' } + const subs = [{ + topic: 'hello', + qos: 1 + }, { + topic: 'matteo', + qos: 1 + }, { + topic: 'noqos', + qos: 0 + }] + + instance.addSubscriptions(client, subs, function (err, reClient) { + t.equal(reClient, client, 'client must be the same') + t.notOk(err, 'no error') + instance.subscriptionsByClient(client, function (err, resubs, reReClient) { + t.equal(reReClient, client, 'client must be the same') + t.notOk(err, 'no error') + t.deepEqual(resubs, subs) + instance.destroy(t.end.bind(t)) + }) + }) + }) + + testInstance('remove subscriptions by client', function (t, instance) { + const client = { id: 'abcde' } + const subs = [{ + topic: 'hello', + qos: 1 + }, { + topic: 'matteo', + qos: 1 + }] + + instance.addSubscriptions(client, subs, function (err, reClient) { + t.notOk(err, 'no error') + instance.removeSubscriptions(client, ['hello'], function (err, reClient) { + t.notOk(err, 'no error') + t.equal(reClient, client, 'client must be the same') + instance.subscriptionsByClient(client, function (err, resubs, reClient) { + t.equal(reClient, client, 'client must be the same') + t.notOk(err, 'no error') + t.deepEqual(resubs, [{ + topic: 'matteo', + qos: 1 + }]) + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + + testInstance('store and look up subscriptions by topic', function (t, instance) { + const client = { id: 'abcde' } + const subs = [{ + topic: 'hello', + qos: 1 + }, { + topic: 'hello/#', + qos: 1 + }, { + topic: 'matteo', + qos: 1 + }] + + instance.addSubscriptions(client, subs, function (err) { + t.notOk(err, 'no error') + instance.subscriptionsByTopic('hello', function (err, resubs) { + t.notOk(err, 'no error') + t.deepEqual(resubs, [{ + clientId: client.id, + topic: 'hello/#', + qos: 1 + }, { + clientId: client.id, + topic: 'hello', + qos: 1 + }]) + instance.destroy(t.end.bind(t)) + }) + }) + }) + + testInstance('get client list after subscriptions', function (t, instance) { + const client1 = { id: 'abcde' } + const client2 = { id: 'efghi' } + const subs = [{ + topic: 'helloagain', + qos: 1 + }] + + instance.addSubscriptions(client1, subs, function (err) { + t.notOk(err, 'no error for client 1') + instance.addSubscriptions(client2, subs, function (err) { + t.notOk(err, 'no error for client 2') + const stream = instance.getClientList(subs[0].topic) + stream.pipe(concat({ encoding: 'object' }, function (out) { + t.deepEqual(out, [client1.id, client2.id]) + instance.destroy(t.end.bind(t)) + })) + }) + }) + }) + + testInstance('get client list after 
an unsubscribe', function (t, instance) { + const client1 = { id: 'abcde' } + const client2 = { id: 'efghi' } + const subs = [{ + topic: 'helloagain', + qos: 1 + }] + + instance.addSubscriptions(client1, subs, function (err) { + t.notOk(err, 'no error for client 1') + instance.addSubscriptions(client2, subs, function (err) { + t.notOk(err, 'no error for client 2') + instance.removeSubscriptions(client2, [subs[0].topic], function (err, reClient) { + t.notOk(err, 'no error for removeSubscriptions') + const stream = instance.getClientList(subs[0].topic) + stream.pipe(concat({ encoding: 'object' }, function (out) { + t.deepEqual(out, [client1.id]) + instance.destroy(t.end.bind(t)) + })) + }) + }) + }) + }) + + testInstance('get subscriptions list after an unsubscribe', function (t, instance) { + const client1 = { id: 'abcde' } + const client2 = { id: 'efghi' } + const subs = [{ + topic: 'helloagain', + qos: 1 + }] + + instance.addSubscriptions(client1, subs, function (err) { + t.notOk(err, 'no error for client 1') + instance.addSubscriptions(client2, subs, function (err) { + t.notOk(err, 'no error for client 2') + instance.removeSubscriptions(client2, [subs[0].topic], function (err, reClient) { + t.notOk(err, 'no error for removeSubscriptions') + instance.subscriptionsByTopic(subs[0].topic, function (err, clients) { + t.notOk(err, 'no error getting subscriptions by topic') + t.deepEqual(clients[0].clientId, client1.id) + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + + testInstance('QoS 0 subscriptions, restored but not matched', function (t, instance) { + const client = { id: 'abcde' } + const subs = [{ + topic: 'hello', + qos: 0 + }, { + topic: 'hello/#', + qos: 1 + }, { + topic: 'matteo', + qos: 1 + }] + + instance.addSubscriptions(client, subs, function (err) { + t.notOk(err, 'no error') + instance.subscriptionsByClient(client, function (err, resubs) { + t.notOk(err, 'no error') + t.deepEqual(resubs, subs) + instance.subscriptionsByTopic('hello', function (err, resubs2) { + t.notOk(err, 'no error') + t.deepEqual(resubs2, [{ + clientId: client.id, + topic: 'hello/#', + qos: 1 + }]) + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + + testInstance('clean subscriptions', function (t, instance) { + const client = { id: 'abcde' } + const subs = [{ + topic: 'hello', + qos: 1 + }, { + topic: 'matteo', + qos: 1 + }] + + instance.addSubscriptions(client, subs, function (err) { + t.notOk(err, 'no error') + instance.cleanSubscriptions(client, function (err) { + t.notOk(err, 'no error') + instance.subscriptionsByTopic('hello', function (err, resubs) { + t.notOk(err, 'no error') + t.deepEqual(resubs, [], 'no subscriptions') + + instance.subscriptionsByClient(client, function (err, resubs) { + t.error(err) + t.deepEqual(resubs, null, 'no subscriptions') + + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 0, 'no subscriptions added') + t.equal(clientsCount, 0, 'no clients added') + + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + }) + + testInstance('clean subscriptions with no active subscriptions', function (t, instance) { + const client = { id: 'abcde' } + + instance.cleanSubscriptions(client, function (err) { + t.notOk(err, 'no error') + instance.subscriptionsByTopic('hello', function (err, resubs) { + t.notOk(err, 'no error') + t.deepEqual(resubs, [], 'no subscriptions') + + instance.subscriptionsByClient(client, function (err, resubs) { + t.error(err) + t.deepEqual(resubs, null, 'no subscriptions') + + 
instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 0, 'no subscriptions added') + t.equal(clientsCount, 0, 'no clients added') + + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + + testInstance('same topic, different QoS', function (t, instance) { + const client = { id: 'abcde' } + const subs = [{ + topic: 'hello', + qos: 0 + }, { + topic: 'hello', + qos: 1 + }] + + instance.addSubscriptions(client, subs, function (err, reClient) { + t.equal(reClient, client, 'client must be the same') + t.error(err, 'no error') + + instance.subscriptionsByClient(client, function (err, subsForClient, client) { + t.error(err, 'no error') + t.deepEqual(subsForClient, [{ + topic: 'hello', + qos: 1 + }]) + + instance.subscriptionsByTopic('hello', function (err, subsForTopic) { + t.error(err, 'no error') + t.deepEqual(subsForTopic, [{ + clientId: 'abcde', + topic: 'hello', + qos: 1 + }]) + + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 1, 'one subscription added') + t.equal(clientsCount, 1, 'one client added') + + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + + testInstance('replace subscriptions', function (t, instance) { + const client = { id: 'abcde' } + const topic = 'hello' + const sub = { topic } + const subByTopic = { clientId: client.id, topic } + + function check (qos, cb) { + sub.qos = subByTopic.qos = qos + instance.addSubscriptions(client, [sub], function (err, reClient) { + t.equal(reClient, client, 'client must be the same') + t.error(err, 'no error') + instance.subscriptionsByClient(client, function (err, subsForClient, client) { + t.error(err, 'no error') + t.deepEqual(subsForClient, [sub]) + instance.subscriptionsByTopic(topic, function (err, subsForTopic) { + t.error(err, 'no error') + t.deepEqual(subsForTopic, qos === 0 ? 
[] : [subByTopic]) + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + if (qos === 0) { + t.equal(subsCount, 0, 'no subscriptions added') + } else { + t.equal(subsCount, 1, 'one subscription added') + } + t.equal(clientsCount, 1, 'one client added') + cb() + }) + }) + }) + }) + } + + check(0, function () { + check(1, function () { + check(2, function () { + check(1, function () { + check(0, function () { + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + }) + + testInstance('replace subscriptions in same call', function (t, instance) { + const client = { id: 'abcde' } + const topic = 'hello' + const subs = [ + { topic, qos: 0 }, + { topic, qos: 1 }, + { topic, qos: 2 }, + { topic, qos: 1 }, + { topic, qos: 0 } + ] + instance.addSubscriptions(client, subs, function (err, reClient) { + t.equal(reClient, client, 'client must be the same') + t.error(err, 'no error') + instance.subscriptionsByClient(client, function (err, subsForClient, client) { + t.error(err, 'no error') + t.deepEqual(subsForClient, [{ topic, qos: 0 }]) + instance.subscriptionsByTopic(topic, function (err, subsForTopic) { + t.error(err, 'no error') + t.deepEqual(subsForTopic, []) + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 0, 'no subscriptions added') + t.equal(clientsCount, 1, 'one client added') + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + + testInstance('store and count subscriptions', function (t, instance) { + const client = { id: 'abcde' } + const subs = [{ + topic: 'hello', + qos: 1 + }, { + topic: 'matteo', + qos: 1 + }, { + topic: 'noqos', + qos: 0 + }] + + instance.addSubscriptions(client, subs, function (err, reClient) { + t.equal(reClient, client, 'client must be the same') + t.error(err, 'no error') + + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 2, 'two subscriptions added') + t.equal(clientsCount, 1, 'one client added') + + instance.removeSubscriptions(client, ['hello'], function (err, reClient) { + t.error(err, 'no error') + + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 1, 'one subscription added') + t.equal(clientsCount, 1, 'one client added') + + instance.removeSubscriptions(client, ['matteo'], function (err, reClient) { + t.error(err, 'no error') + + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 0, 'zero subscriptions added') + t.equal(clientsCount, 1, 'one client added') + + instance.removeSubscriptions(client, ['noqos'], function (err, reClient) { + t.error(err, 'no error') + + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 0, 'zero subscriptions added') + t.equal(clientsCount, 0, 'zero clients added') + + instance.removeSubscriptions(client, ['noqos'], function (err, reClient) { + t.error(err, 'no error') + + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, 0, 'zero subscriptions added') + t.equal(clientsCount, 0, 'zero clients added') + + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + }) + }) + }) + }) + }) + }) + + testInstance('count subscriptions with two clients', function (t, instance) { + const client1 = { id: 'abcde' } + const client2 = { id: 'fghij' } + const subs = [{ + topic: 'hello', + qos: 1 + }, { + topic: 'matteo', + 
qos: 1 + }, { + topic: 'noqos', + qos: 0 + }] + + function remove (client, subs, expectedSubs, expectedClients, cb) { + instance.removeSubscriptions(client, subs, function (err, reClient) { + t.error(err, 'no error') + t.equal(reClient, client, 'client must be the same') + + instance.countOffline(function (err, subsCount, clientsCount) { + t.error(err, 'no error') + t.equal(subsCount, expectedSubs, 'subscriptions added') + t.equal(clientsCount, expectedClients, 'clients added') + + cb() + }) + }) + } + + instance.addSubscriptions(client1, subs, function (err, reClient) { + t.equal(reClient, client1, 'client must be the same') + t.error(err, 'no error') + + instance.addSubscriptions(client2, subs, function (err, reClient) { + t.equal(reClient, client2, 'client must be the same') + t.error(err, 'no error') + + remove(client1, ['foobar'], 4, 2, function () { + remove(client1, ['hello'], 3, 2, function () { + remove(client1, ['hello'], 3, 2, function () { + remove(client1, ['matteo'], 2, 2, function () { + remove(client1, ['noqos'], 2, 1, function () { + remove(client2, ['hello'], 1, 1, function () { + remove(client2, ['matteo'], 0, 1, function () { + remove(client2, ['noqos'], 0, 0, function () { + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + }) + }) + }) + }) + }) + }) + + testInstance('add duplicate subs to persistence for qos > 0', function (t, instance) { + const client = { id: 'abcde' } + const topic = 'hello' + const subs = [{ + topic, + qos: 1 + }] + + instance.addSubscriptions(client, subs, function (err, reClient) { + t.equal(reClient, client, 'client must be the same') + t.error(err, 'no error') + + instance.addSubscriptions(client, subs, function (err, resCLient) { + t.equal(resCLient, client, 'client must be the same') + t.error(err, 'no error') + subs[0].clientId = client.id + instance.subscriptionsByTopic(topic, function (err, subsForTopic) { + t.error(err, 'no error') + t.deepEqual(subsForTopic, subs) + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + + testInstance('add duplicate subs to persistence for qos 0', function (t, instance) { + const client = { id: 'abcde' } + const topic = 'hello' + const subs = [{ + topic, + qos: 0 + }] + + instance.addSubscriptions(client, subs, function (err, reClient) { + t.equal(reClient, client, 'client must be the same') + t.error(err, 'no error') + + instance.addSubscriptions(client, subs, function (err, resCLient) { + t.equal(resCLient, client, 'client must be the same') + t.error(err, 'no error') + instance.subscriptionsByClient(client, function (err, subsForClient, client) { + t.error(err, 'no error') + t.deepEqual(subsForClient, subs) + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + + testInstance('get topic list after concurrent subscriptions of a client', function (t, instance) { + const client = { id: 'abcde' } + const subs1 = [{ + topic: 'hello1', + qos: 1 + }] + const subs2 = [{ + topic: 'hello2', + qos: 1 + }] + var calls = 2 + + function done () { + if (!--calls) { + instance.subscriptionsByClient(client, function (err, resubs) { + t.notOk(err, 'no error') + resubs.sort((a, b) => b.topic.localeCompare(b.topic, 'en')) + t.deepEqual(resubs, [subs1[0], subs2[0]]) + instance.destroy(t.end.bind(t)) + }) + } + } + + instance.addSubscriptions(client, subs1, function (err) { + t.notOk(err, 'no error for hello1') + done() + }) + instance.addSubscriptions(client, subs2, function (err) { + t.notOk(err, 'no error for hello2') + done() + }) + }) + + testInstance('add outgoing packet and stream it', function (t, 
instance) { + const sub = { + clientId: 'abcde', + topic: 'hello', + qos: 1 + } + const client = { + id: sub.clientId + } + const packet = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 42 + } + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: false, + dup: false, + brokerId: instance.broker.id, + brokerCounter: 42, + messageId: undefined + } + + instance.outgoingEnqueue(sub, packet, function (err) { + t.error(err) + const stream = instance.outgoingStream(client) + + stream.pipe(concat(function (list) { + var packet = list[0] + testPacket(t, packet, expected) + instance.destroy(t.end.bind(t)) + })) + }) + }) + + testInstance('add outgoing packet for multiple subs and stream to all', function (t, instance) { + const sub = { + clientId: 'abcde', + topic: 'hello', + qos: 1 + } + const sub2 = { + clientId: 'fghih', + topic: 'hello', + qos: 1 + } + const subs = [sub, sub2] + const client = { + id: sub.clientId + } + const client2 = { + id: sub2.clientId + } + const packet = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 42 + } + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: false, + dup: false, + brokerId: instance.broker.id, + brokerCounter: 42, + messageId: undefined + } + + instance.outgoingEnqueueCombi(subs, packet, function (err) { + t.error(err) + const stream = instance.outgoingStream(client) + stream.pipe(concat(function (list) { + var packet = list[0] + testPacket(t, packet, expected) + + const stream2 = instance.outgoingStream(client2) + stream2.pipe(concat(function (list) { + var packet = list[0] + testPacket(t, packet, expected) + instance.destroy(t.end.bind(t)) + })) + })) + }) + }) + + testInstance('add outgoing packet as a string and pump', function (t, instance) { + const sub = { + clientId: 'abcde', + topic: 'hello', + qos: 1 + } + const client = { + id: sub.clientId + } + const packet1 = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 10 + } + const packet2 = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('matteo'), + qos: 1, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 50 + } + const queue = [] + enqueueAndUpdate(t, instance, client, sub, packet1, 42, function (updated1) { + enqueueAndUpdate(t, instance, client, sub, packet2, 43, function (updated2) { + const stream = instance.outgoingStream(client) + pump(stream, through.obj(function clearQueue (data, enc, next) { + instance.outgoingUpdate(client, data, + function (err, client, packet) { + t.notOk(err, 'no error') + queue.push(packet) + next() + }) + }), function done () { + t.equal(queue.length, 2) + if (queue.length === 2) { + t.deepEqual(queue[0], updated1) + t.deepEqual(queue[1], updated2) + } + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + + testInstance('add outgoing packet as a string and stream', function (t, instance) { + const sub = { + clientId: 'abcde', + topic: 'hello', + qos: 1 + } + const client = { + id: sub.clientId + } + const packet = { + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 42 
+ } + const expected = { + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + retain: false, + dup: false, + brokerId: instance.broker.id, + brokerCounter: 42, + messageId: undefined + } + + instance.outgoingEnqueueCombi([sub], packet, function (err) { + t.error(err) + const stream = instance.outgoingStream(client) + + stream.pipe(concat(function (list) { + var packet = list[0] + testPacket(t, packet, expected) + instance.destroy(t.end.bind(t)) + })) + }) + }) + + testInstance('add outgoing packet and stream it twice', function (t, instance) { + const sub = { + clientId: 'abcde', + topic: 'hello', + qos: 1 + } + const client = { + id: sub.clientId + } + const packet = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 42, + messageId: 4242 + } + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: false, + dup: false, + brokerId: instance.broker.id, + brokerCounter: 42, + messageId: undefined + } + + instance.outgoingEnqueueCombi([sub], packet, function (err) { + t.error(err) + const stream = instance.outgoingStream(client) + + stream.pipe(concat(function (list) { + var packet = list[0] + testPacket(t, packet, expected) + + const stream = instance.outgoingStream(client) + + stream.pipe(concat(function (list) { + var packet = list[0] + testPacket(t, packet, expected) + t.notEqual(packet, expected, 'packet must be a different object') + instance.destroy(t.end.bind(t)) + })) + })) + }) + }) + + function enqueueAndUpdate (t, instance, client, sub, packet, messageId, callback) { + instance.outgoingEnqueueCombi([sub], packet, function (err) { + t.error(err) + const updated = new Packet(packet) + updated.messageId = messageId + + instance.outgoingUpdate(client, updated, function (err, reclient, repacket) { + t.error(err) + t.equal(reclient, client, 'client matches') + t.equal(repacket, updated, 'packet matches') + callback(updated) + }) + }) + } + + testInstance('add outgoing packet and update messageId', function (t, instance) { + const sub = { + clientId: 'abcde', topic: 'hello', qos: 1 + } + const client = { + id: sub.clientId + } + const packet = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 42 + } + + enqueueAndUpdate(t, instance, client, sub, packet, 42, function (updated) { + const stream = instance.outgoingStream(client) + delete updated.messageId + stream.pipe(concat(function (list) { + delete list[0].messageId + t.notEqual(list[0], updated, 'must not be the same object') + t.deepEqual(list, [updated], 'must return the packet') + instance.destroy(t.end.bind(t)) + })) + }) + }) + + testInstance('add 2 outgoing packet and clear messageId', function (t, instance) { + const sub = { + clientId: 'abcde', topic: 'hello', qos: 1 + } + const client = { + id: sub.clientId + } + const packet1 = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 42 + } + const packet2 = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('matteo'), + qos: 1, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 43 + } + + enqueueAndUpdate(t, instance, client, sub, packet1, 42, function (updated1) { + enqueueAndUpdate(t, instance, 
client, sub, packet2, 43, function (updated2) { + instance.outgoingClearMessageId(client, updated1, function (err, packet) { + t.error(err) + t.deepEqual(packet.messageId, 42, 'must have the same messageId') + t.deepEqual(packet.payload.toString(), packet1.payload.toString(), 'must have original payload') + t.deepEqual(packet.topic, packet1.topic, 'must have original topic') + const stream = instance.outgoingStream(client) + delete updated2.messageId + stream.pipe(concat(function (list) { + delete list[0].messageId + t.notEqual(list[0], updated2, 'must not be the same object') + t.deepEqual(list, [updated2], 'must return the packet') + instance.destroy(t.end.bind(t)) + })) + }) + }) + }) + }) + + testInstance('update to publish w/ same messageId', function (t, instance) { + const sub = { + clientId: 'abcde', topic: 'hello', qos: 1 + } + const client = { + id: sub.clientId + } + const packet1 = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 42, + messageId: 42 + } + const packet2 = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 50, + messageId: 42 + } + + instance.outgoingEnqueue(sub, packet1, function () { + instance.outgoingEnqueue(sub, packet2, function () { + instance.outgoingUpdate(client, packet1, function () { + instance.outgoingUpdate(client, packet2, function () { + const stream = instance.outgoingStream(client) + stream.pipe(concat(function (list) { + t.equal(list.length, 2, 'must have two items in queue') + t.equal(list[0].brokerCounter, packet1.brokerCounter, 'brokerCounter must match') + t.equal(list[0].messageId, packet1.messageId, 'messageId must match') + t.equal(list[1].brokerCounter, packet2.brokerCounter, 'brokerCounter must match') + t.equal(list[1].messageId, packet2.messageId, 'messageId must match') + instance.destroy(t.end.bind(t)) + })) + }) + }) + }) + }) + }) + + testInstance('update to pubrel', function (t, instance) { + const sub = { + clientId: 'abcde', topic: 'hello', qos: 1 + } + const client = { + id: sub.clientId + } + const packet = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + dup: false, + length: 14, + retain: false, + brokerId: instance.broker.id, + brokerCounter: 42 + } + + instance.outgoingEnqueueCombi([sub], packet, function (err) { + t.error(err) + const updated = new Packet(packet) + updated.messageId = 42 + + instance.outgoingUpdate(client, updated, function (err, reclient, repacket) { + t.error(err) + t.equal(reclient, client, 'client matches') + t.equal(repacket, updated, 'packet matches') + + const pubrel = { + cmd: 'pubrel', + messageId: updated.messageId + } + + instance.outgoingUpdate(client, pubrel, function (err) { + t.error(err) + + const stream = instance.outgoingStream(client) + + stream.pipe(concat(function (list) { + t.deepEqual(list, [pubrel], 'must return the packet') + instance.destroy(t.end.bind(t)) + })) + }) + }) + }) + }) + + testInstance('add incoming packet, get it, and clear with messageId', function (t, instance) { + const client = { + id: 'abcde' + } + const packet = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + dup: false, + length: 14, + retain: false, + messageId: 42 + } + + instance.incomingStorePacket(client, packet, function (err) { + t.error(err) + + instance.incomingGetPacket(client, { + 
messageId: packet.messageId + }, function (err, retrieved) { + t.error(err) + + // adjusting the objects so they match + delete retrieved.brokerCounter + delete retrieved.brokerId + delete packet.length + + t.deepLooseEqual(retrieved, packet, 'retrieved packet must be deeply equal') + t.notEqual(retrieved, packet, 'retrieved packet must not be the same objet') + + instance.incomingDelPacket(client, retrieved, function (err) { + t.error(err) + + instance.incomingGetPacket(client, { + messageId: packet.messageId + }, function (err, retrieved) { + t.ok(err, 'must error') + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + + testInstance('store, fetch and delete will message', function (t, instance) { + const client = { + id: '12345' + } + const expected = { + topic: 'hello/died', + payload: Buffer.from('muahahha'), + qos: 0, + retain: true + } + + instance.putWill(client, expected, function (err, c) { + t.error(err, 'no error') + t.equal(c, client, 'client matches') + instance.getWill(client, function (err, packet, c) { + t.error(err, 'no error') + t.deepEqual(packet, expected, 'will matches') + t.equal(c, client, 'client matches') + client.brokerId = packet.brokerId + instance.delWill(client, function (err, packet, c) { + t.error(err, 'no error') + t.deepEqual(packet, expected, 'will matches') + t.equal(c, client, 'client matches') + instance.getWill(client, function (err, packet, c) { + t.error(err, 'no error') + t.notOk(packet, 'no will after del') + t.equal(c, client, 'client matches') + instance.destroy(t.end.bind(t)) + }) + }) + }) + }) + }) + + testInstance('stream all will messages', function (t, instance) { + const client = { + id: '12345' + } + const toWrite = { + topic: 'hello/died', + payload: Buffer.from('muahahha'), + qos: 0, + retain: true + } + + instance.putWill(client, toWrite, function (err, c) { + t.error(err, 'no error') + t.equal(c, client, 'client matches') + instance.streamWill().pipe(through.obj(function (chunk, enc, cb) { + t.deepEqual(chunk, { + clientId: client.id, + brokerId: instance.broker.id, + topic: 'hello/died', + payload: Buffer.from('muahahha'), + qos: 0, + retain: true + }, 'packet matches') + cb() + client.brokerId = chunk.brokerId + instance.delWill(client, function (err, result, client) { + t.error(err, 'no error') + instance.destroy(t.end.bind(t)) + }) + })) + }) + }) + + testInstance('stream all will message for unknown brokers', function (t, instance) { + const originalId = instance.broker.id + const client = { + id: '42' + } + const anotherClient = { + id: '24' + } + const toWrite1 = { + topic: 'hello/died42', + payload: Buffer.from('muahahha'), + qos: 0, + retain: true + } + const toWrite2 = { + topic: 'hello/died24', + payload: Buffer.from('muahahha'), + qos: 0, + retain: true + } + + instance.putWill(client, toWrite1, function (err, c) { + t.error(err, 'no error') + t.equal(c, client, 'client matches') + instance.broker.id = 'anotherBroker' + instance.putWill(anotherClient, toWrite2, function (err, c) { + t.error(err, 'no error') + t.equal(c, anotherClient, 'client matches') + instance.streamWill({ + anotherBroker: Date.now() + }) + .pipe(through.obj(function (chunk, enc, cb) { + t.deepEqual(chunk, { + clientId: client.id, + brokerId: originalId, + topic: 'hello/died42', + payload: Buffer.from('muahahha'), + qos: 0, + retain: true + }, 'packet matches') + cb() + client.brokerId = chunk.brokerId + instance.delWill(client, function (err, result, client) { + t.error(err, 'no error') + instance.destroy(t.end.bind(t)) + }) + })) + }) + }) + 
}) + + testInstance('delete wills from dead brokers', function (t, instance) { + const client = { + id: '42' + } + + const toWrite1 = { + topic: 'hello/died42', + payload: Buffer.from('muahahha'), + qos: 0, + retain: true + } + + instance.putWill(client, toWrite1, function (err, c) { + t.error(err, 'no error') + t.equal(c, client, 'client matches') + instance.broker.id = 'anotherBroker' + client.brokerId = instance.broker.id + instance.delWill(client, function (err, result, client) { + t.error(err, 'no error') + instance.destroy(t.end.bind(t)) + }) + }) + }) + + testInstance('do not error if unkown messageId in outoingClearMessageId', function (t, instance) { + const client = { + id: 'abc-123' + } + + instance.outgoingClearMessageId(client, 42, function (err) { + t.error(err) + instance.destroy(t.end.bind(t)) + }) + }) +} + +module.exports = abstractPersistence diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/package.json b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/package.json new file mode 100644 index 000000000..a6a97c7b5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/package.json @@ -0,0 +1,120 @@ +{ + "_from": "aedes-persistence@^8.1.1", + "_id": "aedes-persistence@8.1.3", + "_inBundle": false, + "_integrity": "sha512-VMCjEV+2g1TNJb/IlDEUy6SP9crT+QUhe2xc6UjyqrFNBNgTvHmOefXY7FxWrwmR2QA02vwg3+5p/JXkyg/Dkw==", + "_location": "/aedes-persistence", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aedes-persistence@^8.1.1", + "name": "aedes-persistence", + "escapedName": "aedes-persistence", + "rawSpec": "^8.1.1", + "saveSpec": null, + "fetchSpec": "^8.1.1" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/aedes-persistence/-/aedes-persistence-8.1.3.tgz", + "_shasum": "3c7981dcdbfd33d5e4593d0d386f4c306e444b1e", + "_spec": "aedes-persistence@^8.1.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": "https://github.com/moscajs/aedes-persistence/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Gnought", + "url": "https://github.com/gnought" + } + ], + "dependencies": { + "aedes-packet": "^2.3.1", + "from2": "^2.3.0", + "qlobber": "^5.0.3" + }, + "deprecated": false, + "description": "The spec for an Aedes persistence, with abstract tests and a fast in-memory implementation.", + "devDependencies": { + "aedes": "^0.45.0", + "concat-stream": "^2.0.0", + "faucet": "0.0.1", + "license-checker": "^25.0.1", + "mqemitter": "^4.4.0", + "nyc": "^15.1.0", + "pre-commit": "^1.2.2", + "pump": "^3.0.0", + "release-it": "^14.2.0", + "snazzy": "^9.0.0", + "standard": "^15.0.1", + "tape": "^5.2.1", + "through2": "^4.0.2", + "tsd": "^0.14.0" + }, + "engines": { + "node": ">=10" + }, + "homepage": "https://github.com/moscajs/aedes-persistence#readme", + "keywords": [ + "mqtt", + "broker", + "persistence", + "aedes" + ], + "license": "MIT", + "main": "persistence.js", + "name": "aedes-persistence", + "pre-commit": [ + "test" + ], + "release-it": { + "github": { + "release": true + }, + "git": { + "tagName": "v${version}" + }, + "hooks": { + "before:init": [ + "npm run test" + ] + }, + "npm": { + "publish": true + } + }, + "repository": { + "type": "git", + "url": "git+https://github.com/moscajs/aedes-persistence.git" + 
}, + "scripts": { + "coverage": "nyc --reporter=lcov tape test.js", + "license-checker": "license-checker --production --onlyAllow='MIT;ISC;BSD-3-Clause;BSD-2-Clause'", + "lint": "standard --verbose | snazzy", + "lint-fix": "standard --fix", + "release": "read -p 'GITHUB_TOKEN: ' GITHUB_TOKEN && export GITHUB_TOKEN=$GITHUB_TOKEN && release-it --disable-metrics", + "test": "npm run lint && npm run unit && tsd", + "test:ci": "npm run lint && npm run coverage", + "test:types": "tsd", + "unit": "tape test.js | faucet" + }, + "types": "types/index.d.ts", + "version": "8.1.3", + "warnings": [ + { + "code": "ENOTSUP", + "required": { + "node": ">=10" + }, + "pkgid": "aedes-persistence@8.1.3" + } + ] +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/persistence.js b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/persistence.js new file mode 100644 index 000000000..7568aaeec --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/persistence.js @@ -0,0 +1,356 @@ +'use strict' + +const from2 = require('from2') +const QlobberSub = require('qlobber/aedes/qlobber-sub') +const { QlobberTrue } = require('qlobber') +const Packet = require('aedes-packet') +const QlobberOpts = { + wildcard_one: '+', + wildcard_some: '#', + separator: '/' +} + +function MemoryPersistence () { + if (!(this instanceof MemoryPersistence)) { + return new MemoryPersistence() + } + + this._retained = [] + // clientId -> topic -> qos + this._subscriptions = new Map() + this._clientsCount = 0 + this._trie = new QlobberSub(QlobberOpts) + this._outgoing = {} + this._incoming = {} + this._wills = {} +} + +function matchTopic (p) { + return p.topic !== this.topic +} + +MemoryPersistence.prototype.storeRetained = function (packet, cb) { + packet = Object.assign({}, packet) + this._retained = this._retained.filter(matchTopic, packet) + + if (packet.payload.length > 0) this._retained.push(packet) + + cb(null) +} + +function matchingStream (current, pattern) { + const matcher = new QlobberTrue(QlobberOpts) + + if (Array.isArray(pattern)) { + for (var i = 0; i < pattern.length; i += 1) { + matcher.add(pattern[i]) + } + } else { + matcher.add(pattern) + } + + return from2.obj(function match (size, next) { + var entry + + while ((entry = current.shift()) != null) { + if (matcher.test(entry.topic)) { + setImmediate(next, null, entry) + return + } + } + + if (!entry) this.push(null) + }) +} + +MemoryPersistence.prototype.createRetainedStream = function (pattern) { + return matchingStream([].concat(this._retained), pattern) +} + +MemoryPersistence.prototype.createRetainedStreamCombi = function (patterns) { + return matchingStream([].concat(this._retained), patterns) +} + +MemoryPersistence.prototype.addSubscriptions = function (client, subs, cb) { + var stored = this._subscriptions.get(client.id) + const trie = this._trie + + if (!stored) { + stored = new Map() + this._subscriptions.set(client.id, stored) + this._clientsCount++ + } + + for (var i = 0; i < subs.length; i += 1) { + const sub = subs[i] + const qos = stored.get(sub.topic) + const hasQoSGreaterThanZero = (qos !== undefined) && (qos > 0) + if (sub.qos > 0) { + trie.add(sub.topic, { + clientId: client.id, + topic: sub.topic, + qos: sub.qos + }) + } else if (hasQoSGreaterThanZero) { + trie.remove(sub.topic, { + clientId: client.id, + topic: sub.topic + }) + } + stored.set(sub.topic, sub.qos) + } + + cb(null, client) +} + +MemoryPersistence.prototype.removeSubscriptions = function (client, subs, cb) { + const stored = 
this._subscriptions.get(client.id) + const trie = this._trie + + if (stored) { + for (var i = 0; i < subs.length; i += 1) { + const topic = subs[i] + const qos = stored.get(topic) + if (qos !== undefined) { + if (qos > 0) { + trie.remove(topic, { clientId: client.id, topic }) + } + stored.delete(topic) + } + } + + if (stored.size === 0) { + this._clientsCount-- + this._subscriptions.delete(client.id) + } + } + + cb(null, client) +} + +MemoryPersistence.prototype.subscriptionsByClient = function (client, cb) { + var subs = null + const stored = this._subscriptions.get(client.id) + if (stored) { + subs = [] + for (const topicAndQos of stored) { + subs.push({ topic: topicAndQos[0], qos: topicAndQos[1] }) + } + } + cb(null, subs, client) +} + +MemoryPersistence.prototype.countOffline = function (cb) { + return cb(null, this._trie.subscriptionsCount, this._clientsCount) +} + +MemoryPersistence.prototype.subscriptionsByTopic = function (pattern, cb) { + cb(null, this._trie.match(pattern)) +} + +MemoryPersistence.prototype.cleanSubscriptions = function (client, cb) { + const trie = this._trie + const stored = this._subscriptions.get(client.id) + + if (stored) { + for (const topicAndQos of stored) { + if (topicAndQos[1] > 0) { + const topic = topicAndQos[0] + trie.remove(topic, { clientId: client.id, topic }) + } + } + + this._clientsCount-- + this._subscriptions.delete(client.id) + } + + cb(null, client) +} + +MemoryPersistence.prototype.outgoingEnqueue = function (sub, packet, cb) { + _outgoingEnqueue.call(this, sub, packet) + process.nextTick(cb) +} + +MemoryPersistence.prototype.outgoingEnqueueCombi = function (subs, packet, cb) { + for (var i = 0; i < subs.length; i++) { + _outgoingEnqueue.call(this, subs[i], packet) + } + process.nextTick(cb) +} + +function _outgoingEnqueue (sub, packet) { + const id = sub.clientId + const queue = this._outgoing[id] || [] + + this._outgoing[id] = queue + const p = new Packet(packet) + queue[queue.length] = p +} + +MemoryPersistence.prototype.outgoingUpdate = function (client, packet, cb) { + const clientId = client.id + const outgoing = this._outgoing[clientId] || [] + var temp + + this._outgoing[clientId] = outgoing + + for (var i = 0; i < outgoing.length; i++) { + temp = outgoing[i] + if (temp.brokerId === packet.brokerId) { + if (temp.brokerCounter === packet.brokerCounter) { + temp.messageId = packet.messageId + return cb(null, client, packet) + } + /* + Maximum of messageId (packet identifier) is 65535 and will be rotated, + brokerCounter is to ensure the packet identifier be unique. + The for loop is going to search which packet messageId should be updated + in the _outgoing queue. 
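+      For instance, after the identifier wraps, two queued packets could both
+      carry messageId 1 while their brokerCounter values (say 40 and 65576)
+      still tell them apart.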
+ If there is a case that brokerCounter is different but messageId is same, + we need to let the loop keep searching + */ + } else if (temp.messageId === packet.messageId) { + outgoing[i] = packet + return cb(null, client, packet) + } + } + + cb(new Error('no such packet'), client, packet) +} + +MemoryPersistence.prototype.outgoingClearMessageId = function (client, packet, cb) { + const clientId = client.id + const outgoing = this._outgoing[clientId] || [] + var temp + + this._outgoing[clientId] = outgoing + + for (var i = 0; i < outgoing.length; i++) { + temp = outgoing[i] + if (temp.messageId === packet.messageId) { + outgoing.splice(i, 1) + return cb(null, temp) + } + } + + cb() +} + +MemoryPersistence.prototype.outgoingStream = function (client) { + const queue = [].concat(this._outgoing[client.id] || []) + + return from2.obj(function match (size, next) { + var entry + + if ((entry = queue.shift()) != null) { + setImmediate(next, null, entry) + return + } + + if (!entry) this.push(null) + }) +} + +MemoryPersistence.prototype.incomingStorePacket = function (client, packet, cb) { + const id = client.id + const store = this._incoming[id] || {} + + this._incoming[id] = store + + store[packet.messageId] = new Packet(packet) + store[packet.messageId].messageId = packet.messageId + + cb(null) +} + +MemoryPersistence.prototype.incomingGetPacket = function (client, packet, cb) { + const id = client.id + const store = this._incoming[id] || {} + var err = null + + this._incoming[id] = store + + if (!store[packet.messageId]) { + err = new Error('no such packet') + } + + cb(err, store[packet.messageId]) +} + +MemoryPersistence.prototype.incomingDelPacket = function (client, packet, cb) { + const id = client.id + const store = this._incoming[id] || {} + const toDelete = store[packet.messageId] + var err = null + + if (!toDelete) { + err = new Error('no such packet') + } else { + delete store[packet.messageId] + } + + cb(err) +} + +MemoryPersistence.prototype.putWill = function (client, packet, cb) { + packet.brokerId = this.broker.id + packet.clientId = client.id + this._wills[client.id] = packet + cb(null, client) +} + +MemoryPersistence.prototype.getWill = function (client, cb) { + cb(null, this._wills[client.id], client) +} + +MemoryPersistence.prototype.delWill = function (client, cb) { + const will = this._wills[client.id] + delete this._wills[client.id] + cb(null, will, client) +} + +MemoryPersistence.prototype.streamWill = function (brokers) { + const clients = Object.keys(this._wills) + const wills = this._wills + brokers = brokers || {} + return from2.obj(function match (size, next) { + var entry + + while ((entry = clients.shift()) != null) { + if (!brokers[wills[entry].brokerId]) { + setImmediate(next, null, wills[entry]) + return + } + } + + if (!entry) { + this.push(null) + } + }) +} + +MemoryPersistence.prototype.getClientList = function (topic) { + const clientSubs = this._subscriptions + const entries = clientSubs.entries(clientSubs) + return from2.obj(function match (size, next) { + var entry + while (!(entry = entries.next()).done) { + if (entry.value[1].has(topic)) { + setImmediate(next, null, entry.value[0]) + return + } + } + next(null, null) + }) +} + +MemoryPersistence.prototype.destroy = function (cb) { + this._retained = null + if (cb) { + cb(null) + } +} + +module.exports = MemoryPersistence +module.exports.Packet = Packet diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/test.js b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/test.js 
new file mode 100644 index 000000000..0471cab40 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/test.js @@ -0,0 +1,10 @@ +'use strict' + +const test = require('tape').test +const memory = require('./') +const abs = require('./abstract') + +abs({ + test, + persistence: memory +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/types/index.d.ts b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/types/index.d.ts new file mode 100644 index 000000000..079c66c70 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/types/index.d.ts @@ -0,0 +1,270 @@ +import type { Brokers, Client, Subscription } from 'aedes'; +import type { AedesPacket } from 'aedes-packet'; +import type { QoS } from 'mqtt-packet'; +import type { Readable } from 'stream'; + +export type { AedesPacket as Packet } from 'aedes-packet'; + +export interface AedesPersistenceSubscription { + clientId: string; + topic: string; + qos?: QoS; +} + +export type CallbackError = Error | null | undefined; + +export type WillPacket = AedesPacket & { [key: string]: any }; + +interface Incoming { + [clientId: string]: { [messageId: string]: AedesPacket }; +} + +export interface AedesPersistence { + storeRetained: ( + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + createRetainedStream: (pattern: string) => Readable; + + createRetainedStreamCombi: (patterns: string[]) => Readable; + + addSubscriptions: ( + client: Client, + subs: Subscription[], + cb: (error: CallbackError, client: Client) => void + ) => void; + + removeSubscriptions: ( + client: Client, + subs: Subscription[], + cb: (error: CallbackError, client: Client) => void + ) => void; + + subscriptionsByClient: ( + client: Client, + cb: ( + error: CallbackError, + subs: { topic: string; qos: QoS }[], + client: Client + ) => void + ) => void; + + countOffline: ( + cb: ( + error: CallbackError, + subscriptionsCount: number, + clientsCount: number + ) => void + ) => void; + + subscriptionsByTopic: ( + pattern: string, + cb: (error: CallbackError, subs: AedesPersistenceSubscription[]) => void + ) => void; + + cleanSubscriptions: ( + client: Client, + cb: (error: CallbackError, client: Client) => void + ) => void; + + outgoingEnqueue: ( + sub: { clientId: string }, + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + outgoingEnqueueCombi: ( + subs: { clientId: string }[], + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + outgoingUpdate: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError, client: Client, packet: AedesPacket) => void + ) => void; + + outgoingClearMessageId: ( + client: Client, + packet: AedesPacket, + cb: (error?: CallbackError, packet?: AedesPacket) => void + ) => void; + + outgoingStream: (client: Client) => Readable; + + incomingStorePacket: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + incomingGetPacket: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError, packet: AedesPacket) => void + ) => void; + + incomingDelPacket: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + putWill: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError, client: Client) => void + ) => void; + + getWill: ( + client: Client, + cb: (error: CallbackError, will: WillPacket, client: Client) => void + ) => void; + + delWill: ( + client: Client, + cb: (error: CallbackError, will: 
WillPacket, client: Client) => void + ) => void; + + streamWill: (brokers: Brokers) => Readable; + + getClientList: (topic: string) => Readable; + + destroy: (cb?: (error: CallbackError) => void) => void; +} + +export class AedesMemoryPersistence implements AedesPersistence { + _retained: AedesPacket[]; + _subscriptions: Map< + AedesPersistenceSubscription['clientId'], + Map< + AedesPersistenceSubscription['topic'], + AedesPersistenceSubscription['qos'] + > + >; + _clientsCount: number; + _trie: any; + _outgoing: Record; + _incoming: Incoming; + _wills: Record; + + constructor(); + + storeRetained: ( + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + createRetainedStream: (pattern: string) => Readable; + + createRetainedStreamCombi: (patterns: string[]) => Readable; + + addSubscriptions: ( + client: Client, + subs: Subscription[], + cb: (error: CallbackError, client: Client) => void + ) => void; + + removeSubscriptions: ( + client: Client, + subs: Subscription[], + cb: (error: CallbackError, client: Client) => void + ) => void; + + subscriptionsByClient: ( + client: Client, + cb: ( + error: CallbackError, + subs: { topic: string; qos: QoS }[], + client: Client + ) => void + ) => void; + + countOffline: ( + cb: ( + error: CallbackError, + subscriptionsCount: number, + clientsCount: number + ) => void + ) => void; + + subscriptionsByTopic: ( + pattern: string, + cb: (error: CallbackError, subs: AedesPersistenceSubscription[]) => void + ) => void; + + cleanSubscriptions: ( + client: Client, + cb: (error: CallbackError, client: Client) => void + ) => void; + + outgoingEnqueue: ( + sub: { clientId: string }, + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + outgoingEnqueueCombi: ( + sub: { clientId: string }[], + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + outgoingUpdate: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError, client: Client, packet: AedesPacket) => void + ) => void; + + outgoingClearMessageId: ( + client: Client, + packet: AedesPacket, + cb: (error?: CallbackError, packet?: AedesPacket) => void + ) => void; + + outgoingStream: (client: Client) => Readable; + + incomingStorePacket: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + incomingGetPacket: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError, packet: AedesPacket) => void + ) => void; + + incomingDelPacket: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError) => void + ) => void; + + putWill: ( + client: Client, + packet: AedesPacket, + cb: (error: CallbackError, client: Client) => void + ) => void; + + getWill: ( + client: Client, + cb: (error: CallbackError, will: WillPacket, client: Client) => void + ) => void; + + delWill: ( + client: Client, + cb: (error: CallbackError, will: WillPacket, client: Client) => void + ) => void; + + streamWill: (brokers: Brokers) => Readable; + + getClientList: (topic: string) => Readable; + + destroy: (cb?: (error: CallbackError) => void) => void; +} + +export default function aedesMemoryPersistence(): AedesMemoryPersistence; diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/types/index.test-d.ts b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/types/index.test-d.ts new file mode 100644 index 000000000..2249263e8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-persistence/types/index.test-d.ts @@ -0,0 +1,168 @@ +import type { Brokers, Client, 
Subscription } from 'aedes'; +import type { AedesPacket } from 'aedes-packet'; +import type { QoS } from 'mqtt-packet'; +import type { Readable } from 'stream'; +import { expectType } from 'tsd'; +import aedesMemoryPersistence, { + AedesMemoryPersistence, + AedesPersistenceSubscription, + CallbackError, + WillPacket, +} from '.'; + +expectType(aedesMemoryPersistence()); + +expectType( + aedesMemoryPersistence().storeRetained( + { + brokerId: '', + brokerCounter: 1, + cmd: 'publish', + qos: 0, + dup: false, + retain: false, + topic: 'test', + payload: 'test', + }, + (err: CallbackError) => {} + ) +); + +expectType( + aedesMemoryPersistence().addSubscriptions( + {} as Client, + [] as Subscription[], + (err: CallbackError) => {} + ) +); + +expectType( + aedesMemoryPersistence().removeSubscriptions( + {} as Client, + [] as Subscription[], + (err: CallbackError) => {} + ) +); + +expectType( + aedesMemoryPersistence().subscriptionsByClient( + {} as Client, + ( + error: CallbackError, + subs: { topic: string; qos: QoS }[], + client: Client + ) => {} + ) +); + +expectType( + aedesMemoryPersistence().countOffline( + ( + error: CallbackError, + subscriptionsCount: number, + clientsCount: number + ) => {} + ) +); + +expectType( + aedesMemoryPersistence().subscriptionsByTopic( + 'pattern', + (error: CallbackError, subs: AedesPersistenceSubscription[]) => {} + ) +); + +expectType( + aedesMemoryPersistence().cleanSubscriptions( + {} as Client, + (error: CallbackError, client: Client) => {} + ) +); + +expectType( + aedesMemoryPersistence().outgoingEnqueue( + { clientId: '' }, + {} as AedesPacket, + (error: CallbackError) => {} + ) +); + +expectType( + aedesMemoryPersistence().outgoingEnqueueCombi( + [{ clientId: '' }], + {} as AedesPacket, + (error: CallbackError) => {} + ) +); + +expectType( + aedesMemoryPersistence().outgoingUpdate( + {} as Client, + {} as AedesPacket, + (error: CallbackError, client: Client, packet: AedesPacket) => {} + ) +); + +expectType( + aedesMemoryPersistence().outgoingClearMessageId( + {} as Client, + {} as AedesPacket, + (error: CallbackError, packet?: AedesPacket) => {} + ) +); + +expectType(aedesMemoryPersistence().outgoingStream({} as Client)); + +expectType( + aedesMemoryPersistence().incomingStorePacket( + {} as Client, + {} as AedesPacket, + (error: CallbackError) => {} + ) +); + +expectType( + aedesMemoryPersistence().incomingGetPacket( + {} as Client, + {} as AedesPacket, + (error: CallbackError, packet: AedesPacket) => {} + ) +); + +expectType( + aedesMemoryPersistence().incomingDelPacket( + {} as Client, + {} as AedesPacket, + (error: CallbackError) => {} + ) +); + +expectType( + aedesMemoryPersistence().putWill( + {} as Client, + {} as AedesPacket, + (error: CallbackError, client: Client) => {} + ) +); + +expectType( + aedesMemoryPersistence().getWill( + {} as Client, + (error: CallbackError, will: WillPacket, client: Client) => {} + ) +); + +expectType( + aedesMemoryPersistence().delWill( + {} as Client, + (error: CallbackError, will: WillPacket, client: Client) => {} + ) +); + +expectType(aedesMemoryPersistence().streamWill({} as Brokers)); + +expectType(aedesMemoryPersistence().getClientList('topic')); + +expectType(aedesMemoryPersistence().destroy()); + +expectType(aedesMemoryPersistence().destroy(() => {})); diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/.github/workflows/ci.yml b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/.github/workflows/ci.yml new file mode 100644 index 000000000..35fa31fec --- 
/dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/.github/workflows/ci.yml @@ -0,0 +1,43 @@ +name: ci + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: [8.x, 10.x, 12.x, 13.x] + + steps: + - uses: actions/checkout@v1 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + - name: Check licenses + run: | + npm run license-checker + - name: Run tests + run: | + npm run test:ci + - name: Coveralls Parallel + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.github_token }} + parallel: true + + coverage: + needs: test + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + parallel-finished: true \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/LICENSE new file mode 100644 index 000000000..f847c8480 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 moscajs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/README.md b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/README.md new file mode 100644 index 000000000..d79eba7e5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/README.md @@ -0,0 +1,61 @@ +# aedes-protocol-decoder + +![](https://github.com/moscajs/aedes-protocol-decoder/workflows/ci/badge.svg) +[![Dependencies Status](https://david-dm.org/moscajs/aedes-protocol-decoder/status.svg)](https://david-dm.org/moscajs/aedes-protocol-decoder) +[![devDependencies Status](https://david-dm.org/moscajs/aedes-protocol-decoder/dev-status.svg)](https://david-dm.org/moscajs/aedes-protocol-decoder?type=dev) +
+[![Known Vulnerabilities](https://snyk.io/test/github/moscajs/aedes-protocol-decoder/badge.svg)](https://snyk.io/test/github/moscajs/aedes-protocol-decoder) +[![Coverage Status](https://coveralls.io/repos/moscajs/aedes-protocol-decoder/badge.svg?branch=master&service=github)](https://coveralls.io/github/moscajs/aedes-protocol-decoder?branch=master) +[![NPM version](https://img.shields.io/npm/v/aedes-protocol-decoder.svg?style=flat)](https://www.npmjs.com/package/aedes-protocol-decoder) +[![NPM downloads](https://img.shields.io/npm/dm/aedes-protocol-decoder.svg?style=flat)](https://www.npmjs.com/package/aedes-protocol-decoder) + + +[![js-standard-style](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) + +Protocol decoder for Aedes MQTT Broker + +The purpose of this module is to be used inside [aedes](https://github.com/moscajs/aedes) `decodeProtocol` hook, which is called when aedes instance receives a first valid buffer from client ( before CONNECT packet). The client object state is in default and its connected state is false. +The function extract socket details and if aedes `trustProxy` option is set to true, it will first parse http headers (x-real-ip | x-forwarded-for) and proxy protocol (v1 and v2) to retrieve information in client.connDetails. + +The function `protocolDecoder` returns [ConnectionDetails](./types/index.d.ts), if the object contains data property, it will be parsed as an mqtt-packet. + +## Install + +```bash +npm install aedes-protocol-decoder --save +``` + +## Example + +```js +var aedes = require('aedes') +var { protocolDecoder } = require('aedes-protocol-decoder') +var net = require('net') +var port = 1883 + +var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + } + return done(null, true) + }, + trustProxy: true +}) + +var server = net.createServer(broker.handle) + +server.listen(port, function () { + console.log('server listening on port', port) +}) + + +``` + +## License + +MIT diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/example.js b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/example.js new file mode 100644 index 000000000..c23cf3902 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/example.js @@ -0,0 +1,174 @@ +'use strict' + +var aedes = require('aedes') +var mqttPacket = require('mqtt-packet') +var net = require('net') +var proxyProtocol = require('proxy-protocol-js') +var protocolDecoder = require('./lib/protocol-decoder') + +var brokerPort = 4883 + +// from https://stackoverflow.com/questions/57077161/how-do-i-convert-hex-buffer-to-ipv6-in-javascript +function parseIpV6 (ip) { + return ip.match(/.{1,4}/g) + .map((val) => val.replace(/^0+/, '')) + .join(':') + .replace(/0000:/g, ':') + .replace(/:{2,}/g, '::') +} + +function sendProxyPacket (version = 1, ipFamily = 4) { + var packet = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: `my-client-${version}`, + keepalive: 0 + } + var hostIpV4 = '0.0.0.0' + var clientIpV4 = '192.168.1.128' + var hostIpV6 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + var clientIpV6 = [0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 192, 168, 1, 128] + var protocol + if (version === 1) { + if (ipFamily === 4) { + 
protocol = new proxyProtocol.V1BinaryProxyProtocol( + proxyProtocol.INETProtocol.TCP4, + new proxyProtocol.Peer(clientIpV4, 12345), + new proxyProtocol.Peer(hostIpV4, brokerPort), + mqttPacket.generate(packet) + ).build() + } else if (ipFamily === 6) { + protocol = new proxyProtocol.V1BinaryProxyProtocol( + proxyProtocol.INETProtocol.TCP6, + new proxyProtocol.Peer(parseIpV6(Buffer.from(clientIpV6).toString('hex')), 12345), + new proxyProtocol.Peer(parseIpV6(Buffer.from(hostIpV6).toString('hex')), brokerPort), + mqttPacket.generate(packet) + ).build() + } + } else if (version === 2) { + if (ipFamily === 4) { + protocol = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.LOCAL, + proxyProtocol.TransportProtocol.STREAM, + new proxyProtocol.IPv4ProxyAddress( + proxyProtocol.IPv4Address.createFrom(clientIpV4.split('.')), + 12346, + proxyProtocol.IPv4Address.createFrom(hostIpV4.split('.')), + brokerPort + ), + mqttPacket.generate(packet) + ).build() + } else if (ipFamily === 6) { + protocol = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.PROXY, + proxyProtocol.TransportProtocol.STREAM, + new proxyProtocol.IPv6ProxyAddress( + proxyProtocol.IPv6Address.createFrom(clientIpV6), + 12346, + proxyProtocol.IPv6Address.createFrom(hostIpV6), + brokerPort + ), + mqttPacket.generate(packet) + ).build() + } + } + + var parsedProto = version === 1 + ? proxyProtocol.V1BinaryProxyProtocol.parse(protocol) + : proxyProtocol.V2ProxyProtocol.parse(protocol) + // console.log(parsedProto) + + var dstPort = version === 1 + ? parsedProto.destination.port + : parsedProto.proxyAddress.destinationPort + + var dstHost + if (version === 1) { + if (ipFamily === 4) { + dstHost = parsedProto.destination.ipAddress + } else if (ipFamily === 6) { + dstHost = parsedProto.destination.ipAddress + // console.log('ipV6 host :', parsedProto.destination.ipAddress) + } + } else if (version === 2) { + if (ipFamily === 4) { + dstHost = parsedProto.proxyAddress.destinationAddress.address.join('.') + } else if (ipFamily === 6) { + // console.log('ipV6 client :', parseIpV6(Buffer.from(clientIpV6).toString('hex'))) + dstHost = parseIpV6(Buffer.from(parsedProto.proxyAddress.destinationAddress.address).toString('hex')) + } + } + + console.log('Connection to :', dstHost, dstPort) + var mqttConn = net.createConnection( + { + port: dstPort, + host: dstHost, + timeout: 150 + } + ) + + var data = protocol + + mqttConn.on('timeout', function () { + mqttConn.end(data) + }) +} + +function startAedes () { + var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + console.log('Aedes preConnect check client ip:', client.connDetails) + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + } + client.close() + return done(null, true) + }, + trustProxy: true + }) + + var server = require('net').createServer(broker.handle) + + server.listen(brokerPort, function () { + console.log('Aedes listening on :', server.address()) + broker.publish({ topic: 'aedes/hello', payload: "I'm broker " + broker.id }) + setTimeout(() => sendProxyPacket(1), 250) + setTimeout(() => sendProxyPacket(1, 6), 500) + setTimeout(() => sendProxyPacket(2), 750) + setTimeout(() => sendProxyPacket(2, 6), 1000) + }) + + broker.on('subscribe', function (subscriptions, client) { + console.log('MQTT client \x1b[32m' + (client ? 
client.id : client) + + '\x1b[0m subscribed to topics: ' + subscriptions.map(s => s.topic).join('\n'), 'from broker', broker.id) + }) + + broker.on('unsubscribe', function (subscriptions, client) { + console.log('MQTT client \x1b[32m' + (client ? client.id : client) + + '\x1b[0m unsubscribed to topics: ' + subscriptions.join('\n'), 'from broker', broker.id) + }) + + // fired when a client connects + broker.on('client', function (client) { + console.log('Client Connected: \x1b[33m' + (client ? client.id : client) + ' ip ' + (client ? client.ip : null) + '\x1b[0m', 'to broker', broker.id) + }) + + // fired when a client disconnects + broker.on('clientDisconnect', function (client) { + console.log('Client Disconnected: \x1b[31m' + (client ? client.id : client) + '\x1b[0m', 'to broker', broker.id) + }) + + // fired when a message is published + broker.on('publish', async function (packet, client) { + console.log('Client \x1b[31m' + (client ? client.id : 'BROKER_' + broker.id) + '\x1b[0m has published', packet.payload.toString(), 'on', packet.topic, 'to broker', broker.id) + }) +} + +startAedes() diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/index.js b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/index.js new file mode 100644 index 000000000..c2d994d5a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/index.js @@ -0,0 +1,5 @@ +var protocolDecoder = require('./lib/protocol-decoder') + +module.exports = { + protocolDecoder +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/package.json b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/package.json new file mode 100644 index 000000000..e063c3c70 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/package.json @@ -0,0 +1,117 @@ +{ + "_from": "aedes-protocol-decoder@^1.0.0", + "_id": "aedes-protocol-decoder@1.0.0", + "_inBundle": false, + "_integrity": "sha512-nzWwSetptIFFk0gXtXBvmBPUD6HwuZKeVm/8mRJqEKfBJ4ZnvgpcO5CFwEDzTi884jOWTIBaJAWll3QLYsOCFQ==", + "_location": "/aedes-protocol-decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aedes-protocol-decoder@^1.0.0", + "name": "aedes-protocol-decoder", + "escapedName": "aedes-protocol-decoder", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/aedes-protocol-decoder/-/aedes-protocol-decoder-1.0.0.tgz", + "_shasum": "dfc7d6f11bdf01de95820b855ac6655db96ea75a", + "_spec": "aedes-protocol-decoder@^1.0.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Get Large", + "email": "https://github.com/getlarge" + }, + "bugs": { + "url": "http://github.com/moscajs/aedes-protocol-decoder/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Get Large", + "url": "https://github.com/getlarge" + }, + { + "name": "Daniel Lando", + "url": "https://github.com/robertsLando" + } + ], + "dependencies": { + "forwarded": "^0.1.2", + "proxy-protocol-js": "^4.0.3" + }, + "deprecated": false, + "description": "Aedes plugin to decode connection frame and validate proxies", + "devDependencies": { + "@types/node": "^12.12.25", + "@typescript-eslint/eslint-plugin": "^2.17.0", + "@typescript-eslint/parser": "^2.17.0", + "aedes": 
"git+https://git@github.com/moscajs/aedes.git#master", + "faucet": "0.0.1", + "license-checker": "^25.0.1", + "mqtt": "^3.0.0", + "mqtt-packet": "^6.3.0", + "nyc": "^15.0.0", + "pre-commit": "^1.2.2", + "release-it": "^12.4.3", + "snazzy": "^8.0.0", + "standard": "^14.3.1", + "tape": "^4.13.0", + "typescript": "^3.7.5", + "websocket-stream": "^5.5.0" + }, + "engines": { + "node": ">=8" + }, + "homepage": "https://github.com/moscajs/aedes-protocol-decoder#readme", + "keywords": [ + "mqtt", + "proxy", + "protocol", + "decoder", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "aedes-protocol-decoder", + "pre-commit": [ + "test" + ], + "release-it": { + "github": { + "release": true + }, + "git": { + "tagName": "v${version}" + }, + "hooks": { + "before:init": [ + "npm run test" + ] + }, + "npm": { + "publish": true + } + }, + "repository": { + "type": "git", + "url": "git+https://github.com/moscajs/aedes-protocol-decoder.git" + }, + "scripts": { + "coverage": "nyc --reporter=lcov tape test.js", + "license-checker": "license-checker --production --onlyAllow='MIT;ISC;BSD-3-Clause;BSD-2-Clause'", + "lint": "npm run lint:standard && npm run lint:typescript", + "lint:standard": "standard --verbose | snazzy", + "lint:typescript": "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin types/**/*.d.ts", + "release": "read -p 'GITHUB_TOKEN: ' GITHUB_TOKEN && export GITHUB_TOKEN=$GITHUB_TOKEN && release-it", + "test": "npm run lint && npm run unit", + "test:ci": "npm run lint && npm run coverage", + "unit": "tape test.js | faucet" + }, + "types": "types/index.d.ts", + "version": "1.0.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/test.js b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/test.js new file mode 100644 index 000000000..f58b8d8f0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/test.js @@ -0,0 +1,482 @@ +'use strict' + +var test = require('tape').test +var aedes = require('aedes') +var http = require('http') +var ws = require('websocket-stream') +var mqtt = require('mqtt') +var mqttPacket = require('mqtt-packet') +var net = require('net') +var proxyProtocol = require('proxy-protocol-js') +var protocolDecoder = require('./lib/protocol-decoder') + +// test ipAddress property presence when trustProxy is enabled +test('tcp clients have access to the ipAddress from the socket', function (t) { + t.plan(2) + + var port = 4883 + var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + if (client && client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal('::ffff:127.0.0.1', client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + setImmediate(finish) + }, + trustProxy: true + }) + + var server = net.createServer(broker.handle) + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + var client = mqtt.connect({ + port, + keepalive: 0, + clientId: 'mqtt-client', + clean: false + }) + + function finish () { + client.end() + broker.close() + server.close() + t.end() + } +}) + +test('tcp proxied (protocol v1) clients have access to the ipAddress(v4)', function (t) { + t.plan(2) + + var port = 4883 + var clientIp = '192.168.0.140' + var packet = { + cmd: 'connect', + protocolId: 'MQIsdp', + protocolVersion: 3, + 
clean: true, + clientId: 'my-client-proxyV1', + keepalive: 0 + } + + var buf = mqttPacket.generate(packet) + var src = new proxyProtocol.Peer(clientIp, 12345) + var dst = new proxyProtocol.Peer('127.0.0.1', port) + var protocol = new proxyProtocol.V1BinaryProxyProtocol( + proxyProtocol.INETProtocol.TCP4, + src, + dst, + buf + ).build() + + var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + setImmediate(finish) + }, + trustProxy: true + }) + + var server = net.createServer(broker.handle) + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + var client = net.connect({ + port, + timeout: 0 + }, function () { + client.write(protocol) + }) + + function finish () { + client.end() + broker.close() + server.close() + t.end() + } +}) + +test('tcp proxied (protocol v2) clients have access to the ipAddress(v4)', function (t) { + t.plan(2) + + var port = 4883 + var clientIp = '192.168.0.140' + var packet = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client-proxyV2' + } + + var protocol = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.LOCAL, + proxyProtocol.TransportProtocol.DGRAM, + new proxyProtocol.IPv4ProxyAddress( + proxyProtocol.IPv4Address.createFrom(clientIp.split('.')), + 12345, + proxyProtocol.IPv4Address.createFrom([127, 0, 0, 1]), + port + ), + mqttPacket.generate(packet) + ).build() + + var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + setImmediate(finish) + }, + trustProxy: true + }) + + var server = net.createServer(broker.handle) + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + var client = net.createConnection( + { + port, + timeout: 0 + }, function () { + client.write(Buffer.from(protocol)) + } + ) + + function finish () { + client.end() + broker.close() + server.close() + t.end() + } +}) + +test('tcp proxied (protocol v2) clients have access to the ipAddress(v6)', function (t) { + t.plan(2) + + var port = 4883 + var clientIpArray = [0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 192, 168, 1, 128] + var clientIp = '::ffff:c0a8:180:' + var packet = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client-proxyV2' + } + + var protocol = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.PROXY, + proxyProtocol.TransportProtocol.STREAM, + new proxyProtocol.IPv6ProxyAddress( + proxyProtocol.IPv6Address.createFrom(clientIpArray), + 12345, + proxyProtocol.IPv6Address.createWithEmptyAddress(), + port + ), + mqttPacket.generate(packet) + ).build() + + var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + 
done(null, true) + setImmediate(finish) + }, + trustProxy: true + }) + + var server = net.createServer(broker.handle) + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + var client = net.createConnection( + { + port, + timeout: 0 + }, function () { + client.write(Buffer.from(protocol)) + } + ) + + function finish () { + client.end() + broker.close() + server.close() + t.end() + } +}) + +test('websocket clients have access to the ipAddress from the socket (if no ip header)', function (t) { + t.plan(2) + + var clientIp = '::ffff:127.0.0.1' + var port = 4883 + var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + setImmediate(finish) + }, + trustProxy: true + }) + + var server = http.createServer() + ws.createServer({ + server: server + }, broker.handle) + + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + var client = mqtt.connect(`ws://localhost:${port}`) + + function finish () { + broker.close() + server.close() + client.end() + t.end() + } +}) + +test('websocket proxied clients have access to the ipAddress from x-real-ip header', function (t) { + t.plan(2) + + var clientIp = '192.168.0.140' + var port = 4883 + var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + setImmediate(finish) + }, + trustProxy: true + }) + + var server = http.createServer() + ws.createServer({ + server: server + }, broker.handle) + + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + var client = mqtt.connect(`ws://localhost:${port}`, { + wsOptions: { + headers: { + 'X-Real-Ip': clientIp + } + } + }) + + function finish () { + broker.close() + server.close() + client.end() + t.end() + } +}) + +test('websocket proxied clients have access to the ipAddress from x-forwarded-for header', function (t) { + t.plan(2) + + var clientIp = '192.168.0.140' + var port = 4883 + var broker = aedes({ + decodeProtocol: function (client, buffer) { + var proto = protocolDecoder(client, buffer) + return proto + }, + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + setImmediate(finish) + }, + trustProxy: true + }) + + var server = http.createServer() + ws.createServer({ + server: server + }, broker.handle) + + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + var client = mqtt.connect(`ws://localhost:${port}`, { + wsOptions: { + headers: { + 'X-Forwarded-For': clientIp + } + } + }) + + function finish () { + broker.close() + server.close() + client.end() + t.end() + } +}) + +test('tcp proxied (protocol v1) clients buffer contains MQTT packet and proxy header', function (t) { + t.plan(3) + + var brokerPort = 4883 + var proxyPort = 4884 + var clientIp = '192.168.0.140' + var packet = { + cmd: 'connect', + protocolId: 
'MQIsdp', + protocolVersion: 3, + clean: true, + clientId: 'my-client-proxyV1', + keepalive: 0 + } + + var buf = mqttPacket.generate(packet) + var src = new proxyProtocol.Peer(clientIp, 12345) + var dst = new proxyProtocol.Peer('127.0.0.1', proxyPort) + + var broker = aedes({ + decodeProtocol: function (client, buff) { + var proto = protocolDecoder(client, buff) + if (proto.data) { + t.equal(proto.data.toString(), buf.toString()) + } else { + t.fail('no MQTT packet extracted from TCP buffer') + } + return proto + }, + trustProxy: true + }) + + broker.on('clientDisconnect', function (client) { + // console.log('onClientDisconnect', client.id) + setImmediate(finish) + }) + + var server = net.createServer(broker.handle) + server.listen(brokerPort, function (err) { + t.error(err, 'no error') + }) + + var proxyServer = net.createServer() + proxyServer.listen(proxyPort, function (err) { + t.error(err, 'no error') + }) + + var proxyClient + + proxyServer.on('connection', function (socket) { + socket.on('end', function (data) { + proxyClient.end(data, function () { + proxyClient.connected = false + }) + }) + + socket.on('data', function (data) { + if (proxyClient && proxyClient.connected) { + proxyClient.write(data) + } else { + var protocol = new proxyProtocol.V1BinaryProxyProtocol( + proxyProtocol.INETProtocol.TCP4, + src, + dst, + data + ).build() + proxyClient = net.connect({ + port: brokerPort, + timeout: 0 + }, function () { + proxyClient.write(protocol, function () { + proxyClient.connected = true + }) + }) + } + }) + }) + + var client = net.connect({ + port: proxyPort, + timeout: 200 + }, function () { + client.write(buf) + }) + + client.on('timeout', function () { + client.end(mqttPacket.generate({ cmd: 'disconnect' })) + }) + + function finish () { + broker.close() + server.close() + proxyServer.close() + t.end() + } +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/types/index.d.ts b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/types/index.d.ts new file mode 100644 index 000000000..7ab31d1f6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes-protocol-decoder/types/index.d.ts @@ -0,0 +1,11 @@ +export interface ConnectionDetails { + ipAddress: string + port: number + ipFamily: number + serverIpAddress: string + isWebsocket: boolean + isProxy: number + data?: Buffer +} + +export type ProtocolDecoder = (client: Object, buffer: Buffer) => ConnectionDetails | null diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/.coveralls.yml b/sdklab/meantimerecovery/aedes/node_modules/aedes/.coveralls.yml new file mode 100644 index 000000000..3131e781b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/.coveralls.yml @@ -0,0 +1 @@ +repo_token: 0zzWBuyAWoQxHHR0JQ4jdZ12CPdvhiolr diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/.github/workflows/ci.yml b/sdklab/meantimerecovery/aedes/node_modules/aedes/.github/workflows/ci.yml new file mode 100644 index 000000000..ad6deacc2 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/.github/workflows/ci.yml @@ -0,0 +1,51 @@ +name: ci + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [10.x, 12.x, 13.x] + + steps: + - uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Check licenses + run: | + npm run license-checker + + 
- name: Run tests + run: | + npm run test:ci + + - name: Coveralls Parallel + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.github_token }} + parallel: true + + coverage: + needs: test + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + parallel-finished: true diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/CODE_OF_CONDUCT.md b/sdklab/meantimerecovery/aedes/node_modules/aedes/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..0d5ca1b9d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/CODE_OF_CONDUCT.md @@ -0,0 +1,84 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience +* Focusing on what is best not just for us as individuals, but for the overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at hello@matteocollina.com. All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the reporter of any incident. 
+ +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of actions. + +**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, +available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/aedes/LICENSE new file mode 100644 index 000000000..0a28403e2 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) Aedes Contributors +Copyright (c) 2015-2020 Matteo Collina, http://matteocollina.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/README.md b/sdklab/meantimerecovery/aedes/node_modules/aedes/README.md new file mode 100644 index 000000000..541988dda --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/README.md @@ -0,0 +1,312 @@ + +# Aedes + +![ci](https://github.com/moscajs/aedes/workflows/ci/badge.svg) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/) +[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg)](https://github.com/moscajs/aedes/graphs/commit-activity) +[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://github.com/moscajs/aedes/pulls)\ +[![Total alerts](https://img.shields.io/lgtm/alerts/g/moscajs/aedes.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/moscajs/aedes/alerts/) +[![Language grade: JavaScript](https://img.shields.io/lgtm/grade/javascript/g/moscajs/aedes.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/moscajs/aedes/context:javascript) +[![Coverage Status](https://coveralls.io/repos/moscajs/aedes/badge.svg?branch=master&service=github)](https://coveralls.io/github/moscajs/aedes?branch=master) +[![Known Vulnerabilities](https://snyk.io/test/github/moscajs/aedes/badge.svg)](https://snyk.io/test/github/moscajs/aedes)\ +[![Dependencies Status](https://david-dm.org/moscajs/aedes/status.svg)](https://david-dm.org/moscajs/aedes) +[![devDependencies Status](https://david-dm.org/moscajs/aedes/dev-status.svg)](https://david-dm.org/moscajs/aedes?type=dev)\ +![node](https://img.shields.io/node/v/aedes) +[![NPM version](https://img.shields.io/npm/v/aedes.svg?style=flat)](https://www.npmjs.com/aedes) +[![NPM downloads](https://img.shields.io/npm/dm/aedes.svg?style=flat)](https://www.npmjs.com/aedes) + +[![opencollective](https://opencollective.com/aedes/donate/button.png)](https://opencollective.com/aedes/donate) + +Barebone MQTT server that can run on any stream servers + +- [Aedes](#aedes) + - [Install](#install) + - [Docker](#docker) + - [API](#api) + - [Features](#features) + - [Examples](#examples) + - [Clusters](#clusters) + - [Exensions](#exensions) + - [Middleware Plugins](#middleware-plugins) + - [Persistence](#persistence) + - [MQEmitter](#mqemitter) + - [Acknowledgements](#acknowledgements) + - [Mosca vs Aedes](#mosca-vs-aedes) + - [Benchmark: Aedes](#benchmark-aedes) + - [In memory - No clusters](#in-memory---no-clusters) + - [Redis Persistence and Redis Emitter - With Clusters](#redis-persistence-and-redis-emitter---with-clusters) + - [Mongo Persistence and Redis Emitter - With Clusters](#mongo-persistence-and-redis-emitter---with-clusters) + - [Redis Persistence and Mongodb Emitter - With Clusters](#redis-persistence-and-mongodb-emitter---with-clusters) + - [Benchmark: Mosca](#benchmark-mosca) + - [Made with Aedes](#made-with-aedes) + - [Collaborators](#collaborators) + - [Contribution](#contribution) + - [Support](#support) + - [Backers](#backers) + - [Sponsors](#sponsors) 
+ - [License](#license) + +## Install + +To install aedes, simply use npm: + +```sh +npm install aedes +``` + +## Docker + +Check Docker docs [here](https://github.com/moscajs/aedes-cli#docker) + +## API + +- [Aedes object](./docs/Aedes.md) +- [Client object](./docs/Client.md) + +## Features + +- Full compatible with [MQTT 3.1 and 3.1.1][ISO20922] +- Standard TCP Support +- SSL / TLS +- WebSocket Support +- Message Persistence +- Automatic Reconnect +- Offline Buffering +- Backpress-support API +- High Availability +- Clusterable +- Authentication and Authorization +- `$SYS` support +- Pluggable middlewares +- [Dynamic Topics][dynamic_topics] Support +- MQTT Bridge Support between aedes +- [MQTT 5.0][mqttv5] _(not support yet)_ +- [Bridge Protocol][bridge_protocol] _(not support yet)_ + +## Examples + +- [Examples](./docs/Examples.md) + +## Clusters + +Aedes needs on disk dbs like MongoDB and Redis in order to work with clusters. Based on our tests and users reports the best performances/stability are reached when using [aedes-persistence-mongodb] paired with [mqemitter-redis]. + +Other info: + +- The repo [aedes-tests](https://github.com/moscajs/aedes-tests) is used to test aedes with clusters and different emitters/persistences. Check its source code to have a starting point on how to work with clusters + +## Exensions + +- [aedes-logging]: Logging module for Aedes, based on Pino +- [aedes-stats]: Stats for Aedes +- [aedes-cli]: Run Aedes MQTT Broker from the CLI +- [aedes-protocol-decoder]: Protocol decoder for Aedes MQTT Broker + +## Middleware Plugins + +### Persistence + +- [aedes-persistence]: In-memory implementation of an Aedes persistence +- [aedes-persistence-mongodb]: MongoDB persistence for Aedes +- [aedes-persistence-redis]: Redis persistence for Aedes +- [aedes-persistence-level]: LevelDB persistence for Aedes +- [aedes-persistence-nedb]: NeDB persistence for Aedes + +### MQEmitter + +- [mqemitter]: An opinionated memory Message Queue with an emitter-style API +- [mqemitter-redis]: Redis-powered mqemitter +- [mqemitter-mongodb]: Mongodb based mqemitter +- [mqemitter-child-process]: Share the same mqemitter between a hierarchy of + child processes +- [mqemitter-cs]: Expose a MQEmitter via a simple client/server protocol +- [mqemitter-p2p]: A P2P implementation of MQEmitter, based on HyperEmitter and + a Merkle DAG +- [mqemitter-aerospike]: Aerospike mqemitter + +## Acknowledgements + +This library is born after a lot of discussion with all +[Mosca](http://www.npmjs.com/mosca) users and how that was deployed in +production. This addresses your concerns about performance and stability. + +## Mosca vs Aedes + +Example benchmark test with 1000 clients sending 5000 QoS 1 messsages. 
Used +[mqtt-benchmark] with command: + +```sh +mqtt-benchmark --broker tcp://localhost:1883 --clients 1000 --qos 1 --count 5000 +``` + +CPU INFO: + +```sh +Architecture: x86_64 +CPU op-mode(s): 32-bit, 64-bit +Byte Order: Little Endian +CPU(s): 8 +On-line CPU(s) list: 0-7 +Thread(s) per core: 2 +Core(s) per socket: 4 +Socket(s): 1 +NUMA node(s): 1 +Vendor ID: GenuineIntel +CPU family: 6 +Model: 94 +Model name: Intel(R) Core(TM) i7-6700HQ CPU @ 2.60GHz +Stepping: 3 +CPU MHz: 800.014 +CPU max MHz: 3500,0000 +CPU min MHz: 800,0000 +BogoMIPS: 5199.98 +Virtualization: VT-x +L1d cache: 32K +L1i cache: 32K +L2 cache: 256K +L3 cache: 6144K +``` + +### Benchmark: Aedes + +#### In memory - No clusters + +```sh +========= TOTAL (1000) ========= +Total Ratio: 1.000 (5000000/5000000) +Total Runtime (sec): 178.495 +Average Runtime (sec): 177.845 +Msg time min (ms): 0.077 +Msg time max (ms): 199.805 +Msg time mean mean (ms): 35.403 +Msg time mean std (ms): 0.042 +Average Bandwidth (msg/sec): 28.115 +Total Bandwidth (msg/sec): 28114.678 +``` + +#### Redis Persistence and Redis Emitter - With Clusters + +```sh +========= TOTAL (1000) ========= +Total Ratio: 1.000 (5000000/5000000) +Total Runtime (sec): 114.404 +Average Runtime (sec): 109.022 +Msg time min (ms): 0.065 +Msg time max (ms): 393.214 +Msg time mean mean (ms): 21.520 +Msg time mean std (ms): 0.595 +Average Bandwidth (msg/sec): 45.896 +Total Bandwidth (msg/sec): 45896.306 +``` + +#### Mongo Persistence and Redis Emitter - With Clusters + +```sh +========= TOTAL (1000) ========= +Total Ratio: 1.000 (5000000/5000000) +Total Runtime (sec): 112.769 +Average Runtime (sec): 105.524 +Msg time min (ms): 0.062 +Msg time max (ms): 329.062 +Msg time mean mean (ms): 20.750 +Msg time mean std (ms): 0.878 +Average Bandwidth (msg/sec): 47.464 +Total Bandwidth (msg/sec): 47464.271 +``` + +### Redis Persistence and Mongodb Emitter - With Clusters + +```sh +========= TOTAL (1000) ========= +Total Ratio: 1.000 (5000000/5000000) +Total Runtime (sec): 118.587 +Average Runtime (sec): 114.190 +Msg time min (ms): 0.080 +Msg time max (ms): 324.028 +Msg time mean mean (ms): 22.558 +Msg time mean std (ms): 0.730 +Average Bandwidth (msg/sec): 43.832 +Total Bandwidth (msg/sec): 43831.927 +``` + +### Benchmark: [Mosca](http://www.npmjs.com/mosca) + +```sh +========= TOTAL (1000) ========= +Total Ratio: 1.000 (5000000/5000000) +Total Runtime (sec): 264.934 +Average Runtime (sec): 264.190 +Msg time min (ms): 0.070 +Msg time max (ms): 168.116 +Msg time mean mean (ms): 52.629 +Msg time mean std (ms): 0.074 +Average Bandwidth (msg/sec): 18.926 +Total Bandwidth (msg/sec): 18925.942 +``` + +## Made with Aedes + +Here is a list of some interesting projects that are using Aedes as MQTT Broker. 
Submit a PR or an issue if you would like to add yours + +- [node-red-contrib-aedes](https://github.com/martin-doyle/node-red-contrib-aedes): MQTT broker for Node-Red based on Aedes +- [Mqtt2Mqtt](https://github.com/robertsLando/Mqtt2Mqtt): Mqtt Bridge between two brokers with UI +- [Kuzzle](https://github.com/kuzzleio/kuzzle): High performance and full featured IoT backend using MQTT alongside WebSocket and Http protocols + +## Collaborators + +- [__Gavin D'mello__](https://github.com/GavinDmello) +- [__Behrad Zari__](https://github.com/behrad) +- [__Gnought__](https://github.com/gnought) +- [__Daniel Lando__](https://github.com/robertsLando) + +## Contribution + +[![Help wanted](https://img.shields.io/github/labels/moscajs/aedes/help%20wanted)](https://github.com/moscajs/aedes/labels/help%20wanted) +[![Contributors](https://img.shields.io/github/contributors/moscajs/aedes)](https://github.com/moscajs/aedes/graphs/contributors) + +Want to contribute? Check our list of +[features/bugs](https://github.com/moscajs/aedes/projects/1) + +## Support + +If there are bugs/leaks in production scenarios, we encourage people to send Pull Request and/or reach out maintainers for some paid support. + +### Backers + +Thank you to all our backers! :raised_hands: + +[![Backers](https://opencollective.com/aedes/backers.svg?avatarHeight=64&width=890&button=false)](https://opencollective.com/aedes#backers) + +### Sponsors + +Become a sponsor to get your logo on our README on Github + +[![Sponsor](https://opencollective.com/aedes/sponsors.svg)](https://opencollective.com/aedes#sponsor) + +## License + +Licensed under [MIT](./LICENSE). 
+ +[ISO20922]: https://docs.oasis-open.org/mqtt/mqtt/v3.1.1/mqtt-v3.1.1.html +[mqttv5]: https://docs.oasis-open.org/mqtt/mqtt/v5.0/mqtt-v5.0.html +[bridge_protocol]: https://github.com/mqtt/mqtt.github.io/wiki/bridge_protocol +[dynamic_topics]: https://github.com/mqtt/mqtt.github.io/wiki/are_topics_dynamic +[mqtt-benchmark]: https://github.com/krylovsk/mqtt-benchmark + +[aedes-logging]: https://www.npmjs.com/aedes-logging +[aedes-stats]: https://www.npmjs.com/aedes-stats +[aedes-cli]: https://www.npmjs.com/aedes-cli +[aedes-protocol-decoder]: https://www.npmjs.com/aedes-protocol-decoder +[aedes-persistence]: https://www.npmjs.com/aedes-persistence +[aedes-persistence-mongodb]: https://www.npmjs.com/aedes-persistence-mongodb +[aedes-persistence-redis]: https://www.npmjs.com/aedes-persistence-redis +[aedes-persistence-level]: https://www.npmjs.com/aedes-persistence-level +[aedes-persistence-nedb]: https://www.npmjs.com/aedes-persistence-nedb + +[mqemitter]: https://www.npmjs.com/mqemitter +[mqemitter-redis]: https://www.npmjs.com/mqemitter-redis +[mqemitter-mongodb]: https://www.npmjs.com/mqemitter-mongodb +[mqemitter-child-process]: https://www.npmjs.com/mqemitter-child-process +[mqemitter-cs]: https://www.npmjs.com/mqemitter-cs +[mqemitter-p2p]: https://www.npmjs.com/mqemitter-p2p +[mqemitter-aerospike]: https://www.npmjs.com/mqemitter-aerospike diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/aedes.d.ts b/sdklab/meantimerecovery/aedes/node_modules/aedes/aedes.d.ts new file mode 100644 index 000000000..3f06ac95f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/aedes.d.ts @@ -0,0 +1,135 @@ +/* eslint no-unused-vars: 0 */ +/* eslint no-undef: 0 */ +/* eslint space-infix-ops: 0 */ + +/// + +import { IConnackPacket, IPingreqPacket, IPublishPacket, IPubrelPacket, ISubscribePacket, ISubscription, IUnsubscribePacket } from 'mqtt-packet' +import { AedesPacket } from 'aedes-packet' +import { Duplex } from 'stream' +import { Socket } from 'net' +import { IncomingMessage } from 'http' +import EventEmitter = NodeJS.EventEmitter + +declare function aedes (options?: aedes.AedesOptions): aedes.Aedes + +// eslint-disable-next-line no-redeclare +declare namespace aedes { + enum AuthErrorCode { + UNNACCEPTABLE_PROTOCOL = 1, + IDENTIFIER_REJECTED = 2, + SERVER_UNAVAILABLE = 3, + BAD_USERNAME_OR_PASSWORD = 4, + NOT_AUTHORIZED = 5 + } + + type Connection = Duplex | Socket + + type Subscription = ISubscription + type Subscriptions = { subscriptions: Subscription[] } + type SubscribePacket = ISubscribePacket & { cmd: 'subscribe' } + type UnsubscribePacket = IUnsubscribePacket & { cmd: 'unsubscribe' } + + type PublishPacket = IPublishPacket & { cmd: 'publish' } + type AedesPublishPacket = PublishPacket & AedesPacket + + type ConnackPacket = IConnackPacket & { cmd: 'connack' } + type PubrelPacket = IPubrelPacket & { cmd: 'pubrel' } + type PingreqPacket = IPingreqPacket & { cmd: 'pingreq' } + + type PreConnectHandler = (client: Client, callback: (error: Error | null, success: boolean) => void) => void + + type AuthenticateError = Error & { returnCode: AuthErrorCode } + + type AuthenticateHandler = ( + client: Client, + username: string, + password: Buffer, + done: (error: AuthenticateError | null, success: boolean | null) => void + ) => void + + type AuthorizePublishHandler = (client: Client, packet: PublishPacket, callback: (error?: Error | null) => void) => void + + type AuthorizeSubscribeHandler = (client: Client, subscription: 
Subscription, callback: (error: Error | null, subscription?: Subscription | null) => void) => void + + type AuthorizeForwardHandler = (client: Client, packet: AedesPublishPacket) => AedesPublishPacket | null | void + + type PublishedHandler = (packet: AedesPublishPacket, client: Client, callback: (error?: Error | null) => void) => void + + interface AedesOptions { + mq?: any + id?: string + persistence?: any + concurrency?: number + heartbeatInterval?: number + connectTimeout?: number + preConnect?: PreConnectHandler + authenticate?: AuthenticateHandler + authorizePublish?: AuthorizePublishHandler + authorizeSubscribe?: AuthorizeSubscribeHandler + authorizeForward?: AuthorizeForwardHandler + published?: PublishedHandler + queueLimit?: number + maxClientsIdLength?: number + } + interface Client extends EventEmitter { + id: string + clean: boolean + version: number + conn: Connection + req?: IncomingMessage + connecting: boolean + connected: boolean + closed: boolean + + on (event: 'connected', listener: () => void): this + on (event: 'error', listener: (error: Error) => void): this + + publish (message: PublishPacket, callback?: (error?: Error) => void): void + subscribe ( + subscriptions: Subscriptions | Subscription | Subscription[] | SubscribePacket, + callback?: (error?: Error) => void + ): void + unsubscribe (topicObjects: Subscriptions | Subscription | Subscription[] | UnsubscribePacket, callback?: (error?: Error) => void): void + close (callback?: () => void): void + emptyOutgoingQueue (callback?: () => void): void + } + + interface Aedes extends EventEmitter { + id: string + connectedClients: number + closed: boolean + + handle: (stream: Connection) => Client + + on (event: 'closed', listener: () => void): this + on (event: 'client' | 'clientReady' | 'clientDisconnect' | 'keepaliveTimeout', listener: (client: Client) => void): this + on (event: 'clientError' | 'connectionError', listener: (client: Client, error: Error) => void): this + on (event: 'connackSent', listener: (packet: ConnackPacket, client: Client) => void): this + on (event: 'ping', listener: (packet: PingreqPacket, client: Client) => void): this + on (event: 'publish', listener: (packet: AedesPublishPacket, client: Client) => void): this + on (event: 'ack', listener: (packet: PublishPacket | PubrelPacket, client: Client) => void): this + on (event: 'subscribe', listener: (subscriptions: Subscription[], client: Client) => void): this + on (event: 'unsubscribe', listener: (unsubscriptions: string[], client: Client) => void): this + + publish ( + packet: PublishPacket, + callback: (error?: Error) => void + ): void + subscribe ( + topic: string, + deliverfunc: (packet: AedesPublishPacket, callback: () => void) => void, + callback: () => void + ): void + unsubscribe ( + topic: string, + deliverfunc: (packet: AedesPublishPacket, callback: () => void) => void, + callback: () => void + ): void + close (callback?: () => void): void + } + + function Server (options?: aedes.AedesOptions): aedes.Aedes +} + +export = aedes diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/aedes.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/aedes.js new file mode 100644 index 000000000..cf1caed65 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/aedes.js @@ -0,0 +1,351 @@ +'use strict' + +const EventEmitter = require('events') +const util = require('util') +const parallel = require('fastparallel') +const series = require('fastseries') +const { v4: uuidv4 } = require('uuid') +const bulk = 
require('bulk-write-stream') +const reusify = require('reusify') +const { pipeline } = require('readable-stream') +const Packet = require('aedes-packet') +const memory = require('aedes-persistence') +const mqemitter = require('mqemitter') +const Client = require('./lib/client') + +module.exports = Aedes.Server = Aedes + +const defaultOptions = { + concurrency: 100, + heartbeatInterval: 60000, // 1 minute + connectTimeout: 30000, // 30 secs + decodeProtocol: null, + preConnect: defaultPreConnect, + authenticate: defaultAuthenticate, + authorizePublish: defaultAuthorizePublish, + authorizeSubscribe: defaultAuthorizeSubscribe, + authorizeForward: defaultAuthorizeForward, + published: defaultPublished, + trustProxy: false, + trustedProxies: [], + queueLimit: 42, + maxClientsIdLength: 23 +} + +function Aedes (opts) { + const that = this + + if (!(this instanceof Aedes)) { + return new Aedes(opts) + } + + opts = Object.assign({}, defaultOptions, opts) + + this.id = opts.id || uuidv4() + // +1 when construct a new aedes-packet + // internal track for last brokerCounter + this.counter = 0 + this.queueLimit = opts.queueLimit + this.connectTimeout = opts.connectTimeout + this.maxClientsIdLength = opts.maxClientsIdLength + this.mq = opts.mq || mqemitter({ + concurrency: opts.concurrency, + matchEmptyLevels: true // [MQTT-4.7.1-3] + }) + this.handle = function handle (conn, req) { + conn.setMaxListeners(opts.concurrency * 2) + // create a new Client instance for a new connection + // return, just to please standard + return new Client(that, conn, req) + } + this.persistence = opts.persistence || memory() + this.persistence.broker = this + this._parallel = parallel() + this._series = series() + this._enqueuers = reusify(DoEnqueues) + + this.preConnect = opts.preConnect + this.authenticate = opts.authenticate + this.authorizePublish = opts.authorizePublish + this.authorizeSubscribe = opts.authorizeSubscribe + this.authorizeForward = opts.authorizeForward + this.published = opts.published + + this.decodeProtocol = opts.decodeProtocol + this.trustProxy = opts.trustProxy + this.trustedProxies = opts.trustedProxies + + this.clients = {} + this.brokers = {} + + const heartbeatTopic = '$SYS/' + that.id + '/heartbeat' + this._heartbeatInterval = setInterval(heartbeat, opts.heartbeatInterval) + + const bufId = Buffer.from(that.id, 'utf8') + + function heartbeat () { + that.publish({ + topic: heartbeatTopic, + payload: bufId + }, noop) + } + + function deleteOldBrokers (broker) { + if (that.brokers[broker] + (3 * opts.heartbeatInterval) < Date.now()) { + delete that.brokers[broker] + } + } + + this._clearWillInterval = setInterval(function () { + Object.keys(that.brokers).forEach(deleteOldBrokers) + + pipeline( + that.persistence.streamWill(that.brokers), + bulk.obj(receiveWills), + function done (err) { + if (err) { + that.emit('error', err) + } + } + ) + }, opts.heartbeatInterval * 4) + + function receiveWills (chunks, done) { + that._parallel(that, checkAndPublish, chunks, done) + } + + function checkAndPublish (will, done) { + const needsPublishing = + !that.brokers[will.brokerId] || + that.brokers[will.brokerId] + (3 * opts.heartbeatInterval) < + Date.now() + + if (needsPublishing) { + // randomize this, so that multiple brokers + // do not publish the same wills at the same time + that.publish(will, function publishWill (err) { + if (err) { + return done(err) + } + + that.persistence.delWill({ + id: will.clientId, + brokerId: will.brokerId + }, done) + }) + } else { + done() + } + } + + 
this.mq.on('$SYS/+/heartbeat', function storeBroker (packet, done) { + that.brokers[packet.payload.toString()] = Date.now() + done() + }) + + this.mq.on('$SYS/+/new/clients', function closeSameClients (packet, done) { + const serverId = packet.topic.split('/')[1] + const clientId = packet.payload.toString() + + if (that.clients[clientId] && serverId !== that.id) { + that.clients[clientId].close(done) + } else { + done() + } + }) + + // metadata + this.connectedClients = 0 + this.closed = false +} + +util.inherits(Aedes, EventEmitter) + +function storeRetained (packet, done) { + if (packet.retain) { + this.broker.persistence.storeRetained(packet, done) + } else { + done() + } +} + +function emitPacket (packet, done) { + packet.retain = false + this.broker.mq.emit(packet, done) +} + +function enqueueOffline (packet, done) { + var enqueuer = this.broker._enqueuers.get() + + enqueuer.complete = done + enqueuer.packet = packet + enqueuer.topic = packet.topic + enqueuer.broker = this.broker + this.broker.persistence.subscriptionsByTopic( + packet.topic, + enqueuer.done + ) +} + +function DoEnqueues () { + this.next = null + this.complete = null + this.packet = null + this.topic = null + this.broker = null + + const that = this + + this.done = function doneEnqueue (err, subs) { + const broker = that.broker + + if (err) { + // is this really recoverable? + // let's just error the whole aedes + // https://nodejs.org/api/events.html#events_error_events + broker.emit('error', err) + return + } + + if (that.topic.indexOf('$SYS') === 0) { + subs = subs.filter(removeSharp) + } + + const packet = that.packet + const complete = that.complete + + that.packet = null + that.complete = null + that.topic = null + + broker.persistence.outgoingEnqueueCombi(subs, packet, complete) + broker._enqueuers.release(that) + } +} + +// + is 43 +// # is 35 +function removeSharp (sub) { + const code = sub.topic.charCodeAt(0) + return code !== 43 && code !== 35 +} + +function callPublished (_, done) { + this.broker.published(this.packet, this.client, done) + this.broker.emit('publish', this.packet, this.client) +} + +const publishFuncsSimple = [ + storeRetained, + emitPacket, + callPublished +] +const publishFuncsQoS = [ + storeRetained, + enqueueOffline, + emitPacket, + callPublished +] +Aedes.prototype.publish = function (packet, client, done) { + if (typeof client === 'function') { + done = client + client = null + } + var p = new Packet(packet, this) + const publishFuncs = p.qos > 0 ? 
publishFuncsQoS : publishFuncsSimple + + this._series(new PublishState(this, client, packet), publishFuncs, p, done) +} + +Aedes.prototype.subscribe = function (topic, func, done) { + this.mq.on(topic, func, done) +} + +Aedes.prototype.unsubscribe = function (topic, func, done) { + this.mq.removeListener(topic, func, done) +} + +Aedes.prototype.registerClient = function (client) { + const that = this + if (this.clients[client.id]) { + // [MQTT-3.1.4-2] + this.clients[client.id].close(function closeClient () { + that._finishRegisterClient(client) + }) + } else { + this._finishRegisterClient(client) + } +} + +Aedes.prototype._finishRegisterClient = function (client) { + this.connectedClients++ + this.clients[client.id] = client + this.emit('client', client) + this.publish({ + topic: '$SYS/' + this.id + '/new/clients', + payload: Buffer.from(client.id, 'utf8') + }, noop) +} + +Aedes.prototype.unregisterClient = function (client) { + this.connectedClients-- + delete this.clients[client.id] + this.emit('clientDisconnect', client) + this.publish({ + topic: '$SYS/' + this.id + '/disconnect/clients', + payload: Buffer.from(client.id, 'utf8') + }, noop) +} + +function closeClient (client, cb) { + this.clients[client].close(cb) +} + +Aedes.prototype.close = function (cb = noop) { + const that = this + if (this.closed) { + return cb() + } + this.closed = true + clearInterval(this._heartbeatInterval) + clearInterval(this._clearWillInterval) + this._parallel(this, closeClient, Object.keys(this.clients), doneClose) + function doneClose () { + that.emit('closed') + that.mq.close(cb) + } +} + +Aedes.prototype.version = require('./package.json').version + +function defaultPreConnect (client, callback) { + callback(null, true) +} + +function defaultAuthenticate (client, username, password, callback) { + callback(null, true) +} + +function defaultAuthorizePublish (client, packet, callback) { + callback(null) +} + +function defaultAuthorizeSubscribe (client, sub, callback) { + callback(null, sub) +} + +function defaultAuthorizeForward (client, packet) { + return packet +} + +function defaultPublished (packet, client, callback) { + callback(null) +} + +function PublishState (broker, client, packet) { + this.broker = broker + this.client = client + this.packet = packet +} + +function noop () {} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/bombing.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/bombing.js new file mode 100644 index 000000000..e8d3e0a47 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/bombing.js @@ -0,0 +1,34 @@ +#! 
/usr/bin/env node + +const mqtt = require('mqtt') +const client = mqtt.connect({ port: 1883, host: 'localhost', clean: true, keepalive: 0 }) +const interval = 5000 + +var sent = 0 + +function count () { + console.log('sent/s', sent / interval * 1000) + sent = 0 +} + +setInterval(count, interval) + +function immediatePublish () { + setImmediate(publish) +} + +function publish () { + sent++ + client.publish('test', 'payload', immediatePublish) +} + +client.on('connect', publish) + +client.on('offline', function () { + console.log('offline') +}) + +client.on('error', function () { + console.log('reconnect!') + client.stream.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/bombingQoS1.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/bombingQoS1.js new file mode 100644 index 000000000..fb5a2570d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/bombingQoS1.js @@ -0,0 +1,36 @@ +#! /usr/bin/env node + +const mqtt = require('mqtt') +const client = mqtt.connect({ port: 1883, host: 'localhost', clean: true, keepalive: 0 }) +const interval = 5000 + +var sent = 0 + +function count () { + console.log('sent/s', sent / interval * 1000) + sent = 0 +} + +setInterval(count, interval) + +function publish () { + sent++ + client.publish('test', 'payload', { qos: 1 }, publish) +} + +client.setMaxListeners(100) + +client.on('connect', function () { + for (var i = 0; i < 50; i++) { + publish() + } +}) + +client.on('offline', function () { + console.log('offline') +}) + +client.on('error', function () { + console.log('reconnect!') + client.stream.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/pingpong.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/pingpong.js new file mode 100644 index 000000000..b22a49b77 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/pingpong.js @@ -0,0 +1,53 @@ +#! 
/usr/bin/env node + +const mqtt = require('mqtt') +const convertHrtime = require('convert-hrtime') +const mode = require('compute-mode') +const client = mqtt.connect({ port: 1883, host: 'localhost', clean: true, keepalive: 0 }) +const interval = 5000 + +var sent = 0 +var latencies = [] + +function count () { + console.log('sent/s', sent / interval * 1000) + sent = 0 +} + +setInterval(count, interval) + +function publish () { + sent++ + client.publish('test', JSON.stringify(process.hrtime()), { qos: 1 }) +} + +function subscribe () { + client.subscribe('test', { qos: 1 }, publish) +} + +client.on('connect', subscribe) +client.on('message', publish) +client.on('message', function (topic, payload) { + var sentAt = JSON.parse(payload) + var diff = process.hrtime(sentAt) + latencies.push(convertHrtime(diff).ms) +}) + +client.on('offline', function () { + console.log('offline') +}) + +client.on('error', function () { + console.log('reconnect!') + client.stream.end() +}) + +process.on('SIGINT', function () { + var total = latencies.reduce(function (acc, num) { + return acc + num + }) + console.log('total', total) + console.log('average', total / latencies.length) + console.log('mode', mode(latencies)) + process.exit(0) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/server.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/server.js new file mode 100644 index 000000000..49b69e757 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/server.js @@ -0,0 +1,25 @@ +'use strict' + +// To be used with cpuprofilify http://npm.im/cpuprofilify + +const aedes = require('../')() +const server = require('net').createServer(aedes.handle) +const port = 1883 + +server.listen(port, function () { + console.error('server listening on port', port, 'pid', process.pid) +}) + +aedes.on('clientError', function (client, err) { + console.error('client error', client.id, err.message) +}) + +// Cleanly shut down process on SIGTERM to ensure that perf-.map gets flushed +process.on('SIGINT', onSIGINT) + +function onSIGINT () { + // IMPORTANT to log on stderr, to not clutter stdout which is purely for data, i.e. dtrace stacks + console.error('Caught SIGTERM, shutting down.') + server.close() + process.exit(0) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/throughputCounter.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/throughputCounter.js new file mode 100644 index 000000000..bd977251d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/throughputCounter.js @@ -0,0 +1,23 @@ +#! /usr/bin/env node + +const mqtt = require('mqtt') + +const client = mqtt.connect({ port: 1883, host: 'localhost', clean: true, encoding: 'binary', keepalive: 0 }) +const interval = 5000 + +var counter = 0 + +function count () { + console.log('received/s', counter / interval * 1000) + counter = 0 +} + +setInterval(count, interval) + +client.on('connect', function () { + count() + this.subscribe('test') + this.on('message', function () { + counter++ + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/throughputCounterQoS1.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/throughputCounterQoS1.js new file mode 100644 index 000000000..ddb2c84bb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/benchmarks/throughputCounterQoS1.js @@ -0,0 +1,33 @@ +#! 
/usr/bin/env node + +const mqtt = require('mqtt') + +const client = mqtt.connect({ port: 1883, host: 'localhost', clean: true, encoding: 'binary', keepalive: 0 }) +const interval = 5000 + +var counter = 0 + +function count () { + console.log('received/s', counter / interval * 1000) + counter = 0 +} + +setInterval(count, interval) + +client.on('connect', function () { + this.subscribe('test', { qos: 1 }) +}) + +client.handleMessage = function (packet, done) { + counter++ + done() +} + +client.on('offline', function () { + console.log('offline') +}) + +client.on('error', function () { + console.log('reconnect!') + client.stream.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Aedes.md b/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Aedes.md new file mode 100644 index 000000000..832ef37b6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Aedes.md @@ -0,0 +1,419 @@ + +# Aedes + +- [Aedes](#aedes) + - [new Aedes([options]) / new Aedes.Server([options])](#new-aedesoptions--new-aedesserveroptions) + - [aedes.id](#aedesid) + - [aedes.connectedClients](#aedesconnectedclients) + - [aedes.closed](#aedesclosed) + - [Event: client](#event-client) + - [Event: clientReady](#event-clientready) + - [Event: clientDisconnect](#event-clientdisconnect) + - [Event: clientError](#event-clienterror) + - [Event: connectionError](#event-connectionerror) + - [Event: keepaliveTimeout](#event-keepalivetimeout) + - [Event: publish](#event-publish) + - [Event: ack](#event-ack) + - [Event: ping](#event-ping) + - [Event: subscribe](#event-subscribe) + - [Event: unsubscribe](#event-unsubscribe) + - [Event: connackSent](#event-connacksent) + - [Event: closed](#event-closed) + - [aedes.handle (stream)](#aedeshandle-stream) + - [aedes.subscribe (topic, deliverfunc, callback)](#aedessubscribe-topic-deliverfunc-callback) + - [aedes.unsubscribe (topic, deliverfunc, callback)](#aedesunsubscribe-topic-deliverfunc-callback) + - [aedes.publish (packet, callback)](#aedespublish-packet-callback) + - [aedes.close ([callback])](#aedesclose-callback) + - [Handler: decodeProtocol (client, buffer)](#handler-decodeprotocol-client-buffer) + - [Handler: preConnect (client, callback)](#handler-preconnect-client-callback) + - [Handler: authenticate (client, username, password, callback)](#handler-authenticate-client-username-password-callback) + - [Handler: authorizePublish (client, packet, callback)](#handler-authorizepublish-client-packet-callback) + - [Handler: authorizeSubscribe (client, subscription, callback)](#handler-authorizesubscribe-client-subscription-callback) + - [Handler: authorizeForward (client, packet)](#handler-authorizeforward-client-packet) + - [Handler: published (packet, client, callback)](#handler-published-packet-client-callback) + +## new Aedes([options]) / new Aedes.Server([options]) + +- options `` + - `mq` [``](../README.md#mqemitter) middleware used to deliver messages to subscribed clients. In a cluster environment it is used also to share messages between brokers instances. __Default__: `mqemitter` + - `concurrency` `` maximum number of concurrent messages delivered by `mq`. __Default__: `100` + - `persistence` [``](../README.md#persistence) middleware that stores _QoS > 0, retained, will_ packets and _subscriptions_. __Default__: `aedes-persistence` (_in memory_) + - `queueLimit` `` maximum number of queued messages before client session is established. If number of queued items exceeds, `connectionError` throws an error `Client queue limit reached`. 
__Default__: `42` + - `maxClientsIdLength` option to override MQTT 3.1.0 clients Id length limit. __Default__: `23` + - `heartbeatInterval` `` an interval in millisconds at which server beats its health signal in `$SYS//heartbeat` topic. __Default__: `60000` + - `id` `` aedes broker unique identifier. __Default__: `uuidv4()` + - `connectTimeout` `` maximum waiting time in milliseconds waiting for a [`CONNECT`][CONNECT] packet. __Default__: `30000` +- Returns `` + +Create a new Aedes server. + +Aedes is the class and function exposed by this module. It can be created by `Aedes()` or using `new Aedes()`. An variant `aedes.Server` is for TypeScript or ES modules. + +## aedes.id + +- `` __Default__: `uuidv4()` + +Server unique identifier. + +## aedes.connectedClients + +- `` __Default__: 0 + +Number of connected clients in server. + +## aedes.closed + +- `` __Default__: false + +a read-only flag indicates if server is closed or not. + +## Event: client + +- `client` [``](./Client.md) + +Emitted when the `client` registers itself to server. The `client` is not ready yet. Its [`connecting`](./Client.md##clientconnecting) state equals to `true`. + +Server publishes a SYS topic `$SYS//new/clients` to inform it registers the client into its registration pool. `client.id` is the payload. + +## Event: clientReady + +- `client` [``](./Client.md) + +Emitted when the `client` has received all its offline messages and be initialized. The `client` [`connected`](./Client.md##clientconnected) state equals to `true` and is ready for processing incoming messages. + +## Event: clientDisconnect + +- `client` [``](./Client.md) + +Emitted when a client disconnects. + +Server publishes a SYS topic `$SYS//disconnect/clients` to inform it deregisters the client. `client.id` is the payload. + +## Event: clientError + +- `client` [``](./Client.md) +- `error` `` + +Emitted when an error occurs. + +## Event: connectionError + +- `client` [``](./Client.md) +- `error` `` + +Emitted when an error occurs. Unlike `clientError` it raises only when `client` is uninitialized. + +## Event: keepaliveTimeout + +- `client` [``](./Client.md) + +Emitted when timeout happes in the `client` keepalive. + +## Event: publish + +- `packet` `` & [`PUBLISH`][PUBLISH] +- `client` [``](./Client.md) | `null` + +Emitted when servers delivers the `packet` to subscribed `client`. If there are no clients subscribed to the `packet` topic, server still publish the `packet` and emit thie event. `client` is `null` when `packet` is an internal message like aedes heartbeat message and LWT. + +> _Note! `packet` belongs `aedes-packet` type. Some properties belongs to aedes internal, any changes on them will break aedes internal flow._ + +## Event: ack + +- `packet` `` [`PUBLISH`][PUBLISH] for QoS 1, [`PUBREL`][PUBREL] for QoS 2 +- `client` [``](./Client.md) + +Emitted an QoS 1 or 2 acknowledgement when the `packet` successfully delivered to the `client`. + +## Event: ping + +- `packet` `` [`PINGREQ`][PINGREQ] +- `client` [``](./Client.md) + +Emitted when `client` sends a `PINGREQ`. + +## Event: subscribe + +- `subscriptions` `` +- `client` [``](./Client.md) + +Emitted when `client` successfully subscribe the `subscriptions` in server. + +`subscriptions` is an array of `{ topic: topic, qos: qos }`. The array excludes duplicated topics and includes negated subscriptions where `qos` equals to `128`. 
See more on [authorizeSubscribe](#handler-authorizesubscribe-client-subscription-callback) + +Server publishes a SYS topic `$SYS//new/subscribers` to inform a client successfully subscribed to one or more topics. The payload is a JSON that has `clientId` and `subs` props, `subs` equals to `subscriptions` array. + +## Event: unsubscribe + +- `unsubscriptions` `Array` +- `client` [``](./Client.md) + +Emitted when `client` successfully unsubscribe the `subscriptions` in server. + +`unsubscriptions` are an array of unsubscribed topics. + +Server publishes a SYS topic `$SYS//new/unsubscribers` to inform a client successfully unsubscribed to one or more topics. The payload is a JSON that has `clientId` and `subs` props, `subs` equals to `unsubscriptions` array. + +## Event: connackSent + +- `packet` `` [`CONNACK`][CONNACK] +- `client` [``](./Client.md) + +Emitted when server sends an acknowledge to `client`. Please refer to the MQTT specification for the explanation of returnCode object property in `CONNACK`. + +## Event: closed + +Emitted when server is closed. + +## aedes.handle (stream) + +- stream: `` | `` +- Returns: [``](./Client.md) + +A connection listener that pipe stream to aedes. + +```js +const aedes = require('./aedes')() +const server = require('net').createServer(aedes.handle) +``` + +## aedes.subscribe (topic, deliverfunc, callback) + +- topic: `` +- deliverfunc: `` `(packet, cb) => void` + - packet: `` & [`PUBLISH`][PUBLISH] + - cb: `` +- callback: `` + +Directly subscribe a `topic` in server side. Bypass [`authorizeSubscribe`](#handler-authorizesubscribe-client-subscription-callback) + +The `topic` and `deliverfunc` is a compound key to differentiate the uniqueness of its subscription pool. `topic` could be the one that is existed, in this case `deliverfunc` will be invoked as well as `SUBSCRIBE` does. + +`deliverfunc` supports backpressue. + +In aedes internal, `deliverfunc` is a function that delivers messages to subscribed clients. + +> _Note! `packet` belongs `aedes-packet` type. Some properties belongs to aedes internal, any changes on them will break aedes internal flow._ + +In general you would find most properities in `packet` is same as what the incoming [`PUBLISH`][PUBLISH] is. For sure `cmd` property in `packet` structure in `deliverfunc` must be `publish`. + +> _Note! it requires `deliverfunc` to call `cb` before the function returns, otherwise some subscribed clients with same `topic` will not receive messages._ + +`callback` is invoked when server successfully registers the subscription. + +## aedes.unsubscribe (topic, deliverfunc, callback) + +Reverse of [aedes.subscribe](#aedessubscribe-topic-deliverfunc-callback). + +> _Note! the `deliverfunc` should be same as when `aedes.subscribe` does, otherwise the unsubscription will fail._ + +## aedes.publish (packet, callback) + +- `packet` `` [`PUBLISH`][PUBLISH] +- `callback` `` `(error) => void` + - error `` | `null` + +Directly deliver `packet` on behalf of server to subscribed clients. Bypass [`authorizePublish`](#handler-authorizepublish-client-packet-callback). + +`callback` will be invoked with `error` arugments after finish. + +## aedes.close ([callback]) + +- callback: `` + +Close aedes server and disconnects all clients. + +`callback` will be invoked when server is closed. + +## Handler: decodeProtocol (client, buffer) + +- client: [``](./Client.md) +- buffer: `` + +Invoked when aedes instance `trustProxy` is `true` + +It targets to decode wrapped protocols (e.g. 
websocket and PROXY protocol) into a plain raw MQTT stream.
+
+`aedes-protocol-decoder` is an example that parses HTTP headers (x-real-ip | x-forwarded-for) and PROXY protocol v1 and v2 to retrieve connection information in `client.connDetails`.
+
+```js
+aedes.decodeProtocol = function (client, buffer) {
+  return yourDecoder(client, buffer)
+}
+```
+
+## Handler: preConnect (client, callback)
+
+- client: [``](./Client.md)
+- callback: `` `(error, successful) => void`
+  - error `` | `null`
+  - successful ``
+
+Invoked when the server receives a valid [`CONNECT`][CONNECT] packet.
+
+The `client` object is in its default state. If `callback` is invoked with no error and `successful` set to `true`, the server continues to establish the session.
+
+Any `error` will be raised in the `connectionError` event.
+
+Some use cases:
+
+1. Rate limiting / throttling by `client.conn.remoteAddress`
+2. Checking `aedes.connectedClients` to limit the maximum number of connections
+3. IP blacklisting
+
+```js
+aedes.preConnect = function (client, callback) {
+  callback(null, client.conn.remoteAddress === '::1')
+}
+```
+
+```js
+aedes.preConnect = function (client, callback) {
+  callback(new Error('connection error'), client.conn.remoteAddress !== '::1')
+}
+```
+
+## Handler: authenticate (client, username, password, callback)
+
+- client: [``](./Client.md)
+- username: ``
+- password: ``
+- callback: `` `(error, successful) => void`
+  - error `` | `null`
+  - successful ``
+
+Invoked after `preConnect`.
+
+The server parses the [`CONNECT`][CONNECT] packet, initializes the `client` object, sets `client.id` to match the one in the [`CONNECT`][CONNECT] packet, and extracts `username` and `password` as parameters for the user-defined authentication flow.
+
+If `callback` is invoked with no error and `successful` set to `true`, the server authenticates the `client` and continues to set up the client session.
+
+If authenticated, the server acknowledges with a [`CONNACK`][CONNACK] with `returnCode=0`, otherwise `returnCode=5`. Users can choose a value between `2` and `5` by defining a `returnCode` property on the `error` object.
+
+```js
+aedes.authenticate = function (client, username, password, callback) {
+  callback(null, username === 'matteo')
+}
+```
+
+```js
+aedes.authenticate = function (client, username, password, callback) {
+  var error = new Error('Auth error')
+  error.returnCode = 4
+  callback(error, null)
+}
+```
+
+Please refer to [Connect Return Code](http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Table_3.1_-) for the meaning of each code.
+
+## Handler: authorizePublish (client, packet, callback)
+
+- client: [``](./Client.md)
+- packet: `` [`PUBLISH`][PUBLISH]
+- callback: `` `(error) => void`
+  - error `` | `null`
+
+Invoked when
+
+1. an LWT is published to all online clients
+2. a client publishes an incoming message
+
+If `callback` is invoked with no error, the server authorizes the packet; otherwise it emits `clientError` with the `error`. If an `error` occurs, the client connection is closed, but no error is returned to the client (MQTT-3.3.5-2).
+
+```js
+aedes.authorizePublish = function (client, packet, callback) {
+  if (packet.topic === 'aaaa') {
+    return callback(new Error('wrong topic'))
+  }
+  if (packet.topic === 'bbb') {
+    packet.payload = Buffer.from('overwrite packet payload')
+  }
+  callback(null)
+}
+```
+
+## Handler: authorizeSubscribe (client, subscription, callback)
+
+- client: [``](./Client.md)
+- subscription: ``
+- callback: `` `(error) => void`
+  - error `` | `null`
+  - subscription: `` | `null`
+
+Invoked when
+
+1. subscriptions are restored for a non-clean session
+2.
incoming client `SUBSCRIBE`
+
+`subscription` is a dictionary object like `{ topic: 'hello', qos: 0 }`.
+
+If `callback` is invoked with no error, the server authorizes the subscription; otherwise it emits `clientError` with the `error`.
+
+In general the user should pass the `subscription` to the callback untouched, but the server provides the option to change the subscription on the fly.
+
+```js
+aedes.authorizeSubscribe = function (client, sub, callback) {
+  if (sub.topic === 'aaaa') {
+    return callback(new Error('wrong topic'))
+  }
+  if (sub.topic === 'bbb') {
+    // overwrites subscription
+    sub.topic = 'foo'
+    sub.qos = 1
+  }
+  callback(null, sub)
+}
+```
+
+To negate a subscription, set the subscription to `null`. Aedes ignores the negated subscription and the `qos` in the `SUBACK` is set to `128`, based on the [MQTT 3.1.1 spec](https://docs.oasis-open.org/mqtt/mqtt/v3.1.1/mqtt-v3.1.1.html#_Toc385349323):
+
+```js
+aedes.authorizeSubscribe = function (client, sub, callback) {
+  // subscribing to 'aaaa' is prohibited; suppress the error
+  callback(null, sub.topic === 'aaaa' ? null : sub)
+}
+```
+
+## Handler: authorizeForward (client, packet)
+
+- client: [``](./Client.md)
+- packet: `` & [`PUBLISH`][PUBLISH]
+- Returns: `` | `null`
+
+Invoked when
+
+1. aedes sends retained messages when a client reconnects
+2. aedes pre-delivers a subscribed message to clients
+
+Returning `null` will not forward the `packet` to clients.
+
+In general the user should not touch the `packet` and should return it as it is, but the server provides the option to change the `packet` on the fly and forward it to clients.
+
+> _Note! `packet` is of the `aedes-packet` type. Some properties belong to aedes internals; any changes to them will break aedes' internal flow._
+
+```js
+aedes.authorizeForward = function (client, packet) {
+  if (packet.topic === 'aaaa' && client.id === 'I should not see this') {
+    return
+  }
+  if (packet.topic === 'bbb') {
+    packet.payload = Buffer.from('overwrite packet payload')
+  }
+  return packet
+}
+```
+
+## Handler: published (packet, client, callback)
+
+- packet: `` & [`PUBLISH`][PUBLISH]
+- client: [``](./Client.md)
+- callback: ``
+
+Same as [`Event: publish`](#event-publish), but provides backpressure functionality.
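The `published` handler above is documented without an example. A minimal sketch of how such a handler can apply backpressure, assuming an in-memory `audit` array purely for illustration (not part of this patch or of the aedes docs):

```js
const aedes = require('aedes')()

// Illustrative in-memory sink; a real deployment would write to a
// database, message queue, or file stream instead.
const audit = []

// `published` runs for every packet the broker delivers; `client` is null
// for internal messages such as the $SYS heartbeat. Deferring `callback`
// until the asynchronous work completes is what provides the backpressure.
aedes.published = function (packet, client, callback) {
  if (!client) {
    return callback(null)
  }
  audit.push({ topic: packet.topic, clientId: client.id, at: Date.now() })
  setImmediate(callback, null) // stand-in for a real async write
}
```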
+ +[CONNECT]: https://github.com/mqttjs/mqtt-packet#connect +[CONNACK]: https://github.com/mqttjs/mqtt-packet#connack +[SUBSCRIBE]: https://github.com/mqttjs/mqtt-packet#subscribe +[PINGREQ]: https://github.com/mqttjs/mqtt-packet#pingreq +[PUBLISH]: https://github.com/mqttjs/mqtt-packet#publish +[PUBREL]: https://github.com/mqttjs/mqtt-packet#pubrel diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Client.md b/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Client.md new file mode 100644 index 000000000..183969ac8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Client.md @@ -0,0 +1,156 @@ + +# Client + +- [Client](#client) + - [new Client(aedes, stream, request)](#new-clientaedes-stream-request) + - [client.conn](#clientconn) + - [client.req](#clientreq) + - [client.connecting](#clientconnecting) + - [client.connected](#clientconnected) + - [client.closed](#clientclosed) + - [client.id](#clientid) + - [client.clean](#clientclean) + - [client.version](#clientversion) + - [Event: connected](#event-connected) + - [Event: error](#event-error) + - [client.publish (packet, [callback])](#clientpublish-packet-callback) + - [client.subscribe (subscriptions, [callback])](#clientsubscribe-subscriptions-callback) + - [client.unsubscribe (unsubscriptions, [callback])](#clientunsubscribe-unsubscriptions-callback) + - [client.close ([callback])](#clientclose-callback) + - [client.emptyOutgoingQueue ([callback])](#clientemptyoutgoingqueue-callback) + +## new Client(aedes, stream, request) + +- aedes [``](./Aedes.md) +- stream: `` | `` +- request: `` +- Returns: `` + +## client.conn + +- `` | `` + +Client connection stream object. + +In the case of `net.createServer`, `conn` passed to the `connectionlistener` function by node's [net.createServer](https://nodejs.org/api/net.html#net_net_createserver_options_connectionlistener) API. + +In the case of [`websocket-stream`][websocket-stream], it's the `stream` argument passed to the websocket `handle` function in [`websocket-stream #on-the-server`][websocket-stream-doc-on-the-server]]. + +## client.req + +- `` + +only for [`websocket-stream`][websocket-stream]. It is a HTTP Websocket upgrade request object passed to websocket `handle` function in [`websocket-stream #on-the-server`][websocket-stream-doc-on-the-server]. It gives an option for accessing headers or cookies. + +## client.connecting + +- `` __Default__: `false` + +a read-only flag, it is true when Client is in CONNECT phase. Aedes emits `connackSent` event will not reset `connecting` to `false` until it received all its offline messagess to the Client. + +## client.connected + +- `` __Default__: `false` + +a read-only flag, it is `true` when `connected` event is emitted, and `false` when client is closed. + +## client.closed + +- `` __Default__: `false` + +a read-only flag indicates if client is closed or not. + +## client.id + +- `` __Default__: `aedes_${shortid()}` + +Client unique identifier, specified by CONNECT packet. + +It is available only after `CONNACK (rc=0)`, otherwise it is `null` in cases: + +- in [`aedes.preConnect`](./Aedes.md#handler-preconnect-client-callback) stage +- after `CONNACK (rc!=0)` response +- `connectionError` raised by aedes + +## client.clean + +- `` __Default__: `true` + +Client clean flag, set by clean flag in `CONNECT` packet. 
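As a rough illustration of the read-only flags documented above, the broker-side sketch below logs them at the lifecycle events described in Aedes.md; it uses only the documented events and properties, and the log format is made up for the example:

```js
const aedes = require('aedes')()

// On 'client' the session is still being set up, so `connecting` is true;
// on 'clientReady' the client is fully connected and `connected` is true.
aedes.on('client', function (client) {
  console.log('registering', client.id, {
    connecting: client.connecting,
    connected: client.connected,
    clean: client.clean
  })
})

aedes.on('clientReady', function (client) {
  console.log('ready', client.id, 'clean session:', client.clean)
})

aedes.on('clientDisconnect', function (client) {
  console.log('disconnected', client.id, 'closed:', client.closed)
})
```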
+
+## client.version
+
+- `` __Default__: `null`
+
+Client protocol version, set from the protocol version in the `CONNECT` packet when `CONNACK (rc=0)` is returned.
+
+## Event: connected
+
+Same as aedes [`clientReady`](./Aedes.md#event-clientready), but scoped to this client.
+
+## Event: error
+
+- `error` ``
+
+Emitted when an error occurs.
+
+## client.publish (packet, [callback])
+
+- `packet` `` [`PUBLISH`][PUBLISH]
+- `callback` `` `(error) => void`
+  - error `` | `null`
+
+Publish the given `packet` to this client. QoS 1 and 2 are fully supported, while the retained flag is not.
+
+`callback` will be invoked when the message has been sent, but not acked.
+
+## client.subscribe (subscriptions, [callback])
+
+- `subscriptions` ``
+- `callback` `` `(error) => void`
+  - error `` | `null`
+
+Subscribe the client to the list of topics.
+
+`subscriptions` can be:
+
+1. a single object in the format `{ topic: topic, qos: qos }`
+2. an array of the above
+3. a full [`SUBSCRIBE`][SUBSCRIBE] packet; specifying a `messageId` will send a SUBACK to the client
+
+`callback` will be invoked when the subscription is completed.
+
+## client.unsubscribe (unsubscriptions, [callback])
+
+- `unsubscriptions` ``
+- `callback` `` `(error) => void`
+  - error `` | `null`
+
+Unsubscribe the client from the list of topics.
+
+`unsubscriptions` can be:
+
+1. a single object in the format `{ topic: topic, qos: qos }`
+2. an array of the above
+3. a full [`UNSUBSCRIBE`][UNSUBSCRIBE] packet
+
+`callback` will be invoked when the unsubscriptions are completed.
+
+## client.close ([callback])
+
+Disconnects the client.
+
+`callback` will be invoked when the client is closed.
+
+## client.emptyOutgoingQueue ([callback])
+
+Clear all outgoing messages (QoS > 0) related to this client from persistence.
+
+`callback` will be invoked when the operation ends.
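A small sketch tying `client.subscribe` and `client.publish` together from the broker side, using only the signatures documented above: on `clientReady`, force-subscribe the client to a topic and push a greeting directly to it. The topic name and payload are invented for the example.

```js
const aedes = require('aedes')()

aedes.on('clientReady', function (client) {
  // Server-side subscription: no SUBSCRIBE packet from the client needed.
  client.subscribe({ topic: 'broadcast/all', qos: 1 }, function (err) {
    if (err) {
      return console.error('subscribe failed for', client.id, err.message)
    }
    // Deliver a QoS 1 message straight to this client.
    client.publish({
      topic: 'broadcast/all',
      payload: Buffer.from('welcome ' + client.id),
      qos: 1,
      retain: false
    }, function (err) {
      if (err) console.error('publish failed for', client.id, err.message)
    })
  })
})
```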
+ +[PUBLISH]: https://github.com/mqttjs/mqtt-packet#publish +[SUBSCRIBE]: https://github.com/mqttjs/mqtt-packet#subscribe +[UNSUBSCRIBE]: https://github.com/mqttjs/mqtt-packet#unsubscribe + +[websocket-stream]: https://www.npmjs.com/websocket-stream +[websocket-stream-doc-on-the-server]: https://github.com/maxogden/websocket-stream/blob/master/readme.md#on-the-server diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Examples.md b/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Examples.md new file mode 100644 index 000000000..eb41324ef --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/docs/Examples.md @@ -0,0 +1,67 @@ + +# Examples + +## Simple plain MQTT server + +```js +const aedes = require('aedes')() +const server = require('net').createServer(aedes.handle) +const port = 1883 + +server.listen(port, function () { + console.log('server started and listening on port ', port) +}) +``` + +## MQTT over TLS / MQTTS + +```js +const fs = require('fs') +const aedes = require('aedes')() +const port = 8883 + +const options = { + key: fs.readFileSync('YOUR_PRIVATE_KEY_FILE.pem'), + cert: fs.readFileSync('YOUR_PUBLIC_CERT_FILE.pem') +} + +const server = require('tls').createServer(options, aedes.handle) + +server.listen(port, function () { + console.log('server started and listening on port ', port) +}) +``` + +## MQTT server over WebSocket + +```js +const aedes = require('aedes')() +const httpServer = require('http').createServer() +const ws = require('websocket-stream') +const port = 8888 + +ws.createServer({ server: httpServer }, aedes.handle) + +httpServer.listen(port, function () { + console.log('websocket server listening on port ', port) +}) +``` + +## Clusters + +In order to use Aedes in clusters you have to choose a persistence and an mqemitter that supports clusters. 
Tested persistence/mqemitters that works with clusters are: + +- [mqemitter-redis] +- [mqemitter-child-process] +- [mqemitter-mongodb] +- [aedes-persistence-mongodb] +- [aedes-persistence-redis] + +[This](https://github.com/moscajs/aedes/blob/master/examples/clusters/index.js) is an example using [mqemitter-mongodb] and [aedes-persistence-mongodb] + +[aedes-persistence-mongodb]: https://www.npmjs.com/aedes-persistence-mongodb +[aedes-persistence-redis]: https://www.npmjs.com/aedes-persistence-redis + +[mqemitter-redis]: https://www.npmjs.com/mqemitter-redis +[mqemitter-mongodb]: https://www.npmjs.com/mqemitter-mongodb +[mqemitter-child-process]: https://www.npmjs.com/mqemitter-child-process diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/example.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/example.js new file mode 100644 index 000000000..2ab5144c5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/example.js @@ -0,0 +1,44 @@ +'use strict' + +const aedes = require('./aedes')() +const server = require('net').createServer(aedes.handle) +const httpServer = require('http').createServer() +const ws = require('websocket-stream') +const port = 1883 +const wsPort = 8888 + +server.listen(port, function () { + console.log('server listening on port', port) +}) + +ws.createServer({ + server: httpServer +}, aedes.handle) + +httpServer.listen(wsPort, function () { + console.log('websocket server listening on port', wsPort) +}) + +aedes.on('clientError', function (client, err) { + console.log('client error', client.id, err.message, err.stack) +}) + +aedes.on('connectionError', function (client, err) { + console.log('client error', client, err.message, err.stack) +}) + +aedes.on('publish', function (packet, client) { + if (client) { + console.log('message from client', client.id) + } +}) + +aedes.on('subscribe', function (subscriptions, client) { + if (client) { + console.log('subscribe from client', subscriptions, client.id) + } +}) + +aedes.on('client', function (client) { + console.log('new client', client.id) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/clusters/index.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/clusters/index.js new file mode 100644 index 000000000..ac80da977 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/clusters/index.js @@ -0,0 +1,75 @@ +const cluster = require('cluster') +const mqemitter = require('mqemitter-mongodb') +const mongoPersistence = require('aedes-persistence-mongodb') + +const MONGO_URL = 'mongodb://127.0.0.1/aedes-clusters' + +function startAedes () { + const port = 1883 + + const aedes = require('aedes')({ + id: 'BROKER_' + cluster.worker.id, + mq: mqemitter({ + url: MONGO_URL + }), + persistence: mongoPersistence({ + url: MONGO_URL, + // Optional ttl settings + ttl: { + packets: 300, // Number of seconds + subscriptions: 300 + } + }) + }) + + const server = require('net').createServer(aedes.handle) + + server.listen(port, function () { + console.log('Aedes listening on port:', port) + aedes.publish({ topic: 'aedes/hello', payload: "I'm broker " + aedes.id }) + }) + + aedes.on('subscribe', function (subscriptions, client) { + console.log('MQTT client \x1b[32m' + (client ? client.id : client) + + '\x1b[0m subscribed to topics: ' + subscriptions.map(s => s.topic).join('\n'), 'from broker', aedes.id) + }) + + aedes.on('unsubscribe', function (subscriptions, client) { + console.log('MQTT client \x1b[32m' + (client ? 
client.id : client) + + '\x1b[0m unsubscribed to topics: ' + subscriptions.join('\n'), 'from broker', aedes.id) + }) + + // fired when a client connects + aedes.on('client', function (client) { + console.log('Client Connected: \x1b[33m' + (client ? client.id : client) + '\x1b[0m', 'to broker', aedes.id) + }) + + // fired when a client disconnects + aedes.on('clientDisconnect', function (client) { + console.log('Client Disconnected: \x1b[31m' + (client ? client.id : client) + '\x1b[0m', 'to broker', aedes.id) + }) + + // fired when a message is published + aedes.on('publish', async function (packet, client) { + console.log('Client \x1b[31m' + (client ? client.id : 'BROKER_' + aedes.id) + '\x1b[0m has published', packet.payload.toString(), 'on', packet.topic, 'to broker', aedes.id) + }) +} + +if (cluster.isMaster) { + const numWorkers = require('os').cpus().length + for (let i = 0; i < numWorkers; i++) { + cluster.fork() + } + + cluster.on('online', function (worker) { + console.log('Worker ' + worker.process.pid + ' is online') + }) + + cluster.on('exit', function (worker, code, signal) { + console.log('Worker ' + worker.process.pid + ' died with code: ' + code + ', and signal: ' + signal) + console.log('Starting a new worker') + cluster.fork() + }) +} else { + startAedes() +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/clusters/package.json b/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/clusters/package.json new file mode 100644 index 000000000..423ff69bc --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/clusters/package.json @@ -0,0 +1,16 @@ +{ + "name": "aedes_clusters", + "version": "1.0.0", + "description": "Testing Aedes Broker with clusters", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "robertsLando", + "license": "MIT", + "dependencies": { + "aedes": "^0.42.0", + "aedes-persistence-mongodb": "^7.0.1", + "mqemitter-mongodb": "^7.0.1" + } +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/proxy/index.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/proxy/index.js new file mode 100644 index 000000000..be34406cc --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/proxy/index.js @@ -0,0 +1,175 @@ +'use strict' + +const aedes = require('../../aedes') +const mqemitter = require('mqemitter') +const persistence = require('aedes-persistence') +const mqttPacket = require('mqtt-packet') +const net = require('net') +const proxyProtocol = require('proxy-protocol-js') + +const brokerPort = 4883 + +// from https://stackoverflow.com/questions/57077161/how-do-i-convert-hex-buffer-to-ipv6-in-javascript +function parseIpV6 (ip) { + return ip.match(/.{1,4}/g) + .map((val) => val.replace(/^0+/, '')) + .join(':') + .replace(/0000:/g, ':') + .replace(/:{2,}/g, '::') +} + +function sendProxyPacket (version = 1, ipFamily = 4) { + const packet = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: `my-client-${version}`, + keepalive: 0 + } + const hostIpV4 = '0.0.0.0' + const clientIpV4 = '192.168.1.128' + const hostIpV6 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + const clientIpV6 = [0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 192, 168, 1, 128] + var protocol + if (version === 1) { + if (ipFamily === 4) { + protocol = new proxyProtocol.V1BinaryProxyProtocol( + proxyProtocol.INETProtocol.TCP4, + new proxyProtocol.Peer(clientIpV4, 12345), + new proxyProtocol.Peer(hostIpV4, brokerPort), + 
mqttPacket.generate(packet) + ).build() + } else if (ipFamily === 6) { + protocol = new proxyProtocol.V1BinaryProxyProtocol( + proxyProtocol.INETProtocol.TCP6, + new proxyProtocol.Peer(parseIpV6(Buffer.from(clientIpV6).toString('hex')), 12345), + new proxyProtocol.Peer(parseIpV6(Buffer.from(hostIpV6).toString('hex')), brokerPort), + mqttPacket.generate(packet) + ).build() + } + } else if (version === 2) { + if (ipFamily === 4) { + protocol = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.LOCAL, + proxyProtocol.TransportProtocol.STREAM, + new proxyProtocol.IPv4ProxyAddress( + proxyProtocol.IPv4Address.createFrom(clientIpV4.split('.')), + 12346, + proxyProtocol.IPv4Address.createFrom(hostIpV4.split('.')), + brokerPort + ), + mqttPacket.generate(packet) + ).build() + } else if (ipFamily === 6) { + protocol = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.PROXY, + proxyProtocol.TransportProtocol.STREAM, + new proxyProtocol.IPv6ProxyAddress( + proxyProtocol.IPv6Address.createFrom(clientIpV6), + 12346, + proxyProtocol.IPv6Address.createFrom(hostIpV6), + brokerPort + ), + mqttPacket.generate(packet) + ).build() + } + } + + const parsedProto = version === 1 + ? proxyProtocol.V1BinaryProxyProtocol.parse(protocol) + : proxyProtocol.V2ProxyProtocol.parse(protocol) + // console.log(parsedProto) + + const dstPort = version === 1 + ? parsedProto.destination.port + : parsedProto.proxyAddress.destinationPort + + var dstHost + if (version === 1) { + if (ipFamily === 4) { + dstHost = parsedProto.destination.ipAddress + } else if (ipFamily === 6) { + dstHost = parsedProto.destination.ipAddress + // console.log('ipV6 host :', parsedProto.destination.ipAddress) + } + } else if (version === 2) { + if (ipFamily === 4) { + dstHost = parsedProto.proxyAddress.destinationAddress.address.join('.') + } else if (ipFamily === 6) { + // console.log('ipV6 client :', parseIpV6(Buffer.from(clientIpV6).toString('hex'))) + dstHost = parseIpV6(Buffer.from(parsedProto.proxyAddress.destinationAddress.address).toString('hex')) + } + } + + console.log('Connection to :', dstHost, dstPort) + var mqttConn = net.createConnection( + { + port: dstPort, + host: dstHost, + timeout: 150 + } + ) + + const data = protocol + + mqttConn.on('timeout', function () { + mqttConn.end(data) + }) +} + +function startAedes () { + const broker = aedes({ + mq: mqemitter({ + concurrency: 100 + }), + persistence: persistence(), + preConnect: function (client, done) { + console.log('Aedes preConnect check client ip:', client.connDetails) + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + } + client.close() + return done(null, true) + }, + trustProxy: true + }) + + const server = require('net').createServer(broker.handle) + + server.listen(brokerPort, function () { + console.log('Aedes listening on :', server.address()) + broker.publish({ topic: 'aedes/hello', payload: "I'm broker " + broker.id }) + setTimeout(() => sendProxyPacket(1), 250) + setTimeout(() => sendProxyPacket(1, 6), 500) + setTimeout(() => sendProxyPacket(2), 750) + setTimeout(() => sendProxyPacket(2, 6), 1000) + }) + + broker.on('subscribe', function (subscriptions, client) { + console.log('MQTT client \x1b[32m' + (client ? client.id : client) + + '\x1b[0m subscribed to topics: ' + subscriptions.map(s => s.topic).join('\n'), 'from broker', broker.id) + }) + + broker.on('unsubscribe', function (subscriptions, client) { + console.log('MQTT client \x1b[32m' + (client ? 
client.id : client) + + '\x1b[0m unsubscribed to topics: ' + subscriptions.join('\n'), 'from broker', broker.id) + }) + + // fired when a client connects + broker.on('client', function (client) { + console.log('Client Connected: \x1b[33m' + (client ? client.id : client) + ' ip ' + (client ? client.ip : null) + '\x1b[0m', 'to broker', broker.id) + }) + + // fired when a client disconnects + broker.on('clientDisconnect', function (client) { + console.log('Client Disconnected: \x1b[31m' + (client ? client.id : client) + '\x1b[0m', 'to broker', broker.id) + }) + + // fired when a message is published + broker.on('publish', async function (packet, client) { + console.log('Client \x1b[31m' + (client ? client.id : 'BROKER_' + broker.id) + '\x1b[0m has published', packet.payload.toString(), 'on', packet.topic, 'to broker', broker.id) + }) +} + +startAedes() diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/proxy/package.json b/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/proxy/package.json new file mode 100644 index 000000000..af993cb12 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/examples/proxy/package.json @@ -0,0 +1,17 @@ +{ + "name": "aedes_proxy", + "version": "1.0.0", + "description": "Testing Aedes Broker behing proxy", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "getlarge", + "license": "MIT", + "dependencies": { + "aedes": "^0.42.0", + "mqemitter": "^3.0.0", + "mqtt-packet": "^6.3.0", + "proxy-protocol-js": "^4.0.3" + } +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/package.json b/sdklab/meantimerecovery/aedes/node_modules/aedes/package.json new file mode 100644 index 000000000..976a2f8fb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/package.json @@ -0,0 +1,164 @@ +{ + "_from": "aedes@^0.42.5", + "_id": "aedes@0.42.6", + "_inBundle": false, + "_integrity": "sha512-x2jfb6zmRNRDHOkcadEhjUE81wsPFippNncrI1IX8YnEt4YrbpEmuSNbKYssqWW0p1c2Q2iaFVS/a3FiFOZ14Q==", + "_location": "/aedes", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aedes@^0.42.5", + "name": "aedes", + "escapedName": "aedes", + "rawSpec": "^0.42.5", + "saveSpec": null, + "fetchSpec": "^0.42.5" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/aedes/-/aedes-0.42.6.tgz", + "_shasum": "f4360b88877af659638446e2604f2dace1e7103f", + "_spec": "aedes@^0.42.5", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": "http://github.com/moscajs/aedes/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Gavin D'mello", + "url": "https://github.com/GavinDmello" + }, + { + "name": "Behrad Zari", + "url": "https://github.com/behrad" + }, + { + "name": "Gnought", + "url": "https://github.com/gnought" + }, + { + "name": "Daniel Lando", + "url": "https://github.com/robertsLando" + } + ], + "dependencies": { + "aedes-packet": "^2.3.1", + "aedes-persistence": "^8.1.1", + "aedes-protocol-decoder": "^1.0.0", + "bulk-write-stream": "^2.0.1", + "end-of-stream": "^1.4.4", + "fastfall": "^1.5.1", + "fastparallel": "^2.3.0", + "fastseries": "^2.0.0", + "mqemitter": "^4.2.0", + "mqtt-packet": "^6.3.2", + "proxy-protocol-js": "^4.0.4", + "readable-stream": "^3.6.0", + "retimer": "^2.0.0", + 
"reusify": "^1.0.4", + "shortid": "^2.2.15", + "uuid": "^8.0.0" + }, + "deprecated": false, + "description": "Stream-based MQTT broker", + "devDependencies": { + "@sinonjs/fake-timers": "^6.0.1", + "@types/node": "^14.0.1", + "@typescript-eslint/eslint-plugin": "^2.30.0", + "@typescript-eslint/parser": "^2.30.0", + "concat-stream": "^2.0.0", + "duplexify": "^4.1.1", + "license-checker": "^25.0.1", + "markdownlint-cli": "^0.23.1", + "mqtt": "^4.0.0", + "mqtt-connection": "^4.0.0", + "pre-commit": "^1.2.2", + "proxyquire": "^2.1.3", + "release-it": "^14.0.2", + "snazzy": "^8.0.0", + "standard": "^14.3.3", + "tap": "^14.10.7", + "typescript": "^4.0.2", + "websocket-stream": "^5.5.2" + }, + "engines": { + "node": ">=10" + }, + "homepage": "https://github.com/moscajs/aedes#readme", + "keywords": [ + "mqtt", + "broker", + "server", + "mqtt-server", + "stream", + "streams", + "publish", + "subscribe", + "pubsub", + "messaging", + "mosca", + "mosquitto", + "iot", + "internet", + "of", + "things" + ], + "license": "MIT", + "main": "aedes.js", + "name": "aedes", + "pre-commit": [ + "test" + ], + "release-it": { + "github": { + "release": true + }, + "git": { + "tagName": "v${version}" + }, + "hooks": { + "before:init": [ + "npm run test" + ] + }, + "npm": { + "publish": true + } + }, + "repository": { + "type": "git", + "url": "git+https://github.com/moscajs/aedes.git" + }, + "scripts": { + "license-checker": "license-checker --production --onlyAllow=\"MIT;ISC;BSD-3-Clause;BSD-2-Clause\"", + "lint": "npm run lint:standard && npm run lint:typescript && npm run lint:markdown", + "lint:markdown": "markdownlint docs/*.md README.md", + "lint:standard": "standard --verbose | snazzy", + "lint:typescript": "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin test/types/*.ts aedes.d.ts", + "release": "read -p 'GITHUB_TOKEN: ' GITHUB_TOKEN && export GITHUB_TOKEN=$GITHUB_TOKEN && release-it --disable-metrics", + "test": "npm run lint && npm run unit && npm run typescript", + "test:ci": "npm run lint && npm run unit -- --cov --coverage-report=lcovonly && npm run typescript", + "test:report": "npm run lint && npm run unit:report && npm run typescript", + "typescript": "tsc --project ./test/types/tsconfig.json", + "unit": "tap --no-esm -J test/*.js", + "unit:report": "tap --no-esm -J test/*.js --cov --coverage-report=html --coverage-report=cobertura | tee out.tap" + }, + "types": "aedes.d.ts", + "version": "0.42.6", + "warnings": [ + { + "code": "ENOTSUP", + "required": { + "node": ">=10" + }, + "pkgid": "aedes@0.42.6" + } + ] +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/auth.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/auth.js new file mode 100644 index 000000000..4cd74c34e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/auth.js @@ -0,0 +1,1099 @@ +'use strict' + +const { test } = require('tap') +const eos = require('end-of-stream') +const Faketimers = require('@sinonjs/fake-timers') +const Client = require('../lib/client') +const { setup, connect, noError, subscribe, subscribeMultiple } = require('./helper') +const aedes = require('../') + +test('authenticate successfully a client with username and password', function (t) { + t.plan(4) + + const s = noError(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') 
+ t.deepEqual(password, Buffer.from('my pass'), 'password is there') + cb(null, true) + } + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 0, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'successful connack') + }) +}) + +test('authenticate unsuccessfully a client with username and password', function (t) { + t.plan(6) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + cb(null, false) + } + + s.broker.on('clientError', function (client, err) { + t.equal(err.errorCode, 5) + }) + + s.broker.on('clientReady', function (client) { + t.fail('client should not ready') + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 5, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack, unauthorized') + }) + + eos(s.outStream, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) +}) + +test('authenticate errors', function (t) { + t.plan(7) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + cb(new Error('this should happen!')) + } + + s.broker.on('clientError', function (client, err) { + t.equal(err.message, 'this should happen!') + t.equal(err.errorCode, 5) + }) + + s.broker.on('clientReady', function (client) { + t.fail('client should not ready') + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 5, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack, unauthorized') + }) + + eos(s.outStream, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) +}) + +test('authentication error when return code 1 (unacceptable protocol version) is passed', function (t) { + t.plan(7) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + var error = new Error('Auth error') + error.returnCode = 1 + cb(error, null) + } + + s.broker.on('clientError', function (client, err) { + t.equal(err.message, 'Auth error') + 
t.equal(err.errorCode, 5) + }) + + s.broker.on('clientReady', function (client) { + t.fail('client should not ready') + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 5, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack, unauthorized') + }) + + eos(s.outStream, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) +}) + +test('authentication error when return code 2 (identifier rejected) is passed', function (t) { + t.plan(7) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + var error = new Error('Auth error') + error.returnCode = 2 + cb(error, null) + } + + s.broker.on('clientError', function (client, err) { + t.equal(err.message, 'Auth error') + t.equal(err.errorCode, 2) + }) + + s.broker.on('clientReady', function (client) { + t.fail('client should not ready') + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 2, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack, identifier rejected') + }) + + eos(s.outStream, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) +}) + +test('authentication error when return code 3 (Server unavailable) is passed', function (t) { + t.plan(7) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + var error = new Error('Auth error') + error.returnCode = 3 + cb(error, null) + } + + s.broker.on('clientError', function (client, err) { + t.equal(err.message, 'Auth error') + t.equal(err.errorCode, 3) + }) + + s.broker.on('clientReady', function (client) { + t.fail('client should not ready') + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 3, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack, Server unavailable') + }) + + eos(s.outStream, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) +}) + +test('authentication error when return code 4 (bad user or password) is passed', function (t) { + t.plan(7) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client 
instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + var error = new Error('Auth error') + error.returnCode = 4 + cb(error, null) + } + + s.broker.on('clientError', function (client, err) { + t.equal(err.message, 'Auth error') + t.equal(err.errorCode, 4) + }) + + s.broker.on('clientReady', function (client) { + t.fail('client should not ready') + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 4, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack, bad username or password') + }) + + eos(s.outStream, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) +}) + +test('authentication error when non numeric return code is passed', function (t) { + t.plan(7) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + var error = new Error('Non numeric error codes') + error.returnCode = 'return Code' + cb(error, null) + } + + s.broker.on('clientError', function (client, err) { + t.equal(err.message, 'Non numeric error codes') + t.equal(err.errorCode, 5) + }) + + s.broker.on('clientReady', function (client) { + t.fail('client should not ready') + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 5, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack, unauthorized') + }) + + eos(s.outStream, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) +}) + +test('authorize publish', function (t) { + t.plan(4) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + var expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false, + length: 12, + dup: false + } + + s.broker.authorizePublish = function (client, packet, cb) { + t.ok(client, 'client exists') + t.deepEqual(packet, expected, 'packet matches') + cb() + } + + s.broker.mq.on('hello', function (packet, cb) { + t.notOk(Object.prototype.hasOwnProperty.call(packet, 'messageId'), 'should not contain messageId in QoS 0') + expected.brokerId = s.broker.id + expected.brokerCounter = s.broker.counter + delete expected.length + t.deepEqual(packet, expected, 'packet matches') + cb() + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) +}) + +test('authorize waits for authenticate', function (t) { + t.plan(6) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authenticate = function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + process.nextTick(function () { + t.equal(username, 'my username', 'username is 
there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + client.authenticated = true + cb(null, true) + }) + } + + s.broker.authorizePublish = function (client, packet, cb) { + t.ok(client.authenticated, 'client authenticated') + cb() + } + + var expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false, + length: 12, + dup: false + } + + s.broker.mq.on('hello', function (packet, cb) { + t.notOk(Object.prototype.hasOwnProperty.call(packet, 'messageId'), 'should not contain messageId in QoS 0') + expected.brokerId = s.broker.id + expected.brokerCounter = s.broker.counter + delete expected.length + t.deepEqual(packet, expected, 'packet matches') + cb() + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) +}) + +test('authorize publish from configOptions', function (t) { + t.plan(4) + + const s = connect(setup(aedes({ + authorizePublish: function (client, packet, cb) { + t.ok(client, 'client exists') + t.deepEqual(packet, expected, 'packet matches') + cb() + } + }))) + t.tearDown(s.broker.close.bind(s.broker)) + + var expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false, + length: 12, + dup: false + } + + s.broker.mq.on('hello', function (packet, cb) { + t.notOk(Object.prototype.hasOwnProperty.call(packet, 'messageId'), 'should not contain messageId in QoS 0') + expected.brokerId = s.broker.id + expected.brokerCounter = s.broker.counter + delete expected.length + t.deepEqual(packet, expected, 'packet matches') + cb() + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) +}) + +test('do not authorize publish', function (t) { + t.plan(3) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false, + length: 12, + dup: false + } + + s.broker.authorizePublish = function (client, packet, cb) { + t.ok(client, 'client exists') + t.deepEqual(packet, expected, 'packet matches') + cb(new Error('auth negated')) + } + + eos(s.conn, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) +}) + +test('authorize subscribe', function (t) { + t.plan(5) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authorizeSubscribe = function (client, sub, cb) { + t.ok(client, 'client exists') + t.deepEqual(sub, { + topic: 'hello', + qos: 0 + }, 'topic matches') + cb(null, sub) + } + + subscribe(t, s, 'hello', 0) +}) + +test('authorize subscribe multiple same topics with same qos', function (t) { + t.plan(4) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authorizeSubscribe = function (client, sub, cb) { + t.deepEqual(sub, { + topic: 'hello', + qos: 0 + }, 'topic matches') + cb(null, sub) + } + + subscribeMultiple(t, s, [{ topic: 'hello', qos: 0 }, { topic: 'hello', qos: 0 }], [0]) +}) + +test('authorize subscribe multiple same topics with different qos', function (t) { + t.plan(4) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authorizeSubscribe = function (client, sub, cb) { + 
t.deepEqual(sub, { + topic: 'hello', + qos: 1 + }, 'topic matches') + cb(null, sub) + } + + subscribeMultiple(t, s, [{ topic: 'hello', qos: 0 }, { topic: 'hello', qos: 1 }], [1]) +}) + +test('authorize subscribe multiple different topics', function (t) { + t.plan(7) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authorizeSubscribe = function (client, sub, cb) { + t.ok(client, 'client exists') + if (sub.topic === 'hello') { + t.deepEqual(sub, { + topic: 'hello', + qos: 0 + }, 'topic matches') + } else if (sub.topic === 'foo') { + t.deepEqual(sub, { + topic: 'foo', + qos: 0 + }, 'topic matches') + } + cb(null, sub) + } + + subscribeMultiple(t, s, [{ topic: 'hello', qos: 0 }, { topic: 'foo', qos: 0 }], [0, 0]) +}) + +test('authorize subscribe from config options', function (t) { + t.plan(5) + + const s = connect(setup(aedes({ + authorizeSubscribe: function (client, sub, cb) { + t.ok(client, 'client exists') + t.deepEqual(sub, { + topic: 'hello', + qos: 0 + }, 'topic matches') + cb(null, sub) + } + }))) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, 'hello', 0) +}) + +test('negate subscription', function (t) { + t.plan(5) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authorizeSubscribe = function (client, sub, cb) { + t.ok(client, 'client exists') + t.deepEqual(sub, { + topic: 'hello', + qos: 0 + }, 'topic matches') + cb(null, null) + } + + s.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: [{ + topic: 'hello', + qos: 0 + }] + }) + + s.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'suback') + t.deepEqual(packet.granted, [128]) + t.equal(packet.messageId, 24) + }) +}) + +test('negate multiple subscriptions', function (t) { + t.plan(5) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.authorizeSubscribe = function (client, sub, cb) { + t.ok(client, 'client exists') + cb(null, null) + } + + s.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: [{ + topic: 'hello', + qos: 0 + }, { + topic: 'world', + qos: 0 + }] + }) + + s.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'suback') + t.deepEqual(packet.granted, [128, 128]) + t.equal(packet.messageId, 24) + }) +}) + +test('negate subscription with correct persistence', function (t) { + t.plan(6) + + const expected = [{ + topic: 'hello', + qos: 0 + }, { + topic: 'world', + qos: 0 + }] + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.authorizeSubscribe = function (client, sub, cb) { + t.ok(client, 'client exists') + if (sub.topic === 'hello') { + sub = null + } + cb(null, sub) + } + + const s = connect(setup(broker), { clean: false, clientId: 'abcde' }) + s.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'suback') + t.deepEqual(packet.granted, [128, 0]) + broker.persistence.subscriptionsByClient(broker.clients.abcde, function (_, subs, client) { + t.deepEqual(subs, expected) + }) + t.equal(packet.messageId, 24) + }) + + s.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: [{ + topic: 'hello', + qos: 0 + }, { + topic: 'world', + qos: 0 + }] + }) +}) + +test('negate multiple subscriptions random times', function (t) { + t.plan(5) + + const clock = Faketimers.createClock() + const s = connect(setup()) + t.tearDown(function () { + clock.reset() + s.broker.close() + }) + + s.broker.authorizeSubscribe = function (client, sub, cb) { + t.ok(client, 'client exists') + if (sub.topic === 
'hello') { + clock.setTimeout(function () { + cb(null, sub) + }, 100) + } else { + cb(null, null) + clock.tick(100) + } + } + + s.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: [{ + topic: 'hello', + qos: 0 + }, { + topic: 'world', + qos: 0 + }] + }) + + s.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'suback') + t.deepEqual(packet.granted, [0, 128]) + t.equal(packet.messageId, 24) + }) +}) + +test('failed authentication does not disconnect other client with same clientId', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + const s0 = setup(broker) + + broker.authenticate = function (client, username, password, cb) { + cb(null, password.toString() === 'right') + } + + s0.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'right', + keepalive: 0 + }) + + s0.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 0, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'successful connack') + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'wrong', + keepalive: 0 + }) + }) + + const removeEos = eos(s0.outStream, function () { + t.fail('ended before time') + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 5, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack') + }) + + eos(s.outStream, function () { + t.pass('ended') + removeEos() + }) +}) + +test('unauthorized connection should not unregister the correct one with same clientId', function (t) { + t.plan(4) + + const broker = aedes({ + authenticate: function (client, username, password, callback) { + if (username === 'correct') { + callback(null, true) + } else { + const error = new Error() + error.returnCode = 4 + callback(error, false) + } + } + }) + t.tearDown(broker.close.bind(broker)) + + broker.on('clientError', function (client, err) { + t.equal(err.message, 'bad user name or password') + t.equal(err.errorCode, 4) + t.equal(broker.connectedClients, 1, 'my-client still connected') + }) + + connect(setup(broker), { + clientId: 'my-client', + username: 'correct' + }, function () { + t.equal(broker.connectedClients, 1, 'my-client connected') + connect(setup(broker), { + clientId: 'my-client', + username: 'unauthorized' + }, function () { + // other unauthorized connection with the same clientId should not unregister the correct one. 
+ t.fail('unauthorized should not connect') + }) + }) +}) + +test('set authentication method in config options', function (t) { + t.plan(5) + + const s = setup(aedes({ + authenticate: function (client, username, password, cb) { + t.ok(client instanceof Client, 'client is there') + t.equal(username, 'my username', 'username is there') + t.deepEqual(password, Buffer.from('my pass'), 'password is there') + cb(null, false) + } + })) + t.tearDown(s.broker.close.bind(s.broker)) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 5, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'unsuccessful connack') + }) + + eos(s.outStream, function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + username: 'my username', + password: 'my pass', + keepalive: 0 + }) +}) + +test('change a topic name inside authorizeForward method in QoS 1 mode', function (t) { + t.plan(3) + + const broker = aedes({ + authorizeForward: function (client, packet) { + packet.payload = Buffer.from('another-world') + packet.messageId = 2 + return packet + } + }) + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('another-world'), + dup: false, + length: 22, + qos: 1, + retain: false, + messageId: 2 + } + + broker.on('client', function (client) { + client.subscribe({ + topic: 'hello', + qos: 1 + }, function (err) { + t.error(err, 'no error') + + broker.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + + const s = connect(setup(broker)) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) +}) + +;[true, false].forEach(function (cleanSession) { + test(`unauthorized forward publish in QoS 1 mode [clean=${cleanSession}]`, function (t) { + t.plan(2) + + const broker = aedes({ + authorizeForward: function (client, packet) { + return null + } + }) + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + client.subscribe({ + topic: 'hello', + qos: 1 + }, function (err) { + t.error(err, 'no error') + + broker.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + + const s = connect(setup(broker), { clean: cleanSession }) + + s.outStream.once('data', function (packet) { + t.fail('Should have not recieved this packet') + }) + }) +}) + +test('prevent publish in QoS 0 mode', function (t) { + t.plan(2) + + const broker = aedes({ + authorizeForward: function (client, packet) { + return null + } + }) + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + client.subscribe({ + topic: 'hello', + qos: 0 + }, function (err) { + t.error(err, 'no error') + + broker.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 0 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + + const s = connect(setup(broker)) + + s.outStream.once('data', function (packet) { + t.fail('Should have not recieved this packet') + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/basic.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/basic.js new file mode 100644 index 000000000..985e3f006 --- /dev/null +++ 
b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/basic.js @@ -0,0 +1,815 @@ +'use strict' + +const { test } = require('tap') +const eos = require('end-of-stream') +const { setup, connect, subscribe, subscribeMultiple, noError } = require('./helper') +const aedes = require('../') +const proxyquire = require('proxyquire') + +test('test aedes.Server', function (t) { + t.plan(1) + + const broker = new aedes.Server() + t.tearDown(broker.close.bind(broker)) + + connect(setup(broker), {}, function () { + t.pass('connected') + }) +}) + +test('publish QoS 0', function (t) { + t.plan(2) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + var expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false, + dup: false + } + + s.broker.mq.on('hello', function (packet, cb) { + expected.brokerId = s.broker.id + expected.brokerCounter = s.broker.counter + t.equal(packet.messageId, undefined, 'MUST not contain a packet identifier in QoS 0') + t.deepEqual(packet, expected, 'packet matches') + cb() + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) +}) + +test('messageId should reset to 1 if it reached 65535', function (t) { + t.plan(7) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + const publishPacket = { + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + } + var count = 0 + s.broker.on('clientReady', function (client) { + subscribe(t, s, 'hello', 1, function () { + client._nextId = 65535 + s.outStream.on('data', function (packet) { + if (packet.cmd === 'puback') { + t.equal(packet.messageId, 42) + } + if (packet.cmd === 'publish') { + t.equal(packet.messageId, count++ === 0 ? 65535 : 1) + } + }) + s.inStream.write(publishPacket) + s.inStream.write(publishPacket) + }) + }) +}) + +test('publish empty topic throws error', function (t) { + t.plan(1) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.inStream.write({ + cmd: 'publish', + topic: '', + payload: 'world' + }) + + s.broker.on('clientError', function (client, err) { + t.pass('should emit error') + }) +}) + +;[{ qos: 0, clean: false }, { qos: 0, clean: true }, { qos: 1, clean: false }, { qos: 1, clean: true }].forEach(function (ele) { + test('subscribe a single topic in QoS ' + ele.qos + ' [clean=' + ele.clean + ']', function (t) { + t.plan(5) + + const s = connect(setup(), { clean: ele.clean }) + t.tearDown(s.broker.close.bind(s.broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 12, + qos: 0, + retain: false + } + const expectedSubs = ele.clean ?
null : [{ topic: 'hello', qos: ele.qos }] + + subscribe(t, s, 'hello', ele.qos, function () { + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) + + s.broker.persistence.subscriptionsByClient(s.client, function (_, subs) { + t.deepEqual(subs, expectedSubs) + }) + + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) + }) + }) +}) + +// Catch invalid packet writeToStream errors +test('return write errors to callback', function (t) { + t.plan(1) + + const write = proxyquire('../lib/write.js', { + 'mqtt-packet': { + writeToStream: () => { + throw Error('error') + } + } + }) + + var client = { + conn: { + writable: true + }, + connecting: true + } + + write(client, {}, function (err) { + t.equal(err.message, 'packet received not valid', 'should return the error to callback') + }) +}) + +;[{ qos: 0, clean: false }, { qos: 0, clean: true }, { qos: 1, clean: false }, { qos: 1, clean: true }].forEach(function (ele) { + test('subscribe multipe topics in QoS ' + ele.qos + ' [clean=' + ele.clean + ']', function (t) { + t.plan(5) + + const s = connect(setup(), { clean: ele.clean }) + t.tearDown(s.broker.close.bind(s.broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 12, + qos: 0, + retain: false + } + const subs = [{ topic: 'hello', qos: ele.qos }, { topic: 'world', qos: ele.qos }] + const expectedSubs = ele.clean ? null : subs + + subscribeMultiple(t, s, subs, [ele.qos, ele.qos], function () { + s.outStream.on('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) + + s.broker.persistence.subscriptionsByClient(s.client, function (_, saveSubs) { + t.deepEqual(saveSubs, expectedSubs) + }) + + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) + }) + }) +}) + +test('does not die badly on connection error', function (t) { + t.plan(3) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.inStream.write({ + cmd: 'subscribe', + messageId: 42, + subscriptions: [{ + topic: 'hello', + qos: 0 + }] + }) + + s.broker.on('clientError', function (client, err) { + t.ok(client, 'client is passed') + t.ok(err, 'err is passed') + }) + + s.outStream.on('data', function (packet) { + s.conn.destroy() + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world') + }, function () { + t.pass('calls the callback') + }) + }) +}) + +// Guarded in mqtt-packet +test('subscribe should have messageId', function (t) { + t.plan(1) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.inStream.write({ + cmd: 'subscribe', + subscriptions: [{ + topic: 'hello', + qos: 0 + }] + }) + s.broker.on('connectionError', function (client, err) { + t.ok(err.message, 'Invalid messageId') + }) +}) + +test('unsubscribe', function (t) { + t.plan(5) + + const s = noError(connect(setup()), t) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, 'hello', 0, function () { + s.inStream.write({ + cmd: 'unsubscribe', + messageId: 43, + unsubscriptions: ['hello'] + }) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, { + cmd: 'unsuback', + messageId: 43, + dup: false, + length: 2, + qos: 0, + retain: false + }, 'packet matches') + + s.outStream.on('data', function (packet) { + t.fail('packet received') + }) + + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }, function () { + t.pass('publish finished') + }) + }) + 
}) +}) + +test('unsubscribe without subscribe', function (t) { + t.plan(1) + + const s = noError(connect(setup()), t) + t.tearDown(s.broker.close.bind(s.broker)) + + s.inStream.write({ + cmd: 'unsubscribe', + messageId: 43, + unsubscriptions: ['hello'] + }) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, { + cmd: 'unsuback', + messageId: 43, + dup: false, + length: 2, + qos: 0, + retain: false + }, 'packet matches') + }) +}) + +test('unsubscribe on disconnect for a clean=true client', function (t) { + t.plan(6) + + const opts = { clean: true } + const s = connect(setup(), opts) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, 'hello', 0, function () { + s.conn.destroy(null, function () { + t.pass('closed streams') + }) + s.outStream.on('data', function () { + t.fail('should not receive any more messages') + }) + s.broker.once('unsubscribe', function () { + t.pass('should emit unsubscribe') + }) + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world') + }, function () { + t.pass('calls the callback') + }) + }) +}) + +test('unsubscribe on disconnect for a clean=false client', function (t) { + t.plan(5) + + const opts = { clean: false } + const s = connect(setup(), opts) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, 'hello', 0, function () { + s.conn.destroy(null, function () { + t.pass('closed streams') + }) + s.outStream.on('data', function () { + t.fail('should not receive any more messages') + }) + s.broker.once('unsubscribe', function () { + t.fail('should not emit unsubscribe') + }) + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world') + }, function () { + t.pass('calls the callback') + }) + }) +}) + +test('disconnect', function (t) { + t.plan(1) + + const s = noError(connect(setup()), t) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('clientDisconnect', function () { + t.pass('closed stream') + }) + + s.inStream.write({ + cmd: 'disconnect' + }) +}) + +test('disconnect client on wrong cmd', function (t) { + t.plan(1) + + const s = noError(connect(setup()), t) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('clientDisconnect', function () { + t.pass('closed stream') + }) + + s.broker.on('clientReady', function (c) { + // don't use stream write here because it will throw an error on mqtt_packet genetete + c._parser.emit('packet', { cmd: 'pippo' }) + }) +}) + +test('client closes', function (t) { + t.plan(5) + + const broker = aedes() + const client = noError(connect(setup(broker), { clientId: 'abcde' })) + broker.on('clientReady', function () { + const brokerClient = broker.clients.abcde + t.equal(brokerClient.connected, true, 'client connected') + eos(client.conn, t.pass.bind(t, 'client closes')) + setImmediate(() => { + brokerClient.close(function () { + t.equal(broker.clients.abcde, undefined, 'client instance is removed') + }) + t.equal(brokerClient.connected, false, 'client disconnected') + broker.close(function (err) { + t.error(err, 'no error') + }) + }) + }) +}) + +test('broker closes', function (t) { + t.plan(4) + + const broker = aedes() + const client = noError(connect(setup(broker), { + clientId: 'abcde' + }, function () { + eos(client.conn, t.pass.bind(t, 'client closes')) + broker.close(function (err) { + t.error(err, 'no error') + t.ok(broker.closed) + t.equal(broker.clients.abcde, undefined, 'client instance is removed') + }) + })) +}) + +test('broker closes gracefully', function (t) { + t.plan(7) + + const broker = aedes() + 
const client1 = noError(connect(setup(broker), { + }, function () { + const client2 = noError(connect(setup(broker), { + }, function () { + t.equal(broker.connectedClients, 2, '2 connected clients') + eos(client1.conn, t.pass.bind(t, 'client1 closes')) + eos(client2.conn, t.pass.bind(t, 'client2 closes')) + broker.close(function (err) { + t.error(err, 'no error') + t.ok(broker.mq.closed, 'broker mq closes') + t.ok(broker.closed, 'broker closes') + t.equal(broker.connectedClients, 0, 'no connected clients') + }) + })) + })) +}) + +test('testing other event', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const client = setup(broker) + + broker.on('connectionError', function (client, error) { + t.notOk(client.id, null) + }) + client.conn.emit('error', 'Connect not yet arrived') +}) + +test('connect without a clientId for MQTT 3.1.1', function (t) { + t.plan(1) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + keepalive: 0 + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 0, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'successful connack') + }) +}) + +test('disconnect existing client with the same clientId', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const c1 = connect(setup(broker), { + clientId: 'abcde' + }, function () { + eos(c1.conn, function () { + t.pass('first client disconnected') + }) + + connect(setup(broker), { + clientId: 'abcde' + }, function () { + t.pass('second client connected') + }) + }) +}) + +test('disconnect if another broker connects the same clientId', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const c1 = connect(setup(broker), { + clientId: 'abcde' + }, function () { + eos(c1.conn, function () { + t.pass('disconnect first client') + }) + + broker.publish({ + topic: '$SYS/anotherBroker/new/clients', + payload: Buffer.from('abcde') + }, function () { + t.pass('second client connects to another broker') + }) + }) +}) + +test('publish to $SYS/broker/new/clients', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.mq.on('$SYS/' + broker.id + '/new/clients', function (packet, done) { + t.equal(packet.payload.toString(), 'abcde', 'clientId matches') + done() + }) + + connect(setup(broker), { + clientId: 'abcde' + }) +}) + +test('publish to $SYS/broker/new/subsribers and $SYS/broker/new/unsubsribers', function (t) { + t.plan(7) + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const sub = { + topic: 'hello', + qos: 0 + } + + broker.mq.on('$SYS/' + broker.id + '/new/subscribes', function (packet, done) { + var payload = JSON.parse(packet.payload.toString()) + t.equal(payload.clientId, 'abcde', 'clientId matches') + t.deepEqual(payload.subs, [sub], 'subscriptions matches') + done() + }) + + broker.mq.on('$SYS/' + broker.id + '/new/unsubscribes', function (packet, done) { + var payload = JSON.parse(packet.payload.toString()) + t.equal(payload.clientId, 'abcde', 'clientId matches') + t.deepEqual(payload.subs, [sub.topic], 'unsubscriptions matches') + done() + }) + + var subscriber = connect(setup(broker), { + clean: false, clientId: 'abcde' + }, function () { + subscribe(t, subscriber, sub.topic, sub.qos, 
function () { + subscriber.inStream.write({ + cmd: 'unsubscribe', + messageId: 43, + unsubscriptions: ['hello'] + }) + }) + }) +}) + +test('restore QoS 0 subscriptions not clean', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 12, + retain: false + } + + var subscriber = connect(setup(broker), { + clean: false, clientId: 'abcde' + }, function () { + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker), { + }, function () { + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }, function (connect) { + t.equal(connect.sessionPresent, true, 'session present is set to true') + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 0 + }) + }) + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + }) + }) + }) + }) +}) + +test('do not restore QoS 0 subscriptions when clean', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { + clean: true, clientId: 'abcde' + }, function () { + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.inStream.end() + subscriber.broker.persistence.subscriptionsByClient(broker.clients.abcde, function (_, subs, client) { + t.equal(subs, null, 'no previous subscriptions restored') + }) + const publisher = connect(setup(broker), { + }, function () { + subscriber = connect(setup(broker), { + clean: true, clientId: 'abcde' + }, function (connect) { + t.equal(connect.sessionPresent, false, 'session present is set to false') + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 0 + }) + }) + subscriber.outStream.once('data', function (packet) { + t.fail('packet received') + }) + }) + }) + }) +}) + +test('double sub does not double deliver', function (t) { + t.plan(7) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 12, + qos: 0, + retain: false + } + const s = connect(setup(), { + }, function () { + subscribe(t, s, 'hello', 0, function () { + subscribe(t, s, 'hello', 0, function () { + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + s.outStream.on('data', function () { + t.fail('double deliver') + }) + }) + + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) + }) + }) + }) + t.tearDown(s.broker.close.bind(s.broker)) +}) + +test('overlapping sub does not double deliver', function (t) { + t.plan(7) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 12, + qos: 0, + retain: false + } + const s = connect(setup(), { + }, function () { + subscribe(t, s, 'hello', 0, function () { + subscribe(t, s, 'hello/#', 0, function () { + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + s.outStream.on('data', function () { + t.fail('double deliver') + }) + }) + + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }) + }) + }) + }) + t.tearDown(s.broker.close.bind(s.broker)) +}) + +test('clear drain', function (t) { + t.plan(4) + + const s = connect(setup(), { + }, function () { + subscribe(t, s, 'hello', 0, function () { + // fake a 
busy socket + s.conn.write = function (chunk, enc, cb) { + return false + } + + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: 'world' + }, function () { + t.pass('callback called') + }) + + s.conn.destroy() + }) + }) + + t.tearDown(s.broker.close.bind(s.broker)) +}) + +test('id option', function (t) { + t.plan(2) + + const broker1 = aedes() + + setup(broker1).conn.destroy() + t.ok(broker1.id, 'broker gets random id when id option not set') + + const broker2 = aedes({ id: 'abc' }) + setup(broker2).conn.destroy() + t.equal(broker2.id, 'abc', 'broker id equals id option when set') + + t.tearDown(() => { + broker1.close() + broker2.close() + }) +}) + +test('not duplicate client close when client error occurs', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + connect(setup(broker)) + broker.on('client', function (client) { + client.conn.on('drain', () => { + t.pass('client closed ok') + }) + client.close() + // add back to test if there is duplicated close() call + client.conn.on('drain', () => { + t.fail('double client close calls') + }) + }) +}) + +test('not duplicate client close when double close() called', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + connect(setup(broker)) + broker.on('clientReady', function (client) { + client.conn.on('drain', () => { + t.pass('client closed ok') + }) + client.close() + // add back to test if there is duplicated close() call + client.conn.on('drain', () => { + t.fail('double execute client close function') + }) + client.close() + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/client-pub-sub.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/client-pub-sub.js new file mode 100644 index 000000000..6f9c9a849 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/client-pub-sub.js @@ -0,0 +1,942 @@ +'use strict' + +const { test } = require('tap') +const { setup, connect, subscribe, noError } = require('./helper') +const aedes = require('../') + +test('publish direct to a single client QoS 0', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 12, + qos: 0, + retain: false + } + + broker.on('client', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 0 + }, function (err) { + t.error(err, 'no error') + }) + }) + + const s = connect(setup(broker)) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) +}) + +test('publish direct to a single client throws error', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.persistence.outgoingEnqueue = function (sub, packet, done) { + done(new Error('Throws error')) + } + + broker.on('client', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: false + }, function (err) { + t.pass('Throws error', err.message, 'throws error') + }) + }) + + connect(setup(broker), { clean: false }) +}) + +test('publish direct to a single client throws error 2', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.persistence.outgoingUpdate = function (client, packet, done) { + done(new Error('Throws error'), client, packet) + } + + broker.on('client', 
function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: false + }, () => {}) + + client.once('error', function (err) { + t.pass('Throws error', err.message, 'throws error') + }) + }) + + connect(setup(broker), { clean: false }) +}) + +test('publish direct to a single client QoS 1', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 14, + qos: 1, + retain: false + } + + broker.on('client', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1 + }, function (err) { + t.error(err, 'no error') + }) + }) + + const s = connect(setup(broker)) + + s.outStream.once('data', function (packet) { + expected.messageId = packet.messageId + t.deepEqual(packet, expected, 'packet matches') + s.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) + }) +}) + +test('publish QoS 2 throws error in pubrel', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = connect(setup(broker)) + + broker.on('clientError', function (c, err) { + t.pass('throws error') + }) + + s.outStream.on('data', function (packet) { + if (packet.cmd === 'publish') { + s.inStream.write({ + cmd: 'pubrec', + messageId: packet.messageId + }) + s.broker.persistence.outgoingUpdate = function (client, pubrel, cb) { + cb(new Error('error')) + } + } + }) + + broker.on('clientReady', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 2 + }, function (err) { + t.error(err, 'no error') + }) + }) +}) + +test('publish direct to a single client QoS 2', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var publishCount = 0 + var nonPublishCount = 0 + + broker.on('clientReady', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 2 + }, function (err) { + t.error(err, 'no error') + }) + client.on('error', function (err) { + t.error(err) + }) + }) + + const s = connect(setup(broker)) + + s.inStream.on('close', () => { + t.equal(publishCount, 1) + t.equal(nonPublishCount, 1) + }) + + s.outStream.on('data', function (packet) { + if (packet.cmd === 'publish') { + publishCount++ + s.inStream.write({ + cmd: 'pubrec', + messageId: packet.messageId + }) + } else { + nonPublishCount++ + s.inStream.write({ + cmd: 'pubcomp', + messageId: packet.messageId + }) + s.inStream.destroy() + } + }) +}) + +test('emit a `ack` event on PUBACK for QoS 1 [clean=false]', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: false, + dup: false + } + + broker.on('clientReady', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1 + }, function (err) { + t.error(err, 'no error') + }) + }) + + broker.once('ack', function (packet, client) { + expected.brokerId = packet.brokerId + expected.brokerCounter = packet.brokerCounter + expected.messageId = packet.messageId + t.deepEqual(packet, expected, 'ack packet is origianl packet') + t.pass('got the ack event') + }) + + const s = connect(setup(broker), { clean: false }) + + s.outStream.once('data', function (packet) { + s.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) 
+ }) +}) + +test('emit a `ack` event on PUBACK for QoS 1 [clean=true]', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('clientReady', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1 + }, function (err) { + t.error(err, 'no error') + }) + }) + + broker.once('ack', function (packet, client) { + t.equal(packet, undefined, 'ack packet is undefined') + t.pass('got the ack event') + }) + + const s = connect(setup(broker), { clean: true }) + + s.outStream.once('data', function (packet) { + s.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) + }) +}) + +test('emit a `ack` event on PUBCOMP for QoS 2 [clean=false]', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var messageId + var clientId + + broker.on('clientReady', function (client) { + clientId = client.id + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 2 + }, function (err) { + t.error(err, 'no error') + }) + }) + + broker.once('ack', function (packet, client) { + t.equal(client.id, clientId) + t.equal(packet.messageId, messageId) + t.equal(packet.cmd, 'pubrel', 'ack packet is pubrel') + t.pass('got the ack event') + }) + + const s = connect(setup(broker), { clean: false }) + + s.outStream.on('data', function (packet) { + if (packet.cmd === 'publish') { + s.inStream.write({ + cmd: 'pubrec', + messageId: packet.messageId + }) + } else { + messageId = packet.messageId + s.inStream.write({ + cmd: 'pubcomp', + messageId: packet.messageId + }) + } + }) +}) + +test('emit a `ack` event on PUBCOMP for QoS 2 [clean=true]', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('clientReady', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 2 + }, function (err) { + t.error(err, 'no error') + }) + }) + + broker.once('ack', function (packet, client) { + t.equal(packet, undefined, 'ack packet is undefined') + t.pass('got the ack event') + }) + + const s = connect(setup(broker), { clean: true }) + + s.outStream.on('data', function (packet) { + if (packet.cmd === 'publish') { + s.inStream.write({ + cmd: 'pubrec', + messageId: packet.messageId + }) + } else { + s.inStream.write({ + cmd: 'pubcomp', + messageId: packet.messageId + }) + } + }) +}) + +test('offline message support for direct publish', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 14, + qos: 1, + retain: false + } + const opts = { + clean: false, + clientId: 'abcde' + } + + broker.once('client', function (client) { + client.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1 + }, function (err) { + t.error(err, 'no error') + }) + }) + + var s = connect(setup(broker), opts) + + s.outStream.once('data', function (packet) { + s = connect(setup(broker), opts) + + s.outStream.once('data', function (packet) { + s = connect(setup(broker), opts) + s.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + }) +}) + +test('subscribe a client programmatically', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload:
Buffer.from('world'), + dup: false, + length: 12, + qos: 0, + retain: false + } + + broker.on('client', function (client) { + client.subscribe({ + topic: 'hello', + qos: 0 + }, function (err) { + t.error(err, 'no error') + + broker.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 0 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + + const s = connect(setup(broker)) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) +}) + +test('subscribe throws error when QoS > 0', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('clientReady', function (client) { + client.subscribe({ + topic: 'hello', + qos: 1 + }, function (err) { + t.error(err, 'no error') + + // makes writeQos throw error + client.connected = false + client.connecting = false + + broker.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 1 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + + broker.on('clientError', function (client, error) { + t.equal(error.message, 'connection closed', 'should throw clientError') + }) + + connect(setup(broker)) +}) + +test('subscribe a client programmatically - wildcard', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello/world/1', + payload: Buffer.from('world'), + dup: false, + length: 20, + qos: 0, + retain: false + } + + broker.on('clientReady', function (client) { + client.subscribe({ + topic: '+/world/1', + qos: 0 + }, function (err) { + t.error(err, 'no error') + + broker.publish({ + topic: 'hello/world/1', + payload: Buffer.from('world'), + qos: 0 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + + const s = connect(setup(broker)) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) +}) + +test('unsubscribe a client', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + client.subscribe({ + topic: 'hello', + qos: 0 + }, function (err) { + t.error(err, 'no error') + client.unsubscribe([{ + topic: 'hello', + qos: 0 + }], function (err) { + t.error(err, 'no error') + }) + }) + }) + connect(setup(broker)) +}) + +test('unsubscribe should not call removeSubscriptions when [clean=true]', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.persistence.removeSubscriptions = function (client, subs, cb) { + cb(Error('remove subscription is called')) + } + + broker.on('client', function (client) { + client.subscribe({ + topic: 'hello', + qos: 1 + }, function (err) { + t.error(err, 'no error') + client.unsubscribe({ + unsubscriptions: [{ + topic: 'hello', + qos: 1 + }], + messageId: 42 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + connect(setup(broker), { clean: true }) +}) + +test('unsubscribe throws error', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + client.subscribe({ + topic: 'hello', + qos: 0 + }, function (err) { + t.error(err, 'no error') + broker.unsubscribe = function (topic, func, done) { + done(new Error('error')) + } + client.unsubscribe({ + topic: 'hello', + qos: 0 + }, function () { + t.pass('throws error') + }) + }) + }) + connect(setup(broker)) +}) + +test('unsubscribe throws error 2', function (t) { 
+ t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + client.subscribe({ + topic: 'hello', + qos: 2 + }, function (err) { + t.error(err, 'no error') + broker.persistence.removeSubscriptions = function (client, unsubscriptions, done) { + done(new Error('error')) + } + client.unsubscribe({ + unsubscriptions: [{ + topic: 'hello', + qos: 2 + }], + messageId: 42 + }, function () { + t.pass('throws error') + }) + }) + }) + connect(setup(broker)) +}) + +test('subscribe a client programmatically multiple topics', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 12, + qos: 0, + retain: false + } + + broker.on('client', function (client) { + client.subscribe([{ + topic: 'hello', + qos: 0 + }, { + topic: 'aaa', + qos: 0 + }], function (err) { + t.error(err, 'no error') + + broker.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 0 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + + const s = connect(setup(broker)) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) +}) + +test('subscribe a client programmatically with full packet', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + dup: false, + length: 12, + qos: 0, + retain: false + } + + broker.on('client', function (client) { + client.subscribe({ + subscriptions: [{ + topic: 'hello', + qos: 0 + }, { + topic: 'aaa', + qos: 0 + }] + }, function (err) { + t.error(err, 'no error') + + broker.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 0 + }, function (err) { + t.error(err, 'no error') + }) + }) + }) + + const s = connect(setup(broker)) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) +}) + +test('get message when client connects', function (t) { + t.plan(2) + + const client1 = 'gav' + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + client.subscribe({ + subscriptions: [{ + topic: '$SYS/+/new/clients', + qos: 0 + }] + }, function (err) { + t.error(err, 'no error') + }) + }) + + const s1 = connect(setup(broker), { clientId: client1 }) + + s1.outStream.on('data', function (packet) { + t.equal(client1, packet.payload.toString()) + }) +}) + +test('get message when client disconnects', function (t) { + t.plan(2) + + const client1 = 'gav' + const client2 = 'friend' + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + if (client.id === client1) { + client.subscribe({ + subscriptions: [{ + topic: '$SYS/+/disconnect/clients', + qos: 0 + }] + }, function (err) { + t.error(err, 'no error') + }) + } else { + client.close() + } + }) + + const s1 = connect(setup(broker), { clientId: client1 }) + connect(setup(broker), { clientId: client2 }) + + s1.outStream.on('data', function (packet) { + t.equal(client2, packet.payload.toString()) + }) +}) + +test('should not receive a message on negated subscription', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.authorizeSubscribe = function (client, sub, callback) { + callback(null, null) + } + + broker.on('client', function 
(client) { + broker.publish({ + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: true + }, function (err) { + t.error(err, 'no error') + client.subscribe([{ + topic: 'hello', + qos: 0 + }, + { + topic: 'hello', + qos: 0 + }], function (err) { + t.error(err, 'no error') + }) + }) + }) + + broker.on('subscribe', function (subs) { + t.pass(subs.length, 1, 'Should dedupe subs') + t.pass(subs[0].qos, 128, 'Qos should be 128 (Fail)') + }) + + const s = connect(setup(broker)) + s.outStream.once('data', function (packet) { + t.fail('Packet should not be received') + }) +}) + +test('programmatically add custom subscribe', function (t) { + t.plan(6) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false, + length: 12, + dup: false + } + var deliverP = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false, + dup: false + } + subscribe(t, s, 'hello', 0, function () { + broker.subscribe('hello', deliver, function () { + t.pass('subscribed') + }) + s.outStream.on('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 0, + messageId: 42 + }) + }) + function deliver (packet, cb) { + deliverP.brokerId = s.broker.id + deliverP.brokerCounter = s.broker.counter + t.deepEqual(packet, deliverP, 'packet matches') + cb() + } +}) + +test('custom function in broker.subscribe', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + var expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + retain: false, + dup: false, + messageId: undefined + } + connect(s, {}, function () { + broker.subscribe('hello', deliver, function () { + t.pass('subscribed') + }) + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + }) + broker.on('publish', function (packet, client) { + if (client) { + t.equal(packet.topic, 'hello') + t.equal(packet.messageId, 42) + } + }) + function deliver (packet, cb) { + expected.brokerId = s.broker.id + expected.brokerCounter = s.broker.counter + t.deepEqual(packet, expected, 'packet matches') + cb() + } +}) + +test('custom function in broker.unsubscribe', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = noError(setup(broker)) + connect(s, {}, function () { + broker.subscribe('hello', deliver, function () { + t.pass('subscribed') + broker.unsubscribe('hello', deliver, function () { + t.pass('unsubscribe') + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'word', + qos: 1, + messageId: 42 + }) + }) + }) + }) + broker.on('publish', function (packet, client) { + if (client) { + t.pass('publish') + } + }) + function deliver (packet, cb) { + t.fail('should not be called') + cb() + } +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/close_socket_by_other_party.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/close_socket_by_other_party.js new file mode 100644 index 000000000..da1530bb1 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/close_socket_by_other_party.js @@ -0,0 +1,175 @@ +'use strict' + +const { test } = require('tap') +const EventEmitter = require('events') +const { setup, 
connect, subscribe } = require('./helper') +const aedes = require('../') + +test('aedes is closed before client authenticate returns', function (t) { + t.plan(1) + + const evt = new EventEmitter() + const broker = aedes({ + authenticate: (client, username, password, done) => { + evt.emit('AuthenticateBegin', client) + setTimeout(function () { + done(null, true) + }, 2000) + } + }) + + broker.on('client', function (client) { + t.fail('should no client registration') + }) + broker.on('connackSent', function () { + t.fail('should no connack be sent') + }) + broker.on('clientError', function (client, err) { + t.error(err) + }) + + connect(setup(broker)) + + evt.on('AuthenticateBegin', function (client) { + t.equal(broker.connectedClients, 0) + broker.close() + }) +}) + +test('client is closed before authenticate returns', function (t) { + t.plan(1) + + const evt = new EventEmitter() + const broker = aedes({ + authenticate: async (client, username, password, done) => { + evt.emit('AuthenticateBegin', client) + setTimeout(function () { + done(null, true) + }, 2000) + } + }) + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + t.fail('should no client registration') + }) + broker.on('connackSent', function () { + t.fail('should no connack be sent') + }) + broker.on('clientError', function (client, err) { + t.error(err) + }) + + connect(setup(broker)) + + evt.on('AuthenticateBegin', function (client) { + t.equal(broker.connectedClients, 0) + client.close() + }) +}) + +test('client is closed before authorizePublish returns', function (t) { + t.plan(3) + + const evt = new EventEmitter() + const broker = aedes({ + authorizePublish: (client, packet, done) => { + evt.emit('AuthorizePublishBegin', client) + // simulate latency writing to persistent store. 
+ setTimeout(function () { + done() + evt.emit('AuthorizePublishEnd', client) + }, 2000) + } + }) + + broker.on('clientError', function (client, err) { + t.equal(err.message, 'connection closed') + }) + + const s = connect(setup(broker)) + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 10, + retain: false + }) + + evt.on('AuthorizePublishBegin', function (client) { + t.equal(broker.connectedClients, 1) + client.close() + }) + evt.on('AuthorizePublishEnd', function (client) { + t.equal(broker.connectedClients, 0) + broker.close() + }) +}) + +test('close client when its socket is closed', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const subscriber = connect(setup(broker)) + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.end() + subscriber.conn.on('close', function () { + t.equal(broker.connectedClients, 0, 'no connected client') + }) + }) +}) + +test('multiple clients subscribe same topic, and all clients still receive message except the closed one', function (t) { + t.plan(5) + + const mqtt = require('mqtt') + const broker = aedes() + t.tearDown(() => { + client2.end() + broker.close() + server.close() + }) + + const server = require('net').createServer(broker.handle) + const port = 1883 + server.listen(port) + broker.on('clientError', function (client, err) { + t.error(err) + }) + + var client1, client2 + const _sameTopic = 'hello' + + // client 1 + client1 = mqtt.connect('mqtt://localhost', { clientId: 'client1', resubscribe: false, reconnectPeriod: -1 }) + client1.on('message', () => { + t.fail('client1 receives message') + }) + + client1.subscribe(_sameTopic, { qos: 0, retain: false }, () => { + t.pass('client1 sub callback') + // simulate closed socket by users + client1.stream.destroy() + + // client 2 + client2 = mqtt.connect('mqtt://localhost', { clientId: 'client2', resubscribe: false }) + client2.on('message', () => { + t.pass('client2 receives message') + t.equal(broker.connectedClients, 1) + }) + client2.subscribe(_sameTopic, { qos: 0, retain: false }, () => { + t.pass('client2 sub callback') + + // pubClient + const pubClient = mqtt.connect('mqtt://localhost', { clientId: 'pubClient' }) + pubClient.publish(_sameTopic, 'world', { qos: 0, retain: false }, () => { + t.pass('pubClient publish event') + pubClient.end() + }) + }) + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/connect.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/connect.js new file mode 100644 index 000000000..b5dba3503 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/connect.js @@ -0,0 +1,1071 @@ +'use strict' + +const { test } = require('tap') +const http = require('http') +const ws = require('websocket-stream') +const mqtt = require('mqtt') +const mqttPacket = require('mqtt-packet') +const net = require('net') +const proxyProtocol = require('proxy-protocol-js') +const { protocolDecoder } = require('aedes-protocol-decoder') +const { setup, connect, delay } = require('./helper') +const aedes = require('../') + +;[{ ver: 3, id: 'MQIsdp' }, { ver: 4, id: 'MQTT' }].forEach(function (ele) { + test('connect and connack (minimal)', function (t) { + t.plan(2) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + s.inStream.write({ + cmd: 'connect', + protocolId: ele.id, + protocolVersion: ele.ver, + clean: true, + clientId: 'my-client', + keepalive: 0 + }) + + s.outStream.on('data', function (packet) { +
t.deepEqual(packet, { + cmd: 'connack', + returnCode: 0, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'successful connack') + t.equal(s.client.version, ele.ver) + }) + }) +}) + +// [MQTT-3.1.2-2] +test('reject client requested for unacceptable protocol version', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQIsdp', + protocolVersion: 5, + clean: true, + clientId: 'my-client', + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'connack') + t.equal(packet.returnCode, 1, 'unacceptable protocol version') + t.equal(broker.connectedClients, 0) + }) + broker.on('clientError', function (client, err) { + t.fail('should not raise clientError error') + }) + broker.on('connectionError', function (client, err) { + t.equal(err.message, 'unacceptable protocol version') + }) +}) + +// [MQTT-3.1.2-1], Guarded in mqtt-packet +test('reject client requested for unsupported protocol version', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 2, + clean: true, + clientId: 'my-client', + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.fail('no data sent') + }) + broker.on('connectionError', function (client, err) { + t.equal(client.version, null) + t.equal(err.message, 'Invalid protocol version') + t.equal(broker.connectedClients, 0) + }) +}) + +// Guarded in mqtt-packet +test('reject clients with no clientId running on MQTT 3.1.0', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: true, + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.fail('no data sent') + }) + broker.on('connectionError', function (client, err) { + t.equal(client.version, null) + t.equal(err.message, 'clientId must be supplied before 3.1.1') + t.equal(broker.connectedClients, 0) + }) +}) + +// [MQTT-3.1.3-7], Guarded in mqtt-packet +test('reject clients without clientid and clean=false on MQTT 3.1.1', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: false, + clientId: '', + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.fail('no data sent') + }) + broker.on('connectionError', function (client, err) { + t.equal(err.message, 'clientId must be given if cleanSession set to 0') + t.equal(broker.connectedClients, 0) + }) +}) + +test('clients without clientid and clean=true on MQTT 3.1.1 will get a generated clientId', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'connack') + t.equal(packet.returnCode, 0) + t.equal(broker.connectedClients, 1) + t.equal(s.client.version, 4) + }) + broker.on('connectionError', function (client, err) { + t.error(err, 'no error') + }) + broker.on('client', function (client) { + 
t.ok(client.id.startsWith('aedes_')) + }) +}) + +test('client connect error while fetching subscriptions', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + broker.persistence.subscriptionsByClient = function (c, cb) { + cb(new Error('error'), [], c) + } + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: false, + clientId: 'my-client', + keepalive: 0 + }) + + broker.on('clientError', function (client, err) { + t.equal(client.version, 4) + t.pass('throws error') + }) +}) + +test('client connect clear outgoing', function (t) { + t.plan(1) + + const clientId = 'abcde' + const brokerId = 'pippo' + + const broker = aedes({ id: brokerId }) + t.tearDown(broker.close.bind(broker)) + + const subs = [{ clientId: clientId }] + const packet = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + brokerId: brokerId, + brokerCounter: 2, + retain: true, + messageId: 42, + dup: false + } + + broker.persistence.outgoingEnqueueCombi(subs, packet, function () { + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: clientId, + keepalive: 0 + }) + + broker.on('clientReady', function (client) { + broker.persistence.outgoingUpdate(client, packet, function (err) { + t.equal('no such packet', err.message, 'packet not found') + }) + }) + }) +}) + +test('clients with zero-byte clientid and clean=true on MQTT 3.1.1 will get a generated clientId', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: '', + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'connack') + t.equal(packet.returnCode, 0) + t.equal(broker.connectedClients, 1) + t.equal(s.client.version, 4) + }) + broker.on('connectionError', function (client, err) { + t.error(err, 'no error') + }) + broker.on('client', function (client) { + t.ok(client.id.startsWith('aedes_')) + }) +}) + +// [MQTT-3.1.3-7] +test('reject clients with > 23 clientId length in MQTT 3.1.0', function (t) { + t.plan(7) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + var conn = s.client.conn + var end = conn.end + + conn.end = function () { + t.fail('should not call `conn.end()`') + end() + } + + function drain () { + t.pass('should empty connection request queue') + } + + conn._writableState.getBuffer = () => [{ callback: drain }, { callback: drain }] + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: true, + clientId: 'abcdefghijklmnopqrstuvwxyz', + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'connack') + t.equal(packet.returnCode, 2, 'identifier rejected') + t.equal(broker.connectedClients, 0) + t.equal(s.client.version, null) + }) + broker.on('connectionError', function (client, err) { + t.equal(err.message, 'identifier rejected') + }) +}) + +test('connect clients with > 23 clientId length using aedes maxClientsIdLength option in MQTT 3.1.0', function (t) { + t.plan(4) + + const broker = aedes({ maxClientsIdLength: 26 }) + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 3, + clean: true, + clientId: 
'abcdefghijklmnopqrstuvwxyz', + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'connack') + t.equal(packet.returnCode, 0) + t.equal(broker.connectedClients, 1) + t.equal(s.client.version, 3) + }) + broker.on('connectionError', function (client, err) { + t.error(err, 'no error') + }) +}) + +test('connect with > 23 clientId length in MQTT 3.1.1', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'abcdefghijklmnopqrstuvwxyz', + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'connack') + t.equal(packet.returnCode, 0) + t.equal(broker.connectedClients, 1) + t.equal(s.client.version, 4) + }) + broker.on('connectionError', function (client, err) { + t.error(err, 'no error') + }) +}) + +// [MQTT-3.1.0-1] +test('the first Packet MUST be a CONNECT Packet', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const packet = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false + } + const s = setup(broker) + s.inStream.write(packet) + + broker.on('connectionError', function (client, err) { + t.equal(err.message, 'Invalid protocol') + }) + setImmediate(() => { + t.ok(s.conn.destroyed, 'close connection if first packet is not a CONNECT') + s.conn.destroy() + }) +}) + +// [MQTT-3.1.0-2] +test('second CONNECT Packet sent from a Client as a protocol violation and disconnect the Client', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const packet = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client', + keepalive: 0 + } + broker.on('clientError', function (client, err) { + t.equal(err.message, 'Invalid protocol') + }) + const s = connect(setup(broker), { clientId: 'abcde' }) + s.broker.on('clientReady', function () { + t.ok(broker.clients.abcde.connected) + // destory client when there is a 2nd cmd:connect, even the clientId is dfferent + s.inStream.write(packet) + setImmediate(() => { + t.equal(broker.clients.abcde, undefined, 'client instance is removed') + t.ok(s.conn.destroyed, 'close connection if packet is a CONNECT after network is established') + }) + }) +}) + +test('connect handler calls done when preConnect throws error', function (t) { + t.plan(1) + + const broker = aedes({ + preConnect: function (client, done) { + done(Error('error in preconnect')) + } + }) + + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + var handleConnect = require('../lib/handlers/connect') + + handleConnect(s.client, {}, function done (err) { + t.equal(err.message, 'error in preconnect', 'calls done with error') + }) +}) + +test('handler calls done when disconnect or unknown packet cmd is received', function (t) { + t.plan(2) + + const broker = aedes() + + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + var handle = require('../lib/handlers/index') + + handle(s.client, { cmd: 'disconnect' }, function done () { + t.pass('calls done when disconnect cmd is received') + }) + + handle(s.client, { cmd: 'fsfadgragae' }, function done () { + t.pass('calls done when unknown cmd is received') + }) +}) + +test('reject second CONNECT Packet sent while first CONNECT still in preConnect stage', function (t) { + t.plan(2) + + const packet1 = { + cmd: 
'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client-1', + keepalive: 0 + } + const packet2 = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client-2', + keepalive: 0 + } + + var i = 0 + const broker = aedes({ + preConnect: function (client, done) { + var ms = i++ === 0 ? 2000 : 500 + setTimeout(function () { + done(null, true) + }, ms) + } + }) + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + broker.on('connectionError', function (client, err) { + t.equal(err.info.clientId, 'my-client-2') + t.equal(err.message, 'Invalid protocol') + }) + + const msg = async (s, ms, msg) => { + await delay(ms) + s.inStream.write(msg) + } + + ;(async () => { + await Promise.all([msg(s, 100, packet1), msg(s, 200, packet2)]) + })().catch( + (error) => { + t.fail(error) + } + ) +}) + +// [MQTT-3.1.2-1], Guarded in mqtt-packet +test('reject clients with wrong protocol name', function (t) { + t.plan(2) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT_hello', + protocolVersion: 3, + clean: true, + clientId: 'my-client', + keepalive: 0 + }) + s.outStream.on('data', function (packet) { + t.fail('no data sent') + }) + broker.on('connectionError', function (client, err) { + t.equal(err.message, 'Invalid protocolId') + t.equal(broker.connectedClients, 0) + }) +}) + +test('After first CONNECT Packet, others are queued until \'connect\' event', function (t) { + t.plan(2) + + const queueLimit = 50 + const broker = aedes({ queueLimit }) + t.tearDown(broker.close.bind(broker)) + + const publishP = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false + } + + const connectP = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'abcde', + keepalive: 0 + } + + const s = setup(broker) + s.inStream.write(connectP) + + process.once('warning', e => t.fail('Memory leak detected')) + + for (let i = 0; i < queueLimit; i++) { + s.inStream.write(publishP) + } + + broker.on('client', function (client) { + t.equal(client._parser._queue.length, queueLimit, 'Packets have been queued') + + client.once('connected', () => { + t.equal(client._parser._queue, null, 'Queue is empty') + s.conn.destroy() + }) + }) +}) + +test('Test queue limit', function (t) { + t.plan(1) + + const queueLimit = 50 + const broker = aedes({ queueLimit }) + t.tearDown(broker.close.bind(broker)) + + const publishP = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false + } + + const connectP = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'abcde', + keepalive: 0 + } + + const s = setup(broker) + s.inStream.write(connectP) + + process.once('warning', e => t.fail('Memory leak detected')) + + for (let i = 0; i < queueLimit + 1; i++) { + s.inStream.write(publishP) + } + + broker.on('connectionError', function (conn, err) { + t.equal(err.message, 'Client queue limit reached', 'Queue error is thrown') + s.conn.destroy() + }) +}) + +;[['fail with no error msg', 3, null, false], ['succeed with no error msg', 9, null, true], ['fail with error msg', 6, new Error('connection banned'), false], ['succeed with error msg', 6, new Error('connection banned'), true]].forEach(function (ele, idx) { + const title = ele[0] + const plan = ele[1] + const err = ele[2] + const ok = ele[3] + test('preConnect 
handler - ' + title, function (t) { + t.plan(plan) + + const broker = aedes({ + preConnect: function (client, done) { + t.ok(client.connecting) + t.notOk(client.connected) + t.equal(client.version, null) + return done(err, ok) + } + }) + t.tearDown(broker.close.bind(broker)) + + const s = setup(broker) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client-' + idx, + keepalive: 0 + }) + broker.on('client', function (client) { + if (ok && !err) { + t.ok(client.connecting) + t.notOk(client.connected) + t.pass('register client ok') + } else { + t.fail('no reach here') + } + }) + broker.on('clientReady', function (client) { + t.notOk(client.connecting) + t.ok(client.connected) + t.pass('connect ok') + }) + broker.on('clientError', function (client, err) { + t.fail('no client error') + }) + broker.on('connectionError', function (client, err) { + if (err) { + t.notOk(client.connecting) + t.notOk(client.connected) + t.equal(err.message, 'connection banned') + } else { + t.fail('no connection error') + } + }) + }) +}) + +// websocket-stream based connections +test('websocket clients have access to the request object', function (t) { + t.plan(3) + + const port = 4883 + const broker = aedes() + broker.on('client', function (client) { + if (client.req) { + t.pass('client request object present') + if (client.req.headers) { + t.equal('sample', client.req.headers['x-test-protocol']) + } + } else { + t.fail('no request object present') + } + }) + + const server = http.createServer() + ws.createServer({ + server: server + }, broker.handle) + + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + const client = mqtt.connect(`ws://localhost:${port}`, { + wsOptions: { + headers: { + 'X-Test-Protocol': 'sample' + } + } + }) + + t.tearDown(() => { + client.end(true) + broker.close() + server.close() + }) +}) + +// test ipAddress property presence when trustProxy is enabled +test('tcp clients have access to the ipAddress from the socket', function (t) { + t.plan(2) + + const port = 4883 + const broker = aedes({ + preConnect: function (client, done) { + if (client && client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal('::ffff:127.0.0.1', client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + }, + decodeProtocol: protocolDecoder, + trustProxy: true + }) + + const server = net.createServer(broker.handle) + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + const client = mqtt.connect({ + port, + keepalive: 0, + clientId: 'mqtt-client', + clean: false + }) + + t.tearDown(() => { + client.end(true) + broker.close() + server.close() + }) +}) + +test('tcp proxied (protocol v1) clients have access to the ipAddress(v4)', function (t) { + t.plan(2) + + const port = 4883 + const clientIp = '192.168.0.140' + const packet = { + cmd: 'connect', + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: true, + clientId: 'my-client-proxyV1', + keepalive: 0 + } + + const buf = mqttPacket.generate(packet) + const src = new proxyProtocol.Peer(clientIp, 12345) + const dst = new proxyProtocol.Peer('127.0.0.1', port) + const protocol = new proxyProtocol.V1BinaryProxyProtocol( + proxyProtocol.INETProtocol.TCP4, + src, + dst, + buf + ).build() + + const broker = aedes({ + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else 
{ + t.fail('no ip address present') + } + done(null, true) + }, + decodeProtocol: protocolDecoder, + trustProxy: true + }) + + const server = net.createServer(broker.handle) + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + const client = net.connect({ + port, + timeout: 0 + }, function () { + client.write(protocol) + }) + + t.tearDown(() => { + client.end() + broker.close() + server.close() + }) +}) + +test('tcp proxied (protocol v2) clients have access to the ipAddress(v4)', function (t) { + t.plan(2) + + const port = 4883 + const clientIp = '192.168.0.140' + const packet = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client-proxyV2' + } + + const protocol = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.LOCAL, + proxyProtocol.TransportProtocol.DGRAM, + new proxyProtocol.IPv4ProxyAddress( + proxyProtocol.IPv4Address.createFrom(clientIp.split('.')), + 12345, + proxyProtocol.IPv4Address.createFrom([127, 0, 0, 1]), + port + ), + mqttPacket.generate(packet) + ).build() + + const broker = aedes({ + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + }, + decodeProtocol: protocolDecoder, + trustProxy: true + }) + + const server = net.createServer(broker.handle) + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + const client = net.createConnection( + { + port, + timeout: 0 + }, function () { + client.write(Buffer.from(protocol)) + } + ) + + t.tearDown(() => { + client.end() + broker.close() + server.close() + }) +}) + +test('tcp proxied (protocol v2) clients have access to the ipAddress(v6)', function (t) { + t.plan(2) + + const port = 4883 + const clientIpArray = [0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 192, 168, 1, 128] + const clientIp = '::ffff:c0a8:180:' + const packet = { + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: 'my-client-proxyV2' + } + + const protocol = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.PROXY, + proxyProtocol.TransportProtocol.STREAM, + new proxyProtocol.IPv6ProxyAddress( + proxyProtocol.IPv6Address.createFrom(clientIpArray), + 12345, + proxyProtocol.IPv6Address.createWithEmptyAddress(), + port + ), + mqttPacket.generate(packet) + ).build() + + const broker = aedes({ + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + }, + decodeProtocol: protocolDecoder, + trustProxy: true + }) + + const server = net.createServer(broker.handle) + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + const client = net.createConnection( + { + port, + timeout: 0 + }, function () { + client.write(Buffer.from(protocol)) + } + ) + + t.tearDown(() => { + client.end() + broker.close() + server.close() + }) +}) + +test('websocket clients have access to the ipAddress from the socket (if no ip header)', function (t) { + t.plan(2) + + const clientIp = '::ffff:127.0.0.1' + const port = 4883 + const broker = aedes({ + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + 
done(null, true) + }, + decodeProtocol: protocolDecoder, + trustProxy: true + }) + + const server = http.createServer() + ws.createServer({ + server: server + }, broker.handle) + + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + const client = mqtt.connect(`ws://localhost:${port}`) + + t.tearDown(() => { + client.end(true) + broker.close() + server.close() + }) +}) + +test('websocket proxied clients have access to the ipAddress from x-real-ip header', function (t) { + t.plan(2) + + const clientIp = '192.168.0.140' + const port = 4883 + const broker = aedes({ + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + }, + decodeProtocol: protocolDecoder, + trustProxy: true + }) + + const server = http.createServer() + ws.createServer({ + server: server + }, broker.handle) + + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + const client = mqtt.connect(`ws://localhost:${port}`, { + wsOptions: { + headers: { + 'X-Real-Ip': clientIp + } + } + }) + + t.tearDown(() => { + client.end(true) + broker.close() + server.close() + }) +}) + +test('websocket proxied clients have access to the ipAddress from x-forwarded-for header', function (t) { + t.plan(2) + + const clientIp = '192.168.0.140' + const port = 4883 + const broker = aedes({ + preConnect: function (client, done) { + if (client.connDetails && client.connDetails.ipAddress) { + client.ip = client.connDetails.ipAddress + t.equal(clientIp, client.ip) + } else { + t.fail('no ip address present') + } + done(null, true) + }, + decodeProtocol: protocolDecoder, + trustProxy: true + }) + + const server = http.createServer() + ws.createServer({ + server: server + }, broker.handle) + + server.listen(port, function (err) { + t.error(err, 'no error') + }) + + const client = mqtt.connect(`ws://localhost:${port}`, { + wsOptions: { + headers: { + 'X-Forwarded-For': clientIp + } + } + }) + + t.tearDown(() => { + client.end(true) + broker.close() + server.close() + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/events.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/events.js new file mode 100644 index 000000000..70549b43f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/events.js @@ -0,0 +1,182 @@ +'use strict' + +const { test } = require('tap') +const { setup, connect, subscribe } = require('./helper') +const aedes = require('../') + +test('publishes an hearbeat', function (t) { + t.plan(2) + + const broker = aedes({ + heartbeatInterval: 10 // ms + }) + t.tearDown(broker.close.bind(broker)) + + broker.subscribe('$SYS/+/heartbeat', function (message, cb) { + const id = message.topic.match(/\$SYS\/([^/]+)\/heartbeat/)[1] + t.equal(id, broker.id, 'broker id matches') + t.deepEqual(message.payload.toString(), id, 'message has id as the payload') + }) +}) + +;['$mcollina', '$SYS'].forEach(function (topic) { + test('does not forward $ prefixed topics to # subscription - ' + topic, function (t) { + t.plan(4) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, '#', 0, function () { + s.outStream.once('data', function (packet) { + t.fail('no packet should be received') + }) + + s.broker.mq.emit({ + cmd: 'publish', + topic: topic + '/hello', + payload: 'world' + }, function () { + t.pass('nothing happened') + }) + }) + }) + + 
test('does not forward $ prefixed topics to +/# subscription - ' + topic, function (t) { + t.plan(4) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, '+/#', 0, function () { + s.outStream.once('data', function (packet) { + t.fail('no packet should be received') + }) + + s.broker.mq.emit({ + cmd: 'publish', + topic: topic + '/hello', + payload: 'world' + }, function () { + t.pass('nothing happened') + }) + }) + }) +}) + +test('does not store $SYS topics to QoS 1 # subscription', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const opts = { clean: false, clientId: 'abcde' } + var s = connect(setup(broker), opts) + + subscribe(t, s, '#', 1, function () { + s.inStream.end() + + s.broker.publish({ + cmd: 'publish', + topic: '$SYS/hello', + payload: 'world', + qos: 1 + }, function () { + s = connect(setup(broker), { clean: false, clientId: 'abcde' }) + + s.outStream.once('data', function (packet) { + t.fail('no packet should be received') + }) + }) + }) +}) + +test('Emit event when receives a ping', { timeout: 2000 }, function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('ping', function (packet, client) { + if (client && client) { + t.equal(client.id, 'abcde') + t.equal(packet.cmd, 'pingreq') + t.equal(packet.payload, null) + t.equal(packet.topic, null) + t.equal(packet.length, 0) + } + }) + + const s = connect(setup(broker), { clientId: 'abcde' }) + + s.inStream.write({ + cmd: 'pingreq' + }) +}) + +test('Emit event when broker closed', function (t) { + t.plan(1) + + const broker = aedes() + broker.once('closed', function () { + t.ok(true) + }) + broker.close() +}) + +test('Emit closed event one only when double broker.close()', function (t) { + t.plan(4) + + const broker = aedes() + broker.on('closed', function () { + t.pass('closed') + }) + t.notOk(broker.closed) + broker.close() + t.ok(broker.closed) + broker.close() + t.ok(broker.closed) +}) + +test('Test backpressure aedes published function', function (t) { + t.plan(2) + + var publishCount = 10 + var count = 0 + + const broker = aedes({ + published: function (packet, client, done) { + if (client) { + count++ + setTimeout(() => { + publisher.end() + done() + }) + } else { done() } + } + }) + + const mqtt = require('mqtt') + const server = require('net').createServer(broker.handle) + var publisher + + server.listen(0, function () { + const port = server.address().port + publisher = mqtt.connect({ port: port, host: 'localhost', clean: true, keepalive: 30 }) + + function next () { + if (--publishCount > 0) { process.nextTick(publish) } + } + + function publish () { + publisher.publish('test', 'payload', next) + } + + publisher.on('connect', publish) + publisher.on('end', function () { + t.ok(count > publishCount) + t.equal(publishCount, 0) + broker.close() + server.close() + }) + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/helper.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/helper.js new file mode 100644 index 000000000..fbf7983f0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/helper.js @@ -0,0 +1,119 @@ +'use strict' + +const duplexify = require('duplexify') +const mqtt = require('mqtt-connection') +const { through } = require('../lib/utils') +const util = require('util') +const aedes = require('../') + +const parseStream = mqtt.parseStream +const generateStream = mqtt.generateStream +var clients = 0 + +function setup 
(broker) { + const inStream = generateStream() + const outStream = parseStream() + const conn = duplexify(outStream, inStream) + + broker = broker || aedes() + + return { + client: broker.handle(conn), + conn: conn, + inStream: inStream, + outStream: outStream, + broker: broker + } +} + +function connect (s, opts, connected) { + s = Object.create(s) + s.outStream = s.outStream.pipe(through(filter)) + + opts = opts || {} + + opts.cmd = 'connect' + opts.protocolId = opts.protocolId || 'MQTT' + opts.protocolVersion = opts.protocolVersion || 4 + opts.clean = !!opts.clean + opts.clientId = opts.clientId || 'my-client-' + clients++ + opts.keepalive = opts.keepalive || 0 + + s.inStream.write(opts) + + return s + + function filter (packet, enc, cb) { + if (packet.cmd !== 'publish') { + delete packet.topic + delete packet.payload + } + + // using setImmediate to wait for connected to be fired + // setup also needs to return first + if (packet.cmd !== 'connack') { + setImmediate(this.push.bind(this, packet)) + } else if (connected && packet.returnCode === 0) { + setImmediate(connected, packet) + } + cb() + } +} + +function noError (s, t) { + s.broker.on('clientError', function (client, err) { + if (err) throw err + t.notOk(err, 'must not error') + }) + + return s +} + +function subscribe (t, subscriber, topic, qos, done) { + subscriber.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: [{ + topic: topic, + qos: qos + }] + }) + + subscriber.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'suback') + t.deepEqual(packet.granted, [qos]) + t.equal(packet.messageId, 24) + + if (done) { + done(null, packet) + } + }) +} + +// subs: [{topic:, qos:}] +function subscribeMultiple (t, subscriber, subs, expectedGranted, done) { + subscriber.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: subs + }) + + subscriber.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'suback') + t.deepEqual(packet.granted, expectedGranted) + t.equal(packet.messageId, 24) + + if (done) { + done(null, packet) + } + }) +} + +module.exports = { + setup: setup, + connect: connect, + noError: noError, + subscribe: subscribe, + subscribeMultiple: subscribeMultiple, + delay: util.promisify(setTimeout) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/keep-alive.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/keep-alive.js new file mode 100644 index 000000000..3fed3677d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/keep-alive.js @@ -0,0 +1,92 @@ +'use strict' + +const { test } = require('tap') +const eos = require('end-of-stream') +const Faketimers = require('@sinonjs/fake-timers') +const { setup, connect, noError } = require('./helper') +const aedes = require('../') + +test('supports pingreq/pingresp', function (t) { + t.plan(1) + + const s = noError(connect(setup())) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('keepaliveTimeout', function (client) { + t.fail('keep alive should not timeout') + }) + + s.inStream.write({ + cmd: 'pingreq' + }) + + s.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'pingresp', 'the response is a pingresp') + }) +}) + +test('supports keep alive disconnections', function (t) { + t.plan(2) + + const clock = Faketimers.install() + const s = connect(setup(), { keepalive: 1 }) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('keepaliveTimeout', function (client) { + t.pass('keep alive timeout') + }) + eos(s.conn, function () { + t.pass('waits 1 and 
a half the keepalive timeout') + }) + + setTimeout(() => { + clock.uninstall() + }, 1.5) + clock.tick(1.5) +}) + +test('supports keep alive disconnections after a pingreq', function (t) { + t.plan(3) + + const clock = Faketimers.install() + const s = connect(setup(), { keepalive: 1 }) + t.tearDown(s.broker.close.bind(s.broker)) + + eos(s.conn, function () { + t.pass('waits 1 and a half the keepalive timeout') + }) + s.broker.on('keepaliveTimeout', function (client) { + t.pass('keep alive timeout') + }) + s.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'pingresp', 'the response is a pingresp') + }) + setTimeout(() => { + s.inStream.write({ + cmd: 'pingreq' + }) + clock.uninstall() + }, 1) + clock.tick(3) +}) + +test('disconnect if a connect does not arrive in time', function (t) { + t.plan(2) + + const clock = Faketimers.install() + const s = setup(aedes({ + connectTimeout: 500 + })) + t.tearDown(s.broker.close.bind(s.broker)) + + s.client.on('error', function (err) { + t.equal(err.message, 'connect did not arrive in time') + }) + eos(s.conn, function () { + t.pass('waits waitConnectTimeout before ending') + }) + setTimeout(() => { + clock.uninstall() + }, 1000) + clock.tick(1000) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/meta.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/meta.js new file mode 100644 index 000000000..b822ed453 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/meta.js @@ -0,0 +1,327 @@ +'use strict' + +const { test } = require('tap') +const { setup, connect, subscribe, noError } = require('./helper') +const aedes = require('../') + +test('count connected clients', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + t.equal(broker.connectedClients, 0, 'no connected clients') + + connect(setup(broker), { + }, function () { + t.equal(broker.connectedClients, 1, 'one connected clients') + + const last = connect(setup(broker), { + }, function () { + t.equal(broker.connectedClients, 2, 'two connected clients') + + last.conn.destroy() + + // needed because destroy() will do the trick before + // the next tick + setImmediate(function () { + t.equal(broker.connectedClients, 1, 'one connected clients') + }) + }) + }) +}) + +test('call published method', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.published = function (packet, client, done) { + t.equal(packet.topic, 'hello', 'topic matches') + t.equal(packet.payload.toString(), 'world', 'payload matches') + t.equal(client, null, 'no client') + done() + } + + broker.publish({ + topic: 'hello', + payload: Buffer.from('world') + }, function (err) { + t.error(err, 'no error') + }) +}) + +test('call published method with client', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.published = function (packet, client, done) { + // for internal messages, client will be null + if (client) { + t.equal(packet.topic, 'hello', 'topic matches') + t.equal(packet.payload.toString(), 'world', 'payload matches') + t.equal(packet.qos, 1) + t.equal(packet.messageId, 42) + done() + } + } + + const s = connect(setup(broker)) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + messageId: 42 + }) +}) + +test('emit publish event with client - QoS 0', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('publish', 
function (packet, client) { + // for internal messages, client will be null + if (client) { + t.equal(packet.qos, 0) + t.equal(packet.topic, 'hello', 'topic matches') + t.equal(packet.payload.toString(), 'world', 'payload matches') + } + }) + + const s = connect(setup(broker)) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0 + }) +}) + +test('emit publish event with client - QoS 1', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('publish', function (packet, client) { + // for internal messages, client will be null + if (client) { + t.equal(packet.qos, 1) + t.equal(packet.messageId, 42) + t.equal(packet.topic, 'hello', 'topic matches') + t.equal(packet.payload.toString(), 'world', 'payload matches') + } + }) + + const s = connect(setup(broker)) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + messageId: 42 + }) +}) + +test('emit subscribe event', function (t) { + t.plan(6) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = connect(setup(broker), { clientId: 'abcde' }) + + broker.on('subscribe', function (subscriptions, client) { + t.deepEqual(subscriptions, [{ + topic: 'hello', + qos: 0 + }], 'topic matches') + t.equal(client.id, 'abcde', 'client matches') + }) + + subscribe(t, s, 'hello', 0, function () { + t.pass('subscribe completed') + }) +}) + +test('emit subscribe event if unrecognized params in subscribe packet structure', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = noError(connect(setup(broker))) + const subs = [{ topic: 'hello', qos: 0 }] + + broker.on('subscribe', function (subscriptions, client) { + t.equal(subscriptions, subs) + t.deepEqual(client, s.client) + }) + + s.client.subscribe({ + subscriptions: subs, + restore: true + }, function (err) { + t.error(err) + }) +}) + +test('emit unsubscribe event', function (t) { + t.plan(6) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = connect(setup(broker), { clean: true, clientId: 'abcde' }) + + broker.on('unsubscribe', function (unsubscriptions, client) { + t.deepEqual(unsubscriptions, [ + 'hello' + ], 'unsubscription matches') + t.equal(client.id, 'abcde', 'client matches') + }) + + subscribe(t, s, 'hello', 0, function () { + s.inStream.write({ + cmd: 'unsubscribe', + messageId: 43, + unsubscriptions: ['hello'] + }) + + s.outStream.once('data', function (packet) { + t.pass('subscribe completed') + }) + }) +}) + +test('emit unsubscribe event if unrecognized params in unsubscribe packet structure', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = noError(connect(setup(broker))) + const unsubs = [{ topic: 'hello', qos: 0 }] + + broker.on('unsubscribe', function (unsubscriptions, client) { + t.equal(unsubscriptions, unsubs) + t.deepEqual(client, s.client) + }) + + s.client.unsubscribe({ + unsubscriptions: unsubs, + close: true + }, function (err) { + t.error(err) + }) +}) + +test('dont emit unsubscribe event on client close', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = noError(connect(setup(broker), { clientId: 'abcde' }), t) + + broker.on('unsubscribe', function (unsubscriptions, client) { + t.error('unsubscribe should not be emitted') + }) + + subscribe(t, s, 'hello', 0, function () { + s.inStream.end({ + cmd: 'disconnect' + 
}) + s.outStream.once('data', function (packet) { + t.pass('unsubscribe completed') + }) + }) +}) + +test('emit clientDisconnect event', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('clientDisconnect', function (client) { + t.equal(client.id, 'abcde', 'client matches') + }) + + const s = noError(connect(setup(broker), { clientId: 'abcde' }), t) + + s.inStream.end({ + cmd: 'disconnect' + }) + s.outStream.resume() +}) + +test('emits client', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + t.equal(client.id, 'abcde', 'clientId matches') + }) + + connect(setup(broker), { + clientId: 'abcde' + }) +}) + +test('get aedes version', function (t) { + t.plan(1) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + t.equal(broker.version, require('../package.json').version) +}) + +test('connect and connackSent event', { timeout: 50 }, function (t) { + t.plan(3) + + const s = setup() + t.tearDown(s.broker.close.bind(s.broker)) + + const clientId = 'my-client' + + s.broker.on('connackSent', function (packet, client) { + t.equal(packet.returnCode, 0) + t.equal(client.id, clientId, 'connackSent event and clientId matches') + }) + + s.inStream.write({ + cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + clientId: clientId, + keepalive: 0 + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, { + cmd: 'connack', + returnCode: 0, + length: 2, + qos: 0, + retain: false, + dup: false, + topic: null, + payload: null, + sessionPresent: false + }, 'successful connack') + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/not-blocking.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/not-blocking.js new file mode 100644 index 000000000..2c8149a62 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/not-blocking.js @@ -0,0 +1,209 @@ +'use strict' + +const { test } = require('tap') +const mqtt = require('mqtt') +const net = require('net') +const Faketimers = require('@sinonjs/fake-timers') +const aedes = require('../') + +test('connect 200 concurrent clients', function (t) { + t.plan(3) + + const broker = aedes() + const server = net.createServer(broker.handle) + const total = 200 + + server.listen(0, function (err) { + t.error(err, 'no error') + + const clock = Faketimers.createClock() + t.tearDown(clock.reset.bind(clock)) + + const port = server.address().port + + var connected = 0 + var clients = [] + clock.setTimeout(function () { + t.equal(clients.length, total) + t.equal(connected, total) + for (var i = 0; i < clients.length; i++) { + clients[i].end() + } + broker.close() + server.close() + }, total) + + for (var i = 0; i < total; i++) { + clients[i] = mqtt.connect({ + port: port, + keepalive: 0 + }).on('connect', function () { + connected++ + if ((connected % (total / 10)) === 0) { + console.log('connected', connected) + } + clock.tick(1) + }).on('error', function () { + clock.tick(1) + }) + } + }) +}) + +test('do not block after a subscription', function (t) { + t.plan(3) + + const broker = aedes() + const server = net.createServer(broker.handle) + const total = 10000 + var sent = 0 + var received = 0 + + server.listen(0, function (err) { + t.error(err, 'no error') + + const clock = Faketimers.createClock() + t.tearDown(clock.reset.bind(clock)) + + const clockId = clock.setTimeout(finish, total) + + const port = server.address().port + + const 
publisher = mqtt.connect({ + port: port, + keepalive: 0 + }).on('error', function (err) { + clock.clearTimeout(clockId) + t.fail(err) + }) + + var subscriber + + function immediatePublish () { + setImmediate(publish) + } + + function publish () { + if (sent === total) { + publisher.end() + } else { + sent++ + publisher.publish('test', 'payload', immediatePublish) + } + } + + function startSubscriber () { + subscriber = mqtt.connect({ + port: port, + keepalive: 0 + }).on('error', function (err) { + if (err.code !== 'ECONNRESET') { + clock.clearTimeout(clockId) + t.fail(err) + } + }) + + subscriber.subscribe('test', publish) + + subscriber.on('message', function () { + if (received % (total / 10) === 0) { + console.log('sent / received', sent, received) + } + received++ + clock.tick(1) + }) + } + + publisher.on('connect', startSubscriber) + + function finish () { + subscriber.end() + publisher.end() + broker.close() + server.close() + t.equal(total, sent, 'messages sent') + t.equal(total, received, 'messages received') + } + }) +}) + +test('do not block with overlapping subscription', function (t) { + t.plan(3) + + const broker = aedes({ concurrency: 15 }) + const server = net.createServer(broker.handle) + const total = 10000 + var sent = 0 + var received = 0 + + server.listen(0, function (err) { + t.error(err, 'no error') + + const clock = Faketimers.createClock() + t.tearDown(clock.reset.bind(clock)) + + const clockId = clock.setTimeout(finish, total) + + const port = server.address().port + + const publisher = mqtt.connect({ + port: port, + keepalive: 0 + }).on('error', function (err) { + clock.clearTimeout(clockId) + t.fail(err) + }) + + var subscriber + + function immediatePublish (e) { + setImmediate(publish) + } + + function publish () { + if (sent === total) { + publisher.end() + } else { + sent++ + publisher.publish('test', 'payload', immediatePublish) + } + } + + function startSubscriber () { + subscriber = mqtt.connect({ + port: port, + keepalive: 0 + }).on('error', function (err) { + if (err.code !== 'ECONNRESET') { + clock.clearTimeout(clockId) + t.fail(err) + } + }) + + subscriber.subscribe('#', function () { + subscriber.subscribe('test', function () { + immediatePublish() + }) + }) + + subscriber.on('message', function () { + if (received % (total / 10) === 0) { + console.log('sent / received', sent, received) + } + received++ + clock.tick(1) + }) + } + + publisher.on('connect', startSubscriber) + + function finish () { + subscriber.end() + publisher.end() + broker.close() + server.close() + t.equal(total, sent, 'messages sent') + t.equal(total, received, 'messages received') + } + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/qos1.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/qos1.js new file mode 100644 index 000000000..202271ae8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/qos1.js @@ -0,0 +1,831 @@ +'use strict' + +const { test } = require('tap') +const concat = require('concat-stream') +const { setup, connect, subscribe } = require('./helper') +const aedes = require('../') + +test('publish QoS 1', function (t) { + t.plan(1) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + const expected = { + cmd: 'puback', + messageId: 42, + qos: 0, + dup: false, + length: 2, + retain: false + } + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + + s.outStream.on('data', function (packet) { + t.deepEqual(packet, expected, 'packet 
must match') + }) +}) + +test('publish QoS 1 throws error', function (t) { + t.plan(1) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.persistence.subscriptionsByTopic = function (packet, done) { + return done(new Error('Throws error')) + } + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + + s.broker.on('error', function (err) { + t.equal('Throws error', err.message, 'Throws error') + }) +}) + +test('publish QoS 1 throws error on write', function (t) { + t.plan(1) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('client', function (client) { + client.connected = false + client.connecting = false + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + }) + + s.broker.on('clientError', function (client, err) { + t.equal(err.message, 'connection closed', 'throws error') + }) +}) + +test('publish QoS 1 and check offline queue', function (t) { + t.plan(13) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker), { clean: false }) + const subscriberClient = { + id: 'abcde' + } + const subscriber = connect(setup(broker), { clean: false, clientId: subscriberClient.id }) + var expected = { + cmd: 'publish', + topic: 'hello', + qos: 1, + dup: false, + retain: false + } + const expectedAck = { + cmd: 'puback', + retain: false, + qos: 0, + dup: false, + length: 2, + messageId: 10 + } + var sent = { + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 10, + retain: false, + dup: false + } + var queue = [] + subscribe(t, subscriber, 'hello', 1, function () { + publisher.outStream.on('data', function (packet) { + t.deepEqual(packet, expectedAck, 'ack packet must patch') + }) + subscriber.outStream.on('data', function (packet) { + queue.push(packet) + delete packet.payload + delete packet.length + t.notEqual(packet.messageId, undefined, 'messageId is assigned a value') + t.notEqual(packet.messageId, 10, 'messageId should be unique') + expected.messageId = packet.messageId + t.deepEqual(packet, expected, 'publish packet must patch') + if (queue.length === 2) { + setImmediate(() => { + for (var i = 0; i < queue.length; i++) { + broker.persistence.outgoingClearMessageId(subscriberClient, queue[i], function (_, origPacket) { + if (origPacket) { + delete origPacket.brokerId + delete origPacket.brokerCounter + delete origPacket.payload + delete origPacket.messageId + delete sent.payload + delete sent.messageId + t.deepEqual(origPacket, sent, 'origPacket must match') + } + }) + } + }) + } + }) + publisher.inStream.write(sent) + sent.payload = 'world2world' + publisher.inStream.write(sent) + }) +}) + +test('publish QoS 1 and empty offline queue', function (t) { + t.plan(13) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker), { clean: false }) + const subscriberClient = { + id: 'abcde' + } + const subscriber = connect(setup(broker), { clean: false, clientId: subscriberClient.id }) + var expected = { + cmd: 'publish', + topic: 'hello', + qos: 1, + dup: false, + retain: false + } + const expectedAck = { + cmd: 'puback', + retain: false, + qos: 0, + dup: false, + length: 2, + messageId: 10 + } + var sent = { + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 10, + retain: false, + dup: false + } + var queue = [] + subscribe(t, subscriber, 'hello', 1, 
function () { + publisher.outStream.on('data', function (packet) { + t.deepEqual(packet, expectedAck, 'ack packet must patch') + }) + subscriber.outStream.on('data', function (packet) { + queue.push(packet) + delete packet.payload + delete packet.length + t.notEqual(packet.messageId, undefined, 'messageId is assigned a value') + t.notEqual(packet.messageId, 10, 'messageId should be unique') + expected.messageId = packet.messageId + t.deepEqual(packet, expected, 'publish packet must patch') + if (queue.length === 2) { + setImmediate(() => { + broker.clients[subscriberClient.id].emptyOutgoingQueue(function () { + for (var i = 0; i < queue.length; i++) { + broker.persistence.outgoingClearMessageId(subscriberClient, queue[i], function (_, origPacket) { + t.equal(!!origPacket, false, 'Packet has been removed') + }) + } + }) + }) + } + }) + publisher.inStream.write(sent) + sent.payload = 'world2world' + publisher.inStream.write(sent) + }) +}) + +test('subscribe QoS 1', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.outStream.once('data', function (packet) { + subscriber.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) + t.notEqual(packet.messageId, 42, 'messageId must differ') + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + }) +}) + +test('subscribe QoS 0, but publish QoS 1', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 12, + retain: false + } + + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + }) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + }) +}) + +test('restore QoS 1 subscriptions not clean', function (t) { + t.plan(7) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }, function (connect) { + t.equal(connect.sessionPresent, true, 'session present is set to true') + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + }) + + publisher.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + }) + + subscriber.outStream.once('data', function (packet) { + subscriber.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) + t.notEqual(packet.messageId, 42, 'messageId must differ') + delete 
packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + }) +}) + +test('restore multiple QoS 1 subscriptions not clean w/ authorizeSubscribe', function (t) { + t.plan(11) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + const expected = { + cmd: 'publish', + topic: 'foo', + payload: Buffer.from('bar'), + qos: 1, + dup: false, + length: 10, + retain: false + } + const publisher = connect(setup(broker)) + + subscribe(t, subscriber, 'hello', 1, function () { + subscribe(t, subscriber, 'foo', 1, function () { + subscriber.inStream.end() + broker.authorizeSubscribe = function (client, sub, done) { + done(null, sub.topic === 'hello' ? 123 : sub) + } + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }, function (connect) { + t.equal(connect.sessionPresent, true, 'session present is set to true') + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + publisher.inStream.write({ + cmd: 'publish', + topic: 'foo', + payload: 'bar', + qos: 1, + messageId: 48 + }) + }) + publisher.outStream.on('data', function (packet) { + t.equal(packet.cmd, 'puback') + }) + + subscriber.outStream.on('data', function (packet) { + subscriber.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) + t.notEqual(packet.messageId, 48, 'messageId must differ') + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + }) + }) +}) + +test('remove stored subscriptions if connected with clean=true', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + subscriber = connect(setup(broker), { clean: true, clientId: 'abcde' }, function (packet) { + t.equal(packet.sessionPresent, false, 'session present is set to false') + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + + subscriber.inStream.end() + + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }, function (connect) { + t.equal(connect.sessionPresent, false, 'session present is set to false') + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 43 + }) + }) + + subscriber.outStream.once('data', function (packet) { + t.fail('publish received') + }) + }) + + subscriber.outStream.once('data', function (packet) { + t.fail('publish received') + }) + }) +}) + +test('resend publish on non-clean reconnect QoS 1', function (t) { + t.plan(8) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const opts = { clean: false, clientId: 'abcde' } + var subscriber = connect(setup(broker), opts) + const subscriberClient = { + id: opts.clientId + } + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world world', + qos: 1, + messageId: 
42 + }) + publisher.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + + subscriber = connect(setup(broker), opts) + + subscriber.outStream.once('data', function (packet) { + subscriber.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) + t.notEqual(packet.messageId, 42, 'messageId must differ') + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + setImmediate(() => { + const stream = broker.persistence.outgoingStream(subscriberClient) + stream.pipe(concat(function (list) { + t.equal(list.length, 1, 'should remain one item in queue') + t.deepEqual(list[0].payload, Buffer.from('world world'), 'packet must match') + })) + }) + }) + }) + }) +}) + +test('do not resend QoS 1 packets at each reconnect', function (t) { + t.plan(6) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + + publisher.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + + subscriber.outStream.once('data', function (packet) { + subscriber.inStream.end({ + cmd: 'puback', + messageId: packet.messageId + }) + + t.notEqual(packet.messageId, 42, 'messageId must differ') + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + + const subscriber2 = connect(setup(broker), { clean: false, clientId: 'abcde' }) + + subscriber2.outStream.once('data', function (packet) { + t.fail('this should never happen') + }) + }) + }) + }) +}) + +test('do not resend QoS 1 packets if reconnect is clean', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + + publisher.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + + subscriber = connect(setup(broker), { clean: true, clientId: 'abcde' }) + + subscriber.outStream.once('data', function (packet) { + t.fail('this should never happen') + }) + }) + }) +}) + +test('do not resend QoS 1 packets at reconnect if puback was received', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 1, function () { + const publisher = connect(setup(broker)) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + + publisher.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + }) + + subscriber.outStream.once('data', function (packet) { + subscriber.inStream.end({ + 
cmd: 'puback', + messageId: packet.messageId + }) + + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + + subscriber.outStream.once('data', function (packet) { + t.fail('this should never happen') + }) + }) + }) +}) + +test('remove stored subscriptions after unsubscribe', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.write({ + cmd: 'unsubscribe', + messageId: 43, + unsubscriptions: ['hello'] + }) + + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, { + cmd: 'unsuback', + messageId: 43, + dup: false, + length: 2, + qos: 0, + retain: false + }, 'packet matches') + + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }, function (packet) { + t.equal(packet.sessionPresent, false, 'session present is set to false') + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 43 + }, function () { + subscriber.inStream.end() + }) + + subscriber.outStream.once('data', function (packet) { + t.fail('publish received') + }) + }) + + subscriber.outStream.once('data', function (packet) { + t.fail('publish received') + }) + }) + }) +}) + +test('upgrade a QoS 0 subscription to QoS 1', function (t) { + t.plan(8) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + length: 14, + retain: false, + dup: false + } + + subscribe(t, s, 'hello', 0, function () { + subscribe(t, s, 'hello', 1, function () { + s.outStream.once('data', function (packet) { + t.ok(packet.messageId, 'has messageId') + delete packet.messageId + t.deepEqual(packet, expected, 'packet matches') + }) + + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1 + }) + }) + }) +}) + +test('downgrade QoS 0 publish on QoS 1 subsciption', function (t) { + t.plan(4) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + length: 12, + retain: false, + dup: false + } + + subscribe(t, s, 'hello', 1, function () { + s.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet matches') + }) + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 0 + }) + }) +}) + +test('subscribe and publish QoS 1 in parallel', function (t) { + t.plan(5) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false + } + + broker.on('clientError', function (client, err) { + console.log(err.stack) + // t.fail('no client error') + }) + + s.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + t.equal(packet.messageId, 42, 'messageId must match') + s.outStream.on('data', function (packet) { + if (packet.cmd === 'suback') { + t.deepEqual(packet.granted, 
[1]) + t.equal(packet.messageId, 24) + } + if (packet.cmd === 'publish') { + s.inStream.write({ + cmd: 'puback', + messageId: packet.messageId + }) + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + } + }) + }) + + s.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: [{ + topic: 'hello', + qos: 1 + }] + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42 + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/qos2.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/qos2.js new file mode 100644 index 000000000..001aba559 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/qos2.js @@ -0,0 +1,654 @@ +'use strict' + +const { test } = require('tap') +const concat = require('concat-stream') +const { setup, connect, subscribe } = require('./helper') +const aedes = require('../') + +function publish (t, s, packet, done) { + const msgId = packet.messageId + + s.inStream.write(packet) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, { + cmd: 'pubrec', + messageId: msgId, + length: 2, + dup: false, + retain: false, + qos: 0 + }, 'pubrec must match') + + s.inStream.write({ + cmd: 'pubrel', + messageId: msgId + }) + + s.outStream.once('data', function (packet) { + t.deepEqual(packet, { + cmd: 'pubcomp', + messageId: msgId, + length: 2, + dup: false, + retain: false, + qos: 0 + }, 'pubcomp must match') + + if (done) { + done() + } + }) + }) +} + +function receive (t, subscriber, expected, done) { + subscriber.outStream.once('data', function (packet) { + t.notEqual(packet.messageId, expected.messageId, 'messageId must differ') + + const msgId = packet.messageId + delete packet.messageId + delete expected.messageId + t.deepEqual(packet, expected, 'packet must match') + + subscriber.inStream.write({ + cmd: 'pubrec', + messageId: msgId + }) + + subscriber.outStream.once('data', function (packet) { + subscriber.inStream.write({ + cmd: 'pubcomp', + messageId: msgId + }) + t.deepEqual(packet, { + cmd: 'pubrel', + messageId: msgId, + length: 2, + qos: 1, + retain: false, + dup: false + }, 'pubrel must match') + + if (done) { + done() + } + }) + }) +} + +test('publish QoS 2', function (t) { + t.plan(2) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + const packet = { + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 2, + messageId: 42 + } + publish(t, s, packet) +}) + +test('subscribe QoS 2', function (t) { + t.plan(8) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const toPublish = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + messageId: 42, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 2, function () { + publish(t, publisher, toPublish) + + receive(t, subscriber, toPublish) + }) +}) + +test('publish QoS 2 throws error on write', function (t) { + t.plan(1) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('client', function (client) { + client.connected = false + client.connecting = false + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 2, + messageId: 42 + }) + }) + + s.broker.on('clientError', function (client, err) { + t.equal(err.message, 'connection closed', 'throws error') + }) +}) + +test('pubrec handler calls done when 
outgoingUpdate fails (clean=false)', function (t) { + t.plan(1) + + const s = connect(setup(), { clean: false }) + t.tearDown(s.broker.close.bind(s.broker)) + + var handle = require('../lib/handlers/pubrec.js') + + s.broker.persistence.outgoingUpdate = function (client, pubrel, done) { + done(Error('throws error')) + } + + handle(s.client, { messageId: 42 }, function done () { + t.pass('calls done on error') + }) +}) + +test('client.publish with clean=true subscribption QoS 2', function (t) { + t.plan(8) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const toPublish = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + messageId: 42, + dup: false, + length: 14, + retain: false + } + var brokerClient = null + + broker.on('client', function (client) { + brokerClient = client + + brokerClient.on('error', function (err) { + t.error(err) + }) + }) + + const subscriber = connect(setup(broker), { clean: true }) + + subscribe(t, subscriber, 'hello', 2, function () { + t.pass('subscribed') + receive(t, subscriber, toPublish) + brokerClient.publish(toPublish, function (err) { + t.error(err) + }) + }) +}) + +test('call published method with client with QoS 2', function (t) { + t.plan(9) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const toPublish = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + messageId: 42, + dup: false, + length: 14, + retain: false + } + + broker.published = function (packet, client, cb) { + // Client is null for all server publishes + if (packet.topic.split('/')[0] !== '$SYS') { + t.ok(client, 'client must be passed to published method') + cb() + } + } + + subscribe(t, subscriber, 'hello', 2, function () { + publish(t, publisher, toPublish) + + receive(t, subscriber, toPublish) + }) +}) + +;[true, false].forEach(function (cleanSession) { + test(`authorized forward publish packets in QoS 2 [clean=${cleanSession}]`, function (t) { + t.plan(9) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const opts = { clean: cleanSession } + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker), { ...opts, clientId: 'abcde' }) + const forwarded = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + retain: false, + dup: false, + messageId: undefined + } + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + retain: false, + length: 14, + dup: false + } + broker.authorizeForward = function (client, packet) { + forwarded.brokerId = broker.id + forwarded.brokerCounter = broker.counter + t.deepEqual(packet, forwarded, 'forwarded packet must match') + return packet + } + + subscribe(t, subscriber, 'hello', 2, function () { + subscriber.outStream.once('data', function (packet) { + t.notEqual(packet.messageId, 42) + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + + publish(t, publisher, { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + retain: false, + messageId: 42, + dup: false + }, function () { + const stream = broker.persistence.outgoingStream({ id: 'abcde' }) + stream.pipe(concat(function (list) { + if (cleanSession) { + t.equal(list.length, 0, 'should have empty item in queue') + } else { + t.equal(list.length, 1, 'should have one item in queue') + } + })) + }) + }) + }) +}) + +;[true, 
false].forEach(function (cleanSession) { + test(`unauthorized forward publish packets in QoS 2 [clean=${cleanSession}]`, function (t) { + t.plan(6) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const opts = { clean: cleanSession } + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker), { ...opts, clientId: 'abcde' }) + + broker.authorizeForward = function (client, packet) { + + } + + subscribe(t, subscriber, 'hello', 2, function () { + subscriber.outStream.once('data', function (packet) { + t.fail('should not receive any packets') + }) + + publish(t, publisher, { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + retain: false, + messageId: 42, + dup: false + }, function () { + const stream = broker.persistence.outgoingStream({ id: 'abcde' }) + stream.pipe(concat(function (list) { + t.equal(list.length, 0, 'should empty in queue') + })) + }) + }) + }) +}) + +test('subscribe QoS 0, but publish QoS 2', function (t) { + t.plan(6) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 12, + retain: false + } + + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + }) + + publish(t, publisher, { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + retain: false, + messageId: 42, + dup: false + }) + }) +}) + +test('subscribe QoS 1, but publish QoS 2', function (t) { + t.plan(6) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.outStream.once('data', function (packet) { + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + + publish(t, publisher, { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + retain: false, + messageId: 42, + dup: false + }) + }) +}) + +test('restore QoS 2 subscriptions not clean', function (t) { + t.plan(9) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + dup: false, + length: 14, + messageId: 42, + retain: false + } + + subscribe(t, subscriber, 'hello', 2, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }, function (connect) { + t.equal(connect.sessionPresent, true, 'session present is set to true') + publish(t, publisher, expected) + }) + + receive(t, subscriber, expected) + }) +}) + +test('resend publish on non-clean reconnect QoS 2', function (t) { + t.plan(8) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const opts = { clean: false, clientId: 'abcde' } + var subscriber = connect(setup(broker), opts) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + dup: false, + 
length: 14, + messageId: 42, + retain: false + } + + subscribe(t, subscriber, 'hello', 2, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + publish(t, publisher, expected, function () { + subscriber = connect(setup(broker), opts) + + receive(t, subscriber, expected) + }) + }) +}) + +test('resend pubrel on non-clean reconnect QoS 2', function (t) { + t.plan(9) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const opts = { clean: false, clientId: 'abcde' } + var subscriber = connect(setup(broker), opts) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + dup: false, + length: 14, + messageId: 42, + retain: false + } + + subscribe(t, subscriber, 'hello', 2, function () { + subscriber.inStream.end() + + const publisher = connect(setup(broker)) + + publish(t, publisher, expected, function () { + subscriber = connect(setup(broker), opts) + + subscriber.outStream.once('data', function (packet) { + t.notEqual(packet.messageId, expected.messageId, 'messageId must differ') + + const msgId = packet.messageId + delete packet.messageId + delete expected.messageId + t.deepEqual(packet, expected, 'packet must match') + + subscriber.inStream.write({ + cmd: 'pubrec', + messageId: msgId + }) + + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, { + cmd: 'pubrel', + messageId: msgId, + length: 2, + qos: 1, + retain: false, + dup: false + }, 'pubrel must match') + + subscriber.inStream.end() + + subscriber = connect(setup(broker), opts) + + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, { + cmd: 'pubrel', + messageId: msgId, + length: 2, + qos: 1, + retain: false, + dup: false + }, 'pubrel must match') + + subscriber.inStream.write({ + cmd: 'pubcomp', + messageId: msgId + }) + }) + }) + }) + }) + }) +}) + +test('publish after disconnection', function (t) { + t.plan(10) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const toPublish = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + messageId: 42, + dup: false, + length: 14, + retain: false + } + const toPublish2 = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('worl2'), + qos: 2, + messageId: 43, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 2, function () { + publish(t, publisher, toPublish) + + receive(t, subscriber, toPublish, function () { + publish(t, publisher, toPublish2) + }) + }) +}) + +test('multiple publish and store one', function (t) { + t.plan(2) + + const broker = aedes() + + const sid = { + id: 'abcde' + } + const s = connect(setup(broker), { clientId: sid.id }) + const toPublish = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 2, + retain: false, + dup: false, + messageId: 42 + } + + var count = 5 + while (count--) { + s.inStream.write(toPublish) + } + var recvcnt = 0 + s.outStream.on('data', function (packet) { + if (++recvcnt < 5) return + broker.close(function () { + broker.persistence.incomingGetPacket(sid, toPublish, function (err, origPacket) { + delete origPacket.brokerId + delete origPacket.brokerCounter + t.deepEqual(origPacket, toPublish, 'packet must match') + t.error(err) + }) + }) + }) +}) + +test('packet is written to stream after being stored', function (t) { + const s = connect(setup()) + + var broker = s.broker + + 
t.tearDown(broker.close.bind(s.broker)) + + var packetStored = false + + var fn = broker.persistence.incomingStorePacket.bind(broker.persistence) + + s.broker.persistence.incomingStorePacket = function (client, packet, done) { + packetStored = true + t.pass('packet stored') + fn(client, packet, done) + } + + const packet = { + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 2, + messageId: 42 + } + + publish(t, s, packet) + + s.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'pubrec', 'pubrec received') + t.equal(packetStored, true, 'after packet store') + t.end() + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/regr-21.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/regr-21.js new file mode 100644 index 000000000..45e1cfb86 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/regr-21.js @@ -0,0 +1,34 @@ +'use strict' + +const { test } = require('tap') +const { setup, connect } = require('./helper') + +test('after an error, outstanding packets are discarded', function (t) { + t.plan(1) + + const s = connect(setup(), { + keepalive: 1000 + }) + t.tearDown(s.broker.close.bind(s.broker)) + + const packet = { + cmd: 'publish', + topic: 'hello', + payload: 'world' + } + + s.broker.mq.on('hello', function (msg, cb) { + t.pass('first msg received') + s.inStream.destroy(new Error('something went wrong')) + cb() + setImmediate(() => { + packet.topic = 'foo' + s.inStream.write(packet) + s.inStream.write(packet) + }) + }) + s.broker.mq.on('foo', function (msg, cb) { + t.fail('msg received') + }) + s.inStream.write(packet) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/retain.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/retain.js new file mode 100644 index 000000000..7a723de6f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/retain.js @@ -0,0 +1,646 @@ +'use strict' + +const { test } = require('tap') +const { through } = require('../lib/utils') +const Faketimers = require('@sinonjs/fake-timers') +const { setup, connect, subscribe, noError } = require('./helper') +const aedes = require('../') + +// [MQTT-3.3.1-9] +test('live retain packets', function (t) { + t.plan(5) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + retain: false, + dup: false, + length: 12, + qos: 0 + } + + const s = noError(connect(setup()), t) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, 'hello', 0, function () { + s.outStream.on('data', function (packet) { + t.deepEqual(packet, expected) + }) + + s.broker.publish({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + retain: true, + dup: false, + length: 12, + qos: 0 + }, function () { + t.pass('publish finished') + }) + }) +}) + +test('retain messages', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 12, + retain: true + } + + broker.subscribe('hello', function (packet, cb) { + cb() + + // defer this or it will receive the message which + // is being published + setImmediate(function () { + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + }) + }) + }) + }) + + 
publisher.inStream.write(expected) +}) + +test('avoid wrong deduping of retain messages', function (t) { + t.plan(7) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 12, + retain: true + } + + broker.subscribe('hello', function (packet, cb) { + cb() + // subscribe and publish another topic + subscribe(t, subscriber, 'hello2', 0, function () { + cb() + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello2', + payload: Buffer.from('world'), + qos: 0, + dup: false + }) + + subscriber.outStream.once('data', function (packet) { + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + }) + }) + }) + }) + }) + + publisher.inStream.write(expected) +}) + +test('reconnected subscriber will not receive retained messages when QoS 0 and clean', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker), { clean: true }) + var subscriber = connect(setup(broker), { clean: true }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: false, + dup: false, + length: 12 + } + subscribe(t, subscriber, 'hello', 0, function () { + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 0, + retain: false + }) + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + subscriber.inStream.end() + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'foo', + qos: 0, + retain: true + }) + subscriber = connect(setup(broker), { clean: true }) + subscriber.outStream.on('data', function (packet) { + t.fail('should not received retain message') + }) + }) + }) +}) + +// [MQTT-3.3.1-6] +test('new QoS 0 subscribers receive QoS 0 retained messages when clean', function (t) { + t.plan(9) + + const clock = Faketimers.createClock() + const broker = aedes() + t.tearDown(function () { + clock.reset() + broker.close() + }) + + const publisher = connect(setup(broker), { clean: true }) + const expected = { + cmd: 'publish', + topic: 'hello/world', + payload: Buffer.from('big big world'), + qos: 0, + retain: true, + dup: false, + length: 26 + } + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello/world', + payload: 'big big world', + qos: 0, + retain: true + }) + const subscriber1 = connect(setup(broker), { clean: true }) + subscribe(t, subscriber1, 'hello/world', 0, function () { + subscriber1.outStream.on('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + clock.tick(100) + }) + }) + const subscriber2 = connect(setup(broker), { clean: true }) + subscribe(t, subscriber2, 'hello/+', 0, function () { + subscriber2.outStream.on('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + clock.tick(100) + }) + }) + + clock.setTimeout(() => { + t.equal(broker.counter, 8) + }, 200) +}) + +// [MQTT-3.3.1-5] +test('new QoS 0 subscribers receive downgraded QoS 1 retained messages when clean', function (t) { + t.plan(6) + + const broker = aedes() + + const publisher = connect(setup(broker), { clean: true }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + 
qos: 0, + retain: true, + dup: false, + length: 12 + } + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + retain: true, + messageId: 42 + }) + publisher.outStream.on('data', function (packet) { + const subscriber = connect(setup(broker), { clean: true }) + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.on('data', function (packet) { + t.notEqual(packet.messageId, 42, 'messageId should not be the same') + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + broker.close() + }) + }) + }) + broker.on('closed', function () { + t.equal(broker.counter, 7) + }) +}) + +// [MQTT-3.3.1-10] +test('clean retained messages', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker), { clean: true }) + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 0, + retain: true + }) + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: '', + qos: 0, + retain: true + }) + const subscriber = connect(setup(broker), { clean: true }) + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.once('data', function (packet) { + t.fail('should not received retain message') + }) + }) +}) + +// [MQTT-3.3.1-11] +test('broker not store zero-byte retained messages', function (t) { + t.plan(0) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = connect(setup(broker)) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: '', + retain: true + }) + s.broker.on('publish', function (packet, client) { + if (packet.topic.startsWith('$SYS/')) { + return + } + const stream = s.broker.persistence.createRetainedStream(packet.topic) + stream.pipe(through(function sendRetained (packet, enc, cb) { + t.fail('not store zero-byte retained messages') + })) + }) +}) + +test('fail to clean retained messages without retain flag', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker), { clean: true }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + retain: true, + dup: false, + length: 12 + } + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 0, + retain: true + }) + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: '', + qos: 0, + retain: false + }) + const subscriber = connect(setup(broker), { clean: true }) + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.on('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + }) + }) +}) + +test('only get the last retained messages in same topic', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker), { clean: true }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('foo'), + qos: 0, + retain: true, + dup: false, + length: 10 + } + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 0, + retain: true + }) + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'foo', + qos: 0, + retain: true + }) + const subscriber = connect(setup(broker), { clean: true }) + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.on('data', function (packet) { + t.deepEqual(packet, 
expected, 'packet must match') + }) + }) +}) + +test('deliver QoS 1 retained messages to new subscriptions', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: true + } + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42, + retain: true + }) + + publisher.outStream.on('data', function (packet) { + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.outStream.once('data', function (packet) { + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + }) + }) +}) + +test('deliver QoS 1 retained messages to established subscriptions', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: false + } + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.outStream.once('data', function (packet) { + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42, + retain: true + }) + }) +}) + +test('deliver QoS 0 retained message with QoS 1 subscription', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker)) + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 12, + retain: true + } + + broker.mq.on('hello', function (msg, cb) { + cb() + + // defer this or it will receive the message which + // is being published + setImmediate(function () { + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.outStream.once('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + }) + }) + }) + }) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + messageId: 42, + retain: true + }) +}) + +test('disconnect and retain messages with QoS 1 [clean=false]', function (t) { + t.plan(7) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = noError(connect(setup(broker), { clean: false, clientId: 'abcde' }), t) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 14, + retain: true + } + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.write({ + cmd: 'disconnect' + }) + + subscriber.outStream.on('data', function (packet) { + console.log('original', packet) + }) + + const publisher = noError(connect(setup(broker)), t) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 42, + retain: true + }) + + publisher.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }, function (connect) { + t.equal(connect.sessionPresent, true, 'session present is set to true') + }) + + 
subscriber.outStream.once('data', function (packet) { + // receive any queued messages (no matter they are retained messages) at the disconnected time + t.notEqual(packet.messageId, 42, 'messageId must differ') + delete packet.messageId + packet.length = 14 + t.deepEqual(packet, expected, 'packet must match') + + // there should be no messages come from restored subscriptions + subscriber.outStream.once('data', function (packet) { + t.fail('should not receive any more messages') + }) + }) + }) + }) +}) + +test('disconnect and two retain messages with QoS 1 [clean=false]', function (t) { + t.plan(15) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + var subscriber = noError(connect(setup(broker), { clean: false, clientId: 'abcde' }), t) + const expected = { + cmd: 'publish', + topic: 'hello', + qos: 1, + dup: false, + length: 14, + retain: true + } + + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.inStream.write({ + cmd: 'disconnect' + }) + + subscriber.outStream.on('data', function (packet) { + console.log('original', packet) + }) + + const publisher = noError(connect(setup(broker)), t) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world', + qos: 1, + messageId: 41, + retain: true + }) + + publisher.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello', + payload: 'world2', + qos: 1, + messageId: 42, + retain: true + }) + + publisher.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'puback') + + subscriber = connect(setup(broker), { clean: false, clientId: 'abcde' }, function (connect) { + t.equal(connect.sessionPresent, true, 'session present is set to true') + }) + + subscriber.outStream.once('data', function (packet) { + // receive any queued messages (included retained messages) at the disconnected time + t.notEqual(packet.messageId, 41, 'messageId must differ') + delete packet.messageId + packet.length = 14 + expected.payload = Buffer.from('world') + t.deepEqual(packet, expected, 'packet must match') + + // receive any queued messages (included retained messages) at the disconnected time + subscriber.outStream.once('data', function (packet) { + t.notEqual(packet.messageId, 42, 'messageId must differ') + delete packet.messageId + packet.length = 14 + expected.payload = Buffer.from('world2') + t.deepEqual(packet, expected, 'packet must match') + + // should get the last retained message when we do a subscribe + subscribe(t, subscriber, 'hello', 1, function () { + subscriber.outStream.on('data', function (packet) { + t.notEqual(packet.messageId, 42, 'messageId must differ') + delete packet.messageId + packet.length = 14 + expected.payload = Buffer.from('world2') + t.deepEqual(packet, expected, 'packet must match') + }) + }) + }) + }) + }) + }) + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/topics.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/topics.js new file mode 100644 index 000000000..7862bda48 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/topics.js @@ -0,0 +1,294 @@ +'use strict' + +const { test } = require('tap') +const { setup, connect, subscribe } = require('./helper') +const aedes = require('../') + +// [MQTT-4.7.1-3] +test('Single-level wildcard should match empty level', function (t) { + t.plan(4) + + var s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, 'a/+/b', 0, function () { + 
s.outStream.once('data', function (packet) { + t.pass('ok') + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'a//b', + payload: 'world' + }) + }) +}) + +// [MQTT-4.7.3-1] +test('publish empty topic', function (t) { + t.plan(4) + + const s = connect(setup()) + + subscribe(t, s, '#', 0, function () { + s.outStream.once('data', function (packet) { + t.fail('no packet') + }) + + s.inStream.write({ + cmd: 'publish', + topic: '', + payload: 'world' + }) + + s.broker.close(function () { + t.equal(s.broker.connectedClients, 0, 'no connected clients') + }) + }) +}) + +test('publish invalid topic with #', function (t) { + t.plan(4) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, '#', 0, function () { + s.outStream.once('data', function (packet) { + t.fail('no packet') + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello/#', + payload: 'world' + }) + }) + + s.broker.on('clientError', function () { + t.pass('raise an error') + }) +}) + +test('publish invalid topic with +', function (t) { + t.plan(4) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + subscribe(t, s, '#', 0, function () { + s.outStream.once('data', function (packet) { + t.fail('no packet') + }) + + s.inStream.write({ + cmd: 'publish', + topic: 'hello/+/eee', + payload: 'world' + }) + }) + + s.broker.on('clientError', function () { + t.pass('raise an error') + }) +}) + +;['base/#/sub', 'base/#sub', 'base/sub#', 'base/xyz+/sub', 'base/+xyz/sub', ''].forEach(function (topic) { + test('subscribe to invalid topic with "' + topic + '"', function (t) { + t.plan(1) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('clientError', function () { + t.pass('raise an error') + }) + + s.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: [{ + topic: topic, + qos: 0 + }] + }) + }) + + test('unsubscribe to invalid topic with "' + topic + '"', function (t) { + t.plan(1) + + const s = connect(setup()) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('clientError', function () { + t.pass('raise an error') + }) + + s.inStream.write({ + cmd: 'unsubscribe', + messageId: 24, + unsubscriptions: [topic] + }) + }) +}) + +test('topics are case-sensitive', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const publisher = connect(setup(broker), { clean: true }) + const subscriber = connect(setup(broker), { clean: true }) + const expected = { + cmd: 'publish', + topic: 'hello', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 12, + retain: false + } + + subscribe(t, subscriber, 'hello', 0, function () { + subscriber.outStream.on('data', function (packet) { + t.deepEqual(packet, expected, 'packet mush match') + }) + ;['hello', 'HELLO', 'heLLo', 'HELLO/#', 'hello/+'].forEach(function (topic) { + publisher.inStream.write({ + cmd: 'publish', + topic: topic, + payload: 'world', + qos: 0, + retain: false + }) + }) + }) +}) + +function subscribeMultipleTopics (t, broker, qos, subscriber, subscriptions, done) { + const publisher = connect(setup(broker)) + subscriber.inStream.write({ + cmd: 'subscribe', + messageId: 24, + subscriptions: subscriptions + }) + + subscriber.outStream.once('data', function (packet) { + t.equal(packet.cmd, 'suback') + t.deepEqual(packet.granted, subscriptions.map(obj => obj.qos)) + t.equal(packet.messageId, 24) + + publisher.inStream.write({ + cmd: 'publish', + topic: 'hello/world', + payload: 'world', + qos: qos, + 
messageId: 42 + }) + + if (done) { + done(null, packet) + } + }) +} + +test('Overlapped topics with same QoS', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello/world', + payload: Buffer.from('world'), + qos: 1, + dup: false, + length: 20, + retain: false + } + const sub = [ + { topic: 'hello/world', qos: 1 }, + { topic: 'hello/#', qos: 1 }] + subscribeMultipleTopics(t, broker, 1, subscriber, sub, function () { + subscriber.outStream.on('data', function (packet) { + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + }) +}) + +// [MQTT-3.3.5-1] +test('deliver overlapped topics respecting the maximum QoS of all the matching subscriptions - QoS 0 publish', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello/world', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 18, + retain: false + } + const sub = [ + { topic: 'hello/world', qos: 0 }, + { topic: 'hello/#', qos: 2 }] + subscribeMultipleTopics(t, broker, 0, subscriber, sub, function () { + subscriber.outStream.on('data', function (packet) { + delete packet.messageId + t.deepEqual(packet, expected, 'packet must match') + }) + }) +}) + +// [MQTT-3.3.5-1] +test('deliver overlapped topics respecting the maximum QoS of all the matching subscriptions - QoS 2 publish', function (t) { + t.plan(3) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const subscriber = connect(setup(broker)) + + const sub = [ + { topic: 'hello/world', qos: 0 }, + { topic: 'hello/#', qos: 2 }] + subscribeMultipleTopics(t, broker, 2, subscriber, sub, function () { + subscriber.outStream.on('data', function () { + t.fail('should receive messages with the maximum QoS') + }) + }) +}) + +test('Overlapped topics with QoS downgrade', function (t) { + t.plan(4) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const subscriber = connect(setup(broker)) + const expected = { + cmd: 'publish', + topic: 'hello/world', + payload: Buffer.from('world'), + qos: 0, + dup: false, + length: 18, + retain: false + } + const sub = [ + { topic: 'hello/world', qos: 1 }, + { topic: 'hello/#', qos: 1 }] + subscribeMultipleTopics(t, broker, 0, subscriber, sub, function () { + subscriber.outStream.on('data', function (packet) { + t.deepEqual(packet, expected, 'packet must match') + }) + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/types/index.ts b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/types/index.ts new file mode 100644 index 000000000..6b99780cb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/types/index.ts @@ -0,0 +1,141 @@ +/* eslint no-unused-vars: 0 */ +/* eslint no-undef: 0 */ + +import { Server, Client, AuthenticateError, AedesPublishPacket, PublishPacket, Subscription } from '../../aedes' +import { createServer, Socket } from 'net' + +const broker = Server({ + concurrency: 100, + heartbeatInterval: 60000, + connectTimeout: 30000, + id: 'aedes', + preConnect: (client: Client, callback) => { + if (client.req) { + callback(new Error('not websocket stream'), false) + } + if (client.conn instanceof Socket && client.conn.remoteAddress === '::1') { + callback(null, true) + } else { + callback(new Error('connection error'), false) + } + }, + authenticate: 
(client: Client, username: string, password: Buffer, callback) => { + if (username === 'test' && password === Buffer.from('test') && client.version === 4) { + callback(null, true) + } else { + const error = new Error() as AuthenticateError + error.returnCode = 1 + + callback(error, false) + } + }, + authorizePublish: (client: Client, packet: PublishPacket, callback) => { + if (packet.topic === 'aaaa') { + return callback(new Error('wrong topic')) + } + + if (packet.topic === 'bbb') { + packet.payload = Buffer.from('overwrite packet payload') + } + + callback(null) + }, + authorizeSubscribe: (client: Client, sub: Subscription, callback) => { + if (sub.topic === 'aaaa') { + return callback(new Error('wrong topic')) + } + + if (sub.topic === 'bbb') { + // overwrites subscription + sub.qos = 2 + } + + callback(null, sub) + }, + authorizeForward: (client: Client, packet: AedesPublishPacket) => { + if (packet.topic === 'aaaa' && client.id === 'I should not see this') { + return null + // also works with return undefined + } else if (packet.topic === 'aaaa' && client.id === 'I should not see this either') { + return + } + + if (packet.topic === 'bbb') { + packet.payload = Buffer.from('overwrite packet payload') + } + + return packet + } +}) + +const server = createServer(broker.handle) + +broker.on('closed', () => { + console.log('closed') +}) + +broker.on('client', client => { + console.log(`client: ${client.id} connected`) +}) + +broker.on('clientReady', client => { + console.log(`client: ${client.id} is ready`) +}) + +broker.on('clientDisconnect', client => { + console.log(`client: ${client.id} disconnected`) +}) + +broker.on('keepaliveTimeout', client => { + console.log(`client: ${client.id} timed out`) +}) + +broker.on('connackSent', (packet, client) => { + console.log(`client: ${client.id} connack sent`) +}) + +broker.on('clientError', client => { + console.log(`client: ${client.id} error`) +}) + +broker.on('connectionError', client => { + console.log('connectionError') +}) + +broker.on('ping', (packet, client) => { + console.log(`client: ${client.id} ping with packet ${packet.cmd}`) +}) + +broker.on('publish', (packet, client) => { + console.log(`client: ${client.id} published packet ${packet.cmd}`) +}) + +broker.on('ack', (packet, client) => { + console.log(`client: ${client.id} ack with packet ${packet.cmd}`) +}) + +broker.on('subscribe', (subscriptions, client) => { + console.log(`client: ${client.id} subscribe`) +}) + +broker.on('unsubscribe', (subscriptions, client) => { + console.log(`client: ${client.id} subscribe`) +}) + +broker.subscribe('aaaa', (packet: AedesPublishPacket, cb) => { + console.log('cmd') + console.log(packet.cmd) + cb() +}, () => { + console.log('done subscribing') +}) + +broker.unsubscribe('aaaa', (packet: AedesPublishPacket, cb) => { + console.log('cmd') + console.log(packet.cmd) + cb() +}, () => { + console.log('done unsubscribing') +}) + +broker.close() diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/types/tsconfig.json b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/types/tsconfig.json new file mode 100644 index 000000000..791484233 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/types/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es6", + "noEmit": true, + "strict": true + }, + "files": [ + "./index.ts" + ] +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/aedes/test/will.js b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/will.js new file mode 
100644 index 000000000..bbb040f90 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/aedes/test/will.js @@ -0,0 +1,527 @@ +'use strict' + +const { test } = require('tap') +const memory = require('aedes-persistence') +const Faketimers = require('@sinonjs/fake-timers') +const { setup, connect, noError } = require('./helper') +const aedes = require('../') + +function willConnect (s, opts, connected) { + opts = opts || {} + opts.will = { + topic: 'mywill', + payload: Buffer.from('last will'), + qos: 0, + retain: false + } + + return connect(s, opts, connected) +} + +test('delivers a will', function (t) { + t.plan(4) + + const opts = {} + // willConnect populates opts with a will + const s = willConnect(setup(), + opts, + function () { + s.conn.destroy() + } + ) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.mq.on('mywill', function (packet, cb) { + t.equal(packet.topic, opts.will.topic, 'topic matches') + t.deepEqual(packet.payload, opts.will.payload, 'payload matches') + t.equal(packet.qos, opts.will.qos, 'qos matches') + t.equal(packet.retain, opts.will.retain, 'retain matches') + cb() + }) +}) + +test('calling close two times should not deliver two wills', function (t) { + t.plan(4) + + const opts = {} + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + client.close() + client.close() + }) + + broker.mq.on('mywill', onWill) + + // willConnect populates opts with a will + willConnect(setup(broker), opts) + + function onWill (packet, cb) { + broker.mq.removeListener('mywill', onWill) + broker.mq.on('mywill', t.fail.bind(t)) + t.equal(packet.topic, opts.will.topic, 'topic matches') + t.deepEqual(packet.payload, opts.will.payload, 'payload matches') + t.equal(packet.qos, opts.will.qos, 'qos matches') + t.equal(packet.retain, opts.will.retain, 'retain matches') + cb() + } +}) + +test('delivers old will in case of a crash', function (t) { + t.plan(6) + + const persistence = memory() + const will = { + topic: 'mywill', + payload: Buffer.from('last will'), + qos: 0, + retain: false + } + + persistence.broker = { + id: 'anotherBroker' + } + + persistence.putWill({ + id: 'myClientId42' + }, will, function (err) { + t.error(err, 'no error') + + const interval = 10 // ms, so that the will check happens fast! 
+ const broker = aedes({ + persistence: persistence, + heartbeatInterval: interval + }) + t.tearDown(broker.close.bind(broker)) + + const start = Date.now() + + broker.mq.on('mywill', check) + + function check (packet, cb) { + broker.mq.removeListener('mywill', check) + t.ok(Date.now() - start >= 3 * interval, 'the will needs to be emitted after 3 heartbeats') + t.equal(packet.topic, will.topic, 'topic matches') + t.deepEqual(packet.payload, will.payload, 'payload matches') + t.equal(packet.qos, will.qos, 'qos matches') + t.equal(packet.retain, will.retain, 'retain matches') + broker.mq.on('mywill', function (packet) { + t.fail('the will must be delivered only once') + }) + cb() + } + }) +}) + +test('delete old broker', function (t) { + t.plan(1) + + const clock = Faketimers.install() + + const heartbeatInterval = 100 + const broker = aedes({ + heartbeatInterval: heartbeatInterval + }) + t.tearDown(broker.close.bind(broker)) + + const brokerId = 'dummyBroker' + + broker.brokers[brokerId] = Date.now() - heartbeatInterval * 3.5 + + setTimeout(() => { + t.equal(broker.brokers[brokerId], undefined, 'Broker deleted') + }, heartbeatInterval * 4) + + clock.tick(heartbeatInterval * 4) + + clock.uninstall() +}) + +test('store the will in the persistence', function (t) { + t.plan(5) + + const opts = { + clientId: 'abcde' + } + + // willConnect populates opts with a will + const s = willConnect(setup(), opts) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('client', function () { + // this is connack + s.broker.persistence.getWill({ + id: opts.clientId + }, function (err, packet) { + t.error(err, 'no error') + t.deepEqual(packet.topic, opts.will.topic, 'will topic matches') + t.deepEqual(packet.payload, opts.will.payload, 'will payload matches') + t.deepEqual(packet.qos, opts.will.qos, 'will qos matches') + t.deepEqual(packet.retain, opts.will.retain, 'will retain matches') + }) + }) +}) + +test('delete the will in the persistence after publish', function (t) { + t.plan(2) + + const opts = { + clientId: 'abcde' + } + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + broker.on('client', function (client) { + setImmediate(function () { + client.close() + }) + }) + + broker.mq.on('mywill', check) + + // willConnect populates opts with a will + willConnect(setup(broker), opts) + + function check (packet, cb) { + broker.mq.removeListener('mywill', check) + setImmediate(function () { + broker.persistence.getWill({ + id: opts.clientId + }, function (err, p) { + t.error(err, 'no error') + t.notOk(p, 'packet is empty') + }) + }) + cb() + } +}) + +test('delivers a will with authorization', function (t) { + t.plan(6) + + let authorized = false + const opts = {} + // willConnect populates opts with a will + const s = willConnect( + setup(aedes({ + authorizePublish: (client, packet, callback) => { + authorized = true + callback(null) + } + })), + opts, + function () { + s.conn.destroy() + } + ) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('clientDisconnect', function (client) { + t.equal(client.connected, false) + }) + + s.broker.mq.on('mywill', function (packet, cb) { + t.equal(packet.topic, opts.will.topic, 'topic matches') + t.deepEqual(packet.payload, opts.will.payload, 'payload matches') + t.equal(packet.qos, opts.will.qos, 'qos matches') + t.equal(packet.retain, opts.will.retain, 'retain matches') + t.equal(authorized, true, 'authorization called') + cb() + }) +}) + +test('delivers a will waits for authorization', function (t) { + t.plan(6) + + let authorized = 
false + const opts = {} + // willConnect populates opts with a will + const s = willConnect( + setup(aedes({ + authorizePublish: (client, packet, callback) => { + authorized = true + setImmediate(() => { callback(null) }) + } + })), + opts, + function () { + s.conn.destroy() + } + ) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('clientDisconnect', function () { + t.pass('client is disconnected') + }) + + s.broker.mq.on('mywill', function (packet, cb) { + t.equal(packet.topic, opts.will.topic, 'topic matches') + t.deepEqual(packet.payload, opts.will.payload, 'payload matches') + t.equal(packet.qos, opts.will.qos, 'qos matches') + t.equal(packet.retain, opts.will.retain, 'retain matches') + t.equal(authorized, true, 'authorization called') + cb() + }) +}) + +test('does not deliver a will without authorization', function (t) { + t.plan(1) + + let authorized = false + const opts = {} + // willConnect populates opts with a will + const s = willConnect( + setup(aedes({ + authorizePublish: (username, packet, callback) => { + authorized = true + callback(new Error()) + } + })), + opts, + function () { + s.conn.destroy() + } + ) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('clientDisconnect', function () { + t.equal(authorized, true, 'authorization called') + }) + + s.broker.mq.on('mywill', function (packet, cb) { + t.fail('received will without authorization') + cb() + }) +}) + +test('does not deliver a will without authentication', function (t) { + t.plan(1) + + let authenticated = false + const opts = {} + // willConnect populates opts with a will + const s = willConnect( + setup(aedes({ + authenticate: (client, username, password, callback) => { + authenticated = true + callback(new Error(), false) + } + })), + opts + ) + t.tearDown(s.broker.close.bind(s.broker)) + + s.broker.on('clientError', function () { + t.equal(authenticated, true, 'authentication called') + t.end() + }) + + s.broker.mq.on('mywill', function (packet, cb) { + t.fail('received will without authentication') + cb() + }) +}) + +test('does not deliver will if broker is closed during authentication', function (t) { + t.plan(0) + + const opts = { keepalive: 1 } + + const broker = aedes({ + authenticate: function (client, username, password, callback) { + setTimeout(function () { + callback(null, true) + }) + broker.close() + } + }) + + broker.on('keepaliveTimeout', function () { + t.fail('keepalive timer shoud not be set') + }) + + broker.mq.on('mywill', function (packet, cb) { + t.fail('Received will when it was not expected') + cb() + }) + + willConnect(setup(broker), opts) +}) + +// [MQTT-3.14.4-3] +test('does not deliver will when client sends a DISCONNECT', function (t) { + t.plan(0) + + const broker = aedes() + t.tearDown(broker.close.bind(broker)) + + const s = noError(willConnect(setup(broker), {}, function () { + s.inStream.end({ + cmd: 'disconnect' + }) + }), t) + + s.broker.mq.on('mywill', function (packet, cb) { + t.fail(packet) + cb() + }) +}) + +test('does not store multiple will with same clientid', function (t) { + t.plan(4) + + const opts = { clientId: 'abcde' } + + const broker = aedes() + + var s = noError(willConnect(setup(broker), opts, function () { + // gracefully close client so no will is sent + s.inStream.end({ + cmd: 'disconnect' + }) + }), t) + + broker.on('clientDisconnect', function (client) { + // reconnect same client with will + s = willConnect(setup(broker), opts, function () { + // check that there are not 2 will messages for the same clientid + 
s.broker.persistence.delWill({ id: opts.clientId }, function (err, packet) { + t.error(err, 'no error') + t.equal(packet.clientId, opts.clientId, 'will packet found') + s.broker.persistence.delWill({ id: opts.clientId }, function (err, packet) { + t.error(err, 'no error') + t.equal(!!packet, false, 'no duplicated packets') + broker.close() + }) + }) + }) + }) +}) + +test('don\'t delivers a will if broker alive', function (t) { + const persistence = memory() + const will = { + topic: 'mywill', + payload: Buffer.from('last will'), + qos: 0, + retain: false + } + + var oldBroker = 'broker1' + + persistence.broker = { + id: oldBroker + } + + persistence.putWill({ + id: 'myClientId42' + }, will, function (err) { + t.error(err, 'no error') + + const opts = { + persistence: persistence, + heartbeatInterval: 10 + } + + var count = 0 + + const broker = aedes(opts) + t.tearDown(broker.close.bind(broker)) + + var streamWill = persistence.streamWill + persistence.streamWill = function () { + // don't pass broker.brokers to streamWill + return streamWill.call(persistence) + } + + broker.mq.on('mywill', function (packet, cb) { + t.fail('Will received') + cb() + }) + + broker.mq.on('$SYS/+/heartbeat', function () { + t.pass('Heartbeat received') + broker.brokers[oldBroker] = Date.now() + + if (++count === 5) { + t.end() + } + }) + }) +}) + +test('handle will publish error', function (t) { + t.plan(2) + const persistence = memory() + const will = { + topic: 'mywill', + payload: Buffer.from('last will'), + qos: 0, + retain: false + } + + persistence.broker = { + id: 'broker1' + } + + persistence.putWill({ + id: 'myClientId42' + }, will, function (err) { + t.error(err, 'no error') + + const opts = { + persistence: persistence, + heartbeatInterval: 10 + } + + persistence.delWill = function (client, cb) { + cb(new Error('Throws error')) + } + + const broker = aedes(opts) + t.tearDown(broker.close.bind(broker)) + + broker.once('error', function (err) { + t.equal('Throws error', err.message, 'throws error') + }) + }) +}) + +test('handle will publish error 2', function (t) { + t.plan(2) + const persistence = memory() + const will = { + topic: 'mywill', + payload: Buffer.from('last will'), + qos: 0, + retain: true + } + + persistence.broker = { + id: 'broker1' + } + + persistence.putWill({ + id: 'myClientId42' + }, will, function (err) { + t.error(err, 'no error') + + const opts = { + persistence: persistence, + heartbeatInterval: 10 + } + + persistence.storeRetained = function (packet, cb) { + cb(new Error('Throws error')) + } + + const broker = aedes(opts) + t.tearDown(broker.close.bind(broker)) + + broker.once('error', function (err) { + t.equal('Throws error', err.message, 'throws error') + }) + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/base64-js/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/base64-js/LICENSE new file mode 100644 index 000000000..6d52b8acf --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/base64-js/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Jameson Little + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this 
permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/base64-js/README.md b/sdklab/meantimerecovery/aedes/node_modules/base64-js/README.md new file mode 100644 index 000000000..b42a48f41 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/base64-js/README.md @@ -0,0 +1,34 @@ +base64-js +========= + +`base64-js` does basic base64 encoding/decoding in pure JS. + +[![build status](https://secure.travis-ci.org/beatgammit/base64-js.png)](http://travis-ci.org/beatgammit/base64-js) + +Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data. + +Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does. + +## install + +With [npm](https://npmjs.org) do: + +`npm install base64-js` and `var base64js = require('base64-js')` + +For use in web browsers do: + +`` + +[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme) + +## methods + +`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, which both take a single argument. + +* `byteLength` - Takes a base64 string and returns length of byte array +* `toByteArray` - Takes a base64 string and returns a byte array +* `fromByteArray` - Takes a byte array and returns a base64 string + +## license + +MIT diff --git a/sdklab/meantimerecovery/aedes/node_modules/base64-js/base64js.min.js b/sdklab/meantimerecovery/aedes/node_modules/base64-js/base64js.min.js new file mode 100644 index 000000000..908ac83fd --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/base64-js/base64js.min.js @@ -0,0 +1 @@ +(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;fj?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 
3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? 
len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/base64-js/package.json b/sdklab/meantimerecovery/aedes/node_modules/base64-js/package.json new file mode 100644 index 000000000..1c5567943 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/base64-js/package.json @@ -0,0 +1,75 @@ +{ + "_from": "base64-js@^1.3.1", + "_id": "base64-js@1.5.1", + "_inBundle": false, + "_integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "_location": "/base64-js", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "base64-js@^1.3.1", + "name": "base64-js", + "escapedName": "base64-js", + "rawSpec": "^1.3.1", + "saveSpec": null, + "fetchSpec": "^1.3.1" + }, + "_requiredBy": [ + "/buffer" + ], + "_resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "_shasum": "1b1b440160a5bf7ad40b650f095963481903930a", + "_spec": "base64-js@^1.3.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\buffer", + "author": { + "name": "T. Jameson Little", + "email": "t.jameson.little@gmail.com" + }, + "bugs": { + "url": "https://github.com/beatgammit/base64-js/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Base64 encoding/decoding in pure JS", + "devDependencies": { + "babel-minify": "^0.5.1", + "benchmark": "^2.1.4", + "browserify": "^16.3.0", + "standard": "*", + "tape": "4.x" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "homepage": "https://github.com/beatgammit/base64-js", + "keywords": [ + "base64" + ], + "license": "MIT", + "main": "index.js", + "name": "base64-js", + "repository": { + "type": "git", + "url": "git://github.com/beatgammit/base64-js.git" + }, + "scripts": { + "build": "browserify -s base64js -r ./ | minify > base64js.min.js", + "lint": "standard", + "test": "npm run lint && npm run unit", + "unit": "tape test/*.js" + }, + "typings": "index.d.ts", + "version": "1.5.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/bl/.travis.yml new file mode 100644 index 000000000..016eaf556 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/.travis.yml @@ -0,0 +1,17 @@ +sudo: false +arch: + - amd64 + - ppc64le +language: node_js +node_js: + - '6' + - '8' + - '10' + - '12' + - '14' + - '15' + - lts/* +notifications: + email: + - rod@vagg.org + - matteo.collina@gmail.com diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/BufferList.js b/sdklab/meantimerecovery/aedes/node_modules/bl/BufferList.js new file mode 100644 index 000000000..471ee7788 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/BufferList.js @@ -0,0 +1,396 @@ +'use strict' + +const { Buffer } = require('buffer') +const symbol = 
Symbol.for('BufferList') + +function BufferList (buf) { + if (!(this instanceof BufferList)) { + return new BufferList(buf) + } + + BufferList._init.call(this, buf) +} + +BufferList._init = function _init (buf) { + Object.defineProperty(this, symbol, { value: true }) + + this._bufs = [] + this.length = 0 + + if (buf) { + this.append(buf) + } +} + +BufferList.prototype._new = function _new (buf) { + return new BufferList(buf) +} + +BufferList.prototype._offset = function _offset (offset) { + if (offset === 0) { + return [0, 0] + } + + let tot = 0 + + for (let i = 0; i < this._bufs.length; i++) { + const _t = tot + this._bufs[i].length + if (offset < _t || i === this._bufs.length - 1) { + return [i, offset - tot] + } + tot = _t + } +} + +BufferList.prototype._reverseOffset = function (blOffset) { + const bufferId = blOffset[0] + let offset = blOffset[1] + + for (let i = 0; i < bufferId; i++) { + offset += this._bufs[i].length + } + + return offset +} + +BufferList.prototype.get = function get (index) { + if (index > this.length || index < 0) { + return undefined + } + + const offset = this._offset(index) + + return this._bufs[offset[0]][offset[1]] +} + +BufferList.prototype.slice = function slice (start, end) { + if (typeof start === 'number' && start < 0) { + start += this.length + } + + if (typeof end === 'number' && end < 0) { + end += this.length + } + + return this.copy(null, 0, start, end) +} + +BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { + if (typeof srcStart !== 'number' || srcStart < 0) { + srcStart = 0 + } + + if (typeof srcEnd !== 'number' || srcEnd > this.length) { + srcEnd = this.length + } + + if (srcStart >= this.length) { + return dst || Buffer.alloc(0) + } + + if (srcEnd <= 0) { + return dst || Buffer.alloc(0) + } + + const copy = !!dst + const off = this._offset(srcStart) + const len = srcEnd - srcStart + let bytes = len + let bufoff = (copy && dstStart) || 0 + let start = off[1] + + // copy/slice everything + if (srcStart === 0 && srcEnd === this.length) { + if (!copy) { + // slice, but full concat if multiple buffers + return this._bufs.length === 1 + ? this._bufs[0] + : Buffer.concat(this._bufs, this.length) + } + + // copy, need to copy individual buffers + for (let i = 0; i < this._bufs.length; i++) { + this._bufs[i].copy(dst, bufoff) + bufoff += this._bufs[i].length + } + + return dst + } + + // easy, cheap case where it's a subset of one of the buffers + if (bytes <= this._bufs[off[0]].length - start) { + return copy + ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) + : this._bufs[off[0]].slice(start, start + bytes) + } + + if (!copy) { + // a slice, we need something to copy in to + dst = Buffer.allocUnsafe(len) + } + + for (let i = off[0]; i < this._bufs.length; i++) { + const l = this._bufs[i].length - start + + if (bytes > l) { + this._bufs[i].copy(dst, bufoff, start) + bufoff += l + } else { + this._bufs[i].copy(dst, bufoff, start, start + bytes) + bufoff += l + break + } + + bytes -= l + + if (start) { + start = 0 + } + } + + // safeguard so that we don't return uninitialized memory + if (dst.length > bufoff) return dst.slice(0, bufoff) + + return dst +} + +BufferList.prototype.shallowSlice = function shallowSlice (start, end) { + start = start || 0 + end = typeof end !== 'number' ? 
this.length : end + + if (start < 0) { + start += this.length + } + + if (end < 0) { + end += this.length + } + + if (start === end) { + return this._new() + } + + const startOffset = this._offset(start) + const endOffset = this._offset(end) + const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) + + if (endOffset[1] === 0) { + buffers.pop() + } else { + buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1]) + } + + if (startOffset[1] !== 0) { + buffers[0] = buffers[0].slice(startOffset[1]) + } + + return this._new(buffers) +} + +BufferList.prototype.toString = function toString (encoding, start, end) { + return this.slice(start, end).toString(encoding) +} + +BufferList.prototype.consume = function consume (bytes) { + // first, normalize the argument, in accordance with how Buffer does it + bytes = Math.trunc(bytes) + // do nothing if not a positive number + if (Number.isNaN(bytes) || bytes <= 0) return this + + while (this._bufs.length) { + if (bytes >= this._bufs[0].length) { + bytes -= this._bufs[0].length + this.length -= this._bufs[0].length + this._bufs.shift() + } else { + this._bufs[0] = this._bufs[0].slice(bytes) + this.length -= bytes + break + } + } + + return this +} + +BufferList.prototype.duplicate = function duplicate () { + const copy = this._new() + + for (let i = 0; i < this._bufs.length; i++) { + copy.append(this._bufs[i]) + } + + return copy +} + +BufferList.prototype.append = function append (buf) { + if (buf == null) { + return this + } + + if (buf.buffer) { + // append a view of the underlying ArrayBuffer + this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength)) + } else if (Array.isArray(buf)) { + for (let i = 0; i < buf.length; i++) { + this.append(buf[i]) + } + } else if (this._isBufferList(buf)) { + // unwrap argument into individual BufferLists + for (let i = 0; i < buf._bufs.length; i++) { + this.append(buf._bufs[i]) + } + } else { + // coerce number arguments to strings, since Buffer(number) does + // uninitialized memory allocation + if (typeof buf === 'number') { + buf = buf.toString() + } + + this._appendBuffer(Buffer.from(buf)) + } + + return this +} + +BufferList.prototype._appendBuffer = function appendBuffer (buf) { + this._bufs.push(buf) + this.length += buf.length +} + +BufferList.prototype.indexOf = function (search, offset, encoding) { + if (encoding === undefined && typeof offset === 'string') { + encoding = offset + offset = undefined + } + + if (typeof search === 'function' || Array.isArray(search)) { + throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.') + } else if (typeof search === 'number') { + search = Buffer.from([search]) + } else if (typeof search === 'string') { + search = Buffer.from(search, encoding) + } else if (this._isBufferList(search)) { + search = search.slice() + } else if (Array.isArray(search.buffer)) { + search = Buffer.from(search.buffer, search.byteOffset, search.byteLength) + } else if (!Buffer.isBuffer(search)) { + search = Buffer.from(search) + } + + offset = Number(offset || 0) + + if (isNaN(offset)) { + offset = 0 + } + + if (offset < 0) { + offset = this.length + offset + } + + if (offset < 0) { + offset = 0 + } + + if (search.length === 0) { + return offset > this.length ? 
this.length : offset + } + + const blOffset = this._offset(offset) + let blIndex = blOffset[0] // index of which internal buffer we're working on + let buffOffset = blOffset[1] // offset of the internal buffer we're working on + + // scan over each buffer + for (; blIndex < this._bufs.length; blIndex++) { + const buff = this._bufs[blIndex] + + while (buffOffset < buff.length) { + const availableWindow = buff.length - buffOffset + + if (availableWindow >= search.length) { + const nativeSearchResult = buff.indexOf(search, buffOffset) + + if (nativeSearchResult !== -1) { + return this._reverseOffset([blIndex, nativeSearchResult]) + } + + buffOffset = buff.length - search.length + 1 // end of native search window + } else { + const revOffset = this._reverseOffset([blIndex, buffOffset]) + + if (this._match(revOffset, search)) { + return revOffset + } + + buffOffset++ + } + } + + buffOffset = 0 + } + + return -1 +} + +BufferList.prototype._match = function (offset, search) { + if (this.length - offset < search.length) { + return false + } + + for (let searchOffset = 0; searchOffset < search.length; searchOffset++) { + if (this.get(offset + searchOffset) !== search[searchOffset]) { + return false + } + } + return true +} + +;(function () { + const methods = { + readDoubleBE: 8, + readDoubleLE: 8, + readFloatBE: 4, + readFloatLE: 4, + readInt32BE: 4, + readInt32LE: 4, + readUInt32BE: 4, + readUInt32LE: 4, + readInt16BE: 2, + readInt16LE: 2, + readUInt16BE: 2, + readUInt16LE: 2, + readInt8: 1, + readUInt8: 1, + readIntBE: null, + readIntLE: null, + readUIntBE: null, + readUIntLE: null + } + + for (const m in methods) { + (function (m) { + if (methods[m] === null) { + BufferList.prototype[m] = function (offset, byteLength) { + return this.slice(offset, offset + byteLength)[m](0, byteLength) + } + } else { + BufferList.prototype[m] = function (offset = 0) { + return this.slice(offset, offset + methods[m])[m](0) + } + } + }(m)) + } +}()) + +// Used internally by the class and also as an indicator of this object being +// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser +// environment because there could be multiple different copies of the +// BufferList class and some `BufferList`s might be `BufferList`s. +BufferList.prototype._isBufferList = function _isBufferList (b) { + return b instanceof BufferList || BufferList.isBufferList(b) +} + +BufferList.isBufferList = function isBufferList (b) { + return b != null && b[symbol] +} + +module.exports = BufferList diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/LICENSE.md b/sdklab/meantimerecovery/aedes/node_modules/bl/LICENSE.md new file mode 100644 index 000000000..ecbe51637 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/LICENSE.md @@ -0,0 +1,13 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2013-2019 bl contributors +---------------------------------- + +*bl contributors listed at * + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/README.md b/sdklab/meantimerecovery/aedes/node_modules/bl/README.md new file mode 100644 index 000000000..9680b1dcb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/README.md @@ -0,0 +1,247 @@ +# bl *(BufferList)* + +[![Build Status](https://api.travis-ci.com/rvagg/bl.svg?branch=master)](https://travis-ci.com/rvagg/bl/) + +**A Node.js Buffer list collector, reader and streamer thingy.** + +[![NPM](https://nodei.co/npm/bl.svg)](https://nodei.co/npm/bl/) + +**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them! + +The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently. + +```js +const { BufferList } = require('bl') + +const bl = new BufferList() +bl.append(Buffer.from('abcd')) +bl.append(Buffer.from('efg')) +bl.append('hi') // bl will also accept & convert Strings +bl.append(Buffer.from('j')) +bl.append(Buffer.from([ 0x3, 0x4 ])) + +console.log(bl.length) // 12 + +console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij' +console.log(bl.slice(3, 10).toString('ascii')) // 'defghij' +console.log(bl.slice(3, 6).toString('ascii')) // 'def' +console.log(bl.slice(3, 8).toString('ascii')) // 'defgh' +console.log(bl.slice(5, 10).toString('ascii')) // 'fghij' + +console.log(bl.indexOf('def')) // 3 +console.log(bl.indexOf('asdf')) // -1 + +// or just use toString! +console.log(bl.toString()) // 'abcdefghij\u0003\u0004' +console.log(bl.toString('ascii', 3, 8)) // 'defgh' +console.log(bl.toString('ascii', 5, 10)) // 'fghij' + +// other standard Buffer readables +console.log(bl.readUInt16BE(10)) // 0x0304 +console.log(bl.readUInt16LE(10)) // 0x0403 +``` + +Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**: + +```js +const { BufferListStream } = require('bl') +const fs = require('fs') + +fs.createReadStream('README.md') + .pipe(BufferListStream((err, data) => { // note 'new' isn't strictly required + // `data` is a complete Buffer object containing the full data + console.log(data.toString()) + })) +``` + +Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream. 
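+
+As a minimal sketch of that pattern (the input file name here is arbitrary): since `BufferListStream` is a Duplex, its writable side emits `'finish'` once the piped source has ended, so you can keep the chunks in the list and only read out the ranges you actually need:
+
+```js
+const { BufferListStream } = require('bl')
+const fs = require('fs')
+
+const bl = new BufferListStream()
+
+fs.createReadStream('README.md').pipe(bl)
+
+bl.on('finish', () => {
+  // no concatenation has happened yet; the chunks are still stored separately
+  console.log('collected ' + bl.length + ' bytes')
+
+  // read out only the range you need
+  console.log(bl.toString('utf8', 0, 10))
+})
+```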
+
+Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):
+
+```js
+const hyperquest = require('hyperquest')
+const { BufferListStream } = require('bl')
+
+const url = 'https://raw.github.com/rvagg/bl/master/README.md'
+
+hyperquest(url).pipe(BufferListStream((err, data) => {
+  console.log(data.toString())
+}))
+```
+
+Or, use it as a readable stream to recompose a list of Buffers to an output source:
+
+```js
+const { BufferListStream } = require('bl')
+const fs = require('fs')
+
+var bl = new BufferListStream()
+bl.append(Buffer.from('abcd'))
+bl.append(Buffer.from('efg'))
+bl.append(Buffer.from('hi'))
+bl.append(Buffer.from('j'))
+
+bl.pipe(fs.createWriteStream('gibberish.txt'))
+```
+
+## API
+
+ * new BufferList([ buf ])
+ * BufferList.isBufferList(obj)
+ * bl.length
+ * bl.append(buffer)
+ * bl.get(index)
+ * bl.indexOf(value[, byteOffset][, encoding])
+ * bl.slice([ start[, end ] ])
+ * bl.shallowSlice([ start[, end ] ])
+ * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
+ * bl.duplicate()
+ * bl.consume(bytes)
+ * bl.toString([encoding, [ start, [ end ]]])
+ * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
+ * new BufferListStream([ callback ])
+
+--------------------------------------------------------
+
+### new BufferList([ Buffer | Buffer array | BufferList | BufferList array | String ])
+No arguments are _required_ for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
+
+`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with:
+
+```js
+const { BufferList } = require('bl')
+const bl = BufferList()
+
+// equivalent to:
+
+const { BufferList } = require('bl')
+const bl = new BufferList()
+```
+
+--------------------------------------------------------
+
+### BufferList.isBufferList(obj)
+Determines if the passed object is a `BufferList`. It will return `true` if the passed object is an instance of `BufferList` **or** `BufferListStream` and `false` otherwise.
+
+N.B. this won't return `true` for `BufferList` or `BufferListStream` instances created by versions of this library before this static method was added.
+
+--------------------------------------------------------
+
+### bl.length
+Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list.
+
+--------------------------------------------------------
+
+### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
+`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
+
+--------------------------------------------------------
+
+### bl.get(index)
+`get()` will return the byte at the specified index.
+
+--------------------------------------------------------
+
+### bl.indexOf(value[, byteOffset][, encoding])
+The `indexOf()` method returns the first index at which a given element can be found in the BufferList, or -1 if it is not present.
+
+--------------------------------------------------------
+
+### bl.slice([ start, [ end ] ])
+`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
+
+If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.
+
+--------------------------------------------------------
+
+### bl.shallowSlice([ start, [ end ] ])
+`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
+
+No copies will be performed. All buffers in the result share memory with the original list.
+
+--------------------------------------------------------
+
+### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
+`copy()` copies the content of the list into the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `srcStart` and `srcEnd` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.
+
+--------------------------------------------------------
+
+### bl.duplicate()
+`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example:
+
+```js
+var bl = new BufferListStream()
+
+bl.append('hello')
+bl.append(' world')
+bl.append('\n')
+
+bl.duplicate().pipe(process.stdout, { end: false })
+
+console.log(bl.toString())
+```
+
+--------------------------------------------------------
+
+### bl.consume(bytes)
+`consume()` will shift bytes *off the start of the list*. The number of bytes consumed doesn't need to line up with the sizes of the internal Buffers; initial offsets will be calculated accordingly in order to give you a consistent view of the data.
+
+--------------------------------------------------------
+
+### bl.toString([encoding, [ start, [ end ]]])
+`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.
+
+--------------------------------------------------------
+
+### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
+
+All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.
+
+See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
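+
+As a small sketch of this (the byte values are arbitrary), a read that starts in one internal buffer and ends in another behaves as if the data were contiguous:
+
+```js
+const { BufferList } = require('bl')
+
+const bl = new BufferList()
+bl.append(Buffer.from([0x01]))
+bl.append(Buffer.from([0x02, 0x03, 0x04]))
+
+// the two bytes at offset 0 live in different internal buffers,
+// but the read is stitched together transparently
+console.log(bl.readUInt16BE(0)) // 258, i.e. 0x0102
+console.log(bl.readUInt32BE(0)) // 16909060, i.e. 0x01020304
+```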
+
+--------------------------------------------------------
+
+### new BufferListStream([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
+**BufferListStream** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **BufferListStream** instance.
+
+The constructor takes an optional callback; if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
+
+Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
+
+`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with:
+
+```js
+const { BufferListStream } = require('bl')
+const bl = BufferListStream()
+
+// equivalent to:
+
+const { BufferListStream } = require('bl')
+const bl = new BufferListStream()
+```
+
+N.B. For backwards compatibility reasons, `BufferListStream` is the **default** export when you `require('bl')`:
+
+```js
+const { BufferListStream } = require('bl')
+// equivalent to:
+const BufferListStream = require('bl')
+```
+
+--------------------------------------------------------
+
+## Contributors
+
+**bl** is brought to you by the following hackers:
+
+ * [Rod Vagg](https://github.com/rvagg)
+ * [Matteo Collina](https://github.com/mcollina)
+ * [Jarett Cruger](https://github.com/jcrugzz)
+
+
+## License & copyright
+
+Copyright (c) 2013-2019 bl contributors (listed above).
+
+bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/bl.js b/sdklab/meantimerecovery/aedes/node_modules/bl/bl.js new file mode 100644 index 000000000..40228f879 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/bl.js @@ -0,0 +1,84 @@ +'use strict' + +const DuplexStream = require('readable-stream').Duplex +const inherits = require('inherits') +const BufferList = require('./BufferList') + +function BufferListStream (callback) { + if (!(this instanceof BufferListStream)) { + return new BufferListStream(callback) + } + + if (typeof callback === 'function') { + this._callback = callback + + const piper = function piper (err) { + if (this._callback) { + this._callback(err) + this._callback = null + } + }.bind(this) + + this.on('pipe', function onPipe (src) { + src.on('error', piper) + }) + this.on('unpipe', function onUnpipe (src) { + src.removeListener('error', piper) + }) + + callback = null + } + + BufferList._init.call(this, callback) + DuplexStream.call(this) +} + +inherits(BufferListStream, DuplexStream) +Object.assign(BufferListStream.prototype, BufferList.prototype) + +BufferListStream.prototype._new = function _new (callback) { + return new BufferListStream(callback) +} + +BufferListStream.prototype._write = function _write (buf, encoding, callback) { + this._appendBuffer(buf) + + if (typeof callback === 'function') { + callback() + } +} + +BufferListStream.prototype._read = function _read (size) { + if (!this.length) { + return this.push(null) + } + + size = Math.min(size, this.length) + this.push(this.slice(0, size)) + this.consume(size) +} + +BufferListStream.prototype.end = function end (chunk) { + DuplexStream.prototype.end.call(this, chunk) + + if (this._callback) { + this._callback(null, this.slice()) + this._callback = null + } +} + +BufferListStream.prototype._destroy = function _destroy (err, cb) { + this._bufs.length = 0 + this.length = 0 + cb(err) +} + +BufferListStream.prototype._isBufferList = function _isBufferList (b) { + return b instanceof BufferListStream || b instanceof BufferList || BufferListStream.isBufferList(b) +} + +BufferListStream.isBufferList = BufferList.isBufferList + +module.exports = BufferListStream +module.exports.BufferListStream = BufferListStream +module.exports.BufferList = BufferList diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/package.json b/sdklab/meantimerecovery/aedes/node_modules/bl/package.json new file mode 100644 index 000000000..6c2dcc334 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/package.json @@ -0,0 +1,65 @@ +{ + "_from": "bl@^4.0.2", + "_id": "bl@4.1.0", + "_inBundle": false, + "_integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "_location": "/bl", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "bl@^4.0.2", + "name": "bl", + "escapedName": "bl", + "rawSpec": "^4.0.2", + "saveSpec": null, + "fetchSpec": "^4.0.2" + }, + "_requiredBy": [ + "/mqtt-packet" + ], + "_resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "_shasum": "451535264182bec2fbbc83a62ab98cf11d9f7b3a", + "_spec": "bl@^4.0.2", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\mqtt-packet", + "authors": [ + "Rod Vagg (https://github.com/rvagg)", + "Matteo Collina (https://github.com/mcollina)", + "Jarett Cruger (https://github.com/jcrugzz)" + ], + "bugs": { + "url": 
"https://github.com/rvagg/bl/issues" + }, + "bundleDependencies": false, + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + }, + "deprecated": false, + "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!", + "devDependencies": { + "faucet": "~0.0.1", + "standard": "^14.3.0", + "tape": "^4.11.0" + }, + "homepage": "https://github.com/rvagg/bl", + "keywords": [ + "buffer", + "buffers", + "stream", + "awesomesauce" + ], + "license": "MIT", + "main": "bl.js", + "name": "bl", + "repository": { + "type": "git", + "url": "git+https://github.com/rvagg/bl.git" + }, + "scripts": { + "lint": "standard *.js test/*.js", + "test": "npm run lint && node test/test.js | faucet" + }, + "version": "4.1.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/test/convert.js b/sdklab/meantimerecovery/aedes/node_modules/bl/test/convert.js new file mode 100644 index 000000000..9f3e23599 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/test/convert.js @@ -0,0 +1,21 @@ +'use strict' + +const tape = require('tape') +const { BufferList, BufferListStream } = require('../') +const { Buffer } = require('buffer') + +tape('convert from BufferList to BufferListStream', (t) => { + const data = Buffer.from(`TEST-${Date.now()}`) + const bl = new BufferList(data) + const bls = new BufferListStream(bl) + t.ok(bl.slice().equals(bls.slice())) + t.end() +}) + +tape('convert from BufferListStream to BufferList', (t) => { + const data = Buffer.from(`TEST-${Date.now()}`) + const bls = new BufferListStream(data) + const bl = new BufferList(bls) + t.ok(bl.slice().equals(bls.slice())) + t.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/test/indexOf.js b/sdklab/meantimerecovery/aedes/node_modules/bl/test/indexOf.js new file mode 100644 index 000000000..62dcb01f3 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/test/indexOf.js @@ -0,0 +1,492 @@ +'use strict' + +const tape = require('tape') +const BufferList = require('../') +const { Buffer } = require('buffer') + +tape('indexOf single byte needle', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg', '12345']) + + t.equal(bl.indexOf('e'), 4) + t.equal(bl.indexOf('e', 5), 11) + t.equal(bl.indexOf('e', 12), -1) + t.equal(bl.indexOf('5'), 18) + + t.end() +}) + +tape('indexOf multiple byte needle', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + + t.equal(bl.indexOf('ef'), 4) + t.equal(bl.indexOf('ef', 5), 11) + + t.end() +}) + +tape('indexOf multiple byte needles across buffer boundaries', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + + t.equal(bl.indexOf('fgabc'), 5) + + t.end() +}) + +tape('indexOf takes a Uint8Array search', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + const search = new Uint8Array([102, 103, 97, 98, 99]) // fgabc + + t.equal(bl.indexOf(search), 5) + + t.end() +}) + +tape('indexOf takes a buffer list search', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + const search = new BufferList('fgabc') + + t.equal(bl.indexOf(search), 5) + + t.end() +}) + +tape('indexOf a zero byte needle', (t) => { + const b = new BufferList('abcdef') + const bufEmpty = Buffer.from('') + + t.equal(b.indexOf(''), 0) + t.equal(b.indexOf('', 1), 1) + t.equal(b.indexOf('', b.length + 1), b.length) + t.equal(b.indexOf('', Infinity), b.length) + t.equal(b.indexOf(bufEmpty), 0) + t.equal(b.indexOf(bufEmpty, 1), 1) + 
t.equal(b.indexOf(bufEmpty, b.length + 1), b.length) + t.equal(b.indexOf(bufEmpty, Infinity), b.length) + + t.end() +}) + +tape('indexOf buffers smaller and larger than the needle', (t) => { + const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab']) + + t.equal(bl.indexOf('fgabc'), 5) + t.equal(bl.indexOf('fgabc', 6), 12) + t.equal(bl.indexOf('fgabc', 13), -1) + + t.end() +}) + +// only present in node 6+ +;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', (t) => { + const b = new BufferList('abcdef') + + // test latin1 encoding + t.equal( + new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) + .indexOf('d', 0, 'latin1'), + 3 + ) + t.equal( + new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) + .indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'), + 3 + ) + t.equal( + new BufferList(Buffer.from('aa\u00e8aa', 'latin1')) + .indexOf('\u00e8', 'latin1'), + 2 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'latin1')) + .indexOf('\u00e8', 'latin1'), + 0 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'latin1')) + .indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'), + 0 + ) + + // test binary encoding + t.equal( + new BufferList(Buffer.from(b.toString('binary'), 'binary')) + .indexOf('d', 0, 'binary'), + 3 + ) + t.equal( + new BufferList(Buffer.from(b.toString('binary'), 'binary')) + .indexOf(Buffer.from('d', 'binary'), 0, 'binary'), + 3 + ) + t.equal( + new BufferList(Buffer.from('aa\u00e8aa', 'binary')) + .indexOf('\u00e8', 'binary'), + 2 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'binary')) + .indexOf('\u00e8', 'binary'), + 0 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'binary')) + .indexOf(Buffer.from('\u00e8', 'binary'), 'binary'), + 0 + ) + + t.end() +}) + +tape('indexOf the entire nodejs10 buffer test suite', (t) => { + const b = new BufferList('abcdef') + const bufA = Buffer.from('a') + const bufBc = Buffer.from('bc') + const bufF = Buffer.from('f') + const bufZ = Buffer.from('z') + + const stringComparison = 'abcdef' + + t.equal(b.indexOf('a'), 0) + t.equal(b.indexOf('a', 1), -1) + t.equal(b.indexOf('a', -1), -1) + t.equal(b.indexOf('a', -4), -1) + t.equal(b.indexOf('a', -b.length), 0) + t.equal(b.indexOf('a', NaN), 0) + t.equal(b.indexOf('a', -Infinity), 0) + t.equal(b.indexOf('a', Infinity), -1) + t.equal(b.indexOf('bc'), 1) + t.equal(b.indexOf('bc', 2), -1) + t.equal(b.indexOf('bc', -1), -1) + t.equal(b.indexOf('bc', -3), -1) + t.equal(b.indexOf('bc', -5), 1) + t.equal(b.indexOf('bc', NaN), 1) + t.equal(b.indexOf('bc', -Infinity), 1) + t.equal(b.indexOf('bc', Infinity), -1) + t.equal(b.indexOf('f'), b.length - 1) + t.equal(b.indexOf('z'), -1) + + // empty search tests + t.equal(b.indexOf(bufA), 0) + t.equal(b.indexOf(bufA, 1), -1) + t.equal(b.indexOf(bufA, -1), -1) + t.equal(b.indexOf(bufA, -4), -1) + t.equal(b.indexOf(bufA, -b.length), 0) + t.equal(b.indexOf(bufA, NaN), 0) + t.equal(b.indexOf(bufA, -Infinity), 0) + t.equal(b.indexOf(bufA, Infinity), -1) + t.equal(b.indexOf(bufBc), 1) + t.equal(b.indexOf(bufBc, 2), -1) + t.equal(b.indexOf(bufBc, -1), -1) + t.equal(b.indexOf(bufBc, -3), -1) + t.equal(b.indexOf(bufBc, -5), 1) + t.equal(b.indexOf(bufBc, NaN), 1) + t.equal(b.indexOf(bufBc, -Infinity), 1) + t.equal(b.indexOf(bufBc, Infinity), -1) + t.equal(b.indexOf(bufF), b.length - 1) + t.equal(b.indexOf(bufZ), -1) + t.equal(b.indexOf(0x61), 0) + t.equal(b.indexOf(0x61, 1), -1) + t.equal(b.indexOf(0x61, -1), -1) + t.equal(b.indexOf(0x61, -4), -1) + t.equal(b.indexOf(0x61, -b.length), 0) 
+ t.equal(b.indexOf(0x61, NaN), 0) + t.equal(b.indexOf(0x61, -Infinity), 0) + t.equal(b.indexOf(0x61, Infinity), -1) + t.equal(b.indexOf(0x0), -1) + + // test offsets + t.equal(b.indexOf('d', 2), 3) + t.equal(b.indexOf('f', 5), 5) + t.equal(b.indexOf('f', -1), 5) + t.equal(b.indexOf('f', 6), -1) + + t.equal(b.indexOf(Buffer.from('d'), 2), 3) + t.equal(b.indexOf(Buffer.from('f'), 5), 5) + t.equal(b.indexOf(Buffer.from('f'), -1), 5) + t.equal(b.indexOf(Buffer.from('f'), 6), -1) + + t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1) + + // test invalid and uppercase encoding + t.equal(b.indexOf('b', 'utf8'), 1) + t.equal(b.indexOf('b', 'UTF8'), 1) + t.equal(b.indexOf('62', 'HEX'), 1) + t.throws(() => b.indexOf('bad', 'enc'), TypeError) + + // test hex encoding + t.equal( + Buffer.from(b.toString('hex'), 'hex') + .indexOf('64', 0, 'hex'), + 3 + ) + t.equal( + Buffer.from(b.toString('hex'), 'hex') + .indexOf(Buffer.from('64', 'hex'), 0, 'hex'), + 3 + ) + + // test base64 encoding + t.equal( + Buffer.from(b.toString('base64'), 'base64') + .indexOf('ZA==', 0, 'base64'), + 3 + ) + t.equal( + Buffer.from(b.toString('base64'), 'base64') + .indexOf(Buffer.from('ZA==', 'base64'), 0, 'base64'), + 3 + ) + + // test ascii encoding + t.equal( + Buffer.from(b.toString('ascii'), 'ascii') + .indexOf('d', 0, 'ascii'), + 3 + ) + t.equal( + Buffer.from(b.toString('ascii'), 'ascii') + .indexOf(Buffer.from('d', 'ascii'), 0, 'ascii'), + 3 + ) + + // test optional offset with passed encoding + t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4) + t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4) + + { + // test usc2 encoding + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') + + t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2')) + t.equal(6, twoByteString.indexOf('\u03a3', -4, 'ucs2')) + t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2')) + t.equal(4, twoByteString.indexOf( + Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2')) + t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2')) + } + + const mixedByteStringUcs2 = + Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2') + + t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2')) + t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2')) + t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2')) + + t.equal( + 6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2')) + t.equal( + 10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2')) + t.equal( + -1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2')) + + { + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') + + // Test single char pattern + t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2')) + let index = twoByteString.indexOf('\u0391', 0, 'ucs2') + t.equal(2, index, `Alpha - at index ${index}`) + index = twoByteString.indexOf('\u03a3', 0, 'ucs2') + t.equal(4, index, `First Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3', 6, 'ucs2') + t.equal(6, index, `Second Sigma - at index ${index}`) + index = twoByteString.indexOf('\u0395', 0, 'ucs2') + t.equal(8, index, `Epsilon - at index ${index}`) + index = twoByteString.indexOf('\u0392', 0, 'ucs2') + t.equal(-1, index, `Not beta - at index ${index}`) + + // Test multi-char pattern + index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2') + t.equal(0, index, `Lambda Alpha - at index ${index}`) + index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2') + t.equal(2, index, `Alpha Sigma - at index 
${index}`) + index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2') + t.equal(4, index, `Sigma Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2') + t.equal(6, index, `Sigma Epsilon - at index ${index}`) + } + + const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395') + + t.equal(5, mixedByteStringUtf8.indexOf('bc')) + t.equal(5, mixedByteStringUtf8.indexOf('bc', 5)) + t.equal(5, mixedByteStringUtf8.indexOf('bc', -8)) + t.equal(7, mixedByteStringUtf8.indexOf('\u03a3')) + t.equal(-1, mixedByteStringUtf8.indexOf('\u0396')) + + // Test complex string indexOf algorithms. Only trigger for long strings. + // Long string that isn't a simple repeat of a shorter string. + let longString = 'A' + for (let i = 66; i < 76; i++) { // from 'B' to 'K' + longString = longString + String.fromCharCode(i) + longString + } + + const longBufferString = Buffer.from(longString) + + // pattern of 15 chars, repeated every 16 chars in long + let pattern = 'ABACABADABACABA' + for (let i = 0; i < longBufferString.length - pattern.length; i += 7) { + const index = longBufferString.indexOf(pattern, i) + t.equal((i + 15) & ~0xf, index, + `Long ABACABA...-string at index ${i}`) + } + + let index = longBufferString.indexOf('AJABACA') + t.equal(510, index, `Long AJABACA, First J - at index ${index}`) + index = longBufferString.indexOf('AJABACA', 511) + t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`) + + pattern = 'JABACABADABACABA' + index = longBufferString.indexOf(pattern) + t.equal(511, index, `Long JABACABA..., First J - at index ${index}`) + index = longBufferString.indexOf(pattern, 512) + t.equal( + 1535, index, `Long JABACABA..., Second J - at index ${index}`) + + // Search for a non-ASCII string in a pure ASCII string. + const asciiString = Buffer.from( + 'somethingnotatallsinisterwhichalsoworks') + t.equal(-1, asciiString.indexOf('\x2061')) + t.equal(3, asciiString.indexOf('eth', 0)) + + // Search in string containing many non-ASCII chars. + const allCodePoints = [] + for (let i = 0; i < 65536; i++) { + allCodePoints[i] = i + } + + const allCharsString = String.fromCharCode.apply(String, allCodePoints) + const allCharsBufferUtf8 = Buffer.from(allCharsString) + const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2') + + // Search for string long enough to trigger complex search with ASCII pattern + // and UC16 subject. + t.equal(-1, allCharsBufferUtf8.indexOf('notfound')) + t.equal(-1, allCharsBufferUcs2.indexOf('notfound')) + + // Needle is longer than haystack, but only because it's encoded as UTF-16 + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1) + + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0) + t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1) + + // Haystack has odd length, but the needle is UCS2. + t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1) + + { + // Find substrings in Utf8. + const lengths = [1, 3, 15] // Single char, simple and complex. 
+ const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b] + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { + for (let i = 0; i < indices.length; i++) { + const index = indices[i] + let length = lengths[lengthIndex] + + if (index + length > 0x7F) { + length = 2 * length + } + + if (index + length > 0x7FF) { + length = 3 * length + } + + if (index + length > 0xFFFF) { + length = 4 * length + } + + const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length) + t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8)) + + const patternStringUtf8 = patternBufferUtf8.toString() + t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8)) + } + } + } + + { + // Find substrings in Usc2. + const lengths = [2, 4, 16] // Single char, simple and complex. + const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0] + + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { + for (let i = 0; i < indices.length; i++) { + const index = indices[i] * 2 + const length = lengths[lengthIndex] + + const patternBufferUcs2 = + allCharsBufferUcs2.slice(index, index + length) + t.equal( + index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2')) + + const patternStringUcs2 = patternBufferUcs2.toString('ucs2') + t.equal( + index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2')) + } + } + } + + [ + () => {}, + {}, + [] + ].forEach((val) => { + t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`) + }) + + // Test weird offset arguments. + // The following offsets coerce to NaN or 0, searching the whole Buffer + t.equal(b.indexOf('b', undefined), 1) + t.equal(b.indexOf('b', {}), 1) + t.equal(b.indexOf('b', 0), 1) + t.equal(b.indexOf('b', null), 1) + t.equal(b.indexOf('b', []), 1) + + // The following offset coerces to 2, in other words +[2] === 2 + t.equal(b.indexOf('b', [2]), -1) + + // Behavior should match String.indexOf() + t.equal( + b.indexOf('b', undefined), + stringComparison.indexOf('b', undefined)) + t.equal( + b.indexOf('b', {}), + stringComparison.indexOf('b', {})) + t.equal( + b.indexOf('b', 0), + stringComparison.indexOf('b', 0)) + t.equal( + b.indexOf('b', null), + stringComparison.indexOf('b', null)) + t.equal( + b.indexOf('b', []), + stringComparison.indexOf('b', [])) + t.equal( + b.indexOf('b', [2]), + stringComparison.indexOf('b', [2])) + + // test truncation of Number arguments to uint8 + { + const buf = Buffer.from('this is a test') + + t.equal(buf.indexOf(0x6973), 3) + t.equal(buf.indexOf(0x697320), 4) + t.equal(buf.indexOf(0x69732069), 2) + t.equal(buf.indexOf(0x697374657374), 0) + t.equal(buf.indexOf(0x69737374), 0) + t.equal(buf.indexOf(0x69737465), 11) + t.equal(buf.indexOf(0x69737465), 11) + t.equal(buf.indexOf(-140), 0) + t.equal(buf.indexOf(-152), 1) + t.equal(buf.indexOf(0xff), -1) + t.equal(buf.indexOf(0xffff), -1) + } + + // Test that Uint8Array arguments are okay. 
+ { + const needle = new Uint8Array([0x66, 0x6f, 0x6f]) + const haystack = new BufferList(Buffer.from('a foo b foo')) + t.equal(haystack.indexOf(needle), 2) + } + + t.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/test/isBufferList.js b/sdklab/meantimerecovery/aedes/node_modules/bl/test/isBufferList.js new file mode 100644 index 000000000..9d895d59b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/test/isBufferList.js @@ -0,0 +1,32 @@ +'use strict' + +const tape = require('tape') +const { BufferList, BufferListStream } = require('../') +const { Buffer } = require('buffer') + +tape('isBufferList positives', (t) => { + t.ok(BufferList.isBufferList(new BufferList())) + t.ok(BufferList.isBufferList(new BufferListStream())) + + t.end() +}) + +tape('isBufferList negatives', (t) => { + const types = [ + null, + undefined, + NaN, + true, + false, + {}, + [], + Buffer.alloc(0), + [Buffer.alloc(0)] + ] + + for (const obj of types) { + t.notOk(BufferList.isBufferList(obj)) + } + + t.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/bl/test/test.js b/sdklab/meantimerecovery/aedes/node_modules/bl/test/test.js new file mode 100644 index 000000000..e523d0c3f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bl/test/test.js @@ -0,0 +1,869 @@ +'use strict' + +const tape = require('tape') +const crypto = require('crypto') +const fs = require('fs') +const path = require('path') +const BufferList = require('../') +const { Buffer } = require('buffer') + +const encodings = + ('hex utf8 utf-8 ascii binary base64' + + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ') + +require('./indexOf') +require('./isBufferList') +require('./convert') + +tape('single bytes from single buffer', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + t.equal(bl.get(-1), undefined) + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), undefined) + + t.end() +}) + +tape('single bytes from multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), 101) + t.equal(bl.get(5), 102) + t.equal(bl.get(6), 103) + t.equal(bl.get(7), 104) + t.equal(bl.get(8), 105) + t.equal(bl.get(9), 106) + + t.end() +}) + +tape('multi bytes from single buffer', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + + t.equal(bl.slice(0, 4).toString('ascii'), 'abcd') + t.equal(bl.slice(0, 3).toString('ascii'), 'abc') + t.equal(bl.slice(1, 4).toString('ascii'), 'bcd') + t.equal(bl.slice(-4, -1).toString('ascii'), 'abc') + + t.end() +}) + +tape('multi bytes from single buffer (negative indexes)', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('buffer')) + + t.equal(bl.length, 6) + + t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe') + t.equal(bl.slice(-6, -2).toString('ascii'), 'buff') + t.equal(bl.slice(-5, -2).toString('ascii'), 'uff') + + t.end() +}) + +tape('multiple bytes from multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + 
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + t.equal(bl.slice(-7, -4).toString('ascii'), 'def') + + t.end() +}) + +tape('multiple bytes from multiple buffer lists', function (t) { + const bl = new BufferList() + + bl.append(new BufferList([Buffer.from('abcd'), Buffer.from('efg')])) + bl.append(new BufferList([Buffer.from('hi'), Buffer.from('j')])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +// same data as previous test, just using nested constructors +tape('multiple bytes from crazy nested buffer lists', function (t) { + const bl = new BufferList() + + bl.append(new BufferList([ + new BufferList([ + new BufferList(Buffer.from('abc')), + Buffer.from('d'), + new BufferList(Buffer.from('efg')) + ]), + new BufferList([Buffer.from('hi')]), + new BufferList(Buffer.from('j')) + ])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +tape('append accepts arrays of Buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abc')) + bl.append([Buffer.from('def')]) + bl.append([Buffer.from('ghi'), Buffer.from('jkl')]) + bl.append([Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz')]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append accepts arrays of Uint8Arrays', function (t) { + const bl = new BufferList() + + bl.append(new Uint8Array([97, 98, 99])) + bl.append([Uint8Array.from([100, 101, 102])]) + bl.append([new Uint8Array([103, 104, 105]), new Uint8Array([106, 107, 108])]) + bl.append([new Uint8Array([109, 110, 111, 112]), new Uint8Array([113, 114, 115, 116, 117]), new Uint8Array([118, 119, 120, 121, 122])]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append accepts arrays of BufferLists', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abc')) + bl.append([new BufferList('def')]) + bl.append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')])) + bl.append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append chainable', function (t) { + const bl = new BufferList() + + t.ok(bl.append(Buffer.from('abcd')) === bl) + t.ok(bl.append([Buffer.from('abcd')]) === bl) + t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl) + t.ok(bl.append([new BufferList(Buffer.from('abcd'))]) === bl) + + t.end() +}) + +tape('append chainable (test results)', function (t) { + const bl = new BufferList('abc') + .append([new BufferList('def')]) + .append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')])) + .append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), 
Buffer.from('vwxyz')])]) + + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('consuming from multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + bl.consume(3) + t.equal(bl.length, 7) + t.equal(bl.slice(0, 7).toString('ascii'), 'defghij') + + bl.consume(2) + t.equal(bl.length, 5) + t.equal(bl.slice(0, 5).toString('ascii'), 'fghij') + + bl.consume(1) + t.equal(bl.length, 4) + t.equal(bl.slice(0, 4).toString('ascii'), 'ghij') + + bl.consume(1) + t.equal(bl.length, 3) + t.equal(bl.slice(0, 3).toString('ascii'), 'hij') + + bl.consume(2) + t.equal(bl.length, 1) + t.equal(bl.slice(0, 1).toString('ascii'), 'j') + + t.end() +}) + +tape('complete consumption', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('a')) + bl.append(Buffer.from('b')) + + bl.consume(2) + + t.equal(bl.length, 0) + t.equal(bl._bufs.length, 0) + + t.end() +}) + +tape('test readUInt8 / readInt8', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt8(), 0x1) + t.equal(bl.readUInt8(2), 0x3) + t.equal(bl.readInt8(2), 0x3) + t.equal(bl.readUInt8(3), 0x4) + t.equal(bl.readInt8(3), 0x4) + t.equal(bl.readUInt8(4), 0x23) + t.equal(bl.readInt8(4), 0x23) + t.equal(bl.readUInt8(5), 0x42) + t.equal(bl.readInt8(5), 0x42) + + t.end() +}) + +tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt16BE(), 0x0100) + t.equal(bl.readUInt16LE(), 0x0001) + t.equal(bl.readUInt16BE(2), 0x0304) + t.equal(bl.readUInt16LE(2), 0x0403) + t.equal(bl.readInt16BE(2), 0x0304) + t.equal(bl.readInt16LE(2), 0x0403) + t.equal(bl.readUInt16BE(3), 0x0423) + t.equal(bl.readUInt16LE(3), 0x2304) + t.equal(bl.readInt16BE(3), 0x0423) + t.equal(bl.readInt16LE(3), 0x2304) + t.equal(bl.readUInt16BE(4), 0x2342) + t.equal(bl.readUInt16LE(4), 0x4223) + t.equal(bl.readInt16BE(4), 0x2342) + t.equal(bl.readInt16LE(4), 0x4223) + + t.end() +}) + +tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt32BE(), 0x01000304) + t.equal(bl.readUInt32LE(), 0x04030001) + t.equal(bl.readUInt32BE(2), 0x03042342) + t.equal(bl.readUInt32LE(2), 0x42230403) + t.equal(bl.readInt32BE(2), 0x03042342) + t.equal(bl.readInt32LE(2), 0x42230403) + + t.end() +}) + +tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf2[0] = 0x2 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 
+ buf3[1] = 0x42 + buf3[2] = 0x61 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUIntBE(1, 1), 0x02) + t.equal(bl.readUIntBE(1, 2), 0x0203) + t.equal(bl.readUIntBE(1, 3), 0x020304) + t.equal(bl.readUIntBE(1, 4), 0x02030423) + t.equal(bl.readUIntBE(1, 5), 0x0203042342) + t.equal(bl.readUIntBE(1, 6), 0x020304234261) + t.equal(bl.readUIntLE(1, 1), 0x02) + t.equal(bl.readUIntLE(1, 2), 0x0302) + t.equal(bl.readUIntLE(1, 3), 0x040302) + t.equal(bl.readUIntLE(1, 4), 0x23040302) + t.equal(bl.readUIntLE(1, 5), 0x4223040302) + t.equal(bl.readUIntLE(1, 6), 0x614223040302) + t.equal(bl.readIntBE(1, 1), 0x02) + t.equal(bl.readIntBE(1, 2), 0x0203) + t.equal(bl.readIntBE(1, 3), 0x020304) + t.equal(bl.readIntBE(1, 4), 0x02030423) + t.equal(bl.readIntBE(1, 5), 0x0203042342) + t.equal(bl.readIntBE(1, 6), 0x020304234261) + t.equal(bl.readIntLE(1, 1), 0x02) + t.equal(bl.readIntLE(1, 2), 0x0302) + t.equal(bl.readIntLE(1, 3), 0x040302) + t.equal(bl.readIntLE(1, 4), 0x23040302) + t.equal(bl.readIntLE(1, 5), 0x4223040302) + t.equal(bl.readIntLE(1, 6), 0x614223040302) + + t.end() +}) + +tape('test readFloatLE / readFloatBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x01 + buf2[1] = 0x00 + buf2[2] = 0x00 + buf3[0] = 0x80 + buf3[1] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + const canonical = Buffer.concat([buf1, buf2, buf3]) + t.equal(bl.readFloatLE(), canonical.readFloatLE()) + t.equal(bl.readFloatBE(), canonical.readFloatBE()) + t.equal(bl.readFloatLE(2), canonical.readFloatLE(2)) + t.equal(bl.readFloatBE(2), canonical.readFloatBE(2)) + + t.end() +}) + +tape('test readDoubleLE / readDoubleBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(10) + const bl = new BufferList() + + buf1[0] = 0x01 + buf2[1] = 0x55 + buf2[2] = 0x55 + buf3[0] = 0x55 + buf3[1] = 0x55 + buf3[2] = 0x55 + buf3[3] = 0x55 + buf3[4] = 0xd5 + buf3[5] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + const canonical = Buffer.concat([buf1, buf2, buf3]) + t.equal(bl.readDoubleBE(), canonical.readDoubleBE()) + t.equal(bl.readDoubleLE(), canonical.readDoubleLE()) + t.equal(bl.readDoubleBE(2), canonical.readDoubleBE(2)) + t.equal(bl.readDoubleLE(2), canonical.readDoubleLE(2)) + + t.end() +}) + +tape('test toString', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.toString('ascii', 0, 10), 'abcdefghij') + t.equal(bl.toString('ascii', 3, 10), 'defghij') + t.equal(bl.toString('ascii', 3, 6), 'def') + t.equal(bl.toString('ascii', 3, 8), 'defgh') + t.equal(bl.toString('ascii', 5, 10), 'fghij') + + t.end() +}) + +tape('test toString encoding', function (t) { + const bl = new BufferList() + const b = Buffer.from('abcdefghij\xff\x00') + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + bl.append(Buffer.from('\xff\x00')) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc), enc) + }) + + t.end() +}) + +tape('uninitialized memory', function (t) { + const secret = crypto.randomBytes(256) + for (let i = 0; i < 1e6; i++) { + const clone = Buffer.from(secret) + const bl = new BufferList() + bl.append(Buffer.from('a')) + bl.consume(-1024) + const buf = bl.slice(1) + if (buf.indexOf(clone) 
!== -1) { + t.fail(`Match (at ${i})`) + break + } + } + t.end() +}) + +!process.browser && tape('test stream', function (t) { + const random = crypto.randomBytes(65534) + + const bl = new BufferList((err, buf) => { + t.ok(Buffer.isBuffer(buf)) + t.ok(err === null) + t.ok(random.equals(bl.slice())) + t.ok(random.equals(buf.slice())) + + bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat')) + .on('close', function () { + const rndhash = crypto.createHash('md5').update(random).digest('hex') + const md5sum = crypto.createHash('md5') + const s = fs.createReadStream('/tmp/bl_test_rnd_out.dat') + + s.on('data', md5sum.update.bind(md5sum)) + s.on('end', function () { + t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!') + t.end() + }) + }) + }) + + fs.writeFileSync('/tmp/bl_test_rnd.dat', random) + fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl) +}) + +tape('instantiation with Buffer', function (t) { + const buf = crypto.randomBytes(1024) + const buf2 = crypto.randomBytes(1024) + let b = BufferList(buf) + + t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer') + b = BufferList([buf, buf2]) + t.equal(b.slice().toString('hex'), Buffer.concat([buf, buf2]).toString('hex'), 'same buffer') + + t.end() +}) + +tape('test String appendage', function (t) { + const bl = new BufferList() + const b = Buffer.from('abcdefghij\xff\x00') + + bl.append('abcd') + bl.append('efg') + bl.append('hi') + bl.append('j') + bl.append('\xff\x00') + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('test Number appendage', function (t) { + const bl = new BufferList() + const b = Buffer.from('1234567890') + + bl.append(1234) + bl.append(567) + bl.append(89) + bl.append(0) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('write nothing, should get empty buffer', function (t) { + t.plan(3) + BufferList(function (err, data) { + t.notOk(err, 'no error') + t.ok(Buffer.isBuffer(data), 'got a buffer') + t.equal(0, data.length, 'got a zero-length buffer') + t.end() + }).end() +}) + +tape('unicode string', function (t) { + t.plan(2) + + const inp1 = '\u2600' + const inp2 = '\u2603' + const exp = inp1 + ' and ' + inp2 + const bl = BufferList() + + bl.write(inp1) + bl.write(' and ') + bl.write(inp2) + t.equal(exp, bl.toString()) + t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex')) +}) + +tape('should emit finish', function (t) { + const source = BufferList() + const dest = BufferList() + + source.write('hello') + source.pipe(dest) + + dest.on('finish', function () { + t.equal(dest.toString('utf8'), 'hello') + t.end() + }) +}) + +tape('basic copy', function (t) { + const buf = crypto.randomBytes(1024) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy after many appends', function (t) { + const buf = crypto.randomBytes(512) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.append(buf) + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy at a precise position', function (t) { + const buf = crypto.randomBytes(1004) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.copy(buf2, 20) + t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy starting from a precise location', function (t) { 
+ const buf = crypto.randomBytes(10) + const buf2 = Buffer.alloc(5) + const b = BufferList(buf) + + b.copy(buf2, 0, 5) + t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy in an interval', function (t) { + const rnd = crypto.randomBytes(10) + const b = BufferList(rnd) // put the random bytes there + const actual = Buffer.alloc(3) + const expected = Buffer.alloc(3) + + rnd.copy(expected, 0, 5, 8) + b.copy(actual, 0, 5, 8) + + t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy an interval between two buffers', function (t) { + const buf = crypto.randomBytes(10) + const buf2 = Buffer.alloc(10) + const b = BufferList(buf) + + b.append(buf) + b.copy(buf2, 0, 5, 15) + + t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('shallow slice across buffer boundaries', function (t) { + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh') + + t.end() +}) + +tape('shallow slice within single buffer', function (t) { + t.plan(2) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(5, 10).toString(), 'Secon') + t.equal(bl.shallowSlice(7, 10).toString(), 'con') + + t.end() +}) + +tape('shallow slice single buffer', function (t) { + t.plan(3) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(0, 5).toString(), 'First') + t.equal(bl.shallowSlice(5, 11).toString(), 'Second') + t.equal(bl.shallowSlice(11, 16).toString(), 'Third') +}) + +tape('shallow slice with negative or omitted indices', function (t) { + t.plan(4) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice().toString(), 'FirstSecondThird') + t.equal(bl.shallowSlice(5).toString(), 'SecondThird') + t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh') + t.equal(bl.shallowSlice(-8).toString(), 'ondThird') +}) + +tape('shallow slice does not make a copy', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(5, -3) + + buffers[1].fill('h') + buffers[2].fill('h') + + t.equal(bl.toString(), 'hhhhhhhh') +}) + +tape('shallow slice with 0 length', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(0, 0) + + t.equal(bl.length, 0) +}) + +tape('shallow slice with 0 length from middle', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(10, 10) + + t.equal(bl.length, 0) +}) + +tape('duplicate', function (t) { + t.plan(2) + + const bl = new BufferList('abcdefghij\xff\x00') + const dup = bl.duplicate() + + t.equal(bl.prototype, dup.prototype) + t.equal(bl.toString('hex'), dup.toString('hex')) +}) + +tape('destroy no pipe', function (t) { + t.plan(2) + + const bl = new BufferList('alsdkfja;lsdkfja;lsdk') + + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +tape('destroy with error', function (t) { + t.plan(3) + + const bl = new BufferList('alsdkfja;lsdkfja;lsdk') + const err = new Error('kaboom') + + bl.destroy(err) + bl.on('error', function (_err) { + t.equal(_err, err) + }) + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +!process.browser && tape('destroy with pipe 
before read end', function (t) { + t.plan(2) + + const bl = new BufferList() + fs.createReadStream(path.join(__dirname, '/test.js')) + .pipe(bl) + + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +!process.browser && tape('destroy with pipe before read end with race', function (t) { + t.plan(2) + + const bl = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .pipe(bl) + + setTimeout(function () { + bl.destroy() + setTimeout(function () { + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + }, 500) + }, 500) +}) + +!process.browser && tape('destroy with pipe after read end', function (t) { + t.plan(2) + + const bl = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + } +}) + +!process.browser && tape('destroy with pipe while writing to a destination', function (t) { + t.plan(4) + + const bl = new BufferList() + const ds = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.pipe(ds) + + setTimeout(function () { + bl.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + + ds.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + }, 100) + } +}) + +!process.browser && tape('handle error', function (t) { + t.plan(2) + + fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) { + t.ok(err instanceof Error, 'has error') + t.notOk(data, 'no data') + })) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/buffer/AUTHORS.md b/sdklab/meantimerecovery/aedes/node_modules/buffer/AUTHORS.md new file mode 100644 index 000000000..22eb17129 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/buffer/AUTHORS.md @@ -0,0 +1,70 @@ +# Authors + +#### Ordered by first contribution. 
+ +- Romain Beauxis (toots@rastageeks.org) +- Tobias Koppers (tobias.koppers@googlemail.com) +- Janus (ysangkok@gmail.com) +- Rainer Dreyer (rdrey1@gmail.com) +- Tõnis Tiigi (tonistiigi@gmail.com) +- James Halliday (mail@substack.net) +- Michael Williamson (mike@zwobble.org) +- elliottcable (github@elliottcable.name) +- rafael (rvalle@livelens.net) +- Andrew Kelley (superjoe30@gmail.com) +- Andreas Madsen (amwebdk@gmail.com) +- Mike Brevoort (mike.brevoort@pearson.com) +- Brian White (mscdex@mscdex.net) +- Feross Aboukhadijeh (feross@feross.org) +- Ruben Verborgh (ruben@verborgh.org) +- eliang (eliang.cs@gmail.com) +- Jesse Tane (jesse.tane@gmail.com) +- Alfonso Boza (alfonso@cloud.com) +- Mathias Buus (mathiasbuus@gmail.com) +- Devon Govett (devongovett@gmail.com) +- Daniel Cousens (github@dcousens.com) +- Joseph Dykstra (josephdykstra@gmail.com) +- Parsha Pourkhomami (parshap+git@gmail.com) +- Damjan Košir (damjan.kosir@gmail.com) +- daverayment (dave.rayment@gmail.com) +- kawanet (u-suke@kawa.net) +- Linus Unnebäck (linus@folkdatorn.se) +- Nolan Lawson (nolan.lawson@gmail.com) +- Calvin Metcalf (calvin.metcalf@gmail.com) +- Koki Takahashi (hakatasiloving@gmail.com) +- Guy Bedford (guybedford@gmail.com) +- Jan Schär (jscissr@gmail.com) +- RaulTsc (tomescu.raul@gmail.com) +- Matthieu Monsch (monsch@alum.mit.edu) +- Dan Ehrenberg (littledan@chromium.org) +- Kirill Fomichev (fanatid@ya.ru) +- Yusuke Kawasaki (u-suke@kawa.net) +- DC (dcposch@dcpos.ch) +- John-David Dalton (john.david.dalton@gmail.com) +- adventure-yunfei (adventure030@gmail.com) +- Emil Bay (github@tixz.dk) +- Sam Sudar (sudar.sam@gmail.com) +- Volker Mische (volker.mische@gmail.com) +- David Walton (support@geekstocks.com) +- Сковорода Никита Андреевич (chalkerx@gmail.com) +- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com) +- ukstv (sergey.ukustov@machinomy.com) +- Renée Kooi (renee@kooi.me) +- ranbochen (ranbochen@qq.com) +- Vladimir Borovik (bobahbdb@gmail.com) +- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com) +- kumavis (aaron@kumavis.me) +- Sergey Ukustov (sergey.ukustov@machinomy.com) +- Fei Liu (liu.feiwood@gmail.com) +- Blaine Bublitz (blaine.bublitz@gmail.com) +- clement (clement@seald.io) +- Koushik Dutta (koushd@gmail.com) +- Jordan Harband (ljharb@gmail.com) +- Niklas Mischkulnig (mischnic@users.noreply.github.com) +- Nikolai Vavilov (vvnicholas@gmail.com) +- Fedor Nezhivoi (gyzerok@users.noreply.github.com) +- Peter Newman (peternewman@users.noreply.github.com) +- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com) +- jkkang (jkkang@smartauth.kr) + +#### Generated by bin/update-authors.sh. diff --git a/sdklab/meantimerecovery/aedes/node_modules/buffer/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/buffer/LICENSE new file mode 100644 index 000000000..d6bf75dcf --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh, and other contributors. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/buffer/README.md b/sdklab/meantimerecovery/aedes/node_modules/buffer/README.md new file mode 100644 index 000000000..9a23d7cfa --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/buffer/README.md @@ -0,0 +1,410 @@ +# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg +[travis-url]: https://travis-ci.org/feross/buffer +[npm-image]: https://img.shields.io/npm/v/buffer.svg +[npm-url]: https://npmjs.org/package/buffer +[downloads-image]: https://img.shields.io/npm/dm/buffer.svg +[downloads-url]: https://npmjs.org/package/buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### The buffer module from [node.js](https://nodejs.org/), for the browser. + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg +[saucelabs-url]: https://saucelabs.com/u/buffer + +With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module. + +The goal is to provide an API that is 100% identical to +[node's Buffer API](https://nodejs.org/api/buffer.html). Read the +[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, +instance methods, and class methods that are supported. + +## features + +- Manipulate binary data like a boss, in all browsers! +- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`) +- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments) +- Excellent browser support (Chrome, Firefox, Edge, Safari 9+, IE 11, iOS 9+, Android, etc.) +- Preserves Node API exactly, with one minor difference (see below) +- Square-bracket `buf[4]` notation works! +- Does not modify any browser prototypes or put anything on `window` +- Comprehensive test suite (including all buffer tests from node.js core) + +## install + +To use this module directly (without browserify), install it: + +```bash +npm install buffer +``` + +This module was previously called **native-buffer-browserify**, but please use **buffer** +from now on. + +If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer). 
+ +## usage + +The module's API is identical to node's `Buffer` API. Read the +[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, +instance methods, and class methods that are supported. + +As mentioned above, `require('buffer')` or use the `Buffer` global with +[browserify](http://browserify.org) and this module will automatically be included +in your bundle. Almost any npm module will work in the browser, even if it assumes that +the node `Buffer` API will be available. + +To depend on this module explicitly (without browserify), require it like this: + +```js +var Buffer = require('buffer/').Buffer // note: the trailing slash is important! +``` + +To require this module explicitly, use `require('buffer/')` which tells the node.js module +lookup algorithm (also used by browserify) to use the **npm module** named `buffer` +instead of the **node.js core** module named `buffer`! + + +## how does it work? + +The Buffer constructor returns instances of `Uint8Array` that have their prototype +changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`, +so the returned instances will have all the node `Buffer` methods and the +`Uint8Array` methods. Square bracket notation works as expected -- it returns a +single octet. + +The `Uint8Array` prototype remains unmodified. + + +## tracking the latest node api + +This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer +API is considered **stable** in the +[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index), +so it is unlikely that there will ever be breaking changes. +Nonetheless, when/if the Buffer API changes in node, this module's API will change +accordingly. + +## related packages + +- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer +- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer +- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package + +## conversion packages + +### convert typed array to buffer + +Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast. + +### convert buffer to typed array + +`Buffer` is a subclass of `Uint8Array` (which is a typed array). So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`. + +### convert blob to buffer + +Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`. + +### convert buffer to blob + +To convert a `Buffer` to a `Blob`, use the `Blob` constructor: + +```js +var blob = new Blob([ buffer ]) +``` + +Optionally, specify a mimetype: + +```js +var blob = new Blob([ buffer ], { type: 'text/html' }) +``` + +### convert arraybuffer to buffer + +To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. Does not perform a copy, so it's super fast. + +```js +var buffer = Buffer.from(arrayBuffer) +``` + +### convert buffer to arraybuffer + +To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects): + +```js +var arrayBuffer = buffer.buffer.slice( + buffer.byteOffset, buffer.byteOffset + buffer.byteLength +) +``` + +Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module. 
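+
+Putting the last two conversions together, here is a minimal sketch of a full `Buffer` → `ArrayBuffer` → `Buffer` round trip. It uses only the calls shown above; the variable names are illustrative, not part of the API.
+
+```js
+// Buffer -> ArrayBuffer: copy out just the bytes this Buffer views,
+// since `buffer.buffer` may be a larger pooled allocation.
+var original = Buffer.from('hello world')
+var arrayBuffer = original.buffer.slice(
+  original.byteOffset, original.byteOffset + original.byteLength
+)
+
+// ArrayBuffer -> Buffer: the new Buffer shares memory with `arrayBuffer`.
+var roundTripped = Buffer.from(arrayBuffer)
+
+console.log(original.equals(roundTripped)) // true
+```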
+ +## performance + +See perf tests in `/perf`. + +`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a +sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will +always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module, +which is included to compare against. + +NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README. + +### Chrome 38 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ | +| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | | +| | | | | +| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | | +| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | | +| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | | +| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | | +| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | | +| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ | +| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | | +| | | | | +| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ | +| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | | +| | | | | +| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ | +| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | | +| | | | | +| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | | +| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | | +| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ | + + +### Firefox 33 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | | +| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ | +| | | | | +| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | | +| Uint8Array#concat | 1,255,674 ops/sec | ±8.65% | 52 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | | +| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | | +| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | | +| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | | +| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | | +| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ | +| | | | | +| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | | +| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ | +| | | | | +| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ | +| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | | +| | | | | +| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | | +| 
Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | | +| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ | + +### Safari 8 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ | +| Uint8Array#bracket-notation | 10,030,767 ops/sec | ±2.23% | 59 | | +| | | | | +| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | | +| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | | +| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | | +| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | | +| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | | +| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | | +| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ | +| | | | | +| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | | +| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ | +| | | | | +| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | | +| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ | +| | | | | +| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | | +| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | | +| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ | + + +### Node 0.11.14 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | | +| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ | +| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | | +| | | | | +| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | | +| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ | +| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | | +| | | | | +| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | | +| Uint8Array#copy(16000) | 1,348,517 ops/sec | ±0.84% | 89 | ✓ | +| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | | +| | | | | +| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | | +| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ | +| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | | +| | | | | +| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | | +| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | | +| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | | +| Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ | +| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | | +| | | | | +| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ | +| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | | +| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | | +| | | | | +| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ | +| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | | +| NodeBuffer#readFloatBE | 
92,769 ops/sec | ±0.94% | 93 | | +| | | | | +| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | | +| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | | +| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ | +| | | | | +| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | | +| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ | +| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | | +| | | | | +| BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | | +| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ | +| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | | + +### iojs 1.8.1 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | | +| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | | +| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ | +| | | | | +| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | | +| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | | +| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | | +| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | | +| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | | +| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ | +| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | | +| | | | | +| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | | +| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | | +| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | | +| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ | +| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | | +| | | | | +| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ | +| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | | +| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | | +| | | | | +| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ | +| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | | +| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | | +| | | | | +| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | | +| DataView#getUint32 | 137,592 ops/sec | ±0.64% | 90 | | +| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ | +| | | | | +| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | | +| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | | +| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | | +| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ | +| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | | +| | | | | + +## Testing the project + +First, install the project: + + npm install + +Then, to run tests in Node.js, run: + + npm run test-node + +To test locally in a browser, you can run: + + npm run test-browser-es5-local # For ES5 browsers that don't support ES6 + npm run test-browser-es6-local # For ES6 compliant browsers + +This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap). 
+ +To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run: + + npm test + +This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files. + +## JavaScript Standard Style + +This module uses [JavaScript Standard Style](https://github.com/feross/standard). + +[![JavaScript Style Guide](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) + +To test that the code conforms to the style, `npm install` and run: + + ./node_modules/.bin/standard + +## credit + +This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify). + +## Security Policies and Procedures + +The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues. + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis. diff --git a/sdklab/meantimerecovery/aedes/node_modules/buffer/index.d.ts b/sdklab/meantimerecovery/aedes/node_modules/buffer/index.d.ts new file mode 100644 index 000000000..5d1a804e5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/buffer/index.d.ts @@ -0,0 +1,186 @@ +export class Buffer extends Uint8Array { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + 
readDoubleBE(offset: number, noAssert?: boolean): number; + reverse(): this; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer | Uint8Array): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. 
+ * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initializing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/buffer/index.js b/sdklab/meantimerecovery/aedes/node_modules/buffer/index.js new file mode 100644 index 000000000..609cf3113 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/buffer/index.js @@ -0,0 +1,1817 @@ +/*! + * The buffer module from node.js, for the browser. + * + * @author Feross Aboukhadijeh + * @license MIT + */ +/* eslint-disable no-proto */ + +'use strict' + +var base64 = require('base64-js') +var ieee754 = require('ieee754') +var customInspectSymbol = + (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation + ? 
Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation + : null + +exports.Buffer = Buffer +exports.SlowBuffer = SlowBuffer +exports.INSPECT_MAX_BYTES = 50 + +var K_MAX_LENGTH = 0x7fffffff +exports.kMaxLength = K_MAX_LENGTH + +/** + * If `Buffer.TYPED_ARRAY_SUPPORT`: + * === true Use Uint8Array implementation (fastest) + * === false Print warning and recommend using `buffer` v4.x which has an Object + * implementation (most compatible, even IE6) + * + * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, + * Opera 11.6+, iOS 4.2+. + * + * We report that the browser does not support typed arrays if the are not subclassable + * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array` + * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support + * for __proto__ and has a buggy typed array implementation. + */ +Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport() + +if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' && + typeof console.error === 'function') { + console.error( + 'This browser lacks typed array (Uint8Array) support which is required by ' + + '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.' + ) +} + +function typedArraySupport () { + // Can typed array instances can be augmented? + try { + var arr = new Uint8Array(1) + var proto = { foo: function () { return 42 } } + Object.setPrototypeOf(proto, Uint8Array.prototype) + Object.setPrototypeOf(arr, proto) + return arr.foo() === 42 + } catch (e) { + return false + } +} + +Object.defineProperty(Buffer.prototype, 'parent', { + enumerable: true, + get: function () { + if (!Buffer.isBuffer(this)) return undefined + return this.buffer + } +}) + +Object.defineProperty(Buffer.prototype, 'offset', { + enumerable: true, + get: function () { + if (!Buffer.isBuffer(this)) return undefined + return this.byteOffset + } +}) + +function createBuffer (length) { + if (length > K_MAX_LENGTH) { + throw new RangeError('The value "' + length + '" is invalid for option "size"') + } + // Return an augmented `Uint8Array` instance + var buf = new Uint8Array(length) + Object.setPrototypeOf(buf, Buffer.prototype) + return buf +} + +/** + * The Buffer constructor returns instances of `Uint8Array` that have their + * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of + * `Uint8Array`, so the returned instances will have all the node `Buffer` methods + * and the `Uint8Array` methods. Square bracket notation works as expected -- it + * returns a single octet. + * + * The `Uint8Array` prototype remains unmodified. + */ + +function Buffer (arg, encodingOrOffset, length) { + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new TypeError( + 'The "string" argument must be of type string. Received type number' + ) + } + return allocUnsafe(arg) + } + return from(arg, encodingOrOffset, length) +} + +Buffer.poolSize = 8192 // not used by this implementation + +function from (value, encodingOrOffset, length) { + if (typeof value === 'string') { + return fromString(value, encodingOrOffset) + } + + if (ArrayBuffer.isView(value)) { + return fromArrayView(value) + } + + if (value == null) { + throw new TypeError( + 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + + 'or Array-like Object. 
Received type ' + (typeof value) + ) + } + + if (isInstance(value, ArrayBuffer) || + (value && isInstance(value.buffer, ArrayBuffer))) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof SharedArrayBuffer !== 'undefined' && + (isInstance(value, SharedArrayBuffer) || + (value && isInstance(value.buffer, SharedArrayBuffer)))) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof value === 'number') { + throw new TypeError( + 'The "value" argument must not be of type number. Received type number' + ) + } + + var valueOf = value.valueOf && value.valueOf() + if (valueOf != null && valueOf !== value) { + return Buffer.from(valueOf, encodingOrOffset, length) + } + + var b = fromObject(value) + if (b) return b + + if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null && + typeof value[Symbol.toPrimitive] === 'function') { + return Buffer.from( + value[Symbol.toPrimitive]('string'), encodingOrOffset, length + ) + } + + throw new TypeError( + 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + + 'or Array-like Object. Received type ' + (typeof value) + ) +} + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + **/ +Buffer.from = function (value, encodingOrOffset, length) { + return from(value, encodingOrOffset, length) +} + +// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug: +// https://github.com/feross/buffer/pull/148 +Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype) +Object.setPrototypeOf(Buffer, Uint8Array) + +function assertSize (size) { + if (typeof size !== 'number') { + throw new TypeError('"size" argument must be of type number') + } else if (size < 0) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } +} + +function alloc (size, fill, encoding) { + assertSize(size) + if (size <= 0) { + return createBuffer(size) + } + if (fill !== undefined) { + // Only pay attention to encoding if it's a string. This + // prevents accidentally sending in a number that would + // be interpreted as a start offset. + return typeof encoding === 'string' + ? createBuffer(size).fill(fill, encoding) + : createBuffer(size).fill(fill) + } + return createBuffer(size) +} + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + **/ +Buffer.alloc = function (size, fill, encoding) { + return alloc(size, fill, encoding) +} + +function allocUnsafe (size) { + assertSize(size) + return createBuffer(size < 0 ? 0 : checked(size) | 0) +} + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. + * */ +Buffer.allocUnsafe = function (size) { + return allocUnsafe(size) +} +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. 
+ */ +Buffer.allocUnsafeSlow = function (size) { + return allocUnsafe(size) +} + +function fromString (string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + + var length = byteLength(string, encoding) | 0 + var buf = createBuffer(length) + + var actual = buf.write(string, encoding) + + if (actual !== length) { + // Writing a hex string, for example, that contains invalid characters will + // cause everything after the first invalid character to be ignored. (e.g. + // 'abxxcd' will be treated as 'ab') + buf = buf.slice(0, actual) + } + + return buf +} + +function fromArrayLike (array) { + var length = array.length < 0 ? 0 : checked(array.length) | 0 + var buf = createBuffer(length) + for (var i = 0; i < length; i += 1) { + buf[i] = array[i] & 255 + } + return buf +} + +function fromArrayView (arrayView) { + if (isInstance(arrayView, Uint8Array)) { + var copy = new Uint8Array(arrayView) + return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength) + } + return fromArrayLike(arrayView) +} + +function fromArrayBuffer (array, byteOffset, length) { + if (byteOffset < 0 || array.byteLength < byteOffset) { + throw new RangeError('"offset" is outside of buffer bounds') + } + + if (array.byteLength < byteOffset + (length || 0)) { + throw new RangeError('"length" is outside of buffer bounds') + } + + var buf + if (byteOffset === undefined && length === undefined) { + buf = new Uint8Array(array) + } else if (length === undefined) { + buf = new Uint8Array(array, byteOffset) + } else { + buf = new Uint8Array(array, byteOffset, length) + } + + // Return an augmented `Uint8Array` instance + Object.setPrototypeOf(buf, Buffer.prototype) + + return buf +} + +function fromObject (obj) { + if (Buffer.isBuffer(obj)) { + var len = checked(obj.length) | 0 + var buf = createBuffer(len) + + if (buf.length === 0) { + return buf + } + + obj.copy(buf, 0, 0, len) + return buf + } + + if (obj.length !== undefined) { + if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) { + return createBuffer(0) + } + return fromArrayLike(obj) + } + + if (obj.type === 'Buffer' && Array.isArray(obj.data)) { + return fromArrayLike(obj.data) + } +} + +function checked (length) { + // Note: cannot use `length < K_MAX_LENGTH` here because that fails when + // length is NaN (which is otherwise coerced to zero.) 
+ if (length >= K_MAX_LENGTH) { + throw new RangeError('Attempt to allocate Buffer larger than maximum ' + + 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes') + } + return length | 0 +} + +function SlowBuffer (length) { + if (+length != length) { // eslint-disable-line eqeqeq + length = 0 + } + return Buffer.alloc(+length) +} + +Buffer.isBuffer = function isBuffer (b) { + return b != null && b._isBuffer === true && + b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false +} + +Buffer.compare = function compare (a, b) { + if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength) + if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength) + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + throw new TypeError( + 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array' + ) + } + + if (a === b) return 0 + + var x = a.length + var y = b.length + + for (var i = 0, len = Math.min(x, y); i < len; ++i) { + if (a[i] !== b[i]) { + x = a[i] + y = b[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +Buffer.isEncoding = function isEncoding (encoding) { + switch (String(encoding).toLowerCase()) { + case 'hex': + case 'utf8': + case 'utf-8': + case 'ascii': + case 'latin1': + case 'binary': + case 'base64': + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return true + default: + return false + } +} + +Buffer.concat = function concat (list, length) { + if (!Array.isArray(list)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } + + if (list.length === 0) { + return Buffer.alloc(0) + } + + var i + if (length === undefined) { + length = 0 + for (i = 0; i < list.length; ++i) { + length += list[i].length + } + } + + var buffer = Buffer.allocUnsafe(length) + var pos = 0 + for (i = 0; i < list.length; ++i) { + var buf = list[i] + if (isInstance(buf, Uint8Array)) { + if (pos + buf.length > buffer.length) { + Buffer.from(buf).copy(buffer, pos) + } else { + Uint8Array.prototype.set.call( + buffer, + buf, + pos + ) + } + } else if (!Buffer.isBuffer(buf)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } else { + buf.copy(buffer, pos) + } + pos += buf.length + } + return buffer +} + +function byteLength (string, encoding) { + if (Buffer.isBuffer(string)) { + return string.length + } + if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) { + return string.byteLength + } + if (typeof string !== 'string') { + throw new TypeError( + 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' + + 'Received type ' + typeof string + ) + } + + var len = string.length + var mustMatch = (arguments.length > 2 && arguments[2] === true) + if (!mustMatch && len === 0) return 0 + + // Use a for loop to avoid recursion + var loweredCase = false + for (;;) { + switch (encoding) { + case 'ascii': + case 'latin1': + case 'binary': + return len + case 'utf8': + case 'utf-8': + return utf8ToBytes(string).length + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return len * 2 + case 'hex': + return len >>> 1 + case 'base64': + return base64ToBytes(string).length + default: + if (loweredCase) { + return mustMatch ? 
-1 : utf8ToBytes(string).length // assume utf8 + } + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} +Buffer.byteLength = byteLength + +function slowToString (encoding, start, end) { + var loweredCase = false + + // No need to verify that "this.length <= MAX_UINT32" since it's a read-only + // property of a typed array. + + // This behaves neither like String nor Uint8Array in that we set start/end + // to their upper/lower bounds if the value passed is out of range. + // undefined is handled specially as per ECMA-262 6th Edition, + // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. + if (start === undefined || start < 0) { + start = 0 + } + // Return early if start > this.length. Done here to prevent potential uint32 + // coercion fail below. + if (start > this.length) { + return '' + } + + if (end === undefined || end > this.length) { + end = this.length + } + + if (end <= 0) { + return '' + } + + // Force coercion to uint32. This will also coerce falsey/NaN values to 0. + end >>>= 0 + start >>>= 0 + + if (end <= start) { + return '' + } + + if (!encoding) encoding = 'utf8' + + while (true) { + switch (encoding) { + case 'hex': + return hexSlice(this, start, end) + + case 'utf8': + case 'utf-8': + return utf8Slice(this, start, end) + + case 'ascii': + return asciiSlice(this, start, end) + + case 'latin1': + case 'binary': + return latin1Slice(this, start, end) + + case 'base64': + return base64Slice(this, start, end) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return utf16leSlice(this, start, end) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = (encoding + '').toLowerCase() + loweredCase = true + } + } +} + +// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package) +// to detect a Buffer instance. It's not possible to use `instanceof Buffer` +// reliably in a browserify context because there could be multiple different +// copies of the 'buffer' package in use. This method works even for Buffer +// instances that were created from another copy of the `buffer` package. 
+// See: https://github.com/feross/buffer/issues/154
+Buffer.prototype._isBuffer = true
+
+function swap (b, n, m) {
+  var i = b[n]
+  b[n] = b[m]
+  b[m] = i
+}
+
+Buffer.prototype.swap16 = function swap16 () {
+  var len = this.length
+  if (len % 2 !== 0) {
+    throw new RangeError('Buffer size must be a multiple of 16-bits')
+  }
+  for (var i = 0; i < len; i += 2) {
+    swap(this, i, i + 1)
+  }
+  return this
+}
+
+Buffer.prototype.swap32 = function swap32 () {
+  var len = this.length
+  if (len % 4 !== 0) {
+    throw new RangeError('Buffer size must be a multiple of 32-bits')
+  }
+  for (var i = 0; i < len; i += 4) {
+    swap(this, i, i + 3)
+    swap(this, i + 1, i + 2)
+  }
+  return this
+}
+
+Buffer.prototype.swap64 = function swap64 () {
+  var len = this.length
+  if (len % 8 !== 0) {
+    throw new RangeError('Buffer size must be a multiple of 64-bits')
+  }
+  for (var i = 0; i < len; i += 8) {
+    swap(this, i, i + 7)
+    swap(this, i + 1, i + 6)
+    swap(this, i + 2, i + 5)
+    swap(this, i + 3, i + 4)
+  }
+  return this
+}
+
+Buffer.prototype.toString = function toString () {
+  var length = this.length
+  if (length === 0) return ''
+  if (arguments.length === 0) return utf8Slice(this, 0, length)
+  return slowToString.apply(this, arguments)
+}
+
+Buffer.prototype.toLocaleString = Buffer.prototype.toString
+
+Buffer.prototype.equals = function equals (b) {
+  if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
+  if (this === b) return true
+  return Buffer.compare(this, b) === 0
+}
+
+Buffer.prototype.inspect = function inspect () {
+  var str = ''
+  var max = exports.INSPECT_MAX_BYTES
+  str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim()
+  if (this.length > max) str += ' ... '
+  return '<Buffer ' + str + '>'
+}
+if (customInspectSymbol) {
+  Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect
+}
+
+Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
+  if (isInstance(target, Uint8Array)) {
+    target = Buffer.from(target, target.offset, target.byteLength)
+  }
+  if (!Buffer.isBuffer(target)) {
+    throw new TypeError(
+      'The "target" argument must be one of type Buffer or Uint8Array. ' +
+      'Received type ' + (typeof target)
+    )
+  }
+
+  if (start === undefined) {
+    start = 0
+  }
+  if (end === undefined) {
+    end = target ? target.length : 0
+  }
+  if (thisStart === undefined) {
+    thisStart = 0
+  }
+  if (thisEnd === undefined) {
+    thisEnd = this.length
+  }
+
+  if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
+    throw new RangeError('out of range index')
+  }
+
+  if (thisStart >= thisEnd && start >= end) {
+    return 0
+  }
+  if (thisStart >= thisEnd) {
+    return -1
+  }
+  if (start >= end) {
+    return 1
+  }
+
+  start >>>= 0
+  end >>>= 0
+  thisStart >>>= 0
+  thisEnd >>>= 0
+
+  if (this === target) return 0
+
+  var x = thisEnd - thisStart
+  var y = end - start
+  var len = Math.min(x, y)
+
+  var thisCopy = this.slice(thisStart, thisEnd)
+  var targetCopy = target.slice(start, end)
+
+  for (var i = 0; i < len; ++i) {
+    if (thisCopy[i] !== targetCopy[i]) {
+      x = thisCopy[i]
+      y = targetCopy[i]
+      break
+    }
+  }
+
+  if (x < y) return -1
+  if (y < x) return 1
+  return 0
+}
+
+// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
+// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
+// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant is val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { + // Empty buffer means no match + if (buffer.length === 0) return -1 + + // Normalize byteOffset + if (typeof byteOffset === 'string') { + encoding = byteOffset + byteOffset = 0 + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000 + } + byteOffset = +byteOffset // Coerce to Number. + if (numberIsNaN(byteOffset)) { + // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer + byteOffset = dir ? 0 : (buffer.length - 1) + } + + // Normalize byteOffset: negative offsets start from the end of the buffer + if (byteOffset < 0) byteOffset = buffer.length + byteOffset + if (byteOffset >= buffer.length) { + if (dir) return -1 + else byteOffset = buffer.length - 1 + } else if (byteOffset < 0) { + if (dir) byteOffset = 0 + else return -1 + } + + // Normalize val + if (typeof val === 'string') { + val = Buffer.from(val, encoding) + } + + // Finally, search either indexOf (if dir is true) or lastIndexOf + if (Buffer.isBuffer(val)) { + // Special case: looking for empty string/buffer always fails + if (val.length === 0) { + return -1 + } + return arrayIndexOf(buffer, val, byteOffset, encoding, dir) + } else if (typeof val === 'number') { + val = val & 0xFF // Search for a byte value [0-255] + if (typeof Uint8Array.prototype.indexOf === 'function') { + if (dir) { + return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) + } else { + return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) + } + } + return arrayIndexOf(buffer, [val], byteOffset, encoding, dir) + } + + throw new TypeError('val must be string, number or Buffer') +} + +function arrayIndexOf (arr, val, byteOffset, encoding, dir) { + var indexSize = 1 + var arrLength = arr.length + var valLength = val.length + + if (encoding !== undefined) { + encoding = String(encoding).toLowerCase() + if (encoding === 'ucs2' || encoding === 'ucs-2' || + encoding === 'utf16le' || encoding === 'utf-16le') { + if (arr.length < 2 || val.length < 2) { + return -1 + } + indexSize = 2 + arrLength /= 2 + valLength /= 2 + byteOffset /= 2 + } + } + + function read (buf, i) { + if (indexSize === 1) { + return buf[i] + } else { + return buf.readUInt16BE(i * indexSize) + } + } + + var i + if (dir) { + var foundIndex = -1 + for (i = byteOffset; i < arrLength; i++) { + if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) { + if (foundIndex === -1) foundIndex = i + if (i - foundIndex + 1 === valLength) return foundIndex * indexSize + } else { + if (foundIndex !== -1) i -= i - foundIndex + foundIndex = -1 + } + } + } else { + if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength + for (i = byteOffset; i >= 0; i--) { + var found = true + for (var j = 0; j < valLength; j++) { + if (read(arr, i + j) !== read(val, j)) { + found = false + break + } + } + if (found) return i + } + } + + return -1 +} + +Buffer.prototype.includes = function includes (val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1 +} + +Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true) +} + +Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false) +} + +function hexWrite (buf, string, offset, length) { + offset = Number(offset) || 0 + var remaining = buf.length - offset + if (!length) { + length = remaining + } else { + length = Number(length) + if (length > remaining) { + length = remaining + } + } + + var strLen = string.length + + if (length > strLen / 2) { + length = strLen / 2 + } + for (var i = 0; i < length; ++i) { + var parsed = parseInt(string.substr(i * 2, 2), 16) + if (numberIsNaN(parsed)) return i + buf[offset + i] = parsed + } + return i +} + +function utf8Write (buf, string, offset, length) { + return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) +} + +function asciiWrite (buf, string, offset, length) { + return blitBuffer(asciiToBytes(string), buf, offset, length) +} + +function base64Write (buf, string, offset, length) { + return blitBuffer(base64ToBytes(string), buf, offset, length) +} + +function ucs2Write (buf, string, offset, length) { + return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) +} + +Buffer.prototype.write = function write (string, offset, length, encoding) { + // Buffer#write(string) + if (offset === undefined) { + encoding = 'utf8' + length = this.length + offset = 0 + // Buffer#write(string, encoding) + } else if (length === undefined && typeof offset === 'string') { + encoding = offset + length = this.length + offset = 0 + // Buffer#write(string, offset[, length][, encoding]) + } else if (isFinite(offset)) { + offset = offset >>> 0 + if (isFinite(length)) { + length = length >>> 0 + if (encoding === undefined) encoding = 'utf8' + } else { + encoding = length + length = undefined + } + } else { + throw new Error( + 'Buffer.write(string, encoding, offset[, length]) is no longer supported' + ) + } + + var remaining = this.length - offset + if (length === undefined || length > remaining) length = remaining + + if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { + throw new RangeError('Attempt to write outside buffer bounds') + } + + if (!encoding) encoding = 'utf8' + + var loweredCase = false + for (;;) { + switch (encoding) { + case 'hex': + return hexWrite(this, string, offset, length) + + case 'utf8': + case 'utf-8': + return utf8Write(this, string, offset, length) + + case 'ascii': + case 'latin1': + case 'binary': + return asciiWrite(this, string, offset, length) + + case 'base64': + // Warning: maxLength not taken into account in base64Write + return base64Write(this, string, offset, length) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + 
return ucs2Write(this, string, offset, length) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} + +Buffer.prototype.toJSON = function toJSON () { + return { + type: 'Buffer', + data: Array.prototype.slice.call(this._arr || this, 0) + } +} + +function base64Slice (buf, start, end) { + if (start === 0 && end === buf.length) { + return base64.fromByteArray(buf) + } else { + return base64.fromByteArray(buf.slice(start, end)) + } +} + +function utf8Slice (buf, start, end) { + end = Math.min(buf.length, end) + var res = [] + + var i = start + while (i < end) { + var firstByte = buf[i] + var codePoint = null + var bytesPerSequence = (firstByte > 0xEF) + ? 4 + : (firstByte > 0xDF) + ? 3 + : (firstByte > 0xBF) + ? 2 + : 1 + + if (i + bytesPerSequence <= end) { + var secondByte, thirdByte, fourthByte, tempCodePoint + + switch (bytesPerSequence) { + case 1: + if (firstByte < 0x80) { + codePoint = firstByte + } + break + case 2: + secondByte = buf[i + 1] + if ((secondByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) + if (tempCodePoint > 0x7F) { + codePoint = tempCodePoint + } + } + break + case 3: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) + if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { + codePoint = tempCodePoint + } + } + break + case 4: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + fourthByte = buf[i + 3] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) + if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { + codePoint = tempCodePoint + } + } + } + } + + if (codePoint === null) { + // we did not generate a valid codePoint so insert a + // replacement char (U+FFFD) and advance only 1 byte + codePoint = 0xFFFD + bytesPerSequence = 1 + } else if (codePoint > 0xFFFF) { + // encode to utf16 (surrogate pair dance) + codePoint -= 0x10000 + res.push(codePoint >>> 10 & 0x3FF | 0xD800) + codePoint = 0xDC00 | codePoint & 0x3FF + } + + res.push(codePoint) + i += bytesPerSequence + } + + return decodeCodePointsArray(res) +} + +// Based on http://stackoverflow.com/a/22747272/680742, the browser with +// the lowest limit is Chrome, with 0x10000 args. +// We go 1 magnitude less, for safety +var MAX_ARGUMENTS_LENGTH = 0x1000 + +function decodeCodePointsArray (codePoints) { + var len = codePoints.length + if (len <= MAX_ARGUMENTS_LENGTH) { + return String.fromCharCode.apply(String, codePoints) // avoid extra slice() + } + + // Decode in chunks to avoid "call stack size exceeded". 
+ var res = '' + var i = 0 + while (i < len) { + res += String.fromCharCode.apply( + String, + codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) + ) + } + return res +} + +function asciiSlice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i] & 0x7F) + } + return ret +} + +function latin1Slice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i]) + } + return ret +} + +function hexSlice (buf, start, end) { + var len = buf.length + + if (!start || start < 0) start = 0 + if (!end || end < 0 || end > len) end = len + + var out = '' + for (var i = start; i < end; ++i) { + out += hexSliceLookupTable[buf[i]] + } + return out +} + +function utf16leSlice (buf, start, end) { + var bytes = buf.slice(start, end) + var res = '' + // If bytes.length is odd, the last 8 bits must be ignored (same as node.js) + for (var i = 0; i < bytes.length - 1; i += 2) { + res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256)) + } + return res +} + +Buffer.prototype.slice = function slice (start, end) { + var len = this.length + start = ~~start + end = end === undefined ? len : ~~end + + if (start < 0) { + start += len + if (start < 0) start = 0 + } else if (start > len) { + start = len + } + + if (end < 0) { + end += len + if (end < 0) end = 0 + } else if (end > len) { + end = len + } + + if (end < start) end = start + + var newBuf = this.subarray(start, end) + // Return an augmented `Uint8Array` instance + Object.setPrototypeOf(newBuf, Buffer.prototype) + + return newBuf +} + +/* + * Need to make sure that buffer isn't trying to write out of bounds. + */ +function checkOffset (offset, ext, length) { + if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') + if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') +} + +Buffer.prototype.readUintLE = +Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + + return val +} + +Buffer.prototype.readUintBE = +Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + checkOffset(offset, byteLength, this.length) + } + + var val = this[offset + --byteLength] + var mul = 1 + while (byteLength > 0 && (mul *= 0x100)) { + val += this[offset + --byteLength] * mul + } + + return val +} + +Buffer.prototype.readUint8 = +Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 1, this.length) + return this[offset] +} + +Buffer.prototype.readUint16LE = +Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + return this[offset] | (this[offset + 1] << 8) +} + +Buffer.prototype.readUint16BE = +Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + return (this[offset] << 8) | this[offset + 1] +} + +Buffer.prototype.readUint32LE = +Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) 
{ + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return ((this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16)) + + (this[offset + 3] * 0x1000000) +} + +Buffer.prototype.readUint32BE = +Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] * 0x1000000) + + ((this[offset + 1] << 16) | + (this[offset + 2] << 8) | + this[offset + 3]) +} + +Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var i = byteLength + var mul = 1 + var val = this[offset + --i] + while (i > 0 && (mul *= 0x100)) { + val += this[offset + --i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 1, this.length) + if (!(this[offset] & 0x80)) return (this[offset]) + return ((0xff - this[offset] + 1) * -1) +} + +Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset] | (this[offset + 1] << 8) + return (val & 0x8000) ? val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset + 1] | (this[offset] << 8) + return (val & 0x8000) ? 
val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16) | + (this[offset + 3] << 24) +} + +Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] << 24) | + (this[offset + 1] << 16) | + (this[offset + 2] << 8) | + (this[offset + 3]) +} + +Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, true, 23, 4) +} + +Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, false, 23, 4) +} + +Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, true, 52, 8) +} + +Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, false, 52, 8) +} + +function checkInt (buf, value, offset, ext, max, min) { + if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') + if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') + if (offset + ext > buf.length) throw new RangeError('Index out of range') +} + +Buffer.prototype.writeUintLE = +Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var mul = 1 + var i = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUintBE = +Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var i = byteLength - 1 + var mul = 1 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUint8 = +Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeUint16LE = +Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + return offset + 2 +} + +Buffer.prototype.writeUint16BE = +Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) 
+ this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + return offset + 2 +} + +Buffer.prototype.writeUint32LE = +Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + this[offset + 3] = (value >>> 24) + this[offset + 2] = (value >>> 16) + this[offset + 1] = (value >>> 8) + this[offset] = (value & 0xff) + return offset + 4 +} + +Buffer.prototype.writeUint32BE = +Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) + this[offset + 3] = (value & 0xff) + return offset + 4 +} + +Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + var limit = Math.pow(2, (8 * byteLength) - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = 0 + var mul = 1 + var sub = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + var limit = Math.pow(2, (8 * byteLength) - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = byteLength - 1 + var mul = 1 + var sub = 0 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) + if (value < 0) value = 0xff + value + 1 + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + return offset + 2 +} + +Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + return offset + 2 +} + +Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + this[offset + 2] = (value >>> 16) + this[offset + 3] = (value >>> 24) + return offset + 4 +} + +Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + if (value < 0) value = 0xffffffff + value + 1 + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) 
+ this[offset + 3] = (value & 0xff) + return offset + 4 +} + +function checkIEEE754 (buf, value, offset, ext, max, min) { + if (offset + ext > buf.length) throw new RangeError('Index out of range') + if (offset < 0) throw new RangeError('Index out of range') +} + +function writeFloat (buf, value, offset, littleEndian, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) + } + ieee754.write(buf, value, offset, littleEndian, 23, 4) + return offset + 4 +} + +Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { + return writeFloat(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { + return writeFloat(this, value, offset, false, noAssert) +} + +function writeDouble (buf, value, offset, littleEndian, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) + } + ieee754.write(buf, value, offset, littleEndian, 52, 8) + return offset + 8 +} + +Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { + return writeDouble(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { + return writeDouble(this, value, offset, false, noAssert) +} + +// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) +Buffer.prototype.copy = function copy (target, targetStart, start, end) { + if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer') + if (!start) start = 0 + if (!end && end !== 0) end = this.length + if (targetStart >= target.length) targetStart = target.length + if (!targetStart) targetStart = 0 + if (end > 0 && end < start) end = start + + // Copy 0 bytes; we're done + if (end === start) return 0 + if (target.length === 0 || this.length === 0) return 0 + + // Fatal error conditions + if (targetStart < 0) { + throw new RangeError('targetStart out of bounds') + } + if (start < 0 || start >= this.length) throw new RangeError('Index out of range') + if (end < 0) throw new RangeError('sourceEnd out of bounds') + + // Are we oob? 
+ if (end > this.length) end = this.length + if (target.length - targetStart < end - start) { + end = target.length - targetStart + start + } + + var len = end - start + + if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') { + // Use built-in when available, missing from IE11 + this.copyWithin(targetStart, start, end) + } else { + Uint8Array.prototype.set.call( + target, + this.subarray(start, end), + targetStart + ) + } + + return len +} + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill (val, start, end, encoding) { + // Handle string cases: + if (typeof val === 'string') { + if (typeof start === 'string') { + encoding = start + start = 0 + end = this.length + } else if (typeof end === 'string') { + encoding = end + end = this.length + } + if (encoding !== undefined && typeof encoding !== 'string') { + throw new TypeError('encoding must be a string') + } + if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + if (val.length === 1) { + var code = val.charCodeAt(0) + if ((encoding === 'utf8' && code < 128) || + encoding === 'latin1') { + // Fast path: If `val` fits into a single byte, use that numeric value. + val = code + } + } + } else if (typeof val === 'number') { + val = val & 255 + } else if (typeof val === 'boolean') { + val = Number(val) + } + + // Invalid ranges are not set to a default, so can range check early. + if (start < 0 || this.length < start || this.length < end) { + throw new RangeError('Out of range index') + } + + if (end <= start) { + return this + } + + start = start >>> 0 + end = end === undefined ? this.length : end >>> 0 + + if (!val) val = 0 + + var i + if (typeof val === 'number') { + for (i = start; i < end; ++i) { + this[i] = val + } + } else { + var bytes = Buffer.isBuffer(val) + ? 
val + : Buffer.from(val, encoding) + var len = bytes.length + if (len === 0) { + throw new TypeError('The value "' + val + + '" is invalid for argument "value"') + } + for (i = 0; i < end - start; ++i) { + this[i + start] = bytes[i % len] + } + } + + return this +} + +// HELPER FUNCTIONS +// ================ + +var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g + +function base64clean (str) { + // Node takes equal signs as end of the Base64 encoding + str = str.split('=')[0] + // Node strips out invalid characters like \n and \t from the string, base64-js does not + str = str.trim().replace(INVALID_BASE64_RE, '') + // Node converts strings with length < 2 to '' + if (str.length < 2) return '' + // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not + while (str.length % 4 !== 0) { + str = str + '=' + } + return str +} + +function utf8ToBytes (string, units) { + units = units || Infinity + var codePoint + var length = string.length + var leadSurrogate = null + var bytes = [] + + for (var i = 0; i < length; ++i) { + codePoint = string.charCodeAt(i) + + // is surrogate component + if (codePoint > 0xD7FF && codePoint < 0xE000) { + // last char was a lead + if (!leadSurrogate) { + // no lead yet + if (codePoint > 0xDBFF) { + // unexpected trail + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } else if (i + 1 === length) { + // unpaired lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } + + // valid lead + leadSurrogate = codePoint + + continue + } + + // 2 leads in a row + if (codePoint < 0xDC00) { + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + leadSurrogate = codePoint + continue + } + + // valid surrogate pair + codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 + } else if (leadSurrogate) { + // valid bmp char, but last char was a lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + } + + leadSurrogate = null + + // encode utf8 + if (codePoint < 0x80) { + if ((units -= 1) < 0) break + bytes.push(codePoint) + } else if (codePoint < 0x800) { + if ((units -= 2) < 0) break + bytes.push( + codePoint >> 0x6 | 0xC0, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x10000) { + if ((units -= 3) < 0) break + bytes.push( + codePoint >> 0xC | 0xE0, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x110000) { + if ((units -= 4) < 0) break + bytes.push( + codePoint >> 0x12 | 0xF0, + codePoint >> 0xC & 0x3F | 0x80, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else { + throw new Error('Invalid code point') + } + } + + return bytes +} + +function asciiToBytes (str) { + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + // Node's code seems to be doing this and not & 0x7F.. + byteArray.push(str.charCodeAt(i) & 0xFF) + } + return byteArray +} + +function utf16leToBytes (str, units) { + var c, hi, lo + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + if ((units -= 2) < 0) break + + c = str.charCodeAt(i) + hi = c >> 8 + lo = c % 256 + byteArray.push(lo) + byteArray.push(hi) + } + + return byteArray +} + +function base64ToBytes (str) { + return base64.toByteArray(base64clean(str)) +} + +function blitBuffer (src, dst, offset, length) { + for (var i = 0; i < length; ++i) { + if ((i + offset >= dst.length) || (i >= src.length)) break + dst[i + offset] = src[i] + } + return i +} + +// ArrayBuffer or Uint8Array objects from other contexts (i.e. 
iframes) do not pass +// the `instanceof` check but they should be treated as of that type. +// See: https://github.com/feross/buffer/issues/166 +function isInstance (obj, type) { + return obj instanceof type || + (obj != null && obj.constructor != null && obj.constructor.name != null && + obj.constructor.name === type.name) +} +function numberIsNaN (obj) { + // For IE11 support + return obj !== obj // eslint-disable-line no-self-compare +} + +// Create lookup table for `toString('hex')` +// See: https://github.com/feross/buffer/issues/219 +var hexSliceLookupTable = (function () { + var alphabet = '0123456789abcdef' + var table = new Array(256) + for (var i = 0; i < 16; ++i) { + var i16 = i * 16 + for (var j = 0; j < 16; ++j) { + table[i16 + j] = alphabet[i] + alphabet[j] + } + } + return table +})() diff --git a/sdklab/meantimerecovery/aedes/node_modules/buffer/package.json b/sdklab/meantimerecovery/aedes/node_modules/buffer/package.json new file mode 100644 index 000000000..918cfe42a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/buffer/package.json @@ -0,0 +1,127 @@ +{ + "_from": "buffer@^5.5.0", + "_id": "buffer@5.7.1", + "_inBundle": false, + "_integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "_location": "/buffer", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "buffer@^5.5.0", + "name": "buffer", + "escapedName": "buffer", + "rawSpec": "^5.5.0", + "saveSpec": null, + "fetchSpec": "^5.5.0" + }, + "_requiredBy": [ + "/bl" + ], + "_resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "_shasum": "ba62e7c13133053582197160851a8f648e99eed0", + "_spec": "buffer@^5.5.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\bl", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/buffer/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Romain Beauxis", + "email": "toots@rastageeks.org" + }, + { + "name": "James Halliday", + "email": "mail@substack.net" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + }, + "deprecated": false, + "description": "Node.js Buffer API, for the browser", + "devDependencies": { + "airtap": "^3.0.0", + "benchmark": "^2.1.4", + "browserify": "^17.0.0", + "concat-stream": "^2.0.0", + "hyperquest": "^2.1.3", + "is-buffer": "^2.0.4", + "is-nan": "^1.3.0", + "split": "^1.0.1", + "standard": "*", + "tape": "^5.0.1", + "through2": "^4.0.2", + "uglify-js": "^3.11.3" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "homepage": "https://github.com/feross/buffer", + "jspm": { + "map": { + "./index.js": { + "node": "@node/buffer" + } + } + }, + "keywords": [ + "arraybuffer", + "browser", + "browserify", + "buffer", + "compatible", + "dataview", + "uint8array" + ], + "license": "MIT", + "main": "index.js", + "name": "buffer", + "repository": { + "type": "git", + "url": "git://github.com/feross/buffer.git" + }, + "scripts": { + "perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html", + "perf-node": "node 
perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js", + "size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c", + "test": "standard && node ./bin/test.js", + "test-browser-es5": "airtap -- test/*.js", + "test-browser-es5-local": "airtap --local -- test/*.js", + "test-browser-es6": "airtap -- test/*.js test/node/*.js", + "test-browser-es6-local": "airtap --local -- test/*.js test/node/*.js", + "test-node": "tape test/*.js test/node/*.js", + "update-authors": "./bin/update-authors.sh" + }, + "standard": { + "ignore": [ + "test/node/**/*.js", + "test/common.js", + "test/_polyfill.js", + "perf/**/*.js" + ], + "globals": [ + "SharedArrayBuffer" + ] + }, + "types": "index.d.ts", + "version": "5.7.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/.travis.yml new file mode 100644 index 000000000..e303031eb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - '6' + - '8' + - '10' diff --git a/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/LICENSE new file mode 100644 index 000000000..66a4d2a14 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/README.md b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/README.md new file mode 100644 index 000000000..5f18d884b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/README.md @@ -0,0 +1,43 @@ +# bulk-write-stream + +Writable stream that forwards everything in the `highWaterMark` buffer +to a custom `write` function using the new [writev](https://nodejs.org/api/stream.html#stream_writable_writev_chunks_callback) api in streams + +``` +npm install bulk-write-stream +``` + +[![build status](http://img.shields.io/travis/mafintosh/bulk-write-stream.svg?style=flat)](http://travis-ci.org/mafintosh/bulk-write-stream) + +## Usage + +``` js +var bulk = require('bulk-write-stream') + +var ws = bulk.obj(function (list, cb) { + console.log('should write list of objects', list) + cb() +}) + +ws.write('a') +ws.write('b') +ws.write('c') +ws.write('d') +``` + +## API + +#### `var ws = bulk([options], write, [flush])` + +Create a new binary bulk write stream. Options are forwarded to the writable stream constructor. +Write is called with `write(list, cb)` where list is everything currently buffered in the writable stream. + +If you specify a flush function that will be called with `flush(cb)` before the stream emits `finish`. + +#### `var ws = bulk.obj([options], write, [flush])` + +A shorthand for setting `objectMode: true` + +## License + +MIT diff --git a/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/index.js b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/index.js new file mode 100644 index 000000000..3f0212b22 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/index.js @@ -0,0 +1,70 @@ +var stream = require('readable-stream') +var inherits = require('inherits') + +var SIGNAL_FLUSH = Buffer.from([0]) + +var Bulk = function (opts, worker, flush) { + if (!(this instanceof Bulk)) return new Bulk(opts, worker, flush) + + if (typeof opts === 'function') { + flush = worker + worker = opts + opts = {} + } + + stream.Writable.call(this, opts) + this._worker = worker + this._flush = flush + this.destroyed = false +} + +inherits(Bulk, stream.Writable) + +Bulk.obj = function (opts, worker, flush) { + if (typeof opts === 'function') return Bulk.obj(null, opts, worker) + if (!opts) opts = {} + opts.objectMode = true + return new Bulk(opts, worker, flush) +} + +Bulk.prototype.end = function (data, enc, cb) { + if (!this._flush) return stream.Writable.prototype.end.apply(this, arguments) + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + if (data) this.write(data) + if (!this._writableState.ending) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +Bulk.prototype.destroy = function (err) { + if (this.destroyed) return + this.destroyed = true + if (err) this.emit('error', err) + this.emit('close') +} + +Bulk.prototype._write = function (data, enc, cb) { + if (data === SIGNAL_FLUSH) this._flush(cb) + else this._worker([data], cb) +} + +Bulk.prototype._writev = function (batch, cb) { + var len = batch.length + if (batch[batch.length - 1].chunk === SIGNAL_FLUSH) { + cb = this._flusher(cb) + if (!--len) return cb() + } + var arr = new Array(len) + for (var i = 0; i < len; i++) arr[i] = batch[i].chunk + this._worker(arr, cb) +} + +Bulk.prototype._flusher = function (cb) { + var self = this + return function (err) { + if (err) 
return cb(err) + self._flush(cb) + } +} + +module.exports = Bulk diff --git a/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/package.json b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/package.json new file mode 100644 index 000000000..81cfe5afb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/package.json @@ -0,0 +1,55 @@ +{ + "_from": "bulk-write-stream@^2.0.1", + "_id": "bulk-write-stream@2.0.1", + "_inBundle": false, + "_integrity": "sha512-XWOLjgHtpDasHfwM8oO4df1JoZwa7/OwTsXDzh4rUTo+9CowzeOFBZz43w+H14h1fyq+xl28tVIBrdjcjj4Gug==", + "_location": "/bulk-write-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "bulk-write-stream@^2.0.1", + "name": "bulk-write-stream", + "escapedName": "bulk-write-stream", + "rawSpec": "^2.0.1", + "saveSpec": null, + "fetchSpec": "^2.0.1" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/bulk-write-stream/-/bulk-write-stream-2.0.1.tgz", + "_shasum": "085bdc65caf19ceece4ff365fdb951ef0c6e3db8", + "_spec": "bulk-write-stream@^2.0.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Mathias Buus", + "url": "@mafintosh" + }, + "bugs": { + "url": "https://github.com/mafintosh/bulk-write-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "deprecated": false, + "description": "Writable stream that forwards everything in the highWaterMark buffer to a custom write function using the new writev api in streams", + "devDependencies": { + "standard": "^11.0.1", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/mafintosh/bulk-write-stream", + "license": "MIT", + "main": "index.js", + "name": "bulk-write-stream", + "repository": { + "type": "git", + "url": "git+https://github.com/mafintosh/bulk-write-stream.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "version": "2.0.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/test.js b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/test.js new file mode 100644 index 000000000..7edacbd2a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/bulk-write-stream/test.js @@ -0,0 +1,102 @@ +var tape = require('tape') +var bulk = require('./') + +tape('input matches', function (t) { + var expected = ['a', 'b', 'c', 'd'] + var clone = expected.slice(0) + + var ws = bulk.obj(function (list, cb) { + while (list.length) t.same(list.shift(), expected.shift()) + process.nextTick(cb) + }) + + for (var i = 0; i < clone.length; i++) ws.write(clone[i]) + + ws.end(function () { + t.end() + }) +}) + +tape('bulk list', function (t) { + var expected = [['a'], ['b', 'c', 'd']] + + var ws = bulk.obj(function (list, cb) { + t.same(list, expected.shift()) + process.nextTick(cb) + }) + + ws.write('a') + ws.write('b') + ws.write('c') + ws.write('d') + + ws.end(function () { + t.end() + }) +}) + +tape('flush one', function (t) { + var expected = [[Buffer.from('a')]] + var flushed = false + + var ws = bulk(function (list, cb) { + t.same(list, expected.shift()) + process.nextTick(cb) + }, function (cb) { + flushed = true + cb() + }) + + ws.write('a') + + ws.end(function () { + t.ok(flushed) + t.end() + }) +}) + +tape('flush', function (t) { + var expected = [['a'], ['b', 'c', 'd']] + var flushed = false + + var ws = bulk.obj(function 
(list, cb) { + t.same(list, expected.shift()) + process.nextTick(cb) + }, function (cb) { + flushed = true + cb() + }) + + ws.write('a') + ws.write('b') + ws.write('c') + ws.write('d') + + ws.end(function () { + t.ok(flushed) + t.end() + }) +}) + +tape('flush binary', function (t) { + var expected = [[Buffer.from('a')], [Buffer.from('b'), Buffer.from('c'), Buffer.from('d')]] + var flushed = false + + var ws = bulk.obj(function (list, cb) { + t.same(list, expected.shift()) + process.nextTick(cb) + }, function (cb) { + flushed = true + cb() + }) + + ws.write(Buffer.from('a')) + ws.write(Buffer.from('b')) + ws.write(Buffer.from('c')) + ws.write(Buffer.from('d')) + + ws.end(function () { + t.ok(flushed) + t.end() + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/core-util-is/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/core-util-is/LICENSE new file mode 100644 index 000000000..d8d7f9437 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/core-util-is/LICENSE @@ -0,0 +1,19 @@ +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/core-util-is/README.md b/sdklab/meantimerecovery/aedes/node_modules/core-util-is/README.md new file mode 100644 index 000000000..5a76b4149 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/core-util-is/README.md @@ -0,0 +1,3 @@ +# core-util-is + +The `util.is*` functions introduced in Node v0.12. 
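A brief, hypothetical usage sketch of these helpers, assuming the package's conventional `util.is*` exports (`isString`, `isBuffer`, `isNullOrUndefined`, `isNumber`), which is how vendored stream implementations such as the bundled `readable-stream` typically consume it:

```js
// Minimal sketch: strict type checks as used by older stream implementations.
var util = require('core-util-is')

console.log(util.isString('abc'))               // true
console.log(util.isBuffer(Buffer.from('abc')))  // true
console.log(util.isNullOrUndefined(null))       // true
console.log(util.isNumber('42'))                // false - no coercion is performed
```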
diff --git a/sdklab/meantimerecovery/aedes/node_modules/core-util-is/package.json b/sdklab/meantimerecovery/aedes/node_modules/core-util-is/package.json new file mode 100644 index 000000000..fcbe1376f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/core-util-is/package.json @@ -0,0 +1,68 @@ +{ + "_from": "core-util-is@~1.0.0", + "_id": "core-util-is@1.0.3", + "_inBundle": false, + "_integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "_location": "/core-util-is", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "core-util-is@~1.0.0", + "name": "core-util-is", + "escapedName": "core-util-is", + "rawSpec": "~1.0.0", + "saveSpec": null, + "fetchSpec": "~1.0.0" + }, + "_requiredBy": [ + "/from2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "_shasum": "a6042d3634c2b27e9328f837b965fac83808db85", + "_spec": "core-util-is@~1.0.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\from2\\node_modules\\readable-stream", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/core-util-is/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "The `util.is*` functions introduced in Node v0.12.", + "devDependencies": { + "tap": "^15.0.9" + }, + "files": [ + "lib" + ], + "homepage": "https://github.com/isaacs/core-util-is#readme", + "keywords": [ + "util", + "isBuffer", + "isArray", + "isNumber", + "isString", + "isRegExp", + "isThis", + "isThat", + "polyfill" + ], + "license": "MIT", + "main": "lib/util.js", + "name": "core-util-is", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/core-util-is.git" + }, + "scripts": { + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", + "test": "tap test.js" + }, + "version": "1.0.3" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/debug/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/debug/LICENSE new file mode 100644 index 000000000..1a9820e26 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/sdklab/meantimerecovery/aedes/node_modules/debug/README.md b/sdklab/meantimerecovery/aedes/node_modules/debug/README.md new file mode 100644 index 000000000..e9c3e047c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![Build Status](https://travis-ci.org/debug-js/debug.svg?branch=master)](https://travis-ci.org/debug-js/debug) [![Coverage Status](https://coveralls.io/repos/github/debug-js/debug/badge.svg?branch=master)](https://coveralls.io/github/debug-js/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. + +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. 
These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. 
+For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. + + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. 
+ +For example: + +```js +let debug = require('debug'); +debug.enable('foo:*,-foo:bar'); +let namespaces = debug.disable(); +debug.enable(namespaces); +``` + +Note: There is no guarantee that the string will be identical to the initial +enable string, but semantically they will be identical. + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + +## Usage in child processes + +Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process. +For example: + +```javascript +worker = fork(WORKER_WRAP_PATH, [workerPath], { + stdio: [ + /* stdin: */ 0, + /* stdout: */ 'pipe', + /* stderr: */ 'pipe', + 'ipc', + ], + env: Object.assign({}, process.env, { + DEBUG_COLORS: 1 // without this settings, colors won't be shown + }), +}); + +worker.stderr.pipe(process.stderr, { end: false }); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + - Josh Junon + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
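The Wildcards and Environment Variables sections above describe the namespace matching rules only in prose. As a minimal sketch (assuming only the `debug` API shown above; the `connect:*` namespace names are purely illustrative), the same selection can be driven programmatically with `enable()` instead of the `DEBUG` variable:

```js
// Minimal sketch: programmatic namespace selection with `debug`.
// Equivalent to running the process with DEBUG=connect:*,-connect:session
const createDebug = require('debug');

createDebug.enable('connect:*,-connect:session');

const bodyLog = createDebug('connect:bodyParser');
const sessLog = createDebug('connect:session');

bodyLog('parsed %d bytes', 512); // emitted: matches connect:*
sessLog('restored session');     // suppressed: excluded by -connect:session

console.log(bodyLog.enabled, sessLog.enabled); // true false
```

Checking the `enabled` property afterwards, as in the last line, confirms which instances will actually emit output under the current selection.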
diff --git a/sdklab/meantimerecovery/aedes/node_modules/debug/package.json b/sdklab/meantimerecovery/aedes/node_modules/debug/package.json new file mode 100644 index 000000000..5c76be470 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/debug/package.json @@ -0,0 +1,101 @@ +{ + "_from": "debug@^4.1.1", + "_id": "debug@4.3.4", + "_inBundle": false, + "_integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "_location": "/debug", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "debug@^4.1.1", + "name": "debug", + "escapedName": "debug", + "rawSpec": "^4.1.1", + "saveSpec": null, + "fetchSpec": "^4.1.1" + }, + "_requiredBy": [ + "/mqtt-packet" + ], + "_resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "_shasum": "1319f6579357f2338d3337d2cdd4914bb5dcc865", + "_spec": "debug@^4.1.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\mqtt-packet", + "author": { + "name": "Josh Junon", + "email": "josh.junon@protonmail.com" + }, + "browser": "./src/browser.js", + "bugs": { + "url": "https://github.com/debug-js/debug/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + { + "name": "Nathan Rajlich", + "email": "nathan@tootallnate.net", + "url": "http://n8.io" + }, + { + "name": "Andrew Rhyne", + "email": "rhyneandrew@gmail.com" + } + ], + "dependencies": { + "ms": "2.1.2" + }, + "deprecated": false, + "description": "Lightweight debugging utility for Node.js and the browser", + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "xo": "^0.23.0" + }, + "engines": { + "node": ">=6.0" + }, + "files": [ + "src", + "LICENSE", + "README.md" + ], + "homepage": "https://github.com/debug-js/debug#readme", + "keywords": [ + "debug", + "log", + "debugger" + ], + "license": "MIT", + "main": "./src/index.js", + "name": "debug", + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls", + "test:node": "istanbul cover _mocha -- test.js" + }, + "version": "4.3.4" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/debug/src/browser.js b/sdklab/meantimerecovery/aedes/node_modules/debug/src/browser.js new file mode 100644 index 000000000..cd0fc35d1 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/debug/src/browser.js @@ -0,0 +1,269 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/sdklab/meantimerecovery/aedes/node_modules/debug/src/common.js b/sdklab/meantimerecovery/aedes/node_modules/debug/src/common.js new file mode 100644 index 000000000..e3291b20f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/debug/src/common.js @@ -0,0 +1,274 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. 
+ */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. 
+ * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/sdklab/meantimerecovery/aedes/node_modules/debug/src/index.js b/sdklab/meantimerecovery/aedes/node_modules/debug/src/index.js new file mode 100644 index 000000000..bf4c57f25 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/debug/src/node.js b/sdklab/meantimerecovery/aedes/node_modules/debug/src/node.js new file mode 100644 index 000000000..79bc085cb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. 
+ */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. 
+ * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/LICENSE new file mode 100644 index 000000000..757562ec5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/README.md b/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/README.md new file mode 100644 index 000000000..857b14bd7 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/README.md @@ -0,0 +1,54 @@ +# end-of-stream + +A node module that calls a callback when a readable/writable/duplex stream has completed or failed. + + npm install end-of-stream + +[![Build status](https://travis-ci.org/mafintosh/end-of-stream.svg?branch=master)](https://travis-ci.org/mafintosh/end-of-stream) + +## Usage + +Simply pass a stream and a callback to the `eos`. +Both legacy streams, streams2 and stream3 are supported. 
+ +``` js +var eos = require('end-of-stream'); + +eos(readableStream, function(err) { + // this will be set to the stream instance + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended', this === readableStream); +}); + +eos(writableStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished', this === writableStream); +}); + +eos(duplexStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended and finished', this === duplexStream); +}); + +eos(duplexStream, {readable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished but might still be readable'); +}); + +eos(duplexStream, {writable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended but might still be writable'); +}); + +eos(readableStream, {error:false}, function(err) { + // do not treat emit('error', err) as a end-of-stream +}); +``` + +## License + +MIT + +## Related + +`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. diff --git a/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/index.js b/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/index.js new file mode 100644 index 000000000..c77f0d5d7 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/index.js @@ -0,0 +1,94 @@ +var once = require('once'); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; diff --git a/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/package.json b/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/package.json new file mode 100644 index 000000000..e560331f6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/end-of-stream/package.json @@ -0,0 +1,65 @@ +{ + "_from": "end-of-stream@^1.4.4", + "_id": "end-of-stream@1.4.4", + "_inBundle": false, + "_integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "_location": "/end-of-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "end-of-stream@^1.4.4", + "name": "end-of-stream", + "escapedName": "end-of-stream", + "rawSpec": "^1.4.4", + "saveSpec": null, + "fetchSpec": "^1.4.4" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "_shasum": "5ae64a5f45057baf3626ec14da0ca5e4b2431eb0", + "_spec": "end-of-stream@^1.4.4", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Mathias Buus", + "email": "mathiasbuus@gmail.com" + }, + "bugs": { + "url": "https://github.com/mafintosh/end-of-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "once": "^1.4.0" + }, + "deprecated": false, + "description": "Call a callback when a readable/writable/duplex stream has completed or failed.", + "devDependencies": { + "tape": "^4.11.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/mafintosh/end-of-stream", + "keywords": [ + "stream", + "streams", + "callback", + "finish", + "close", + "end", + "wait" + ], + "license": "MIT", + "main": "index.js", + "name": "end-of-stream", + "repository": { + "type": "git", + "url": 
"git://github.com/mafintosh/end-of-stream.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "1.4.4" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/.npmignore b/sdklab/meantimerecovery/aedes/node_modules/fastfall/.npmignore new file mode 100644 index 000000000..123ae94d0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/.npmignore @@ -0,0 +1,27 @@ +# Logs +logs +*.log + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directory +# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git +node_modules diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/fastfall/.travis.yml new file mode 100644 index 000000000..b3cae876a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/.travis.yml @@ -0,0 +1,16 @@ +sudo: false +language: node_js + +node_js: + - "0.10" + - "0.12" + - "iojs" + - "4" + - "5" + +cache: + directories: + - node_modules + +after_script: + npm run coveralls \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/fastfall/LICENSE new file mode 100644 index 000000000..fbf3a01d8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/README.md b/sdklab/meantimerecovery/aedes/node_modules/fastfall/README.md new file mode 100644 index 000000000..d88109edc --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/README.md @@ -0,0 +1,232 @@ +# fastfall + +[![npm version][npm-badge]][npm-url] +[![Build Status][travis-badge]][travis-url] +[![Coverage Status][coveralls-badge]][coveralls-url] +[![Dependency Status][david-badge]][david-url] + +## call your callbacks in a waterfall, without overhead + +Benchmark for doing 3 calls `setImmediate` 100 thousands times: + +* non-reusable setImmediate: 407ms +* [async.waterfall](https://github.com/caolan/async#waterfall): 1203ms +* [run-waterfall](http://npm.im/run-waterfall): 1432ms +* [insync.wasterfall](https://www.npmjs.com/package/insync#waterfall): + 1570ms +* [neo-async.wasterfall](http://suguru03.github.io/neo-async/doc/async.waterfall.html): + 445ms +* [waterfallize](http://npm.im/waterfallize): 757ms +* `fastfall`: 432ms +* `fastfall` compiled: 428ms + + +These benchmarks where taken via `bench.js` on node 4.2.2, on a MacBook +Pro Retina 2014 (i7, 16GB of RAM). + +If you need zero-overhead series function call, check out +[fastseries](http://npm.im/fastseries), for parallel calls check out +[fastparallel](http://npm.im/fastparallel), and for a fast work queue +use [fastq](http://npm.im/fastq). + +[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) + +## Install + +``` +npm install fastfall --save +``` + +## Usage + +```js +var fall = require('fastfall')() + +fall([ + function a (cb) { + console.log('called a') + cb(null, 'a') + }, + function b (a, cb) { + console.log('called b with:', a) + cb(null, 'a', 'b') + }, + function c (a, b, cb) { + console.log('called c with:', a, b) + cb(null, 'a', 'b', 'c') + }], function result (err, a, b, c) { + console.log('result arguments', arguments) + }) +``` + +You can also set `this` when you create a fall: + +```js +var that = { hello: 'world' } +var fall = require('fastfall')(that) + +fall([a, b, c], result) + +function a (cb) { + console.log(this) + console.log('called a') + cb(null, 'a') +} + +function b (a, cb) { + console.log('called b with:', a) + cb(null, 'a', 'b') +} + +function c (a, b, cb) { + console.log('called c with:', a, b) + cb(null, 'a', 'b', 'c') +} + +function result (err, a, b, c) { + console.log('result arguments', arguments) +} +``` + +You can also set `this` when you run a task: + +```js +var that = { hello: 'world' } +var fall = require('fastfall')() + +fall(new State('world'), [ + a, b, c, +], console.log) + +function State (value) { + this.value = value +} + +function a (cb) { + console.log(this.value) + console.log('called a') + cb(null, 'a') +} + +function b (a, cb) { + console.log('called b with:', a) + cb(null, 'a', 'b') +} + +function c (a, b, cb) { + console.log('called c with:', a, b) + cb(null, 'a', 'b', 'c') +} +``` + +### Compile a waterfall + +```js +var fall = require('fastfall')([ + function a (arg, cb) { + console.log('called a') + cb(null, arg) + }, + function b (a, cb) { + console.log('called b with:', a) + cb(null, 'a', 'b') + }, + function c (a, b, cb) { + console.log('called c with:', a, b) + cb(null, 'a', 'b', 'c') + }]) + +// a compiled fall supports arguments too! 
+fall(42, function result (err, a, b, c) { + console.log('result arguments', arguments) +}) +``` + +You can set `this` by doing: + +```js +var that = { hello: 'world' } +var fall = require('fastfall')(that, [ + function a (arg, cb) { + console.log('this is', this) + console.log('called a') + cb(null, arg) + }, + function b (a, cb) { + console.log('called b with:', a) + cb(null, 'a', 'b') + }, + function c (a, b, cb) { + console.log('called c with:', a, b) + cb(null, 'a', 'b', 'c') + }]) + +// a compiled fall supports arguments too! +fall(42, function result (err, a, b, c) { + console.log('result arguments', arguments) +}) +``` + +or you can simply attach it to an object: + +```js +var that = { hello: 'world' } +that.doSomething = require('fastfall')([ + function a (arg, cb) { + console.log('this is', this) + console.log('called a') + cb(null, arg) + }, + function b (a, cb) { + console.log('called b with:', a) + cb(null, 'a', 'b') + }, + function c (a, b, cb) { + console.log('called c with:', a, b) + cb(null, 'a', 'b', 'c') + }]) + +// a compiled fall supports arguments too! +that.doSomething(42, function result (err, a, b, c) { + console.log('this is', this) + console.log('result arguments', arguments) +}) +``` + +## API + +### fastfall([this], [functions]) + +Creates a `fall`, it can either be pre-filled with a `this` value +and an array of functions. + +If there is no list of functions, [a not-compiled fall](#not-compiled) +is returned, if there is a list of function [a compiled fall](#compiled) +is returned. + + +### fall([this], functions, [done]) + +Calls the functions in a waterfall, forwarding the arguments from one to +another. Calls `done` when it has finished. + + +### fall(args..., [done]) + +Calls the compiled functions in a waterfall, forwarding the arguments from one to +another. Additionally, a user can specify some arguments for the first +function, too. Calls `done` when it has finished. 
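
One behaviour the API description above leaves implicit (and that the bundled `test.js` exercises) is error short-circuiting: if any step passes a truthy error to its callback, the remaining steps are skipped and `done` is called with that error. A small sketch, assuming only the `fastfall` API shown above:

```js
// Sketch: an error passed to a step's callback short-circuits the waterfall.
var fall = require('fastfall')()

fall([
  function load (cb) {
    cb(null, 'payload')
  },
  function validate (payload, cb) {
    cb(new Error('invalid payload')) // waterfall stops here
  },
  function save (payload, cb) {
    // never reached
    cb(null, payload)
  }
], function done (err) {
  console.log(err.message) // 'invalid payload'
})
```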
+ +## License + +MIT + + +[npm-badge]: https://badge.fury.io/js/fastfall.svg +[npm-url]: https://badge.fury.io/js/fastfall +[travis-badge]: https://api.travis-ci.org/mcollina/fastfall.svg +[travis-url]: https://travis-ci.org/mcollina/fastfall +[coveralls-badge]:https://coveralls.io/repos/mcollina/fastfall/badge.svg?branch=master&service=github +[coveralls-url]: https://coveralls.io/github/mcollina/fastfall?branch=master +[david-badge]: https://david-dm.org/mcollina/fastfall.svg +[david-url]: https://david-dm.org/mcollina/fastfall diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/bench.js b/sdklab/meantimerecovery/aedes/node_modules/fastfall/bench.js new file mode 100644 index 000000000..2d2d50867 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/bench.js @@ -0,0 +1,81 @@ +'use strict' + +var max = 100000 +var async = require('async') +var insync = require('insync') +var neoAsync = require('neo-async') +var fall = require('./')() +var runWaterfall = require('run-waterfall') +var waterfallize = require('waterfallize') +var bench = require('fastbench') + +var nextDone +var nextCount + +function benchSetImmediate (done) { + nextCount = 3 + nextDone = done + setImmediate(somethingImmediate) +} + +function somethingImmediate () { + nextCount-- + if (nextCount === 0) { + nextDone() + } else { + setImmediate(somethingImmediate) + } +} + +function somethingB (cb) { + setImmediate(cb) +} + +function somethingA (cb) { + setImmediate(cb) +} + +var toCall = [somethingA, somethingB, somethingB] +function benchAsyncWaterfall (done) { + async.waterfall(toCall, done) +} + +function benchFastFall (done) { + fall(toCall, done) +} + +function benchWaterfallize (done) { + var next = waterfallize() + + next(toCall[0]) + next(toCall[1]) + next(toCall[2]) + next(done) +} + +function benchRunWaterFall (done) { + runWaterfall(toCall, done) +} + +function benchInsync (done) { + insync.waterfall(toCall, done) +} + +function benchNeoAsync (done) { + neoAsync.waterfall(toCall, done) +} + +var compiled = require('./')(toCall) + +var run = bench([ + benchAsyncWaterfall, + benchInsync, + benchNeoAsync, + benchRunWaterFall, + benchSetImmediate, + benchWaterfallize, + benchFastFall, + compiled +], max) + +run(run) diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/example.js b/sdklab/meantimerecovery/aedes/node_modules/fastfall/example.js new file mode 100644 index 000000000..7038c390d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/example.js @@ -0,0 +1,20 @@ +'use strict' + +var fall = require('./')() + +fall([ + function a (cb) { + console.log('called a') + cb(null, 'a') + }, + function b (a, cb) { + console.log('called b with:', a) + cb(null, 'a', 'b') + }, + function c (a, b, cb) { + console.log('called c with:', a, b) + cb(null, 'a', 'b', 'c') + } +], function result (err, a, b, c) { + console.log('result arguments', err, a, b, c) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/fall.js b/sdklab/meantimerecovery/aedes/node_modules/fastfall/fall.js new file mode 100644 index 000000000..6744e3450 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/fall.js @@ -0,0 +1,150 @@ +'use strict' + +var reusify = require('reusify') +var empty = [] + +function fastfall (context, template) { + if (Array.isArray(context)) { + template = context + context = null + } + + var queue = reusify(Holder) + + return template ? 
compiled : fall + + function fall () { + var current = queue.get() + current.release = release + + if (arguments.length === 3) { + current.context = arguments[0] + current.list = arguments[1] + current.callback = arguments[2] || noop + } else { + current.context = context + current.list = arguments[0] + current.callback = arguments[1] || noop + } + + current.work() + } + + function release (holder) { + queue.release(holder) + } + + function compiled () { + var current = queue.get() + current.release = release + + current.list = template + + var args + var i + var len = arguments.length - 1 + + current.context = this || context + current.callback = arguments[len] || noop + + switch (len) { + case 0: + current.work() + break + case 1: + current.work(null, arguments[0]) + break + case 2: + current.work(null, arguments[0], arguments[1]) + break + case 3: + current.work(null, arguments[0], arguments[1], arguments[2]) + break + case 4: + current.work(null, arguments[0], arguments[1], arguments[2], arguments[3]) + break + default: + args = new Array(len + 1) + args[0] = null + for (i = 0; i < len; i++) { + args[i + 1] = arguments[i] + } + current.work.apply(null, args) + } + } +} + +function noop () {} + +function Holder () { + this.list = empty + this.callback = noop + this.count = 0 + this.context = undefined + this.release = noop + + var that = this + + this.work = function work () { + if (arguments.length > 0 && arguments[0]) { + return that.callback.call(that.context, arguments[0]) + } + + var len = arguments.length + var i + var args + var func + + if (that.count < that.list.length) { + func = that.list[that.count++] + switch (len) { + case 0: + case 1: + return func.call(that.context, work) + case 2: + return func.call(that.context, arguments[1], work) + case 3: + return func.call(that.context, arguments[1], arguments[2], work) + case 4: + return func.call(that.context, arguments[1], arguments[2], arguments[3], work) + default: + args = new Array(len) + for (i = 1; i < len; i++) { + args[i - 1] = arguments[i] + } + args[len - 1] = work + func.apply(that.context, args) + } + } else { + switch (len) { + case 0: + that.callback.call(that.context) + break + case 1: + that.callback.call(that.context, arguments[0]) + break + case 2: + that.callback.call(that.context, arguments[0], arguments[1]) + break + case 3: + that.callback.call(that.context, arguments[0], arguments[1], arguments[2]) + break + case 4: + that.callback.call(that.context, arguments[0], arguments[1], arguments[2], arguments[3]) + break + default: + args = new Array(len) + for (i = 0; i < len; i++) { + args[i] = arguments[i] + } + that.callback.apply(that.context, args) + } + that.context = undefined + that.list = empty + that.count = 0 + that.release(that) + } + } +} + +module.exports = fastfall diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/package.json b/sdklab/meantimerecovery/aedes/node_modules/fastfall/package.json new file mode 100644 index 000000000..f3933ed15 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/package.json @@ -0,0 +1,82 @@ +{ + "_from": "fastfall@^1.5.1", + "_id": "fastfall@1.5.1", + "_inBundle": false, + "_integrity": "sha512-KH6p+Z8AKPXnmA7+Iz2Lh8ARCMr+8WNPVludm1LGkZoD2MjY6LVnRMtTKhkdzI+jr0RzQWXKzKyBJm1zoHEL4Q==", + "_location": "/fastfall", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "fastfall@^1.5.1", + "name": "fastfall", + "escapedName": "fastfall", + "rawSpec": "^1.5.1", + "saveSpec": null, + "fetchSpec": 
"^1.5.1" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/fastfall/-/fastfall-1.5.1.tgz", + "_shasum": "3fee03331a49d1d39b3cdf7a5e9cd66f475e7b94", + "_spec": "fastfall@^1.5.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": "https://github.com/mcollina/fastfall/issues" + }, + "bundleDependencies": false, + "dependencies": { + "reusify": "^1.0.0" + }, + "deprecated": false, + "description": "call your callbacks in a waterfall, at speed", + "devDependencies": { + "async": "^1.0.0", + "coveralls": "^2.11.6", + "fastbench": "^1.0.0", + "faucet": "0.0.1", + "insync": "^2.1.1", + "istanbul": "^0.4.1", + "neo-async": "^1.7.0", + "pre-commit": "^1.0.10", + "run-waterfall": "^1.1.1", + "standard": "^5.0.0", + "tap-spec": "^4.1.1", + "tape": "^4.0.0", + "waterfallize": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "homepage": "https://github.com/mcollina/fastfall#readme", + "keywords": [ + "async", + "waterfall", + "fall", + "fast", + "callback" + ], + "license": "MIT", + "main": "fall.js", + "name": "fastfall", + "precommit": [ + "lint", + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/fastfall.git" + }, + "scripts": { + "coverage": "istanbul cover tape test.js | tap-spec", + "coveralls": "npm run coverage ; cat ./coverage/lcov.info | coveralls", + "lint": "standard", + "test": "tape test.js | faucet" + }, + "version": "1.5.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastfall/test.js b/sdklab/meantimerecovery/aedes/node_modules/fastfall/test.js new file mode 100644 index 000000000..f15b553b2 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastfall/test.js @@ -0,0 +1,193 @@ +'use strict' + +var test = require('tape') +var fastfall = require('./') + +test('basically works', function (t) { + t.plan(22) + + var fall = fastfall() + + fall([ + function a (cb) { + cb(null, 'a') + }, + function b (a, cb) { + t.equal(a, 'a', 'second function arg matches') + cb(null, 'a', 'b') + }, + function c (a, b, cb) { + t.equal(a, 'a', 'third function 1st arg matches') + t.equal(b, 'b', 'third function 2nd arg matches') + cb(null, 'a', 'b', 'c') + }, + function d (a, b, c, cb) { + t.equal(a, 'a', 'fourth function 1st arg matches') + t.equal(b, 'b', 'fourth function 2nd arg matches') + t.equal(c, 'c', 'fourth function 3rd arg matches') + cb(null, 'a', 'b', 'c', 'd') + }, + function e (a, b, c, d, cb) { + t.equal(a, 'a', 'fifth function 1st arg matches') + t.equal(b, 'b', 'fifth function 2nd arg matches') + t.equal(c, 'c', 'fifth function 3rd arg matches') + t.equal(d, 'd', 'fifth function 4th arg matches') + cb(null, 'a', 'b', 'c', 'd', 'e') + }, + function f (a, b, c, d, e, cb) { + t.equal(a, 'a', 'sixth function 1st arg matches') + t.equal(b, 'b', 'sixth function 2nd arg matches') + t.equal(c, 'c', 'sixth function 3rd arg matches') + t.equal(d, 'd', 'sixth function 4th arg matches') + t.equal(e, 'e', 'sixth function 5th arg matches') + cb(null, 'a', 'b', 'c', 'd', 'e', 'f') + } + ], function result (err, a, b, c, d, e, f) { + t.error(err, 'no error') + t.equal(a, 'a', 'result function 2nd arg matches') + t.equal(b, 'b', 'result function 3rd arg matches') + t.equal(c, 'c', 'result function 4th arg matches') + t.equal(d, 'd', 'result function 5th arg matches') + t.equal(e, 'e', 'result function 6th 
arg matches') + t.equal(f, 'f', 'result function 7th arg matches') + }) +}) + +test('call with error', function (t) { + t.plan(4) + + var fall = fastfall() + + fall([ + function a (cb) { + cb(null, 'a') + }, + function b (a, cb) { + t.equal(a, 'a', 'second function arg matches') + cb(new Error('this is expected!'), 'a', 'b') + }, + function c (a, b, cb) { + t.fail('this should never happen') + } + ], function result (err, a, b, c) { + t.ok(err, 'error') + t.notOk(a, 'no 2nd arg') + t.notOk(b, 'no 3rd arg') + }) +}) + +test('compiles a reusable fall', function (t) { + t.plan(10) + + var fall = fastfall([ + function a (arg, cb) { + cb(null, arg) + }, + function b (a, cb) { + cb(null, a, 'b') + }, + function c (a, b, cb) { + t.equal(b, 'b', 'third function 2nd arg matches') + cb(null, a, 'b', 'c') + } + ]) + + fall(42, function result (err, a, b, c) { + t.error(err, 'no error') + t.equal(a, 42, 'result function 2nd arg matches') + t.equal(b, 'b', 'result function 3rd arg matches') + t.equal(c, 'c', 'result function 4th arg matches') + }) + + fall(24, function result (err, a, b, c) { + t.error(err, 'no error') + t.equal(a, 24, 'result function 2nd arg matches') + t.equal(b, 'b', 'result function 3rd arg matches') + t.equal(c, 'c', 'result function 4th arg matches') + }) +}) + +test('set this', function (t) { + t.plan(2) + + var that = {} + var fall = fastfall(that) + + fall([ + function a (cb) { + t.equal(this, that, 'this is set') + cb(null, 'a') + } + ], function result (err, a, b, c) { + t.error(err, 'no error') + }) +}) + +test('set this in compiled mode', function (t) { + t.plan(4) + + var that = {} + var fall = fastfall(that, [ + function a (arg, cb) { + t.equal(this, that, 'this is set') + cb(null, arg) + } + ]) + + fall(42, function result (err, a, b, c) { + t.error(err, 'no error') + t.equal(a, 42, 'result function 2nd arg matches') + t.equal(this, that, 'this is set') + }) +}) + +test('set this for a normal fall', function (t) { + t.plan(4) + + var that = {} + var fall = fastfall() + + fall(that, [ + function a (cb) { + t.equal(this, that, 'this is set') + cb(null, 'a') + } + ], function result (err, a) { + t.error(err, 'no error') + t.equal(this, that, 'this is set') + t.equal(a, 'a', 'result function 2nd arg matches') + }) +}) + +test('use the this of the called object in compiled mode', function (t) { + t.plan(4) + + var that = {} + var fall = fastfall([ + function a (arg, cb) { + t.equal(this, that, 'this is set') + cb(null, arg) + } + ]) + + fall.call(that, 42, function result (err, a, b, c) { + t.error(err, 'no error') + t.equal(a, 42, 'result function 2nd arg matches') + t.equal(this, that, 'this is set') + }) +}) + +test('support errors in compiled mode', function (t) { + t.plan(2) + + var fall = fastfall([ + function a (arg, cb) { + t.pass('function is called') + cb(new Error('muahaha'), arg) + } + ]) + + fall(42, function result (err) { + t.ok(err, 'error is forwarded') + }) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/.github/dependabot.yml b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/.github/dependabot.yml new file mode 100644 index 000000000..7e7cbe1b0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 + ignore: + - dependency-name: standard + versions: + - 16.0.3 diff --git 
a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/.github/workflows/ci.yml b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/.github/workflows/ci.yml new file mode 100644 index 000000000..db3911bb5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/.github/workflows/ci.yml @@ -0,0 +1,27 @@ +name: ci + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 12.x, 14.x, 16.x] + + steps: + - uses: actions/checkout@v1 + + - name: Use Node.js + uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/LICENSE new file mode 100644 index 000000000..0b148242c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/README.md b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/README.md new file mode 100644 index 000000000..5eea335fa --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/README.md @@ -0,0 +1,124 @@ +# fastparallel [![ci](https://github.com/mcollina/fastparallel/actions/workflows/ci.yml/badge.svg)](https://github.com/mcollina/fastparallel/actions/workflows/ci.yml) + +Zero-overhead parallel function call for node.js. Also supports each +and map! + +Benchmark for doing 3 calls `setImmediate` 1 million times: + +``` +benchSetImmediate*1000000: 1378.514ms +benchAsyncParallel*1000000: 1740.304ms +benchAsyncEach*1000000: 1566.517ms +benchAsyncMap*1000000: 1687.518ms +benchNeoParallel*1000000: 1388.223ms +benchNeoEach*1000000: 1473.006ms +benchNeoMap*1000000: 1402.986ms +benchInsyncParallel*1000000: 1957.863ms +benchInsyncEach*1000000: 1383.822ms +benchInsyncMap*1000000: 1822.954ms +benchItemsParallel*1000000: 1690.118ms +benchParallelize*1000000: 1570.064ms +benchFastParallel*1000000: 1536.692ms +benchFastParallelNoResults*1000000: 1363.145ms +benchFastParallelEachResults*1000000: 1508.134ms +benchFastParallelEach*1000000: 1325.314ms +``` + +Obtained on node 12.18.2, on a dedicated server. + +If you need zero-overhead series function call, check out +[fastseries](http://npm.im/fastseries). If you need a fast work queue +check out [fastq](http://npm.im/fastq). If you need to run fast +waterfall calls, use [fastfall](http://npm.im/fastfall). 
+ +[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) + +__The major difference between version 1.x.x and 2.x.x is the order of +results__, this is now ready to replace async in every case. + +## Example for parallel call + +```js +var parallel = require('fastparallel')({ + // this is a function that will be called + // when a parallel completes + released: completed, + + // if you want the results, then here you are + results: true +}) + +parallel( + {}, // what will be this in the functions + [something, something, something], // functions to call + 42, // the first argument of the functions + done // the function to be called when the parallel ends +) + +function something (arg, cb) { + setImmediate(cb, null, 'myresult') +} + +function done (err, results) { + console.log('parallel completed, results:', results) +} + +function completed () { + console.log('parallel completed!') +} +``` + +## Example for each and map calls + +```js +var parallel = require('fastparallel')({ + // this is a function that will be called + // when a parallel completes + released: completed, + + // if you want the results, then here you are + // passing false disables map + results: true +}) + +parallel( + {}, // what will be this in the functions + something, // functions to call + [1, 2, 3], // the first argument of the functions + done // the function to be called when the parallel ends +) + +function something (arg, cb) { + setImmediate(cb, null, 'myresult') +} + +function done (err, results) { + console.log('parallel completed, results:', results) +} + +function completed () { + console.log('parallel completed!') +} + +``` + +## Caveats + +The `done` function will be called only once, even if more than one error happen. + +This library works by caching the latest used function, so that running a new parallel +does not cause **any memory allocations**. + +## Why it is so fast? + +1. This library is caching functions a lot. + +2. V8 optimizations: thanks to caching, the functions can be optimized by V8 (if they are optimizable, and I took great care of making them so). + +3. Don't use arrays if you just need a queue. A linked list implemented via processes is much faster if you don't need to access elements in between. + +4. Accept passing a this for the functions. Thanks to this hack, you can extract your functions, and place them in a outer level where they are not created at every execution. 
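+
+A minimal sketch of point 4 (the `stepA`/`stepB`/`run` names are purely illustrative): declare the
+worker functions once at module scope and pass per-call state through `this`, so no closures are
+allocated on each parallel run.
+
+```js
+var parallel = require('fastparallel')({ results: false })
+
+// Declared once at the top level: V8 can optimize these, and no
+// closure is created on every call to run().
+function stepA (cb) { setImmediate(cb) }
+function stepB (cb) { setImmediate(cb) }
+
+function run (state, done) {
+  // `state` becomes `this` inside stepA, stepB and done,
+  // so shared data travels without re-creating the functions.
+  parallel(state, [stepA, stepB], null, done)
+}
+
+run({ id: 1 }, function done () { console.log('done', this.id) })
+```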
+ +## License + +ISC diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/bench.js b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/bench.js new file mode 100644 index 000000000..03c1aca32 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/bench.js @@ -0,0 +1,120 @@ +var max = 1000000 +var parallel = require('./')() +var parallelNoResults = require('./')({ results: false }) +var bench = require('fastbench') +var async = require('async') +var neo = require('neo-async') +var insync = require('insync') +var items = require('items') +var parallelize = require('parallelize') + +function benchFastParallel (done) { + parallel(null, [somethingP, somethingP, somethingP], 42, done) +} + +function benchFastParallelNoResults (done) { + parallelNoResults(null, [somethingP, somethingP, somethingP], 42, done) +} + +function benchFastParallelEach (done) { + parallelNoResults(null, somethingP, [1, 2, 3], done) +} + +function benchFastParallelEachResults (done) { + parallel(null, somethingP, [1, 2, 3], done) +} + +function benchAsyncParallel (done) { + async.parallel([somethingA, somethingA, somethingA], done) +} + +function benchInsyncParallel (done) { + insync.parallel([somethingA, somethingA, somethingA], done) +} + +function benchNeoParallel (done) { + neo.parallel([somethingA, somethingA, somethingA], done) +} + +function benchItemsParallel (done) { + items.parallel.execute([somethingA, somethingA, somethingA], done) +} + +function benchParallelize (done) { + var next = parallelize(done) + + somethingA(next()) + somethingA(next()) + somethingA(next()) +} + +function benchAsyncEach (done) { + async.each([1, 2, 3], somethingP, done) +} + +function benchNeoEach (done) { + neo.each([1, 2, 3], somethingP, done) +} + +function benchAsyncMap (done) { + async.map([1, 2, 3], somethingP, done) +} + +function benchNeoMap (done) { + neo.map([1, 2, 3], somethingP, done) +} + +function benchInsyncEach (done) { + insync.each([1, 2, 3], somethingP, done) +} + +function benchInsyncMap (done) { + insync.map([1, 2, 3], somethingP, done) +} + +var nextDone +var nextCount + +function benchSetImmediate (done) { + nextCount = 3 + nextDone = done + setImmediate(somethingImmediate) + setImmediate(somethingImmediate) + setImmediate(somethingImmediate) +} + +function somethingImmediate () { + nextCount-- + if (nextCount === 0) { + nextDone() + } +} + +function somethingP (arg, cb) { + setImmediate(cb) +} + +function somethingA (cb) { + setImmediate(cb) +} + +var run = bench([ + benchSetImmediate, + benchAsyncParallel, + benchAsyncEach, + benchAsyncMap, + benchNeoParallel, + benchNeoEach, + benchNeoMap, + benchInsyncParallel, + benchInsyncEach, + benchInsyncMap, + benchItemsParallel, + benchParallelize, + benchFastParallel, + benchFastParallelNoResults, + benchFastParallelEachResults, + benchFastParallelEach +], max) + +run(run) diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/bench_long.js b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/bench_long.js new file mode 100644 index 000000000..44e1b637b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/bench_long.js @@ -0,0 +1,41 @@ +var max = 1000000 +var parallel = require('./')() +var parallelNoResults = require('./')({ results: false }) +var bench = require('fastbench') +var async = require('async') +var neo = require('neo-async') + +var funcs = [] + +for (var i = 0; i < 25; i++) { + funcs.push(something) +} + +function benchFastParallel (done) { + parallel(null, funcs, 42, 
done) +} + +function benchFastParallelNoResults (done) { + parallelNoResults(null, funcs, 42, done) +} + +function benchAsyncParallel (done) { + async.parallel(funcs, done) +} + +function benchNeoParallel (done) { + neo.parallel(funcs, done) +} + +function something (cb) { + setImmediate(cb) +} + +var run = bench([ + benchAsyncParallel, + benchNeoParallel, + benchFastParallel, + benchFastParallelNoResults +], max) + +run(run) diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/example.js b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/example.js new file mode 100644 index 000000000..b0f0e37de --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/example.js @@ -0,0 +1,40 @@ +var parallel = require('./')({ + // this is a function that will be called + // when a parallel completes + released: completed, + + // we want results and errors + // passing false will make it faster! + results: true +}) + +parallel( + {}, // what will be this in the functions + [something, something, something], // functions to call + 42, // the first argument of the functions + next // the function to be called when the parallel ends +) + +function something (arg, cb) { + setImmediate(cb, null, 'myresult') +} + +function next (err, results) { + if (err) { + // do something here! + } + console.log('parallel completed, results:', results) + + parallel({}, something, [1, 2, 3], done) +} + +function done (err, results) { + if (err) { + // do something here! + } + console.log('parallel completed, results:', results) +} + +function completed () { + console.log('parallel completed!') +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/package.json b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/package.json new file mode 100644 index 000000000..295c84496 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/package.json @@ -0,0 +1,77 @@ +{ + "_from": "fastparallel@^2.3.0", + "_id": "fastparallel@2.4.1", + "_inBundle": false, + "_integrity": "sha512-qUmhxPgNHmvRjZKBFUNI0oZuuH9OlSIOXmJ98lhKPxMZZ7zS/Fi0wRHOihDSz0R1YiIOjxzOY4bq65YTcdBi2Q==", + "_location": "/fastparallel", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "fastparallel@^2.3.0", + "name": "fastparallel", + "escapedName": "fastparallel", + "rawSpec": "^2.3.0", + "saveSpec": null, + "fetchSpec": "^2.3.0" + }, + "_requiredBy": [ + "/aedes", + "/mqemitter" + ], + "_resolved": "https://registry.npmjs.org/fastparallel/-/fastparallel-2.4.1.tgz", + "_shasum": "0d984a5813ffa67f30b4a5cb4cb8cbe61c7ee5a5", + "_spec": "fastparallel@^2.3.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": "https://github.com/mcollina/fastparallel/issues" + }, + "bundleDependencies": false, + "dependencies": { + "reusify": "^1.0.4", + "xtend": "^4.0.2" + }, + "deprecated": false, + "description": "Zero-overhead asynchronous parallel/each/map function call", + "devDependencies": { + "async": "^3.1.0", + "coveralls": "^3.0.5", + "fastbench": "^1.0.1", + "faucet": "0.0.1", + "insync": "^2.1.1", + "items": "^2.1.2", + "neo-async": "^2.6.1", + "nyc": "^14.1.1", + "parallelize": "^3.0.1", + "pre-commit": "^1.2.2", + "standard": "^13.0.1", + "tape": "^4.11.0" + }, + "homepage": "https://github.com/mcollina/fastparallel", + "keywords": [ + "parallel", + "fast", + "async" + 
], + "license": "ISC", + "main": "parallel.js", + "name": "fastparallel", + "pre-commit": [ + "lint", + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/fastparallel.git" + }, + "scripts": { + "coverage": "nyc --reporter=lcov tape test.js; cat coverage/lcov.info | coveralls", + "lint": "standard", + "test": "tape test.js | faucet" + }, + "version": "2.4.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/parallel.js b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/parallel.js new file mode 100644 index 000000000..b551c0514 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/parallel.js @@ -0,0 +1,199 @@ +'use strict' + +var xtend = require('xtend') +var reusify = require('reusify') +var defaults = { + released: nop, + results: true +} + +function fastparallel (options) { + options = xtend(defaults, options) + + var released = options.released + var queue = reusify(options.results ? ResultsHolder : NoResultsHolder) + var queueSingleCaller = reusify(SingleCaller) + var goArray = options.results ? goResultsArray : goNoResultsArray + var goFunc = options.results ? goResultsFunc : goNoResultsFunc + + return parallel + + function parallel (that, toCall, arg, done) { + var holder = queue.get() + done = done || nop + if (toCall.length === 0) { + done.call(that) + released(holder) + } else { + holder._callback = done + holder._callThat = that + holder._release = release + + if (typeof toCall === 'function') { + goFunc(that, toCall, arg, holder) + } else { + goArray(that, toCall, arg, holder) + } + + if (holder._count === 0) { + holder.release() + } + } + } + + function release (holder) { + queue.release(holder) + released(holder) + } + + function singleCallerRelease (holder) { + queueSingleCaller.release(holder) + } + + function goResultsFunc (that, toCall, arg, holder) { + var singleCaller = null + holder._count = arg.length + holder._results = new Array(holder._count) + for (var i = 0; i < arg.length; i++) { + singleCaller = queueSingleCaller.get() + singleCaller._release = singleCallerRelease + singleCaller.parent = holder + singleCaller.pos = i + if (that) { + toCall.call(that, arg[i], singleCaller.release) + } else { + toCall(arg[i], singleCaller.release) + } + } + } + + function goResultsArray (that, funcs, arg, holder) { + var sc = null + var tc = nop + holder._count = funcs.length + holder._results = new Array(holder._count) + for (var i = 0; i < funcs.length; i++) { + sc = queueSingleCaller.get() + sc._release = singleCallerRelease + sc.parent = holder + sc.pos = i + tc = funcs[i] + if (that) { + if (tc.length === 1) tc.call(that, sc.release) + else tc.call(that, arg, sc.release) + } else { + if (tc.length === 1) tc(sc.release) + else tc(arg, sc.release) + } + } + } + + function goNoResultsFunc (that, toCall, arg, holder) { + holder._count = arg.length + for (var i = 0; i < arg.length; i++) { + if (that) { + toCall.call(that, arg[i], holder.release) + } else { + toCall(arg[i], holder.release) + } + } + } + + function goNoResultsArray (that, funcs, arg, holder) { + var toCall = null + holder._count = funcs.length + for (var i = 0; i < funcs.length; i++) { + toCall = funcs[i] + if (that) { + if (toCall.length === 1) { + toCall.call(that, holder.release) + } else { + toCall.call(that, arg, holder.release) + } + } else { + if (toCall.length === 1) { + toCall(holder.release) + } else { + toCall(arg, holder.release) + } + } + } + } +} + +function NoResultsHolder () { + this._count = -1 + 
this._callback = nop + this._callThat = null + this._release = null + this.next = null + + var that = this + var i = 0 + this.release = function () { + var cb = that._callback + if (++i === that._count || that._count === 0) { + if (that._callThat) { + cb.call(that._callThat) + } else { + cb() + } + that._callback = nop + that._callThat = null + i = 0 + that._release(that) + } + } +} + +function SingleCaller () { + this.pos = -1 + this._release = nop + this.parent = null + this.next = null + + var that = this + this.release = function (err, result) { + that.parent.release(err, that.pos, result) + that.pos = -1 + that.parent = null + that._release(that) + } +} + +function ResultsHolder () { + this._count = -1 + this._callback = nop + this._results = null + this._err = null + this._callThat = null + this._release = nop + this.next = null + + var that = this + var i = 0 + this.release = function (err, pos, result) { + that._err = that._err || err + if (pos >= 0) { + that._results[pos] = result + } + var cb = that._callback + if (++i === that._count || that._count === 0) { + if (that._callThat) { + cb.call(that._callThat, that._err, that._results) + } else { + cb(that._err, that._results) + } + that._callback = nop + that._results = null + that._err = null + that._callThat = null + i = 0 + that._release(that) + } + } +} + +function nop () { } + +module.exports = fastparallel diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastparallel/test.js b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/test.js new file mode 100644 index 000000000..260b44a7c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastparallel/test.js @@ -0,0 +1,465 @@ +var test = require('tape') +var parallel = require('./') + +test('basically works', function (t) { + t.plan(6) + + var instance = parallel({ + released: released + }) + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done () { + t.equal(count, 2, 'all functions must have completed') + }) + + function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + setImmediate(function () { + count++ + cb() + }) + } + + function released () { + t.pass('release') + } +}) + +test('accumulates results', function (t) { + t.plan(8) + + var instance = parallel({ + released: released + }) + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done (err, results) { + t.notOk(err, 'no error') + t.equal(count, 2, 'all functions must have completed') + t.deepEqual(results, [1, 2]) + }) + + function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + setImmediate(function () { + count++ + cb(null, count) + }) + } + + function released () { + t.pass() + } +}) + +test('fowards errs', function (t) { + t.plan(3) + + var instance = parallel({ + released: released + }) + var count = 0 + var obj = {} + + instance(obj, [somethingErr, something], 42, function done (err, results) { + t.ok(err) + t.equal(count, 2, 'all functions must have completed') + }) + + function something (arg, cb) { + setImmediate(function () { + count++ + cb(null, count) + }) + } + + function somethingErr (arg, cb) { + setImmediate(function () { + count++ + cb(new Error('this is an err!')) + }) + } + + function released () { + t.pass() + } +}) + +test('fowards errs (bis)', function (t) { + t.plan(3) + + var instance = parallel({ + released: released + }) + var count = 0 + var obj = {} + + instance(obj, [something, somethingErr], 42, function done (err, results) { + t.ok(err) + t.equal(count, 2, 'all 
functions must have completed') + }) + + function something (arg, cb) { + setImmediate(function () { + count++ + cb(null, count) + }) + } + + function somethingErr (arg, cb) { + setImmediate(function () { + count++ + cb(new Error('this is an err!')) + }) + } + + function released () { + t.pass() + } +}) + +test('does not forward errors or result with results:false flag', function (t) { + t.plan(8) + + var instance = parallel({ + released: released, + results: false + }) + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done (err, results) { + t.equal(err, undefined, 'no err') + t.equal(results, undefined, 'no err') + t.equal(count, 2, 'all functions must have completed') + }) + + function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + setImmediate(function () { + count++ + cb() + }) + } + + function released () { + t.pass() + } +}) + +test('should call done and released if an empty is passed', function (t) { + t.plan(2) + + var instance = parallel({ + released: released + }) + var obj = {} + + instance(obj, [], 42, function done () { + t.pass() + }) + + function released () { + t.pass() + } +}) + +test('each support', function (t) { + t.plan(8) + + var instance = parallel({ + released: released + }) + var count = 0 + var obj = {} + var args = [1, 2, 3] + var i = 0 + + instance(obj, something, args, function done () { + t.equal(count, 3, 'all functions must have completed') + }) + + function something (arg, cb) { + t.equal(obj, this, 'this matches') + t.equal(args[i++], arg, 'the arg is correct') + setImmediate(function () { + count++ + cb() + }) + } + + function released () { + t.pass() + } +}) + +test('call the callback with the given this', function (t) { + t.plan(1) + + var instance = parallel() + var obj = {} + + instance(obj, [build(), build()], 42, function done () { + t.equal(obj, this, 'this matches') + }) + + function build () { + return function something (arg, cb) { + setImmediate(cb) + } + } +}) + +test('call the callback with the given this with no results', function (t) { + t.plan(1) + + var instance = parallel({ results: false }) + var obj = {} + + instance(obj, [build(), build()], 42, function done () { + t.equal(obj, this, 'this matches') + }) + + function build () { + return function something (arg, cb) { + setImmediate(cb) + } + } +}) + +test('call the callback with the given this with no data', function (t) { + t.plan(1) + + var instance = parallel() + var obj = {} + + instance(obj, [], 42, function done () { + t.equal(obj, this, 'this matches') + }) +}) + +test('call the result callback when the each array is empty', function (t) { + t.plan(1) + + var instance = parallel() + var obj = {} + + instance(obj, something, [], function done () { + t.pass('the result function has been called') + }) + + function something (arg, cb) { + t.error('this should never be called') + } +}) + +test('call the result callback when the each array is empty with no results', function (t) { + t.plan(1) + + var instance = parallel({ results: false }) + var obj = {} + + instance(obj, something, [], function done () { + t.pass('the result function has been called') + }) + + function something (arg, cb) { + t.error('this should never be called') + } +}) + +test('does not require a done callback', function (t) { + t.plan(4) + + var instance = parallel() + var obj = {} + + instance(obj, [something, something], 42) + + function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + setImmediate(cb) + } +}) + +test('works with sync 
functions with no results', function (t) { + t.plan(6) + + var instance = parallel({ + results: false, + released: released + }) + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done () { + t.equal(2, count, 'all functions must have completed') + }) + + function something (arg, cb) { + t.equal(this, obj) + t.equal(42, arg) + count++ + cb() + } + + function released () { + t.pass('release') + } +}) + +test('accumulates results in order', function (t) { + t.plan(8) + + var instance = parallel({ + released: released + }) + var count = 2 + var obj = {} + + instance(obj, [something, something], 42, function done (err, results) { + t.notOk(err, 'no error') + t.equal(count, 0, 'all functions must have completed') + t.deepEqual(results, [2, 1]) + }) + + function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + var value = count-- + setTimeout(function () { + cb(null, value) + }, 10 * value) + } + + function released () { + t.pass() + } +}) + +test('call without arg if there is no arg with no results', function (t) { + t.plan(3) + + var instance = parallel({ + results: false + }) + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done () { + t.equal(count, 2, 'all functions must have completed') + }) + + function something (cb) { + t.equal(obj, this) + setImmediate(function () { + count++ + cb() + }) + } +}) + +test('call without arg if there is no arg with results', function (t) { + t.plan(3) + + var instance = parallel() + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done () { + t.equal(count, 2, 'all functions must have completed') + }) + + function something (cb) { + t.equal(obj, this) + setImmediate(function () { + count++ + cb() + }) + } +}) + +test('each support with nothing to process', function (t) { + t.plan(2) + + var instance = parallel() + var obj = {} + var args = [] + + instance(obj, something, args, function done (err, results) { + t.error(err) + t.deepEqual(results, [], 'empty results') + }) + + function something (arg, cb) { + t.fail('this should never happen') + } +}) + +test('each without results support with nothing to process', function (t) { + t.plan(1) + + var instance = parallel({ results: false }) + var obj = {} + var args = [] + + instance(obj, something, args, function done () { + t.pass('done called') + }) + + function something (arg, cb) { + t.fail('this should never happen') + } +}) + +test('each works with arrays of objects', function (t) { + t.plan(3) + + var instance = parallel({ results: false }) + var obj = {} + var args = [{ val: true }, { val: true }] + + instance(obj, something, args, function () { + t.ok('done called') + }) + + function something (arg, cb) { + t.ok(arg.val) + cb() + } +}) + +test('using same instance multiple times clears the state of result holder', function (t) { + var total = 10 + t.plan(total) + + var instance = parallel({ + results: false, + released: released + }) + var obj = {} + var count = 0 + + function released () { + if (count < total) { + instance(obj, [something], 42, function done () { + t.ok(true, 'done is called') + count++ + }) + } + } + + released() + function something (cb) { + setImmediate(function () { + cb() + }) + } +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastseries/.github/workflows/ci.yml b/sdklab/meantimerecovery/aedes/node_modules/fastseries/.github/workflows/ci.yml new file mode 100644 index 000000000..36a65d02d --- /dev/null +++ 
b/sdklab/meantimerecovery/aedes/node_modules/fastseries/.github/workflows/ci.yml @@ -0,0 +1,43 @@ +name: ci + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [10.x, 12.x, 13.x] + + steps: + - uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test + + - name: Coveralls Parallel + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.github_token }} + parallel: true + + coverage: + needs: test + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + parallel-finished: true diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastseries/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/fastseries/LICENSE new file mode 100644 index 000000000..0b148242c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastseries/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastseries/README.md b/sdklab/meantimerecovery/aedes/node_modules/fastseries/README.md new file mode 100644 index 000000000..52aef098e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastseries/README.md @@ -0,0 +1,115 @@ +# fastseries + +![ci][ci-url] +[![npm version][npm-badge]][npm-url] +[![Coverage Status][coveralls-badge]][coveralls-url] +[![Dependency Status][david-badge]][david-url] + +Zero-overhead series function call for node.js. +Also supports `each` and `map`! + +If you need zero-overhead parallel function call, check out +[fastparallel](http://npm.im/fastparallel). 
+ +[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) + +## Example for series call + +```js +var series = require('fastseries')({ + // if you want the results, then here you are + results: true +}) + +series( + {}, // what will be this in the functions + [something, something, something], // functions to call + 42, // the first argument of the functions + done // the function to be called when the series ends +) + +function late (arg, cb) { + console.log('finishing', arg) + cb(null, 'myresult-' + arg) +} + +function something (arg, cb) { + setTimeout(late, 1000, arg, cb) +} + +function done (err, results) { + console.log('series completed, results:', results) +} +``` + +## Example for each and map calls + +```js +var series = require('fastseries')({ + // if you want the results, then here you are + // passing false disables map + results: true +}) + +series( + {}, // what will be this in the functions + something, // functions to call + [1, 2, 3], // the first argument of the functions + done // the function to be called when the series ends +) + +function late (arg, cb) { + console.log('finishing', arg) + cb(null, 'myresult-' + arg) +} + +function something (arg, cb) { + setTimeout(late, 1000, arg, cb) +} + +function done (err, results) { + console.log('series completed, results:', results) +} +``` + +## Caveats + +The `done` function will be called only once, even if more than one error happen. + +This library works by caching the latest used function, so that running a new series +does not cause **any memory allocations**. + +## Benchmarks + +Benchmark for doing 3 calls `setImmediate` 1 million times: + +``` +benchSetImmediate*1000000: 2460.623ms +benchAsyncSeries*1000000: 3064.569ms +benchAsyncEachSeries*1000000: 2913.525ms +benchAsyncMapSeries*1000000: 3020.794ms +benchNeoSeries*1000000: 2617.064ms +benchNeoEachSeries*1000000: 2621.672ms +benchNeoMapSeries*1000000: 2611.294ms +benchTinyEachAsync*1000000: 2706.457ms +benchFastSeries*1000000: 2540.653ms +benchFastSeriesNoResults*1000000: 2538.674ms +benchFastSeriesEach*1000000: 2534.856ms +benchFastSeriesEachResults*1000000: 2545.394ms +``` + +Benchmarks taken on Node 12.16.1 on a dedicated server. + +See [bench.js](./bench.js) for mode details. 
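+
+A minimal sketch of the error behaviour described in the Caveats above (the `ok`/`boom`/`never` step
+names are purely illustrative): once a step passes an error to its callback, the remaining steps are
+skipped and `done` receives that error exactly once.
+
+```js
+var series = require('fastseries')({ results: true })
+
+function ok (arg, cb) { cb(null, arg) }              // completes normally
+function boom (arg, cb) { cb(new Error('kaboom')) }  // fails
+function never (arg, cb) { console.log('never reached') } // skipped once an error occurs
+
+series(null, [ok, boom, never], 42, function done (err, results) {
+  console.log(err.message) // 'kaboom' — done is called once, with the first error
+})
+```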
+ +## License + +ISC + +[ci-url]: https://github.com/mcollina/fastseries/workflows/ci/badge.svg +[npm-badge]: https://badge.fury.io/js/fastseries.svg +[npm-url]: https://badge.fury.io/js/fastseries +[coveralls-badge]:https://coveralls.io/repos/mcollina/fastseries/badge.svg?branch=master&service=github +[coveralls-url]: https://coveralls.io/github/mcollina/fastseries?branch=master +[david-badge]: https://david-dm.org/mcollina/fastseries.svg +[david-url]: https://david-dm.org/mcollina/fastseries diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastseries/bench.js b/sdklab/meantimerecovery/aedes/node_modules/fastseries/bench.js new file mode 100644 index 000000000..7cf9eca55 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastseries/bench.js @@ -0,0 +1,94 @@ +var max = 1000000 +var series = require('./')() +var seriesNoResults = require('./')({ results: false }) +var async = require('async') +var neo = require('neo-async') +var bench = require('fastbench') +var tinyEachAsync = require('tiny-each-async') + +function benchFastSeries (done) { + series(null, [somethingP, somethingP, somethingP], 42, done) +} + +function benchFastSeriesNoResults (done) { + seriesNoResults(null, [somethingP, somethingP, somethingP], 42, done) +} + +function benchFastSeriesEach (done) { + seriesNoResults(null, somethingP, [1, 2, 3], done) +} + +function benchFastSeriesEachResults (done) { + series(null, somethingP, [1, 2, 3], done) +} + +function benchAsyncSeries (done) { + async.series([somethingA, somethingA, somethingA], done) +} + +function benchAsyncEachSeries (done) { + async.eachSeries([1, 2, 3], somethingP, done) +} + +function benchAsyncMapSeries (done) { + async.mapSeries([1, 2, 3], somethingP, done) +} + +function benchNeoSeries (done) { + neo.series([somethingA, somethingA, somethingA], done) +} + +function benchNeoEachSeries (done) { + neo.eachSeries([1, 2, 3], somethingP, done) +} + +function benchNeoMapSeries (done) { + neo.mapSeries([1, 2, 3], somethingP, done) +} + +function benchTinyEachAsync (done) { + tinyEachAsync([1, 2, 3], 1, somethingP, done) +} + +var nextDone +var nextCount + +function benchSetImmediate (done) { + nextCount = 3 + nextDone = done + setImmediate(somethingImmediate) +} + +function somethingImmediate () { + nextCount-- + if (nextCount === 0) { + nextDone() + } else { + setImmediate(somethingImmediate) + } +} + +function somethingP (arg, cb) { + setImmediate(cb) +} + +function somethingA (cb) { + setImmediate(cb) +} + +var run = bench([ + benchSetImmediate, + benchAsyncSeries, + benchAsyncEachSeries, + benchAsyncMapSeries, + benchNeoSeries, + benchNeoEachSeries, + benchNeoMapSeries, + benchTinyEachAsync, + benchFastSeries, + benchFastSeriesNoResults, + benchFastSeriesEach, + benchFastSeriesEachResults +], max) + +run(run) diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastseries/example.js b/sdklab/meantimerecovery/aedes/node_modules/fastseries/example.js new file mode 100644 index 000000000..93851379a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastseries/example.js @@ -0,0 +1,45 @@ +var series = require('./')({ + // this is a function that will be called + // when a series completes + released: completed, + + // we want results and errors + // passing false will make it faster! 
+ results: true +}) + +series( + {}, // what will be this in the functions + [something, something, something], // functions to call + 42, // the first argument of the functions + next // the function to be called when the series ends +) + +function late (arg, cb) { + console.log('finishing', arg) + cb(null, 'myresult-' + arg) +} + +function something (arg, cb) { + setTimeout(late, 1000, arg, cb) +} + +function next (err, results) { + if (err) { + // do something here! + } + console.log('series completed, results:', results) + + series({}, something, [1, 2, 3], done) +} + +function done (err, results) { + if (err) { + // do something here! + } + console.log('series completed, results:', results) +} + +function completed () { + console.log('series completed!') +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastseries/package.json b/sdklab/meantimerecovery/aedes/node_modules/fastseries/package.json new file mode 100644 index 000000000..d442cf83d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastseries/package.json @@ -0,0 +1,72 @@ +{ + "_from": "fastseries@^2.0.0", + "_id": "fastseries@2.0.0", + "_inBundle": false, + "_integrity": "sha512-XBU9RXeoYc2/VnvMhplAxEmZLfIk7cvTBu+xwoBuTI8pL19E03cmca17QQycKIdxgwCeFA/a4u27gv1h3ya5LQ==", + "_location": "/fastseries", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "fastseries@^2.0.0", + "name": "fastseries", + "escapedName": "fastseries", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/fastseries/-/fastseries-2.0.0.tgz", + "_shasum": "d43eb975f6175dd83457d1d68fbd30f936ec7f2b", + "_spec": "fastseries@^2.0.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": "https://github.com/mcollina/fastseries/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Zero-overhead asynchronous series/each/map function calls", + "devDependencies": { + "async": "^3.2.0", + "fastbench": "^1.0.1", + "neo-async": "^2.6.1", + "nyc": "^15.0.0", + "pre-commit": "^1.2.2", + "snazzy": "^8.0.0", + "standard": "^14.3.1", + "tape": "^4.13.0", + "tiny-each-async": "^2.0.3" + }, + "homepage": "https://github.com/mcollina/fastseries", + "keywords": [ + "series", + "fast", + "async" + ], + "license": "ISC", + "main": "series.js", + "name": "fastseries", + "pre-commit": [ + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/fastseries.git" + }, + "scripts": { + "cov": "nyc --reporter=text tape test.js", + "lint": "standard --verbose | snazzy", + "test": "npm run lint && npm run unit:cov", + "test:report": "npm run lint && npm run unit:report", + "unit": "tape test.js", + "unit:cov": "nyc --reporter=lcovonly tape test.js", + "unit:report": "nyc --reporter=html --reporter=cobertura --reporter=text tape test.js" + }, + "version": "2.0.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastseries/series.js b/sdklab/meantimerecovery/aedes/node_modules/fastseries/series.js new file mode 100644 index 000000000..8872f3e92 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastseries/series.js @@ -0,0 +1,155 @@ +'use strict' + +var defaults = { + results: true +} + +function fastseries (options) { + options = Object.assign({}, 
defaults, options) + + var seriesEach + var seriesList + + if (options.results) { + seriesEach = resultEach + seriesList = resultList + } else { + seriesEach = noResultEach + seriesList = noResultList + } + + return series + + function series (that, toCall, arg, done) { + done = (done || nop).bind(that) + + if (toCall.length === 0) { + done.call(that) + } else if (toCall.bind) { + if (that) { + toCall = toCall.bind(that) + } + seriesEach(toCall, arg, done) + } else { + var _list + if (that) { + var length = toCall.length + _list = new Array(length) + for (var i = 0; i < length; i++) { + _list[i] = toCall[i].bind(that) + } + } else { + _list = toCall + } + + seriesList(_list, arg, done) + } + } +} + +function noResultEach (each, list, cb) { + var i = 0 + var length = list.length + + release() + + function release () { + if (i < length) { + makeCallTwo(each, list[i++], release) + } else { + cb() + } + } +} + +function noResultList (list, arg, cb) { + var i = 0 + var length = list.length + var makeCall + + if (list[0].length === 1) { + makeCall = makeCallOne + } else { + makeCall = makeCallTwo + } + + release() + + function release () { + if (i < length) { + makeCall(list[i++], arg, release) + } else { + cb() + } + } +} + +function resultEach (each, list, cb) { + var i = 0 + var length = list.length + var results = new Array(length) + + release(null, null) + + function release (err, result) { + if (err) { + cb(err) + return + } + + if (i > 0) { + results[i - 1] = result + } + + if (i < length) { + makeCallTwo(each, list[i++], release) + } else { + cb(null, results) + } + } +} + +function resultList (list, arg, cb) { + var i = 0 + var length = list.length + var makeCall + + if (list[0].length === 1) { + makeCall = makeCallOne + } else { + makeCall = makeCallTwo + } + + var results = new Array(length) + + release(null, null) + + function release (err, result) { + if (err) { + cb(err) + return + } + + if (i > 0) { + results[i - 1] = result + } + + if (i < length) { + makeCall(list[i++], arg, release) + } else { + cb(null, results) + } + } +} + +function makeCallOne (cb, arg, release) { + cb(release) +} + +function makeCallTwo (cb, arg, release) { + cb(arg, release) +} + +function nop () { } + +module.exports = fastseries diff --git a/sdklab/meantimerecovery/aedes/node_modules/fastseries/test.js b/sdklab/meantimerecovery/aedes/node_modules/fastseries/test.js new file mode 100644 index 000000000..26beca443 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/fastseries/test.js @@ -0,0 +1,368 @@ +'use strict' + +var test = require('tape') +var series = require('./') + +test('basically works', function (t) { + t.plan(7) + + var instance = series() + var count = 0 + var obj = {} + + instance(obj, [build(0), build(1)], 42, function done () { + t.equal(count, 2, 'all functions must have completed') + }) + + function build (expected) { + return function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + t.equal(expected, count) + setImmediate(function () { + count++ + cb() + }) + } + } +}) + +test('without this', function (t) { + t.plan(7) + + var instance = series() + var count = 0 + + instance(null, [build(0), build(1)], 42, function done () { + t.equal(count, 2, 'all functions must have completed') + }) + + function build (expected) { + return function something (arg, cb) { + t.equal(undefined, this) + t.equal(arg, 42) + t.equal(expected, count) + setImmediate(function () { + count++ + cb() + }) + } + } +}) + +test('accumulates results', function (t) { + t.plan(7) + + var 
instance = series() + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done (err, results) { + t.notOk(err, 'no error') + t.equal(count, 2, 'all functions must have completed') + t.deepEqual(results, [1, 2]) + }) + + function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + setImmediate(function () { + count++ + cb(null, count) + }) + } +}) + +test('fowards errs', function (t) { + t.plan(3) + + var instance = series() + var count = 0 + var obj = {} + + instance(obj, [somethingErr, something], 42, function done (err, results) { + t.ok(err, 'error exists') + t.equal(err.message, 'this is an err!') + t.equal(count, 1, 'only the first function must have completed') + }) + + function something (arg, cb) { + setImmediate(function () { + count++ + cb(null, count) + }) + } + + function somethingErr (arg, cb) { + setImmediate(function () { + count++ + cb(new Error('this is an err!')) + }) + } +}) + +test('does not forward errors or result with results:false flag', function (t) { + t.plan(7) + + var instance = series({ + results: false + }) + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done (err, results) { + t.equal(err, undefined, 'no err') + t.equal(results, undefined, 'no err') + t.equal(count, 2, 'all functions must have completed') + }) + + function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + setImmediate(function () { + count++ + cb() + }) + } +}) + +test('should call done iff an empty is passed', function (t) { + t.plan(1) + + var instance = series() + var obj = {} + + instance(obj, [], 42, function done () { + t.pass() + }) +}) + +test('each support', function (t) { + t.plan(7) + + var instance = series() + var count = 0 + var obj = {} + var args = [1, 2, 3] + var i = 0 + + instance(obj, something, [].concat(args), function done () { + t.equal(count, 3, 'all functions must have completed') + }) + + function something (arg, cb) { + t.equal(obj, this, 'this matches') + t.equal(args[i++], arg, 'the arg is correct') + setImmediate(function () { + count++ + cb() + }) + } +}) + +test('each errors', function (t) { + t.plan(2) + + var instance = series() + var obj = {} + var args = [1, 2, 3] + var err = new Error('kaboom') + + instance(obj, something, [].concat(args), function done (_err) { + t.equal(err, _err) + }) + + function something (arg, cb) { + t.pass('something called') + cb(err) + } +}) + +test('each without this', function (t) { + t.plan(7) + + var instance = series() + var count = 0 + var args = [1, 2, 3] + var i = 0 + + instance(null, something, [].concat(args), function done () { + t.equal(count, 3, 'all functions must have completed') + }) + + function something (arg, cb) { + t.equal(undefined, this, 'this matches') + t.equal(args[i++], arg, 'the arg is correct') + setImmediate(function () { + count++ + cb() + }) + } +}) + +test('call the callback with the given this', function (t) { + t.plan(1) + + var instance = series() + var obj = {} + + instance(obj, [build(), build()], 42, function done () { + t.equal(obj, this, 'this matches') + }) + + function build () { + return function something (arg, cb) { + setImmediate(cb) + } + } +}) + +test('call the callback with the given this with no results', function (t) { + t.plan(1) + + var instance = series({ results: false }) + var obj = {} + + instance(obj, [build(), build()], 42, function done () { + t.equal(obj, this, 'this matches') + }) + + function build () { + return function something (arg, cb) { + setImmediate(cb) + } + } +}) 
+ +test('call the callback with the given this with no data', function (t) { + t.plan(1) + + var instance = series() + var obj = {} + + instance(obj, [], 42, function done () { + t.equal(obj, this, 'this matches') + }) +}) + +test('support no final callback', function (t) { + t.plan(6) + + var instance = series() + var count = 0 + var obj = {} + + instance(obj, [build(0), build(1)], 42) + + function build (expected) { + return function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + t.equal(expected, count) + setImmediate(function () { + count++ + cb() + }) + } + } +}) + +test('call without arg if there is no arg with no results', function (t) { + t.plan(3) + + var instance = series({ + results: false + }) + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done () { + t.equal(count, 2, 'all functions must have completed') + }) + + function something (cb) { + t.equal(obj, this) + setImmediate(function () { + count++ + cb() + }) + } +}) + +test('call without arg if there is no arg with results', function (t) { + t.plan(3) + + var instance = series() + var count = 0 + var obj = {} + + instance(obj, [something, something], 42, function done () { + t.equal(count, 2, 'all functions must have completed') + }) + + function something (cb) { + t.equal(obj, this) + setImmediate(function () { + count++ + cb() + }) + } +}) + +test('each support with nothing to process', function (t) { + t.plan(2) + + var instance = series() + var obj = {} + var args = [] + + instance(obj, something, args, function done (err, results) { + t.error(err) + t.deepEqual(results, [], 'empty results') + }) + + function something (arg, cb) { + t.fail('this should never happen') + } +}) + +test('each without results support with nothing to process', function (t) { + t.plan(1) + + var instance = series({ results: false }) + var obj = {} + var args = [] + + instance(obj, something, args, function done () { + t.pass('done called') + }) + + function something (arg, cb) { + t.fail('this should never happen') + } +}) + +test('each without results', function (t) { + t.plan(7) + + var instance = series({ + results: false + }) + var count = 0 + var obj = {} + var args = [1, 2, 3] + var i = 0 + + instance(obj, something, [].concat(args), function done () { + t.equal(count, 3, 'all functions must have completed') + }) + + function something (arg, cb) { + t.equal(obj, this, 'this matches') + t.equal(args[i++], arg, 'the arg is correct') + setImmediate(function () { + count++ + cb() + }) + } +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/forwarded/HISTORY.md b/sdklab/meantimerecovery/aedes/node_modules/forwarded/HISTORY.md new file mode 100644 index 000000000..2599a5573 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/forwarded/HISTORY.md @@ -0,0 +1,16 @@ +0.1.2 / 2017-09-14 +================== + + * perf: improve header parsing + * perf: reduce overhead when no `X-Forwarded-For` header + +0.1.1 / 2017-09-10 +================== + + * Fix trimming leading / trailing OWS + * perf: hoist regular expression + +0.1.0 / 2014-09-21 +================== + + * Initial release diff --git a/sdklab/meantimerecovery/aedes/node_modules/forwarded/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/forwarded/LICENSE new file mode 100644 index 000000000..84441fbb5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/forwarded/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2017 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, 
to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/forwarded/README.md b/sdklab/meantimerecovery/aedes/node_modules/forwarded/README.md new file mode 100644 index 000000000..c776ee54e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/forwarded/README.md @@ -0,0 +1,57 @@ +# forwarded + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Parse HTTP X-Forwarded-For header + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install forwarded +``` + +## API + +```js +var forwarded = require('forwarded') +``` + +### forwarded(req) + +```js +var addresses = forwarded(req) +``` + +Parse the `X-Forwarded-For` header from the request. Returns an array +of the addresses, including the socket address for the `req`, in reverse +order (i.e. index `0` is the socket address and the last index is the +furthest address, typically the end-user). + +## Testing + +```sh +$ npm test +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/forwarded.svg +[npm-url]: https://npmjs.org/package/forwarded +[node-version-image]: https://img.shields.io/node/v/forwarded.svg +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/jshttp/forwarded/master.svg +[travis-url]: https://travis-ci.org/jshttp/forwarded +[coveralls-image]: https://img.shields.io/coveralls/jshttp/forwarded/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/forwarded?branch=master +[downloads-image]: https://img.shields.io/npm/dm/forwarded.svg +[downloads-url]: https://npmjs.org/package/forwarded diff --git a/sdklab/meantimerecovery/aedes/node_modules/forwarded/index.js b/sdklab/meantimerecovery/aedes/node_modules/forwarded/index.js new file mode 100644 index 000000000..7833b3de5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/forwarded/index.js @@ -0,0 +1,76 @@ +/*! + * forwarded + * Copyright(c) 2014-2017 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = forwarded + +/** + * Get all addresses in the request, using the `X-Forwarded-For` header. 
+ * + * @param {object} req + * @return {array} + * @public + */ + +function forwarded (req) { + if (!req) { + throw new TypeError('argument req is required') + } + + // simple header parsing + var proxyAddrs = parse(req.headers['x-forwarded-for'] || '') + var socketAddr = req.connection.remoteAddress + var addrs = [socketAddr].concat(proxyAddrs) + + // return all addresses + return addrs +} + +/** + * Parse the X-Forwarded-For header. + * + * @param {string} header + * @private + */ + +function parse (header) { + var end = header.length + var list = [] + var start = header.length + + // gather addresses, backwards + for (var i = header.length - 1; i >= 0; i--) { + switch (header.charCodeAt(i)) { + case 0x20: /* */ + if (start === end) { + start = end = i + } + break + case 0x2c: /* , */ + if (start !== end) { + list.push(header.substring(start, end)) + } + start = end = i + break + default: + start = i + break + } + } + + // final address + if (start !== end) { + list.push(header.substring(start, end)) + } + + return list +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/forwarded/package.json b/sdklab/meantimerecovery/aedes/node_modules/forwarded/package.json new file mode 100644 index 000000000..04c53b49e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/forwarded/package.json @@ -0,0 +1,78 @@ +{ + "_from": "forwarded@^0.1.2", + "_id": "forwarded@0.1.2", + "_inBundle": false, + "_integrity": "sha512-Ua9xNhH0b8pwE3yRbFfXJvfdWF0UHNCdeyb2sbi9Ul/M+r3PTdrz7Cv4SCfZRMjmzEM9PhraqfZFbGTIg3OMyA==", + "_location": "/forwarded", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "forwarded@^0.1.2", + "name": "forwarded", + "escapedName": "forwarded", + "rawSpec": "^0.1.2", + "saveSpec": null, + "fetchSpec": "^0.1.2" + }, + "_requiredBy": [ + "/aedes-protocol-decoder" + ], + "_resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "_shasum": "98c23dab1175657b8c0573e8ceccd91b0ff18c84", + "_spec": "forwarded@^0.1.2", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes-protocol-decoder", + "bugs": { + "url": "https://github.com/jshttp/forwarded/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + } + ], + "deprecated": false, + "description": "Parse HTTP X-Forwarded-For header", + "devDependencies": { + "beautify-benchmark": "0.2.4", + "benchmark": "2.1.4", + "eslint": "3.19.0", + "eslint-config-standard": "10.2.1", + "eslint-plugin-import": "2.7.0", + "eslint-plugin-node": "5.1.1", + "eslint-plugin-promise": "3.5.0", + "eslint-plugin-standard": "3.0.1", + "istanbul": "0.4.5", + "mocha": "1.21.5" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "homepage": "https://github.com/jshttp/forwarded#readme", + "keywords": [ + "x-forwarded-for", + "http", + "req" + ], + "license": "MIT", + "name": "forwarded", + "repository": { + "type": "git", + "url": "git+https://github.com/jshttp/forwarded.git" + }, + "scripts": { + "bench": "node benchmark/index.js", + "lint": "eslint .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + }, + "version": 
"0.1.2" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/from2/.travis.yml new file mode 100644 index 000000000..b03ffabca --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +before_install: + - npm install -g npm +node_js: + - "0.8" + - "0.10" + - "0.12" + - "iojs" diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/LICENSE.md b/sdklab/meantimerecovery/aedes/node_modules/from2/LICENSE.md new file mode 100644 index 000000000..146cb32a7 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/LICENSE.md @@ -0,0 +1,21 @@ +## The MIT License (MIT) ## + +Copyright (c) 2014 Hugh Kennedy + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/README.md b/sdklab/meantimerecovery/aedes/node_modules/from2/README.md new file mode 100644 index 000000000..3e041a412 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/README.md @@ -0,0 +1,70 @@ +# from2 [![Flattr this!](https://api.flattr.com/button/flattr-badge-large.png)](https://flattr.com/submit/auto?user_id=hughskennedy&url=http://github.com/hughsk/from2&title=from2&description=hughsk/from2%20on%20GitHub&language=en_GB&tags=flattr,github,javascript&category=software)[![experimental](http://hughsk.github.io/stability-badges/dist/experimental.svg)](http://github.com/hughsk/stability-badges) # + +`from2` is a high-level module for creating readable streams that properly handle backpressure. + +Convience wrapper for +[readable-stream](http://github.com/isaacs/readable-stream)'s `ReadableStream` +base class, with an API lifted from +[from](http://github.com/dominictarr/from) and +[through2](http://github.com/rvagg/through2). + +## Usage ## + +[![from2](https://nodei.co/npm/from2.png?mini=true)](https://nodei.co/npm/from2) + +### `stream = from2([opts], read)` ### + +Where `opts` are the options to pass on to the `ReadableStream` constructor, +and `read(size, next)` is called when data is requested from the stream. + +* `size` is the recommended amount of data (in bytes) to retrieve. +* `next(err)` should be called when you're ready to emit more data. 
+ +For example, here's a readable stream that emits the contents of a given +string: + +``` javascript +var from = require('from2') + +function fromString(string) { + return from(function(size, next) { + // if there's no more content + // left in the string, close the stream. + if (string.length <= 0) return next(null, null) + + // Pull in a new chunk of text, + // removing it from the string. + var chunk = string.slice(0, size) + string = string.slice(size) + + // Emit "chunk" from the stream. + next(null, chunk) + }) +} + +// pipe "hello world" out +// to stdout. +fromString('hello world').pipe(process.stdout) +``` + +### `stream = from2.obj([opts], read)` ### + +Shorthand for `from2({ objectMode: true }, read)`. + +### `createStream = from2.ctor([opts], read)` ### + +If you're creating similar streams in quick succession you can improve +performance by generating a stream **constructor** that you can reuse instead +of creating one-off streams on each call. + +Takes the same options as `from2`, instead returning a constructor which you +can use to create new streams. + +### See Also + +- [from2-array](https://github.com/binocarlos/from2-array) - Create a from2 stream based on an array of source values. +- [from2-string](https://github.com/yoshuawuyts/from2-string) - Create a stream from a string. Sugary wrapper around from2. + +## License ## + +MIT. See [LICENSE.md](http://github.com/hughsk/from2/blob/master/LICENSE.md) for details. diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/index.js b/sdklab/meantimerecovery/aedes/node_modules/from2/index.js new file mode 100644 index 000000000..cb200c610 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/index.js @@ -0,0 +1,103 @@ +var Readable = require('readable-stream').Readable +var inherits = require('inherits') + +module.exports = from2 + +from2.ctor = ctor +from2.obj = obj + +var Proto = ctor() + +function toFunction(list) { + list = list.slice() + return function (_, cb) { + var err = null + var item = list.length ? list.shift() : null + if (item instanceof Error) { + err = item + item = null + } + + cb(err, item) + } +} + +function from2(opts, read) { + if (typeof opts !== 'object' || Array.isArray(opts)) { + read = opts + opts = {} + } + + var rs = new Proto(opts) + rs._from = Array.isArray(read) ? 
toFunction(read) : (read || noop) + return rs +} + +function ctor(opts, read) { + if (typeof opts === 'function') { + read = opts + opts = {} + } + + opts = defaults(opts) + + inherits(Class, Readable) + function Class(override) { + if (!(this instanceof Class)) return new Class(override) + this._reading = false + this._callback = check + this.destroyed = false + Readable.call(this, override || opts) + + var self = this + var hwm = this._readableState.highWaterMark + + function check(err, data) { + if (self.destroyed) return + if (err) return self.destroy(err) + if (data === null) return self.push(null) + self._reading = false + if (self.push(data)) self._read(hwm) + } + } + + Class.prototype._from = read || noop + Class.prototype._read = function(size) { + if (this._reading || this.destroyed) return + this._reading = true + this._from(size, this._callback) + } + + Class.prototype.destroy = function(err) { + if (this.destroyed) return + this.destroyed = true + + var self = this + process.nextTick(function() { + if (err) self.emit('error', err) + self.emit('close') + }) + } + + return Class +} + +function obj(opts, read) { + if (typeof opts === 'function' || Array.isArray(opts)) { + read = opts + opts = {} + } + + opts = defaults(opts) + opts.objectMode = true + opts.highWaterMark = 16 + + return from2(opts, read) +} + +function noop () {} + +function defaults(opts) { + opts = opts || {} + return opts +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/.travis.yml new file mode 100644 index 000000000..f62cdac06 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/.travis.yml @@ -0,0 +1,34 @@ +sudo: false +language: node_js +before_install: + - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: NPM_LEGACY=true + - node_js: '0.10' + env: NPM_LEGACY=true + - node_js: '0.11' + env: NPM_LEGACY=true + - node_js: '0.12' + env: NPM_LEGACY=true + - node_js: 1 + env: NPM_LEGACY=true + - node_js: 2 + env: NPM_LEGACY=true + - node_js: 3 + env: NPM_LEGACY=true + - node_js: 4 + - node_js: 5 + - node_js: 6 + - node_js: 7 + - node_js: 8 + - node_js: 9 +script: "npm run test" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/CONTRIBUTING.md b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 000000000..f478d58dc --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open 
source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/GOVERNANCE.md b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 000000000..16ffb93f2 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. 
+ +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. 
+ +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/LICENSE new file mode 100644 index 000000000..2873b3b2e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/README.md b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/README.md new file mode 100644 index 000000000..23fe3f3e3 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 000000000..83275f192 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? 
+* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/duplex-browser.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 000000000..f8b2db83d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/duplex.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/duplex.js new file mode 100644 index 000000000..46924cbfd --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/package.json b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/package.json new file mode 100644 index 000000000..2cb959a38 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "readable-stream@^2.0.0", + "_id": "readable-stream@2.3.7", + "_inBundle": false, + "_integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "_location": "/from2/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@^2.0.0", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/from2" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "_shasum": "1eca1cf711aef814c04f62252a36a62f6cb23b57", + "_spec": "readable-stream@^2.0.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\from2", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + 
"readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.7" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/passthrough.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/passthrough.js new file mode 100644 index 000000000..ffd791d7f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/readable-browser.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/readable-browser.js new file mode 100644 index 000000000..e50372592 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/readable.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/readable.js new file mode 100644 index 000000000..ec89ec533 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/transform.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/transform.js new file mode 100644 index 000000000..b1baba26d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/writable-browser.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/writable-browser.js new file mode 100644 
index 000000000..ebdde6a85 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/writable.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/writable.js new file mode 100644 index 000000000..3211a6f80 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/LICENSE new file mode 100644 index 000000000..0c068ceec --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/README.md b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/README.md new file mode 100644 index 000000000..e9a81afd0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. 
+ +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
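+
+As an illustrative sketch (not part of the upstream README), the two
+allocation styles contrasted in the pooling note that follows end up with
+identical, fully initialized contents:
+
+```js
+var Buffer = require('safe-buffer').Buffer
+
+// Never uses the internal pool; the buffer is filled as part of the
+// allocation itself.
+var a = Buffer.alloc(256, 0x61)
+
+// May be sliced from the pre-allocated pool (size <= Buffer.poolSize >> 1),
+// then explicitly overwritten afterwards.
+var b = Buffer.allocUnsafe(256).fill(0x61)
+
+console.log(a.equals(b)) // true
+```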
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? 
+ +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/index.d.ts b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/index.d.ts new file mode 100644 index 000000000..e9fed809a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + 
writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. 
+ * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/index.js b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/index.js new file mode 100644 index 000000000..22438dabb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git 
a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/package.json b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/package.json new file mode 100644 index 000000000..62999dd0c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/safe-buffer/package.json @@ -0,0 +1,63 @@ +{ + "_from": "safe-buffer@~5.1.1", + "_id": "safe-buffer@5.1.2", + "_inBundle": false, + "_integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "_location": "/from2/safe-buffer", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "safe-buffer@~5.1.1", + "name": "safe-buffer", + "escapedName": "safe-buffer", + "rawSpec": "~5.1.1", + "saveSpec": null, + "fetchSpec": "~5.1.1" + }, + "_requiredBy": [ + "/from2/readable-stream", + "/from2/string_decoder" + ], + "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "_shasum": "991ec69d296e0313747d59bdfd2b745c35f8828d", + "_spec": "safe-buffer@~5.1.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\from2\\node_modules\\readable-stream", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Safer Node.js Buffer API", + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "name": "safe-buffer", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "types": "index.d.ts", + "version": "5.1.2" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/.travis.yml new file mode 100644 index 000000000..3347a7254 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/LICENSE new file mode 100644 index 000000000..778edb207 --- /dev/null +++ 
b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/README.md b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/README.md new file mode 100644 index 000000000..5fd58315e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. 
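+
+For a quick usage sketch (using only the `StringDecoder` class that this package mirrors from Node core), a multi-byte character split across two chunks can be decoded like this:
+
+```js
+const { StringDecoder } = require('string_decoder')
+const decoder = new StringDecoder('utf8')
+
+// The euro sign (U+20AC) is encoded as three bytes: 0xE2 0x82 0xAC.
+const first = decoder.write(Buffer.from([0xE2, 0x82])) // '' - incomplete sequence is buffered
+const second = decoder.write(Buffer.from([0xAC]))      // the character is emitted once complete
+console.log(first + second) // => '€'
+```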
+ +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/package.json b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/package.json new file mode 100644 index 000000000..c6755c7c8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/node_modules/string_decoder/package.json @@ -0,0 +1,59 @@ +{ + "_from": "string_decoder@~1.1.1", + "_id": "string_decoder@1.1.1", + "_inBundle": false, + "_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "_location": "/from2/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~1.1.1", + "saveSpec": null, + "fetchSpec": "~1.1.1" + }, + "_requiredBy": [ + "/from2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "_spec": "string_decoder@~1.1.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\from2\\node_modules\\readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.1.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/package.json b/sdklab/meantimerecovery/aedes/node_modules/from2/package.json new file mode 100644 
index 000000000..678bd5e9c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/package.json @@ -0,0 +1,75 @@ +{ + "_from": "from2@^2.3.0", + "_id": "from2@2.3.0", + "_inBundle": false, + "_integrity": "sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==", + "_location": "/from2", + "_phantomChildren": { + "core-util-is": "1.0.3", + "inherits": "2.0.4", + "isarray": "1.0.0", + "process-nextick-args": "2.0.1", + "util-deprecate": "1.0.2" + }, + "_requested": { + "type": "range", + "registry": true, + "raw": "from2@^2.3.0", + "name": "from2", + "escapedName": "from2", + "rawSpec": "^2.3.0", + "saveSpec": null, + "fetchSpec": "^2.3.0" + }, + "_requiredBy": [ + "/aedes-persistence" + ], + "_resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", + "_shasum": "8bfb5502bde4a4d36cfdeea007fcca21d7e382af", + "_spec": "from2@^2.3.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes-persistence", + "author": { + "name": "Hugh Kennedy", + "email": "hughskennedy@gmail.com", + "url": "http://hughsk.io/" + }, + "bugs": { + "url": "https://github.com/hughsk/from2/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Mathias Buus", + "email": "mathiasbuus@gmail.com" + } + ], + "dependencies": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + }, + "deprecated": false, + "description": "Convenience wrapper for ReadableStream, with an API lifted from \"from\" and \"through2\"", + "devDependencies": { + "tape": "^4.0.0" + }, + "homepage": "https://github.com/hughsk/from2", + "keywords": [ + "from", + "stream", + "readable", + "pull", + "convenience", + "wrapper" + ], + "license": "MIT", + "main": "index.js", + "name": "from2", + "repository": { + "type": "git", + "url": "git://github.com/hughsk/from2.git" + }, + "scripts": { + "test": "node test" + }, + "version": "2.3.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/from2/test.js b/sdklab/meantimerecovery/aedes/node_modules/from2/test.js new file mode 100644 index 000000000..b11bd6cd8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/from2/test.js @@ -0,0 +1,123 @@ +var test = require('tape') +var path = require('path') +var from = require('./') +var fs = require('fs') + +var tmp = path.resolve( + __dirname, 'tmp.txt' +) + +function fromString(string) { + return from(function(size, next) { + if (string.length <= 0) return next(null, null) + var chunk = string.slice(0, size) + string = string.slice(size) + next(null, chunk) + }) +} + +test('from2', function(t) { + var contents = fs.readFileSync(__filename, 'utf8') + var stream = fromString(contents) + + stream + .pipe(fs.createWriteStream(tmp)) + .on('close', function() { + t.equal(fs.readFileSync(tmp, 'utf8'), contents) + fs.unlinkSync(tmp) + t.end() + }) +}) + +test('old mode', function(t) { + var contents = fs.readFileSync(__filename, 'utf8') + var stream = fromString(contents) + var buffer = '' + + stream.on('data', function(data) { + buffer += data + }).on('end', function() { + t.equal(buffer, contents) + t.end() + }) +}) + +test('destroy', function(t) { + var stream = from(function(size, next) { + process.nextTick(function() { + next(null, 'no') + }) + }) + + stream.on('data', function(data) { + t.ok(false) + }).on('close', function() { + t.ok(true) + t.end() + }) + + stream.destroy() +}) + +test('arrays', function (t) { + var input = ['a', 'b', 'c'] + var stream = from(input) + var 
output = [] + stream.on('data', function (letter) { + output.push(letter.toString()) + }) + stream.on('end', function () { + t.deepEqual(input, output) + t.end() + }) +}) + +test('obj arrays', function (t) { + var input = [{foo:'a'}, {foo:'b'}, {foo:'c'}] + var stream = from.obj(input) + var output = [] + stream.on('data', function (letter) { + output.push(letter) + }) + stream.on('end', function () { + t.deepEqual(input, output) + t.end() + }) +}) + + +test('arrays can emit errors', function (t) { + var input = ['a', 'b', new Error('ooops'), 'c'] + var stream = from(input) + var output = [] + stream.on('data', function (letter) { + output.push(letter.toString()) + }) + stream.on('error', function(e){ + t.deepEqual(['a', 'b'], output) + t.equal('ooops', e.message) + t.end() + }) + stream.on('end', function () { + t.fail('the stream should have errored') + }) +}) + +test('obj arrays can emit errors', function (t) { + var input = [{foo:'a'}, {foo:'b'}, new Error('ooops'), {foo:'c'}] + var stream = from.obj(input) + var output = [] + stream.on('data', function (letter) { + output.push(letter) + }) + stream.on('error', function(e){ + t.deepEqual([{foo:'a'}, {foo:'b'}], output) + t.equal('ooops', e.message) + t.end() + }) + stream.on('end', function () { + t.fail('the stream should have errored') + }) +}) + + diff --git a/sdklab/meantimerecovery/aedes/node_modules/ieee754/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/ieee754/LICENSE new file mode 100644 index 000000000..5aac82c78 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ieee754/LICENSE @@ -0,0 +1,11 @@ +Copyright 2008 Fair Oaks Labs, Inc. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/sdklab/meantimerecovery/aedes/node_modules/ieee754/README.md b/sdklab/meantimerecovery/aedes/node_modules/ieee754/README.md new file mode 100644 index 000000000..cb7527b3c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ieee754/README.md @@ -0,0 +1,51 @@ +# ieee754 [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/ieee754/master.svg +[travis-url]: https://travis-ci.org/feross/ieee754 +[npm-image]: https://img.shields.io/npm/v/ieee754.svg +[npm-url]: https://npmjs.org/package/ieee754 +[downloads-image]: https://img.shields.io/npm/dm/ieee754.svg +[downloads-url]: https://npmjs.org/package/ieee754 +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[saucelabs-image]: https://saucelabs.com/browser-matrix/ieee754.svg +[saucelabs-url]: https://saucelabs.com/u/ieee754 + +### Read/write IEEE754 floating point numbers from/to a Buffer or array-like object. + +## install + +``` +npm install ieee754 +``` + +## methods + +`var ieee754 = require('ieee754')` + +The `ieee754` object has the following functions: + +``` +ieee754.read = function (buffer, offset, isLE, mLen, nBytes) +ieee754.write = function (buffer, value, offset, isLE, mLen, nBytes) +``` + +The arguments mean the following: + +- buffer = the buffer +- offset = offset into the buffer +- value = value to set (only for `write`) +- isLe = is little endian? +- mLen = mantissa length +- nBytes = number of bytes + +## what is ieee754? + +The IEEE Standard for Floating-Point Arithmetic (IEEE 754) is a technical standard for floating-point computation. [Read more](http://en.wikipedia.org/wiki/IEEE_floating_point). + +## license + +BSD 3 Clause. Copyright (c) 2008, Fair Oaks Labs, Inc. diff --git a/sdklab/meantimerecovery/aedes/node_modules/ieee754/index.d.ts b/sdklab/meantimerecovery/aedes/node_modules/ieee754/index.d.ts new file mode 100644 index 000000000..f1e435487 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ieee754/index.d.ts @@ -0,0 +1,10 @@ +declare namespace ieee754 { + export function read( + buffer: Uint8Array, offset: number, isLE: boolean, mLen: number, + nBytes: number): number; + export function write( + buffer: Uint8Array, value: number, offset: number, isLE: boolean, + mLen: number, nBytes: number): void; + } + + export = ieee754; \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/ieee754/index.js b/sdklab/meantimerecovery/aedes/node_modules/ieee754/index.js new file mode 100644 index 000000000..81d26c343 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ieee754/index.js @@ -0,0 +1,85 @@ +/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */ +exports.read = function (buffer, offset, isLE, mLen, nBytes) { + var e, m + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var nBits = -7 + var i = isLE ? (nBytes - 1) : 0 + var d = isLE ? 
-1 : 1 + var s = buffer[offset + i] + + i += d + + e = s & ((1 << (-nBits)) - 1) + s >>= (-nBits) + nBits += eLen + for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + m = e & ((1 << (-nBits)) - 1) + e >>= (-nBits) + nBits += mLen + for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + if (e === 0) { + e = 1 - eBias + } else if (e === eMax) { + return m ? NaN : ((s ? -1 : 1) * Infinity) + } else { + m = m + Math.pow(2, mLen) + e = e - eBias + } + return (s ? -1 : 1) * m * Math.pow(2, e - mLen) +} + +exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { + var e, m, c + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) + var i = isLE ? 0 : (nBytes - 1) + var d = isLE ? 1 : -1 + var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 + + value = Math.abs(value) + + if (isNaN(value) || value === Infinity) { + m = isNaN(value) ? 1 : 0 + e = eMax + } else { + e = Math.floor(Math.log(value) / Math.LN2) + if (value * (c = Math.pow(2, -e)) < 1) { + e-- + c *= 2 + } + if (e + eBias >= 1) { + value += rt / c + } else { + value += rt * Math.pow(2, 1 - eBias) + } + if (value * c >= 2) { + e++ + c /= 2 + } + + if (e + eBias >= eMax) { + m = 0 + e = eMax + } else if (e + eBias >= 1) { + m = ((value * c) - 1) * Math.pow(2, mLen) + e = e + eBias + } else { + m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) + e = 0 + } + } + + for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} + + e = (e << mLen) | m + eLen += mLen + for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} + + buffer[offset + i - d] |= s * 128 +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/ieee754/package.json b/sdklab/meantimerecovery/aedes/node_modules/ieee754/package.json new file mode 100644 index 000000000..2417f5ef6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ieee754/package.json @@ -0,0 +1,84 @@ +{ + "_from": "ieee754@^1.1.13", + "_id": "ieee754@1.2.1", + "_inBundle": false, + "_integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "_location": "/ieee754", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "ieee754@^1.1.13", + "name": "ieee754", + "escapedName": "ieee754", + "rawSpec": "^1.1.13", + "saveSpec": null, + "fetchSpec": "^1.1.13" + }, + "_requiredBy": [ + "/buffer" + ], + "_resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "_shasum": "8eb7a10a63fff25d15a57b001586d177d1b0d352", + "_spec": "ieee754@^1.1.13", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\buffer", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/ieee754/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Romain Beauxis", + "email": "toots@rastageeks.org" + } + ], + "deprecated": false, + "description": "Read/write IEEE754 floating point numbers from/to a Buffer or array-like object", + "devDependencies": { + "airtap": "^3.0.0", + "standard": "*", + "tape": "^5.0.1" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + 
"url": "https://feross.org/support" + } + ], + "homepage": "https://github.com/feross/ieee754#readme", + "keywords": [ + "IEEE 754", + "buffer", + "convert", + "floating point", + "ieee754" + ], + "license": "BSD-3-Clause", + "main": "index.js", + "name": "ieee754", + "repository": { + "type": "git", + "url": "git://github.com/feross/ieee754.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "airtap -- test/*.js", + "test-browser-local": "airtap --local -- test/*.js", + "test-node": "tape test/*.js" + }, + "types": "index.d.ts", + "version": "1.2.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/inherits/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/inherits/LICENSE new file mode 100644 index 000000000..dea3013d6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/sdklab/meantimerecovery/aedes/node_modules/inherits/README.md b/sdklab/meantimerecovery/aedes/node_modules/inherits/README.md new file mode 100644 index 000000000..b1c566585 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. + +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. + +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. 
+ +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/sdklab/meantimerecovery/aedes/node_modules/inherits/inherits.js b/sdklab/meantimerecovery/aedes/node_modules/inherits/inherits.js new file mode 100644 index 000000000..f71f2d932 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/inherits/inherits_browser.js b/sdklab/meantimerecovery/aedes/node_modules/inherits/inherits_browser.js new file mode 100644 index 000000000..86bbb3dc2 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/inherits/package.json b/sdklab/meantimerecovery/aedes/node_modules/inherits/package.json new file mode 100644 index 000000000..f59ef39cd --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/inherits/package.json @@ -0,0 +1,65 @@ +{ + "_from": "inherits@^2.0.4", + "_id": "inherits@2.0.4", + "_inBundle": false, + "_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "_location": "/inherits", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "inherits@^2.0.4", + "name": "inherits", + "escapedName": "inherits", + "rawSpec": "^2.0.4", + "saveSpec": null, + "fetchSpec": "^2.0.4" + }, + "_requiredBy": [ + "/bl", + "/bulk-write-stream", + "/from2", + "/from2/readable-stream", + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "_shasum": "0fa2c64f932917c3433a0ded55363aae37416b7c", + "_spec": "inherits@^2.0.4", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\bl", + "browser": "./inherits_browser.js", + "bugs": { + "url": "https://github.com/isaacs/inherits/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ], + "homepage": "https://github.com/isaacs/inherits#readme", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "license": "ISC", + "main": "./inherits.js", 
+ "name": "inherits", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/inherits.git" + }, + "scripts": { + "test": "tap" + }, + "version": "2.0.4" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/isarray/.npmignore b/sdklab/meantimerecovery/aedes/node_modules/isarray/.npmignore new file mode 100644 index 000000000..3c3629e64 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/isarray/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/sdklab/meantimerecovery/aedes/node_modules/isarray/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/isarray/.travis.yml new file mode 100644 index 000000000..cc4dba29d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/isarray/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/sdklab/meantimerecovery/aedes/node_modules/isarray/Makefile b/sdklab/meantimerecovery/aedes/node_modules/isarray/Makefile new file mode 100644 index 000000000..787d56e1e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/isarray/Makefile @@ -0,0 +1,6 @@ + +test: + @node_modules/.bin/tape test.js + +.PHONY: test + diff --git a/sdklab/meantimerecovery/aedes/node_modules/isarray/README.md b/sdklab/meantimerecovery/aedes/node_modules/isarray/README.md new file mode 100644 index 000000000..16d2c59c6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/isarray/README.md @@ -0,0 +1,60 @@ + +# isarray + +`Array#isArray` for older browsers. + +[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) +[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) + +[![browser support](https://ci.testling.com/juliangruber/isarray.png) +](https://ci.testling.com/juliangruber/isarray) + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/sdklab/meantimerecovery/aedes/node_modules/isarray/component.json b/sdklab/meantimerecovery/aedes/node_modules/isarray/component.json new file mode 100644 index 000000000..9e31b6838 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/isarray/index.js b/sdklab/meantimerecovery/aedes/node_modules/isarray/index.js new file mode 100644 index 000000000..a57f63495 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/isarray/index.js @@ -0,0 +1,5 @@ +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; diff --git a/sdklab/meantimerecovery/aedes/node_modules/isarray/package.json b/sdklab/meantimerecovery/aedes/node_modules/isarray/package.json new file mode 100644 index 000000000..8a7bba2a8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/isarray/package.json @@ -0,0 +1,73 @@ +{ + "_from": "isarray@~1.0.0", + "_id": "isarray@1.0.0", + "_inBundle": false, + "_integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "_location": "/isarray", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "isarray@~1.0.0", + "name": "isarray", + "escapedName": "isarray", + "rawSpec": "~1.0.0", + "saveSpec": null, + "fetchSpec": "~1.0.0" + }, + "_requiredBy": [ + "/from2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "_shasum": "bb935d48582cba168c06834957a54a3e07124f11", + "_spec": "isarray@~1.0.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\from2\\node_modules\\readable-stream", + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "bugs": { + "url": "https://github.com/juliangruber/isarray/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Array#isArray for older browsers", + "devDependencies": { + "tape": "~2.13.4" + }, + "homepage": "https://github.com/juliangruber/isarray", + "keywords": [ + "browser", + "isarray", + "array" + ], + "license": "MIT", + "main": "index.js", + "name": "isarray", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/isarray.git" + }, + "scripts": { + "test": "tape test.js" + }, + "testling": { + "files": "test.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "version": "1.0.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/isarray/test.js b/sdklab/meantimerecovery/aedes/node_modules/isarray/test.js new file mode 100644 index 000000000..e0c3444d8 --- /dev/null +++ 
b/sdklab/meantimerecovery/aedes/node_modules/isarray/test.js @@ -0,0 +1,20 @@ +var isArray = require('./'); +var test = require('tape'); + +test('is array', function(t){ + t.ok(isArray([])); + t.notOk(isArray({})); + t.notOk(isArray(null)); + t.notOk(isArray(false)); + + var obj = {}; + obj[0] = true; + t.notOk(isArray(obj)); + + var arr = []; + arr.foo = 'bar'; + t.ok(isArray(arr)); + + t.end(); +}); + diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.github/dependabot.yml b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.github/dependabot.yml new file mode 100644 index 000000000..7e7cbe1b0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 + ignore: + - dependency-name: standard + versions: + - 16.0.3 diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.github/workflows/ci.yml b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.github/workflows/ci.yml new file mode 100644 index 000000000..df53cb6c8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.github/workflows/ci.yml @@ -0,0 +1,27 @@ +name: ci + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [10.x, 12.x, 13.x, 14.x, 16.x] + + steps: + - uses: actions/checkout@v1 + + - name: Use Node.js + uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.nycrc b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.nycrc new file mode 100644 index 000000000..1a0bab932 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/.nycrc @@ -0,0 +1,11 @@ +{ + "branches": 100, + "lines": 100, + "functions": 100, + "statements": 100, + "check-coverage": true, + "exclude": [ + "abstractTest.js", + "test/*.js" + ] +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/CODE_OF_CONDUCT.md b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..0d5ca1b9d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/CODE_OF_CONDUCT.md @@ -0,0 +1,84 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience +* Focusing on what is best not just for us as individuals, but for the overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at hello@matteocollina.com. All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of actions. + +**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. 
+ +**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, +available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/LICENSE new file mode 100644 index 000000000..5d7e3ae63 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/LICENSE @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2014-2020, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
\ No newline at end of file
diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/README.md b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/README.md
new file mode 100644
index 000000000..2ddbcf930
--- /dev/null
+++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/README.md
@@ -0,0 +1,190 @@
+
+
+# MQEmitter
+
+![ci](https://github.com/mcollina/mqemitter/workflows/ci/badge.svg)
+[![Known Vulnerabilities](https://snyk.io/test/github/mcollina/mqemitter/badge.svg)](https://snyk.io/test/github/mcollina/mqemitter)
+[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/)\
+[![Dependencies Status](https://david-dm.org/mcollina/mqemitter/status.svg)](https://david-dm.org/mcollina/mqemitter)
+[![devDependencies Status](https://david-dm.org/mcollina/mqemitter/dev-status.svg)](https://david-dm.org/mcollina/mqemitter?type=dev)\
+[![NPM version](https://img.shields.io/npm/v/mqemitter.svg?style=flat)](https://www.npmjs.com/mqemitter)
+[![NPM downloads](https://img.shields.io/npm/dm/mqemitter.svg?style=flat)](https://www.npmjs.com/mqemitter)
+
+An Opinionated Message Queue with an emitter-style API, but with callbacks.
+
+If you need a multi process MQEmitter, check out the list below:
+
+- [mqemitter-redis]: Redis-powered mqemitter
+- [mqemitter-mongodb]: Mongodb based mqemitter
+- [mqemitter-child-process]: Share the same mqemitter between a hierarchy of child processes
+- [mqemitter-cs]: Expose a MQEmitter via a simple client/server protocol
+- [mqemitter-p2p]: A P2P implementation of MQEmitter, based on HyperEmitter and a Merkle DAG
+- [mqemitter-aerospike]: Aerospike mqemitter
+
+## Installation
+
+```sh
+npm install mqemitter
+```
+
+## Examples
+
+```js
+const mq = require('mqemitter')
+const emitter = mq({ concurrency: 5 })
+let message
+
+emitter.on('hello world', function (message, cb) {
+  // call callback when you are done
+  // do not pass any errors, the emitter cannot handle it.
+  cb()
+})
+
+// topic is mandatory
+message = { topic: 'hello world', payload: 'or any other fields' }
+emitter.emit(message, function () {
+  // emitter will never return an error
+})
+```
+
+## API
+
+- [new MQEmitter ([options])](#new-mqemitter-options)
+- [emitter.emit (message, callback)](#emitteremit-message-callback)
+- [emitter.on (topic, listener, [callback])](#emitteron-topic-listener-callback)
+- [emitter.removeListener (topic, listener, [callback])](#emitterremovelistener-topic-listener-callback)
+- [emitter.close (callback)](#emitterclose-callback)
+
+## new MQEmitter ([options])
+
+- options `<object>`
+  - `concurrency` `<number>` maximum number of concurrent messages that can be on concurrent delivery. __Default__: `0`
+  - `wildcardOne` `<string>` a char to use for matching exactly one _non-empty_ level word. __Default__: `+`
+  - `wildcardSome` `<string>` a char to use for matching multiple level wildcards. __Default__: `#`
+  - `matchEmptyLevels` `<boolean>` If true then `wildcardOne` also matches an empty word. __Default__: `true`
+  - `separator` `<string>` a separator character to use for separating words. __Default__: `/`
+
+Create a new MQEmitter class.
+
+MQEmitter is the class and function exposed by this module.
+It can be created by `MQEmitter()` or using `new MQEmitter()`.
+
+For more information on wildcards, see [this explanation](#wildcards) or [Qlobber](https://www.npmjs.com/qlobber).
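+
+As a minimal sketch (the option values below are illustrative, not defaults), a custom separator and concurrency limit can be passed like this:
+
+```js
+const mq = require('mqemitter')
+
+// use '.' instead of '/' as the topic level separator,
+// and allow up to 10 messages to be delivered concurrently
+const emitter = mq({ separator: '.', concurrency: 10 })
+
+emitter.on('metrics.+.cpu', function (message, cb) {
+  // matches e.g. { topic: 'metrics.host1.cpu' }
+  console.log(message)
+  cb()
+})
+
+emitter.emit({ topic: 'metrics.host1.cpu', value: 0.42 })
+```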
+ +## emitter.emit (message, callback) + +- `message` `` +- `callback` `` `(error) => void` + - error `` | `null` + +Emit the given message, which must have a `topic` property, which can contain wildcards as defined on creation. + +## emitter.on (topic, listener, [callback]) + +- `topic` `` +- `listener` `` `(message, done) => void` +- `callback` `` `() => void` + +Add the given listener to the passed topic. Topic can contain wildcards, as defined on creation. + +The `listener` __must never error__ and `done` must not be called with an __`err`__ object. + +`callback` will be called when the event subscribe is done correctly. + +## emitter.removeListener (topic, listener, [callback]) + +The inverse of `on`. + +## emitter.close (callback) + +- `callback` `` `() => void` + +Close the given emitter. After, all writes will return an error. + +## Wildcards + +__MQEmitter__ supports the use of wildcards: every topic is splitted according to `separator`. + +The wildcard character `+` matches exactly _non-empty_ one word: + +```js +const mq = require('mqemitter') +const emitter = mq() + +emitter.on('hello/+/world', function(message, cb) { + // will ONLY capture { topic: 'hello/my/world', 'something': 'more' } + console.log(message) + cb() +}) +emitter.on('hello/+', function(message, cb) { + // will not be called + console.log(message) + cb() +}) + +emitter.emit({ topic: 'hello/my/world', something: 'more' }) +emitter.emit({ topic: 'hello//world', something: 'more' }) +``` + +The wildcard character `+` matches one word: + +```js +const mq = require('mqemitter') +const emitter = mq({ matchEmptyLevels: true }) + +emitter.on('hello/+/world', function(message, cb) { + // will capture { topic: 'hello/my/world', 'something': 'more' } + // and capture { topic: 'hello//world', 'something': 'more' } + console.log(message) + cb() +}) + +emitter.on('hello/+', function(message, cb) { + // will not be called + console.log(message) + cb() +}) + +emitter.emit({ topic: 'hello/my/world', something: 'more' }) +emitter.emit({ topic: 'hello//world', something: 'more' }) +``` + +The wildcard character `#` matches zero or more words: + +```js +const mq = require('mqemitter') +const emitter = mq() + +emitter.on('hello/#', function(message, cb) { + // this will print { topic: 'hello/my/world', 'something': 'more' } + console.log(message) + cb() +}) + +emitter.on('#', function(message, cb) { + // this will print { topic: 'hello/my/world', 'something': 'more' } + console.log(message) + cb() +}) + +emitter.on('hello/my/world/#', function(message, cb) { + // this will print { topic: 'hello/my/world', 'something': 'more' } + console.log(message) + cb() +}) + +emitter.emit({ topic: 'hello/my/world', something: 'more' }) +``` + +Of course, you can mix `#` and `+` in the same subscription. 
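As a quick sketch of that last point (not taken from the package itself), a single subscription can combine both wildcard characters; the topics used here are made up:

```js
const mq = require('mqemitter')
const emitter = mq()

// '+' fixes exactly one level, and '#' then absorbs whatever follows (including nothing).
emitter.on('hello/+/world/#', function (message, cb) {
  // receives 'hello/my/world' and 'hello/my/world/and/more',
  // but not 'hello/world' (the '+' level is missing there)
  console.log(message.topic)
  cb()
})

emitter.emit({ topic: 'hello/my/world' })
emitter.emit({ topic: 'hello/my/world/and/more' })
emitter.emit({ topic: 'hello/world' })
```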
+ +## LICENSE + +MIT + +[mqemitter-redis]: https://www.npmjs.com/mqemitter-redis +[mqemitter-mongodb]: https://www.npmjs.com/mqemitter-mongodb +[mqemitter-child-process]: https://www.npmjs.com/mqemitter-child-process +[mqemitter-cs]: https://www.npmjs.com/mqemitter-cs +[mqemitter-p2p]: https://www.npmjs.com/mqemitter-p2p +[mqemitter-aerospike]: https://www.npmjs.com/mqemitter-aerospike diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/abstractTest.js b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/abstractTest.js new file mode 100644 index 000000000..0a7f24fe1 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/abstractTest.js @@ -0,0 +1,544 @@ + +'use strict' + +module.exports = function abstractTests (opts) { + const builder = opts.builder + const test = opts.test + + test('support on and emit', function (t) { + t.plan(4) + + const e = builder() + const expected = { + topic: 'hello world', + payload: { my: 'message' } + } + + e.on('hello world', function (message, cb) { + t.equal(e.current, 1, 'number of current messages') + t.deepEqual(message, expected) + t.equal(this, e) + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support multiple subscribers', function (t) { + t.plan(3) + + const e = builder() + const expected = { + topic: 'hello world', + payload: { my: 'message' } + } + + e.on('hello world', function (message, cb) { + t.ok(message, 'message received') + cb() + }, function () { + e.on('hello world', function (message, cb) { + t.ok(message, 'message received') + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + }) + + test('support multiple subscribers and unsubscribers', function (t) { + t.plan(2) + + const e = builder() + const expected = { + topic: 'hello world', + payload: { my: 'message' } + } + + function first (message, cb) { + t.fail('first listener should not receive any events') + cb() + } + + function second (message, cb) { + t.ok(message, 'second listener must receive the message') + cb() + e.close(function () { + t.pass('closed') + }) + } + + e.on('hello world', first, function () { + e.on('hello world', second, function () { + e.removeListener('hello world', first, function () { + e.emit(expected) + }) + }) + }) + }) + + test('removeListener', function (t) { + t.plan(1) + + const e = builder() + const expected = { + topic: 'hello world', + payload: { my: 'message' } + } + let toRemoveCalled = false + + function toRemove (message, cb) { + toRemoveCalled = true + cb() + } + + e.on('hello world', function (message, cb) { + cb() + }, function () { + e.on('hello world', toRemove, function () { + e.removeListener('hello world', toRemove, function () { + e.emit(expected, function () { + e.close(function () { + t.notOk(toRemoveCalled, 'the toRemove function must not be called') + }) + }) + }) + }) + }) + }) + + test('without a callback on emit and on', function (t) { + t.plan(1) + + const e = builder() + const expected = { + topic: 'hello world', + payload: { my: 'message' } + } + + e.on('hello world', function (message, cb) { + cb() + e.close(function () { + t.pass('closed') + }) + }) + + setTimeout(function () { + e.emit(expected) + }, 100) + }) + + test('without any listeners', function (t) { + t.plan(2) + + const e = builder() + const expected = { + topic: 'hello world', + payload: { my: 'message' } + } + + e.emit(expected) + t.equal(e.current, 0, 'reset the current 
messages trackers') + e.close(function () { + t.pass('closed') + }) + }) + + test('support one level wildcard', function (t) { + t.plan(2) + + const e = builder() + const expected = { + topic: 'hello/world', + payload: { my: 'message' } + } + + e.on('hello/+', function (message, cb) { + t.equal(message.topic, 'hello/world') + cb() + }, function () { + // this will not be catched + e.emit({ topic: 'hello/my/world' }) + + // this will be catched + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support one level wildcard - not match empty words', function (t) { + t.plan(2) + + const e = builder({ matchEmptyLevels: false }) + const expected = { + topic: 'hello/dummy/world', + payload: { my: 'message' } + } + + e.on('hello/+/world', function (message, cb) { + t.equal(message.topic, 'hello/dummy/world') + cb() + }, function () { + // this will not be catched + e.emit({ topic: 'hello//world' }) + + // this will be catched + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support one level wildcard - match empty words', function (t) { + t.plan(3) + + const e = builder({ matchEmptyLevels: true }) + + e.on('hello/+/world', function (message, cb) { + const topic = message.topic + if (topic === 'hello//world' || topic === 'hello/dummy/world') { + t.pass('received ' + topic) + } + cb() + }, function () { + // this will be catched + e.emit({ topic: 'hello//world' }) + // this will be catched + e.emit({ topic: 'hello/dummy/world' }, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support one level wildcard - match empty words', function (t) { + t.plan(2) + + const e = builder({ matchEmptyLevels: true }) + + e.on('hello/+', function (message, cb) { + t.equal(message.topic, 'hello/') + cb() + }, function () { + // this will be catched + e.emit({ topic: 'hello/' }, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support one level wildcard - not match empty words', function (t) { + t.plan(1) + + const e = builder({ matchEmptyLevels: false }) + + e.on('hello/+', function (message, cb) { + t.fail('should not catch') + cb() + }, function () { + // this will not be catched + e.emit({ topic: 'hello/' }, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support changing one level wildcard', function (t) { + t.plan(2) + + const e = builder({ wildcardOne: '~' }) + const expected = { + topic: 'hello/world', + payload: { my: 'message' } + } + + e.on('hello/~', function (message, cb) { + t.equal(message.topic, 'hello/world') + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support deep wildcard', function (t) { + t.plan(2) + + const e = builder() + const expected = { + topic: 'hello/my/world', + payload: { my: 'message' } + } + + e.on('hello/#', function (message, cb) { + t.equal(message.topic, 'hello/my/world') + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support deep wildcard without separator', function (t) { + t.plan(2) + + const e = builder() + const expected = { + topic: 'hello', + payload: { my: 'message' } + } + + e.on('#', function (message, cb) { + t.equal(message.topic, expected.topic) + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + 
test('support deep wildcard - match empty words', function (t) { + t.plan(2) + + const e = builder({ matchEmptyLevels: true }) + const expected = { + topic: 'hello', + payload: { my: 'message' } + } + + const wrong = { + topic: 'hellooo', + payload: { my: 'message' } + } + + e.on('hello/#', function (message, cb) { + t.equal(message.topic, expected.topic) + cb() + }, function () { + e.emit(wrong) // this should not be received + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support changing deep wildcard', function (t) { + t.plan(2) + + const e = builder({ wildcardSome: '*' }) + const expected = { + topic: 'hello/my/world', + payload: { my: 'message' } + } + + e.on('hello/*', function (message, cb) { + t.equal(message.topic, 'hello/my/world') + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('support changing the level separator', function (t) { + t.plan(2) + + const e = builder({ separator: '~' }) + const expected = { + topic: 'hello~world', + payload: { my: 'message' } + } + + e.on('hello~+', function (message, cb) { + t.equal(message.topic, 'hello~world') + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.pass('closed') + }) + }) + }) + }) + + test('close support', function (t) { + const e = builder() + let check = false + + t.notOk(e.closed, 'must have a false closed property') + + e.close(function () { + t.ok(check, 'must delay the close callback') + t.ok(e.closed, 'must have a true closed property') + t.end() + }) + + check = true + }) + + test('emit after close errors', function (t) { + const e = builder() + + e.close(function () { + e.emit({ topic: 'hello' }, function (err) { + t.ok(err, 'must return an error') + t.end() + }) + }) + }) + + test('support multiple subscribers with wildcards', function (t) { + const e = builder() + const expected = { + topic: 'hello/world', + payload: { my: 'message' } + } + let firstCalled = false + let secondCalled = false + + e.on('hello/#', function (message, cb) { + t.notOk(firstCalled, 'first subscriber must only be called once') + firstCalled = true + cb() + }) + + e.on('hello/+', function (message, cb) { + t.notOk(secondCalled, 'second subscriber must only be called once') + secondCalled = true + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.end() + }) + }) + }) + }) + + test('support multiple subscribers with wildcards (deep)', function (t) { + const e = builder() + const expected = { + topic: 'hello/my/world', + payload: { my: 'message' } + } + let firstCalled = false + let secondCalled = false + + e.on('hello/#', function (message, cb) { + t.notOk(firstCalled, 'first subscriber must only be called once') + firstCalled = true + cb() + }) + + e.on('hello/+/world', function (message, cb) { + t.notOk(secondCalled, 'second subscriber must only be called once') + secondCalled = true + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.end() + }) + }) + }) + }) + + test('emit & receive buffers', function (t) { + const e = builder() + const msg = Buffer.from('hello') + const expected = { + topic: 'hello', + payload: msg + } + + e.on('hello', function (message, cb) { + t.deepEqual(msg, message.payload) + cb() + }, function () { + e.emit(expected, function () { + e.close(function () { + t.end() + }) + }) + }) + }) + + test('packets are emitted in order', function (t) { + const e = builder() + const total = 
10000 + const topic = 'test' + + let received = 0 + + e.on(topic, function (msg, cb) { + let fail = false + if (received !== msg.payload) { + t.fail(`leak detected. Count: ${received} - Payload: ${msg.payload}`) + fail = true + } + + received++ + + if (fail || received === total) { + e.close(function () { + t.end() + }) + } + cb() + }) + + for (let payload = 0; payload < total; payload++) { + e.emit({ topic, payload }) + } + }) + + test('calling emit without cb when closed doesn\'t throw error', function (t) { + const e = builder() + const msg = Buffer.from('hello') + const expected = { + topic: 'hello', + payload: msg + } + + e.close(function () { + try { + e.emit(expected) + } catch (error) { + t.error('throws error') + } + t.end() + }) + }) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/bench.js b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/bench.js new file mode 100644 index 000000000..cfb9a3453 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/bench.js @@ -0,0 +1,40 @@ +'use strict' + +const mqemitter = require('./') +const emitter = mqemitter({ concurrency: 10 }) +const total = 1000000 +let written = 0 +let received = 0 +const timerKey = 'time for sending ' + total + ' messages' + +function write () { + if (written === total) { + return + } + + written++ + + emitter.emit({ topic: 'hello', payload: 'world' }, write) +} + +emitter.on('hello', function (msg, cb) { + received++ + if (received === total) { + console.timeEnd(timerKey) + } + setImmediate(cb) +}) + +console.time(timerKey) +write() +write() +write() +write() +write() +write() +write() +write() +write() +write() +write() +write() diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/example.js b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/example.js new file mode 100644 index 000000000..833c5695e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/example.js @@ -0,0 +1,61 @@ +'use strict' + +const mqemitter = require('.') + +const mq = mqemitter() + +function subscribe (topic, obj) { + mq.on(topic, callback) + obj.close = close + + function callback (value, cb) { + obj.push(value) + cb() + } + + function close () { + mq.removeListener(topic, callback) + } +} + +class MyQueue { + push (value) { + console.log(value) + } +} + +const a = new MyQueue() +const b = new MyQueue() +const c = new MyQueue() + +subscribe('hello', a) +subscribe('hello', b) +subscribe('hello', c) + +mq.emit({ topic: 'hello', payload: 'world' }) + +a.close() +b.close() +c.close() + +mq.emit({ topic: 'hello', payload: 'world' }) + +// const listeners = new Map() +// +// +// const queues = new Map() +// +// function subscribe (topic, queue) { +// if (listeners.has(topic)) { +// +// } +// +// function callback (err) { +// +// for (var value of queues) { +// } +// } +// +// listeners.set(topic, callback) +// queues.set(topic, [queue]) +// } diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/mqemitter.js b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/mqemitter.js new file mode 100644 index 000000000..0ba60e158 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/mqemitter.js @@ -0,0 +1,128 @@ +'use strict' + +const { Qlobber } = require('qlobber') +const assert = require('assert') +const fastparallel = require('fastparallel') + +function MQEmitter (opts) { + if (!(this instanceof MQEmitter)) { + return new MQEmitter(opts) + } + + const that = this + + opts = opts || {} + opts.matchEmptyLevels = opts.matchEmptyLevels === 
undefined ? true : !!opts.matchEmptyLevels + opts.separator = opts.separator || '/' + opts.wildcardOne = opts.wildcardOne || '+' + opts.wildcardSome = opts.wildcardSome || '#' + + this._messageQueue = [] + this._messageCallbacks = [] + this._parallel = fastparallel({ + results: false, + released + }) + + this.concurrency = opts.concurrency || 0 + + this.current = 0 + this._doing = false + this._matcher = new Qlobber({ + match_empty_levels: opts.matchEmptyLevels, + separator: opts.separator, + wildcard_one: opts.wildcardOne, + wildcard_some: opts.wildcardSome + }) + + this.closed = false + this._released = released + + function released () { + that.current-- + + const message = that._messageQueue.shift() + const callback = that._messageCallbacks.shift() + + if (message) { + that._do(message, callback) + } else { + that._doing = false + } + } +} + +Object.defineProperty(MQEmitter.prototype, 'length', { + get: function () { + return this._messageQueue.length + }, + enumerable: true +}) + +MQEmitter.prototype.on = function on (topic, notify, done) { + assert(topic) + assert(notify) + this._matcher.add(topic, notify) + + if (done) { + setImmediate(done) + } + + return this +} + +MQEmitter.prototype.removeListener = function removeListener (topic, notify, done) { + assert(topic) + assert(notify) + this._matcher.remove(topic, notify) + + if (done) { + setImmediate(done) + } + + return this +} + +MQEmitter.prototype.emit = function emit (message, cb) { + assert(message) + + cb = cb || noop + + if (this.closed) { + return cb(new Error('mqemitter is closed')) + } + + if (this.concurrency > 0 && this.current >= this.concurrency) { + this._messageQueue.push(message) + this._messageCallbacks.push(cb) + if (!this._doing) { + process.emitWarning('MqEmitter leak detected', { detail: 'For more info check: https://github.com/mcollina/mqemitter/pull/94' }) + this._released() + } + } else { + this._do(message, cb) + } + + return this +} + +MQEmitter.prototype.close = function close (cb) { + this.closed = true + setImmediate(cb) + + return this +} + +MQEmitter.prototype._do = function (message, callback) { + this._doing = true + const matches = this._matcher.match(message.topic) + + this.current++ + this._parallel(this, matches, message, callback) + + return this +} + +function noop () { } + +module.exports = MQEmitter diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/package.json b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/package.json new file mode 100644 index 000000000..f6ba9001e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/package.json @@ -0,0 +1,101 @@ +{ + "_from": "mqemitter@^4.2.0", + "_id": "mqemitter@4.5.0", + "_inBundle": false, + "_integrity": "sha512-Mp/zytFeIv6piJQkEKnncHcP4R/ErJc5C7dfonkhkNUT2LA/nTayrfNxbipp3M5iCJUTQSUtzfQAQA3XVcKz6w==", + "_location": "/mqemitter", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "mqemitter@^4.2.0", + "name": "mqemitter", + "escapedName": "mqemitter", + "rawSpec": "^4.2.0", + "saveSpec": null, + "fetchSpec": "^4.2.0" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/mqemitter/-/mqemitter-4.5.0.tgz", + "_shasum": "ffe74cdf0e3e88b6f37a9dfe4bb7546ac5ae7aa8", + "_spec": "mqemitter@^4.2.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": 
"http://github.com/mcollina/mqemitter/issues" + }, + "bundleDependencies": false, + "dependencies": { + "fastparallel": "^2.3.0", + "qlobber": "^5.0.0" + }, + "deprecated": false, + "description": "An Opinionated Message Queue with an emitter-style API", + "devDependencies": { + "@types/node": "^16.11.1", + "@typescript-eslint/eslint-plugin": "^2.19.2", + "@typescript-eslint/parser": "^2.19.2", + "markdownlint-cli": "^0.29.0", + "nyc": "^15.0.0", + "pre-commit": "^1.2.2", + "snazzy": "^9.0.0", + "standard": "^16.0.0", + "tape": "^5.0.1", + "tsd": "^0.18.0", + "typescript": "^4.0.2" + }, + "engines": { + "node": ">=10" + }, + "homepage": "https://github.com/mcollina/mqemitter#readme", + "keywords": [ + "emitter", + "events", + "message queue", + "mq", + "publish", + "subscribe", + "pub", + "sub" + ], + "license": "ISC", + "main": "mqemitter.js", + "name": "mqemitter", + "pre-commit": [ + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/mqemitter.git" + }, + "scripts": { + "lint": "npm run lint:standard && npm run lint:typescript && npm run lint:markdown", + "lint:markdown": "markdownlint README.md", + "lint:standard": "standard --verbose | snazzy", + "lint:typescript": "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin test/types/*.ts types/mqemitter.d.ts", + "test": "npm run lint && npm run unit:cov && tsd && npm run typescript", + "test:report": "npm run lint && npm run unit:report && npm run typescript", + "test:types": "tsd", + "typescript": "tsc --project ./test/types/tsconfig.json", + "unit": "tape test/*.js", + "unit:cov": "nyc --reporter=text tape test/*.js", + "unit:report": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/*.js" + }, + "types": "types/mqemitter.d.ts", + "version": "4.5.0", + "warnings": [ + { + "code": "ENOTSUP", + "required": { + "node": ">=10" + }, + "pkgid": "mqemitter@4.5.0" + } + ], + "website": "https://github.com/mcollina/mqemitter" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/test.js b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/test.js new file mode 100644 index 000000000..9fa4b4559 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/test.js @@ -0,0 +1,130 @@ +'use strict' + +const { test } = require('tape') +const mq = require('../') + +require('../abstractTest')({ + builder: mq, + test +}) + +test('queue concurrency', function (t) { + t.plan(3) + + const e = mq({ concurrency: 1 }) + let completed1 = false + + t.equal(e.concurrency, 1) + + e.on('hello 1', function (message, cb) { + setTimeout(cb, 10) + }) + + e.on('hello 2', function (message, cb) { + cb() + }) + + e.emit({ topic: 'hello 1' }, function () { + completed1 = true + }) + + e.emit({ topic: 'hello 2' }, function () { + t.ok(completed1, 'the first message must be completed') + }) + + t.equal(e.length, 1) +}) + +test('queue released when full', function (t) { + t.plan(21) + + const e = mq({ concurrency: 1 }) + + e.on('hello 1', function (message, cb) { + t.ok(true, 'message received') + setTimeout(cb, 10) + }) + + function onSent () { + t.ok(true, 'message sent') + } + + for (let i = 0; i < 9; i++) { + e._messageQueue.push({ topic: 'hello 1' }) + e._messageCallbacks.push(onSent) + e.current++ + } + + e.emit({ topic: 'hello 1' }, onSent) + + process.once('warning', function (warning) { + t.equal(warning.message, 'MqEmitter leak detected', 'warning message') + }) +}) + 
+test('without any listeners and a callback', function (t) { + const e = mq() + const expected = { + topic: 'hello world', + payload: { my: 'message' } + } + + e.emit(expected, function () { + t.equal(e.current, 1, 'there 1 message that is being processed') + e.close(function () { + t.end() + }) + }) +}) + +test('queue concurrency with overlapping subscriptions', function (t) { + t.plan(3) + + const e = mq({ concurrency: 1 }) + let completed1 = false + + t.equal(e.concurrency, 1) + + e.on('000001/021/#', function (message, cb) { + setTimeout(cb, 10) + }) + + e.on('000001/021/000B/0001/01', function (message, cb) { + setTimeout(cb, 20) + }) + + e.emit({ topic: '000001/021/000B/0001/01' }, function () { + completed1 = true + }) + + e.emit({ topic: '000001/021/000B/0001/01' }, function () { + t.ok(completed1, 'the first message must be completed') + process.nextTick(function () { + t.equal(e.current, 0, 'no message is in flight') + }) + }) +}) + +test('removeListener without a callback does not throw', function (t) { + const e = mq() + function fn () {} + + e.on('hello', fn) + e.removeListener('hello', fn) + + t.end() +}) + +test('set defaults to opts', function (t) { + const opts = {} + mq(opts) + + t.deepEqual(opts, { + matchEmptyLevels: true, + separator: '/', + wildcardOne: '+', + wildcardSome: '#' + }) + + t.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/types/index.ts b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/types/index.ts new file mode 100644 index 000000000..98457ab61 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/types/index.ts @@ -0,0 +1,46 @@ +/* eslint no-unused-vars: 0 */ +/* eslint no-undef: 0 */ + +import MQEmitter, { Message } from '../../types/mqemitter' + +const noop = function () {} + +let mq = MQEmitter() +mq = MQEmitter({ + concurrency: 100 +}) +mq.close(noop) + +mq = MQEmitter({ + concurrency: 100, + matchEmptyLevels: true, + separator: ',' +}) +mq.close(noop) + +mq = MQEmitter({ + concurrency: 10, + matchEmptyLevels: true, + separator: '/', + wildcardOne: '+', + wildcardSome: '#' +}) + +const notify = function (msg: Message, cb: () => void) { + if (msg.topic === 'hello/world') { + console.log(msg) + } + cb() +} + +mq.on('hello/+', notify) + +mq.emit({ topic: 'hello/world', payload: 'or any other fields', [Symbol.for('me')]: 42 }) + +mq.emit({ topic: 'hello/world' }, function (err) { + console.log(err) +}) + +mq.removeListener('hello/+', notify) + +mq.close(noop) diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/types/tsconfig.json b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/types/tsconfig.json new file mode 100644 index 000000000..791484233 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/test/types/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es6", + "noEmit": true, + "strict": true + }, + "files": [ + "./index.ts" + ] +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/types/mqemitter.d.ts b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/types/mqemitter.d.ts new file mode 100644 index 000000000..0d04068ad --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/types/mqemitter.d.ts @@ -0,0 +1,24 @@ +/// + +declare function MQEmitter(options?: MQEmitterOptions): MQEmitter + +export default MQEmitter + +interface MQEmitterOptions { + concurrency?: number + matchEmptyLevels?: boolean + separator?: string + wildcardOne?: string + 
wildcardSome?: string +} + +export type Message = Record & { topic: string } + +export interface MQEmitter { + current: number + concurrent: number + on(topic: string, listener: (message: Message, done: () => void) => void, callback?: () => void): this + emit(message: Message, callback?: (error?: Error) => void): void + removeListener(topic: string, listener: (message: Message, done: () => void) => void, callback?: () => void): void + close(callback: () => void): void +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqemitter/types/mqemitter.test-d.ts b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/types/mqemitter.test-d.ts new file mode 100644 index 000000000..6c9e737c5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqemitter/types/mqemitter.test-d.ts @@ -0,0 +1,32 @@ +import { expectError, expectType } from 'tsd'; +import mqEmitter, { Message, MQEmitter } from './mqemitter'; + +expectType(mqEmitter()); + +expectType(mqEmitter({ concurrency: 200, matchEmptyLevels: true })); + +expectType( + mqEmitter({ + concurrency: 10, + matchEmptyLevels: true, + separator: '/', + wildcardOne: '+', + wildcardSome: '#', + }) +); + +function listener(message: Message, done: () => void) {} + +expectType(mqEmitter().on('topic', listener)); + +expectError(mqEmitter().emit(null)); + +expectType( + mqEmitter().emit({ topic: 'test', prop1: 'prop1', [Symbol.for('me')]: 42 }) +); + +expectType(mqEmitter().emit({ topic: 'test', prop1: 'prop1' }, () => {})); + +expectType(mqEmitter().removeListener('topic', listener)); + +expectType(mqEmitter().close(() => null)); diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/.github/workflows/ci.yml b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/.github/workflows/ci.yml new file mode 100644 index 000000000..04ad172da --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/.github/workflows/ci.yml @@ -0,0 +1,27 @@ +name: ci + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [6.x, 8.x, 10.x, 12.x, 14.x] + + steps: + - uses: actions/checkout@v1 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run ci diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/CONTRIBUTING.md b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/CONTRIBUTING.md new file mode 100644 index 000000000..cbe1f77f7 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/CONTRIBUTING.md @@ -0,0 +1,27 @@ +# mqtt-packet is an OPEN Open Source Project + +----------------------------------------- + +## What? + +Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project. + +## Rules + +There are a few basic ground-rules for contributors: + +1. **No `--force` pushes** or modifying the Git history in any way. +1. **Non-master branches** ought to be used for ongoing work. +1. **External API changes and significant modifications** ought to be subject to an **internal pull-request** to solicit feedback from other contributors. +1. Internal pull-requests to solicit feedback are *encouraged* for any other non-trivial contribution but left to the discretion of the contributor. +1. Contributors should attempt to adhere to the prevailing code-style. 
+ +## Releases + +Declaring formal releases remains the prerogative of the project maintainer. + +## Changes to this arrangement + +This is an experiment and feedback is welcome! This document may also be subject to pull-requests or changes by contributors where you believe you have something valuable to add or change. + +----------------------------------------- diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/LICENSE.md b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/LICENSE.md new file mode 100644 index 000000000..833b75705 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/LICENSE.md @@ -0,0 +1,13 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2014-2017 mqtt-packet contributors +--------------------------------------- + +*mqtt-packet contributors listed at * + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/README.md b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/README.md new file mode 100644 index 000000000..72a035684 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/README.md @@ -0,0 +1,491 @@ +mqtt-packet +=========== + +Encode and Decode MQTT 3.1.1, 5.0 packets the node way. + +[![JavaScript Style Guide](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) + + * Installation + * Examples + * Packets + * API + * Contributing + * License & copyright + +This library is tested with node v6, v8, v10, v12 and v14. The last version to support +older versions of node was mqtt-packet@4.1.2. + +Installation +------------ + +```bash +npm install mqtt-packet --save +``` + +Examples +-------- + +### Generating + +```js +const mqtt = require('mqtt-packet'); +const object = { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: 10, + topic: 'test', + payload: 'test' // Can also be a Buffer +}; +const opts = { protocolVersion: 4 }; // default is 4. Usually, opts is a connect packet + +console.log(mqtt.generate(object)) +// Prints: +// +// +// +// Which is the same as: +// +// Buffer.from([ +// 48, 10, // Header (publish) +// 0, 4, // Topic length +// 116, 101, 115, 116, // Topic (test) +// 116, 101, 115, 116 // Payload (test) +// ]) +``` + +### Parsing + +```js +const mqtt = require('mqtt-packet'); +const opts = { protocolVersion: 4 }; // default is 4. 
Usually, opts is a connect packet +const parser = mqtt.parser(opts); + +// Synchronously emits all the parsed packets +parser.on('packet', packet => { + console.log(packet) + // Prints: + // + // { + // cmd: 'publish', + // retain: false, + // qos: 0, + // dup: false, + // length: 10, + // topic: 'test', + // payload: + // } +}) + +parser.parse(Buffer.from([ + 48, 10, // Header (publish) + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 116, 101, 115, 116 // Payload (test) +])) +// Returns the number of bytes left in the parser +``` + +API +--- + + * mqtt#generate() + * mqtt#writeToStream() + * mqtt#parser() + + + +### mqtt.generate(object, [opts]) + +Generates a `Buffer` containing an MQTT packet. +The object must be one of the ones specified by the [packets](#packets) +section. Throws an `Error` if a packet cannot be generated. + + + +### mqtt.writeToStream(object, stream, [opts]) + +Writes the mqtt packet defined by `object` to the given stream. +The object must be one of the ones specified by the [packets](#packets) +section. Emits an `Error` on the stream if a packet cannot be generated. +On node >= 0.12, this function automatically calls `cork()` on your stream, +and then it calls `uncork()` on the next tick. +By default cache for number buffers is enabled. +It creates a list of buffers for faster write. To disable cache set `mqtt.writeToStream.cacheNumbers = false`. +Should be set before any `writeToStream` calls. + + + +### mqtt.parser([opts]) + +Returns a new `Parser` object. `Parser` inherits from `EventEmitter` and +will emit: + + * `packet`, when a new packet is parsed, according to + [packets](#packets) + * `error`, if an error happens + + + +#### Parser.parse(buffer) + +Parses a given `Buffer` and emits synchronously all the MQTT packets that +are included. Returns the number of bytes left to parse. + +If an error happens, an `error` event will be emitted, but no `packet` events +will be emitted after that. Calling `parse()` again clears the error and +previous buffer, as if you created a new `Parser`. + +Packets +------- + +This section describes the format of all packets emitted by the `Parser` +and that you can input to `generate`. + +### Connect + +```js +{ + cmd: 'connect', + protocolId: 'MQTT', // Or 'MQIsdp' in MQTT 3.1 and 5.0 + protocolVersion: 4, // Or 3 in MQTT 3.1, or 5 in MQTT 5.0 + clean: true, // Can also be false + clientId: 'my-device', + keepalive: 0, // Seconds which can be any positive number, with 0 as the default setting + username: 'matteo', + password: Buffer.from('collina'), // Passwords are buffers + will: { + topic: 'mydevice/status', + payload: Buffer.from('dead'), // Payloads are buffers + properties: { // MQTT 5.0 + willDelayInterval: 1234, + payloadFormatIndicator: false, + messageExpiryInterval: 4321, + contentType: 'test', + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + 'test': 'test' + } + } + }, + properties: { // MQTT 5.0 properties + sessionExpiryInterval: 1234, + receiveMaximum: 432, + maximumPacketSize: 100, + topicAliasMaximum: 456, + requestResponseInformation: true, + requestProblemInformation: true, + userProperties: { + 'test': 'test' + }, + authenticationMethod: 'test', + authenticationData: Buffer.from([1, 2, 3, 4]) + } +} +``` + +If `protocolVersion` is 3, `clientId` is mandatory and `generate` will throw if +missing. + +If `password` or `will.payload` are passed as strings, they will +automatically be converted into a `Buffer`. 
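As an illustrative sketch (not part of the upstream docs), a minimal MQTT 3.1.1 connect packet only needs a few of the fields listed above; the `clientId` value is made up:

```js
const mqtt = require('mqtt-packet')

// Minimal 3.1.1 connect packet; omitted fields fall back to their defaults.
const connect = mqtt.generate({
  cmd: 'connect',
  protocolId: 'MQTT',
  protocolVersion: 4,
  clean: true,
  clientId: 'my-device', // illustrative id
  keepalive: 60
})

console.log(connect.length, 'bytes')

// Feeding the buffer back through the parser yields the same packet shape.
const parser = mqtt.parser({ protocolVersion: 4 })
parser.on('packet', packet => console.log(packet.cmd, packet.clientId))
parser.parse(connect)
```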
+ +### Connack + +```js +{ + cmd: 'connack', + returnCode: 0, // Or whatever else you see fit MQTT < 5.0 + sessionPresent: false, // Can also be true. + reasonCode: 0, // reason code MQTT 5.0 + properties: { // MQTT 5.0 properties + sessionExpiryInterval: 1234, + receiveMaximum: 432, + maximumQoS: 2, + retainAvailable: true, + maximumPacketSize: 100, + assignedClientIdentifier: 'test', + topicAliasMaximum: 456, + reasonString: 'test', + userProperties: { + 'test': 'test' + }, + wildcardSubscriptionAvailable: true, + subscriptionIdentifiersAvailable: true, + sharedSubscriptionAvailable: false, + serverKeepAlive: 1234, + responseInformation: 'test', + serverReference: 'test', + authenticationMethod: 'test', + authenticationData: Buffer.from([1, 2, 3, 4]) + } +} +``` + +The only mandatory argument is `returnCode`, as `generate` will throw if +missing. + +### Subscribe + +```js +{ + cmd: 'subscribe', + messageId: 42, + properties: { // MQTT 5.0 properties + subscriptionIdentifier: 145, + userProperties: { + test: 'test' + } + } + subscriptions: [{ + topic: 'test', + qos: 0, + nl: false, // no Local MQTT 5.0 flag + rap: true, // Retain as Published MQTT 5.0 flag + rh: 1 // Retain Handling MQTT 5.0 + }] +} +``` + +All properties are mandatory. + +### Suback + +```js +{ + cmd: 'suback', + messageId: 42, + properties: { // MQTT 5.0 properties + reasonString: 'test', + userProperties: { + 'test': 'test' + } + } + granted: [0, 1, 2, 128] +} +``` + +All the granted qos __must__ be < 256, as they are encoded as UInt8. +All properties are mandatory. + +### Unsubscribe + +```js +{ + cmd: 'unsubscribe', + messageId: 42, + properties: { // MQTT 5.0 properties + userProperties: { + 'test': 'test' + } + } + unsubscriptions: [ + 'test', + 'a/topic' + ] +} +``` + +All properties are mandatory. + +### Unsuback + +```js +{ + cmd: 'unsuback', + messageId: 42, + properties: { // MQTT 5.0 properties + reasonString: 'test', + userProperties: { + 'test': 'test' + } + } +} +``` + +All properties are mandatory. + +### Publish + +```js +{ + cmd: 'publish', + messageId: 42, + qos: 2, + dup: false, + topic: 'test', + payload: Buffer.from('test'), + retain: false, + properties: { // optional properties MQTT 5.0 + payloadFormatIndicator: true, + messageExpiryInterval: 4321, + topicAlias: 100, + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + 'test': 'test' + }, + subscriptionIdentifier: 120, // can be an Array in message from broker, if message included in few another subscriptions + contentType: 'test' + } +} +``` + +Only the `topic` property is mandatory. +Both `topic` and `payload` can be `Buffer` objects instead of strings. +`messageId` is mandatory for `qos > 0`. + +### Puback + +```js +{ + cmd: 'puback', + messageId: 42, + reasonCode: 16, // only for MQTT 5.0 + properties: { // MQTT 5.0 properties + reasonString: 'test', + userProperties: { + 'test': 'test' + } + } +} +``` + +The only mandatory property is `messageId`, as `generate` will throw if +missing. + +### Pubrec + +```js +{ + cmd: 'pubrec', + messageId: 42, + reasonCode: 16, // only for MQTT 5.0 + properties: { // properties MQTT 5.0 + reasonString: 'test', + userProperties: { + 'test': 'test' + } + } +} +``` + +The only mandatory property is `messageId`, as `generate` will throw if +missing. 
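To tie the acknowledgement formats above together, here is a small sketch (not from the upstream README) that generates a puback for MQTT 3.1.1 and a pubrec for MQTT 5.0; the reason string is illustrative:

```js
const mqtt = require('mqtt-packet')

// MQTT 3.1.1: only messageId is needed.
const puback = mqtt.generate({ cmd: 'puback', messageId: 42 })

// MQTT 5.0: reasonCode and properties become available.
const pubrec = mqtt.generate({
  cmd: 'pubrec',
  messageId: 42,
  reasonCode: 16, // "No matching subscribers"
  properties: {
    reasonString: 'no subscribers' // illustrative
  }
}, { protocolVersion: 5 })

console.log(puback.length, pubrec.length)
```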
+ +### Pubrel + +```js +{ + cmd: 'pubrel', + messageId: 42, + reasonCode: 16, // only for MQTT 5.0 + properties: { // properties MQTT 5.0 + reasonString: 'test', + userProperties: { + 'test': 'test' + } + } +} +``` + +The only mandatory property is `messageId`, as `generate` will throw if +missing. + +### Pubcomp + +```js +{ + cmd: 'pubcomp', + messageId: 42, + reasonCode: 16, // only for MQTT 5.0 + properties: { // properties MQTT 5.0 + reasonString: 'test', + userProperties: { + 'test': 'test' + } + } +} +``` + +The only mandatory property is `messageId`, as `generate` will throw if +missing. + +### Pingreq + +```js +{ + cmd: 'pingreq' +} +``` + +### Pingresp + +```js +{ + cmd: 'pingresp' +} +``` + +### Disconnect + +```js +{ + cmd: 'disconnect', + reasonCode: 0, // MQTT 5.0 code + properties: { // properties MQTT 5.0 + sessionExpiryInterval: 145, + reasonString: 'test', + userProperties: { + 'test': 'test' + }, + serverReference: 'test' + } +} +``` + +### Auth + +```js +{ + cmd: 'auth', + reasonCode: 0, // MQTT 5.0 code + properties: { // properties MQTT 5.0 + authenticationMethod: 'test', + authenticationData: Buffer.from([0, 1, 2, 3]), + reasonString: 'test', + userProperties: { + 'test': 'test' + } + } +} +``` + + + +Contributing +------------ + +mqtt-packet is an **OPEN Open Source Project**. This means that: + +> Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project. + +See the [CONTRIBUTING.md](https://github.com/mqttjs/mqtt-packet/blob/master/CONTRIBUTING.md) file for more details. + +### Contributors + +mqtt-packet is only possible due to the excellent work of the following contributors: + + + + + + +
+- Matteo Collina (GitHub/mcollina, Twitter/@matteocollina)
+- Adam Rudd (GitHub/adamvr, Twitter/@adam_vr)
+- Peter Sorowka (GitHub/psorowka, Twitter/@psorowka)
+- Siarhei Buntsevich (GitHub/scarry1992)
+ +License +------- + +MIT diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/generate.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/generate.js new file mode 100644 index 000000000..0d7ce7e84 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/generate.js @@ -0,0 +1,26 @@ +const mqtt = require('../') +const max = 100000 +let i +const buf = Buffer.from('test') + +// initialize it +mqtt.generate({ + cmd: 'publish', + topic: 'test', + payload: buf +}) + +const start = Date.now() + +for (i = 0; i < max; i++) { + mqtt.generate({ + cmd: 'publish', + topic: 'test', + payload: buf + }) +} + +const time = Date.now() - start +console.log('Total time', time) +console.log('Total packets', max) +console.log('Packet/s', max / time * 1000) diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/generateNet.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/generateNet.js new file mode 100644 index 000000000..852ac9418 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/generateNet.js @@ -0,0 +1,51 @@ + +const mqtt = require('../') +const max = 1000000 +let i = 0 +const start = Date.now() +let time +const buf = Buffer.allocUnsafe(10) +const net = require('net') +const server = net.createServer(handle) +let dest + +buf.fill('test') + +function handle (sock) { + sock.resume() +} + +server.listen(0, () => { + dest = net.connect(server.address()) + + dest.on('connect', tickWait) + dest.on('drain', tickWait) + dest.on('finish', () => { + time = Date.now() - start + console.log('Total time', time) + console.log('Total packets', max) + console.log('Packet/s', max / time * 1000) + server.close() + }) +}) + +function tickWait () { + // console.log('tickWait', i) + let res = true + // var toSend = new Buffer(5 + buf.length) + + for (; i < max && res; i++) { + res = dest.write(mqtt.generate({ + cmd: 'publish', + topic: 'test', + payload: buf + })) + // buf.copy(toSend, 5) + // res = dest.write(toSend, 'buffer') + // console.log(res) + } + + if (i >= max) { + dest.end() + } +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/parse.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/parse.js new file mode 100644 index 000000000..e6df8ee09 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/parse.js @@ -0,0 +1,20 @@ + +const mqtt = require('../') +const parser = mqtt.parser() +const max = 10000000 +let i +const start = Date.now() / 1000 + +for (i = 0; i < max; i++) { + parser.parse(Buffer.from([ + 48, 10, // Header (publish) + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 116, 101, 115, 116 // Payload (test) + ])) +} + +const time = Date.now() / 1000 - start +console.log('Total packets', max) +console.log('Total time', Math.round(time * 100) / 100) +console.log('Packet/s', max / time) diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/writeToStream.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/writeToStream.js new file mode 100644 index 000000000..919b53d97 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/benchmarks/writeToStream.js @@ -0,0 +1,49 @@ + +const mqtt = require('../') +const max = 1000000 +let i = 0 +const start = Date.now() +let time +const buf = Buffer.allocUnsafe(10) +const net = require('net') +const server = net.createServer(handle) +let dest + +function handle 
(sock) { + sock.resume() +} + +buf.fill('test') + +server.listen(0, () => { + dest = net.connect(server.address()) + + dest.on('connect', tickWait) + dest.on('drain', tickWait) + dest.on('finish', () => { + time = Date.now() - start + console.log('Total time', time) + console.log('Total packets', max) + console.log('Packet/s', max / time * 1000) + server.close() + }) +}) + +function tickWait () { + let res = true + // var toSend = new Buffer(5) + + for (; i < max && res; i++) { + res = mqtt.writeToStream({ + cmd: 'publish', + topic: 'test', + payload: buf + }, dest) + // dest.write(toSend, 'buffer') + // res = dest.write(buf, 'buffer') + } + + if (i >= max) { + dest.end() + } +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/constants.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/constants.js new file mode 100644 index 000000000..1e55e4e35 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/constants.js @@ -0,0 +1,187 @@ +/* Protocol - protocol constants */ +const protocol = module.exports + +/* Command code => mnemonic */ +protocol.types = { + 0: 'reserved', + 1: 'connect', + 2: 'connack', + 3: 'publish', + 4: 'puback', + 5: 'pubrec', + 6: 'pubrel', + 7: 'pubcomp', + 8: 'subscribe', + 9: 'suback', + 10: 'unsubscribe', + 11: 'unsuback', + 12: 'pingreq', + 13: 'pingresp', + 14: 'disconnect', + 15: 'auth' +} + +/* Mnemonic => Command code */ +protocol.codes = {} +for (const k in protocol.types) { + const v = protocol.types[k] + protocol.codes[v] = k +} + +/* Header */ +protocol.CMD_SHIFT = 4 +protocol.CMD_MASK = 0xF0 +protocol.DUP_MASK = 0x08 +protocol.QOS_MASK = 0x03 +protocol.QOS_SHIFT = 1 +protocol.RETAIN_MASK = 0x01 + +/* Length */ +protocol.VARBYTEINT_MASK = 0x7F +protocol.VARBYTEINT_FIN_MASK = 0x80 +protocol.VARBYTEINT_MAX = 268435455 + +/* Connack */ +protocol.SESSIONPRESENT_MASK = 0x01 +protocol.SESSIONPRESENT_HEADER = Buffer.from([protocol.SESSIONPRESENT_MASK]) +protocol.CONNACK_HEADER = Buffer.from([protocol.codes.connack << protocol.CMD_SHIFT]) + +/* Connect */ +protocol.USERNAME_MASK = 0x80 +protocol.PASSWORD_MASK = 0x40 +protocol.WILL_RETAIN_MASK = 0x20 +protocol.WILL_QOS_MASK = 0x18 +protocol.WILL_QOS_SHIFT = 3 +protocol.WILL_FLAG_MASK = 0x04 +protocol.CLEAN_SESSION_MASK = 0x02 +protocol.CONNECT_HEADER = Buffer.from([protocol.codes.connect << protocol.CMD_SHIFT]) + +/* Properties */ +protocol.properties = { + sessionExpiryInterval: 17, + willDelayInterval: 24, + receiveMaximum: 33, + maximumPacketSize: 39, + topicAliasMaximum: 34, + requestResponseInformation: 25, + requestProblemInformation: 23, + userProperties: 38, + authenticationMethod: 21, + authenticationData: 22, + payloadFormatIndicator: 1, + messageExpiryInterval: 2, + contentType: 3, + responseTopic: 8, + correlationData: 9, + maximumQoS: 36, + retainAvailable: 37, + assignedClientIdentifier: 18, + reasonString: 31, + wildcardSubscriptionAvailable: 40, + subscriptionIdentifiersAvailable: 41, + sharedSubscriptionAvailable: 42, + serverKeepAlive: 19, + responseInformation: 26, + serverReference: 28, + topicAlias: 35, + subscriptionIdentifier: 11 +} +protocol.propertiesCodes = {} +for (const prop in protocol.properties) { + const id = protocol.properties[prop] + protocol.propertiesCodes[id] = prop +} +protocol.propertiesTypes = { + sessionExpiryInterval: 'int32', + willDelayInterval: 'int32', + receiveMaximum: 'int16', + maximumPacketSize: 'int32', + topicAliasMaximum: 'int16', + requestResponseInformation: 'byte', + requestProblemInformation: 'byte', + 
userProperties: 'pair', + authenticationMethod: 'string', + authenticationData: 'binary', + payloadFormatIndicator: 'byte', + messageExpiryInterval: 'int32', + contentType: 'string', + responseTopic: 'string', + correlationData: 'binary', + maximumQoS: 'int8', + retainAvailable: 'byte', + assignedClientIdentifier: 'string', + reasonString: 'string', + wildcardSubscriptionAvailable: 'byte', + subscriptionIdentifiersAvailable: 'byte', + sharedSubscriptionAvailable: 'byte', + serverKeepAlive: 'int16', + responseInformation: 'string', + serverReference: 'string', + topicAlias: 'int16', + subscriptionIdentifier: 'var' +} + +function genHeader (type) { + return [0, 1, 2].map(qos => { + return [0, 1].map(dup => { + return [0, 1].map(retain => { + const buf = Buffer.alloc(1) + buf.writeUInt8( + protocol.codes[type] << protocol.CMD_SHIFT | + (dup ? protocol.DUP_MASK : 0) | + qos << protocol.QOS_SHIFT | retain, 0, true) + return buf + }) + }) + }) +} + +/* Publish */ +protocol.PUBLISH_HEADER = genHeader('publish') + +/* Subscribe */ +protocol.SUBSCRIBE_HEADER = genHeader('subscribe') +protocol.SUBSCRIBE_OPTIONS_QOS_MASK = 0x03 +protocol.SUBSCRIBE_OPTIONS_NL_MASK = 0x01 +protocol.SUBSCRIBE_OPTIONS_NL_SHIFT = 2 +protocol.SUBSCRIBE_OPTIONS_RAP_MASK = 0x01 +protocol.SUBSCRIBE_OPTIONS_RAP_SHIFT = 3 +protocol.SUBSCRIBE_OPTIONS_RH_MASK = 0x03 +protocol.SUBSCRIBE_OPTIONS_RH_SHIFT = 4 +protocol.SUBSCRIBE_OPTIONS_RH = [0x00, 0x10, 0x20] +protocol.SUBSCRIBE_OPTIONS_NL = 0x04 +protocol.SUBSCRIBE_OPTIONS_RAP = 0x08 +protocol.SUBSCRIBE_OPTIONS_QOS = [0x00, 0x01, 0x02] + +/* Unsubscribe */ +protocol.UNSUBSCRIBE_HEADER = genHeader('unsubscribe') + +/* Confirmations */ +protocol.ACKS = { + unsuback: genHeader('unsuback'), + puback: genHeader('puback'), + pubcomp: genHeader('pubcomp'), + pubrel: genHeader('pubrel'), + pubrec: genHeader('pubrec') +} + +protocol.SUBACK_HEADER = Buffer.from([protocol.codes.suback << protocol.CMD_SHIFT]) + +/* Protocol versions */ +protocol.VERSION3 = Buffer.from([3]) +protocol.VERSION4 = Buffer.from([4]) +protocol.VERSION5 = Buffer.from([5]) +protocol.VERSION131 = Buffer.from([131]) +protocol.VERSION132 = Buffer.from([132]) + +/* QoS */ +protocol.QOS = [0, 1, 2].map(qos => { + return Buffer.from([qos]) +}) + +/* Empty packets */ +protocol.EMPTY = { + pingreq: Buffer.from([protocol.codes.pingreq << 4, 0]), + pingresp: Buffer.from([protocol.codes.pingresp << 4, 0]), + disconnect: Buffer.from([protocol.codes.disconnect << 4, 0]) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/generate.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/generate.js new file mode 100644 index 000000000..a426bdbb0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/generate.js @@ -0,0 +1,52 @@ +const writeToStream = require('./writeToStream') +const EventEmitter = require('events') + +function generate (packet, opts) { + const stream = new Accumulator() + writeToStream(packet, stream, opts) + return stream.concat() +} + +class Accumulator extends EventEmitter { + constructor () { + super() + this._array = new Array(20) + this._i = 0 + } + + write (chunk) { + this._array[this._i++] = chunk + return true + } + + concat () { + let length = 0 + const lengths = new Array(this._array.length) + const list = this._array + let pos = 0 + let i + + for (i = 0; i < list.length && list[i] !== undefined; i++) { + if (typeof list[i] !== 'string') lengths[i] = list[i].length + else lengths[i] = Buffer.byteLength(list[i]) + + length += lengths[i] + } + + const result = 
Buffer.allocUnsafe(length) + + for (i = 0; i < list.length && list[i] !== undefined; i++) { + if (typeof list[i] !== 'string') { + list[i].copy(result, pos) + pos += lengths[i] + } else { + result.write(list[i], pos) + pos += lengths[i] + } + } + + return result + } +} + +module.exports = generate diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/mqtt.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/mqtt.js new file mode 100644 index 000000000..f23c8d76b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/mqtt.js @@ -0,0 +1,3 @@ +exports.parser = require('./parser').parser +exports.generate = require('./generate') +exports.writeToStream = require('./writeToStream') diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/numbers.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/numbers.js new file mode 100644 index 000000000..c89ea11fe --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/numbers.js @@ -0,0 +1,58 @@ +const max = 65536 +const cache = {} + +// in node 6 Buffer.subarray returns a Uint8Array instead of a Buffer +// later versions return a Buffer +// alternative is Buffer.slice but that creates a new buffer +// creating new buffers takes time +// SubOk is only false on node < 8 +const SubOk = Buffer.isBuffer(Buffer.from([1, 2]).subarray(0, 1)) + +function generateBuffer (i) { + const buffer = Buffer.allocUnsafe(2) + buffer.writeUInt8(i >> 8, 0) + buffer.writeUInt8(i & 0x00FF, 0 + 1) + + return buffer +} + +function generateCache () { + for (let i = 0; i < max; i++) { + cache[i] = generateBuffer(i) + } +} + +function genBufVariableByteInt (num) { + const maxLength = 4 // max 4 bytes + let digit = 0 + let pos = 0 + const buffer = Buffer.allocUnsafe(maxLength) + + do { + digit = num % 128 | 0 + num = num / 128 | 0 + if (num > 0) digit = digit | 0x80 + + buffer.writeUInt8(digit, pos++) + } while (num > 0 && pos < maxLength) + + if (num > 0) { + pos = 0 + } + + return SubOk ? 
buffer.subarray(0, pos) : buffer.slice(0, pos) +} + +function generate4ByteBuffer (num) { + const buffer = Buffer.allocUnsafe(4) + buffer.writeUInt32BE(num, 0) + return buffer +} + +module.exports = { + cache, + generateCache, + generateNumber: generateBuffer, + genBufVariableByteInt, + generate4ByteBuffer +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/package.json b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/package.json new file mode 100644 index 000000000..b1021bc64 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/package.json @@ -0,0 +1,91 @@ +{ + "_from": "mqtt-packet@^6.3.2", + "_id": "mqtt-packet@6.10.0", + "_inBundle": false, + "_integrity": "sha512-ja8+mFKIHdB1Tpl6vac+sktqy3gA8t9Mduom1BA75cI+R9AHnZOiaBQwpGiWnaVJLDGRdNhQmFaAqd7tkKSMGA==", + "_location": "/mqtt-packet", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "mqtt-packet@^6.3.2", + "name": "mqtt-packet", + "escapedName": "mqtt-packet", + "rawSpec": "^6.3.2", + "saveSpec": null, + "fetchSpec": "^6.3.2" + }, + "_requiredBy": [ + "/aedes", + "/aedes-packet" + ], + "_resolved": "https://registry.npmjs.org/mqtt-packet/-/mqtt-packet-6.10.0.tgz", + "_shasum": "c8b507832c4152e3e511c0efa104ae4a64cd418f", + "_spec": "mqtt-packet@^6.3.2", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "bugs": { + "url": "https://github.com/mqttjs/mqtt-packet/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Matteo Collina", + "email": "matteo.collina@gmail.com", + "url": "https://github.com/mcollina" + }, + { + "name": "Adam Rudd", + "email": "damvrr@gmail.com" + }, + { + "name": "Peter Sorowka", + "url": "https://github.com/psorowka" + }, + { + "name": "Wouter Klijn", + "email": "contact@wuhkuh.com", + "url": "https://github.com/wuhkuh" + }, + { + "name": "Siarhei Buntsevich", + "url": "https://github.com/scarry1992" + } + ], + "dependencies": { + "bl": "^4.0.2", + "debug": "^4.1.1", + "process-nextick-args": "^2.0.1" + }, + "deprecated": false, + "description": "Parse and generate MQTT packets like a breeze", + "devDependencies": { + "pre-commit": "^1.2.2", + "readable-stream": "^3.6.0", + "standard": "^14.3.4", + "tap-spec": "^5.0.0", + "tape": "^5.0.1" + }, + "homepage": "https://github.com/mqttjs/mqtt-packet", + "keywords": [ + "MQTT", + "packet", + "parse", + "publish", + "subscribe", + "pubsub" + ], + "license": "MIT", + "main": "mqtt.js", + "name": "mqtt-packet", + "pre-commit": "test", + "repository": { + "type": "git", + "url": "git+https://github.com/mqttjs/mqtt-packet.git" + }, + "scripts": { + "ci": "tape test.js && node testRandom && standard", + "test": "tape test.js | tap-spec && standard" + }, + "types": "types/index.d.ts", + "version": "6.10.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/packet.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/packet.js new file mode 100644 index 000000000..4ef1e787d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/packet.js @@ -0,0 +1,13 @@ +class Packet { + constructor () { + this.cmd = null + this.retain = false + this.qos = 0 + this.dup = false + this.length = -1 + this.topic = null + this.payload = null + } +} + +module.exports = Packet diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/parser.js 
b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/parser.js new file mode 100644 index 000000000..da599ebef --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/parser.js @@ -0,0 +1,716 @@ +const bl = require('bl') +const EventEmitter = require('events') +const Packet = require('./packet') +const constants = require('./constants') +const debug = require('debug')('mqtt-packet:parser') + +class Parser extends EventEmitter { + constructor () { + super() + this.parser = this.constructor.parser + } + + static parser (opt) { + if (!(this instanceof Parser)) return (new Parser()).parser(opt) + + this.settings = opt || {} + + this._states = [ + '_parseHeader', + '_parseLength', + '_parsePayload', + '_newPacket' + ] + + this._resetState() + return this + } + + _resetState () { + debug('_resetState: resetting packet, error, _list, and _stateCounter') + this.packet = new Packet() + this.error = null + this._list = bl() + this._stateCounter = 0 + } + + parse (buf) { + if (this.error) this._resetState() + + this._list.append(buf) + debug('parse: current state: %s', this._states[this._stateCounter]) + while ((this.packet.length !== -1 || this._list.length > 0) && + this[this._states[this._stateCounter]]() && + !this.error) { + this._stateCounter++ + debug('parse: state complete. _stateCounter is now: %d', this._stateCounter) + debug('parse: packet.length: %d, buffer list length: %d', this.packet.length, this._list.length) + if (this._stateCounter >= this._states.length) this._stateCounter = 0 + } + debug('parse: exited while loop. packet: %d, buffer list length: %d', this.packet.length, this._list.length) + return this._list.length + } + + _parseHeader () { + // There is at least one byte in the buffer + const zero = this._list.readUInt8(0) + this.packet.cmd = constants.types[zero >> constants.CMD_SHIFT] + this.packet.retain = (zero & constants.RETAIN_MASK) !== 0 + this.packet.qos = (zero >> constants.QOS_SHIFT) & constants.QOS_MASK + this.packet.dup = (zero & constants.DUP_MASK) !== 0 + debug('_parseHeader: packet: %o', this.packet) + + this._list.consume(1) + + return true + } + + _parseLength () { + // There is at least one byte in the list + const result = this._parseVarByteNum(true) + + if (result) { + this.packet.length = result.value + this._list.consume(result.bytes) + } + debug('_parseLength %d', result.value) + return !!result + } + + _parsePayload () { + debug('_parsePayload: payload %O', this._list) + let result = false + + // Do we have a payload? Do we have enough data to complete the payload? 
+ // PINGs have no payload + if (this.packet.length === 0 || this._list.length >= this.packet.length) { + this._pos = 0 + + switch (this.packet.cmd) { + case 'connect': + this._parseConnect() + break + case 'connack': + this._parseConnack() + break + case 'publish': + this._parsePublish() + break + case 'puback': + case 'pubrec': + case 'pubrel': + case 'pubcomp': + this._parseConfirmation() + break + case 'subscribe': + this._parseSubscribe() + break + case 'suback': + this._parseSuback() + break + case 'unsubscribe': + this._parseUnsubscribe() + break + case 'unsuback': + this._parseUnsuback() + break + case 'pingreq': + case 'pingresp': + // These are empty, nothing to do + break + case 'disconnect': + this._parseDisconnect() + break + case 'auth': + this._parseAuth() + break + default: + this._emitError(new Error('Not supported')) + } + + result = true + } + debug('_parsePayload complete result: %s', result) + return result + } + + _parseConnect () { + debug('_parseConnect') + let topic // Will topic + let payload // Will payload + let password // Password + let username // Username + const flags = {} + const packet = this.packet + + // Parse protocolId + const protocolId = this._parseString() + + if (protocolId === null) return this._emitError(new Error('Cannot parse protocolId')) + if (protocolId !== 'MQTT' && protocolId !== 'MQIsdp') { + return this._emitError(new Error('Invalid protocolId')) + } + + packet.protocolId = protocolId + + // Parse constants version number + if (this._pos >= this._list.length) return this._emitError(new Error('Packet too short')) + + packet.protocolVersion = this._list.readUInt8(this._pos) + + if (packet.protocolVersion >= 128) { + packet.bridgeMode = true + packet.protocolVersion = packet.protocolVersion - 128 + } + + if (packet.protocolVersion !== 3 && packet.protocolVersion !== 4 && packet.protocolVersion !== 5) { + return this._emitError(new Error('Invalid protocol version')) + } + + this._pos++ + + if (this._pos >= this._list.length) { + return this._emitError(new Error('Packet too short')) + } + + // Parse connect flags + flags.username = (this._list.readUInt8(this._pos) & constants.USERNAME_MASK) + flags.password = (this._list.readUInt8(this._pos) & constants.PASSWORD_MASK) + flags.will = (this._list.readUInt8(this._pos) & constants.WILL_FLAG_MASK) + + if (flags.will) { + packet.will = {} + packet.will.retain = (this._list.readUInt8(this._pos) & constants.WILL_RETAIN_MASK) !== 0 + packet.will.qos = (this._list.readUInt8(this._pos) & + constants.WILL_QOS_MASK) >> constants.WILL_QOS_SHIFT + } + + packet.clean = (this._list.readUInt8(this._pos) & constants.CLEAN_SESSION_MASK) !== 0 + this._pos++ + + // Parse keepalive + packet.keepalive = this._parseNum() + if (packet.keepalive === -1) return this._emitError(new Error('Packet too short')) + + // parse properties + if (packet.protocolVersion === 5) { + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + } + // Parse clientId + const clientId = this._parseString() + if (clientId === null) return this._emitError(new Error('Packet too short')) + packet.clientId = clientId + debug('_parseConnect: packet.clientId: %s', packet.clientId) + + if (flags.will) { + if (packet.protocolVersion === 5) { + const willProperties = this._parseProperties() + if (Object.getOwnPropertyNames(willProperties).length) { + packet.will.properties = willProperties + } + } + // Parse will topic + topic = this._parseString() + if (topic === null) 
return this._emitError(new Error('Cannot parse will topic')) + packet.will.topic = topic + debug('_parseConnect: packet.will.topic: %s', packet.will.topic) + + // Parse will payload + payload = this._parseBuffer() + if (payload === null) return this._emitError(new Error('Cannot parse will payload')) + packet.will.payload = payload + debug('_parseConnect: packet.will.paylaod: %s', packet.will.payload) + } + + // Parse username + if (flags.username) { + username = this._parseString() + if (username === null) return this._emitError(new Error('Cannot parse username')) + packet.username = username + debug('_parseConnect: packet.username: %s', packet.username) + } + + // Parse password + if (flags.password) { + password = this._parseBuffer() + if (password === null) return this._emitError(new Error('Cannot parse password')) + packet.password = password + } + // need for right parse auth packet and self set up + this.settings = packet + debug('_parseConnect: complete') + return packet + } + + _parseConnack () { + debug('_parseConnack') + const packet = this.packet + + if (this._list.length < 1) return null + packet.sessionPresent = !!(this._list.readUInt8(this._pos++) & constants.SESSIONPRESENT_MASK) + + if (this.settings.protocolVersion === 5) { + if (this._list.length >= 2) { + packet.reasonCode = this._list.readUInt8(this._pos++) + } else { + packet.reasonCode = 0 + } + } else { + if (this._list.length < 2) return null + packet.returnCode = this._list.readUInt8(this._pos++) + } + + if (packet.returnCode === -1 || packet.reasonCode === -1) return this._emitError(new Error('Cannot parse return code')) + // mqtt 5 properties + if (this.settings.protocolVersion === 5) { + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + } + debug('_parseConnack: complete') + } + + _parsePublish () { + debug('_parsePublish') + const packet = this.packet + packet.topic = this._parseString() + + if (packet.topic === null) return this._emitError(new Error('Cannot parse topic')) + + // Parse messageId + if (packet.qos > 0) if (!this._parseMessageId()) { return } + + // Properties mqtt 5 + if (this.settings.protocolVersion === 5) { + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + } + + packet.payload = this._list.slice(this._pos, packet.length) + debug('_parsePublish: payload from buffer list: %o', packet.payload) + } + + _parseSubscribe () { + debug('_parseSubscribe') + const packet = this.packet + let topic + let options + let qos + let rh + let rap + let nl + let subscription + + if (packet.qos !== 1) { + return this._emitError(new Error('Wrong subscribe header')) + } + + packet.subscriptions = [] + + if (!this._parseMessageId()) { return } + + // Properties mqtt 5 + if (this.settings.protocolVersion === 5) { + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + } + + while (this._pos < packet.length) { + // Parse topic + topic = this._parseString() + if (topic === null) return this._emitError(new Error('Cannot parse topic')) + if (this._pos >= packet.length) return this._emitError(new Error('Malformed Subscribe Payload')) + + options = this._parseByte() + qos = options & constants.SUBSCRIBE_OPTIONS_QOS_MASK + nl = ((options >> constants.SUBSCRIBE_OPTIONS_NL_SHIFT) & constants.SUBSCRIBE_OPTIONS_NL_MASK) !== 0 + rap = ((options >> 
constants.SUBSCRIBE_OPTIONS_RAP_SHIFT) & constants.SUBSCRIBE_OPTIONS_RAP_MASK) !== 0 + rh = (options >> constants.SUBSCRIBE_OPTIONS_RH_SHIFT) & constants.SUBSCRIBE_OPTIONS_RH_MASK + + subscription = { topic, qos } + + // mqtt 5 options + if (this.settings.protocolVersion === 5) { + subscription.nl = nl + subscription.rap = rap + subscription.rh = rh + } else if (this.settings.bridgeMode) { + subscription.rh = 0 + subscription.rap = true + subscription.nl = true + } + + // Push pair to subscriptions + debug('_parseSubscribe: push subscription `%s` to subscription', subscription) + packet.subscriptions.push(subscription) + } + } + + _parseSuback () { + debug('_parseSuback') + const packet = this.packet + this.packet.granted = [] + + if (!this._parseMessageId()) { return } + + // Properties mqtt 5 + if (this.settings.protocolVersion === 5) { + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + } + + // Parse granted QoSes + while (this._pos < this.packet.length) { + this.packet.granted.push(this._list.readUInt8(this._pos++)) + } + } + + _parseUnsubscribe () { + debug('_parseUnsubscribe') + const packet = this.packet + + packet.unsubscriptions = [] + + // Parse messageId + if (!this._parseMessageId()) { return } + + // Properties mqtt 5 + if (this.settings.protocolVersion === 5) { + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + } + + while (this._pos < packet.length) { + // Parse topic + const topic = this._parseString() + if (topic === null) return this._emitError(new Error('Cannot parse topic')) + + // Push topic to unsubscriptions + debug('_parseUnsubscribe: push topic `%s` to unsubscriptions', topic) + packet.unsubscriptions.push(topic) + } + } + + _parseUnsuback () { + debug('_parseUnsuback') + const packet = this.packet + if (!this._parseMessageId()) return this._emitError(new Error('Cannot parse messageId')) + // Properties mqtt 5 + if (this.settings.protocolVersion === 5) { + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + // Parse granted QoSes + packet.granted = [] + while (this._pos < this.packet.length) { + this.packet.granted.push(this._list.readUInt8(this._pos++)) + } + } + } + + // parse packets like puback, pubrec, pubrel, pubcomp + _parseConfirmation () { + debug('_parseConfirmation: packet.cmd: `%s`', this.packet.cmd) + const packet = this.packet + + this._parseMessageId() + + if (this.settings.protocolVersion === 5) { + if (packet.length > 2) { + // response code + packet.reasonCode = this._parseByte() + debug('_parseConfirmation: packet.reasonCode `%d`', packet.reasonCode) + } else { + packet.reasonCode = 0 + } + + if (packet.length > 3) { + // properies mqtt 5 + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + } + } + + return true + } + + // parse disconnect packet + _parseDisconnect () { + const packet = this.packet + debug('_parseDisconnect') + + if (this.settings.protocolVersion === 5) { + // response code + if (this._list.length > 0) { + packet.reasonCode = this._parseByte() + } else { + packet.reasonCode = 0 + } + // properies mqtt 5 + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + } + + debug('_parseDisconnect result: true') + 
return true + } + + // parse auth packet + _parseAuth () { + debug('_parseAuth') + const packet = this.packet + + if (this.settings.protocolVersion !== 5) { + return this._emitError(new Error('Not supported auth packet for this version MQTT')) + } + + // response code + packet.reasonCode = this._parseByte() + // properies mqtt 5 + const properties = this._parseProperties() + if (Object.getOwnPropertyNames(properties).length) { + packet.properties = properties + } + + debug('_parseAuth: result: true') + return true + } + + _parseMessageId () { + const packet = this.packet + + packet.messageId = this._parseNum() + + if (packet.messageId === null) { + this._emitError(new Error('Cannot parse messageId')) + return false + } + + debug('_parseMessageId: packet.messageId %d', packet.messageId) + return true + } + + _parseString (maybeBuffer) { + const length = this._parseNum() + const end = length + this._pos + + if (length === -1 || end > this._list.length || end > this.packet.length) return null + + const result = this._list.toString('utf8', this._pos, end) + this._pos += length + debug('_parseString: result: %s', result) + return result + } + + _parseStringPair () { + debug('_parseStringPair') + return { + name: this._parseString(), + value: this._parseString() + } + } + + _parseBuffer () { + const length = this._parseNum() + const end = length + this._pos + + if (length === -1 || end > this._list.length || end > this.packet.length) return null + + const result = this._list.slice(this._pos, end) + + this._pos += length + debug('_parseBuffer: result: %o', result) + return result + } + + _parseNum () { + if (this._list.length - this._pos < 2) return -1 + + const result = this._list.readUInt16BE(this._pos) + this._pos += 2 + debug('_parseNum: result: %s', result) + return result + } + + _parse4ByteNum () { + if (this._list.length - this._pos < 4) return -1 + + const result = this._list.readUInt32BE(this._pos) + this._pos += 4 + debug('_parse4ByteNum: result: %s', result) + return result + } + + _parseVarByteNum (fullInfoFlag) { + debug('_parseVarByteNum') + const maxBytes = 4 + let bytes = 0 + let mul = 1 + let value = 0 + let result = false + let current + const padding = this._pos ? this._pos : 0 + + while (bytes < maxBytes && (padding + bytes) < this._list.length) { + current = this._list.readUInt8(padding + bytes++) + value += mul * (current & constants.VARBYTEINT_MASK) + mul *= 0x80 + + if ((current & constants.VARBYTEINT_FIN_MASK) === 0) { + result = true + break + } + if (this._list.length <= bytes) { + break + } + } + + if (!result && bytes === maxBytes && this._list.length >= bytes) { + this._emitError(new Error('Invalid variable byte integer')) + } + + if (padding) { + this._pos += bytes + } + + result = result + ? fullInfoFlag ? 
{ + bytes, + value + } : value + : false + + debug('_parseVarByteNum: result: %o', result) + return result + } + + _parseByte () { + let result + if (this._pos < this._list.length) { + result = this._list.readUInt8(this._pos) + this._pos++ + } + debug('_parseByte: result: %o', result) + return result + } + + _parseByType (type) { + debug('_parseByType: type: %s', type) + switch (type) { + case 'byte': { + return this._parseByte() !== 0 + } + case 'int8': { + return this._parseByte() + } + case 'int16': { + return this._parseNum() + } + case 'int32': { + return this._parse4ByteNum() + } + case 'var': { + return this._parseVarByteNum() + } + case 'string': { + return this._parseString() + } + case 'pair': { + return this._parseStringPair() + } + case 'binary': { + return this._parseBuffer() + } + } + } + + _parseProperties () { + debug('_parseProperties') + const length = this._parseVarByteNum() + const start = this._pos + const end = start + length + const result = {} + while (this._pos < end) { + const type = this._parseByte() + if (!type) { + this._emitError(new Error('Cannot parse property code type')) + return false + } + const name = constants.propertiesCodes[type] + if (!name) { + this._emitError(new Error('Unknown property')) + return false + } + // user properties process + if (name === 'userProperties') { + if (!result[name]) { + result[name] = Object.create(null) + } + const currentUserProperty = this._parseByType(constants.propertiesTypes[name]) + if (result[name][currentUserProperty.name]) { + if (Array.isArray(result[name][currentUserProperty.name])) { + result[name][currentUserProperty.name].push(currentUserProperty.value) + } else { + const currentValue = result[name][currentUserProperty.name] + result[name][currentUserProperty.name] = [currentValue] + result[name][currentUserProperty.name].push(currentUserProperty.value) + } + } else { + result[name][currentUserProperty.name] = currentUserProperty.value + } + continue + } + if (result[name]) { + if (Array.isArray(result[name])) { + result[name].push(this._parseByType(constants.propertiesTypes[name])) + } else { + result[name] = [result[name]] + result[name].push(this._parseByType(constants.propertiesTypes[name])) + } + } else { + result[name] = this._parseByType(constants.propertiesTypes[name]) + } + } + return result + } + + _newPacket () { + debug('_newPacket') + if (this.packet) { + this._list.consume(this.packet.length) + debug('_newPacket: parser emit packet: packet.cmd: %s, packet.payload: %s, packet.length: %d', this.packet.cmd, this.packet.payload, this.packet.length) + this.emit('packet', this.packet) + } + debug('_newPacket: new packet') + this.packet = new Packet() + + this._pos = 0 + + return true + } + + _emitError (err) { + debug('_emitError') + this.error = err + this.emit('error', err) + } +} + +module.exports = Parser diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/test.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/test.js new file mode 100644 index 000000000..f12de523e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/test.js @@ -0,0 +1,2866 @@ +const util = require('util') + +const test = require('tape') +const mqtt = require('./') +const WS = require('readable-stream').Writable + +function normalExpectedObject (object) { + if (object.username != null) object.username = object.username.toString() + if (object.password != null) object.password = Buffer.from(object.password) + return object +} + +function testParseGenerate (name, object, buffer, 
opts) { + test(`${name} parse`, t => { + t.plan(2) + + const parser = mqtt.parser(opts) + const expected = object + const fixture = buffer + + parser.on('packet', packet => { + if (packet.cmd !== 'publish') { + delete packet.topic + delete packet.payload + } + t.deepLooseEqual(packet, normalExpectedObject(expected), 'expected packet') + }) + + parser.on('error', err => { + t.fail(err) + }) + + t.equal(parser.parse(fixture), 0, 'remaining bytes') + }) + + test(`${name} generate`, t => { + // For really large buffers, the expanded hex string can be so long as to + // generate an error in nodejs 14.x, so only do the test with extra output + // for relatively small buffers. + const bigLength = 10000 + const generatedBuffer = mqtt.generate(object, opts) + if (generatedBuffer.length < bigLength && buffer.length < bigLength) { + t.equal(generatedBuffer.toString('hex'), buffer.toString('hex')) + } else { + const bufferOkay = generatedBuffer.equals(buffer) + if (bufferOkay) { + t.pass() + } else { + // Output abbreviated representations of the buffers. + t.comment('Expected:\n' + util.inspect(buffer)) + t.comment('Got:\n' + util.inspect(generatedBuffer)) + t.fail('Large buffers not equal') + } + } + t.end() + }) + + test(`${name} mirror`, t => { + t.plan(2) + + const parser = mqtt.parser(opts) + const expected = object + const fixture = mqtt.generate(object, opts) + + parser.on('packet', packet => { + if (packet.cmd !== 'publish') { + delete packet.topic + delete packet.payload + } + t.deepLooseEqual(packet, normalExpectedObject(expected), 'expected packet') + }) + + parser.on('error', err => { + t.fail(err) + }) + + t.equal(parser.parse(fixture), 0, 'remaining bytes') + }) + + test(`${name} writeToStream`, t => { + const stream = WS() + stream.write = () => true + stream.on('error', (err) => t.fail(err)) + + const result = mqtt.writeToStream(object, stream, opts) + t.equal(result, true, 'should return true') + t.end() + }) +} + +// the API allows to pass strings as buffers to writeToStream and generate +// parsing them back will result in a string so only generate and compare to buffer +function testGenerateOnly (name, object, buffer, opts) { + test(name, t => { + t.equal(mqtt.generate(object, opts).toString('hex'), buffer.toString('hex')) + t.end() + }) +} + +function testParseOnly (name, object, buffer, opts) { + test(name, t => { + const parser = mqtt.parser(opts) + // const expected = object + // const fixture = buffer + + t.plan(2 + Object.keys(object).length) + + parser.on('packet', packet => { + t.equal(Object.keys(object).length, Object.keys(packet).length, 'key count') + Object.keys(object).forEach(key => { + t.deepEqual(packet[key], object[key], `expected packet property ${key}`) + }) + }) + + t.equal(parser.parse(buffer), 0, 'remaining bytes') + t.end() + }) +} + +function testParseError (expected, fixture, opts) { + test(expected, t => { + t.plan(1) + + const parser = mqtt.parser(opts) + + parser.on('error', err => { + t.equal(err.message, expected, 'expected error message') + }) + + parser.on('packet', () => { + t.fail('parse errors should not be followed by packet events') + }) + + parser.parse(fixture) + t.end() + }) +} + +function testGenerateError (expected, fixture, opts, name) { + test(name || expected, t => { + t.plan(1) + + try { + mqtt.generate(fixture, opts) + } catch (err) { + t.equal(expected, err.message) + } + t.end() + }) +} + +function testGenerateErrorMultipleCmds (cmds, expected, fixture, opts) { + cmds.forEach(cmd => { + const obj = Object.assign({}, fixture) + 
obj.cmd = cmd + testGenerateError(expected, obj, opts, `${expected} on ${cmd}`) + } + ) +} + +function testParseGenerateDefaults (name, object, buffer, generated, opts) { + testParseOnly(`${name} parse`, generated, buffer, opts) + testGenerateOnly(`${name} generate`, object, buffer, opts) +} + +function testParseAndGenerate (name, object, buffer, opts) { + testParseOnly(`${name} parse`, object, buffer, opts) + testGenerateOnly(`${name} generate`, object, buffer, opts) +} + +function testWriteToStreamError (expected, fixture) { + test(`writeToStream ${expected} error`, t => { + t.plan(2) + + const stream = WS() + + stream.write = () => t.fail('should not have called write') + stream.on('error', () => t.pass('error emitted')) + + const result = mqtt.writeToStream(fixture, stream) + + t.false(result, 'result should be false') + t.end() + }) +} + +test('cacheNumbers get/set/unset', t => { + t.true(mqtt.writeToStream.cacheNumbers, 'initial state of cacheNumbers is enabled') + mqtt.writeToStream.cacheNumbers = false + t.false(mqtt.writeToStream.cacheNumbers, 'cacheNumbers can be disabled') + mqtt.writeToStream.cacheNumbers = true + t.true(mqtt.writeToStream.cacheNumbers, 'cacheNumbers can be enabled') + t.end() +}) + +test('disabled numbers cache', t => { + const stream = WS() + const message = { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: 10, + topic: Buffer.from('test'), + payload: Buffer.from('test') + } + const expected = Buffer.from([ + 48, 10, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 116, 101, 115, 116 // Payload (test) + ]) + let written = Buffer.alloc(0) + + stream.write = (chunk) => { + written = Buffer.concat([written, chunk]) + } + mqtt.writeToStream.cacheNumbers = false + + mqtt.writeToStream(message, stream) + + t.deepEqual(written, expected, 'written buffer is expected') + + mqtt.writeToStream.cacheNumbers = true + + stream.end() + t.end() +}) + +testGenerateError('Unknown command', {}) + +testParseError('Not supported', Buffer.from([0, 1, 0]), {}) + +// Length header field +testParseError('Invalid variable byte integer', Buffer.from( + [16, 255, 255, 255, 255] +), {}) +testParseError('Invalid variable byte integer', Buffer.from( + [16, 255, 255, 255, 128] +), {}) +testParseError('Invalid variable byte integer', Buffer.from( + [16, 255, 255, 255, 255, 1] +), {}) +testParseError('Invalid variable byte integer', Buffer.from( + [16, 255, 255, 255, 255, 127] +), {}) +testParseError('Invalid variable byte integer', Buffer.from( + [16, 255, 255, 255, 255, 128] +), {}) +testParseError('Invalid variable byte integer', Buffer.from( + [16, 255, 255, 255, 255, 255, 1] +), {}) + +testParseGenerate('minimal connect', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 18, + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: false, + keepalive: 30, + clientId: 'test' +}, Buffer.from([ + 16, 18, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 0, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116 // Client ID +])) + +testGenerateOnly('minimal connect with clientId as Buffer', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 18, + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: false, + keepalive: 30, + clientId: Buffer.from('test') +}, Buffer.from([ + 16, 18, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 0, // Connect flags + 0, 
30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116 // Client ID +])) + +testParseGenerate('connect MQTT bridge 131', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 18, + protocolId: 'MQIsdp', + protocolVersion: 3, + bridgeMode: true, + clean: false, + keepalive: 30, + clientId: 'test' +}, Buffer.from([ + 16, 18, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 131, // Protocol version + 0, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116 // Client ID +])) + +testParseGenerate('connect MQTT bridge 132', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 18, + protocolId: 'MQIsdp', + protocolVersion: 4, + bridgeMode: true, + clean: false, + keepalive: 30, + clientId: 'test' +}, Buffer.from([ + 16, 18, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 132, // Protocol version + 0, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116 // Client ID +])) + +testParseGenerate('connect MQTT 5', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 125, + protocolId: 'MQTT', + protocolVersion: 5, + will: { + retain: true, + qos: 2, + properties: { + willDelayInterval: 1234, + payloadFormatIndicator: false, + messageExpiryInterval: 4321, + contentType: 'test', + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + test: 'test' + } + }, + topic: 'topic', + payload: Buffer.from([4, 3, 2, 1]) + }, + clean: true, + keepalive: 30, + properties: { + sessionExpiryInterval: 1234, + receiveMaximum: 432, + maximumPacketSize: 100, + topicAliasMaximum: 456, + requestResponseInformation: true, + requestProblemInformation: true, + userProperties: { + test: 'test' + }, + authenticationMethod: 'test', + authenticationData: Buffer.from([1, 2, 3, 4]) + }, + clientId: 'test' +}, Buffer.from([ + 16, 125, // Header + 0, 4, // Protocol ID length + 77, 81, 84, 84, // Protocol ID + 5, // Protocol version + 54, // Connect flags + 0, 30, // Keepalive + 47, // properties length + 17, 0, 0, 4, 210, // sessionExpiryInterval + 33, 1, 176, // receiveMaximum + 39, 0, 0, 0, 100, // maximumPacketSize + 34, 1, 200, // topicAliasMaximum + 25, 1, // requestResponseInformation + 23, 1, // requestProblemInformation, + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties, + 21, 0, 4, 116, 101, 115, 116, // authenticationMethod + 22, 0, 4, 1, 2, 3, 4, // authenticationData + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 47, // will properties + 24, 0, 0, 4, 210, // will delay interval + 1, 0, // payload format indicator + 2, 0, 0, 16, 225, // message expiry interval + 3, 0, 4, 116, 101, 115, 116, // content type + 8, 0, 5, 116, 111, 112, 105, 99, // response topic + 9, 0, 4, 1, 2, 3, 4, // corelation data + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // user properties + 0, 5, // Will topic length + 116, 111, 112, 105, 99, // Will topic + 0, 4, // Will payload length + 4, 3, 2, 1// Will payload +])) + +testParseGenerate('connect MQTT 5 with will properties but with empty will payload', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 121, + protocolId: 'MQTT', + protocolVersion: 5, + will: { + retain: true, + qos: 2, + properties: { + willDelayInterval: 1234, + payloadFormatIndicator: false, + messageExpiryInterval: 4321, + contentType: 'test', + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 
4]), + userProperties: { + test: 'test' + } + }, + topic: 'topic', + payload: Buffer.from([]) + }, + clean: true, + keepalive: 30, + properties: { + sessionExpiryInterval: 1234, + receiveMaximum: 432, + maximumPacketSize: 100, + topicAliasMaximum: 456, + requestResponseInformation: true, + requestProblemInformation: true, + userProperties: { + test: 'test' + }, + authenticationMethod: 'test', + authenticationData: Buffer.from([1, 2, 3, 4]) + }, + clientId: 'test' +}, Buffer.from([ + 16, 121, // Header + 0, 4, // Protocol ID length + 77, 81, 84, 84, // Protocol ID + 5, // Protocol version + 54, // Connect flags + 0, 30, // Keepalive + 47, // properties length + 17, 0, 0, 4, 210, // sessionExpiryInterval + 33, 1, 176, // receiveMaximum + 39, 0, 0, 0, 100, // maximumPacketSize + 34, 1, 200, // topicAliasMaximum + 25, 1, // requestResponseInformation + 23, 1, // requestProblemInformation, + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties, + 21, 0, 4, 116, 101, 115, 116, // authenticationMethod + 22, 0, 4, 1, 2, 3, 4, // authenticationData + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 47, // will properties + 24, 0, 0, 4, 210, // will delay interval + 1, 0, // payload format indicator + 2, 0, 0, 16, 225, // message expiry interval + 3, 0, 4, 116, 101, 115, 116, // content type + 8, 0, 5, 116, 111, 112, 105, 99, // response topic + 9, 0, 4, 1, 2, 3, 4, // corelation data + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // user properties + 0, 5, // Will topic length + 116, 111, 112, 105, 99, // Will topic + 0, 0 // Will payload length +])) + +testParseGenerate('connect MQTT 5 w/o will properties', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 78, + protocolId: 'MQTT', + protocolVersion: 5, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: Buffer.from([4, 3, 2, 1]) + }, + clean: true, + keepalive: 30, + properties: { + sessionExpiryInterval: 1234, + receiveMaximum: 432, + maximumPacketSize: 100, + topicAliasMaximum: 456, + requestResponseInformation: true, + requestProblemInformation: true, + userProperties: { + test: 'test' + }, + authenticationMethod: 'test', + authenticationData: Buffer.from([1, 2, 3, 4]) + }, + clientId: 'test' +}, Buffer.from([ + 16, 78, // Header + 0, 4, // Protocol ID length + 77, 81, 84, 84, // Protocol ID + 5, // Protocol version + 54, // Connect flags + 0, 30, // Keepalive + 47, // properties length + 17, 0, 0, 4, 210, // sessionExpiryInterval + 33, 1, 176, // receiveMaximum + 39, 0, 0, 0, 100, // maximumPacketSize + 34, 1, 200, // topicAliasMaximum + 25, 1, // requestResponseInformation + 23, 1, // requestProblemInformation, + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties, + 21, 0, 4, 116, 101, 115, 116, // authenticationMethod + 22, 0, 4, 1, 2, 3, 4, // authenticationData + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, // will properties + 0, 5, // Will topic length + 116, 111, 112, 105, 99, // Will topic + 0, 4, // Will payload length + 4, 3, 2, 1// Will payload +])) + +testParseGenerate('no clientId with 3.1.1', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 12, + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + keepalive: 30, + clientId: '' +}, Buffer.from([ + 16, 12, // Header + 0, 4, // Protocol ID length + 77, 81, 84, 84, // Protocol ID + 4, // Protocol version + 2, // Connect flags + 0, 30, // Keepalive + 0, 0 // Client ID length +])) + +testParseGenerateDefaults('no clientId with 5.0', { 
+ cmd: 'connect', + protocolId: 'MQTT', + protocolVersion: 5, + clean: true, + keepalive: 60, + properties: + { + receiveMaximum: 20 + }, + clientId: '' +}, Buffer.from( + [16, 16, 0, 4, 77, 81, 84, 84, 5, 2, 0, 60, 3, 33, 0, 20, 0, 0] +), { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 16, + topic: null, + payload: null, + protocolId: 'MQTT', + protocolVersion: 5, + clean: true, + keepalive: 60, + properties: { + receiveMaximum: 20 + }, + clientId: '' +}, { protocolVersion: 5 }) + +testParseGenerateDefaults('utf-8 clientId with 5.0', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 23, + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + keepalive: 30, + clientId: 'Ŧėśt🜄' +}, Buffer.from([ + 16, 23, // Header + 0, 4, // Protocol ID length + 77, 81, 84, 84, // Protocol ID + 4, // Protocol version + 2, // Connect flags + 0, 30, // Keepalive + 0, 11, // Client ID length + 197, 166, // Ŧ (UTF-8: 0xc5a6) + 196, 151, // ė (UTF-8: 0xc497) + 197, 155, // ś (utf-8: 0xc59b) + 116, // t (utf-8: 0x74) + 240, 159, 156, 132 // 🜄 (utf-8: 0xf09f9c84) +]), { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 23, + topic: null, + payload: null, + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + keepalive: 30, + clientId: 'Ŧėśt🜄' +}, { protocol: 5 }) + +testParseGenerateDefaults('default connect', { + cmd: 'connect', + clientId: 'test' +}, Buffer.from([ + 16, 16, 0, 4, 77, 81, 84, + 84, 4, 2, 0, 0, + 0, 4, 116, 101, 115, 116 +]), { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 16, + topic: null, + payload: null, + protocolId: 'MQTT', + protocolVersion: 4, + clean: true, + keepalive: 0, + clientId: 'test' +}) + +testParseAndGenerate('Version 4 CONACK', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 2, + topic: null, + payload: null, + sessionPresent: false, + returnCode: 1 +}, Buffer.from([ + 32, 2, // Fixed Header (CONNACK, Remaining Length) + 0, 1 // Variable Header (Session not present, Connection Refused - unacceptable protocol version) +]), {}) // Default protocolVersion (4) + +testParseAndGenerate('Version 5 CONACK', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 3, + topic: null, + payload: null, + sessionPresent: false, + reasonCode: 140 +}, Buffer.from([ + 32, 3, // Fixed Header (CONNACK, Remaining Length) + 0, 140, // Variable Header (Session not present, Bad authentication method) + 0 // Property Length Zero +]), { protocolVersion: 5 }) + +testParseOnly('Version 4 CONACK in Version 5 mode', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 2, + topic: null, + payload: null, + sessionPresent: false, + reasonCode: 1 // a version 4 return code stored in the version 5 reasonCode because this client is in version 5 +}, Buffer.from([ + 32, 2, // Fixed Header (CONNACK, Remaining Length) + 0, 1 // Variable Header (Session not present, Connection Refused - unacceptable protocol version) +]), { protocolVersion: 5 }) // message is in version 4 format, but this client is in version 5 mode + +testParseOnly('Version 5 PUBACK test 1', { + cmd: 'puback', + messageId: 42, + retain: false, + qos: 0, + dup: false, + length: 2, + topic: null, + payload: null, + reasonCode: 0 +}, Buffer.from([ + 64, 2, // Fixed Header (PUBACK, Remaining Length) + 0, 42 // Variable Header (2 Bytes: Packet Identifier 42, Implied Reason code: Success, Implied no properties) +]), { protocolVersion: 5 } +) + +testParseAndGenerate('Version 5 PUBACK test 2', { + cmd: 
'puback', + messageId: 42, + retain: false, + qos: 0, + dup: false, + length: 3, + topic: null, + payload: null, + reasonCode: 0 +}, Buffer.from([ + 64, 3, // Fixed Header (PUBACK, Remaining Length) + 0, 42, 0 // Variable Header (2 Bytes: Packet Identifier 42, Reason code: 0 Success, Implied no properties) +]), { protocolVersion: 5 } +) + +testParseOnly('Version 5 PUBACK test 3', { + cmd: 'puback', + messageId: 42, + retain: false, + qos: 0, + dup: false, + length: 4, + topic: null, + payload: null, + reasonCode: 0 +}, Buffer.from([ + 64, 4, // Fixed Header (PUBACK, Remaining Length) + 0, 42, 0, // Variable Header (2 Bytes: Packet Identifier 42, Reason code: 0 Success) + 0 // no properties +]), { protocolVersion: 5 } +) + +testParseOnly('Version 5 CONNACK test 1', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 1, + topic: null, + payload: null, + sessionPresent: true, + reasonCode: 0 +}, Buffer.from([ + 32, 1, // Fixed Header (CONNACK, Remaining Length) + 1 // Variable Header (Session Present: 1 => true, Implied Reason code: Success, Implied no properties) +]), { protocolVersion: 5 } +) + +testParseOnly('Version 5 CONNACK test 2', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 2, + topic: null, + payload: null, + sessionPresent: true, + reasonCode: 0 +}, Buffer.from([ + 32, 2, // Fixed Header (CONNACK, Remaining Length) + 1, 0 // Variable Header (Session Present: 1 => true, Connect Reason code: Success, Implied no properties) +]), { protocolVersion: 5 } +) + +testParseAndGenerate('Version 5 CONNACK test 3', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 3, + topic: null, + payload: null, + sessionPresent: true, + reasonCode: 0 +}, Buffer.from([ + 32, 3, // Fixed Header (CONNACK, Remaining Length) + 1, 0, // Variable Header (Session Present: 1 => true, Connect Reason code: Success) + 0 // no properties +]), { protocolVersion: 5 } +) + +testParseOnly('Version 5 DISCONNECT test 1', { + cmd: 'disconnect', + retain: false, + qos: 0, + dup: false, + length: 0, + topic: null, + payload: null, + reasonCode: 0 +}, Buffer.from([ + 224, 0 // Fixed Header (DISCONNECT, Remaining Length), Implied Reason code: Normal Disconnection +]), { protocolVersion: 5 } +) + +testParseOnly('Version 5 DISCONNECT test 2', { + cmd: 'disconnect', + retain: false, + qos: 0, + dup: false, + length: 1, + topic: null, + payload: null, + reasonCode: 0 +}, Buffer.from([ + 224, 1, // Fixed Header (DISCONNECT, Remaining Length) + 0 // reason Code (Normal disconnection) +]), { protocolVersion: 5 } +) + +testParseAndGenerate('Version 5 DISCONNECT test 3', { + cmd: 'disconnect', + retain: false, + qos: 0, + dup: false, + length: 2, + topic: null, + payload: null, + reasonCode: 0 +}, Buffer.from([ + 224, 2, // Fixed Header (DISCONNECT, Remaining Length) + 0, // reason Code (Normal disconnection) + 0 // no properties +]), { protocolVersion: 5 } +) + +testParseGenerate('empty will payload', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 47, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: Buffer.alloc(0) + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: Buffer.from('password') +}, Buffer.from([ + 16, 47, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, 5, // Will 
topic length + 116, 111, 112, 105, 99, // Will topic + 0, 0, // Will payload length + // Will payload + 0, 8, // Username length + 117, 115, 101, 114, 110, 97, 109, 101, // Username + 0, 8, // Password length + 112, 97, 115, 115, 119, 111, 114, 100 // Password +])) + +testParseGenerate('empty buffer username payload', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 20, + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: true, + keepalive: 30, + clientId: 'test', + username: Buffer.from('') +}, Buffer.from([ + 16, 20, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 130, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, 0 // Username length + // Empty Username payload +])) + +testParseGenerate('empty string username payload', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 20, + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: true, + keepalive: 30, + clientId: 'test', + username: '' +}, Buffer.from([ + 16, 20, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 130, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, 0 // Username length + // Empty Username payload +])) + +testParseGenerate('empty buffer password payload', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 30, + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: Buffer.from('') +}, Buffer.from([ + 16, 30, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 194, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, 8, // Username length + 117, 115, 101, 114, 110, 97, 109, 101, // Username payload + 0, 0 // Password length + // Empty password payload +])) + +testParseGenerate('empty string password payload', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 30, + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: '' +}, Buffer.from([ + 16, 30, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 194, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, 8, // Username length + 117, 115, 101, 114, 110, 97, 109, 101, // Username payload + 0, 0 // Password length + // Empty password payload +])) + +testParseGenerate('empty string username and password payload', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 22, + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: true, + keepalive: 30, + clientId: 'test', + username: '', + password: Buffer.from('') +}, Buffer.from([ + 16, 22, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 194, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, 0, // Username length + // Empty Username payload + 0, 0 // Password length + // Empty password payload +])) + +testParseGenerate('maximal connect', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 
'topic', + payload: Buffer.from('payload') + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: Buffer.from('password') +}, Buffer.from([ + 16, 54, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, 5, // Will topic length + 116, 111, 112, 105, 99, // Will topic + 0, 7, // Will payload length + 112, 97, 121, 108, 111, 97, 100, // Will payload + 0, 8, // Username length + 117, 115, 101, 114, 110, 97, 109, 101, // Username + 0, 8, // Password length + 112, 97, 115, 115, 119, 111, 114, 100 // Password +])) + +testParseGenerate('max connect with special chars', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 57, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'tòpic', + payload: Buffer.from('pay£oad') + }, + clean: true, + keepalive: 30, + clientId: 'te$t', + username: 'u$ern4me', + password: Buffer.from('p4$$w0£d') +}, Buffer.from([ + 16, 57, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 36, 116, // Client ID + 0, 6, // Will topic length + 116, 195, 178, 112, 105, 99, // Will topic + 0, 8, // Will payload length + 112, 97, 121, 194, 163, 111, 97, 100, // Will payload + 0, 8, // Username length + 117, 36, 101, 114, 110, 52, 109, 101, // Username + 0, 9, // Password length + 112, 52, 36, 36, 119, 48, 194, 163, 100 // Password +])) + +testGenerateOnly('connect all strings generate', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: 'password' +}, Buffer.from([ + 16, 54, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246, // Connect flags + 0, 30, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116, // Client ID + 0, 5, // Will topic length + 116, 111, 112, 105, 99, // Will topic + 0, 7, // Will payload length + 112, 97, 121, 108, 111, 97, 100, // Will payload + 0, 8, // Username length + 117, 115, 101, 114, 110, 97, 109, 101, // Username + 0, 8, // Password length + 112, 97, 115, 115, 119, 111, 114, 100 // Password +])) + +testParseError('Cannot parse protocolId', Buffer.from([ + 16, 4, + 0, 6, + 77, 81 +])) + +// missing protocol version on connect +testParseError('Packet too short', Buffer.from([ + 16, 8, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112 // Protocol ID +])) + +// missing keepalive on connect +testParseError('Packet too short', Buffer.from([ + 16, 10, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246 // Connect flags +])) + +// missing clientid on connect +testParseError('Packet too short', Buffer.from([ + 16, 10, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246, // Connect flags + 0, 30 // Keepalive +])) + +// missing will topic on connect +testParseError('Cannot parse will topic', Buffer.from([ + 16, 16, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol 
version + 246, // Connect flags + 0, 30, // Keepalive + 0, 2, // Will topic length + 0, 0 // Will topic +])) + +// missing will payload on connect +testParseError('Cannot parse will payload', Buffer.from([ + 16, 23, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246, // Connect flags + 0, 30, // Keepalive + 0, 5, // Will topic length + 116, 111, 112, 105, 99, // Will topic + 0, 2, // Will payload length + 0, 0 // Will payload +])) + +// missing username on connect +testParseError('Cannot parse username', Buffer.from([ + 16, 32, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246, // Connect flags + 0, 30, // Keepalive + 0, 5, // Will topic length + 116, 111, 112, 105, 99, // Will topic + 0, 7, // Will payload length + 112, 97, 121, 108, 111, 97, 100, // Will payload + 0, 2, // Username length + 0, 0 // Username +])) + +// missing password on connect +testParseError('Cannot parse password', Buffer.from([ + 16, 42, // Header + 0, 6, // Protocol ID length + 77, 81, 73, 115, 100, 112, // Protocol ID + 3, // Protocol version + 246, // Connect flags + 0, 30, // Keepalive + 0, 5, // Will topic length + 116, 111, 112, 105, 99, // Will topic + 0, 7, // Will payload length + 112, 97, 121, 108, 111, 97, 100, // Will payload + 0, 8, // Username length + 117, 115, 101, 114, 110, 97, 109, 101, // Username + 0, 2, // Password length + 0, 0 // Password +])) + +testParseGenerate('connack with return code 0', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 2, + sessionPresent: false, + returnCode: 0 +}, Buffer.from([ + 32, 2, 0, 0 +])) + +testParseGenerate('connack MQTT 5 with properties', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 87, + sessionPresent: false, + reasonCode: 0, + properties: { + sessionExpiryInterval: 1234, + receiveMaximum: 432, + maximumQoS: 2, + retainAvailable: true, + maximumPacketSize: 100, + assignedClientIdentifier: 'test', + topicAliasMaximum: 456, + reasonString: 'test', + userProperties: { + test: 'test' + }, + wildcardSubscriptionAvailable: true, + subscriptionIdentifiersAvailable: true, + sharedSubscriptionAvailable: false, + serverKeepAlive: 1234, + responseInformation: 'test', + serverReference: 'test', + authenticationMethod: 'test', + authenticationData: Buffer.from([1, 2, 3, 4]) + } +}, Buffer.from([ + 32, 87, 0, 0, + 84, // properties length + 17, 0, 0, 4, 210, // sessionExpiryInterval + 33, 1, 176, // receiveMaximum + 36, 2, // Maximum qos + 37, 1, // retainAvailable + 39, 0, 0, 0, 100, // maximumPacketSize + 18, 0, 4, 116, 101, 115, 116, // assignedClientIdentifier + 34, 1, 200, // topicAliasMaximum + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 40, 1, // wildcardSubscriptionAvailable + 41, 1, // subscriptionIdentifiersAvailable + 42, 0, // sharedSubscriptionAvailable + 19, 4, 210, // serverKeepAlive + 26, 0, 4, 116, 101, 115, 116, // responseInformation + 28, 0, 4, 116, 101, 115, 116, // serverReference + 21, 0, 4, 116, 101, 115, 116, // authenticationMethod + 22, 0, 4, 1, 2, 3, 4 // authenticationData +]), { protocolVersion: 5 }) + +testParseGenerate('connack MQTT 5 with properties and doubled user properties', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 100, + sessionPresent: false, + reasonCode: 0, + properties: { + sessionExpiryInterval: 1234, + receiveMaximum: 432, + maximumQoS: 2, + 
retainAvailable: true, + maximumPacketSize: 100, + assignedClientIdentifier: 'test', + topicAliasMaximum: 456, + reasonString: 'test', + userProperties: { + test: ['test', 'test'] + }, + wildcardSubscriptionAvailable: true, + subscriptionIdentifiersAvailable: true, + sharedSubscriptionAvailable: false, + serverKeepAlive: 1234, + responseInformation: 'test', + serverReference: 'test', + authenticationMethod: 'test', + authenticationData: Buffer.from([1, 2, 3, 4]) + } +}, Buffer.from([ + 32, 100, 0, 0, + 97, // properties length + 17, 0, 0, 4, 210, // sessionExpiryInterval + 33, 1, 176, // receiveMaximum + 36, 2, // Maximum qos + 37, 1, // retainAvailable + 39, 0, 0, 0, 100, // maximumPacketSize + 18, 0, 4, 116, 101, 115, 116, // assignedClientIdentifier + 34, 1, 200, // topicAliasMaximum + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 40, 1, // wildcardSubscriptionAvailable + 41, 1, // subscriptionIdentifiersAvailable + 42, 0, // sharedSubscriptionAvailable + 19, 4, 210, // serverKeepAlive + 26, 0, 4, 116, 101, 115, 116, // responseInformation + 28, 0, 4, 116, 101, 115, 116, // serverReference + 21, 0, 4, 116, 101, 115, 116, // authenticationMethod + 22, 0, 4, 1, 2, 3, 4 // authenticationData +]), { protocolVersion: 5 }) + +testParseGenerate('connack with return code 0 session present bit set', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 2, + sessionPresent: true, + returnCode: 0 +}, Buffer.from([ + 32, 2, 1, 0 +])) + +testParseGenerate('connack with return code 5', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 2, + sessionPresent: false, + returnCode: 5 +}, Buffer.from([ + 32, 2, 0, 5 +])) + +testGenerateError('Invalid return code', { + cmd: 'connack', + retain: false, + qos: 0, + dup: false, + length: 2, + sessionPresent: false, + returnCode: '5' // returncode must be a number +}) + +testParseGenerate('minimal publish', { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: 10, + topic: 'test', + payload: Buffer.from('test') +}, Buffer.from([ + 48, 10, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 116, 101, 115, 116 // Payload (test) +])) + +testParseGenerate('publish MQTT 5 properties', { + cmd: 'publish', + retain: true, + qos: 2, + dup: true, + length: 86, + topic: 'test', + payload: Buffer.from('test'), + messageId: 10, + properties: { + payloadFormatIndicator: true, + messageExpiryInterval: 4321, + topicAlias: 100, + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + test: ['test', 'test', 'test'] + }, + subscriptionIdentifier: 120, + contentType: 'test' + } +}, Buffer.from([ + 61, 86, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 0, 10, // Message ID + 73, // properties length + 1, 1, // payloadFormatIndicator + 2, 0, 0, 16, 225, // message expiry interval + 35, 0, 100, // topicAlias + 8, 0, 5, 116, 111, 112, 105, 99, // response topic + 9, 0, 4, 1, 2, 3, 4, // correlationData + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 11, 120, // subscriptionIdentifier + 3, 0, 4, 116, 101, 115, 116, // content type + 116, 101, 115, 116 // Payload (test) +]), { protocolVersion: 5 }) + +testParseGenerate('publish MQTT 5 
with multiple same properties', { + cmd: 'publish', + retain: true, + qos: 2, + dup: true, + length: 64, + topic: 'test', + payload: Buffer.from('test'), + messageId: 10, + properties: { + payloadFormatIndicator: true, + messageExpiryInterval: 4321, + topicAlias: 100, + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + test: 'test' + }, + subscriptionIdentifier: [120, 121, 122], + contentType: 'test' + } +}, Buffer.from([ + 61, 64, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 0, 10, // Message ID + 51, // properties length + 1, 1, // payloadFormatIndicator + 2, 0, 0, 16, 225, // message expiry interval + 35, 0, 100, // topicAlias + 8, 0, 5, 116, 111, 112, 105, 99, // response topic + 9, 0, 4, 1, 2, 3, 4, // correlationData + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 11, 120, // subscriptionIdentifier + 11, 121, // subscriptionIdentifier + 11, 122, // subscriptionIdentifier + 3, 0, 4, 116, 101, 115, 116, // content type + 116, 101, 115, 116 // Payload (test) +]), { protocolVersion: 5 }) + +testParseGenerate('publish MQTT 5 properties with 0-4 byte varbyte', { + cmd: 'publish', + retain: true, + qos: 2, + dup: true, + length: 27, + topic: 'test', + payload: Buffer.from('test'), + messageId: 10, + properties: { + payloadFormatIndicator: false, + subscriptionIdentifier: [128, 16384, 2097152] // this tests the varbyte handling + } +}, Buffer.from([ + 61, 27, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 0, 10, // Message ID + 14, // properties length + 1, 0, // payloadFormatIndicator + 11, 128, 1, // subscriptionIdentifier + 11, 128, 128, 1, // subscriptionIdentifier + 11, 128, 128, 128, 1, // subscriptionIdentifier + 116, 101, 115, 116 // Payload (test) +]), { protocolVersion: 5 }) + +testParseGenerate('publish MQTT 5 properties with max value varbyte', { + cmd: 'publish', + retain: true, + qos: 2, + dup: true, + length: 22, + topic: 'test', + payload: Buffer.from('test'), + messageId: 10, + properties: { + payloadFormatIndicator: false, + subscriptionIdentifier: [1, 268435455] + } +}, Buffer.from([ + 61, 22, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 0, 10, // Message ID + 9, // properties length + 1, 0, // payloadFormatIndicator + 11, 1, // subscriptionIdentifier + 11, 255, 255, 255, 127, // subscriptionIdentifier (max value) + 116, 101, 115, 116 // Payload (test) +]), { protocolVersion: 5 }) + +; (() => { + const buffer = Buffer.alloc(2048) + testParseGenerate('2KB publish packet', { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: 2054, + topic: 'test', + payload: buffer + }, Buffer.concat([Buffer.from([ + 48, 134, 16, // Header + 0, 4, // Topic length + 116, 101, 115, 116 // Topic (test) + ]), buffer])) +})() + +; (() => { + const maxLength = 268435455 + const buffer = Buffer.alloc(maxLength - 6) + testParseGenerate('Max payload publish packet', { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: maxLength, + topic: 'test', + payload: buffer + }, Buffer.concat([Buffer.from([ + 48, 255, 255, 255, 127, // Header + 0, 4, // Topic length + 116, 101, 115, 116 // Topic (test) + ]), buffer])) +})() + +testParseGenerate('maximal publish', { + cmd: 'publish', + retain: true, + qos: 2, + length: 12, + dup: true, + topic: 'test', + messageId: 10, + payload: Buffer.from('test') +}, Buffer.from([ + 61, 12, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic + 0, 10, // Message ID + 116, 
101, 115, 116 // Payload +])) + +test('publish all strings generate', t => { + const message = { + cmd: 'publish', + retain: true, + qos: 2, + length: 12, + dup: true, + topic: 'test', + messageId: 10, + payload: Buffer.from('test') + } + const expected = Buffer.from([ + 61, 12, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic + 0, 10, // Message ID + 116, 101, 115, 116 // Payload + ]) + + t.equal(mqtt.generate(message).toString('hex'), expected.toString('hex')) + t.end() +}) + +testParseGenerate('empty publish', { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: 6, + topic: 'test', + payload: Buffer.alloc(0) +}, Buffer.from([ + 48, 6, // Header + 0, 4, // Topic length + 116, 101, 115, 116 // Topic + // Empty payload +])) + +test('splitted publish parse', t => { + t.plan(3) + + const parser = mqtt.parser() + const expected = { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: 10, + topic: 'test', + payload: Buffer.from('test') + } + + parser.on('packet', packet => { + t.deepLooseEqual(packet, expected, 'expected packet') + }) + + t.equal(parser.parse(Buffer.from([ + 48, 10, // Header + 0, 4, // Topic length + 116, 101, 115, 116 // Topic (test) + ])), 6, 'remaining bytes') + + t.equal(parser.parse(Buffer.from([ + 116, 101, 115, 116 // Payload (test) + ])), 0, 'remaining bytes') +}) + +test('split publish longer', t => { + t.plan(3) + + const length = 255 + const topic = 'test' + // Minus two bytes for the topic length specifier + const payloadLength = length - topic.length - 2 + + const parser = mqtt.parser() + const expected = { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: length, + topic: topic, + payload: Buffer.from('a'.repeat(payloadLength)) + } + + parser.on('packet', packet => { + t.deepLooseEqual(packet, expected, 'expected packet') + }) + + t.equal(parser.parse(Buffer.from([ + 48, 255, 1, // Header + 0, topic.length, // Topic length + 116, 101, 115, 116 // Topic (test) + ])), 6, 'remaining bytes') + + t.equal(parser.parse(Buffer.from(Array(payloadLength).fill(97))), + 0, 'remaining bytes') +}) + +test('split length parse', t => { + t.plan(4) + + const length = 255 + const topic = 'test' + const payloadLength = length - topic.length - 2 + + const parser = mqtt.parser() + const expected = { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: length, + topic: topic, + payload: Buffer.from('a'.repeat(payloadLength)) + } + + parser.on('packet', packet => { + t.deepLooseEqual(packet, expected, 'expected packet') + }) + + t.equal(parser.parse(Buffer.from([ + 48, 255 // Header (partial length) + ])), 1, 'remaining bytes') + + t.equal(parser.parse(Buffer.from([ + 1, // Rest of header length + 0, topic.length, // Topic length + 116, 101, 115, 116 // Topic (test) + ])), 6, 'remaining bytes') + + t.equal(parser.parse(Buffer.from(Array(payloadLength).fill(97))), + 0, 'remaining bytes') +}) + +testGenerateError('Invalid variable byte integer: 268435456', { + cmd: 'publish', + retain: false, + qos: 0, + dup: false, + length: (268435455 + 1), + topic: 'test', + payload: Buffer.alloc(268435455 + 1 - 6) +}, {}, 'Length var byte integer over max allowed value throws error') + +testGenerateError('Invalid subscriptionIdentifier: 268435456', { + cmd: 'publish', + retain: true, + qos: 2, + dup: true, + length: 27, + topic: 'test', + payload: Buffer.from('test'), + messageId: 10, + properties: { + payloadFormatIndicator: false, + subscriptionIdentifier: 268435456 + } +}, { protocolVersion: 5 }, 'MQTT 5.0 var 
byte integer >24 bits throws error') + +testParseGenerate('puback', { + cmd: 'puback', + retain: false, + qos: 0, + dup: false, + length: 2, + messageId: 2 +}, Buffer.from([ + 64, 2, // Header + 0, 2 // Message ID +])) + +testParseGenerate('puback with reason and no MQTT 5 properties', { + cmd: 'puback', + retain: false, + qos: 0, + dup: false, + length: 3, + messageId: 2, + reasonCode: 16 +}, Buffer.from([ + 64, 3, // Header + 0, 2, // Message ID + 16 // reason code +]), { protocolVersion: 5 }) + +testParseGenerate('puback MQTT 5 properties', { + cmd: 'puback', + retain: false, + qos: 0, + dup: false, + length: 24, + messageId: 2, + reasonCode: 16, + properties: { + reasonString: 'test', + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 64, 24, // Header + 0, 2, // Message ID + 16, // reason code + 20, // properties length + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116 // userProperties +]), { protocolVersion: 5 }) + +testParseGenerate('pubrec', { + cmd: 'pubrec', + retain: false, + qos: 0, + dup: false, + length: 2, + messageId: 2 +}, Buffer.from([ + 80, 2, // Header + 0, 2 // Message ID +])) + +testParseGenerate('pubrec MQTT 5 properties', { + cmd: 'pubrec', + retain: false, + qos: 0, + dup: false, + length: 24, + messageId: 2, + reasonCode: 16, + properties: { + reasonString: 'test', + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 80, 24, // Header + 0, 2, // Message ID + 16, // reason code + 20, // properties length + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116 // userProperties +]), { protocolVersion: 5 }) + +testParseGenerate('pubrel', { + cmd: 'pubrel', + retain: false, + qos: 1, + dup: false, + length: 2, + messageId: 2 +}, Buffer.from([ + 98, 2, // Header + 0, 2 // Message ID +])) + +testParseGenerate('pubrel MQTT5 properties', { + cmd: 'pubrel', + retain: false, + qos: 1, + dup: false, + length: 24, + messageId: 2, + reasonCode: 16, + properties: { + reasonString: 'test', + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 98, 24, // Header + 0, 2, // Message ID + 16, // reason code + 20, // properties length + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116 // userProperties +]), { protocolVersion: 5 }) + +testParseGenerate('pubcomp', { + cmd: 'pubcomp', + retain: false, + qos: 0, + dup: false, + length: 2, + messageId: 2 +}, Buffer.from([ + 112, 2, // Header + 0, 2 // Message ID +])) + +testParseGenerate('pubcomp MQTT 5 properties', { + cmd: 'pubcomp', + retain: false, + qos: 0, + dup: false, + length: 24, + messageId: 2, + reasonCode: 16, + properties: { + reasonString: 'test', + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 112, 24, // Header + 0, 2, // Message ID + 16, // reason code + 20, // properties length + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116 // userProperties +]), { protocolVersion: 5 }) + +testParseError('Wrong subscribe header', Buffer.from([ + 128, 9, // Header (subscribeqos=0length=9) + 0, 6, // Message ID (6) + 0, 4, // Topic length, + 116, 101, 115, 116, // Topic (test) + 0 // Qos (0) +])) + +testParseGenerate('subscribe to one topic', { + cmd: 'subscribe', + retain: false, + qos: 1, + dup: false, + length: 9, + subscriptions: [ + { + topic: 'test', + qos: 0 + } + ], + messageId: 6 +}, Buffer.from([ + 130, 9, // Header (subscribeqos=1length=9) + 0, 6, // Message ID (6) + 0, 
4, // Topic length, + 116, 101, 115, 116, // Topic (test) + 0 // Qos (0) +])) + +testParseGenerate('subscribe to one topic by MQTT 5', { + cmd: 'subscribe', + retain: false, + qos: 1, + dup: false, + length: 26, + subscriptions: [ + { + topic: 'test', + qos: 0, + nl: false, + rap: true, + rh: 1 + } + ], + messageId: 6, + properties: { + subscriptionIdentifier: 145, + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 130, 26, // Header (subscribeqos=1length=9) + 0, 6, // Message ID (6) + 16, // properties length + 11, 145, 1, // subscriptionIdentifier + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 0, 4, // Topic length, + 116, 101, 115, 116, // Topic (test) + 24 // settings(qos: 0, noLocal: false, Retain as Published: true, retain handling: 1) +]), { protocolVersion: 5 }) + +testParseGenerate('subscribe to three topics', { + cmd: 'subscribe', + retain: false, + qos: 1, + dup: false, + length: 23, + subscriptions: [ + { + topic: 'test', + qos: 0 + }, { + topic: 'uest', + qos: 1 + }, { + topic: 'tfst', + qos: 2 + } + ], + messageId: 6 +}, Buffer.from([ + 130, 23, // Header (publishqos=1length=9) + 0, 6, // Message ID (6) + 0, 4, // Topic length, + 116, 101, 115, 116, // Topic (test) + 0, // Qos (0) + 0, 4, // Topic length + 117, 101, 115, 116, // Topic (uest) + 1, // Qos (1) + 0, 4, // Topic length + 116, 102, 115, 116, // Topic (tfst) + 2 // Qos (2) +])) + +testParseGenerate('subscribe to 3 topics by MQTT 5', { + cmd: 'subscribe', + retain: false, + qos: 1, + dup: false, + length: 40, + subscriptions: [ + { + topic: 'test', + qos: 0, + nl: false, + rap: true, + rh: 1 + }, + { + topic: 'uest', + qos: 1, + nl: false, + rap: false, + rh: 0 + }, { + topic: 'tfst', + qos: 2, + nl: true, + rap: false, + rh: 0 + } + ], + messageId: 6, + properties: { + subscriptionIdentifier: 145, + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 130, 40, // Header (subscribeqos=1length=9) + 0, 6, // Message ID (6) + 16, // properties length + 11, 145, 1, // subscriptionIdentifier + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 0, 4, // Topic length, + 116, 101, 115, 116, // Topic (test) + 24, // settings(qos: 0, noLocal: false, Retain as Published: true, retain handling: 1) + 0, 4, // Topic length + 117, 101, 115, 116, // Topic (uest) + 1, // Qos (1) + 0, 4, // Topic length + 116, 102, 115, 116, // Topic (tfst) + 6 // Qos (2), No Local: true +]), { protocolVersion: 5 }) + +testParseGenerate('suback', { + cmd: 'suback', + retain: false, + qos: 0, + dup: false, + length: 6, + granted: [0, 1, 2, 128], + messageId: 6 +}, Buffer.from([ + 144, 6, // Header + 0, 6, // Message ID + 0, 1, 2, 128 // Granted qos (0, 1, 2) and a rejected being 0x80 +])) + +testParseGenerate('suback MQTT 5', { + cmd: 'suback', + retain: false, + qos: 0, + dup: false, + length: 27, + granted: [0, 1, 2, 128], + messageId: 6, + properties: { + reasonString: 'test', + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 144, 27, // Header + 0, 6, // Message ID + 20, // properties length + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 0, 1, 2, 128 // Granted qos (0, 1, 2) and a rejected being 0x80 +]), { protocolVersion: 5 }) + +testParseGenerate('unsubscribe', { + cmd: 'unsubscribe', + retain: false, + qos: 1, + dup: false, + length: 14, + unsubscriptions: [ + 'tfst', + 'test' + ], + messageId: 7 +}, Buffer.from([ + 162, 14, + 0, 7, // Message ID (7) + 0, 4, // Topic length + 
116, 102, 115, 116, // Topic (tfst) + 0, 4, // Topic length, + 116, 101, 115, 116 // Topic (test) +])) + +testGenerateError('Invalid unsubscriptions', { + cmd: 'unsubscribe', + retain: false, + qos: 1, + dup: true, + length: 5, + unsubscriptions: 5, + messageId: 7 +}, {}, 'unsubscribe with unsubscriptions not an array') + +testGenerateError('Invalid unsubscriptions', { + cmd: 'unsubscribe', + retain: false, + qos: 1, + dup: true, + length: 5, + unsubscriptions: [1, 2], + messageId: 7 +}, {}, 'unsubscribe with unsubscriptions as an object') + +testParseGenerate('unsubscribe MQTT 5', { + cmd: 'unsubscribe', + retain: false, + qos: 1, + dup: false, + length: 28, + unsubscriptions: [ + 'tfst', + 'test' + ], + messageId: 7, + properties: { + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 162, 28, + 0, 7, // Message ID (7) + 13, // properties length + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 0, 4, // Topic length + 116, 102, 115, 116, // Topic (tfst) + 0, 4, // Topic length, + 116, 101, 115, 116 // Topic (test) +]), { protocolVersion: 5 }) + +testParseGenerate('unsuback', { + cmd: 'unsuback', + retain: false, + qos: 0, + dup: false, + length: 2, + messageId: 8 +}, Buffer.from([ + 176, 2, // Header + 0, 8 // Message ID +])) + +testParseGenerate('unsuback MQTT 5', { + cmd: 'unsuback', + retain: false, + qos: 0, + dup: false, + length: 25, + messageId: 8, + properties: { + reasonString: 'test', + userProperties: { + test: 'test' + } + }, + granted: [0, 128] +}, Buffer.from([ + 176, 25, // Header + 0, 8, // Message ID + 20, // properties length + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 0, 128 // success and error +]), { protocolVersion: 5 }) + +testParseGenerate('pingreq', { + cmd: 'pingreq', + retain: false, + qos: 0, + dup: false, + length: 0 +}, Buffer.from([ + 192, 0 // Header +])) + +testParseGenerate('pingresp', { + cmd: 'pingresp', + retain: false, + qos: 0, + dup: false, + length: 0 +}, Buffer.from([ + 208, 0 // Header +])) + +testParseGenerate('disconnect', { + cmd: 'disconnect', + retain: false, + qos: 0, + dup: false, + length: 0 +}, Buffer.from([ + 224, 0 // Header +])) + +testParseGenerate('disconnect MQTT 5', { + cmd: 'disconnect', + retain: false, + qos: 0, + dup: false, + length: 34, + reasonCode: 0, + properties: { + sessionExpiryInterval: 145, + reasonString: 'test', + userProperties: { + test: 'test' + }, + serverReference: 'test' + } +}, Buffer.from([ + 224, 34, // Header + 0, // reason code + 32, // properties length + 17, 0, 0, 0, 145, // sessionExpiryInterval + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 28, 0, 4, 116, 101, 115, 116// serverReference +]), { protocolVersion: 5 }) + +testParseGenerate('disconnect MQTT 5 with no properties', { + cmd: 'disconnect', + retain: false, + qos: 0, + dup: false, + length: 2, + reasonCode: 0 +}, Buffer.from([ + 224, 2, // Fixed Header (DISCONNECT, Remaining Length) + 0, // Reason Code (Normal Disconnection) + 0 // Property Length (0 => No Properties) +]), { protocolVersion: 5 }) + +testParseGenerate('auth MQTT 5', { + cmd: 'auth', + retain: false, + qos: 0, + dup: false, + length: 36, + reasonCode: 0, + properties: { + authenticationMethod: 'test', + authenticationData: Buffer.from([0, 1, 2, 3]), + reasonString: 'test', + userProperties: { + test: 'test' + } + } +}, Buffer.from([ + 240, 36, // Header + 0, // reason code + 34, // 
properties length + 21, 0, 4, 116, 101, 115, 116, // auth method + 22, 0, 4, 0, 1, 2, 3, // auth data + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116 // userProperties +]), { protocolVersion: 5 }) + +testGenerateError('Invalid protocolId', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 42, + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: 'password' +}) + +testGenerateError('Invalid protocol version', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 1, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: 'password' +}) + +testGenerateError('clientId must be supplied before 3.1.1', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: true, + keepalive: 30, + username: 'username', + password: 'password' +}) + +testGenerateError('clientId must be given if cleanSession set to 0', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQTT', + protocolVersion: 4, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: false, + keepalive: 30, + username: 'username', + password: 'password' +}) + +testGenerateError('Invalid keepalive', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: true, + keepalive: 'hello', + clientId: 'test', + username: 'username', + password: 'password' +}) + +testGenerateError('Invalid keepalive', { + cmd: 'connect', + keepalive: 3.1416 +}) + +testGenerateError('Invalid will', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: 42, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: 'password' +}) + +testGenerateError('Invalid will topic', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + payload: 'payload' + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: 'password' +}) + +testGenerateError('Invalid will payload', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 42 + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: 'password' +}) + +testGenerateError('Invalid username', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 42, + password: 'password' +}) + +testGenerateError('Invalid password', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: 
true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: true, + keepalive: 30, + clientId: 'test', + username: 'username', + password: 42 +}) + +testGenerateError('Username is required to use password', { + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 54, + protocolId: 'MQIsdp', + protocolVersion: 3, + will: { + retain: true, + qos: 2, + topic: 'topic', + payload: 'payload' + }, + clean: true, + keepalive: 30, + clientId: 'test', + password: 'password' +}) + +testGenerateError('Invalid messageExpiryInterval: -4321', { + cmd: 'publish', + retain: true, + qos: 2, + dup: true, + length: 60, + topic: 'test', + payload: Buffer.from('test'), + messageId: 10, + properties: { + payloadFormatIndicator: true, + messageExpiryInterval: -4321, + topicAlias: 100, + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + test: 'test' + }, + subscriptionIdentifier: 120, + contentType: 'test' + } +}, { protocolVersion: 5 }) + +testGenerateError('Invalid topicAlias: -100', { + cmd: 'publish', + retain: true, + qos: 2, + dup: true, + length: 60, + topic: 'test', + payload: Buffer.from('test'), + messageId: 10, + properties: { + payloadFormatIndicator: true, + messageExpiryInterval: 4321, + topicAlias: -100, + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + test: 'test' + }, + subscriptionIdentifier: 120, + contentType: 'test' + } +}, { protocolVersion: 5 }) + +testGenerateError('Invalid subscriptionIdentifier: -120', { + cmd: 'publish', + retain: true, + qos: 2, + dup: true, + length: 60, + topic: 'test', + payload: Buffer.from('test'), + messageId: 10, + properties: { + payloadFormatIndicator: true, + messageExpiryInterval: 4321, + topicAlias: 100, + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + test: 'test' + }, + subscriptionIdentifier: -120, + contentType: 'test' + } +}, { protocolVersion: 5 }) + +test('support cork', t => { + t.plan(9) + + const dest = WS() + + dest._write = (chunk, enc, cb) => { + t.pass('_write called') + cb() + } + + mqtt.writeToStream({ + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 18, + protocolId: 'MQIsdp', + protocolVersion: 3, + clean: false, + keepalive: 30, + clientId: 'test' + }, dest) + + dest.end() +}) + +// The following test case was designed after experiencing errors +// when trying to connect with tls on a non tls mqtt port +// the specific behaviour is: +// - first byte suggests this is a connect message +// - second byte suggests message length to be smaller than buffer length +// thus payload processing starts +// - the first two bytes suggest a protocol identifier string length +// that leads the parser pointer close to the end of the buffer +// - when trying to read further connect flags the buffer produces +// a "out of range" Error +// +testParseError('Packet too short', Buffer.from([ + 16, 9, + 0, 6, + 77, 81, 73, 115, 100, 112, + 3 +])) + +// CONNECT Packets that show other protocol IDs than +// the valid values MQTT and MQIsdp should cause an error +// those packets are a hint that this is not a mqtt connection +testParseError('Invalid protocolId', Buffer.from([ + 16, 18, + 0, 6, + 65, 65, 65, 65, 65, 65, // AAAAAA + 3, // Protocol version + 0, // Connect flags + 0, 10, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116 // Client ID +])) + +// CONNECT Packets that contain an unsupported protocol version +// Flag (i.e. 
not `3` or `4` or '5') should cause an error +testParseError('Invalid protocol version', Buffer.from([ + 16, 18, + 0, 6, + 77, 81, 73, 115, 100, 112, // Protocol ID + 1, // Protocol version + 0, // Connect flags + 0, 10, // Keepalive + 0, 4, // Client ID length + 116, 101, 115, 116 // Client ID +])) + +// When a packet contains a string in the variable header and the +// given string length of this exceeds the overall length of the packet that +// was specified in the fixed header, parsing must fail. +// this case simulates this behavior with the protocol ID string of the +// CONNECT packet. The fixed header suggests a remaining length of 8 bytes +// which would be exceeded by the string length of 15 +// in this case, a protocol ID parse error is expected +testParseError('Cannot parse protocolId', Buffer.from([ + 16, 8, // Fixed header + 0, 15, // string length 15 --> 15 > 8 --> error! + 77, 81, 73, 115, 100, 112, + 77, 81, 73, 115, 100, 112, + 77, 81, 73, 115, 100, 112, + 77, 81, 73, 115, 100, 112, + 77, 81, 73, 115, 100, 112, + 77, 81, 73, 115, 100, 112, + 77, 81, 73, 115, 100, 112, + 77, 81, 73, 115, 100, 112 +])) + +testParseError('Unknown property', Buffer.from([ + 61, 60, // Header + 0, 4, // Topic length + 116, 101, 115, 116, // Topic (test) + 0, 10, // Message ID + 47, // properties length + 126, 1, // unknown property + 2, 0, 0, 16, 225, // message expiry interval + 35, 0, 100, // topicAlias + 8, 0, 5, 116, 111, 112, 105, 99, // response topic + 9, 0, 4, 1, 2, 3, 4, // correlationData + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116, // userProperties + 11, 120, // subscriptionIdentifier + 3, 0, 4, 116, 101, 115, 116, // content type + 116, 101, 115, 116 // Payload (test) +]), { protocolVersion: 5 }) + +testParseError('Not supported auth packet for this version MQTT', Buffer.from([ + 240, 36, // Header + 0, // reason code + 34, // properties length + 21, 0, 4, 116, 101, 115, 116, // auth method + 22, 0, 4, 0, 1, 2, 3, // auth data + 31, 0, 4, 116, 101, 115, 116, // reasonString + 38, 0, 4, 116, 101, 115, 116, 0, 4, 116, 101, 115, 116 // userProperties +])) + +// When a Subscribe packet contains a topic_filter and the given +// length is topic_filter.length + 1 then the last byte (requested QoS) is interpreted as topic_filter +// reading the requested_qos at the end causes 'Index out of range' read +testParseError('Malformed Subscribe Payload', Buffer.from([ + 130, 14, // subscribe header and remaining length + 0, 123, // packet ID + 0, 10, // topic filter length + 104, 105, 106, 107, 108, 47, 109, 110, 111, // topic filter with length of 9 bytes + 0 // requested QoS +])) + +test('Cannot parse property code type', t => { + const packets = Buffer.from([ + 16, 16, 0, 4, 77, 81, 84, 84, 5, 2, 0, 60, 3, 33, 0, 20, 0, 0, 98, 2, 211, 1, 224, 2, 0, 32 + ]) + + t.plan(3) + + const parser = mqtt.parser() + + parser.on('error', err => { + t.equal(err.message, 'Cannot parse property code type', 'expected error message') + t.end() + }) + + parser.on('packet', (packet) => { + t.pass('Packet parsed') + }) + + parser.parse(packets) +}) + +testWriteToStreamError('Invalid command', { + cmd: 'invalid' +}) + +testWriteToStreamError('Invalid protocolId', { + cmd: 'connect', + protocolId: {} +}) + +test('userProperties null prototype', t => { + t.plan(3) + + const packet = mqtt.generate({ + cmd: 'connect', + retain: false, + qos: 0, + dup: false, + length: 125, + protocolId: 'MQTT', + protocolVersion: 5, + will: { + retain: true, + qos: 2, + properties: { + willDelayInterval: 1234, + 
payloadFormatIndicator: false, + messageExpiryInterval: 4321, + contentType: 'test', + responseTopic: 'topic', + correlationData: Buffer.from([1, 2, 3, 4]), + userProperties: { + test: 'test' + } + }, + topic: 'topic', + payload: Buffer.from([4, 3, 2, 1]) + }, + clean: true, + keepalive: 30, + properties: { + sessionExpiryInterval: 1234, + receiveMaximum: 432, + maximumPacketSize: 100, + topicAliasMaximum: 456, + requestResponseInformation: true, + requestProblemInformation: true, + userProperties: { + test: 'test' + }, + authenticationMethod: 'test', + authenticationData: Buffer.from([1, 2, 3, 4]) + }, + clientId: 'test' + }) + + const parser = mqtt.parser() + + parser.on('packet', packet => { + t.equal(packet.cmd, 'connect') + t.equal(Object.getPrototypeOf(packet.properties.userProperties), null) + t.equal(Object.getPrototypeOf(packet.will.properties.userProperties), null) + }) + + parser.parse(packet) +}) + +test('stops parsing after first error', t => { + t.plan(4) + + const parser = mqtt.parser() + + let packetCount = 0 + let errorCount = 0 + let expectedPackets = 1 + let expectedErrors = 1 + + parser.on('packet', packet => { + t.ok(++packetCount <= expectedPackets, `expected <= ${expectedPackets} packets`) + }) + + parser.on('error', erroneous => { + t.ok(++errorCount <= expectedErrors, `expected <= ${expectedErrors} errors`) + }) + + parser.parse(Buffer.from([ + // First, a valid connect packet: + + 16, 12, // Header + 0, 4, // Protocol ID length + 77, 81, 84, 84, // Protocol ID + 4, // Protocol version + 2, // Connect flags + 0, 30, // Keepalive + 0, 0, // Client ID length + + // Then an invalid subscribe packet: + + 128, 9, // Header (subscribeqos=0length=9) + 0, 6, // Message ID (6) + 0, 4, // Topic length, + 116, 101, 115, 116, // Topic (test) + 0, // Qos (0) + + // And another invalid subscribe packet: + + 128, 9, // Header (subscribeqos=0length=9) + 0, 6, // Message ID (6) + 0, 4, // Topic length, + 116, 101, 115, 116, // Topic (test) + 0, // Qos (0) + + // Finally, a valid disconnect packet: + + 224, 0 // Header + ])) + + // Calling parse again clears the error and continues parsing + packetCount = 0 + errorCount = 0 + expectedPackets = 2 + expectedErrors = 0 + + parser.parse(Buffer.from([ + // Connect: + + 16, 12, // Header + 0, 4, // Protocol ID length + 77, 81, 84, 84, // Protocol ID + 4, // Protocol version + 2, // Connect flags + 0, 30, // Keepalive + 0, 0, // Client ID length + + // Disconnect: + + 224, 0 // Header + ])) +}) + +testGenerateErrorMultipleCmds([ + 'publish', + 'puback', + 'pubrec', + 'pubrel', + 'subscribe', + 'suback', + 'unsubscribe', + 'unsuback' +], 'Invalid messageId', { + qos: 1, // required for publish + topic: 'test', // required for publish + messageId: 'a' +}, {}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/testRandom.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/testRandom.js new file mode 100644 index 000000000..9beff90aa --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/testRandom.js @@ -0,0 +1,86 @@ +const mqtt = require('./') +const crypto = require('crypto') +const max = 1E5 +const start = Date.now() / 1000 +var errors = 0 +var packets = 0 +var randomPacket +const firstBytes = [ + 16 * 1, // CONNECT + 16 * 2, // CONNACK + 16 * 3, // PUBLISH, QoS: 0, No Retain, No Dup + 16 * 3 + 1, // PUBLISH, QoS: 0, Retain, No Dup + 16 * 3 + 8, // PUBLISH, QoS: 0, No Retain, Dup + 16 * 3 + 1 + 8, // PUBLISH, QoS: 0, Retain, Dup + 16 * 3 + 2, // PUBLISH, QoS: 1, No Retain, No Dup + 16 * 3 + 2 + 
1, // PUBLISH, QoS: 1, Retain, No Dup + 16 * 3 + 2 + 8, // PUBLISH, QoS: 1, No Retain, Dup + 16 * 3 + 2 + 1 + 8, // PUBLISH, QoS: 1, Retain, Dup + 16 * 3 + 4, // PUBLISH, QoS: 2, No Retain, No Dup + 16 * 3 + 4 + 1, // PUBLISH, QoS: 2, Retain, No Dup + 16 * 3 + 4 + 8, // PUBLISH, QoS: 2, No Retain, Dup + 16 * 3 + 4 + 1 + 8, // PUBLISH, QoS: 2, Retain, Dup + 16 * 4, // PUBACK + 16 * 5, // PUBREC + 16 * 6, // PUBREL + 16 * 7, // PUBCOMP + 16 * 8, // SUBSCRIBE + 16 * 9, // SUBACK + 16 * 10, // UNSUBSCRIBE + 16 * 11, // UNSUBACK + 16 * 12, // PINGREQ + 16 * 13, // PINGRESP + 16 * 14, // DISCONNECT + 16 * 15 // RESERVED +] + +function doParse () { + const parser = mqtt.parser() + + parser.on('error', onError) + parser.on('packet', onPacket) + randomPacket = crypto.randomBytes(Math.floor(Math.random() * 512)) + + // Increase probability to have a valid first byte in order to at least + // enter the parser + if (Math.random() > 0.2 && randomPacket.length > 0) randomPacket.writeUInt8(firstBytes[Math.floor(Math.random() * firstBytes.length)], 0) + parser.parse(randomPacket) +} + +try { + console.log('Starting benchmark') + for (let i = 0; i < max; i++) { + doParse() + } +} catch (e) { + console.log('Exception occurred at packet') + console.log(randomPacket) + console.log(e.message) + console.log(e.stack) +} + +function onError () { + errors++ +} + +function onPacket () { + packets++ +} + +const delta = Math.abs(max - packets - errors) +const time = Date.now() / 1000 - start +console.log('Benchmark complete') +console.log('==========================') +console.log('Sent packets:', max) +console.log('Total time:', Math.round(time * 100) / 100, 'seconds', '\r\n') + +console.log('Valid packets:', packets) +console.log('Erroneous packets:', errors) + +if ((max - packets - errors) < 0) console.log('Excess packets:', delta, '\r\n') +else console.log('Missing packets:', delta, '\r\n') + +console.log('Total packets:', packets + errors) +console.log('Total errors:', errors + delta) +console.log('Error rate:', `${((errors + delta) / max * 100).toFixed(2)}%`) +console.log('==========================') diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/types/index.d.ts b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/types/index.d.ts new file mode 100644 index 000000000..0bc74657f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/types/index.d.ts @@ -0,0 +1,255 @@ +import EventEmitter = NodeJS.EventEmitter +import WritableStream = NodeJS.WritableStream + +export declare type QoS = 0 | 1 | 2 + +export declare type PacketCmd = 'auth' | + 'connack' | + 'connect' | + 'disconnect' | + 'pingreq' | + 'pingresp' | + 'puback' | + 'pubcomp' | + 'publish' | + 'pubrel' | + 'pubrec' | + 'suback' | + 'subscribe' | + 'unsuback' | + 'unsubscribe' + +export declare type UserProperties = {[index: string]: string | string[]} + +export interface IPacket { + cmd: PacketCmd + messageId?: number + length?: number +} + +export interface IAuthPacket extends IPacket { + cmd: 'auth' + reasonCode: number, + properties?: { + authenticationMethod?: string, + authenticationData?: Buffer, + reasonString?: string, + userProperties?: UserProperties, + } +} + +export interface IConnectPacket extends IPacket { + cmd: 'connect' + clientId: string + protocolVersion?: 4 | 5 | 3 + protocolId?: 'MQTT' | 'MQIsdp' + clean?: boolean + keepalive?: number + username?: string + password?: Buffer + will?: { + topic: string + payload: Buffer + qos?: QoS + retain?: boolean + properties?: { + willDelayInterval?: 
number, + payloadFormatIndicator?: number, + messageExpiryInterval?: number, + contentType?: string, + responseTopic?: string, + correlationData?: Buffer, + userProperties?: UserProperties + } + } + properties?: { + sessionExpiryInterval?: number, + receiveMaximum?: number, + maximumPacketSize?: number, + topicAliasMaximum?: number, + requestResponseInformation?: boolean, + requestProblemInformation?: boolean, + userProperties?: UserProperties, + authenticationMethod?: string, + authenticationData?: Buffer + } +} + +export interface IPublishPacket extends IPacket { + cmd: 'publish' + qos: QoS + dup: boolean + retain: boolean + topic: string + payload: string | Buffer + properties?: { + payloadFormatIndicator?: boolean, + messageExpiryInterval?: number, + topicAlias?: number, + responseTopic?: string, + correlationData?: Buffer, + userProperties?: UserProperties, + subscriptionIdentifier?: number, + contentType?: string + } +} + +export interface IConnackPacket extends IPacket { + cmd: 'connack' + returnCode?: number, + reasonCode?: number, + sessionPresent: boolean + properties?: { + sessionExpiryInterval?: number, + receiveMaximum?: number, + maximumQoS?: number, + retainAvailable?: boolean, + maximumPacketSize?: number, + assignedClientIdentifier?: string, + topicAliasMaximum?: number, + reasonString?: string, + userProperties?: UserProperties, + wildcardSubscriptionAvailable?: boolean, + subscriptionIdentifiersAvailable?: boolean, + sharedSubscriptionAvailable?: boolean, + serverKeepAlive?: number, + responseInformation?: string, + serverReference?: string, + authenticationMethod?: string, + authenticationData?: Buffer + } +} + +export interface ISubscription { + topic: string + qos: QoS, + nl?: boolean, + rap?: boolean, + rh?: number +} + +export interface ISubscribePacket extends IPacket { + cmd: 'subscribe' + subscriptions: ISubscription[], + properties?: { + reasonString?: string, + userProperties?: UserProperties + } +} + +export interface ISubackPacket extends IPacket { + cmd: 'suback', + reasonCode?: number, + properties?: { + reasonString?: string, + userProperties?: UserProperties + }, + granted: number[] | Object[] +} + +export interface IUnsubscribePacket extends IPacket { + cmd: 'unsubscribe', + properties?: { + reasonString?: string, + userProperties?: UserProperties + }, + unsubscriptions: string[] +} + +export interface IUnsubackPacket extends IPacket { + cmd: 'unsuback', + reasonCode?: number, + properties?: { + reasonString?: string, + userProperties?: UserProperties + } +} + +export interface IPubackPacket extends IPacket { + cmd: 'puback', + reasonCode?: number, + properties?: { + reasonString?: string, + userProperties?: UserProperties + } +} + +export interface IPubcompPacket extends IPacket { + cmd: 'pubcomp', + reasonCode?: number, + properties?: { + reasonString?: string, + userProperties?: UserProperties + } +} + +export interface IPubrelPacket extends IPacket { + cmd: 'pubrel', + reasonCode?: number, + properties?: { + reasonString?: string, + userProperties?: UserProperties + } +} + +export interface IPubrecPacket extends IPacket { + cmd: 'pubrec', + reasonCode?: number, + properties?: { + reasonString?: string, + userProperties?: UserProperties + } +} + +export interface IPingreqPacket extends IPacket { + cmd: 'pingreq' +} + +export interface IPingrespPacket extends IPacket { + cmd: 'pingresp' +} + +export interface IDisconnectPacket extends IPacket { + cmd: 'disconnect', + reasonCode?: number, + properties?: { + sessionExpiryInterval?: number, + 
reasonString?: string, + userProperties?: UserProperties, + serverReference?: string + } +} + +export declare type Packet = IConnectPacket | + IPublishPacket | + IConnackPacket | + ISubscribePacket | + ISubackPacket | + IUnsubscribePacket | + IUnsubackPacket | + IPubackPacket | + IPubcompPacket | + IPubrelPacket | + IPingreqPacket | + IPingrespPacket | + IDisconnectPacket | + IPubrecPacket | + IAuthPacket + +export interface Parser extends EventEmitter { + on(event: 'packet', callback: (packet: Packet) => void): this + + on(event: 'error', callback: (error: any) => void): this + + parse(buffer: Buffer, opts?: Object): number +} + +export declare function parser(opts?: Object): Parser + +export declare function generate(packet: Packet, opts?: Object): Buffer + +export declare function writeToStream(object: Packet, stream: WritableStream, opts?: Object): void + +export declare namespace writeToStream { + let cacheNumbers: boolean +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/writeToStream.js b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/writeToStream.js new file mode 100644 index 000000000..e7b6d1895 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/mqtt-packet/writeToStream.js @@ -0,0 +1,1117 @@ +const protocol = require('./constants') +const empty = Buffer.allocUnsafe(0) +const zeroBuf = Buffer.from([0]) +const numbers = require('./numbers') +const nextTick = require('process-nextick-args').nextTick +const debug = require('debug')('mqtt-packet:writeToStream') + +const numCache = numbers.cache +const generateNumber = numbers.generateNumber +const generateCache = numbers.generateCache +const genBufVariableByteInt = numbers.genBufVariableByteInt +const generate4ByteBuffer = numbers.generate4ByteBuffer +let writeNumber = writeNumberCached +let toGenerate = true + +function generate (packet, stream, opts) { + debug('generate called') + if (stream.cork) { + stream.cork() + nextTick(uncork, stream) + } + + if (toGenerate) { + toGenerate = false + generateCache() + } + debug('generate: packet.cmd: %s', packet.cmd) + switch (packet.cmd) { + case 'connect': + return connect(packet, stream, opts) + case 'connack': + return connack(packet, stream, opts) + case 'publish': + return publish(packet, stream, opts) + case 'puback': + case 'pubrec': + case 'pubrel': + case 'pubcomp': + return confirmation(packet, stream, opts) + case 'subscribe': + return subscribe(packet, stream, opts) + case 'suback': + return suback(packet, stream, opts) + case 'unsubscribe': + return unsubscribe(packet, stream, opts) + case 'unsuback': + return unsuback(packet, stream, opts) + case 'pingreq': + case 'pingresp': + return emptyPacket(packet, stream, opts) + case 'disconnect': + return disconnect(packet, stream, opts) + case 'auth': + return auth(packet, stream, opts) + default: + stream.emit('error', new Error('Unknown command')) + return false + } +} +/** + * Controls numbers cache. 
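+ * (Surfaced to callers as the `writeToStream.cacheNumbers` boolean flag declared in types/index.d.ts above.)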
+ * Set to "false" to allocate buffers on-the-flight instead of pre-generated cache + */ +Object.defineProperty(generate, 'cacheNumbers', { + get () { + return writeNumber === writeNumberCached + }, + set (value) { + if (value) { + if (!numCache || Object.keys(numCache).length === 0) toGenerate = true + writeNumber = writeNumberCached + } else { + toGenerate = false + writeNumber = writeNumberGenerated + } + } +}) + +function uncork (stream) { + stream.uncork() +} + +function connect (packet, stream, opts) { + const settings = packet || {} + const protocolId = settings.protocolId || 'MQTT' + let protocolVersion = settings.protocolVersion || 4 + const will = settings.will + let clean = settings.clean + const keepalive = settings.keepalive || 0 + const clientId = settings.clientId || '' + const username = settings.username + const password = settings.password + /* mqtt5 new oprions */ + const properties = settings.properties + + if (clean === undefined) clean = true + + let length = 0 + + // Must be a string and non-falsy + if (!protocolId || + (typeof protocolId !== 'string' && !Buffer.isBuffer(protocolId))) { + stream.emit('error', new Error('Invalid protocolId')) + return false + } else length += protocolId.length + 2 + + // Must be 3 or 4 or 5 + if (protocolVersion !== 3 && protocolVersion !== 4 && protocolVersion !== 5) { + stream.emit('error', new Error('Invalid protocol version')) + return false + } else length += 1 + + // ClientId might be omitted in 3.1.1 and 5, but only if cleanSession is set to 1 + if ((typeof clientId === 'string' || Buffer.isBuffer(clientId)) && + (clientId || protocolVersion >= 4) && (clientId || clean)) { + length += Buffer.byteLength(clientId) + 2 + } else { + if (protocolVersion < 4) { + stream.emit('error', new Error('clientId must be supplied before 3.1.1')) + return false + } + if ((clean * 1) === 0) { + stream.emit('error', new Error('clientId must be given if cleanSession set to 0')) + return false + } + } + + // Must be a two byte number + if (typeof keepalive !== 'number' || + keepalive < 0 || + keepalive > 65535 || + keepalive % 1 !== 0) { + stream.emit('error', new Error('Invalid keepalive')) + return false + } else length += 2 + + // Connect flags + length += 1 + + // Properties + if (protocolVersion === 5) { + var propertiesData = getProperties(stream, properties) + if (!propertiesData) { return false } + length += propertiesData.length + } + + // If will exists... 
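+ // (A valid will must be an object with a string topic; the byte lengths of
+ // its topic and payload, plus its MQTT 5 will properties when present, are
+ // added to the remaining length computed for the CONNECT packet.)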
+ if (will) { + // It must be an object + if (typeof will !== 'object') { + stream.emit('error', new Error('Invalid will')) + return false + } + // It must have topic typeof string + if (!will.topic || typeof will.topic !== 'string') { + stream.emit('error', new Error('Invalid will topic')) + return false + } else { + length += Buffer.byteLength(will.topic) + 2 + } + + // Payload + length += 2 // payload length + if (will.payload) { + if (will.payload.length >= 0) { + if (typeof will.payload === 'string') { + length += Buffer.byteLength(will.payload) + } else { + length += will.payload.length + } + } else { + stream.emit('error', new Error('Invalid will payload')) + return false + } + } + // will properties + var willProperties = {} + if (protocolVersion === 5) { + willProperties = getProperties(stream, will.properties) + if (!willProperties) { return false } + length += willProperties.length + } + } + + // Username + let providedUsername = false + if (username != null) { + if (isStringOrBuffer(username)) { + providedUsername = true + length += Buffer.byteLength(username) + 2 + } else { + stream.emit('error', new Error('Invalid username')) + return false + } + } + + // Password + if (password != null) { + if (!providedUsername) { + stream.emit('error', new Error('Username is required to use password')) + return false + } + + if (isStringOrBuffer(password)) { + length += byteLength(password) + 2 + } else { + stream.emit('error', new Error('Invalid password')) + return false + } + } + + // Generate header + stream.write(protocol.CONNECT_HEADER) + + // Generate length + writeVarByteInt(stream, length) + + // Generate protocol ID + writeStringOrBuffer(stream, protocolId) + + if (settings.bridgeMode) { + protocolVersion += 128 + } + + stream.write( + protocolVersion === 131 + ? protocol.VERSION131 + : protocolVersion === 132 + ? protocol.VERSION132 + : protocolVersion === 4 + ? protocol.VERSION4 + : protocolVersion === 5 + ? protocol.VERSION5 + : protocol.VERSION3 + ) + + // Connect flags + let flags = 0 + flags |= (username != null) ? protocol.USERNAME_MASK : 0 + flags |= (password != null) ? protocol.PASSWORD_MASK : 0 + flags |= (will && will.retain) ? protocol.WILL_RETAIN_MASK : 0 + flags |= (will && will.qos) ? will.qos << protocol.WILL_QOS_SHIFT : 0 + flags |= will ? protocol.WILL_FLAG_MASK : 0 + flags |= clean ? protocol.CLEAN_SESSION_MASK : 0 + + stream.write(Buffer.from([flags])) + + // Keepalive + writeNumber(stream, keepalive) + + // Properties + if (protocolVersion === 5) { + propertiesData.write() + } + + // Client ID + writeStringOrBuffer(stream, clientId) + + // Will + if (will) { + if (protocolVersion === 5) { + willProperties.write() + } + writeString(stream, will.topic) + writeStringOrBuffer(stream, will.payload) + } + + // Username and password + if (username != null) { + writeStringOrBuffer(stream, username) + } + if (password != null) { + writeStringOrBuffer(stream, password) + } + // This is a small packet that happens only once on a stream + // We assume the stream is always free to receive more data after this + return true +} + +function connack (packet, stream, opts) { + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const rc = version === 5 ? 
settings.reasonCode : settings.returnCode + const properties = settings.properties + let length = 2 // length of rc and sessionHeader + + // Check return code + if (typeof rc !== 'number') { + stream.emit('error', new Error('Invalid return code')) + return false + } + // mqtt5 properties + let propertiesData = null + if (version === 5) { + propertiesData = getProperties(stream, properties) + if (!propertiesData) { return false } + length += propertiesData.length + } + + stream.write(protocol.CONNACK_HEADER) + // length + writeVarByteInt(stream, length) + stream.write(settings.sessionPresent ? protocol.SESSIONPRESENT_HEADER : zeroBuf) + + stream.write(Buffer.from([rc])) + if (propertiesData != null) { + propertiesData.write() + } + return true +} + +function publish (packet, stream, opts) { + debug('publish: packet: %o', packet) + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const qos = settings.qos || 0 + const retain = settings.retain ? protocol.RETAIN_MASK : 0 + const topic = settings.topic + const payload = settings.payload || empty + const id = settings.messageId + const properties = settings.properties + + let length = 0 + + // Topic must be a non-empty string or Buffer + if (typeof topic === 'string') length += Buffer.byteLength(topic) + 2 + else if (Buffer.isBuffer(topic)) length += topic.length + 2 + else { + stream.emit('error', new Error('Invalid topic')) + return false + } + + // Get the payload length + if (!Buffer.isBuffer(payload)) length += Buffer.byteLength(payload) + else length += payload.length + + // Message ID must a number if qos > 0 + if (qos && typeof id !== 'number') { + stream.emit('error', new Error('Invalid messageId')) + return false + } else if (qos) length += 2 + + // mqtt5 properties + let propertiesData = null + if (version === 5) { + propertiesData = getProperties(stream, properties) + if (!propertiesData) { return false } + length += propertiesData.length + } + + // Header + stream.write(protocol.PUBLISH_HEADER[qos][settings.dup ? 1 : 0][retain ? 1 : 0]) + + // Remaining length + writeVarByteInt(stream, length) + + // Topic + writeNumber(stream, byteLength(topic)) + stream.write(topic) + + // Message ID + if (qos > 0) writeNumber(stream, id) + + // Properties + if (propertiesData != null) { + propertiesData.write() + } + + // Payload + debug('publish: payload: %o', payload) + return stream.write(payload) +} + +/* Puback, pubrec, pubrel and pubcomp */ +function confirmation (packet, stream, opts) { + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const type = settings.cmd || 'puback' + const id = settings.messageId + const dup = (settings.dup && type === 'pubrel') ? protocol.DUP_MASK : 0 + let qos = 0 + const reasonCode = settings.reasonCode + const properties = settings.properties + let length = version === 5 ? 
3 : 2 + + if (type === 'pubrel') qos = 1 + + // Check message ID + if (typeof id !== 'number') { + stream.emit('error', new Error('Invalid messageId')) + return false + } + + // properies mqtt 5 + let propertiesData = null + if (version === 5) { + // Confirm should not add empty property length with no properties (rfc 3.4.2.2.1) + if (typeof properties === 'object') { + propertiesData = getPropertiesByMaximumPacketSize(stream, properties, opts, length) + if (!propertiesData) { return false } + length += propertiesData.length + } + } + + // Header + stream.write(protocol.ACKS[type][qos][dup][0]) + + // Length + writeVarByteInt(stream, length) + + // Message ID + writeNumber(stream, id) + + // reason code in header + if (version === 5) { + stream.write(Buffer.from([reasonCode])) + } + + // properies mqtt 5 + if (propertiesData !== null) { + propertiesData.write() + } + return true +} + +function subscribe (packet, stream, opts) { + debug('subscribe: packet: ') + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const dup = settings.dup ? protocol.DUP_MASK : 0 + const id = settings.messageId + const subs = settings.subscriptions + const properties = settings.properties + + let length = 0 + + // Check message ID + if (typeof id !== 'number') { + stream.emit('error', new Error('Invalid messageId')) + return false + } else length += 2 + + // properies mqtt 5 + let propertiesData = null + if (version === 5) { + propertiesData = getProperties(stream, properties) + if (!propertiesData) { return false } + length += propertiesData.length + } + + // Check subscriptions + if (typeof subs === 'object' && subs.length) { + for (let i = 0; i < subs.length; i += 1) { + const itopic = subs[i].topic + const iqos = subs[i].qos + + if (typeof itopic !== 'string') { + stream.emit('error', new Error('Invalid subscriptions - invalid topic')) + return false + } + if (typeof iqos !== 'number') { + stream.emit('error', new Error('Invalid subscriptions - invalid qos')) + return false + } + + if (version === 5) { + const nl = subs[i].nl || false + if (typeof nl !== 'boolean') { + stream.emit('error', new Error('Invalid subscriptions - invalid No Local')) + return false + } + const rap = subs[i].rap || false + if (typeof rap !== 'boolean') { + stream.emit('error', new Error('Invalid subscriptions - invalid Retain as Published')) + return false + } + const rh = subs[i].rh || 0 + if (typeof rh !== 'number' || rh > 2) { + stream.emit('error', new Error('Invalid subscriptions - invalid Retain Handling')) + return false + } + } + + length += Buffer.byteLength(itopic) + 2 + 1 + } + } else { + stream.emit('error', new Error('Invalid subscriptions')) + return false + } + + // Generate header + debug('subscribe: writing to stream: %o', protocol.SUBSCRIBE_HEADER) + stream.write(protocol.SUBSCRIBE_HEADER[1][dup ? 1 : 0][0]) + + // Generate length + writeVarByteInt(stream, length) + + // Generate message ID + writeNumber(stream, id) + + // properies mqtt 5 + if (propertiesData !== null) { + propertiesData.write() + } + + let result = true + + // Generate subs + for (const sub of subs) { + const jtopic = sub.topic + const jqos = sub.qos + const jnl = +sub.nl + const jrap = +sub.rap + const jrh = sub.rh + let joptions + + // Write topic string + writeString(stream, jtopic) + + // options process + joptions = protocol.SUBSCRIBE_OPTIONS_QOS[jqos] + if (version === 5) { + joptions |= jnl ? protocol.SUBSCRIBE_OPTIONS_NL : 0 + joptions |= jrap ? protocol.SUBSCRIBE_OPTIONS_RAP : 0 + joptions |= jrh ? 
protocol.SUBSCRIBE_OPTIONS_RH[jrh] : 0 + } + // Write options + result = stream.write(Buffer.from([joptions])) + } + + return result +} + +function suback (packet, stream, opts) { + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const id = settings.messageId + const granted = settings.granted + const properties = settings.properties + let length = 0 + + // Check message ID + if (typeof id !== 'number') { + stream.emit('error', new Error('Invalid messageId')) + return false + } else length += 2 + + // Check granted qos vector + if (typeof granted === 'object' && granted.length) { + for (let i = 0; i < granted.length; i += 1) { + if (typeof granted[i] !== 'number') { + stream.emit('error', new Error('Invalid qos vector')) + return false + } + length += 1 + } + } else { + stream.emit('error', new Error('Invalid qos vector')) + return false + } + + // properies mqtt 5 + let propertiesData = null + if (version === 5) { + propertiesData = getPropertiesByMaximumPacketSize(stream, properties, opts, length) + if (!propertiesData) { return false } + length += propertiesData.length + } + + // header + stream.write(protocol.SUBACK_HEADER) + + // Length + writeVarByteInt(stream, length) + + // Message ID + writeNumber(stream, id) + + // properies mqtt 5 + if (propertiesData !== null) { + propertiesData.write() + } + + return stream.write(Buffer.from(granted)) +} + +function unsubscribe (packet, stream, opts) { + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const id = settings.messageId + const dup = settings.dup ? protocol.DUP_MASK : 0 + const unsubs = settings.unsubscriptions + const properties = settings.properties + + let length = 0 + + // Check message ID + if (typeof id !== 'number') { + stream.emit('error', new Error('Invalid messageId')) + return false + } else { + length += 2 + } + // Check unsubs + if (typeof unsubs === 'object' && unsubs.length) { + for (let i = 0; i < unsubs.length; i += 1) { + if (typeof unsubs[i] !== 'string') { + stream.emit('error', new Error('Invalid unsubscriptions')) + return false + } + length += Buffer.byteLength(unsubs[i]) + 2 + } + } else { + stream.emit('error', new Error('Invalid unsubscriptions')) + return false + } + // properies mqtt 5 + let propertiesData = null + if (version === 5) { + propertiesData = getProperties(stream, properties) + if (!propertiesData) { return false } + length += propertiesData.length + } + + // Header + stream.write(protocol.UNSUBSCRIBE_HEADER[1][dup ? 1 : 0][0]) + + // Length + writeVarByteInt(stream, length) + + // Message ID + writeNumber(stream, id) + + // properies mqtt 5 + if (propertiesData !== null) { + propertiesData.write() + } + + // Unsubs + let result = true + for (let j = 0; j < unsubs.length; j++) { + result = writeString(stream, unsubs[j]) + } + + return result +} + +function unsuback (packet, stream, opts) { + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const id = settings.messageId + const dup = settings.dup ? 
protocol.DUP_MASK : 0 + const granted = settings.granted + const properties = settings.properties + const type = settings.cmd + const qos = 0 + + let length = 2 + + // Check message ID + if (typeof id !== 'number') { + stream.emit('error', new Error('Invalid messageId')) + return false + } + + // Check granted + if (version === 5) { + if (typeof granted === 'object' && granted.length) { + for (let i = 0; i < granted.length; i += 1) { + if (typeof granted[i] !== 'number') { + stream.emit('error', new Error('Invalid qos vector')) + return false + } + length += 1 + } + } else { + stream.emit('error', new Error('Invalid qos vector')) + return false + } + } + + // properies mqtt 5 + let propertiesData = null + if (version === 5) { + propertiesData = getPropertiesByMaximumPacketSize(stream, properties, opts, length) + if (!propertiesData) { return false } + length += propertiesData.length + } + + // Header + stream.write(protocol.ACKS[type][qos][dup][0]) + + // Length + writeVarByteInt(stream, length) + + // Message ID + writeNumber(stream, id) + + // properies mqtt 5 + if (propertiesData !== null) { + propertiesData.write() + } + + // payload + if (version === 5) { + stream.write(Buffer.from(granted)) + } + return true +} + +function emptyPacket (packet, stream, opts) { + return stream.write(protocol.EMPTY[packet.cmd]) +} + +function disconnect (packet, stream, opts) { + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const reasonCode = settings.reasonCode + const properties = settings.properties + let length = version === 5 ? 1 : 0 + + // properies mqtt 5 + let propertiesData = null + if (version === 5) { + propertiesData = getPropertiesByMaximumPacketSize(stream, properties, opts, length) + if (!propertiesData) { return false } + length += propertiesData.length + } + + // Header + stream.write(Buffer.from([protocol.codes.disconnect << 4])) + + // Length + writeVarByteInt(stream, length) + + // reason code in header + if (version === 5) { + stream.write(Buffer.from([reasonCode])) + } + + // properies mqtt 5 + if (propertiesData !== null) { + propertiesData.write() + } + + return true +} + +function auth (packet, stream, opts) { + const version = opts ? opts.protocolVersion : 4 + const settings = packet || {} + const reasonCode = settings.reasonCode + const properties = settings.properties + let length = version === 5 ? 
1 : 0 + + if (version !== 5) stream.emit('error', new Error('Invalid mqtt version for auth packet')) + + // properies mqtt 5 + const propertiesData = getPropertiesByMaximumPacketSize(stream, properties, opts, length) + if (!propertiesData) { return false } + length += propertiesData.length + + // Header + stream.write(Buffer.from([protocol.codes.auth << 4])) + + // Length + writeVarByteInt(stream, length) + + // reason code in header + stream.write(Buffer.from([reasonCode])) + + // properies mqtt 5 + if (propertiesData !== null) { + propertiesData.write() + } + return true +} + +/** + * writeVarByteInt - write an MQTT style variable byte integer to the buffer + * + * @param buffer - destination + * @param pos - offset + * @param length - length (>0) + * @returns number of bytes written + * + * @api private + */ + +const varByteIntCache = {} +function writeVarByteInt (stream, num) { + if (num > protocol.VARBYTEINT_MAX) { + stream.emit('error', new Error(`Invalid variable byte integer: ${num}`)) + return false + } + + let buffer = varByteIntCache[num] + + if (!buffer) { + buffer = genBufVariableByteInt(num) + if (num < 16384) varByteIntCache[num] = buffer + } + debug('writeVarByteInt: writing to stream: %o', buffer) + return stream.write(buffer) +} + +/** + * writeString - write a utf8 string to the buffer + * + * @param buffer - destination + * @param pos - offset + * @param string - string to write + * @return number of bytes written + * + * @api private + */ + +function writeString (stream, string) { + const strlen = Buffer.byteLength(string) + writeNumber(stream, strlen) + + debug('writeString: %s', string) + return stream.write(string, 'utf8') +} + +/** + * writeStringPair - write a utf8 string pairs to the buffer + * + * @param buffer - destination + * @param name - string name to write + * @param value - string value to write + * @return number of bytes written + * + * @api private + */ +function writeStringPair (stream, name, value) { + writeString(stream, name) + writeString(stream, value) +} + +/** + * writeNumber - write a two byte number to the buffer + * + * @param buffer - destination + * @param pos - offset + * @param number - number to write + * @return number of bytes written + * + * @api private + */ +function writeNumberCached (stream, number) { + debug('writeNumberCached: number: %d', number) + debug('writeNumberCached: %o', numCache[number]) + return stream.write(numCache[number]) +} +function writeNumberGenerated (stream, number) { + const generatedNumber = generateNumber(number) + debug('writeNumberGenerated: %o', generatedNumber) + return stream.write(generatedNumber) +} +function write4ByteNumber (stream, number) { + const generated4ByteBuffer = generate4ByteBuffer(number) + debug('write4ByteNumber: %o', generated4ByteBuffer) + return stream.write(generated4ByteBuffer) +} +/** + * writeStringOrBuffer - write a String or Buffer with the its length prefix + * + * @param buffer - destination + * @param pos - offset + * @param toWrite - String or Buffer + * @return number of bytes written + */ +function writeStringOrBuffer (stream, toWrite) { + if (typeof toWrite === 'string') { + writeString(stream, toWrite) + } else if (toWrite) { + writeNumber(stream, toWrite.length) + stream.write(toWrite) + } else writeNumber(stream, 0) +} + +function getProperties (stream, properties) { + /* connect properties */ + if (typeof properties !== 'object' || properties.length != null) { + return { + length: 1, + write () { + writeProperties(stream, {}, 0) + } + } + } + let 
propertiesLength = 0 + function getLengthProperty (name, value) { + const type = protocol.propertiesTypes[name] + let length = 0 + switch (type) { + case 'byte': { + if (typeof value !== 'boolean') { + stream.emit('error', new Error(`Invalid ${name}: ${value}`)) + return false + } + length += 1 + 1 + break + } + case 'int8': { + if (typeof value !== 'number' || value < 0 || value > 0xff) { + stream.emit('error', new Error(`Invalid ${name}: ${value}`)) + return false + } + length += 1 + 1 + break + } + case 'binary': { + if (value && value === null) { + stream.emit('error', new Error(`Invalid ${name}: ${value}`)) + return false + } + length += 1 + Buffer.byteLength(value) + 2 + break + } + case 'int16': { + if (typeof value !== 'number' || value < 0 || value > 0xffff) { + stream.emit('error', new Error(`Invalid ${name}: ${value}`)) + return false + } + length += 1 + 2 + break + } + case 'int32': { + if (typeof value !== 'number' || value < 0 || value > 0xffffffff) { + stream.emit('error', new Error(`Invalid ${name}: ${value}`)) + return false + } + length += 1 + 4 + break + } + case 'var': { + // var byte integer is max 24 bits packed in 32 bits + if (typeof value !== 'number' || value < 0 || value > 0x0fffffff) { + stream.emit('error', new Error(`Invalid ${name}: ${value}`)) + return false + } + length += 1 + Buffer.byteLength(genBufVariableByteInt(value)) + break + } + case 'string': { + if (typeof value !== 'string') { + stream.emit('error', new Error(`Invalid ${name}: ${value}`)) + return false + } + length += 1 + 2 + Buffer.byteLength(value.toString()) + break + } + case 'pair': { + if (typeof value !== 'object') { + stream.emit('error', new Error(`Invalid ${name}: ${value}`)) + return false + } + length += Object.getOwnPropertyNames(value).reduce((result, name) => { + const currentValue = value[name] + if (Array.isArray(currentValue)) { + result += currentValue.reduce((currentLength, value) => { + currentLength += 1 + 2 + Buffer.byteLength(name.toString()) + 2 + Buffer.byteLength(value.toString()) + return currentLength + }, 0) + } else { + result += 1 + 2 + Buffer.byteLength(name.toString()) + 2 + Buffer.byteLength(value[name].toString()) + } + return result + }, 0) + break + } + default: { + stream.emit('error', new Error(`Invalid property ${name}: ${value}`)) + return false + } + } + return length + } + if (properties) { + for (const propName in properties) { + let propLength = 0 + let propValueLength = 0 + const propValue = properties[propName] + if (Array.isArray(propValue)) { + for (let valueIndex = 0; valueIndex < propValue.length; valueIndex++) { + propValueLength = getLengthProperty(propName, propValue[valueIndex]) + if (!propValueLength) { return false } + propLength += propValueLength + } + } else { + propValueLength = getLengthProperty(propName, propValue) + if (!propValueLength) { return false } + propLength = propValueLength + } + if (!propLength) return false + propertiesLength += propLength + } + } + const propertiesLengthLength = Buffer.byteLength(genBufVariableByteInt(propertiesLength)) + + return { + length: propertiesLengthLength + propertiesLength, + write () { + writeProperties(stream, properties, propertiesLength) + } + } +} + +function getPropertiesByMaximumPacketSize (stream, properties, opts, length) { + const mayEmptyProps = ['reasonString', 'userProperties'] + const maximumPacketSize = opts && opts.properties && opts.properties.maximumPacketSize ? 
opts.properties.maximumPacketSize : 0 + + let propertiesData = getProperties(stream, properties) + if (maximumPacketSize) { + while (length + propertiesData.length > maximumPacketSize) { + const currentMayEmptyProp = mayEmptyProps.shift() + if (currentMayEmptyProp && properties[currentMayEmptyProp]) { + delete properties[currentMayEmptyProp] + propertiesData = getProperties(stream, properties) + } else { + return false + } + } + } + return propertiesData +} + +function writeProperty (stream, propName, value) { + const type = protocol.propertiesTypes[propName] + switch (type) { + case 'byte': { + stream.write(Buffer.from([protocol.properties[propName]])) + stream.write(Buffer.from([+value])) + break + } + case 'int8': { + stream.write(Buffer.from([protocol.properties[propName]])) + stream.write(Buffer.from([value])) + break + } + case 'binary': { + stream.write(Buffer.from([protocol.properties[propName]])) + writeStringOrBuffer(stream, value) + break + } + case 'int16': { + stream.write(Buffer.from([protocol.properties[propName]])) + writeNumber(stream, value) + break + } + case 'int32': { + stream.write(Buffer.from([protocol.properties[propName]])) + write4ByteNumber(stream, value) + break + } + case 'var': { + stream.write(Buffer.from([protocol.properties[propName]])) + writeVarByteInt(stream, value) + break + } + case 'string': { + stream.write(Buffer.from([protocol.properties[propName]])) + writeString(stream, value) + break + } + case 'pair': { + Object.getOwnPropertyNames(value).forEach(name => { + const currentValue = value[name] + if (Array.isArray(currentValue)) { + currentValue.forEach(value => { + stream.write(Buffer.from([protocol.properties[propName]])) + writeStringPair(stream, name.toString(), value.toString()) + }) + } else { + stream.write(Buffer.from([protocol.properties[propName]])) + writeStringPair(stream, name.toString(), currentValue.toString()) + } + }) + break + } + default: { + stream.emit('error', new Error(`Invalid property ${propName} value: ${value}`)) + return false + } + } +} + +function writeProperties (stream, properties, propertiesLength) { + /* write properties to stream */ + writeVarByteInt(stream, propertiesLength) + for (const propName in properties) { + if (Object.prototype.hasOwnProperty.call(properties, propName) && properties[propName] !== null) { + const value = properties[propName] + if (Array.isArray(value)) { + for (let valueIndex = 0; valueIndex < value.length; valueIndex++) { + writeProperty(stream, propName, value[valueIndex]) + } + } else { + writeProperty(stream, propName, value) + } + } + } +} + +function byteLength (bufOrString) { + if (!bufOrString) return 0 + else if (bufOrString instanceof Buffer) return bufOrString.length + else return Buffer.byteLength(bufOrString) +} + +function isStringOrBuffer (field) { + return typeof field === 'string' || field instanceof Buffer +} + +module.exports = generate diff --git a/sdklab/meantimerecovery/aedes/node_modules/ms/index.js b/sdklab/meantimerecovery/aedes/node_modules/ms/index.js new file mode 100644 index 000000000..c4498bcc2 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. 
+ * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/ms/license.md b/sdklab/meantimerecovery/aedes/node_modules/ms/license.md new file mode 100644 index 000000000..69b61253a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/ms/package.json b/sdklab/meantimerecovery/aedes/node_modules/ms/package.json new file mode 100644 index 000000000..f75aa1e09 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ms/package.json @@ -0,0 +1,69 @@ +{ + "_from": "ms@2.1.2", + "_id": "ms@2.1.2", + "_inBundle": false, + "_integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "_location": "/ms", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "ms@2.1.2", + "name": "ms", + "escapedName": "ms", + "rawSpec": "2.1.2", + "saveSpec": null, + "fetchSpec": "2.1.2" + }, + "_requiredBy": [ + "/debug" + ], + "_resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "_shasum": "d09d1f357b443f493382a8eb3ccd183872ae6009", + "_spec": "ms@2.1.2", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\debug", + "bugs": { + "url": "https://github.com/zeit/ms/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Tiny millisecond conversion utility", + "devDependencies": { + "eslint": "4.12.1", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/zeit/ms#readme", + "license": "MIT", + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "main": "./index", + "name": "ms", + "repository": { + "type": "git", + "url": "git+https://github.com/zeit/ms.git" + }, + "scripts": { + "lint": "eslint lib/* bin/*", + "precommit": "lint-staged", + "test": "mocha tests.js" + }, + "version": "2.1.2" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/ms/readme.md b/sdklab/meantimerecovery/aedes/node_modules/ms/readme.md new file mode 100644 index 000000000..9a1996b17 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/ms/readme.md @@ -0,0 +1,60 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) + +Use this package to easily convert various time formats to milliseconds. 
+ +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! + +As always, you can run the tests using: `npm test` diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/CHANGELOG.md b/sdklab/meantimerecovery/aedes/node_modules/nanoid/CHANGELOG.md new file mode 100644 index 000000000..40f758ea3 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/CHANGELOG.md @@ -0,0 +1,156 @@ +# Change Log +This project adheres to [Semantic Versioning](http://semver.org/). + +## 2.1.11 +* Reduce size (by Anton Evzhakov). + +## 2.1.10 +* Reduce size by 10% (by Anton Khlynovskiy). + +## 2.1.9 +* Reduce `format` and `async/format` size (by Dair Aidarkhanov). + +## 2.1.8 +* Improve React docs (by Nahum Zsilva). + +## 2.1.7 +* Reduce `index`, `async` and `non-secure` size (by @polemius). + +## 2.1.6 +* Reduce size (by Stas Lashmanov). +* Return fast mask for Node.js. + +## 2.1.5 +* Reduce size (by Max Graey). +* Fix IE support. + +## 2.1.4 +* Reduce `generate` size (by Vsevolod Rodionov). +* Reduce `format` and `format` size (by Victor). +* Reduce `async`, `non-secure` and `non-secure/generate` size. +* Speed up `format` and `async/format` (by Max Graey). +* Improve development process on Windows (by Stanislav Lashmanov). + +## 2.1.3 +* Improve performance (by Stephen Richardson). +* Reduce size (by Stephen Richardson). + +## 2.1.2 +* Improve docs. + +## 2.1.1 +* Fix React Native support (by Shawn Hwei). + +## 2.1 +* Improve React Native support (by Sebastian Werner). + +## 2.0.4 +* Improve error text for React Native (by Sebastian Werner). + +## 2.0.3 +* Fix freeze on string in ID length. + +## 2.0.2 +* Improve docs (by Sylvanus Kateile and Mark Stosberg). + +## 2.0.1 +* Reduce npm package size. +* Mark package as not having side effects (by @xiaody). + +## 2.0 +* Use `-` instead of `~` in default alphabet to by file name safe. +* Add `nanoid/non-secure/generate`. 
+ +## 1.3.4 +* Reduce `non-secure` size. +* Add `async` callback type check. + +## 1.3.3 +* Fix `nanoid/async` performance regression. +* Fix old Node.js `not seeded` issue in synchronous version too. + +## 1.3.2 +* Fix random generator `not seeded` issue of old Node.js. + +## 1.3.1 +* Reduce library size. + +## 1.3 +* Add `nanoid/async/format` and `nanoid/async/generate`. +* Improve synchronous API performance. +* Reduce `url` size (by Daniil Poroshin). +* Improve React Native docs (by joelgetaction). + +## 1.2.6 +* Reduce library size (by rqrqrqrq). + +## 1.2.5 +* Fix Node.js 6.11.1 support (by Andrey Belym). + +## 1.2.4 +* Speed up Node.js secure generators (by Dmitriy Tsvettsikh). + +## 1.2.3 +* Fix JSDoc (by Hendry Sadrak). + +## 1.2.2 +* Fix distribution in `nanoid/non-secure` (by Eatall). + +## 1.2.1 +* Fix old Node.js support. + +## 1.2 +* Add `nanoid/async`. +* Fix `nanoid/non-secure` JSDoc. +* Add Chinese documentation (by Wenliang Dai). +* Speed up and reduce size of `nanoid/non-secure` (by Ori Livni). + +## 1.1.1 +* Improve performance and reduce size of non-secure ID generator. + +## 1.1 +* Add non-secure ID generator. +* Suggest to use non-secure ID generator for React Native developers. +* Reduce size. + +## 1.0.7 +* Fix documentation. + +## 1.0.6 +* Fix documentation. + +## 1.0.5 +* Reduce `nanoid/index` size (by Anton Khlynovskiy). + +## 1.0.4 +* Reduce npm package size. + +## 1.0.3 +* Reduce npm package size. + +## 1.0.2 +* Fix Web Workers support (by Zachary Golba). + +## 1.0.1 +* Reduce `nanoid/index` size (by Anton Khlynovskiy). + +## 1.0 +* Use 21 symbols by default (by David Klebanoff). + +## 0.2.2 +* Reduce `nanoid/generate` size (by Anton Khlynovskiy). +* Speed up Node.js random generator. + +## 0.2.1 +* Fix documentation (by Piper Chester). + +## 0.2 +* Add `size` argument to `nanoid()`. +* Improve performance by 50%. +* Reduce library size by 26% (by Vsevolod Rodionov and Oleg Mokhov). + +## 0.1.1 +* Reduce library size by 5%. + +## 0.1 +* Initial release. diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/nanoid/LICENSE new file mode 100644 index 000000000..37f56aa49 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright 2017 Andrey Sitnik + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/README.md b/sdklab/meantimerecovery/aedes/node_modules/nanoid/README.md new file mode 100644 index 000000000..090247d3f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/README.md @@ -0,0 +1,397 @@ +# Nano ID + +Nano ID logo by Anton Lovchikov + +A tiny, secure, URL-friendly, unique string ID generator for JavaScript. + +* **Small.** 119 bytes (minified and gzipped). No dependencies. + [Size Limit] controls the size. +* **Safe.** It uses cryptographically strong random APIs. + Can be used in clusters. +* **Fast.** It’s 16% faster than UUID. +* **Compact.** It uses a larger alphabet than UUID (`A-Za-z0-9_-`). + So ID size was reduced from 36 to 21 symbols. + +```js +const nanoid = require('nanoid') +model.id = nanoid() //=> "V1StGXR8_Z5jdHi6B-myT" +``` + +Supports [all browsers], Node.js and React Native. + +[all browsers]: http://caniuse.com/#feat=getrandomvalues +[Size Limit]: https://github.com/ai/size-limit + + + Sponsored by Evil Martians + + +## Table of Contents + +1. [Comparison with UUID](#comparison-with-uuid) +2. [Benchmark](#benchmark) +4. [Tools](#tools) +3. [Security](#security) +6. Usage + 1. [JS](#js) + 2. [React](#react) + 3. [React Native](#react-native) + 4. [Web Workers](#web-workers) + 5. [PouchDB and CouchDB](#pouchdb-and-couchdb) + 5. [Mongoose](#mongoose) + 6. [Other Programming Languages](#other-programming-languages) +7. API + 1. [Async](#async) + 2. [Non-Secure](#non-secure) + 3. [Custom Alphabet or Length](#custom-alphabet-or-length) + 4. [Custom Random Bytes Generator](#custom-random-bytes-generator) + + +## Comparison with UUID + +Nano ID is quite comparable to UUID v4 (random-based). +It has a similar number of random bits in the ID +(126 in Nano ID and 122 in UUID), so it has a similar collision probability: + +> For there to be a one in a billion chance of duplication, +> 103 trillion version 4 IDs must be generated. + +There are three main differences between Nano ID and UUID v4: + +1. Nano ID uses a bigger alphabet, so a similar number of random bits + are packed in just 21 symbols instead of 36. +2. Nano ID code is 4 times less than `uuid/v4` package: + 119 bytes instead of 435. +3. Because of memory allocation tricks, Nano ID is 16% faster than UUID. + + +## Benchmark + +```rust +$ ./test/benchmark +nanoid 693,132 ops/sec +nanoid/generate 624,291 ops/sec +uid.sync 487,706 ops/sec +uuid/v4 471,299 ops/sec +secure-random-string 448,386 ops/sec +shortid 66,809 ops/sec + +Async: +nanoid/async 105,024 ops/sec +nanoid/async/generate 106,682 ops/sec +secure-random-string 94,217 ops/sec +uid 92,026 ops/sec + +Non-secure: +nanoid/non-secure 2,555,814 ops/sec +rndm 2,413,565 ops/sec +``` + + +## Tools + +* [ID size calculator] to choice smaller ID size depends on your case. +* [`nanoid-dictionary`] with popular alphabets to use with `nanoid/generate`. +* [`nanoid-cli`] to generate ID from CLI. +* [`nanoid-good`] to be sure that your ID doesn't contain any obscene words. 
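+
+The ID size calculator linked in the list above rests on standard
+birthday-bound arithmetic. A minimal, illustrative sketch of that estimate
+(the `collisionProbability` helper is hypothetical and not part of the
+Nano ID API):
+
+```js
+// Birthday-bound approximation: p ≈ n * (n - 1) / (2 * alphabet^length).
+// Only meaningful while the result stays far below 1.
+function collisionProbability (alphabetSize, idLength, idsGenerated) {
+  const idSpace = Math.pow(alphabetSize, idLength) // total distinct IDs
+  return (idsGenerated * (idsGenerated - 1)) / (2 * idSpace)
+}
+
+// Default Nano ID: 64-symbol alphabet, 21 symbols => about 2^126 possible IDs.
+collisionProbability(64, 21, 1e12) //=> roughly 6e-15 for a trillion IDs
+```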
+ +[`nanoid-dictionary`]: https://github.com/CyberAP/nanoid-dictionary +[ID size calculator]: https://zelark.github.io/nano-id-cc/ +[`nanoid-cli`]: https://github.com/twhitbeck/nanoid-cli +[`nanoid-good`]: https://github.com/y-gagar1n/nanoid-good + + +## Security + +*See a good article about random generators theory: +[Secure random values (in Node.js)]* + + +### Unpredictability + +Instead of using the unsafe `Math.random()`, Nano ID uses the `crypto` module +in Node.js and the Web Crypto API in browsers. These modules use unpredictable +hardware random generator. + + +### Uniformity + +`random % alphabet` is a popular mistake to make when coding an ID generator. +The spread will not be even; there will be a lower chance for some symbols +to appear compared to others—so it will reduce the number of tries +when brute-forcing. + +Nano ID uses a [better algorithm] and is tested for uniformity. + +Nano ID uniformity + +[Secure random values (in Node.js)]: https://gist.github.com/joepie91/7105003c3b26e65efcea63f3db82dfba +[better algorithm]: https://github.com/ai/nanoid/blob/master/format.js + + +### Vulnerabilities + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + + +## Usage + +### JS + +The main module uses URL-friendly symbols (`A-Za-z0-9_-`) and returns an ID +with 21 characters (to have a collision probability similar to UUID v4). + +```js +const nanoid = require('nanoid') +model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqLJ" +``` + +If you want to reduce ID length (and increase collisions probability), +you can pass the length as an argument. + +```js +nanoid(10) //=> "IRFa-VaY2b" +``` + +Don’t forget to check the safety of your ID length +in our [ID collision probability] calculator. + +You can also use [custom alphabet](#custom-alphabet-or-length) +or [random generator](#custom-random-bytes-generator). + +[ID collision probability]: https://zelark.github.io/nano-id-cc/ + + +### React + +**Do not** use a nanoid for `key` prop. In React `key` should be consistence +between renders. This is bad code: + +```jsx + /* DON’T DO IT */ +``` + +This is good code. `id` will be generated only once: + +```jsx +const Element = () => { + const [id] = React.useState(nanoid) + return +} +``` + +If you want to use Nano ID for `id`, you must to set some string prefix. +Nano ID could be started from number. HTML ID can’t be started from the number. + +```jsx + +``` + + +### React Native + +React Native doesn’t have built-in random generator. + +1. Check [`expo-random`] docs and install it. +2. Use `nanoid/async` instead of synchronous `nanoid`. + +```js +const nanoid = require('nanoid/async') + +async function createUser () { + user.id = await nanoid() +} +``` + + +### PouchDB and CouchDB + +In PouchDB and CouchDB, IDs can’t start with an underscore `_`. +A prefix is required to prevent this issue, as Nano ID might use a `_` +at the start of the ID by default. + +Override the default ID with the following option: + +```js +db.put({ + _id: 'id' + nanoid(), + … +}) +``` + + +### Mongoose + +```js +const mySchema = new Schema({ + _id: { + type: String, + default: () => nanoid() + } +}) +``` + + +### Web Workers + +Web Workers don’t have access to a secure random generator. + +Security is important in IDs, when IDs should be unpredictable. For instance, +in “access by URL” link generation. 
+ +If you don’t need unpredictable IDs, but you need Web Workers support, +you can use non‑secure ID generator. Note, that they have bigger collision +probability. + +```js +const nanoid = require('nanoid/non-secure') +nanoid() //=> "Uakgb_J5m9g-0JDMbcJqLJ" +``` + + +### Other Programming Languages + +Nano ID was ported to many languages. You can use these ports to have the same +ID generators on client and server side. + +* [C#](https://github.com/codeyu/nanoid-net) +* [Clojure and ClojureScript](https://github.com/zelark/nano-id) +* [Crystal](https://github.com/mamantoha/nanoid.cr) +* [Dart](https://github.com/pd4d10/nanoid-dart) +* [Go](https://github.com/matoous/go-nanoid) +* [Elixir](https://github.com/railsmechanic/nanoid) +* [Haskell](https://github.com/4e6/nanoid-hs) +* [Java](https://github.com/aventrix/jnanoid) +* [Nim](https://github.com/icyphox/nanoid.nim) +* [PHP](https://github.com/hidehalo/nanoid-php) +* [Python](https://github.com/puyuan/py-nanoid) with [dictionaries](https://pypi.org/project/nanoid-dictionary) +* [Ruby](https://github.com/radeno/nanoid.rb) +* [Rust](https://github.com/nikolay-govorov/nanoid) +* [Swift](https://github.com/antiflasher/NanoID) + +Also, [CLI tool] is available to generate IDs from a command line. + +[CLI tool]: https://github.com/twhitbeck/nanoid-cli + + +## API + +### Async + +To generate hardware random bytes, CPU will collect electromagnetic noise. +During the collection, CPU doesn’t work. + +If we will use asynchronous API for random generator, +another code could be executed during the entropy collection. + +```js +const nanoid = require('nanoid/async') + +async function createUser () { + user.id = await nanoid() +} +``` + +Unfortunately, you will not have any benefits in a browser, since Web Crypto API +doesn’t have asynchronous API. + + +### Non-Secure + +By default, Nano ID uses hardware random generator for security +and low collision probability. If you don’t need it, you can use +very fast non-secure generator. + +```js +const nonSecure = require('nanoid/non-secure') +const id = nonSecure() //=> "Uakgb_J5m9g-0JDMbcJqLJ" +``` + +Note that it is predictable and have bigger collision probability. + + +### Custom Alphabet or Length + +If you want to change the ID's alphabet or length +you can use the low-level `generate` module. + +```js +const generate = require('nanoid/generate') +model.id = generate('1234567890abcdef', 10) //=> "4f90d13a42" +``` + +Check the safety of your custom alphabet and ID length +in our [ID collision probability] calculator. +You can find popular alphabets in [`nanoid-dictionary`]. + +Alphabet must contain 256 symbols or less. +Otherwise, the generator will not be secure. + +Asynchronous and non-secure API is also available: + +```js +const generate = require('nanoid/async/generate') +async function createUser () { + user.id = await generate('1234567890abcdef', 10) +} +``` + +```js +const generate = require('nanoid/non-secure/generate') + +user.id = generate('1234567890abcdef', 10) +``` + +[ID collision probability]: https://alex7kom.github.io/nano-nanoid-cc/ +[`nanoid-dictionary`]: https://github.com/CyberAP/nanoid-dictionary + + +### Custom Random Bytes Generator + +You can replace the default safe random generator using the `format` module. +For instance, to use a seed-based generator. 
+ +```js +const format = require('nanoid/format') + +function random (size) { + const result = [] + for (let i = 0; i < size; i++) { + result.push(randomByte()) + } + return result +} + +format(random, "abcdef", 10) //=> "fbaefaadeb" +``` + +`random` callback must accept the array size and return an array +with random numbers. + +If you want to use the same URL-friendly symbols with `format`, +you can get the default alphabet from the `url` file. + +```js +const url = require('nanoid/url') +format(random, url, 10) //=> "93ce_Ltuub" +``` + +Asynchronous API is also available: + +```js +const format = require('nanoid/async/format') +const url = require('nanoid/url') + +function random (size) { + return new Promise(…) +} + +async function createUser () { + user.id = await format(random, url, 10) +} +``` diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/format.browser.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/format.browser.js new file mode 100644 index 000000000..6d47a66c6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/format.browser.js @@ -0,0 +1,43 @@ +// This file replaces `async/format.js` in bundlers like webpack or Rollup, +// according to `browser` config in `package.json`. + +module.exports = function (random, alphabet, size) { + // We can’t use bytes bigger than the alphabet. To make bytes values closer + // to the alphabet, we apply bitmask on them. We look for the closest + // `2 ** x - 1` number, which will be bigger than alphabet size. If we have + // 30 symbols in the alphabet, we will take 31 (00011111). + // We do not use faster Math.clz32, because it is not available in browsers. + var mask = (2 << Math.log(alphabet.length - 1) / Math.LN2) - 1 + // Bitmask is not a perfect solution (in our example it will pass 31 bytes, + // which is bigger than the alphabet). As a result, we will need more bytes, + // than ID size, because we will refuse bytes bigger than the alphabet. + + // Every hardware random generator call is costly, + // because we need to wait for entropy collection. This is why often it will + // be faster to ask for few extra bytes in advance, to avoid additional calls. + + // Here we calculate how many random bytes should we call in advance. + // It depends on ID length, mask / alphabet size and magic number 1.6 + // (which was selected according benchmarks). + + // -~f => Math.ceil(f) if n is float number + // -~i => i + 1 if n is integer number + var step = -~(1.6 * mask * size / alphabet.length) + + function tick (id) { + return random(step).then(function (bytes) { + // Compact alternative for `for (var i = 0; i < step; i++)` + var i = step + while (i--) { + // If random byte is bigger than alphabet even after bitmask, + // we refuse it by `|| ''`. + id += alphabet[bytes[i] & mask] || '' + // More compact than `id.length + 1 === size` + if (id.length === +size) return id + } + return tick(id) + }) + } + + return tick('') +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/format.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/format.js new file mode 100644 index 000000000..856b5f181 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/format.js @@ -0,0 +1,72 @@ +/** + * Secure random string generator with custom alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param {asyncGenerator} random The random bytes generator. 
+ * @param {string} alphabet Symbols to be used in new random string. + * @param {size} size The number of symbols in new random string. + * + * @return {Promise} Promise with random string. + * + * @example + * const formatAsync = require('nanoid/async/format') + * + * function random (size) { + * const result = [] + * for (let i = 0; i < size; i++) { + * result.push(randomByte()) + * } + * return Promise.resolve(result) + * } + * + * formatAsync(random, "abcdef", 5).then(id => { + * model.id = id //=> "fbaef" + * }) + * + * @name formatAsync + * @function + */ +module.exports = function (random, alphabet, size) { + // We can’t use bytes bigger than the alphabet. To make bytes values closer + // to the alphabet, we apply bitmask on them. We look for the closest + // `2 ** x - 1` number, which will be bigger than alphabet size. If we have + // 30 symbols in the alphabet, we will take 31 (00011111). + var mask = (2 << 31 - Math.clz32((alphabet.length - 1) | 1)) - 1 + // Bitmask is not a perfect solution (in our example it will pass 31 bytes, + // which is bigger than the alphabet). As a result, we will need more bytes, + // than ID size, because we will refuse bytes bigger than the alphabet. + + // Every hardware random generator call is costly, + // because we need to wait for entropy collection. This is why often it will + // be faster to ask for few extra bytes in advance, to avoid additional calls. + + // Here we calculate how many random bytes should we call in advance. + // It depends on ID length, mask / alphabet size and magic number 1.6 + // (which was selected according benchmarks). + var step = Math.ceil(1.6 * mask * size / alphabet.length) + + function tick (id) { + return random(step).then(function (bytes) { + // Compact alternative for `for (var i = 0; i < step; i++)` + var i = step + while (i--) { + // If random byte is bigger than alphabet even after bitmask, + // we refuse it by `|| ''`. + id += alphabet[bytes[i] & mask] || '' + // More compact than `id.length + 1 === size` + if (id.length === +size) return id + } + return tick(id) + }) + } + + return tick('') +} + +/** + * @callback asyncGenerator + * @param {number} bytes The number of bytes to generate. + * @return {Promise} Promise with array of random bytes. + */ diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/generate.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/generate.js new file mode 100644 index 000000000..b98162b2f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/generate.js @@ -0,0 +1,24 @@ +var random = require('./random') +var format = require('./format') + +/** + * Low-level function to change alphabet and ID size. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param {string} alphabet Symbols to be used in ID. + * @param {number} size The number of symbols in ID. + * + * @return {Promise} Promise with random string. 
+ * + * @example + * const generateAsync = require('nanoid/async/generate') + * generateAsync('0123456789абвгдеё', 5).then(id => { + * model.id = id //=> "8ё56а" + * }) + * + * @name generateAsync + * @function + */ +module.exports = format.bind(null, random) diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/index.browser.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/index.browser.js new file mode 100644 index 000000000..6489e9368 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/index.browser.js @@ -0,0 +1,37 @@ +// This file replaces `async/index.js` in bundlers like webpack or Rollup, +// according to `browser` config in `package.json`. + +var crypto = self.crypto || self.msCrypto + +// This alphabet uses a-z A-Z 0-9 _- symbols. +// Symbols are generated for smaller size. +// -_zyxwvutsrqponmlkjihgfedcba9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA +var url = '-_' +// Loop from 36 to 0 (from z to a and 9 to 0 in Base36). +var i = 36 +while (i--) { + // 36 is radix. Number.prototype.toString(36) returns number + // in Base36 representation. Base36 is like hex, but it uses 0–9 and a-z. + url += i.toString(36) +} +// Loop from 36 to 10 (from Z to A in Base36). +i = 36 +while (i-- - 10) { + url += i.toString(36).toUpperCase() +} + +module.exports = function (size) { + var id = '' + var bytes = crypto.getRandomValues(new Uint8Array(size || 21)) + i = size || 21 + + // Compact alternative for `for (var i = 0; i < size; i++)` + while (i--) { + // We can’t use bytes bigger than the alphabet. 63 is 00111111 bitmask. + // This mask reduces random byte 0-255 to 0-63 values. + // There is no need in `|| ''` and `* 1.6` hacks in here, + // because bitmask trim bytes exact to alphabet size. + id += url[bytes[i] & 63] + } + return Promise.resolve(id) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/index.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/index.js new file mode 100644 index 000000000..2a4379ac8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/index.js @@ -0,0 +1,37 @@ +var random = require('./random') +var url = require('../url') + +/** + * Generate secure URL-friendly unique ID. Non-blocking version. + * + * By default, ID will have 21 symbols to have a collision probability similar + * to UUID v4. + * + * @param {number} [size=21] The number of symbols in ID. + * + * @return {Promise} Promise with random string. + * + * @example + * const nanoidAsync = require('nanoid/async') + * nanoidAsync.then(id => { + * model.id = id + * }) + * + * @name async + * @function + */ +module.exports = function (size) { + size = size || 21 + return random(size).then(function (bytes) { + var id = '' + // Compact alternative for `for (var i = 0; i < size; i++)` + while (size--) { + // We can’t use bytes bigger than the alphabet. 63 is 00111111 bitmask. + // This mask reduces random byte 0-255 to 0-63 values. + // There is no need in `|| ''` and `* 1.6` hacks in here, + // because bitmask trim bytes exact to alphabet size. 
+ id += url[bytes[size] & 63] + } + return id + }) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.browser.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.browser.js new file mode 100644 index 000000000..286803cac --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.browser.js @@ -0,0 +1,8 @@ +// This file replaces `async/random.js` in bundlers like webpack or Rollup, +// according to `browser` config in `package.json`. + +var crypto = self.crypto || self.msCrypto + +module.exports = function (bytes) { + return Promise.resolve(crypto.getRandomValues(new Uint8Array(bytes))) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.js new file mode 100644 index 000000000..666db92dc --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.js @@ -0,0 +1,31 @@ +var crypto = require('crypto') + +if (crypto.randomFill) { + // `crypto.randomFill()` is a little fatser than `crypto.randomBytes()`, + // because we can use faster `Buffer.allocUnsafe()`. + module.exports = function (bytes) { + return new Promise(function (resolve, reject) { + // `Buffer.allocUnsafe()` faster because it don’t clean memory. + // We do not need it, since we will fill memory with new bytes anyway. + crypto.randomFill(Buffer.allocUnsafe(bytes), function (err, buf) { + if (err) { + reject(err) + } else { + resolve(buf) + } + }) + }) + } +} else { + module.exports = function (bytes) { + return new Promise(function (resolve, reject) { + crypto.randomBytes(bytes, function (err, buf) { + if (err) { + reject(err) + } else { + resolve(buf) + } + }) + }) + } +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.rn.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.rn.js new file mode 100644 index 000000000..62f421c60 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/async/random.rn.js @@ -0,0 +1,14 @@ +var random +try { + random = require('expo-random') +} catch (e) { + throw new Error( + 'React-Native does not have a built-in secure random generator. ' + + 'Install `expo-random` locally or ' + + 'if you don’t need unpredictable IDs, you can use `nanoid/non-secure`.' + ) +} + +module.exports = function (bytes) { + return random.getRandomBytesAsync(bytes) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/format.browser.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/format.browser.js new file mode 100644 index 000000000..56bec3f82 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/format.browser.js @@ -0,0 +1,40 @@ +// This file replaces `format.js` in bundlers like webpack or Rollup, +// according to `browser` config in `package.json`. + +module.exports = function (random, alphabet, size) { + // We can’t use bytes bigger than the alphabet. To make bytes values closer + // to the alphabet, we apply bitmask on them. We look for the closest + // `2 ** x - 1` number, which will be bigger than alphabet size. If we have + // 30 symbols in the alphabet, we will take 31 (00011111). + // We do not use faster Math.clz32, because it is not available in browsers. + var mask = (2 << Math.log(alphabet.length - 1) / Math.LN2) - 1 + // Bitmask is not a perfect solution (in our example it will pass 31 bytes, + // which is bigger than the alphabet). 
As a result, we will need more bytes, + // than ID size, because we will refuse bytes bigger than the alphabet. + + // Every hardware random generator call is costly, + // because we need to wait for entropy collection. This is why often it will + // be faster to ask for few extra bytes in advance, to avoid additional calls. + + // Here we calculate how many random bytes should we call in advance. + // It depends on ID length, mask / alphabet size and magic number 1.6 + // (which was selected according benchmarks). + + // -~f => Math.ceil(f) if n is float number + // -~i => i + 1 if n is integer number + var step = -~(1.6 * mask * size / alphabet.length) + var id = '' + + while (true) { + var bytes = random(step) + // Compact alternative for `for (var i = 0; i < step; i++)` + var i = step + while (i--) { + // If random byte is bigger than alphabet even after bitmask, + // we refuse it by `|| ''`. + id += alphabet[bytes[i] & mask] || '' + // More compact than `id.length + 1 === size` + if (id.length === +size) return id + } + } +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/format.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/format.js new file mode 100644 index 000000000..1e4ebb235 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/format.js @@ -0,0 +1,67 @@ +/** + * Secure random string generator with custom alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param {generator} random The random bytes generator. + * @param {string} alphabet Symbols to be used in new random string. + * @param {size} size The number of symbols in new random string. + * + * @return {string} Random string. + * + * @example + * const format = require('nanoid/format') + * + * function random (size) { + * const result = [] + * for (let i = 0; i < size; i++) { + * result.push(randomByte()) + * } + * return result + * } + * + * format(random, "abcdef", 5) //=> "fbaef" + * + * @name format + * @function + */ +module.exports = function (random, alphabet, size) { + // We can’t use bytes bigger than the alphabet. To make bytes values closer + // to the alphabet, we apply bitmask on them. We look for the closest + // `2 ** x - 1` number, which will be bigger than alphabet size. If we have + // 30 symbols in the alphabet, we will take 31 (00011111). + var mask = (2 << 31 - Math.clz32((alphabet.length - 1) | 1)) - 1 + // Bitmask is not a perfect solution (in our example it will pass 31 bytes, + // which is bigger than the alphabet). As a result, we will need more bytes, + // than ID size, because we will refuse bytes bigger than the alphabet. + + // Every hardware random generator call is costly, + // because we need to wait for entropy collection. This is why often it will + // be faster to ask for few extra bytes in advance, to avoid additional calls. + + // Here we calculate how many random bytes should we call in advance. + // It depends on ID length, mask / alphabet size and magic number 1.6 + // (which was selected according benchmarks). + var step = Math.ceil(1.6 * mask * size / alphabet.length) + var id = '' + + while (true) { + var bytes = random(step) + // Compact alternative for `for (var i = 0; i < step; i++)` + var i = step + while (i--) { + // If random byte is bigger than alphabet even after bitmask, + // we refuse it by `|| ''`. 
+ id += alphabet[bytes[i] & mask] || '' + // More compact than `id.length + 1 === size` + if (id.length === +size) return id + } + } +} + +/** + * @callback generator + * @param {number} bytes The number of bytes to generate. + * @return {number[]} Random bytes. + */ diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/generate.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/generate.js new file mode 100644 index 000000000..e85517c87 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/generate.js @@ -0,0 +1,22 @@ +var random = require('./random') +var format = require('./format') + +/** + * Low-level function to change alphabet and ID size. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param {string} alphabet Symbols to be used in ID. + * @param {number} size The number of symbols in ID. + * + * @return {string} Unique ID. + * + * @example + * const generate = require('nanoid/generate') + * model.id = generate('0123456789абвгдеё', 5) //=> "8ё56а" + * + * @name generate + * @function + */ +module.exports = format.bind(null, random) diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/index.browser.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/index.browser.js new file mode 100644 index 000000000..6b2ee1ca6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/index.browser.js @@ -0,0 +1,54 @@ +// This file replaces `index.js` in bundlers like webpack or Rollup, +// according to `browser` config in `package.json`. + +if (process.env.NODE_ENV !== 'production') { + // All bundlers will remove this block in production bundle + if (typeof navigator !== 'undefined' && navigator.product === 'ReactNative') { + throw new Error( + 'React Native does not have a built-in secure random generator. ' + + 'If you don’t need unpredictable IDs, you can use `nanoid/non-secure`. ' + + 'For secure ID install `expo-random` locally and use `nanoid/async`.' + ) + } + if (typeof self === 'undefined' || (!self.crypto && !self.msCrypto)) { + throw new Error( + 'Your browser does not have secure random generator. ' + + 'If you don’t need unpredictable IDs, you can use nanoid/non-secure.' + ) + } +} + +var crypto = self.crypto || self.msCrypto + +// This alphabet uses a-z A-Z 0-9 _- symbols. +// Symbols are generated for smaller size. +// -_zyxwvutsrqponmlkjihgfedcba9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA +var url = '-_' +// Loop from 36 to 0 (from z to a and 9 to 0 in Base36). +var i = 36 +while (i--) { + // 36 is radix. Number.prototype.toString(36) returns number + // in Base36 representation. Base36 is like hex, but it uses 0–9 and a-z. + url += i.toString(36) +} +// Loop from 36 to 10 (from Z to A in Base36). +i = 36 +while (i-- - 10) { + url += i.toString(36).toUpperCase() +} + +module.exports = function (size) { + var id = '' + var bytes = crypto.getRandomValues(new Uint8Array(size || 21)) + i = size || 21 + + // Compact alternative for `for (var i = 0; i < size; i++)` + while (i--) { + // We can’t use bytes bigger than the alphabet. 63 is 00111111 bitmask. + // This mask reduces random byte 0-255 to 0-63 values. + // There is no need in `|| ''` and `* 1.6` hacks in here, + // because bitmask trim bytes exact to alphabet size. 
+ id += url[bytes[i] & 63] + } + return id +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/index.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/index.js new file mode 100644 index 000000000..e81a5f73d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/index.js @@ -0,0 +1,34 @@ +var random = require('./random') +var url = require('./url') + +/** + * Generate secure URL-friendly unique ID. + * + * By default, ID will have 21 symbols to have a collision probability similar + * to UUID v4. + * + * @param {number} [size=21] The number of symbols in ID. + * + * @return {string} Random string. + * + * @example + * const nanoid = require('nanoid') + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * + * @name nanoid + * @function + */ +module.exports = function (size) { + size = size || 21 + var bytes = random(size) + var id = '' + // Compact alternative for `for (var i = 0; i < size; i++)` + while (size--) { + // We can’t use bytes bigger than the alphabet. 63 is 00111111 bitmask. + // This mask reduces random byte 0-255 to 0-63 values. + // There is no need in `|| ''` and `* 1.6` hacks in here, + // because bitmask trim bytes exact to alphabet size. + id += url[bytes[size] & 63] + } + return id +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/non-secure/generate.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/non-secure/generate.js new file mode 100644 index 000000000..e7893e868 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/non-secure/generate.js @@ -0,0 +1,26 @@ +/** + * Generate URL-friendly unique ID. This method use non-secure predictable + * random generator with bigger collision probability. + * + * @param {string} alphabet Symbols to be used in ID. + * @param {number} [size=21] The number of symbols in ID. + * + * @return {string} Random string. + * + * @example + * const nanoid = require('nanoid/non-secure') + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * + * @name nonSecure + * @function + */ +module.exports = function (alphabet, size) { + size = size || 21 + var id = '' + // Compact alternative for `for (var i = 0; i < size; i++)` + while (size--) { + // `| 0` is compact and faster alternative for `Math.floor()` + id += alphabet[Math.random() * alphabet.length | 0] + } + return id +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/non-secure/index.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/non-secure/index.js new file mode 100644 index 000000000..8691fe08c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/non-secure/index.js @@ -0,0 +1,42 @@ +// This alphabet uses a-z A-Z 0-9 _- symbols. +// Symbols are generated for smaller size. +// -_zyxwvutsrqponmlkjihgfedcba9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA +var url = '-_' +// Loop from 36 to 0 (from z to a and 9 to 0 in Base36). +var i = 36 +while (i--) { + // 36 is radix. Number.prototype.toString(36) returns number + // in Base36 representation. Base36 is like hex, but it uses 0–9 and a-z. + url += i.toString(36) +} +// Loop from 36 to 10 (from Z to A in Base36). +i = 36 +while (i-- - 10) { + url += i.toString(36).toUpperCase() +} + +/** + * Generate URL-friendly unique ID. This method use non-secure predictable + * random generator with bigger collision probability. + * + * @param {number} [size=21] The number of symbols in ID. + * + * @return {string} Random string. 
+ * + * @example + * const nanoid = require('nanoid/non-secure') + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * + * @name nonSecure + * @function + */ +module.exports = function (size) { + var id = '' + i = size || 21 + // Compact alternative for `for (var i = 0; i < size; i++)` + while (i--) { + // `| 0` is compact and faster alternative for `Math.floor()` + id += url[Math.random() * 64 | 0] + } + return id +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/package.json b/sdklab/meantimerecovery/aedes/node_modules/nanoid/package.json new file mode 100644 index 000000000..caf35ee44 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/package.json @@ -0,0 +1,68 @@ +{ + "_from": "nanoid@^2.1.0", + "_id": "nanoid@2.1.11", + "_inBundle": false, + "_integrity": "sha512-s/snB+WGm6uwi0WjsZdaVcuf3KJXlfGl2LcxgwkEwJF0D/BWzVWAZW/XY4bFaiR7s0Jk3FPvlnepg1H1b1UwlA==", + "_location": "/nanoid", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "nanoid@^2.1.0", + "name": "nanoid", + "escapedName": "nanoid", + "rawSpec": "^2.1.0", + "saveSpec": null, + "fetchSpec": "^2.1.0" + }, + "_requiredBy": [ + "/shortid" + ], + "_resolved": "https://registry.npmjs.org/nanoid/-/nanoid-2.1.11.tgz", + "_shasum": "ec24b8a758d591561531b4176a01e3ab4f0f0280", + "_spec": "nanoid@^2.1.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\shortid", + "author": { + "name": "Andrey Sitnik", + "email": "andrey@sitnik.ru" + }, + "browser": { + "./index.js": "./index.browser.js", + "./format.js": "./format.browser.js", + "./random.js": "./random.browser.js", + "./async/index.js": "./async/index.browser.js", + "./async/format.js": "./async/format.browser.js", + "./async/random.js": "./async/random.browser.js" + }, + "bugs": { + "url": "https://github.com/ai/nanoid/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "A tiny (119 bytes), secure URL-friendly unique string ID generator", + "eslintIgnore": [ + "test/demo/build" + ], + "homepage": "https://github.com/ai/nanoid#readme", + "keywords": [ + "uuid", + "random", + "id", + "url" + ], + "license": "MIT", + "name": "nanoid", + "react-native": { + "./async/random.js": "./async/random.rn.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ai/nanoid.git" + }, + "sharec": { + "config": "@logux/sharec-config", + "version": "0.5.6" + }, + "sideEffects": false, + "version": "2.1.11" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/random.browser.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/random.browser.js new file mode 100644 index 000000000..ab7398a8b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/random.browser.js @@ -0,0 +1,5 @@ +var crypto = self.crypto || self.msCrypto + +module.exports = function (bytes) { + return crypto.getRandomValues(new Uint8Array(bytes)) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/random.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/random.js new file mode 100644 index 000000000..088504e9a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/random.js @@ -0,0 +1,19 @@ +var crypto = require('crypto') + +if (crypto.randomFillSync) { + // We reuse buffers with the same size to avoid memory fragmentations + // for better performance + var buffers = { } + module.exports = function (bytes) { + var buffer = buffers[bytes] + if (!buffer) { + // 
`Buffer.allocUnsafe()` faster because it don’t clean memory. + // We do not need it, since we will fill memory with new bytes anyway. + buffer = Buffer.allocUnsafe(bytes) + if (bytes <= 255) buffers[bytes] = buffer + } + return crypto.randomFillSync(buffer) + } +} else { + module.exports = crypto.randomBytes +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/nanoid/url.js b/sdklab/meantimerecovery/aedes/node_modules/nanoid/url.js new file mode 100644 index 000000000..d00bae8fd --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/nanoid/url.js @@ -0,0 +1,27 @@ +// This alphabet uses a-z A-Z 0-9 _- symbols. +// Despite the fact the source code is quite long, its entropy +// is low and there are lots of duplicates - just what compressors +// like GZIP and Brotli likes the best. + +/** + * URL safe symbols. + * + * @name url + * @type {string} + * + * @example + * const url = require('nanoid/url') + * generate(url, 10) //=> "Uakgb_J5m9" + */ + +// This alphabet uses a-z A-Z 0-9 _- symbols. +// Symbols are generated for smaller size. +// -_zyxwvutsrqponmlkjihgfedcba9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA +module.exports = '-_' +var i = 36 +while (i--) { + // 36 is radix. Number.prototype.toString(36) returns number + // in Base36 representation. Base36 is like hex, but it uses 0–9 and a-z. + module.exports += i.toString(36) + i > 9 && (module.exports += i.toString(36).toUpperCase()) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/once/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/once/LICENSE new file mode 100644 index 000000000..19129e315 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/once/README.md b/sdklab/meantimerecovery/aedes/node_modules/once/README.md new file mode 100644 index 000000000..1f1ffca93 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether you function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. 
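+
+For example, a minimal illustration of `fn.called` and `fn.value` (a toy
+doubling function, shown here only to make the behaviour concrete):
+
+```javascript
+var once = require('once')
+
+var double = once(function (n) { return n * 2 })
+double(21)  // => 42; double.called is now true and double.value is 42
+double(100) // => still 42; the wrapped function is not called again
+```
+
+A fuller example, using the same `called` flag with streams: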
+ +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors. + +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(msg)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time +greet(null, once.strict(msg)) +``` diff --git a/sdklab/meantimerecovery/aedes/node_modules/once/once.js b/sdklab/meantimerecovery/aedes/node_modules/once/once.js new file mode 100644 index 000000000..235406736 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/once/package.json b/sdklab/meantimerecovery/aedes/node_modules/once/package.json new file mode 100644 index 000000000..7f46c1ff9 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/once/package.json @@ -0,0 +1,66 @@ +{ + "_from": "once@^1.4.0", + "_id": "once@1.4.0", + "_inBundle": false, + "_integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "_location": "/once", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "once@^1.4.0", + "name": "once", + "escapedName": "once", + "rawSpec": "^1.4.0", + "saveSpec": null, + "fetchSpec": "^1.4.0" + }, + "_requiredBy": [ + "/end-of-stream" + ], + "_resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "_shasum": "583b1aa775961d4b113ac17d9c50baef9dd76bd1", + "_spec": "once@^1.4.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\end-of-stream", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/once/issues" + }, + "bundleDependencies": false, + "dependencies": { + "wrappy": "1" + }, + "deprecated": false, + "description": "Run a function exactly one time", + "devDependencies": { + "tap": "^7.0.1" + }, + "directories": { + "test": "test" + }, + "files": [ + "once.js" + ], + "homepage": "https://github.com/isaacs/once#readme", + "keywords": [ + "once", + "function", + "one", + "single" + ], + "license": "ISC", + "main": "once.js", + "name": "once", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "1.4.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/index.js b/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/index.js new file mode 100644 index 000000000..3eecf1148 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/index.js @@ -0,0 +1,45 @@ +'use strict'; + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + diff --git a/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/license.md b/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/license.md new file mode 100644 index 000000000..c67e3532b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/package.json b/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/package.json new file mode 100644 index 000000000..00b6b4e4f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/package.json @@ -0,0 +1,51 @@ +{ + "_from": "process-nextick-args@^2.0.1", + "_id": "process-nextick-args@2.0.1", + "_inBundle": false, + "_integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "_location": "/process-nextick-args", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "process-nextick-args@^2.0.1", + "name": "process-nextick-args", + "escapedName": "process-nextick-args", + "rawSpec": "^2.0.1", + "saveSpec": null, + "fetchSpec": "^2.0.1" + }, + "_requiredBy": [ + "/from2/readable-stream", + "/mqtt-packet" + ], + "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "_shasum": "7820d9b16120cc55ca9ae7792680ae7dba6d7fe2", + "_spec": "process-nextick-args@^2.0.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\mqtt-packet", + "author": "", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "process.nextTick but always with args", + "devDependencies": { + "tap": "~0.2.6" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "license": "MIT", + "main": "index.js", + "name": "process-nextick-args", + "repository": { + "type": "git", + "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "2.0.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/readme.md b/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/readme.md new file mode 100644 index 000000000..ecb432c9b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var pna = require('process-nextick-args'); + +pna.nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/LICENSE new file mode 100644 index 000000000..bf8f3ecaa --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/LICENSE @@ -0,0 +1,8 @@ +Copyright 2019 moznion + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/README.md b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/README.md new file mode 100644 index 000000000..050a684d7 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/README.md @@ -0,0 +1,125 @@ +proxy-protocol-js [![CircleCI](https://circleci.com/gh/moznion/proxy-protocol-js.svg?style=svg)](https://circleci.com/gh/moznion/proxy-protocol-js) [![codecov](https://codecov.io/gh/moznion/proxy-protocol-js/branch/master/graph/badge.svg)](https://codecov.io/gh/moznion/proxy-protocol-js) [![NPM](https://nodei.co/npm/proxy-protocol-js.png?compact=true)](https://nodei.co/npm/proxy-protocol-js/) +== + +A [PROXY protocol](http://www.haproxy.org/download/1.8/doc/proxy-protocol.txt) builder and parser for JavaScript. + +Features +-- + +- Supports the features + - building PROXY protocol payload + - parsing PROXY protocol payload + - identifying the PROXY protocol version +- Supports both of the version: V1 and V2 protocol +- Also supports TypeScript +- It doesn't requre the extra dependencies + +Usage +-- + +See also [examples](./example) and TSDoc. 
+ +### Build (and identity the protocol version) + +#### V1 protocol + +```JavaScript +const proxyProtocol = require('proxy-protocol-js'); + +const src = new proxyProtocol.Peer('127.0.0.1', 12345); +const dst = new proxyProtocol.Peer('192.0.2.1', 54321); +const protocolText = new proxyProtocol.V1ProxyProtocol( + proxyProtocol.INETProtocol.TCP4, + src, + dst, +).build(); +console.log(protocolText); // => PROXY TCP4 127.0.0.1 192.0.2.1 12345 54321\r\n + +const identifiedProtocolVersion = proxyProtocol.ProxyProtocolIdentifier.identify(protocolText); +console.log(identifiedProtocolVersion); // => proxyProtocol.ProxyProtocolVersion.V1 (= 0xx10) +``` + +#### V2 protocol + +```JavaScript +const proxyProtocol = require('proxy-protocol-js'); + +const proto = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.LOCAL, + proxyProtocol.TransportProtocol.DGRAM, + new proxyProtocol.IPv4ProxyAddress( + proxyProtocol.IPv4Address.createFrom([127, 0, 0, 1]), + 12345, + proxyProtocol.IPv4Address.createFrom([192, 0, 2, 1]), + 54321, + ), +).build(); +console.log(proto); + +const identifiedProtocolVersion = proxyProtocol.ProxyProtocolIdentifier.identify(proto); +console.log(identifiedProtocolVersion); // => proxyProtocol.ProxyProtocolVersion.V2 (= 0x20) +``` + +### Parse + +#### V1 protocol + +```JavaScript +const proxyProtocol = require('proxy-protocol'); + +const protocolText = 'PROXY TCP4 127.0.0.1 192.0.2.1 12345 54321\r\n'; +const proto = proxyProtocol.V1ProxyProtocol.parse(protocolText); +console.log(proto); +// => V1ProxyProtocol { +// inetProtocol: 'TCP4', +// source: Host { ipAddress: '127.0.0.1', port: 12345 }, +// destination: Host { ipAddress: '192.0.2.1', port: 54321 }, +// data: '' } +``` + +#### V2 protocol + +```JavaScript +const proxyProtocol = require('proxy-protocol-js'); + +const protoBin = new Uint8Array([13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, 32, 18, 0, 12, 127, 0, 0, 1, 192, 0, 2, 1, 48, 57, 212, 49]); +const proto = proxyProtocol.V2ProxyProtocol.parse(protoBin); +console.log(proto); +// => V2ProxyProtocol { +// command: 0, +// transportProtocol: 2, +// proxyAddress: +// IPv4ProxyAddress { +// sourceAddress: IPv4Address { address: [Array] }, +// sourcePort: 12345, +// destinationAddress: IPv4Address { address: [Array] }, +// destinationPort: 54321 }, +// data: Uint8Array [], +// addressFamilyType: 16 }` +``` + +Performance +-- + +The result of the comparison between this library (`proxy-protocol-js`) and [proxy-protocol](https://www.npmjs.com/package/proxy-protocol) is here: + +``` +proxy-protocol.parse x 246,423 ops/sec ±3.10% (32 runs sampled) +proxy-protocol-js.parse x 481,388 ops/sec ±5.32% (69 runs sampled) +Fastest is proxy-protocol-js.parse +``` + +(moreover, `proxy-protocol-js`'s benchmark contains unnecessary dummy codes for fairness) + +This benchmark run on the node v10.15.3 and the code is [here](./bench). 
+ +Author +-- + +moznion () + +License +-- + +[MIT](./LICENSE) + diff --git a/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/building/v1.js b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/building/v1.js new file mode 100644 index 000000000..6af2e83c1 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/building/v1.js @@ -0,0 +1,13 @@ +const proxyProtocol = require('proxy-protocol-js'); + +const src = new proxyProtocol.Peer('127.0.0.1', 12345); +const dst = new proxyProtocol.Peer('192.0.2.1', 54321); +const protocolText = new proxyProtocol.V1ProxyProtocol( + proxyProtocol.INETProtocol.TCP4, + src, + dst, +).build(); +console.log(protocolText); // => PROXY TCP4 127.0.0.1 192.0.2.1 12345 54321\r\n + +const identifiedProtocolVersion = proxyProtocol.ProxyProtocolIdentifier.identify(protocolText); +console.log(identifiedProtocolVersion); // => proxyProtocol.ProxyProtocolVersion.V1 (= 0xx10) diff --git a/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/building/v2.js b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/building/v2.js new file mode 100644 index 000000000..1adbbb4e8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/building/v2.js @@ -0,0 +1,16 @@ +const proxyProtocol = require('proxy-protocol-js'); + +const proto = new proxyProtocol.V2ProxyProtocol( + proxyProtocol.Command.LOCAL, + proxyProtocol.TransportProtocol.DGRAM, + new proxyProtocol.IPv4ProxyAddress( + proxyProtocol.IPv4Address.createFrom([127, 0, 0, 1]), + 12345, + proxyProtocol.IPv4Address.createFrom([192, 0, 2, 1]), + 54321, + ), +).build(); +console.log(proto); + +const identifiedProtocolVersion = proxyProtocol.ProxyProtocolIdentifier.identify(proto); +console.log(identifiedProtocolVersion); // => proxyProtocol.ProxyProtocolVersion.V2 (= 0x20) diff --git a/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/parsing/v1.js b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/parsing/v1.js new file mode 100644 index 000000000..1a7c314ee --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/parsing/v1.js @@ -0,0 +1,11 @@ +const proxyProtocol = require('proxy-protocol-js'); + +const protocolText = 'PROXY TCP4 127.0.0.1 192.0.2.1 12345 54321\r\n'; +const proto = proxyProtocol.V1ProxyProtocol.parse(protocolText); +console.log(proto); +// => V1ProxyProtocol { +// inetProtocol: 'TCP4', +// source: Host { ipAddress: '127.0.0.1', port: 12345 }, +// destination: Host { ipAddress: '192.0.2.1', port: 54321 }, +// data: '' } + diff --git a/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/parsing/v2.js b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/parsing/v2.js new file mode 100644 index 000000000..9e80eddb1 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/example/parsing/v2.js @@ -0,0 +1,16 @@ +const proxyProtocol = require('proxy-protocol-js'); + +const protoBin = new Uint8Array([13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, 32, 18, 0, 12, 127, 0, 0, 1, 192, 0, 2, 1, 48, 57, 212, 49]); +const proto = proxyProtocol.V2ProxyProtocol.parse(protoBin); +console.log(proto); +// => V2ProxyProtocol { +// command: 0, +// transportProtocol: 2, +// proxyAddress: +// IPv4ProxyAddress { +// sourceAddress: IPv4Address { address: [Array] }, +// sourcePort: 12345, +// destinationAddress: IPv4Address { address: [Array] }, +// destinationPort: 54321 }, 
+// data: Uint8Array [], +// addressFamilyType: 16 } diff --git a/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/package.json b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/package.json new file mode 100644 index 000000000..08154caca --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/proxy-protocol-js/package.json @@ -0,0 +1,87 @@ +{ + "_from": "proxy-protocol-js@^4.0.4", + "_id": "proxy-protocol-js@4.0.6", + "_inBundle": false, + "_integrity": "sha512-SjXgyBmr0dBbKUZ0jOzp0N9urTcDOI1cd1oEeE43W1vG4OMwYYLggCRcMJ0zv0gdTA8Imb4cAiYj8Ic/PWv1mw==", + "_location": "/proxy-protocol-js", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "proxy-protocol-js@^4.0.4", + "name": "proxy-protocol-js", + "escapedName": "proxy-protocol-js", + "rawSpec": "^4.0.4", + "saveSpec": null, + "fetchSpec": "^4.0.4" + }, + "_requiredBy": [ + "/aedes", + "/aedes-protocol-decoder" + ], + "_resolved": "https://registry.npmjs.org/proxy-protocol-js/-/proxy-protocol-js-4.0.6.tgz", + "_shasum": "6214013341c41ccf2e0ba20a2f5afbf2cdb032de", + "_spec": "proxy-protocol-js@^4.0.4", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "moznion", + "email": "moznion@gmail.com" + }, + "bugs": { + "url": "https://github.com/moznion/proxy-protocol-js/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "A PROXY protocol builder and parser for JavaScript", + "devDependencies": { + "@types/jest": "^27.4.0", + "@types/node": "^17.0.8", + "@typescript-eslint/eslint-plugin": "^5.9.1", + "@typescript-eslint/parser": "^5.9.1", + "codecov": "^3.3.0", + "eslint": "^8.6.0", + "eslint-plugin-import": "^2.25.4", + "eslint-plugin-prefer-arrow": "^1.2.3", + "jest": "^27.4.7", + "node-notifier": ">=8.0.1", + "prettier": "^2.5.1", + "ts-jest": "^27.1.2", + "typescript": "^4.5.4" + }, + "directories": { + "lib": "lib", + "example": "example" + }, + "files": [ + "lib", + "!lib/.gitkeep", + "example", + "!example/.gitignore" + ], + "homepage": "https://github.com/moznion/proxy-protocol-js", + "keywords": [ + "PROXY-protocol" + ], + "license": "MIT", + "main": "lib/proxy-protocol.js", + "name": "proxy-protocol-js", + "repository": { + "type": "git", + "url": "git+https://github.com/moznion/proxy-protocol-js.git" + }, + "scripts": { + "build": "tsc", + "check": "npm run lint && npm run check-illegal-fmt && npm run test", + "check-illegal-fmt": "npm run prettier -- --list-different", + "clean": "rm -rf lib/*", + "fix": "npm run fmt && npm run fix-lint", + "fix-lint": "npm run lint -- --fix", + "fmt": "npm run prettier -- --write", + "lint": "eslint . 
--ext .ts", + "prettier": "find src -name \"*.ts\" | xargs prettier --config .prettierrc", + "release": "npm run check && npm run build && npm publish", + "test": "jest --maxWorkers=10" + }, + "types": "lib/proxy-protocol.d.ts", + "version": "4.0.6" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/qlobber/Gruntfile.js b/sdklab/meantimerecovery/aedes/node_modules/qlobber/Gruntfile.js new file mode 100644 index 000000000..36c8e89d5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/qlobber/Gruntfile.js @@ -0,0 +1,74 @@ +/*jslint node: true */ +"use strict"; + +module.exports = function (grunt) +{ + grunt.initConfig( + { + jshint: { + all: [ 'Gruntfile.js', 'index.js', 'lib/*.js', 'aedes/**/*.js', 'test/*.js', 'bench/**/*.js' ], + options: { + esversion: 9, + node: true + } + }, + + mochaTest: { + src: 'test/*.js' + }, + + apidox: { + input: 'lib/qlobber.js', + output: 'README.md', + fullSourceDescription: true, + extraHeadingLevels: 1 + }, + + exec: { + cover: { + cmd: "./node_modules/.bin/nyc -x Gruntfile.js -x 'test/**' node --expose-gc ./node_modules/.bin/grunt test" + }, + + cover_report: { + cmd: './node_modules/.bin/nyc report -r lcov' + }, + + cover_check: { + cmd: './node_modules/.bin/nyc check-coverage --statements 100 --branches 100 --functions 100 --lines 100' + }, + + coveralls: { + cmd: 'cat coverage/lcov.info | coveralls' + }, + + bench: { + cmd: './node_modules/.bin/bench -c 20000 -i bench/options/default.js,bench/options/dedup.js,bench/options/mapval.js,bench/options/default-native.js,bench/options/dedup-native.js,bench/options/default-cache-splits.js -k options bench/add bench/add_match_remove bench/match bench/match_search bench/test' + }, + + 'bench-check': { + cmd: './node_modules/.bin/bench -c 20000 -i bench/options/check-default.js,bench/options/check-dedup.js,bench/options/check-mapval.js,bench/options/check-default-native.js,bench/options/check-dedup-native.js -k options bench/add bench/add_match_remove bench/match bench/match_search bench/test' + }, + + 'bench-many': { + cmd: './node_modules/.bin/bench -c 1 -i bench/options/default.js,bench/options/dedup.js,bench/options/mapval.js,bench/options/default-native.js,bench/options/dedup-native.js,bench/options/default-cache-splits.js -k options bench/add_many bench/add_shortcut_many bench/match_many bench/match_search_many bench/test_many' + } + } + }); + + grunt.loadNpmTasks('grunt-contrib-jshint'); + grunt.loadNpmTasks('grunt-mocha-test'); + grunt.loadNpmTasks('grunt-apidox'); + grunt.loadNpmTasks('grunt-exec'); + + grunt.registerTask('lint', 'jshint'); + grunt.registerTask('test', 'mochaTest'); + grunt.registerTask('docs', 'apidox'); + grunt.registerTask('coverage', ['exec:cover', + 'exec:cover_report', + 'exec:cover_check']); + grunt.registerTask('coveralls', 'exec:coveralls'); + grunt.registerTask('bench', ['exec:bench', + 'exec:bench-many']); + grunt.registerTask('bench-check', 'exec:bench-check'); + grunt.registerTask('default', ['lint', 'test']); +}; diff --git a/sdklab/meantimerecovery/aedes/node_modules/qlobber/LICENCE b/sdklab/meantimerecovery/aedes/node_modules/qlobber/LICENCE new file mode 100644 index 000000000..bfd8a7472 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/qlobber/LICENCE @@ -0,0 +1,19 @@ +Copyright (c) 2016 David Halls + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to 
use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is furnished +to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/qlobber/README.md b/sdklab/meantimerecovery/aedes/node_modules/qlobber/README.md new file mode 100644 index 000000000..a28c62740 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/qlobber/README.md @@ -0,0 +1,516 @@ +# qlobber   [![Build Status](https://travis-ci.org/davedoesdev/qlobber.png)](https://travis-ci.org/davedoesdev/qlobber) [![Coverage Status](https://coveralls.io/repos/davedoesdev/qlobber/badge.png?branch=master)](https://coveralls.io/r/davedoesdev/qlobber?branch=master) [![NPM version](https://badge.fury.io/js/qlobber.png)](http://badge.fury.io/js/qlobber) + +Node.js globbing for amqp-like topics. + +__Note:__ Version 5.0.0 adds async and worker thread support when used on Node 12+. + +Example: + +```javascript +var assert = require('assert'); +var Qlobber = require('qlobber').Qlobber; +var matcher = new Qlobber(); +matcher.add('foo.*', 'it matched!'); +assert.deepEqual(matcher.match('foo.bar'), ['it matched!']); +assert(matcher.test('foo.bar', 'it matched!')); +``` + +The API is described [here](#tableofcontents). + +qlobber is implemented using a trie, as described in the RabbitMQ blog posts [here](http://www.rabbitmq.com/blog/2010/09/14/very-fast-and-scalable-topic-routing-part-1/) and [here](http://www.rabbitmq.com/blog/2011/03/28/very-fast-and-scalable-topic-routing-part-2/). 
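+
+As a rough illustration of the idea (a toy sketch only, not qlobber's actual
+implementation), such a matcher stores each word of a topic pattern as a node
+in a nested object and walks it word by word, branching on `*` and `#`:
+
+```javascript
+// Toy trie matcher: '.' separates words, '*' matches exactly one word,
+// '#' matches zero or more words. Values may repeat, as with Qlobber.match.
+function addToTrie (trie, topic, val) {
+  var node = trie;
+  topic.split('.').forEach(function (word) {
+    node = node[word] = node[word] || {};
+  });
+  (node._values = node._values || []).push(val);
+}
+
+function matchTrie (node, words, i, results) {
+  if (!node) { return results; }
+  if (i === words.length) {
+    if (node._values) { results.push.apply(results, node._values); }
+    // a trailing '#' also matches zero remaining words
+    if (node['#'] && node['#']._values) {
+      results.push.apply(results, node['#']._values);
+    }
+    return results;
+  }
+  matchTrie(node[words[i]], words, i + 1, results); // exact word
+  matchTrie(node['*'], words, i + 1, results);      // '*': exactly one word
+  if (node['#']) {                                  // '#': zero or more words
+    for (var j = i; j <= words.length; j++) {
+      matchTrie(node['#'], words, j, results);
+    }
+  }
+  return results;
+}
+
+var trie = {};
+addToTrie(trie, 'foo.*', 'it matched!');
+addToTrie(trie, 'lazy.#', 'Q2');
+console.log(matchTrie(trie, 'foo.bar'.split('.'), 0, []));          // [ 'it matched!' ]
+console.log(matchTrie(trie, 'lazy.pink.rabbit'.split('.'), 0, [])); // [ 'Q2' ]
+```
+
+The real library adds de-duplication, caching, removal and trie restoration on
+top of this basic structure; see the API section below.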
+ +## Installation + +```shell +npm install qlobber +``` + +## Another Example + +A more advanced example using topics from the [RabbitMQ topic tutorial](http://www.rabbitmq.com/tutorials/tutorial-five-python.html): + +```javascript +var assert = require('assert'); +var Qlobber = require('qlobber').Qlobber; +var matcher = new Qlobber(); +matcher.add('*.orange.*', 'Q1'); +matcher.add('*.*.rabbit', 'Q2'); +matcher.add('lazy.#', 'Q2'); +assert.deepEqual(['quick.orange.rabbit', + 'lazy.orange.elephant', + 'quick.orange.fox', + 'lazy.brown.fox', + 'lazy.pink.rabbit', + 'quick.brown.fox', + 'orange', + 'quick.orange.male.rabbit', + 'lazy.orange.male.rabbit'].map(function (topic) + { + return matcher.match(topic).sort(); + }), + [['Q1', 'Q2'], + ['Q1', 'Q2'], + ['Q1'], + ['Q2'], + ['Q2', 'Q2'], + [], + [], + [], + ['Q2']]); +``` + +## Async Example + +Same as the first example but using `await`: + +```javascript +const assert = require('assert'); +const { Qlobber } = require('qlobber').set_native(require('qlobber-native')); +const matcher = new Qlobber.nativeString(); + +(async () => { + await matcher.addP('foo.*', 'it matched!'); + assert.deepEqual(await matcher.matchP('foo.bar'), ['it matched!']); + assert(await matcher.testP('foo.bar', 'it matched!')); +})(); +``` + +## Worker Thread Example + +Same again but the matching is done on a separate thread: + +``` +const { Qlobber } = require('qlobber').set_native(require('qlobber-native')); +const { + Worker, isMainThread, parentPort, workerData +} = require('worker_threads'); + +if (isMainThread) { + const matcher = new Qlobber.nativeString(); + matcher.add('foo.*', 'it matched!'); + const worker = new Worker(__filename, { + workerData: matcher.state_address + }); + worker.on('message', msg => { + const assert = require('assert'); + assert.deepEqual(msg, [['it matched!'], true]); + }); +} else { + const matcher = new Qlobber.nativeString(workerData); + parentPort.postMessage([ + matcher.match('foo.bar'), + matcher.test('foo.bar', 'it matched!') + ]); +} +``` + +## Licence + +[MIT](LICENCE) + +## Tests + +qlobber passes the [RabbitMQ topic tests](https://github.com/rabbitmq/rabbitmq-server/blob/master/src/rabbit_tests.erl) (I converted them from Erlang to Javascript). + +To run the tests: + +```shell +npm test +``` + +## Lint + +```shell +npm run lint +``` + +## Code Coverage + +```shell +npm run coverage +``` + +[Istanbul](http://gotwarlost.github.io/istanbul/) results are available [here](http://rawgit.davedoesdev.com/davedoesdev/qlobber/master/coverage/lcov-report/index.html). + +Coveralls page is [here](https://coveralls.io/r/davedoesdev/qlobber). + +## Benchmarks + +```shell +grunt bench +``` + +qlobber is also benchmarked in [ascoltatori](https://github.com/mcollina/ascoltatori). + +## Native Qlobbers + +The Javascript Qlobbers don't support asynchronous calls and worker threads +because Javascript values can't be shared between threads. + +In order to support asynchronous calls and worker threads, a native C++ +implementation is available in the +[qlobber-native](https://www.npmjs.com/package/qlobber-native) module. 
+ +Add qlobber-native as a dependency to your project and then add it to qlobber +like this: + +```javascript +require('qlobber').set_native(require('qlobber-native')); +``` + +Note that [`set_native`](#set_nativeqlobber_native) returns qlobber's exports so you can do something like +this: + +```javascript +const { Qlobber } = require('qlobber').set_native(require('qlobber-native')); +``` + +Note that qlobber-native requires Gnu C++ version 9+ and Boost 1.70+. + +Once's you've added it to qlobber, the following classes will be available +alongside the Javascript classes: + +- `Qlobber.nativeString` +- `Qlobber.nativeNumber` +- `QlobberDedup.nativeString` +- `QlobberDedup.nativeNumber` +- `QlobberTrue.native` + +They can only hold values of a single type (currently strings or numbers). + +### Asynchronous calls + +The native classes support the same API as the Javascript classes but have the +following additional methods: + +- `addP` +- `removeP` +- `matchP` +- `match_iterP` +- `testP` +- `clearP` +- `visitP` +- `get_restorerP` + +They correspond to their namesakes but return Promises. Note that `match_iterP` +and `visitP` return async iterators. + +# API + +_Source: [lib/qlobber.js](lib/qlobber.js)_ + + + +- [Qlobber](#qlobberoptions) +- [Qlobber.prototype.add](#qlobberprototypeaddtopic-val) +- [Qlobber.prototype.remove](#qlobberprototyperemovetopic-val) +- [Qlobber.prototype.match](#qlobberprototypematchtopic) +- [Qlobber.prototype.match_iter](#qlobberprototypematch_iter) +- [Qlobber.prototype.test](#qlobberprototypetesttopic-val) +- [Qlobber.prototype.test_values](#qlobberprototypetest_valuesvals-val) +- [Qlobber.prototype.clear](#qlobberprototypeclear) +- [Qlobber.prototype.visit](#qlobberprototypevisit) +- [Qlobber.prototype.get_restorer](#qlobberprototypeget_restoreroptions) +- [QlobberDedup](#qlobberdedupoptions) +- [QlobberDedup.prototype.test_values](#qlobberdedupprototypetest_valuesvals-val) +- [QlobberDedup.prototype.match](#qlobberdedupprototypematchtopic) +- [QlobberTrue](#qlobbertrueoptions) +- [QlobberTrue.prototype.test_values](#qlobbertrueprototypetest_values) +- [QlobberTrue.prototype.match](#qlobbertrueprototypematchtopic) +- [VisitorStream](#visitorstreamqlobber) +- [RestorerStream](#restorerstreamqlobber) +- [set_native](#set_nativeqlobber_native) + +## Qlobber([options]) + +> Creates a new qlobber. + +**Parameters:** + +- `{Object} [options]` Configures the qlobber. Use the following properties: + - `{String} separator` The character to use for separating words in topics. Defaults to '.'. MQTT uses '/' as the separator, for example. + + - `{String} wildcard_one` The character to use for matching exactly one _non-empty_ word in a topic. Defaults to '*'. MQTT uses '+', for example. + + - `{String} wildcard_some` The character to use for matching zero or more words in a topic. Defaults to '#'. MQTT uses '#' too. + + - `{Boolean} match_empty_levels` If `true` then `wilcard_one` also matches an empty word in a topic. Defaults to `false`. + + - `{Boolean|Map} cache_adds` Whether to cache topics when adding topic matchers. This will make adding multiple matchers for the same topic faster at the cost of extra memory usage. Defaults to `false`. If you supply a `Map` then it will be used to cache the topics (use this to enumerate all the topics in the qlobber). + + - `{Integer} cache_splits` How many `topic.split` results to cache. When you pass in a topic, it has to be split on the `separator`. 
Caching the results will make using the same topics multiple times faster at the cost of extra memory usage. Defaults to `0` (no caching). The number of split results cached is limited by the value you pass here. + + - `{Integer} max_words` Maximum number of words to allow in a topic. Defaults to 100. + + - `{Integer} max_wildcard_somes` Maximum number of `wildcard_some` words in a topic. Defaults to 3. + +Go: [TOC](#tableofcontents) + + + +## Qlobber.prototype.add(topic, val) + +> Add a topic matcher to the qlobber. + +Note you can match more than one value against a topic by calling `add` multiple times with the same topic and different values. + +**Parameters:** + +- `{String} topic` The topic to match against. +- `{Any} val` The value to return if the topic is matched. + +**Return:** + +`{Qlobber}` The qlobber (for chaining). + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## Qlobber.prototype.remove(topic, [val]) + +> Remove a topic matcher from the qlobber. + +**Parameters:** + +- `{String} topic` The topic that's being matched against. +- `{Any} [val]` The value that's being matched. If you don't specify `val` then all matchers for `topic` are removed. + +**Return:** + +`{Qlobber}` The qlobber (for chaining). + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## Qlobber.prototype.match(topic) + +> Match a topic. + +**Parameters:** + +- `{String} topic` The topic to match against. + +**Return:** + +`{Array}` List of values that matched the topic. This may contain duplicates. Use a [`QlobberDedup`](#qlobberdedupoptions) if you don't want duplicates. + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## Qlobber.prototype.match_iter() + +> Match a topic, returning the matches one at a time. + +**Return:** + +`{Iterator}` An iterator on the values that match the topic. There may be duplicate values, even if you use a [`QlobberDedup`](#qlobberdedupoptions). + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## Qlobber.prototype.test(topic, val) + +> Test whether a topic match contains a value. Faster than calling [`match`](#qlobberprototypematchtopic) and searching the result for the value. Values are tested using [`test_values`](#qlobberprototypetest_valuesvals-val). + +**Parameters:** + +- `{String} topic` The topic to match against. +- `{Any} val` The value being tested for. + +**Return:** + +`{Boolean}` Whether matching against `topic` contains `val`. + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## Qlobber.prototype.test_values(vals, val) + +> Test whether values found in a match contain a value passed to [`test`](#qlobberprototypetesttopic-val). You can override this to provide a custom implementation. Defaults to using `indexOf`. + +**Parameters:** + +- `{Array} vals` The values found while matching. +- `{Any} val` The value being tested for. + +**Return:** + +`{Boolean}` Whether `vals` contains `val`. + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## Qlobber.prototype.clear() + +> Reset the qlobber. + +Removes all topic matchers from the qlobber. + +**Return:** + +`{Qlobber}` The qlobber (for chaining). + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## Qlobber.prototype.visit() + +> Visit each node in the qlobber's trie in turn. + +**Return:** + +`{Iterator}` An iterator on the trie. 
The iterator returns objects which, if fed (in the same order) to the function returned by [`get_restorer`](#qlobberprototypeget_restoreroptions) on a different qlobber, will build that qlobber's trie to the same state. The objects can be serialized using `JSON.stringify`, _if_ the values you store in the qlobber are also serializable. + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## Qlobber.prototype.get_restorer([options]) + +> Get a function which can restore the qlobber's trie to a state you retrieved +by calling [`visit`](#qlobberprototypevisit) on this or another qlobber. + +**Parameters:** + +- `{Object} [options]` Options for restoring the trie. + - `{Boolean} cache_adds` Whether to cache topics when rebuilding the trie. This only applies if you also passed `cache_adds` as true in the [constructor](#qlobberoptions). + +**Return:** + +`{Function}` Function to call in order to rebuild the qlobber's trie. You should call this repeatedly with the objects you received from a call to [`visit`](#qlobberprototypevisit). If you serialized the objects, remember to deserialize them first (e.g. with `JSON.parse`)! + +Go: [TOC](#tableofcontents) | [Qlobber.prototype](#toc_qlobberprototype) + +## QlobberDedup([options]) + +> Creates a new de-duplicating qlobber. + +Inherits from [`Qlobber`](#qlobberoptions). + +**Parameters:** + +- `{Object} [options]` Same options as [Qlobber](#qlobberoptions). + +Go: [TOC](#tableofcontents) + + + +## QlobberDedup.prototype.test_values(vals, val) + +> Test whether values found in a match contain a value passed to [`test`](#qlobberprototypetesttopic_val). You can override this to provide a custom implementation. Defaults to using `has`. + +**Parameters:** + +- `{Set} vals` The values found while matching ([ES6 Set](http://www.ecma-international.org/ecma-262/6.0/#sec-set-objects)). +- `{Any} val` The value being tested for. + +**Return:** + +`{Boolean}` Whether `vals` contains `val`. + +Go: [TOC](#tableofcontents) | [QlobberDedup.prototype](#toc_qlobberdedupprototype) + +## QlobberDedup.prototype.match(topic) + +> Match a topic. + +**Parameters:** + +- `{String} topic` The topic to match against. + +**Return:** + +`{Set}` [ES6 Set](http://www.ecma-international.org/ecma-262/6.0/#sec-set-objects) of values that matched the topic. + +Go: [TOC](#tableofcontents) | [QlobberDedup.prototype](#toc_qlobberdedupprototype) + +## QlobberTrue([options]) + +> Creates a new qlobber which only stores the value `true`. + +Whatever value you [`add`](#qlobberprototypeaddtopic-val) to this qlobber +(even `undefined`), a single, de-duplicated `true` will be stored. Use this +qlobber if you only need to test whether topics match, not about the values +they match to. + +Inherits from [`Qlobber`](#qlobberoptions). + +**Parameters:** + +- `{Object} [options]` Same options as [Qlobber](#qlobberoptions). + +Go: [TOC](#tableofcontents) + + + +## QlobberTrue.prototype.test_values() + +> This override of [`test_values`](#qlobberprototypetest_valuesvals-val) always +returns `true`. When you call [`test`](#qlobberprototypetesttopic-val) on a +`QlobberTrue` instance, the value you pass is ignored since it only cares +whether a topic is matched. + +**Return:** + +`{Boolean}` Always `true`. + +Go: [TOC](#tableofcontents) | [QlobberTrue.prototype](#toc_qlobbertrueprototype) + +## QlobberTrue.prototype.match(topic) + +> Match a topic. 
+ +Since `QlobberTrue` only cares whether a topic is matched and not about values +it matches to, this override of [`match`](#qlobberprototypematchtopic) just +calls [`test`](#qlobberprototypetesttopic-val) (with value `undefined`). + +**Parameters:** + +- `{String} topic` The topic to match against. + +**Return:** + +`{Boolean}` Whether the `QlobberTrue` instance matches the topic. + +Go: [TOC](#tableofcontents) | [QlobberTrue.prototype](#toc_qlobbertrueprototype) + +## VisitorStream(qlobber) + +> Creates a new [`Readable`](https://nodejs.org/dist/latest-v8.x/docs/api/stream.html#stream_class_stream_readable) stream, in object mode, which calls [`visit`](#qlobberprototypevisit) on a qlobber to generate its data. + +You could [`pipe`](https://nodejs.org/dist/latest-v8.x/docs/api/stream.html#stream_readable_pipe_destination_options) this to a [`JSONStream.stringify`](https://github.com/dominictarr/JSONStream#jsonstreamstringifyopen-sep-close) stream, for instance, to serialize the qlobber to JSON. See [this test](test/json.js#L14) for an example. + +Inherits from [`Readable`](https://nodejs.org/dist/latest-v8.x/docs/api/stream.html#stream_class_stream_readable). + +**Parameters:** + +- `{Qlobber} qlobber` The qlobber to call [`visit`](#qlobberprototypevisit) on. + +Go: [TOC](#tableofcontents) + +## RestorerStream(qlobber) + +> Creates a new [`Writable`](https://nodejs.org/dist/latest-v8.x/docs/api/stream.html#stream_class_stream_writable) stream, in object mode, which passes data written to it into the function returned by calling [`get_restorer`](#qlobberprototypeget_restoreroptions) on a qlobber. + +You could [`pipe`](https://nodejs.org/dist/latest-v8.x/docs/api/stream.html#stream_readable_pipe_destination_options) a [`JSONStream.parse`](https://github.com/dominictarr/JSONStream#jsonstreamparsepath) stream to this, for instance, to deserialize the qlobber from JSON. See [this test](test/json.js#L33) for an example. + +Inherits from [`Writable`](https://nodejs.org/dist/latest-v8.x/docs/api/stream.html#stream_class_stream_writable). + +**Parameters:** + +- `{Qlobber} qlobber` The qlobber to call [`get_restorer`](#qlobberprototypeget_restoreroptions) on. + +Go: [TOC](#tableofcontents) + +## set_native(qlobber_native) + +> Add [qlobber-native](https://www.npmjs.com/package/qlobber-native) to qlobber. + +**Parameters:** + +- `{Object} qlobber_native` The qlobber-native module, obtained using `require('qlobber-native')`. 
+ +**Return:** + +`{Object}` The qlobber exports with the following native classes added: + + - `Qlobber.nativeString` + - `Qlobber.nativeNumber` + - `QlobberDedup.nativeString` + - `QlobberDedup.nativeNumber` + - `QlobberTrue.native` + +Go: [TOC](#tableofcontents) + +_—generated by [apidox](https://github.com/codeactual/apidox)—_ diff --git a/sdklab/meantimerecovery/aedes/node_modules/qlobber/aedes/qlobber-sub.js b/sdklab/meantimerecovery/aedes/node_modules/qlobber/aedes/qlobber-sub.js new file mode 100644 index 000000000..3359387fb --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/qlobber/aedes/qlobber-sub.js @@ -0,0 +1,140 @@ +/*jslint node: true */ +"use strict"; + +var util = require('util'), + qlobber = require('..'), + Qlobber = qlobber.Qlobber; + +function QlobberSub (options) +{ + Qlobber.call(this, options); + this.subscriptionsCount = 0; +} + +util.inherits(QlobberSub, Qlobber); + +QlobberSub.prototype._initial_value = function (val) +{ + this.subscriptionsCount += 1; + + let r = { + topic: val.topic, + clientMap: new Map().set(val.clientId, val.qos), + }; + + r[Symbol.iterator] = function* (topic) + { + if (topic === undefined) + { + for (let [clientId, qos] of r.clientMap) + { + yield { topic: r.topic, clientId, qos }; + } + } + else if (r.topic === topic) + { + for (let [clientId, qos] of r.clientMap) + { + yield { clientId, qos }; + } + } + }; + + return r; +}; + +QlobberSub.prototype._add_value = function (existing, val) +{ + var clientMap = existing.clientMap, + size = clientMap.size; + + clientMap.set(val.clientId, val.qos); + + if (clientMap.size > size) + { + this.subscriptionsCount += 1; + } +}; + +QlobberSub.prototype._add_values = function (dest, existing, topic) +{ + var clientIdAndQos; + if (topic === undefined) + { + for (clientIdAndQos of existing.clientMap) + { + dest.push( + { + clientId: clientIdAndQos[0], + topic: existing.topic, + qos: clientIdAndQos[1] + }); + } + } + else if (existing.topic === topic) + { + for (clientIdAndQos of existing.clientMap) + { + dest.push( + { + clientId: clientIdAndQos[0], + qos: clientIdAndQos[1] + }); + } + } +}; + +QlobberSub.prototype._remove_value = function (existing, val) +{ + var clientMap = existing.clientMap, + size_before = clientMap.size; + + clientMap.delete(val.clientId); + + var size_after = clientMap.size; + + if (size_after < size_before) + { + this.subscriptionsCount -= 1; + } + + return size_after === 0; +}; + +// Returns whether client is last subscriber to topic +QlobberSub.prototype.test_values = function (existing, val) +{ + var clientMap = existing.clientMap; + + return (existing.topic === val.topic) && + (clientMap.size === 1) && + clientMap.has(val.clientId); +}; + +QlobberSub.prototype.match = function (topic, ctx) +{ + return this._match([], 0, topic.split(this._separator), this._trie, ctx); +}; + +QlobberSub.prototype.clear = function () +{ + this.subscriptionsCount = 0; + return Qlobber.prototype.clear.call(this); +}; + +QlobberSub.set_native = function (qlobber_native) +{ + // wrap_native.js uses 'async *' which isn't available on Node 8 + try + { + const wrap_native = require('../lib/wrap_native.js'); + QlobberSub.native = wrap_native(qlobber_native.QlobberSub, QlobberSub); + } + catch (ex) + { + } + + return module.exports; +}; + +module.exports = QlobberSub; diff --git a/sdklab/meantimerecovery/aedes/node_modules/qlobber/index.js b/sdklab/meantimerecovery/aedes/node_modules/qlobber/index.js new file mode 100644 index 000000000..ed5d9db3e --- /dev/null +++ 
b/sdklab/meantimerecovery/aedes/node_modules/qlobber/index.js @@ -0,0 +1,3 @@ +/*jslint node: true*/ +"use strict"; +module.exports = require('./lib/qlobber'); diff --git a/sdklab/meantimerecovery/aedes/node_modules/qlobber/package.json b/sdklab/meantimerecovery/aedes/node_modules/qlobber/package.json new file mode 100644 index 000000000..198ba14d9 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/qlobber/package.json @@ -0,0 +1,79 @@ +{ + "_from": "qlobber@^5.0.3", + "_id": "qlobber@5.0.3", + "_inBundle": false, + "_integrity": "sha512-wW4GTZPePyh0RgOsM18oDyOUlXfurVRgoNyJfS+y7VWPyd0GYhQp5T2tycZFZjonH+hngxIfklGJhTP/ghidgQ==", + "_location": "/qlobber", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "qlobber@^5.0.3", + "name": "qlobber", + "escapedName": "qlobber", + "rawSpec": "^5.0.3", + "saveSpec": null, + "fetchSpec": "^5.0.3" + }, + "_requiredBy": [ + "/aedes-persistence", + "/mqemitter" + ], + "_resolved": "https://registry.npmjs.org/qlobber/-/qlobber-5.0.3.tgz", + "_shasum": "24728d6ba5382d502c7e09f6860b95a9c71615cd", + "_spec": "qlobber@^5.0.3", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes-persistence", + "author": { + "name": "David Halls", + "email": "dave@davedoesdev.com", + "url": "http://www.davedoesdev.com" + }, + "bugs": { + "url": "https://github.com/davedoesdev/qlobber/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Node.js globbing for amqp-like topics", + "devDependencies": { + "JSONStream": "^1.3.5", + "b": "git://github.com/davedoesdev/b.git", + "chai": "^4.2.0", + "coveralls": "^3.1.0", + "grunt": "^1.3.0", + "grunt-apidox": "^2.0.10", + "grunt-contrib-jshint": "^2.1.0", + "grunt-exec": "^3.0.0", + "grunt-mocha-test": "^0.13.3", + "mocha": "^8.1.3", + "nyc": "^15.1.0", + "stream-buffers": "^3.0.2" + }, + "directories": { + "test": "test", + "lib": "lib" + }, + "engines": { + "node": ">= 8" + }, + "homepage": "https://github.com/davedoesdev/qlobber", + "keywords": [ + "amqp", + "mqtt", + "rabbitmq", + "ascoltatore" + ], + "license": "MIT", + "main": "index.js", + "name": "qlobber", + "repository": { + "type": "git", + "url": "git+https://github.com/davedoesdev/qlobber.git" + }, + "scripts": { + "coverage": "(cd native && npm install && node-gyp rebuild --debug) && grunt lint coverage", + "coveralls": "grunt coveralls", + "lint": "grunt lint", + "test": "(cd native && npm install && node-gyp rebuild --debug) && node --expose-gc ./node_modules/.bin/grunt lint test" + }, + "version": "5.0.3" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/CONTRIBUTING.md b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 000000000..f478d58dc --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source 
license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/GOVERNANCE.md b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 000000000..16ffb93f2 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. 
There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. 
diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/LICENSE new file mode 100644 index 000000000..2873b3b2e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/README.md b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/README.md new file mode 100644 index 000000000..6f035ab16 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. 
+ +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/errors-browser.js b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/errors-browser.js new file mode 100644 index 000000000..fb8e73e18 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return 
"of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". 
Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/errors.js b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/errors.js new file mode 100644 index 000000000..8471526d6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/experimentalWarning.js b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 000000000..78e841495 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? 
emitExperimentalWarning + : noop; diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/package.json b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/package.json new file mode 100644 index 000000000..ec0e77d5f --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/package.json @@ -0,0 +1,99 @@ +{ + "_from": "readable-stream@^3.6.0", + "_id": "readable-stream@3.6.0", + "_inBundle": false, + "_integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "_location": "/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@^3.6.0", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "^3.6.0", + "saveSpec": null, + "fetchSpec": "^3.6.0" + }, + "_requiredBy": [ + "/aedes", + "/bl", + "/bulk-write-stream" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "_shasum": "337bbda3adc0706bd3e024426a286d4b4b2c9198", + "_spec": "readable-stream@^3.6.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "engines": { + "node": ">= 6" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "version": "3.6.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/readable-browser.js b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/readable-browser.js new file mode 100644 index 000000000..adbf60de8 --- /dev/null +++ 
b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/sdklab/meantimerecovery/aedes/node_modules/readable-stream/readable.js b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/readable.js new file mode 100644 index 000000000..9e0ca120d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/retimer/.travis.yml new file mode 100644 index 000000000..5197f30e5 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/.travis.yml @@ -0,0 +1,6 @@ +language: node_js +node_js: + - "6" + - "8" + - "10" + - "11" diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/retimer/LICENSE new file mode 100644 index 000000000..fbf3a01d8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/README.md b/sdklab/meantimerecovery/aedes/node_modules/retimer/README.md new file mode 100644 index 000000000..fa6019e4a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/README.md @@ -0,0 +1,61 @@ +# retimer  [![Build Status](https://travis-ci.org/mcollina/retimer.png)](https://travis-ci.org/mcollina/retimer) + +reschedulable setTimeout for you node needs. This library is built for +building a keep alive functionality across a large numbers of +clients/sockets. + +Rescheduling a 10000 functions 20 times with an interval of 50ms (see +`bench.js`), with 100 repetitions: + +* `benchSetTimeout*100: 36912.680ms` +* `benchRetimer*100: 33213.134ms` + +## Install + +``` +npm install retimer --save +``` + +## Example + +```js +var retimer = require('retimer') +var timer = retimer(function () { + throw new Error('this should never get called!') +}, 20) + +setTimeout(function () { + timer.reschedule(50) + setTimeout(function () { + timer.clear() + }, 10) +}, 10) +``` + +## API + +### retimer(callback, timeout, [...args]) + +Exactly like your beloved `setTimeout`. +Returns a `Retimer object` + +### timer.reschedule(timeout) + +Reschedule the timer, if the specified timeout comes __after__ the +original timeout. + +Returns true if successful, false otherwise + +### timer.clear() + +Clear the timer, like your beloved `clearTimeout`. + +## How it works + +Timers are stored in a Linked List in node.js, if you create a lot of +timers this Linked List becomes massive which makes __removing a timer an expensive operation__. +Retimer let the old timer run at its time, and schedule a new one accordingly. + +## License + +MIT diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/bench.js b/sdklab/meantimerecovery/aedes/node_modules/retimer/bench.js new file mode 100644 index 000000000..b9ea78e10 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/bench.js @@ -0,0 +1,65 @@ +'use strict' + +var bench = require('fastbench') +var retimer = require('./') +var max = 10000 + +function benchSetTimeout (done) { + var timers = new Array(max) + var completed = 0 + var toReschedule = 20 + + schedule() + + function complete () { + if (++completed === max) { + done() + } + } + + function schedule () { + for (var i = 0; i < max; i++) { + if (timers[i]) { + clearTimeout(timers[i]) + } + timers[i] = setTimeout(complete, 50) + } + if (--toReschedule > 0) { + setTimeout(schedule, 10) + } + } +} + +function benchRetimer (done) { + var timers = new Array(max) + var completed = 0 + var toReschedule = 20 + + schedule() + + function complete () { + if (++completed === max) { + done() + } + } + + function schedule () { + for (var i = 0; i < max; i++) { + if (timers[i]) { + timers[i].reschedule(50) + } else { + timers[i] = retimer(complete, 50) + } + } + if (--toReschedule > 0) { + setTimeout(schedule, 10) + } + } +} + +var run = bench([ + benchSetTimeout, + benchRetimer +], 100) + +run(run) diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/package.json b/sdklab/meantimerecovery/aedes/node_modules/retimer/package.json new file mode 100644 index 000000000..84f70e2ef --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/package.json @@ -0,0 +1,68 @@ +{ + "_from": "retimer@^2.0.0", + "_id": "retimer@2.0.0", + "_inBundle": false, + "_integrity": "sha512-KLXY85WkEq2V2bKex/LOO1ViXVn2KGYe4PYysAdYdjmraYIUsVkXu8O4am+8+5UbaaGl1qho4aqAAPHNQ4GSbg==", + "_location": "/retimer", + "_phantomChildren": {}, 
+ "_requested": { + "type": "range", + "registry": true, + "raw": "retimer@^2.0.0", + "name": "retimer", + "escapedName": "retimer", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/retimer/-/retimer-2.0.0.tgz", + "_shasum": "e8bd68c5e5a8ec2f49ccb5c636db84c04063bbca", + "_spec": "retimer@^2.0.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "browser": { + "./time.js": "./time-browser.js" + }, + "bugs": { + "url": "https://github.com/mcollina/retimer/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Reschedulable Timer for your node needs", + "devDependencies": { + "fastbench": "^1.0.0", + "faucet": "0.0.1", + "pre-commit": "^1.0.10", + "standard": "^12.0.0", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/mcollina/retimer#readme", + "keywords": [ + "schedulable", + "reschedulable", + "timer", + "setTimeout" + ], + "license": "MIT", + "main": "retimer.js", + "name": "retimer", + "pre-commit": [ + "lint", + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/retimer.git" + }, + "scripts": { + "lint": "standard", + "test": "tape test.js | faucet" + }, + "version": "2.0.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/retimer.js b/sdklab/meantimerecovery/aedes/node_modules/retimer/retimer.js new file mode 100644 index 000000000..74608b429 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/retimer.js @@ -0,0 +1,63 @@ +'use strict' + +var getTime = require('./time') + +function Retimer (callback, timeout, args) { + var that = this + + this._started = getTime() + this._rescheduled = 0 + this._scheduled = timeout + this._args = args + + this._timer = setTimeout(timerWrapper, timeout) + + function timerWrapper () { + if (that._rescheduled > 0) { + that._scheduled = that._rescheduled - (getTime() - that._started) + that._timer = setTimeout(timerWrapper, that._scheduled) + that._rescheduled = 0 + } else { + callback.apply(null, that._args) + } + } +} + +Retimer.prototype.reschedule = function (timeout) { + var now = getTime() + if ((now + timeout) - (this._started + this._scheduled) < 0) { + return false + } else { + this._started = now + this._rescheduled = timeout + return true + } +} + +Retimer.prototype.clear = function () { + clearTimeout(this._timer) +} + +function retimer () { + if (typeof arguments[0] !== 'function') { + throw new Error('callback needed') + } + + if (typeof arguments[1] !== 'number') { + throw new Error('timeout needed') + } + + var args + + if (arguments.length > 0) { + args = new Array(arguments.length - 2) + + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i + 2] + } + } + + return new Retimer(arguments[0], arguments[1], args) +} + +module.exports = retimer diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/test.js b/sdklab/meantimerecovery/aedes/node_modules/retimer/test.js new file mode 100644 index 000000000..a91327e53 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/test.js @@ -0,0 +1,100 @@ +'use strict' + +var test = require('tape') +var retimer = require('./') + +test('schedule a callback', function (t) { + t.plan(1) + + var start = Date.now() + + retimer(function () { + t.ok(Date.now() - start >= 50, 'it was 
deferred ok!') + }, 50) +}) + +test('reschedule a callback', function (t) { + t.plan(2) + + var start = Date.now() + + var timer = retimer(function () { + t.ok(Date.now() - start >= 70, 'it was deferred ok!') + }, 50) + + setTimeout(function () { + t.equal(timer.reschedule(50), true, 'returns true') + }, 20) +}) + +test('reschedule multiple times', function (t) { + t.plan(1) + + var start = Date.now() + + var timer = retimer(function () { + t.ok(Date.now() - start >= 90, 'it was deferred ok!') + }, 50) + + setTimeout(function () { + timer.reschedule(50) + setTimeout(function () { + timer.reschedule(50) + }, 20) + }, 20) +}) + +test('clear a timer', function (t) { + t.plan(1) + + var timer = retimer(function () { + t.fail('the timer should never get called') + }, 20) + + timer.clear() + + setTimeout(function () { + t.pass('nothing happened') + }, 50) +}) + +test('clear a timer after a reschedule', function (t) { + t.plan(1) + + var timer = retimer(function () { + t.fail('the timer should never get called') + }, 20) + + setTimeout(function () { + timer.reschedule(50) + setTimeout(function () { + timer.clear() + }, 10) + }, 10) + + setTimeout(function () { + t.pass('nothing happened') + }, 50) +}) + +test('return false if rescheduled too early', function (t) { + t.plan(2) + + var start = Date.now() + + var timer = retimer(function () { + t.ok(Date.now() - start >= 50, 'it was deferred ok!') + }, 50) + + setTimeout(function () { + t.equal(timer.reschedule(10), false, 'return false') + }, 20) +}) + +test('pass arguments to the callback', function (t) { + t.plan(1) + + retimer(function (arg) { + t.equal(arg, 42, 'argument matches') + }, 50, 42) +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/time-browser.js b/sdklab/meantimerecovery/aedes/node_modules/retimer/time-browser.js new file mode 100644 index 000000000..3f7db7c37 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/time-browser.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = function getTime () { + return Date.now() +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/retimer/time.js b/sdklab/meantimerecovery/aedes/node_modules/retimer/time.js new file mode 100644 index 000000000..e7203e01e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/retimer/time.js @@ -0,0 +1,6 @@ +'use strict' + +module.exports = function getTime () { + var t = process.hrtime() + return Math.floor(t[0] * 1000 + t[1] / 1000000) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/.coveralls.yml b/sdklab/meantimerecovery/aedes/node_modules/reusify/.coveralls.yml new file mode 100644 index 000000000..359f68349 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/.coveralls.yml @@ -0,0 +1 @@ +repo_token: yIxhFqtaaz5iGVYfie9mODehFYogm8S8L diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/.travis.yml b/sdklab/meantimerecovery/aedes/node_modules/reusify/.travis.yml new file mode 100644 index 000000000..197047681 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/.travis.yml @@ -0,0 +1,28 @@ +language: node_js +sudo: false + +node_js: + - 9 + - 8 + - 7 + - 6 + - 5 + - 4 + - 4.0 + - iojs-v3 + - iojs-v2 + - iojs-v1 + - 0.12 + - 0.10 + +cache: + directories: + - node_modules + +after_script: +- npm run coverage + +notifications: + email: + on_success: never + on_failure: always diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/reusify/LICENSE new file mode 100644 index 
000000000..fbf3a01d8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/README.md b/sdklab/meantimerecovery/aedes/node_modules/reusify/README.md new file mode 100644 index 000000000..badcb7ccf --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/README.md @@ -0,0 +1,145 @@ +# reusify + +[![npm version][npm-badge]][npm-url] +[![Build Status][travis-badge]][travis-url] +[![Coverage Status][coveralls-badge]][coveralls-url] + +Reuse your objects and functions for maximum speed. This technique will +make any function run ~10% faster. You call your functions a +lot, and it adds up quickly in hot code paths. + +``` +$ node benchmarks/createNoCodeFunction.js +Total time 53133 +Total iterations 100000000 +Iteration/s 1882069.5236482036 + +$ node benchmarks/reuseNoCodeFunction.js +Total time 50617 +Total iterations 100000000 +Iteration/s 1975620.838848608 +``` + +The above benchmark uses fibonacci to simulate a real high-cpu load. +The actual numbers might differ for your use case, but the difference +should not. + +The benchmark was taken using Node v6.10.0. + +This library was extracted from +[fastparallel](http://npm.im/fastparallel). + +## Example + +```js +var reusify = require('reusify') +var fib = require('reusify/benchmarks/fib') +var instance = reusify(MyObject) + +// get an object from the cache, +// or creates a new one when cache is empty +var obj = instance.get() + +// set the state +obj.num = 100 +obj.func() + +// reset the state. 
+// if the state contains any external object +// do not use delete operator (it is slow) +// prefer set them to null +obj.num = 0 + +// store an object in the cache +instance.release(obj) + +function MyObject () { + // you need to define this property + // so V8 can compile MyObject into an + // hidden class + this.next = null + this.num = 0 + + var that = this + + // this function is never reallocated, + // so it can be optimized by V8 + this.func = function () { + if (null) { + // do nothing + } else { + // calculates fibonacci + fib(that.num) + } + } +} +``` + +The above example was intended for synchronous code, let's see async: +```js +var reusify = require('reusify') +var instance = reusify(MyObject) + +for (var i = 0; i < 100; i++) { + getData(i, console.log) +} + +function getData (value, cb) { + var obj = instance.get() + + obj.value = value + obj.cb = cb + obj.run() +} + +function MyObject () { + this.next = null + this.value = null + + var that = this + + this.run = function () { + asyncOperation(that.value, that.handle) + } + + this.handle = function (err, result) { + that.cb(err, result) + that.value = null + that.cb = null + instance.release(that) + } +} +``` + +Also note how in the above examples, the code, that consumes an istance of `MyObject`, +reset the state to initial condition, just before storing it in the cache. +That's needed so that every subsequent request for an instance from the cache, +could get a clean instance. + +## Why + +It is faster because V8 doesn't have to collect all the functions you +create. On a short-lived benchmark, it is as fast as creating the +nested function, but on a longer time frame it creates less +pressure on the garbage collector. + +## Other examples +If you want to see some complex example, checkout [middie](https://github.com/fastify/middie) and [steed](https://github.com/mcollina/steed). + +## Acknowledgements + +Thanks to [Trevor Norris](https://github.com/trevnorris) for +getting me down the rabbit hole of performance, and thanks to [Mathias +Buss](http://github.com/mafintosh) for suggesting me to share this +trick. 
+ +## License + +MIT + +[npm-badge]: https://badge.fury.io/js/reusify.svg +[npm-url]: https://badge.fury.io/js/reusify +[travis-badge]: https://api.travis-ci.org/mcollina/reusify.svg +[travis-url]: https://travis-ci.org/mcollina/reusify +[coveralls-badge]: https://coveralls.io/repos/mcollina/reusify/badge.svg?branch=master&service=github +[coveralls-url]: https://coveralls.io/github/mcollina/reusify?branch=master diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/createNoCodeFunction.js b/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/createNoCodeFunction.js new file mode 100644 index 000000000..ce1aac7b7 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/createNoCodeFunction.js @@ -0,0 +1,30 @@ +'use strict' + +var fib = require('./fib') +var max = 100000000 +var start = Date.now() + +// create a funcion with the typical error +// pattern, that delegates the heavy load +// to something else +function createNoCodeFunction () { + /* eslint no-constant-condition: "off" */ + var num = 100 + + ;(function () { + if (null) { + // do nothing + } else { + fib(num) + } + })() +} + +for (var i = 0; i < max; i++) { + createNoCodeFunction() +} + +var time = Date.now() - start +console.log('Total time', time) +console.log('Total iterations', max) +console.log('Iteration/s', max / time * 1000) diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/fib.js b/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/fib.js new file mode 100644 index 000000000..e22cc48de --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/fib.js @@ -0,0 +1,13 @@ +'use strict' + +function fib (num) { + var fib = [] + + fib[0] = 0 + fib[1] = 1 + for (var i = 2; i <= num; i++) { + fib[i] = fib[i - 2] + fib[i - 1] + } +} + +module.exports = fib diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/reuseNoCodeFunction.js b/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/reuseNoCodeFunction.js new file mode 100644 index 000000000..3358d6e50 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/benchmarks/reuseNoCodeFunction.js @@ -0,0 +1,38 @@ +'use strict' + +var reusify = require('../') +var fib = require('./fib') +var instance = reusify(MyObject) +var max = 100000000 +var start = Date.now() + +function reuseNoCodeFunction () { + var obj = instance.get() + obj.num = 100 + obj.func() + obj.num = 0 + instance.release(obj) +} + +function MyObject () { + this.next = null + var that = this + this.num = 0 + this.func = function () { + /* eslint no-constant-condition: "off" */ + if (null) { + // do nothing + } else { + fib(that.num) + } + } +} + +for (var i = 0; i < max; i++) { + reuseNoCodeFunction() +} + +var time = Date.now() - start +console.log('Total time', time) +console.log('Total iterations', max) +console.log('Iteration/s', max / time * 1000) diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/package.json b/sdklab/meantimerecovery/aedes/node_modules/reusify/package.json new file mode 100644 index 000000000..15cafdd15 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/package.json @@ -0,0 +1,75 @@ +{ + "_from": "reusify@^1.0.4", + "_id": "reusify@1.0.4", + "_inBundle": false, + "_integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "_location": "/reusify", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": 
"reusify@^1.0.4", + "name": "reusify", + "escapedName": "reusify", + "rawSpec": "^1.0.4", + "saveSpec": null, + "fetchSpec": "^1.0.4" + }, + "_requiredBy": [ + "/aedes", + "/fastfall", + "/fastparallel" + ], + "_resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "_shasum": "90da382b1e126efc02146e90845a88db12925d76", + "_spec": "reusify@^1.0.4", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": "https://github.com/mcollina/reusify/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Reuse objects and functions with style", + "devDependencies": { + "coveralls": "^2.13.3", + "faucet": "0.0.1", + "istanbul": "^0.4.5", + "pre-commit": "^1.2.2", + "standard": "^10.0.3", + "tape": "^4.8.0" + }, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + }, + "homepage": "https://github.com/mcollina/reusify#readme", + "keywords": [ + "reuse", + "object", + "performance", + "function", + "fast" + ], + "license": "MIT", + "main": "reusify.js", + "name": "reusify", + "pre-commit": [ + "lint", + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/reusify.git" + }, + "scripts": { + "coverage": "npm run istanbul; cat coverage/lcov.info | coveralls", + "istanbul": "istanbul cover tape test.js", + "lint": "standard", + "test": "tape test.js | faucet" + }, + "version": "1.0.4" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/reusify.js b/sdklab/meantimerecovery/aedes/node_modules/reusify/reusify.js new file mode 100644 index 000000000..e6f36f3a8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/reusify.js @@ -0,0 +1,33 @@ +'use strict' + +function reusify (Constructor) { + var head = new Constructor() + var tail = head + + function get () { + var current = head + + if (current.next) { + head = current.next + } else { + head = new Constructor() + tail = head + } + + current.next = null + + return current + } + + function release (obj) { + tail.next = obj + tail = obj + } + + return { + get: get, + release: release + } +} + +module.exports = reusify diff --git a/sdklab/meantimerecovery/aedes/node_modules/reusify/test.js b/sdklab/meantimerecovery/aedes/node_modules/reusify/test.js new file mode 100644 index 000000000..929cfd719 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/reusify/test.js @@ -0,0 +1,66 @@ +'use strict' + +var test = require('tape') +var reusify = require('./') + +test('reuse objects', function (t) { + t.plan(6) + + function MyObject () { + t.pass('constructor called') + this.next = null + } + + var instance = reusify(MyObject) + var obj = instance.get() + + t.notEqual(obj, instance.get(), 'two instance created') + t.notOk(obj.next, 'next must be null') + + instance.release(obj) + + // the internals keeps a hot copy ready for reuse + // putting this one back in the queue + instance.release(instance.get()) + + // comparing the old one with the one we got + // never do this in real code, after release you + // should never reuse that instance + t.equal(obj, instance.get(), 'instance must be reused') +}) + +test('reuse more than 2 objects', function (t) { + function MyObject () { + t.pass('constructor called') + this.next = null + } + + var instance = reusify(MyObject) + var obj = instance.get() + var obj2 = instance.get() + var obj3 = instance.get() + + 
t.notOk(obj.next, 'next must be null') + t.notOk(obj2.next, 'next must be null') + t.notOk(obj3.next, 'next must be null') + + t.notEqual(obj, obj2) + t.notEqual(obj, obj3) + t.notEqual(obj3, obj2) + + instance.release(obj) + instance.release(obj2) + instance.release(obj3) + + // skip one + instance.get() + + var obj4 = instance.get() + var obj5 = instance.get() + var obj6 = instance.get() + + t.equal(obj4, obj) + t.equal(obj5, obj2) + t.equal(obj6, obj3) + t.end() +}) diff --git a/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/LICENSE new file mode 100644 index 000000000..0c068ceec --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/README.md b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/README.md new file mode 100644 index 000000000..e9a81afd0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. 
You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. 
+ +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. + +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. 
Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) 
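+
+A minimal sketch of the failure mode, assuming a Node.js version with the old
+`new Buffer(number)` behavior (illustrative only, not from the upstream README):
+the `toHex` helper shown earlier silently returns uninitialized process memory
+when it is handed a number instead of a string.
+
+```js
+var Buffer = require('buffer').Buffer  // the built-in, unsafe constructor
+
+// Same helper as above -- it assumes `str` is always a string
+function toHex (str) {
+  return new Buffer(str).toString('hex')
+}
+
+console.log(toHex('ab'))   // '6162' -- the UTF-8 bytes of the string, as expected
+console.log(toHex(1000))   // 2,000 hex characters of uninitialized process memory
+```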
+ +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. 
Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. 
Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/index.d.ts b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/index.d.ts new file mode 100644 index 000000000..e9fed809a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + 
readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. 
+ * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/index.js b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/index.js new file mode 100644 index 000000000..f8d3ec988 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/index.js @@ -0,0 +1,65 @@ +/*! safe-buffer. MIT License. 
Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/package.json b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/package.json new file mode 100644 index 000000000..f8241eab4 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/safe-buffer/package.json @@ -0,0 +1,76 @@ +{ + "_from": "safe-buffer@~5.2.0", + "_id": "safe-buffer@5.2.1", + "_inBundle": false, + "_integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "_location": "/safe-buffer", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "safe-buffer@~5.2.0", + "name": "safe-buffer", + "escapedName": "safe-buffer", + "rawSpec": "~5.2.0", + "saveSpec": null, + "fetchSpec": "~5.2.0" + }, + "_requiredBy": [ + "/string_decoder" + ], + "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "_shasum": "1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6", + "_spec": "safe-buffer@~5.2.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\string_decoder", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Safer Node.js Buffer API", + "devDependencies": { + "standard": "*", + "tape": "^5.0.0" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": 
"MIT", + "main": "index.js", + "name": "safe-buffer", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "types": "index.d.ts", + "version": "5.2.1" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/shortid/CHANGELOG.md b/sdklab/meantimerecovery/aedes/node_modules/shortid/CHANGELOG.md new file mode 100644 index 000000000..19c20fa1a --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/shortid/CHANGELOG.md @@ -0,0 +1,32 @@ +# Change Log +This project adheres to [Semantic Versioning](http://semver.org/). + +## 2.2.16 +* Add deprecation notice to project readme (by Russell Dunphy). + +## 2.2.15 +* Update `REDUCE_TIME` to generate smaller ID. + +## 2.2.14 +* Reduce npm package size by `clean-publish`. + +## 2.2.13 +* Fix `browser` config for webpack. + +## 2.2.12 +* Fix cluster worker ID for non-Node environments. + +## 2.2.11 +* Fix React Native support. + +## 2.2.10 +* Fix theoretical ID limit length. + +## 2.2.9 +* Fix symbols probability uniformity by using Nano ID (by @shashkovdanil). +* Improve ID generation perfomance. +* Fix `shortid.isValid` with custom alphabet. +* Improve `shortid.isValid` perfomance (by @s7b5en). + +## 2.2.8 +* Clean npm package from unnecessary files. diff --git a/sdklab/meantimerecovery/aedes/node_modules/shortid/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/shortid/LICENSE new file mode 100644 index 000000000..c472d1ece --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/shortid/LICENSE @@ -0,0 +1,34 @@ +Copyright (c) Dylan Greene +All rights reserved. + +MIT +no-false-attribs License + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +Distributions of all or part of the Software intended to be used +by the recipients as they would use the unmodified Software, +containing modifications that substantially alter, remove, or +disable functionality of the Software, outside of the documented +configuration mechanisms provided by the Software, shall be +modified such that the Original Author's bug reporting email +addresses and urls are either replaced with the contact information +of the parties responsible for the changes, or removed entirely. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/shortid/README.md b/sdklab/meantimerecovery/aedes/node_modules/shortid/README.md new file mode 100644 index 000000000..f5d5187ca --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/shortid/README.md @@ -0,0 +1,252 @@ +## shortid [![Build Status](http://img.shields.io/travis/dylang/shortid.svg)](https://travis-ci.org/dylang/shortid) [![shortid](http://img.shields.io/npm/dm/shortid.svg)](https://www.npmjs.org/package/shortid) + + + +> Amazingly short non-sequential url-friendly unique id generator. + +## **shortid is deprecated, because the architecture is unsafe. we instead recommend [Nano ID](https://github.com/ai/nanoid/), which has the advantage of also being significantly faster than shortid** + + + + +ShortId creates amazingly short non-sequential url-friendly unique ids. Perfect for url shorteners, MongoDB and Redis ids, and any other id users might see. + + * By default 7-14 url-friendly characters: `A-Z`, `a-z`, `0-9`, `_-` + * Supports `cluster` (automatically), custom seeds, custom alphabet. + * Can generate any number of ids without duplicates, even millions per day. + * Perfect for games, especially if you are concerned about cheating so you don't want an easily guessable id. + * Apps can be restarted any number of times without any chance of repeating an id. + * Popular replacement for Mongo ID/Mongoose ID. + * Works in Node, io.js, and web browsers. + * Includes [Mocha](http://mochajs.org/) tests. + +ShortId does not generate cryptographically secure ids, so don't rely on it to make IDs which are impossible to guess. + + +### Usage + +```js +const shortid = require('shortid'); + +console.log(shortid.generate()); +// PPBqWA9 +``` + +Mongoose Unique Id +```js +_id: { + 'type': String, + 'default': shortid.generate +}, +``` + + + +### Browser Compatibility + +The best way to use `shortid` in the browser is via [browserify](http://browserify.org/) or [webpack](http://webpack.github.io/). + +These tools will automatically only include the files necessary for browser compatibility. + +All tests will run in the browser as well: + +```bash +## build the bundle, then open Mocha in a browser to see the tests run. +$ grunt build open +``` + + + +### Example + +```bash +~/projects/shortid ❯ node examples/examples.js +eWRhpRV +23TplPdS +46Juzcyx +dBvJIh-H +2WEKaVNO +7oet_d9Z +dogPzIz8 +nYrnfYEv +a4vhAoFG +hwX6aOr7 +``` + + +#### Real World Examples + +`shortId` was created for Node Knockout 2011 winner for Most Fun [Doodle Or Die](http://doodleordie.com). +Millions of doodles have been saved with `shortId` filenames. Every log message gets a `shortId` to make it easy +for us to look up later. + +Here are some other projects that use shortId: + +* [bevy](https://npmjs.org/package/bevy) - A simple server to manage multiple Node services. +* [capre](https://npmjs.org/package/capre) - Cross-Server Data Replication. +* [cordova-build](https://www.npmjs.org/package/cordova-build) - an alternative to phonegap build that runs on your servers/agents. +* [couchdb-tools](https://www.npmjs.org/package/couchdb-tools) - A library of handy functions for use when working with CouchDB documents. +* [CleverStack/clever-email](https://github.com/CleverStack/clever-email) - E-mail system for CleverStack. +* [CloudTypes](https://github.com/ticup/CloudTypes) - JavaScript end2end implementation of the Cloud Types model for Eventual Consistency programming. 
+* [dnode-tarantula](https://github.com/jutaz/dnode-tarantula) - an asynchronous rpc and event system for node.js based on dnode-protocol and TCP sockets. +* [mongoose-url-shortener](https://www.npmjs.org/package/mongoose-url-shortener) - A simple URL Shortening library for NodeJS using Promises/A+ results. +* [mozilla/smokejumper](https://github.com/mozilla/smokejumper) - The Smoke Jumper project is an effort to bring dead simple, secure, P2P file sharing to Firefox. +* [shortness](https://npmjs.org/package/shortness) - Node based URL shortener that uses SQLite. +* [file-db](https://npmjs.org/package/file-db) - Document database that uses directories and files to store its data, supporting nested key-value objects in named collections. +* [resume-generator](https://www.npmjs.org/package/resume-generator) - Resume Generator. +* [riffmint](https://npmjs.org/package/riffmint) - Collaboration in musical space. +* [rap1ds/dippa](https://github.com/rap1ds/dippa) - Dippa Editor – A web-based LaTeX editor + + + + +### API + +```js +var shortid = require('shortid'); +``` + +--------------------------------------- + +#### `shortid.generate()` + +__Returns__ `string` non-sequential unique id. + +__Example__ + +```js +users.insert({ + _id: shortid.generate(), + name: '...', + email: '...' +}); +``` + +--------------------------------------- + +#### `shortid.characters(string)` + +__Default:__ `'0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_'` + +__Returns__ new alphabet as a `string` + +__Recommendation:__ If you don't like _ or -, you can to set new characters to use. + +__Optional__ + +Change the characters used. + +You must provide a string of all 64 unique characters. Order is not important. + +The default characters provided were selected because they are url safe. + +__Example__ + +```js +// use $ and @ instead of - and _ +shortid.characters('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$@'); +``` + +```js +// any 64 unicode characters work, but I wouldn't recommend this. +shortid.characters('ⒶⒷⒸⒹⒺⒻⒼⒽⒾⒿⓀⓁⓂⓃⓄⓅⓆⓇⓈⓉⓊⓋⓌⓍⓎⓏⓐⓑⓒⓓⓔⓕⓖⓗⓘⓙⓚⓛⓜⓝⓞⓟⓠⓡⓢⓣⓤⓥⓦⓧⓨⓩ①②③④⑤⑥⑦⑧⑨⑩⑪⑫'); +``` + + +--------------------------------------- + +#### `shortid.isValid(id)` + +__Returns__ `boolean` + +Check to see if an id is a valid `shortid`. Note: This only means the id _could_ have been generated by `shortid`, it doesn't guarantee it. + +__Example__ + +```js +shortid.isValid('41XTDbE'); +// true +``` + +```js +shortid.isValid('i have spaces'); +// false +``` + +--------------------------------------- + +#### `shortid.worker(integer)` + +__Default:__ `process.env.NODE_UNIQUE_ID || 0` + +__Recommendation:__ You typically won't want to change this. + +__Optional__ + +If you are running multiple server processes then you should make sure every one has a unique `worker` id. Should be an integer between 0 and 16. +If you do not do this there is very little chance of two servers generating the same id, but it is theoretically possible +if both are generated in the exact same second and are generating the same number of ids that second and a half-dozen random numbers are all exactly the same. + +__Example__ + +```js +shortid.worker(1); +``` + +--------------------------------------- + +#### `shortid.seed(integer)` + +__Default:__ `1` + +__Recommendation:__ You typically won't want to change this. + +__Optional__ + +Choose a unique value that will seed the random number generator so users won't be able to figure out the pattern of the unique ids. 
Call it just once in your application before using `shortId` and always use the same value in your application. + +Most developers won't need to use this, it's mainly for testing ShortId. + +If you are worried about users somehow decrypting the id then use it as a secret value for increased encryption. + +__Example__ + +```js +shortid.seed(1000); +``` + + + + + + +### About the Author + +Hi! Thanks for checking out this project! My name is **Dylan Greene**. When not overwhelmed with my two young kids I enjoy contributing +to the open source community. I'm also a tech lead at [Opower](http://opower.com). [![@dylang](https://img.shields.io/badge/github-dylang-green.svg)](https://github.com/dylang) [![@dylang](https://img.shields.io/badge/twitter-dylang-blue.svg)](https://twitter.com/dylang) + +Here's some of my other Node projects: + +| Name | Description | npm Downloads | +|---|---|---| +| [`npm‑check`](https://github.com/dylang/npm-check) | Check for outdated, incorrect, and unused dependencies. | [![npm-check](https://img.shields.io/npm/dm/npm-check.svg?style=flat-square)](https://www.npmjs.org/package/npm-check) | +| [`grunt‑notify`](https://github.com/dylang/grunt-notify) | Automatic desktop notifications for Grunt errors and warnings. Supports OS X, Windows, Linux. | [![grunt-notify](https://img.shields.io/npm/dm/grunt-notify.svg?style=flat-square)](https://www.npmjs.org/package/grunt-notify) | +| [`space‑hogs`](https://github.com/dylang/space-hogs) | Discover surprisingly large directories from the command line. | [![space-hogs](https://img.shields.io/npm/dm/space-hogs.svg?style=flat-square)](https://www.npmjs.org/package/space-hogs) | +| [`rss`](https://github.com/dylang/node-rss) | RSS feed generator. Add RSS feeds to any project. Supports enclosures and GeoRSS. | [![rss](https://img.shields.io/npm/dm/rss.svg?style=flat-square)](https://www.npmjs.org/package/rss) | +| [`grunt‑prompt`](https://github.com/dylang/grunt-prompt) | Interactive prompt for your Grunt config using console checkboxes, text input with filtering, password fields. | [![grunt-prompt](https://img.shields.io/npm/dm/grunt-prompt.svg?style=flat-square)](https://www.npmjs.org/package/grunt-prompt) | +| [`xml`](https://github.com/dylang/node-xml) | Fast and simple xml generator. Supports attributes, CDATA, etc. Includes tests and examples. | [![xml](https://img.shields.io/npm/dm/xml.svg?style=flat-square)](https://www.npmjs.org/package/xml) | +| [`changelog`](https://github.com/dylang/changelog) | Command line tool (and Node module) that generates a changelog in color output, markdown, or json for modules in npmjs.org's registry as well as any public github.com repo. | [![changelog](https://img.shields.io/npm/dm/changelog.svg?style=flat-square)](https://www.npmjs.org/package/changelog) | +| [`grunt‑attention`](https://github.com/dylang/grunt-attention) | Display attention-grabbing messages in the terminal | [![grunt-attention](https://img.shields.io/npm/dm/grunt-attention.svg?style=flat-square)](https://www.npmjs.org/package/grunt-attention) | +| [`observatory`](https://github.com/dylang/observatory) | Beautiful UI for showing tasks running on the command line. 
| [![observatory](https://img.shields.io/npm/dm/observatory.svg?style=flat-square)](https://www.npmjs.org/package/observatory) | +| [`anthology`](https://github.com/dylang/anthology) | Module information and stats for any @npmjs user | [![anthology](https://img.shields.io/npm/dm/anthology.svg?style=flat-square)](https://www.npmjs.org/package/anthology) | +| [`grunt‑cat`](https://github.com/dylang/grunt-cat) | Echo a file to the terminal. Works with text, figlets, ascii art, and full-color ansi. | [![grunt-cat](https://img.shields.io/npm/dm/grunt-cat.svg?style=flat-square)](https://www.npmjs.org/package/grunt-cat) | + +_This list was generated using [anthology](https://github.com/dylang/anthology)._ + + +### License +Copyright (c) 2016 Dylan Greene, contributors. + +Released under the [MIT license](https://tldrlegal.com/license/mit-license). + +Screenshots are [CC BY-SA](http://creativecommons.org/licenses/by-sa/4.0/) (Attribution-ShareAlike). diff --git a/sdklab/meantimerecovery/aedes/node_modules/shortid/index.js b/sdklab/meantimerecovery/aedes/node_modules/shortid/index.js new file mode 100644 index 000000000..cc02b2f7b --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/shortid/index.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = require('./lib/index'); diff --git a/sdklab/meantimerecovery/aedes/node_modules/shortid/package.json b/sdklab/meantimerecovery/aedes/node_modules/shortid/package.json new file mode 100644 index 000000000..ba90158e1 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/shortid/package.json @@ -0,0 +1,62 @@ +{ + "_from": "shortid@^2.2.15", + "_id": "shortid@2.2.16", + "_inBundle": false, + "_integrity": "sha512-Ugt+GIZqvGXCIItnsL+lvFJOiN7RYqlGy7QE41O3YC1xbNSeDGIRO7xg2JJXIAj1cAGnOeC1r7/T9pgrtQbv4g==", + "_location": "/shortid", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "shortid@^2.2.15", + "name": "shortid", + "escapedName": "shortid", + "rawSpec": "^2.2.15", + "saveSpec": null, + "fetchSpec": "^2.2.15" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/shortid/-/shortid-2.2.16.tgz", + "_shasum": "b742b8f0cb96406fd391c76bfc18a67a57fe5608", + "_spec": "shortid@^2.2.15", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "author": { + "name": "Dylan Greene", + "email": "dylang@gmail.com" + }, + "browser": { + "./lib/random/random-byte.js": "./lib/random/random-byte-browser.js", + "./lib/util/cluster-worker-id.js": "./lib/util/cluster-worker-id-browser.js" + }, + "bugs": { + "url": "https://github.com/dylang/shortid/issues" + }, + "bundleDependencies": false, + "dependencies": { + "nanoid": "^2.1.0" + }, + "deprecated": false, + "description": "Amazingly short non-sequential url-friendly unique id generator.", + "homepage": "https://github.com/dylang/shortid#readme", + "keywords": [ + "short", + "tiny", + "id", + "uuid", + "bitly", + "shorten", + "mongoid", + "shortid", + "tinyid" + ], + "license": "MIT", + "main": "index.js", + "name": "shortid", + "repository": { + "type": "git", + "url": "git+https://github.com/dylang/shortid.git" + }, + "version": "2.2.16" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/string_decoder/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/string_decoder/LICENSE new file mode 100644 index 000000000..778edb207 --- /dev/null +++ 
b/sdklab/meantimerecovery/aedes/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/sdklab/meantimerecovery/aedes/node_modules/string_decoder/README.md b/sdklab/meantimerecovery/aedes/node_modules/string_decoder/README.md new file mode 100644 index 000000000..5fd58315e --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 
0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/sdklab/meantimerecovery/aedes/node_modules/string_decoder/package.json b/sdklab/meantimerecovery/aedes/node_modules/string_decoder/package.json new file mode 100644 index 000000000..540d4a600 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/string_decoder/package.json @@ -0,0 +1,62 @@ +{ + "_from": "string_decoder@^1.1.1", + "_id": "string_decoder@1.3.0", + "_inBundle": false, + "_integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "_location": "/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@^1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "^1.1.1", + "saveSpec": null, + "fetchSpec": "^1.1.1" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "_shasum": "42f114594a46cf1a8e30b0a84f56c78c3edac21e", + "_spec": "string_decoder@^1.1.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.2.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "files": [ + "lib" + ], + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.3.0" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/History.md b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/History.md new file mode 100644 index 000000000..acc867537 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 
2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/LICENSE new file mode 100644 index 000000000..6a60e8c22 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/README.md b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/README.md new file mode 100644 index 000000000..75622fa7c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/browser.js b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/browser.js new file mode 100644 index 000000000..549ae2f06 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. + * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. + * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/node.js b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/node.js new file mode 100644 index 000000000..5e6fcff5d --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. 
+ */ + +module.exports = require('util').deprecate; diff --git a/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/package.json b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/package.json new file mode 100644 index 000000000..989bd9068 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/util-deprecate/package.json @@ -0,0 +1,57 @@ +{ + "_from": "util-deprecate@^1.0.1", + "_id": "util-deprecate@1.0.2", + "_inBundle": false, + "_integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "_location": "/util-deprecate", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "util-deprecate@^1.0.1", + "name": "util-deprecate", + "escapedName": "util-deprecate", + "rawSpec": "^1.0.1", + "saveSpec": null, + "fetchSpec": "^1.0.1" + }, + "_requiredBy": [ + "/from2/readable-stream", + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", + "_spec": "util-deprecate@^1.0.1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\readable-stream", + "author": { + "name": "Nathan Rajlich", + "email": "nathan@tootallnate.net", + "url": "http://n8.io/" + }, + "browser": "browser.js", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "The Node.js `util.deprecate()` function with browser support", + "homepage": "https://github.com/TooTallNate/util-deprecate", + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "license": "MIT", + "main": "node.js", + "name": "util-deprecate", + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "1.0.2" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/uuid/CHANGELOG.md b/sdklab/meantimerecovery/aedes/node_modules/uuid/CHANGELOG.md new file mode 100644 index 000000000..7519d19d8 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,229 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) [#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) 
(2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are exposed, there is no more default export. + + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. + + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent ([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## 
[7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. +- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. + +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## 
[3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. 
(#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/sdklab/meantimerecovery/aedes/node_modules/uuid/CONTRIBUTING.md b/sdklab/meantimerecovery/aedes/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 000000000..4a4503d02 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! + +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/sdklab/meantimerecovery/aedes/node_modules/uuid/LICENSE.md b/sdklab/meantimerecovery/aedes/node_modules/uuid/LICENSE.md new file mode 100644 index 000000000..393416836 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/sdklab/meantimerecovery/aedes/node_modules/uuid/README.md b/sdklab/meantimerecovery/aedes/node_modules/uuid/README.md new file mode 100644 index 000000000..ed27e5760 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/uuid/README.md @@ -0,0 +1,505 @@ + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - Node 8, 10, 12, 14 + - Chrome, Safari, Firefox, Edge, IE 11 browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +**Upgrading from `uuid@3.x`?** Your code is probably okay, but check out [Upgrading From `uuid@3.x`](#upgrading-from-uuid3x) for details. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). + +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, + 0xc0, + 0xbd, + 0x7f, + 0x11, + 0xc0, + 0x43, + 0xda, + 0x97, + 0x5e, + 0x2a, + 0x8a, + 0xd9, + 0xeb, + 0xae, + 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. + +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." 
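+Example (a minimal sketch; the custom namespace below is illustrative, create your own, and the resulting UUID value is omitted):
+
+```javascript
+import { v3 as uuidv3 } from 'uuid';
+
+// Any valid UUID may serve as the namespace; the RFC namespaces are also
+// available as uuidv3.DNS and uuidv3.URL
+const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341';
+
+// Deterministic: the same name + namespace pair always yields the same v3 (MD5) UUID
+uuidv3('Hello, World!', MY_NAMESPACE);
+```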
+ +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, + 0x91, + 0x56, + 0xbe, + 0xc4, + 0xfb, + 0xc1, + 0xea, + 0x71, + 0xb4, + 0xef, + 0xe1, + 0x67, + 0x1c, + 0x58, + 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +To load this module directly into older browsers you can use the [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds from any of the following CDNs: + +**Using [UNPKG](https://unpkg.com/uuid@latest/dist/umd/)**: + +```html + +``` + +**Using [jsDelivr](https://cdn.jsdelivr.net/npm/uuid@latest/dist/umd/)**: + +```html + +``` + +**Using [cdnjs](https://cdnjs.com/libraries/uuid)**: + +```html + +``` + +These CDNs all provide the same [`uuidv4()`](#uuidv4options-buffer-offset) method: + +```html + +``` + +Methods for the other algorithms ([`uuidv1()`](#uuidv1options-buffer-offset), [`uuidv3()`](#uuidv3name-namespace-buffer-offset) and [`uuidv5()`](#uuidv5name-namespace-buffer-offset)) are available from the files `uuidv1.min.js`, `uuidv3.min.js` and `uuidv5.min.js` respectively. + +## "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. 
Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +## Upgrading From `uuid@7.x` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7.x` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7.x`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3.x` + +"_Wait... what happened to `uuid@4.x` - `uuid@6.x`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. + +### Deep Requires Now Deprecated + +`uuid@3.x` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7.x` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3.x` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3.x` and has been removed in `uuid@7.x`. 
+ +---- +Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/sdklab/meantimerecovery/aedes/node_modules/uuid/package.json b/sdklab/meantimerecovery/aedes/node_modules/uuid/package.json new file mode 100644 index 000000000..19780e840 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/uuid/package.json @@ -0,0 +1,164 @@ +{ + "_from": "uuid@^8.0.0", + "_id": "uuid@8.3.2", + "_inBundle": false, + "_integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "_location": "/uuid", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "uuid@^8.0.0", + "name": "uuid", + "escapedName": "uuid", + "rawSpec": "^8.0.0", + "saveSpec": null, + "fetchSpec": "^8.0.0" + }, + "_requiredBy": [ + "/aedes" + ], + "_resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "_shasum": "80d5b5ced271bb9af6c445f21a1a04c606cefbe2", + "_spec": "uuid@^8.0.0", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\aedes", + "bin": { + "uuid": "dist/bin/uuid" + }, + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "bugs": { + "url": "https://github.com/uuidjs/uuid/issues" + }, + "bundleDependencies": false, + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "deprecated": false, + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "devDependencies": { + "@babel/cli": "7.11.6", + "@babel/core": "7.11.6", + "@babel/preset-env": "7.11.5", + "@commitlint/cli": "11.0.0", + "@commitlint/config-conventional": "11.0.0", + "@rollup/plugin-node-resolve": "9.0.0", + "babel-eslint": "10.1.0", + "bundlewatch": "0.3.1", + "eslint": "7.10.0", + "eslint-config-prettier": "6.12.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.22.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "3.1.4", + "eslint-plugin-promise": "4.2.1", + "eslint-plugin-standard": "4.0.1", + "husky": "4.3.0", + "jest": "25.5.4", + "lint-staged": "10.4.0", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.1.2", + "random-seed": "0.3.0", + "rollup": "2.28.2", + "rollup-plugin-terser": "7.0.2", + "runmd": "1.3.2", + "standard-version": "9.0.0" + }, + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "homepage": "https://github.com/uuidjs/uuid#readme", + "husky": { + "hooks": { + "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", + "pre-commit": "lint-staged" + } + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "main": "./dist/index.js", + "module": "./dist/esm-node/index.js", + "name": "uuid", + "optionalDevDependencies": { + "@wdio/browserstack-service": "6.4.0", + "@wdio/cli": "6.4.0", + "@wdio/jasmine-framework": "6.4.0", + "@wdio/local-runner": "6.4.0", + 
"@wdio/spec-reporter": "6.4.0", + "@wdio/static-server-service": "6.4.0", + "@wdio/sync": "6.4.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/uuidjs/uuid.git" + }, + "scripts": { + "build": "./scripts/build.sh", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "docs": "( node --version | grep -q 'v12' ) && ( npm run build && runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "lint": "npm run eslint:check && npm run prettier:check", + "md": "runmd --watch --output=README.md README_js.md", + "prepack": "npm run build", + "pretest": "[ -n $CI ] || npm run build", + "pretest:benchmark": "npm run build", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "pretest:node": "npm run build", + "prettier:check": "prettier --ignore-path .prettierignore --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --ignore-path .prettierignore --write '**/*.{js,jsx,json,md}'", + "release": "standard-version --no-verify", + "test": "BABEL_ENV=commonjs node --throw-deprecation node_modules/.bin/jest test/unit/", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "test:browser": "wdio run ./wdio.conf.js", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh" + }, + "sideEffects": false, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + }, + "version": "8.3.2" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/uuid/wrapper.mjs b/sdklab/meantimerecovery/aedes/node_modules/uuid/wrapper.mjs new file mode 100644 index 000000000..c31e9cef4 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/sdklab/meantimerecovery/aedes/node_modules/wrappy/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/wrappy/LICENSE new file mode 100644 index 000000000..19129e315 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/wrappy/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/wrappy/README.md b/sdklab/meantimerecovery/aedes/node_modules/wrappy/README.md new file mode 100644 index 000000000..98eab2522 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/wrappy/README.md @@ -0,0 +1,36 @@ +# wrappy + +Callback wrapping utility + +## USAGE + +```javascript +var wrappy = require("wrappy") + +// var wrapper = wrappy(wrapperFunction) + +// make sure a cb is called only once +// See also: http://npm.im/once for this specific use case +var once = wrappy(function (cb) { + var called = false + return function () { + if (called) return + called = true + return cb.apply(this, arguments) + } +}) + +function printBoo () { + console.log('boo') +} +// has some rando property +printBoo.iAmBooPrinter = true + +var onlyPrintOnce = once(printBoo) + +onlyPrintOnce() // prints 'boo' +onlyPrintOnce() // does nothing + +// random property is retained! +assert.equal(onlyPrintOnce.iAmBooPrinter, true) +``` diff --git a/sdklab/meantimerecovery/aedes/node_modules/wrappy/package.json b/sdklab/meantimerecovery/aedes/node_modules/wrappy/package.json new file mode 100644 index 000000000..612da47f4 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/wrappy/package.json @@ -0,0 +1,58 @@ +{ + "_from": "wrappy@1", + "_id": "wrappy@1.0.2", + "_inBundle": false, + "_integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "_location": "/wrappy", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "wrappy@1", + "name": "wrappy", + "escapedName": "wrappy", + "rawSpec": "1", + "saveSpec": null, + "fetchSpec": "1" + }, + "_requiredBy": [ + "/once" + ], + "_resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "_shasum": "b5243d8f3ec1aa35f1364605bc0d1036e30ab69f", + "_spec": "wrappy@1", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\once", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/npm/wrappy/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Callback wrapping utility", + "devDependencies": { + "tap": "^2.3.1" + }, + "directories": { + "test": "test" + }, + "files": [ + "wrappy.js" + ], + "homepage": "https://github.com/npm/wrappy", + "license": "ISC", + "main": "wrappy.js", + "name": "wrappy", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/wrappy.git" + }, + "scripts": { + "test": "tap --coverage test/*.js" + }, + "version": "1.0.2" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/wrappy/wrappy.js b/sdklab/meantimerecovery/aedes/node_modules/wrappy/wrappy.js new file mode 100644 index 000000000..bb7e7d6fc --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/wrappy/wrappy.js @@ -0,0 +1,33 @@ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. 
+// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/xtend/.jshintrc b/sdklab/meantimerecovery/aedes/node_modules/xtend/.jshintrc new file mode 100644 index 000000000..77887b5f0 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/xtend/.jshintrc @@ -0,0 +1,30 @@ +{ + "maxdepth": 4, + "maxstatements": 200, + "maxcomplexity": 12, + "maxlen": 80, + "maxparams": 5, + + "curly": true, + "eqeqeq": true, + "immed": true, + "latedef": false, + "noarg": true, + "noempty": true, + "nonew": true, + "undef": true, + "unused": "vars", + "trailing": true, + + "quotmark": true, + "expr": true, + "asi": true, + + "browser": false, + "esnext": true, + "devel": false, + "node": false, + "nonstandard": false, + + "predef": ["require", "module", "__dirname", "__filename"] +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/xtend/LICENSE b/sdklab/meantimerecovery/aedes/node_modules/xtend/LICENSE new file mode 100644 index 000000000..0099f4f6c --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/xtend/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) +Copyright (c) 2012-2014 Raynos. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/sdklab/meantimerecovery/aedes/node_modules/xtend/README.md b/sdklab/meantimerecovery/aedes/node_modules/xtend/README.md new file mode 100644 index 000000000..4a2703cff --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/xtend/README.md @@ -0,0 +1,32 @@ +# xtend + +[![browser support][3]][4] + +[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Extend like a boss + +xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. 
+ +## Examples + +```js +var extend = require("xtend") + +// extend returns a new object. Does not mutate arguments +var combination = extend({ + a: "a", + b: "c" +}, { + b: "b" +}) +// { a: "a", b: "b" } +``` + +## Stability status: Locked + +## MIT Licensed + + + [3]: http://ci.testling.com/Raynos/xtend.png + [4]: http://ci.testling.com/Raynos/xtend diff --git a/sdklab/meantimerecovery/aedes/node_modules/xtend/immutable.js b/sdklab/meantimerecovery/aedes/node_modules/xtend/immutable.js new file mode 100644 index 000000000..94889c9de --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/xtend/immutable.js @@ -0,0 +1,19 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/xtend/mutable.js b/sdklab/meantimerecovery/aedes/node_modules/xtend/mutable.js new file mode 100644 index 000000000..72debede6 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/xtend/mutable.js @@ -0,0 +1,17 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/xtend/package.json b/sdklab/meantimerecovery/aedes/node_modules/xtend/package.json new file mode 100644 index 000000000..7031d15d2 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/xtend/package.json @@ -0,0 +1,86 @@ +{ + "_from": "xtend@^4.0.2", + "_id": "xtend@4.0.2", + "_inBundle": false, + "_integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "_location": "/xtend", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "xtend@^4.0.2", + "name": "xtend", + "escapedName": "xtend", + "rawSpec": "^4.0.2", + "saveSpec": null, + "fetchSpec": "^4.0.2" + }, + "_requiredBy": [ + "/fastparallel" + ], + "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "_shasum": "bb72779f5fa465186b1f438f674fa347fdb5db54", + "_spec": "xtend@^4.0.2", + "_where": "C:\\Workspace\\azure-iot-sdk-python\\sdklab\\meantimerecovery\\aedes\\node_modules\\fastparallel", + "author": { + "name": "Raynos", + "email": "raynos2@gmail.com" + }, + "bugs": { + "url": "https://github.com/Raynos/xtend/issues", + "email": "raynos2@gmail.com" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "dependencies": {}, + "deprecated": false, + "description": "extend like a boss", + "devDependencies": { + "tape": "~1.1.0" + }, + "engines": { + "node": ">=0.4" + }, + "homepage": "https://github.com/Raynos/xtend", + "keywords": [ + "extend", + "merge", + "options", + "opts", + "object", + "array" + ], + "license": "MIT", + "main": "immutable", + "name": "xtend", + "repository": { + "type": "git", + "url": "git://github.com/Raynos/xtend.git" + }, + "scripts": { + "test": "node test" + }, + "testling": { + "files": "test.js", + "browsers": [ + "ie/7..latest", + "firefox/16..latest", + "firefox/nightly", + 
"chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest" + ] + }, + "version": "4.0.2" +} diff --git a/sdklab/meantimerecovery/aedes/node_modules/xtend/test.js b/sdklab/meantimerecovery/aedes/node_modules/xtend/test.js new file mode 100644 index 000000000..b895b42b3 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/node_modules/xtend/test.js @@ -0,0 +1,103 @@ +var test = require("tape") +var extend = require("./") +var mutableExtend = require("./mutable") + +test("merge", function(assert) { + var a = { a: "foo" } + var b = { b: "bar" } + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("replace", function(assert) { + var a = { a: "foo" } + var b = { a: "bar" } + + assert.deepEqual(extend(a, b), { a: "bar" }) + assert.end() +}) + +test("undefined", function(assert) { + var a = { a: undefined } + var b = { b: "foo" } + + assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) + assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) + assert.end() +}) + +test("handle 0", function(assert) { + var a = { a: "default" } + var b = { a: 0 } + + assert.deepEqual(extend(a, b), { a: 0 }) + assert.deepEqual(extend(b, a), { a: "default" }) + assert.end() +}) + +test("is immutable", function (assert) { + var record = {} + + extend(record, { foo: "bar" }) + assert.equal(record.foo, undefined) + assert.end() +}) + +test("null as argument", function (assert) { + var a = { foo: "bar" } + var b = null + var c = void 0 + + assert.deepEqual(extend(b, a, c), { foo: "bar" }) + assert.end() +}) + +test("mutable", function (assert) { + var a = { foo: "bar" } + + mutableExtend(a, { bar: "baz" }) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("null prototype", function(assert) { + var a = { a: "foo" } + var b = Object.create(null) + b.b = "bar"; + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("null prototype mutable", function (assert) { + var a = { foo: "bar" } + var b = Object.create(null) + b.bar = "baz"; + + mutableExtend(a, b) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("prototype pollution", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + extend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) + +test("prototype pollution mutable", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + mutableExtend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) diff --git a/sdklab/meantimerecovery/aedes/package-lock.json b/sdklab/meantimerecovery/aedes/package-lock.json new file mode 100644 index 000000000..03ba48334 --- /dev/null +++ b/sdklab/meantimerecovery/aedes/package-lock.json @@ -0,0 +1,305 @@ +{ + "name": "helloaedes", + "version": "1.0.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "aedes": { + "version": "0.42.6", + "resolved": "https://registry.npmjs.org/aedes/-/aedes-0.42.6.tgz", + "integrity": "sha512-x2jfb6zmRNRDHOkcadEhjUE81wsPFippNncrI1IX8YnEt4YrbpEmuSNbKYssqWW0p1c2Q2iaFVS/a3FiFOZ14Q==", + "requires": { + "aedes-packet": "^2.3.1", + "aedes-persistence": "^8.1.1", + "aedes-protocol-decoder": "^1.0.0", + "bulk-write-stream": "^2.0.1", + "end-of-stream": "^1.4.4", + "fastfall": "^1.5.1", + "fastparallel": "^2.3.0", + "fastseries": "^2.0.0", + "mqemitter": 
"^4.2.0", + "mqtt-packet": "^6.3.2", + "proxy-protocol-js": "^4.0.4", + "readable-stream": "^3.6.0", + "retimer": "^2.0.0", + "reusify": "^1.0.4", + "shortid": "^2.2.15", + "uuid": "^8.0.0" + } + }, + "aedes-packet": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/aedes-packet/-/aedes-packet-2.3.1.tgz", + "integrity": "sha512-LqBd57uc2rui2RbjycW17dylglejG26mM4ewVXGNDnVp/SUHFVEgm7d1HTmYrnSkSCNoHti042qgcTwv/F+BtQ==", + "requires": { + "mqtt-packet": "^6.3.0" + } + }, + "aedes-persistence": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/aedes-persistence/-/aedes-persistence-8.1.3.tgz", + "integrity": "sha512-VMCjEV+2g1TNJb/IlDEUy6SP9crT+QUhe2xc6UjyqrFNBNgTvHmOefXY7FxWrwmR2QA02vwg3+5p/JXkyg/Dkw==", + "requires": { + "aedes-packet": "^2.3.1", + "from2": "^2.3.0", + "qlobber": "^5.0.3" + } + }, + "aedes-protocol-decoder": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/aedes-protocol-decoder/-/aedes-protocol-decoder-1.0.0.tgz", + "integrity": "sha512-nzWwSetptIFFk0gXtXBvmBPUD6HwuZKeVm/8mRJqEKfBJ4ZnvgpcO5CFwEDzTi884jOWTIBaJAWll3QLYsOCFQ==", + "requires": { + "forwarded": "^0.1.2", + "proxy-protocol-js": "^4.0.3" + } + }, + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" + }, + "bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "requires": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "bulk-write-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/bulk-write-stream/-/bulk-write-stream-2.0.1.tgz", + "integrity": "sha512-XWOLjgHtpDasHfwM8oO4df1JoZwa7/OwTsXDzh4rUTo+9CowzeOFBZz43w+H14h1fyq+xl28tVIBrdjcjj4Gug==", + "requires": { + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + } + }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "requires": { + "once": "^1.4.0" + } + }, + "fastfall": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/fastfall/-/fastfall-1.5.1.tgz", + "integrity": "sha512-KH6p+Z8AKPXnmA7+Iz2Lh8ARCMr+8WNPVludm1LGkZoD2MjY6LVnRMtTKhkdzI+jr0RzQWXKzKyBJm1zoHEL4Q==", + "requires": { + "reusify": "^1.0.0" + } + }, + "fastparallel": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/fastparallel/-/fastparallel-2.4.1.tgz", + "integrity": 
"sha512-qUmhxPgNHmvRjZKBFUNI0oZuuH9OlSIOXmJ98lhKPxMZZ7zS/Fi0wRHOihDSz0R1YiIOjxzOY4bq65YTcdBi2Q==", + "requires": { + "reusify": "^1.0.4", + "xtend": "^4.0.2" + } + }, + "fastseries": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fastseries/-/fastseries-2.0.0.tgz", + "integrity": "sha512-XBU9RXeoYc2/VnvMhplAxEmZLfIk7cvTBu+xwoBuTI8pL19E03cmca17QQycKIdxgwCeFA/a4u27gv1h3ya5LQ==" + }, + "forwarded": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha512-Ua9xNhH0b8pwE3yRbFfXJvfdWF0UHNCdeyb2sbi9Ul/M+r3PTdrz7Cv4SCfZRMjmzEM9PhraqfZFbGTIg3OMyA==" + }, + "from2": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", + "integrity": "sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==", + "requires": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } + } + }, + "ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "mqemitter": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/mqemitter/-/mqemitter-4.5.0.tgz", + "integrity": "sha512-Mp/zytFeIv6piJQkEKnncHcP4R/ErJc5C7dfonkhkNUT2LA/nTayrfNxbipp3M5iCJUTQSUtzfQAQA3XVcKz6w==", + "requires": { + "fastparallel": "^2.3.0", + "qlobber": "^5.0.0" + } + }, + "mqtt-packet": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/mqtt-packet/-/mqtt-packet-6.10.0.tgz", + "integrity": "sha512-ja8+mFKIHdB1Tpl6vac+sktqy3gA8t9Mduom1BA75cI+R9AHnZOiaBQwpGiWnaVJLDGRdNhQmFaAqd7tkKSMGA==", + "requires": { + "bl": "^4.0.2", + "debug": "^4.1.1", + "process-nextick-args": "^2.0.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "nanoid": { + "version": "2.1.11", + "resolved": 
"https://registry.npmjs.org/nanoid/-/nanoid-2.1.11.tgz", + "integrity": "sha512-s/snB+WGm6uwi0WjsZdaVcuf3KJXlfGl2LcxgwkEwJF0D/BWzVWAZW/XY4bFaiR7s0Jk3FPvlnepg1H1b1UwlA==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "proxy-protocol-js": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/proxy-protocol-js/-/proxy-protocol-js-4.0.6.tgz", + "integrity": "sha512-SjXgyBmr0dBbKUZ0jOzp0N9urTcDOI1cd1oEeE43W1vG4OMwYYLggCRcMJ0zv0gdTA8Imb4cAiYj8Ic/PWv1mw==" + }, + "qlobber": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/qlobber/-/qlobber-5.0.3.tgz", + "integrity": "sha512-wW4GTZPePyh0RgOsM18oDyOUlXfurVRgoNyJfS+y7VWPyd0GYhQp5T2tycZFZjonH+hngxIfklGJhTP/ghidgQ==" + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "retimer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/retimer/-/retimer-2.0.0.tgz", + "integrity": "sha512-KLXY85WkEq2V2bKex/LOO1ViXVn2KGYe4PYysAdYdjmraYIUsVkXu8O4am+8+5UbaaGl1qho4aqAAPHNQ4GSbg==" + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "shortid": { + "version": "2.2.16", + "resolved": "https://registry.npmjs.org/shortid/-/shortid-2.2.16.tgz", + "integrity": "sha512-Ugt+GIZqvGXCIItnsL+lvFJOiN7RYqlGy7QE41O3YC1xbNSeDGIRO7xg2JJXIAj1cAGnOeC1r7/T9pgrtQbv4g==", + "requires": { + "nanoid": "^2.1.0" + } + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "xtend": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + } + } +} From 9514291132548a8fe4dd03b5529a2519f0ab1ad3 Mon Sep 17 00:00:00 2001 From: olivakar Date: Tue, 6 Sep 2022 15:04:51 -0700 Subject: [PATCH 17/20] local variable declared --- .../tests/test_async_certificate_enrollments.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py index 1f3e97c63..e67aa10ad 100644 --- a/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py @@ -83,7 +83,7 @@ def after_module(): async def test_device_register_with_device_id_for_a_x509_individual_enrollment(protocol): device_id = "e2edpsthunderbolt" device_index = type_to_device_indices.get("individual_with_device_id")[0] - + registration_id = "" try: individual_enrollment_record = create_individual_enrollment_with_x509_client_certs( device_index=device_index, device_id=device_id @@ -111,7 +111,7 @@ async def test_device_register_with_device_id_for_a_x509_individual_enrollment(p @pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) async def test_device_register_with_no_device_id_for_a_x509_individual_enrollment(protocol): device_index = type_to_device_indices.get("individual_no_device_id")[0] - + registration_id = "" try: individual_enrollment_record = create_individual_enrollment_with_x509_client_certs( device_index=device_index From 6c3db703411a95e561439c55e5ca2f7ab2bee793 Mon Sep 17 00:00:00 2001 From: olivakar Date: Thu, 22 Sep 2022 13:43:42 -0700 Subject: [PATCH 18/20] test (azure-iot-device) : DPS cert management client cert issuance tests (#1054) --- ...sion_symmetric_key_client_cert_issuance.py | 87 +++ .../dev_utils/provisioningservice/__init__.py | 0 .../dev_utils/provisioningservice/client.py | 327 ++++++++++ .../provisioningservice/protocol/__init__.py | 11 + .../protocol/models/__init__.py | 55 ++ .../protocol/models/attestation_mechanism.py | 41 ++ .../client_certificate_issuance_policy.py | 31 + .../models/custom_allocation_definition.py | 35 ++ .../models/device_registration_state.py | 87 +++ .../protocol/models/enrollment_group.py | 109 ++++ .../protocol/models/individual_enrollment.py | 123 ++++ .../protocol/models/initial_twin.py | 28 + .../models/initial_twin_properties.py | 22 + .../protocol/models/metadata.py | 29 + .../provisioning_service_error_details.py | 56 ++ .../protocol/models/reprovision_policy.py | 44 ++ .../models/symmetric_key_attestation.py | 28 + .../protocol/models/tpm_attestation.py | 32 + .../protocol/models/twin_collection.py | 37 ++ .../protocol/models/x509_attestation.py | 32 + .../protocol/models/x509_ca_references.py | 28 + .../protocol/models/x509_certificate_info.py | 65 ++ .../models/x509_certificate_with_info.py | 28 + .../protocol/models/x509_certificates.py | 28 + .../provisioningservice/protocol/version.py | 8 + .../provisioningservice/utils/__init__.py | 5 + .../provisioningservice/utils/auth.py | 64 ++ .../utils/connection_string.py | 84 +++ .../provisioningservice/utils/sastoken.py | 79 +++ requirements_test.txt | 2 +- scripts/create_x509_chain_crypto.py | 6 + tests/e2e/provisioning_e2e/pytest.ini | 2 +- .../test_async_certificate_enrollments.py | 137 ++-- .../tests/test_async_dps_cert_mgmt.py | 590 ++++++++++++++++++ 
.../tests/test_async_symmetric_enrollments.py | 108 +--- .../test_sync_certificate_enrollments.py | 99 ++- .../tests/test_sync_symmetric_enrollments.py | 107 +--- .../pipeline/test_mqtt_pipeline.py | 2 - .../test_pipeline_stages_provisioning.py | 9 +- vsts/dps-e2e-cert-mgmt.yaml | 1 + 40 files changed, 2389 insertions(+), 277 deletions(-) create mode 100644 azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py create mode 100644 dev_utils/dev_utils/provisioningservice/__init__.py create mode 100644 dev_utils/dev_utils/provisioningservice/client.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/__init__.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/__init__.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/attestation_mechanism.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/client_certificate_issuance_policy.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/custom_allocation_definition.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/device_registration_state.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/enrollment_group.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/individual_enrollment.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/initial_twin.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/initial_twin_properties.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/metadata.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/provisioning_service_error_details.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/reprovision_policy.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/symmetric_key_attestation.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/tpm_attestation.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/twin_collection.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/x509_attestation.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/x509_ca_references.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificate_info.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificate_with_info.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificates.py create mode 100644 dev_utils/dev_utils/provisioningservice/protocol/version.py create mode 100644 dev_utils/dev_utils/provisioningservice/utils/__init__.py create mode 100644 dev_utils/dev_utils/provisioningservice/utils/auth.py create mode 100644 dev_utils/dev_utils/provisioningservice/utils/connection_string.py create mode 100644 dev_utils/dev_utils/provisioningservice/utils/sastoken.py create mode 100644 tests/e2e/provisioning_e2e/tests/test_async_dps_cert_mgmt.py diff --git a/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py new file mode 100644 index 000000000..54ea88c01 --- /dev/null +++ b/azure-iot-device/samples/dps-cert-mgmt/provision_symmetric_key_client_cert_issuance.py @@ -0,0 +1,87 @@ +# 
------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import asyncio +from azure.iot.device.aio import ProvisioningDeviceClient +import os +from azure.iot.device.aio import IoTHubDeviceClient +from azure.iot.device import Message +import uuid +from azure.iot.device import X509 + +messages_to_send = 10 +provisioning_host = os.getenv("PROVISIONING_HOST") +id_scope = os.getenv("PROVISIONING_IDSCOPE") +registration_id = os.getenv("PROVISIONING_REGISTRATION_ID") +symmetric_key = os.getenv("PROVISIONING_SYMMETRIC_KEY") + +csr_file = os.getenv("CSR_FILE") +key_file = os.getenv("X509_KEY_FILE") +issued_cert_file = os.getenv("X509_CERT_FILE") + + +async def main(): + provisioning_device_client = ProvisioningDeviceClient.create_from_symmetric_key( + provisioning_host=provisioning_host, + registration_id=registration_id, + id_scope=id_scope, + symmetric_key=symmetric_key, # authenticate for DPS + ) + with open(csr_file, "r") as csr: + csr_data = csr.read() + # set the CSR on the client + provisioning_device_client.client_csr = str(csr_data) + + registration_result = await provisioning_device_client.register() + + print("The complete registration result is") + print(registration_result.registration_state) + + with open(issued_cert_file, "w") as out_ca_pem: + # Write the issued certificate on the file. + cert_data = registration_result.registration_state.issued_client_certificate + out_ca_pem.write(cert_data) + + if registration_result.status == "assigned": + print("Will send telemetry from the provisioned device") + + x509 = X509( + cert_file=issued_cert_file, + key_file=key_file, + pass_phrase=os.getenv("PASS_PHRASE"), + ) + + device_client = IoTHubDeviceClient.create_from_x509_certificate( + hostname=registration_result.registration_state.assigned_hub, + device_id=registration_result.registration_state.device_id, + x509=x509, + ) + # Connect the client. + await device_client.connect() + + async def send_test_message(i): + print("sending message #" + str(i)) + msg = Message("test wind speed " + str(i)) + msg.message_id = uuid.uuid4() + await device_client.send_message(msg) + print("done sending message #" + str(i)) + + # send `messages_to_send` messages in parallel + await asyncio.gather(*[send_test_message(i) for i in range(1, messages_to_send + 1)]) + + # finally, disconnect + await device_client.shutdown() + else: + print("Can not send telemetry from the provisioned device") + + +if __name__ == "__main__": + asyncio.run(main()) + + # If using Python 3.6 or below, use the following code instead of asyncio.run(main()): + # loop = asyncio.get_event_loop() + # loop.run_until_complete(main()) + # loop.close() diff --git a/dev_utils/dev_utils/provisioningservice/__init__.py b/dev_utils/dev_utils/provisioningservice/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dev_utils/dev_utils/provisioningservice/client.py b/dev_utils/dev_utils/provisioningservice/client.py new file mode 100644 index 000000000..ef8cb5077 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/client.py @@ -0,0 +1,327 @@ +# Copyright (c) Microsoft. All rights reserved. +# Licensed under the MIT license. See LICENSE file in the project root for +# full license information. 
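The sample above assumes that the private key and CSR referenced by X509_KEY_FILE and CSR_FILE already exist on disk. A minimal sketch of producing those inputs with the `cryptography` package follows; it is not part of this patch, and the file names, key parameters, and the use of the DPS registration ID as the CSR common name are illustrative assumptions.

# Illustrative sketch only (not part of this patch): generate the private key
# and CSR that the sample reads via X509_KEY_FILE and CSR_FILE.
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

registration_id = "my-registration-id"  # assumed: should match PROVISIONING_REGISTRATION_ID

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
csr = (
    x509.CertificateSigningRequestBuilder()
    .subject_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, registration_id)]))
    .sign(key, hashes.SHA256())
)

# Write the key where X509_KEY_FILE points.
with open("device_key.pem", "wb") as f:
    f.write(
        key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption(),
        )
    )

# Write the CSR where CSR_FILE points.
with open("device_csr.pem", "wb") as f:
    f.write(csr.public_bytes(serialization.Encoding.PEM))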
+ +from .utils import auth + +from msrest.service_client import SDKClient +from msrest import Configuration, Serializer, Deserializer +from .protocol.version import VERSION + +from msrest.pipeline import ClientRawResponse +from .protocol import models +import logging + +ENROLLMENTS_URL = "/enrollments/{id}/" +ENROLLMENT_GROUPS_URL = "/enrollmentGroups/{id}" +REGISTRATIONS_URL = "/registrations/{id}" +logging.basicConfig(level=logging.DEBUG) + + +class ProvisioningServiceClientConfiguration(Configuration): + """Configuration for ProvisioningServiceClient + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credentials: Subscription credentials which uniquely identify + client subscription. + :type credentials: None + :param str base_url: Service URL + """ + + def __init__(self, credentials, base_url=None): + + if credentials is None: + raise ValueError("Parameter 'credentials' must not be None.") + if not base_url: + base_url = "https://localhost" + + super(ProvisioningServiceClientConfiguration, self).__init__(base_url) + + self.add_user_agent("provisioningserviceclient/{}".format(VERSION)) + + self.credentials = credentials + + +class ProvisioningServiceError(Exception): + """ + An error from the Device Provisioning Service + + :param str message: Error message + :param Exception cause: Error that causes this error (optional) + """ + + def __init__(self, message, cause=None): + super(ProvisioningServiceError, self).__init__(message) + self.cause = cause + + +class ProvisioningServiceClient(SDKClient): + """ + API for connecting to, and conducting operations on a Device Provisioning Service + + :param str host_name: The host name of the Device Provisioning Service + :param str shared_access_key_name: The shared access key name of the + Device Provisioning Service + :param str shared_access_key: The shared access key of the Device Provisioning Service + """ + + authorization_header = "Authorization" + err_msg = "Service Error {} - {}" + _cs_delimiter = ";" + _cs_val_separator = "=" + _host_name_label = "HostName" + _shared_access_key_name_label = "SharedAccessKeyName" + _shared_access_key_label = "SharedAccessKey" + + def __init__(self, host_name, shared_access_key_name, shared_access_key): + + self.host_name = host_name + self.shared_access_key_name = shared_access_key_name + self.shared_access_key = shared_access_key + + # Build connection string + credentials = auth.ConnectionStringAuthentication.create_with_parsed_values( + self.host_name, self.shared_access_key_name, self.shared_access_key + ) + base_url = "https://" + self.host_name + self.config = ProvisioningServiceClientConfiguration(credentials, base_url) + super(ProvisioningServiceClient, self).__init__(self.config.credentials, self.config) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self.api_version = VERSION # "2018-09-01-preview" + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + @classmethod + def create_from_connection_string(cls, connection_string): + """ + Create a Provisioning Service Client from a connection string + + :param str connection_string: The connection string for the Device Provisioning Service + :return: A new instance of :class:`ProvisioningServiceClient + ` + :rtype: :class:`ProvisioningServiceClient + ` + :raises: ValueError if connection string is invalid + """ + cs_args = connection_string.split(cls._cs_delimiter) + + if len(cs_args) != 3: + raise ValueError("Too 
many or too few values in the connection string") + if len(cs_args) > len(set(cs_args)): + raise ValueError("Duplicate label in connection string") + + for arg in cs_args: + tokens = arg.split(cls._cs_val_separator, 1) + + if tokens[0] == cls._host_name_label: + host_name = tokens[1] + elif tokens[0] == cls._shared_access_key_name_label: + shared_access_key_name = tokens[1] + elif tokens[0] == cls._shared_access_key_label: + shared_access_key = tokens[1] + else: + raise ValueError("Connection string contains incorrect values") + + return cls(host_name, shared_access_key_name, shared_access_key) + + def create_or_update_individual_enrollment( + self, enrollment, etag=None, custom_headers=None, raw=False, **operation_config + ): + """Create or update a device enrollment record. + :param enrollment: The device enrollment record. + :type enrollment: ~protocol.models.IndividualEnrollment + :param etag: The ETag of the enrollment record. + :type etag: str + :param custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IndividualEnrollment or ClientRawResponse if raw=true + :rtype: ~protocol.models.IndividualEnrollment or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ProvisioningServiceErrorDetailsException` + """ + result = None + path_format_arguments = {"id": self._serialize.url("id", enrollment.registration_id, "str")} + url = self._client.format_url(ENROLLMENTS_URL, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters["api-version"] = self._serialize.query( + "self.api_version", self.api_version, "str" + ) + + # Construct headers + header_parameters = {} + header_parameters["Accept"] = "application/json" + header_parameters["Content-Type"] = "application/json; charset=utf-8" + if custom_headers: + header_parameters.update(custom_headers) + if etag is not None: + header_parameters["If-Match"] = self._serialize.header("if_match", etag, "str") + + # Construct body + body_content = self._serialize.body(enrollment, "IndividualEnrollment") + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response) + + if response.status_code == 200: + result = self._deserialize("IndividualEnrollment", response) + + if raw: + client_raw_response = ClientRawResponse(result, response) + return client_raw_response + + return result + + def create_or_update_enrollment_group( + self, enrollment_group, etag=None, custom_headers=None, raw=False, **operation_config + ): + """Create or update a device enrollment group record. + :param enrollment_group: The device enrollment record. + :type EnrollmentGroup: ~protocol.models.EnrollmentGroup + :param etag: The ETag of the enrollment record. + :type etag: str + :param custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: IndividualEnrollment or ClientRawResponse if raw=true + :rtype: ~protocol.models.IndividualEnrollment or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ProvisioningServiceErrorDetailsException` + """ + result = None + path_format_arguments = { + "id": self._serialize.url("id", enrollment_group.enrollment_group_id, "str") + } + url = self._client.format_url(ENROLLMENT_GROUPS_URL, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters["api-version"] = self._serialize.query( + "self.api_version", self.api_version, "str" + ) + + # Construct headers + header_parameters = {} + header_parameters["Accept"] = "application/json" + header_parameters["Content-Type"] = "application/json; charset=utf-8" + if custom_headers: + header_parameters.update(custom_headers) + if etag is not None: + header_parameters["If-Match"] = self._serialize.header("if_match", etag, "str") + + # Construct body + body_content = self._serialize.body(enrollment_group, "EnrollmentGroup") + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response) + + if response.status_code == 200: + result = self._deserialize("EnrollmentGroup", response) + + if raw: + client_raw_response = ClientRawResponse(result, response) + return client_raw_response + + return result + + def delete_individual_enrollment_by_param( + self, registration_id, etag=None, custom_headers=None, raw=False, **operation_config + ): + """ + Delete an Individual Enrollment on the Provisioning Service + + :param str registration_id: The registration id of the Individual Enrollment to be deleted + :param str etag: The etag of the Individual Enrollment to be deleted (optional) + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ProvisioningServiceErrorDetailsException` + """ + path_format_arguments = {"id": self._serialize.url("id", registration_id, "str")} + url = self._client.format_url(ENROLLMENTS_URL, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters["api-version"] = self._serialize.query( + "self.api_version", self.api_version, "str" + ) + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + if etag is not None: + header_parameters["If-Match"] = self._serialize.header("if_match", etag, "str") + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [204]: + raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete_enrollment_group_by_param( + self, group_id, etag=None, custom_headers=None, raw=False, **operation_config + ): + """ + Delete an Enrollment Group on the Provisioning Service + + :param str group_id: The registration id of the Individual Enrollment to be deleted + :param str etag: The etag of the Individual Enrollment to be deleted (optional) + :raises: :class:ProvisioningServiceError + ` if an error occurs on the + Provisioning Service + """ + path_format_arguments = {"id": self._serialize.url("id", group_id, "str")} + url = self._client.format_url(ENROLLMENT_GROUPS_URL, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters["api-version"] = self._serialize.query( + "self.api_version", self.api_version, "str" + ) + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + if etag is not None: + header_parameters["If-Match"] = self._serialize.header("if_match", etag, "str") + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [204]: + raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response diff --git a/dev_utils/dev_utils/provisioningservice/protocol/__init__.py b/dev_utils/dev_utils/provisioningservice/protocol/__init__.py new file mode 100644 index 000000000..65591f5aa --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/__init__.py @@ -0,0 +1,11 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
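The service client defined above is what the new cert-management e2e tests use to manage enrollments against the provisioning service. A minimal usage sketch, assuming placeholder values for the connection string, registration ID, and certificate authority name (and omitting the symmetric keys, which is an assumption rather than something this patch requires), could look like this:

# Illustrative sketch only (not part of this patch): create an individual
# enrollment whose client certificate issuance policy names a CA, then clean up.
from dev_utils.provisioningservice.client import ProvisioningServiceClient
from dev_utils.provisioningservice.protocol.models import (
    AttestationMechanism,
    ClientCertificateIssuancePolicy,
    IndividualEnrollment,
    SymmetricKeyAttestation,
)

service_client = ProvisioningServiceClient.create_from_connection_string(
    "HostName=<dps-name>.azure-devices-provisioning.net;"
    "SharedAccessKeyName=provisioningserviceowner;SharedAccessKey=<shared-access-key>"
)

enrollment = IndividualEnrollment(
    registration_id="e2e-cert-issuance-device",
    attestation=AttestationMechanism(
        type="symmetricKey",
        # Keys omitted here for brevity; primary_key/secondary_key can be supplied explicitly.
        symmetric_key=SymmetricKeyAttestation(),
    ),
    client_certificate_issuance_policy=ClientCertificateIssuancePolicy(
        certificate_authority_name="<certificate-authority-name>"
    ),
)

created = service_client.create_or_update_individual_enrollment(enrollment)
print(created.registration_id, created.provisioning_status)

# Remove the enrollment once the test is done.
service_client.delete_individual_enrollment_by_param(created.registration_id, etag=created.etag)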
+# -------------------------------------------------------------------------- + +from .version import VERSION + + +__version__ = VERSION diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/__init__.py b/dev_utils/dev_utils/provisioningservice/protocol/models/__init__.py new file mode 100644 index 000000000..476329b0d --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/__init__.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +try: + from .provisioning_service_error_details import ( + ProvisioningServiceErrorDetails, + ProvisioningServiceErrorDetailsException, + ) + + from .device_registration_state import DeviceRegistrationState + from .tpm_attestation import TpmAttestation + from .x509_certificate_info import X509CertificateInfo + from .x509_certificate_with_info import X509CertificateWithInfo + from .x509_certificates import X509Certificates + from .x509_ca_references import X509CAReferences + from .x509_attestation import X509Attestation + from .symmetric_key_attestation import SymmetricKeyAttestation + from .attestation_mechanism import AttestationMechanism + from .metadata import Metadata + from .initial_twin_properties import InitialTwinProperties + from .initial_twin import InitialTwin + from .reprovision_policy import ReprovisionPolicy + from .custom_allocation_definition import CustomAllocationDefinition + from .individual_enrollment import IndividualEnrollment + from .enrollment_group import EnrollmentGroup + from .client_certificate_issuance_policy import ClientCertificateIssuancePolicy +except (ImportError) as e: + print(e) + + +__all__ = [ + "ProvisioningServiceErrorDetails", + "ProvisioningServiceErrorDetailsException", + "DeviceRegistrationState", + "TpmAttestation", + "X509CertificateInfo", + "X509CertificateWithInfo", + "X509Certificates", + "X509CAReferences", + "X509Attestation", + "SymmetricKeyAttestation", + "AttestationMechanism", + "Metadata", + "InitialTwinProperties", + "InitialTwin", + "ReprovisionPolicy", + "CustomAllocationDefinition", + "IndividualEnrollment", + "EnrollmentGroup", + "ClientCertificateIssuancePolicy", +] diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/attestation_mechanism.py b/dev_utils/dev_utils/provisioningservice/protocol/models/attestation_mechanism.py new file mode 100644 index 000000000..0a85196fa --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/attestation_mechanism.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AttestationMechanism(Model): + """Attestation mechanism for individualEnrollment as well as enrollmentGroup. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Attestation Type. Possible values include: 'none', + 'tpm', 'x509', 'symmetricKey' + :type type: str or ~protocol.models.enum + :param tpm: TPM attestation method. 
+ :type tpm: ~protocol.models.TpmAttestation + :param x509: X509 attestation method. + :type x509: ~protocol.models.X509Attestation + :param symmetric_key: Symmetric Key attestation method. + :type symmetric_key: ~protocol.models.SymmetricKeyAttestation + """ + + _validation = {"type": {"required": True}} + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "tpm": {"key": "tpm", "type": "TpmAttestation"}, + "x509": {"key": "x509", "type": "X509Attestation"}, + "symmetric_key": {"key": "symmetricKey", "type": "SymmetricKeyAttestation"}, + } + + def __init__(self, **kwargs): + super(AttestationMechanism, self).__init__(**kwargs) + self.type = kwargs.get("type", None) + self.tpm = kwargs.get("tpm", None) + self.x509 = kwargs.get("x509", None) + self.symmetric_key = kwargs.get("symmetric_key", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/client_certificate_issuance_policy.py b/dev_utils/dev_utils/provisioningservice/protocol/models/client_certificate_issuance_policy.py new file mode 100644 index 000000000..d054b44f5 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/client_certificate_issuance_policy.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ClientCertificateIssuancePolicy(Model): + """The client certificate issuance policy for an enrollment record. + + All required parameters must be populated in order to send to Azure. + + :param certificate_authority_name: Required. The certificate authority name. + :type certificate_authority_name: str + """ + + _validation = {"certificate_authority_name": {"required": True}} + + _attribute_map = { + "certificate_authority_name": {"key": "certificateAuthorityName", "type": "str"}, + } + + def __init__(self, **kwargs): + super(ClientCertificateIssuancePolicy, self).__init__(**kwargs) + self.certificate_authority_name = kwargs.get("certificate_authority_name", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/custom_allocation_definition.py b/dev_utils/dev_utils/provisioningservice/protocol/models/custom_allocation_definition.py new file mode 100644 index 000000000..2e5ad3eda --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/custom_allocation_definition.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CustomAllocationDefinition(Model): + """Custom allocation definition. + + All required parameters must be populated in order to send to Azure. + + :param webhook_url: Required. The webhook URL used for allocation + requests. + :type webhook_url: str + :param api_version: Required. The API version of the provisioning service + types (such as IndividualEnrollment) sent in the custom allocation + request.
Supported versions include: "2018-09-01-preview" + :type api_version: str + """ + + _validation = {"webhook_url": {"required": True}, "api_version": {"required": True}} + + _attribute_map = { + "webhook_url": {"key": "webhookUrl", "type": "str"}, + "api_version": {"key": "apiVersion", "type": "str"}, + } + + def __init__(self, **kwargs): + super(CustomAllocationDefinition, self).__init__(**kwargs) + self.webhook_url = kwargs.get("webhook_url", None) + self.api_version = kwargs.get("api_version", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/device_registration_state.py b/dev_utils/dev_utils/provisioningservice/protocol/models/device_registration_state.py new file mode 100644 index 000000000..086cfb50f --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/device_registration_state.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DeviceRegistrationState(Model): + """Device registration state. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar registration_id: The registration ID is alphanumeric, lowercase, and + may contain hyphens. + :vartype registration_id: str + :ivar created_date_time_utc: Registration create date time (in UTC). + :vartype created_date_time_utc: datetime + :ivar assigned_hub: Assigned Azure IoT Hub. + :vartype assigned_hub: str + :ivar device_id: Device ID. + :vartype device_id: str + :ivar status: Enrollment status. Possible values include: 'unassigned', + 'assigning', 'assigned', 'failed', 'disabled' + :vartype status: str or ~protocol.models.enum + :ivar substatus: Substatus for 'Assigned' devices. Possible values include + - 'initialAssignment': Device has been assigned to an IoT hub for the + first time, 'deviceDataMigrated': Device has been assigned to a different + IoT hub and its device data was migrated from the previously assigned IoT + hub. Device data was removed from the previously assigned IoT hub, + 'deviceDataReset': Device has been assigned to a different IoT hub and + its device data was populated from the initial state stored in the + enrollment. Device data was removed from the previously assigned IoT hub. + Possible values include: 'initialAssignment', 'deviceDataMigrated', + 'deviceDataReset' + :vartype substatus: str or ~protocol.models.enum + :ivar error_code: Error code. + :vartype error_code: int + :ivar error_message: Error message. + :vartype error_message: str + :ivar last_updated_date_time_utc: Last updated date time (in UTC). + :vartype last_updated_date_time_utc: datetime + :ivar etag: The entity tag associated with the resource. 
+ :vartype etag: str + """ + + _validation = { + "registration_id": {"readonly": True}, + "created_date_time_utc": {"readonly": True}, + "assigned_hub": {"readonly": True}, + "device_id": {"readonly": True}, + "status": {"readonly": True}, + "substatus": {"readonly": True}, + "error_code": {"readonly": True}, + "error_message": {"readonly": True}, + "last_updated_date_time_utc": {"readonly": True}, + "etag": {"readonly": True}, + } + + _attribute_map = { + "registration_id": {"key": "registrationId", "type": "str"}, + "created_date_time_utc": {"key": "createdDateTimeUtc", "type": "iso-8601"}, + "assigned_hub": {"key": "assignedHub", "type": "str"}, + "device_id": {"key": "deviceId", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "substatus": {"key": "substatus", "type": "str"}, + "error_code": {"key": "errorCode", "type": "int"}, + "error_message": {"key": "errorMessage", "type": "str"}, + "last_updated_date_time_utc": {"key": "lastUpdatedDateTimeUtc", "type": "iso-8601"}, + "etag": {"key": "etag", "type": "str"}, + } + + def __init__(self, **kwargs): + super(DeviceRegistrationState, self).__init__(**kwargs) + self.registration_id = None + self.created_date_time_utc = None + self.assigned_hub = None + self.device_id = None + self.status = None + self.substatus = None + self.error_code = None + self.error_message = None + self.last_updated_date_time_utc = None + self.etag = None diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/enrollment_group.py b/dev_utils/dev_utils/provisioningservice/protocol/models/enrollment_group.py new file mode 100644 index 000000000..65b7c7020 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/enrollment_group.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EnrollmentGroup(Model): + """Enrollment group record. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param enrollment_group_id: Required. Enrollment Group ID. + :type enrollment_group_id: str + :param attestation: Required. Attestation method used by the device. + :type attestation: ~protocol.models.AttestationMechanism + :param iot_hub_host_name: The Iot Hub host name. + :type iot_hub_host_name: str + :param initial_twin: Initial device twin. + :type initial_twin: ~protocol.models.InitialTwin + :param etag: The entity tag associated with the resource. + :type etag: str + :param provisioning_status: The provisioning status. Possible values + include: 'enabled', 'disabled'. Default value: "enabled" . + :type provisioning_status: str or ~protocol.models.enum + :param reprovision_policy: The behavior when a device is re-provisioned to + an IoT hub. + :type reprovision_policy: ~protocol.models.ReprovisionPolicy + :ivar created_date_time_utc: The DateTime this resource was created. + :vartype created_date_time_utc: datetime + :ivar last_updated_date_time_utc: The DateTime this resource was last + updated. + :vartype last_updated_date_time_utc: datetime + :param allocation_policy: The allocation policy of this resource. 
This + policy overrides the tenant level allocation policy for this individual + enrollment or enrollment group. Possible values include 'hashed': Linked + IoT hubs are equally likely to have devices provisioned to them, + 'geoLatency': Devices are provisioned to an IoT hub with the lowest + latency to the device.If multiple linked IoT hubs would provide the same + lowest latency, the provisioning service hashes devices across those hubs, + 'static' : Specification of the desired IoT hub in the enrollment list + takes priority over the service-level allocation policy, 'custom': Devices + are provisioned to an IoT hub based on your own custom logic. The + provisioning service passes information about the device to the logic, and + the logic returns the desired IoT hub as well as the desired initial + configuration. We recommend using Azure Functions to host your logic. + Possible values include: 'hashed', 'geoLatency', 'static', 'custom' + :type allocation_policy: str or ~protocol.models.enum + :param iot_hubs: The list of names of IoT hubs the device(s) in this + resource can be allocated to. Must be a subset of tenant level list of IoT + hubs. + :type iot_hubs: list[str] + :param custom_allocation_definition: Custom allocation definition. + :type custom_allocation_definition: + ~protocol.models.CustomAllocationDefinition + """ + + _validation = { + "enrollment_group_id": {"required": True}, + "attestation": {"required": True}, + "created_date_time_utc": {"readonly": True}, + "last_updated_date_time_utc": {"readonly": True}, + } + + _attribute_map = { + "enrollment_group_id": {"key": "enrollmentGroupId", "type": "str"}, + "attestation": {"key": "attestation", "type": "AttestationMechanism"}, + "iot_hub_host_name": {"key": "iotHubHostName", "type": "str"}, + "initial_twin": {"key": "initialTwin", "type": "InitialTwin"}, + "etag": {"key": "etag", "type": "str"}, + "provisioning_status": {"key": "provisioningStatus", "type": "str"}, + "reprovision_policy": {"key": "reprovisionPolicy", "type": "ReprovisionPolicy"}, + "created_date_time_utc": {"key": "createdDateTimeUtc", "type": "iso-8601"}, + "last_updated_date_time_utc": {"key": "lastUpdatedDateTimeUtc", "type": "iso-8601"}, + "allocation_policy": {"key": "allocationPolicy", "type": "str"}, + "iot_hubs": {"key": "iotHubs", "type": "[str]"}, + "custom_allocation_definition": { + "key": "customAllocationDefinition", + "type": "CustomAllocationDefinition", + }, + "client_certificate_issuance_policy": { + "key": "clientCertificateIssuancePolicy", + "type": "ClientCertificateIssuancePolicy", + }, + } + + def __init__(self, **kwargs): + super(EnrollmentGroup, self).__init__(**kwargs) + self.enrollment_group_id = kwargs.get("enrollment_group_id", None) + self.attestation = kwargs.get("attestation", None) + self.iot_hub_host_name = kwargs.get("iot_hub_host_name", None) + self.initial_twin = kwargs.get("initial_twin", None) + self.etag = kwargs.get("etag", None) + self.provisioning_status = kwargs.get("provisioning_status", "enabled") + self.reprovision_policy = kwargs.get("reprovision_policy", None) + self.created_date_time_utc = None + self.last_updated_date_time_utc = None + self.allocation_policy = kwargs.get("allocation_policy", None) + self.iot_hubs = kwargs.get("iot_hubs", None) + self.custom_allocation_definition = kwargs.get("custom_allocation_definition", None) + self.client_certificate_issuance_policy = kwargs.get( + "client_certificate_issuance_policy", None + ) diff --git 
a/dev_utils/dev_utils/provisioningservice/protocol/models/individual_enrollment.py b/dev_utils/dev_utils/provisioningservice/protocol/models/individual_enrollment.py new file mode 100644 index 000000000..0b9c78f11 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/individual_enrollment.py @@ -0,0 +1,123 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IndividualEnrollment(Model): + """The device enrollment record. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param capabilities: Capabilities of the device + :type capabilities: ~protocol.models.DeviceCapabilities + :param registration_id: Required. The registration ID is alphanumeric, + lowercase, and may contain hyphens. + :type registration_id: str + :param device_id: Desired IoT Hub device ID (optional). + :type device_id: str + :ivar registration_state: Current registration status. + :vartype registration_state: ~protocol.models.DeviceRegistrationState + :param attestation: Required. Attestation method used by the device. + :type attestation: ~protocol.models.AttestationMechanism + :param iot_hub_host_name: The Iot Hub host name. + :type iot_hub_host_name: str + :param initial_twin: Initial device twin. + :type initial_twin: ~protocol.models.InitialTwin + :param etag: The entity tag associated with the resource. + :type etag: str + :param provisioning_status: The provisioning status. Possible values + include: 'enabled', 'disabled'. Default value: "enabled" . + :type provisioning_status: str or ~protocol.models.enum + :param reprovision_policy: The behavior when a device is re-provisioned to + an IoT hub. + :type reprovision_policy: ~protocol.models.ReprovisionPolicy + :ivar created_date_time_utc: The DateTime this resource was created. + :vartype created_date_time_utc: datetime + :ivar last_updated_date_time_utc: The DateTime this resource was last + updated. + :vartype last_updated_date_time_utc: datetime + :param allocation_policy: The allocation policy of this resource. This + policy overrides the tenant level allocation policy for this individual + enrollment or enrollment group. Possible values include 'hashed': Linked + IoT hubs are equally likely to have devices provisioned to them, + 'geoLatency': Devices are provisioned to an IoT hub with the lowest + latency to the device.If multiple linked IoT hubs would provide the same + lowest latency, the provisioning service hashes devices across those hubs, + 'static' : Specification of the desired IoT hub in the enrollment list + takes priority over the service-level allocation policy, 'custom': Devices + are provisioned to an IoT hub based on your own custom logic. The + provisioning service passes information about the device to the logic, and + the logic returns the desired IoT hub as well as the desired initial + configuration. We recommend using Azure Functions to host your logic. + Possible values include: 'hashed', 'geoLatency', 'static', 'custom' + :type allocation_policy: str or ~protocol.models.enum + :param iot_hubs: The list of names of IoT hubs the device(s) in this + resource can be allocated to. 
Must be a subset of tenant level list of IoT + hubs. + :type iot_hubs: list[str] + :param custom_allocation_definition: Custom allocation definition. + :type custom_allocation_definition: + ~protocol.models.CustomAllocationDefinition + """ + + _validation = { + "registration_id": {"required": True}, + "registration_state": {"readonly": True}, + "attestation": {"required": True}, + "created_date_time_utc": {"readonly": True}, + "last_updated_date_time_utc": {"readonly": True}, + } + + _attribute_map = { + "capabilities": {"key": "capabilities", "type": "DeviceCapabilities"}, + "registration_id": {"key": "registrationId", "type": "str"}, + "device_id": {"key": "deviceId", "type": "str"}, + "registration_state": {"key": "registrationState", "type": "DeviceRegistrationState"}, + "attestation": {"key": "attestation", "type": "AttestationMechanism"}, + "iot_hub_host_name": {"key": "iotHubHostName", "type": "str"}, + "initial_twin": {"key": "initialTwin", "type": "InitialTwin"}, + "etag": {"key": "etag", "type": "str"}, + "provisioning_status": {"key": "provisioningStatus", "type": "str"}, + "reprovision_policy": {"key": "reprovisionPolicy", "type": "ReprovisionPolicy"}, + "created_date_time_utc": {"key": "createdDateTimeUtc", "type": "iso-8601"}, + "last_updated_date_time_utc": {"key": "lastUpdatedDateTimeUtc", "type": "iso-8601"}, + "allocation_policy": {"key": "allocationPolicy", "type": "str"}, + "iot_hubs": {"key": "iotHubs", "type": "[str]"}, + "custom_allocation_definition": { + "key": "customAllocationDefinition", + "type": "CustomAllocationDefinition", + }, + "client_certificate_issuance_policy": { + "key": "clientCertificateIssuancePolicy", + "type": "ClientCertificateIssuancePolicy", + }, + } + + def __init__(self, **kwargs): + super(IndividualEnrollment, self).__init__(**kwargs) + self.capabilities = kwargs.get("capabilities", None) + self.registration_id = kwargs.get("registration_id", None) + self.device_id = kwargs.get("device_id", None) + self.registration_state = None + self.attestation = kwargs.get("attestation", None) + self.iot_hub_host_name = kwargs.get("iot_hub_host_name", None) + self.initial_twin = kwargs.get("initial_twin", None) + self.etag = kwargs.get("etag", None) + self.provisioning_status = kwargs.get("provisioning_status", "enabled") + self.reprovision_policy = kwargs.get("reprovision_policy", None) + self.created_date_time_utc = None + self.last_updated_date_time_utc = None + self.allocation_policy = kwargs.get("allocation_policy", None) + self.iot_hubs = kwargs.get("iot_hubs", None) + self.custom_allocation_definition = kwargs.get("custom_allocation_definition", None) + self.client_certificate_issuance_policy = kwargs.get( + "client_certificate_issuance_policy", None + ) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/initial_twin.py b/dev_utils/dev_utils/provisioningservice/protocol/models/initial_twin.py new file mode 100644 index 000000000..0002f9e34 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/initial_twin.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class InitialTwin(Model): + """Initial device twin. Contains a subset of the properties of Twin. + + :param tags: Twin tags. 
+ :type tags: ~protocol.models.TwinCollection + :param properties: Twin desired properties. + :type properties: ~protocol.models.InitialTwinProperties + """ + + _attribute_map = { + "tags": {"key": "tags", "type": "TwinCollection"}, + "properties": {"key": "properties", "type": "InitialTwinProperties"}, + } + + def __init__(self, **kwargs): + super(InitialTwin, self).__init__(**kwargs) + self.tags = kwargs.get("tags", None) + self.properties = kwargs.get("properties", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/initial_twin_properties.py b/dev_utils/dev_utils/provisioningservice/protocol/models/initial_twin_properties.py new file mode 100644 index 000000000..c88247cdf --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/initial_twin_properties.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class InitialTwinProperties(Model): + """Represents the initial properties that will be set on the device twin. + + :param desired: Gets and sets the InitialTwin desired properties. + :type desired: ~protocol.models.TwinCollection + """ + + _attribute_map = {"desired": {"key": "desired", "type": "TwinCollection"}} + + def __init__(self, **kwargs): + super(InitialTwinProperties, self).__init__(**kwargs) + self.desired = kwargs.get("desired", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/metadata.py b/dev_utils/dev_utils/provisioningservice/protocol/models/metadata.py new file mode 100644 index 000000000..f923bf571 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/metadata.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Metadata(Model): + """Metadata for the TwinCollection. + + :param last_updated: Last time the TwinCollection was updated + :type last_updated: datetime + :param last_updated_version: This SHOULD be null for Reported properties + metadata and MUST not be null for Desired properties metadata. 
+ :type last_updated_version: long + """ + + _attribute_map = { + "last_updated": {"key": "lastUpdated", "type": "iso-8601"}, + "last_updated_version": {"key": "lastUpdatedVersion", "type": "long"}, + } + + def __init__(self, **kwargs): + super(Metadata, self).__init__(**kwargs) + self.last_updated = kwargs.get("last_updated", None) + self.last_updated_version = kwargs.get("last_updated_version", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/provisioning_service_error_details.py b/dev_utils/dev_utils/provisioningservice/protocol/models/provisioning_service_error_details.py new file mode 100644 index 000000000..8b74275b6 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/provisioning_service_error_details.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model +from msrest.exceptions import HttpOperationError + + +class ProvisioningServiceErrorDetails(Model): + """Contains the properties of an error returned by the Azure IoT Hub + Provisioning Service . + + :param error_code: + :type error_code: int + :param tracking_id: + :type tracking_id: str + :param message: + :type message: str + :param info: + :type info: dict[str, str] + :param timestamp_utc: + :type timestamp_utc: datetime + """ + + _attribute_map = { + "error_code": {"key": "errorCode", "type": "int"}, + "tracking_id": {"key": "trackingId", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "info": {"key": "info", "type": "{str}"}, + "timestamp_utc": {"key": "timestampUtc", "type": "iso-8601"}, + } + + def __init__(self, **kwargs): + super(ProvisioningServiceErrorDetails, self).__init__(**kwargs) + self.error_code = kwargs.get("error_code", None) + self.tracking_id = kwargs.get("tracking_id", None) + self.message = kwargs.get("message", None) + self.info = kwargs.get("info", None) + self.timestamp_utc = kwargs.get("timestamp_utc", None) + + +class ProvisioningServiceErrorDetailsException(HttpOperationError): + """Server responsed with exception of type: 'ProvisioningServiceErrorDetails'. + + :param deserialize: A deserializer + :param response: Server response to be deserialized. + """ + + def __init__(self, deserialize, response, *args): + + super(ProvisioningServiceErrorDetailsException, self).__init__( + deserialize, response, "ProvisioningServiceErrorDetails", *args + ) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/reprovision_policy.py b/dev_utils/dev_utils/provisioningservice/protocol/models/reprovision_policy.py new file mode 100644 index 000000000..fc6d7ca85 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/reprovision_policy.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ReprovisionPolicy(Model): + """The behavior of the service when a device is re-provisioned to an IoT hub. + + All required parameters must be populated in order to send to Azure. 
+ + :param update_hub_assignment: Required. When set to true (default), the + Device Provisioning Service will evaluate the device's IoT Hub assignment + and update it if necessary for any provisioning requests beyond the first + from a given device. If set to false, the device will stay assigned to its + current IoT hub. Default value: True . + :type update_hub_assignment: bool + :param migrate_device_data: Required. When set to true (default), the + Device Provisioning Service will migrate the device's data (twin, device + capabilities, and device ID) from one IoT hub to another during an IoT hub + assignment update. If set to false, the Device Provisioning Service will + reset the device's data to the initial desired configuration stored in the + corresponding enrollment list. Default value: True . + :type migrate_device_data: bool + """ + + _validation = { + "update_hub_assignment": {"required": True}, + "migrate_device_data": {"required": True}, + } + + _attribute_map = { + "update_hub_assignment": {"key": "updateHubAssignment", "type": "bool"}, + "migrate_device_data": {"key": "migrateDeviceData", "type": "bool"}, + } + + def __init__(self, **kwargs): + super(ReprovisionPolicy, self).__init__(**kwargs) + self.update_hub_assignment = kwargs.get("update_hub_assignment", True) + self.migrate_device_data = kwargs.get("migrate_device_data", True) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/symmetric_key_attestation.py b/dev_utils/dev_utils/provisioningservice/protocol/models/symmetric_key_attestation.py new file mode 100644 index 000000000..45fa9f6cd --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/symmetric_key_attestation.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SymmetricKeyAttestation(Model): + """Attestation via SymmetricKey. + + :param primary_key: Primary symmetric key. + :type primary_key: str + :param secondary_key: Secondary symmetric key. + :type secondary_key: str + """ + + _attribute_map = { + "primary_key": {"key": "primaryKey", "type": "str"}, + "secondary_key": {"key": "secondaryKey", "type": "str"}, + } + + def __init__(self, **kwargs): + super(SymmetricKeyAttestation, self).__init__(**kwargs) + self.primary_key = kwargs.get("primary_key", None) + self.secondary_key = kwargs.get("secondary_key", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/tpm_attestation.py b/dev_utils/dev_utils/provisioningservice/protocol/models/tpm_attestation.py new file mode 100644 index 000000000..1b8324b1b --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/tpm_attestation.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TpmAttestation(Model): + """Attestation via TPM. + + All required parameters must be populated in order to send to Azure. + + :param endorsement_key: Required. 
+ :type endorsement_key: str + :param storage_root_key: + :type storage_root_key: str + """ + + _validation = {"endorsement_key": {"required": True}} + + _attribute_map = { + "endorsement_key": {"key": "endorsementKey", "type": "str"}, + "storage_root_key": {"key": "storageRootKey", "type": "str"}, + } + + def __init__(self, **kwargs): + super(TpmAttestation, self).__init__(**kwargs) + self.endorsement_key = kwargs.get("endorsement_key", None) + self.storage_root_key = kwargs.get("storage_root_key", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/twin_collection.py b/dev_utils/dev_utils/provisioningservice/protocol/models/twin_collection.py new file mode 100644 index 000000000..007b2f0f4 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/twin_collection.py @@ -0,0 +1,37 @@ +# # coding=utf-8 +# # -------------------------------------------------------------------------- +# # Code generated by Microsoft (R) AutoRest Code Generator. +# # Changes may cause incorrect behavior and will be lost if the code is +# # regenerated. +# # -------------------------------------------------------------------------- +# +# from msrest.serialization import Model +# +# +# class TwinCollection(Model): +# """Represents a collection of properties within a Twin. +# +# :param additional_properties: Unmatched properties from the message are +# deserialized this collection +# :type additional_properties: dict[str, object] +# :param version: Version of the TwinCollection +# :type version: long +# :param count: Number of properties in the TwinCollection +# :type count: int +# :param metadata: Metadata for the TwinCollection +# :type metadata: ~protocol.models.Metadata +# """ +# +# _attribute_map = { +# "additional_properties": {"key": "", "type": "{object}"}, +# "version": {"key": "version", "type": "long"}, +# "count": {"key": "count", "type": "int"}, +# "metadata": {"key": "metadata", "type": "Metadata"}, +# } +# +# def __init__(self, **kwargs): +# super(TwinCollection, self).__init__(**kwargs) +# self.additional_properties = kwargs.get("additional_properties", None) +# self.version = kwargs.get("version", None) +# self.count = kwargs.get("count", None) +# self.metadata = kwargs.get("metadata", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/x509_attestation.py b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_attestation.py new file mode 100644 index 000000000..55b2752ca --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_attestation.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class X509Attestation(Model): + """Attestation via X509. 
+ + :param client_certificates: + :type client_certificates: ~protocol.models.X509Certificates + :param signing_certificates: + :type signing_certificates: ~protocol.models.X509Certificates + :param ca_references: + :type ca_references: ~protocol.models.X509CAReferences + """ + + _attribute_map = { + "client_certificates": {"key": "clientCertificates", "type": "X509Certificates"}, + "signing_certificates": {"key": "signingCertificates", "type": "X509Certificates"}, + "ca_references": {"key": "caReferences", "type": "X509CAReferences"}, + } + + def __init__(self, **kwargs): + super(X509Attestation, self).__init__(**kwargs) + self.client_certificates = kwargs.get("client_certificates", None) + self.signing_certificates = kwargs.get("signing_certificates", None) + self.ca_references = kwargs.get("ca_references", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/x509_ca_references.py b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_ca_references.py new file mode 100644 index 000000000..5686280cb --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_ca_references.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class X509CAReferences(Model): + """Primary and secondary CA references. + + :param primary: + :type primary: str + :param secondary: + :type secondary: str + """ + + _attribute_map = { + "primary": {"key": "primary", "type": "str"}, + "secondary": {"key": "secondary", "type": "str"}, + } + + def __init__(self, **kwargs): + super(X509CAReferences, self).__init__(**kwargs) + self.primary = kwargs.get("primary", None) + self.secondary = kwargs.get("secondary", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificate_info.py b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificate_info.py new file mode 100644 index 000000000..a11c03e2e --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificate_info.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class X509CertificateInfo(Model): + """X509 certificate info. + + All required parameters must be populated in order to send to Azure. + + :param subject_name: Required. + :type subject_name: str + :param sha1_thumbprint: Required. + :type sha1_thumbprint: str + :param sha256_thumbprint: Required. + :type sha256_thumbprint: str + :param issuer_name: Required. + :type issuer_name: str + :param not_before_utc: Required. + :type not_before_utc: datetime + :param not_after_utc: Required. + :type not_after_utc: datetime + :param serial_number: Required. + :type serial_number: str + :param version: Required. 
+ :type version: int + """ + + _validation = { + "subject_name": {"required": True}, + "sha1_thumbprint": {"required": True}, + "sha256_thumbprint": {"required": True}, + "issuer_name": {"required": True}, + "not_before_utc": {"required": True}, + "not_after_utc": {"required": True}, + "serial_number": {"required": True}, + "version": {"required": True}, + } + + _attribute_map = { + "subject_name": {"key": "subjectName", "type": "str"}, + "sha1_thumbprint": {"key": "sha1Thumbprint", "type": "str"}, + "sha256_thumbprint": {"key": "sha256Thumbprint", "type": "str"}, + "issuer_name": {"key": "issuerName", "type": "str"}, + "not_before_utc": {"key": "notBeforeUtc", "type": "iso-8601"}, + "not_after_utc": {"key": "notAfterUtc", "type": "iso-8601"}, + "serial_number": {"key": "serialNumber", "type": "str"}, + "version": {"key": "version", "type": "int"}, + } + + def __init__(self, **kwargs): + super(X509CertificateInfo, self).__init__(**kwargs) + self.subject_name = kwargs.get("subject_name", None) + self.sha1_thumbprint = kwargs.get("sha1_thumbprint", None) + self.sha256_thumbprint = kwargs.get("sha256_thumbprint", None) + self.issuer_name = kwargs.get("issuer_name", None) + self.not_before_utc = kwargs.get("not_before_utc", None) + self.not_after_utc = kwargs.get("not_after_utc", None) + self.serial_number = kwargs.get("serial_number", None) + self.version = kwargs.get("version", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificate_with_info.py b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificate_with_info.py new file mode 100644 index 000000000..45dd2ec81 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificate_with_info.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class X509CertificateWithInfo(Model): + """Certificate and Certificate info. + + :param certificate: + :type certificate: str + :param info: + :type info: ~protocol.models.X509CertificateInfo + """ + + _attribute_map = { + "certificate": {"key": "certificate", "type": "str"}, + "info": {"key": "info", "type": "X509CertificateInfo"}, + } + + def __init__(self, **kwargs): + super(X509CertificateWithInfo, self).__init__(**kwargs) + self.certificate = kwargs.get("certificate", None) + self.info = kwargs.get("info", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificates.py b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificates.py new file mode 100644 index 000000000..6fef18542 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/models/x509_certificates.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class X509Certificates(Model): + """Primary and secondary certificates. 
+ + :param primary: + :type primary: ~protocol.models.X509CertificateWithInfo + :param secondary: + :type secondary: ~protocol.models.X509CertificateWithInfo + """ + + _attribute_map = { + "primary": {"key": "primary", "type": "X509CertificateWithInfo"}, + "secondary": {"key": "secondary", "type": "X509CertificateWithInfo"}, + } + + def __init__(self, **kwargs): + super(X509Certificates, self).__init__(**kwargs) + self.primary = kwargs.get("primary", None) + self.secondary = kwargs.get("secondary", None) diff --git a/dev_utils/dev_utils/provisioningservice/protocol/version.py b/dev_utils/dev_utils/provisioningservice/protocol/version.py new file mode 100644 index 000000000..e559f14ac --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/protocol/version.py @@ -0,0 +1,8 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +VERSION = "2021-11-01-preview" diff --git a/dev_utils/dev_utils/provisioningservice/utils/__init__.py b/dev_utils/dev_utils/provisioningservice/utils/__init__.py new file mode 100644 index 000000000..3502e11cf --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/utils/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. +# Licensed under the MIT license. See LICENSE file in the project root for +# full license information. + +__all__ = ["sastoken"] diff --git a/dev_utils/dev_utils/provisioningservice/utils/auth.py b/dev_utils/dev_utils/provisioningservice/utils/auth.py new file mode 100644 index 000000000..11a2e5d20 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/utils/auth.py @@ -0,0 +1,64 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +"""Provides authentication classes for use with the msrest library +""" + +from msrest.authentication import Authentication +from ..utils.connection_string import ConnectionString +from ..utils.connection_string import ( + HOST_NAME, + SHARED_ACCESS_KEY_NAME, + SHARED_ACCESS_KEY, +) +from ..utils.sastoken import SasToken + + +__all__ = ["ConnectionStringAuthentication"] + + +class ConnectionStringAuthentication(ConnectionString, Authentication): + """ConnectionString class that can be used with msrest to provide SasToken authentication + + :param connection_string: The connection string to generate SasToken with + """ + + def __init__(self, connection_string): + super(ConnectionStringAuthentication, self).__init__(connection_string) + + @classmethod + def create_with_parsed_values(cls, host_name, shared_access_key_name, shared_access_key): + connection_string = ( + HOST_NAME + + "=" + + host_name + + ";" + + SHARED_ACCESS_KEY_NAME + + "=" + + shared_access_key_name + + ";" + + SHARED_ACCESS_KEY + + "=" + + shared_access_key + ) + return cls(connection_string) + + def signed_session(self, session=None): + """Create requests session with any required auth headers applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. 
+ + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(ConnectionStringAuthentication, self).signed_session(session) + + # Authorization header + sastoken = SasToken(self[HOST_NAME], self[SHARED_ACCESS_KEY], self[SHARED_ACCESS_KEY_NAME]) + session.headers[self.header] = str(sastoken) + + return session diff --git a/dev_utils/dev_utils/provisioningservice/utils/connection_string.py b/dev_utils/dev_utils/provisioningservice/utils/connection_string.py new file mode 100644 index 000000000..594144137 --- /dev/null +++ b/dev_utils/dev_utils/provisioningservice/utils/connection_string.py @@ -0,0 +1,84 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +__all__ = ["ConnectionString"] + +CS_DELIMITER = ";" +CS_VAL_SEPARATOR = "=" + +HOST_NAME = "HostName" +SHARED_ACCESS_KEY_NAME = "SharedAccessKeyName" +SHARED_ACCESS_KEY = "SharedAccessKey" +SHARED_ACCESS_SIGNATURE = "SharedAccessSignature" +DEVICE_ID = "DeviceId" +MODULE_ID = "ModuleId" +GATEWAY_HOST_NAME = "GatewayHostName" + +_valid_keys = [ + HOST_NAME, + SHARED_ACCESS_KEY_NAME, + SHARED_ACCESS_KEY, + SHARED_ACCESS_SIGNATURE, + DEVICE_ID, + MODULE_ID, + GATEWAY_HOST_NAME, +] + + +def _parse_connection_string(connection_string): + """Return a dictionary of values contained in a given connection string""" + cs_args = connection_string.split(CS_DELIMITER) + d = dict(arg.split(CS_VAL_SEPARATOR, 1) for arg in cs_args) + if len(cs_args) != len(d): + # various errors related to incorrect parsing - duplicate args, bad syntax, etc. + raise ValueError("Invalid Connection String - Unable to parse") + if not all(key in _valid_keys for key in d.keys()): + raise ValueError("Invalid Connection String - Invalid Key") + _validate_keys(d) + return d + + +def _validate_keys(d): + """Raise ValueError if incorrect combination of keys""" + host_name = d.get(HOST_NAME) + shared_access_key_name = d.get(SHARED_ACCESS_KEY_NAME) + shared_access_key = d.get(SHARED_ACCESS_KEY) + device_id = d.get(DEVICE_ID) + + # This logic could be expanded to return the category of ConnectionString + if host_name and device_id and shared_access_key: + pass + elif host_name and shared_access_key and shared_access_key_name: + pass + else: + raise ValueError("Invalid Connection String - Incomplete") + + +class ConnectionString(object): + """ + Key/value mappings with connection details. 
Uses the same syntax as a dictionary.
+
+    Parameters:
+    connection_string(str): string with connection details provided by Azure
+
+    Raises:
+    ValueError if provided connection_string is invalid
+    """
+
+    def __init__(self, connection_string):
+        self._dict = _parse_connection_string(connection_string)
+        self._strrep = connection_string
+
+    def __getitem__(self, key):
+        return self._dict[key]
+
+    def __repr__(self):
+        return self._strrep
+
+    def get(self, key, default=None):
+        try:
+            return self._dict[key]
+        except KeyError:
+            return default
diff --git a/dev_utils/dev_utils/provisioningservice/utils/sastoken.py b/dev_utils/dev_utils/provisioningservice/utils/sastoken.py
new file mode 100644
index 000000000..51393de7d
--- /dev/null
+++ b/dev_utils/dev_utils/provisioningservice/utils/sastoken.py
@@ -0,0 +1,79 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+import base64
+import hmac
+import hashlib
+import time
+import six.moves.urllib as urllib
+
+__all__ = ["SasToken", "SasTokenError"]
+
+
+class SasTokenError(Exception):
+    def __init__(self, message, cause=None):
+        super(self.__class__, self).__init__(message)
+        self.cause = cause
+
+
+class SasToken(object):
+    """
+    Shared Access Signature Token used to authenticate a request
+
+    Parameters:
+    uri (str): URI of the resource to be accessed
+    key_name (str): Shared Access Key Name
+    key (str): Shared Access Key (base64 encoded)
+    ttl (int)[default 3600]: Time to live for the token, in seconds
+
+    Data Attributes:
+    expiry_time (int): Time that token will expire (in UTC, since epoch)
+    ttl (int): Time to live for the token, in seconds
+
+    Raises:
+    SasTokenError if trying to build a SasToken from invalid values
+    """
+
+    _encoding_type = "utf-8"
+    _service_token_format = "SharedAccessSignature sr={}&sig={}&se={}&skn={}"
+    _device_token_format = "SharedAccessSignature sr={}&sig={}&se={}"
+
+    def __init__(self, uri, key, key_name=None, ttl=3600):
+        self._uri = urllib.parse.quote_plus(uri)
+        self._key = key
+        self._key_name = key_name
+        self.ttl = ttl
+        self.refresh()
+
+    def __repr__(self):
+        return self._token
+
+    def refresh(self):
+        """
+        Refresh the SasToken lifespan, giving it a new expiry time
+        """
+        self.expiry_time = int(time.time() + self.ttl)
+        self._token = self._build_token()
+
+    def _build_token(self):
+        """Build SasToken representation
+
+        Returns:
+        String representation of the token
+        """
+        try:
+            message = (self._uri + "\n" + str(self.expiry_time)).encode(self._encoding_type)
+            signing_key = base64.b64decode(self._key.encode(self._encoding_type))
+            signed_hmac = hmac.HMAC(signing_key, message, hashlib.sha256)
+            signature = urllib.parse.quote(base64.b64encode(signed_hmac.digest()))
+        except (TypeError, base64.binascii.Error) as e:
+            raise SasTokenError("Unable to build SasToken from given values", e)
+        if self._key_name:
+            token = self._service_token_format.format(
+                self._uri, signature, str(self.expiry_time), self._key_name
+            )
+        else:
+            token = self._device_token_format.format(self._uri, signature, str(self.expiry_time))
+        return token
diff --git a/requirements_test.txt b/requirements_test.txt
index d7bfcfe21..cc8392be6 100644
--- a/requirements_test.txt
+++ b/requirements_test.txt
@@ -6,6 +6,6 @@ pytest-cov
 pytest-timeout
flake8 azure-iot-hub # Only needed for iothub e2e -azure-iothub-provisioningserviceclient >= 1.2.0 # Only needed for provisioning e2e azure-eventhub # Only needed for iothub e2e psutil # Only needed for iothub e2e +msrest # Only needed for provisioning e2e \ No newline at end of file diff --git a/scripts/create_x509_chain_crypto.py b/scripts/create_x509_chain_crypto.py index 7670e62ac..08c06f315 100644 --- a/scripts/create_x509_chain_crypto.py +++ b/scripts/create_x509_chain_crypto.py @@ -400,6 +400,7 @@ def before_cert_creation_from_pipeline(): # Only needed for pipeline tests dirPath = "demoCA" + print("creating demoCA and other subfolders") if os.path.exists(dirPath): print("demoCA exists, so will delete first") shutil.rmtree(dirPath) @@ -434,9 +435,12 @@ def call_intermediate_cert_and_device_cert_creation_from_pipeline( ca_key = os.getenv("PROVISIONING_ROOT_CERT_KEY") in_cert_file_path = "ca_cert.pem" in_key_file_path = "ca_key.pem" + print("writing ca cert on a file") with open(in_cert_file_path, "w") as out_ca_pem: cert_pem_data = str(base64.b64decode(ca_cert), "ascii") out_ca_pem.write(cert_pem_data) + print("root cert content") + print(cert_pem_data) encoded_cert_pem_data = str.encode(cert_pem_data) root_cert = x509.load_pem_x509_certificate(encoded_cert_pem_data, default_backend()) @@ -464,6 +468,7 @@ def call_intermediate_cert_and_device_cert_creation_from_pipeline( key_file=intermediate_password_file, password=intermediate_password, key_size=key_size ) + print("creating intermediate cert") intermediate_cert = create_intermediate_ca_cert( issuer_cert_subject=root_cert.subject, issuer_key=root_private_key, @@ -472,6 +477,7 @@ def call_intermediate_cert_and_device_cert_creation_from_pipeline( days=days, ) + print("creating device certs") create_multiple_device_keys_and_certs( number_of_devices=device_count, issuer_cert_subject=intermediate_cert.subject, diff --git a/tests/e2e/provisioning_e2e/pytest.ini b/tests/e2e/provisioning_e2e/pytest.ini index 2f799d7d7..42c6ea50e 100644 --- a/tests/e2e/provisioning_e2e/pytest.ini +++ b/tests/e2e/provisioning_e2e/pytest.ini @@ -1,2 +1,2 @@ [pytest] -addopts = --timeout 30 \ No newline at end of file +addopts = --timeout 35 \ No newline at end of file diff --git a/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py index e67aa10ad..a1f1f8034 100644 --- a/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_async_certificate_enrollments.py @@ -3,17 +3,13 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -import asyncio from provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device.aio import ProvisioningDeviceClient from azure.iot.device.common import X509 -from provisioningserviceclient import ( - ProvisioningServiceClient, - IndividualEnrollment, - EnrollmentGroup, -) -from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy +from dev_utils.provisioningservice.protocol import models +from dev_utils.provisioningservice.client import ProvisioningServiceClient + import pytest import logging import os @@ -28,11 +24,9 @@ delete_directories_certs_created_from_pipeline, ) - pytestmark = pytest.mark.asyncio logging.basicConfig(level=logging.DEBUG) - intermediate_common_name = "e2edpshomenum" intermediate_password = "revelio" device_common_name = "e2edpslocomotor" + str(uuid.uuid4()) @@ -47,12 +41,11 @@ PROVISIONING_HOST = os.getenv("PROVISIONING_DEVICE_ENDPOINT") ID_SCOPE = os.getenv("PROVISIONING_DEVICE_IDSCOPE") -certificate_count = 8 type_to_device_indices = { "individual_with_device_id": [1], "individual_no_device_id": [2], - "group_intermediate": [3, 4, 5], - "group_ca": [6, 7, 8], + "group_intermediate": [3, 4], + "group_ca": [5, 6], } @@ -66,7 +59,7 @@ def before_all_tests(request): ca_password=os.getenv("PROVISIONING_ROOT_PASSWORD"), intermediate_password=intermediate_password, device_password=device_password, - device_count=8, + device_count=6, ) def after_module(): @@ -83,10 +76,13 @@ def after_module(): async def test_device_register_with_device_id_for_a_x509_individual_enrollment(protocol): device_id = "e2edpsthunderbolt" device_index = type_to_device_indices.get("individual_with_device_id")[0] - registration_id = "" + + registration_id = device_common_name + str(device_index) try: + cert_content = read_cert_content_from_file(device_index=device_index) + individual_enrollment_record = create_individual_enrollment_with_x509_client_certs( - device_index=device_index, device_id=device_id + registration_id=registration_id, primary_cert=cert_content, device_id=device_id ) registration_id = individual_enrollment_record.registration_id @@ -98,8 +94,6 @@ async def test_device_register_with_device_id_for_a_x509_individual_enrollment(p assert device_id != registration_id assert_device_provisioned(device_id=device_id, registration_result=registration_result) - # TODO Remove weird fix : not sure why the delete of the device results in connection time out - await asyncio.sleep(10) device_registry_helper.try_delete_device(device_id) finally: service_client.delete_individual_enrollment_by_param(registration_id) @@ -111,11 +105,14 @@ async def test_device_register_with_device_id_for_a_x509_individual_enrollment(p @pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) async def test_device_register_with_no_device_id_for_a_x509_individual_enrollment(protocol): device_index = type_to_device_indices.get("individual_no_device_id")[0] - registration_id = "" + registration_id = device_common_name + str(device_index) try: + cert_content = read_cert_content_from_file(device_index=device_index) + individual_enrollment_record = create_individual_enrollment_with_x509_client_certs( - device_index=device_index + registration_id=registration_id, primary_cert=cert_content ) + registration_id = individual_enrollment_record.registration_id device_cert_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" @@ -132,32 +129,36 @@ async def 
test_device_register_with_no_device_id_for_a_x509_individual_enrollmen service_client.delete_individual_enrollment_by_param(registration_id) +# TODO : Don't do mqttws as it conflicts with SAME cert problem, Need complete set of new certs with mqtts @pytest.mark.it( "A group of devices get provisioned to the linked IoTHub with device_ids equal to the individual registration_ids inside a group enrollment that has been created with intermediate X509 authentication" ) -@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) +@pytest.mark.parametrize("protocol", ["mqtt"]) async def test_group_of_devices_register_with_no_device_id_for_a_x509_intermediate_authentication_group_enrollment( protocol, ): group_id = "e2e-intermediate-durmstrang" + str(uuid.uuid4()) - common_device_id = device_common_name + common_device_id = "e2edpsinterdevice" devices_indices = type_to_device_indices.get("group_intermediate") device_count_in_group = len(devices_indices) - reprovision_policy = ReprovisionPolicy(migrate_device_data=True) + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) try: intermediate_cert_filename = "demoCA/newcerts/intermediate_cert.pem" with open(intermediate_cert_filename, "r") as intermediate_pem: intermediate_cert_content = intermediate_pem.read() - attestation_mechanism = AttestationMechanism.create_with_x509_signing_certs( - intermediate_cert_content + x509 = create_x509_client_or_sign_certs( + is_client=False, primary_cert=intermediate_cert_content ) - enrollment_group_provisioning_model = EnrollmentGroup.create( - group_id, attestation=attestation_mechanism, reprovision_policy=reprovision_policy + attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509) + enrollment_group_provisioning_model = models.EnrollmentGroup( + enrollment_group_id=group_id, + attestation=attestation_mechanism, + reprovision_policy=reprovision_policy, ) - service_client.create_or_update(enrollment_group_provisioning_model) + service_client.create_or_update_enrollment_group(enrollment_group_provisioning_model) count = 0 common_device_key_input_file = "demoCA/private/device_key" @@ -183,9 +184,9 @@ async def test_group_of_devices_register_with_no_device_id_for_a_x509_intermedia ) assert_device_provisioned(device_id=device_id, registration_result=registration_result) + device_registry_helper.try_delete_device(device_id) - # Make sure space is okay. The following line must be outside for loop. 
assert count == device_count_in_group finally: @@ -198,26 +199,27 @@ async def test_group_of_devices_register_with_no_device_id_for_a_x509_intermedia @pytest.mark.it( "A group of devices get provisioned to the linked IoTHub with device_ids equal to the individual registration_ids inside a group enrollment that has been created with an already uploaded ca cert X509 authentication" ) -@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) +@pytest.mark.parametrize("protocol", ["mqtt"]) async def test_group_of_devices_register_with_no_device_id_for_a_x509_ca_authentication_group_enrollment( protocol, ): group_id = "e2e-ca-ilvermorny" + str(uuid.uuid4()) - common_device_id = device_common_name + common_device_id = "e2edpscadevice" devices_indices = type_to_device_indices.get("group_ca") device_count_in_group = len(devices_indices) - reprovision_policy = ReprovisionPolicy(migrate_device_data=True) + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) try: DPS_GROUP_CA_CERT = os.getenv("PROVISIONING_ROOT_CERT") - attestation_mechanism = AttestationMechanism.create_with_x509_ca_refs( - ref1=DPS_GROUP_CA_CERT - ) - enrollment_group_provisioning_model = EnrollmentGroup.create( - group_id, attestation=attestation_mechanism, reprovision_policy=reprovision_policy + x509 = create_x509_ca_refs(primary_ref=DPS_GROUP_CA_CERT) + attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509) + enrollment_group_provisioning_model = models.EnrollmentGroup( + enrollment_group_id=group_id, + attestation=attestation_mechanism, + reprovision_policy=reprovision_policy, ) - service_client.create_or_update(enrollment_group_provisioning_model) + service_client.create_or_update_enrollment_group(enrollment_group_provisioning_model) count = 0 intermediate_cert_filename = "demoCA/newcerts/intermediate_cert.pem" @@ -227,6 +229,7 @@ async def test_group_of_devices_register_with_no_device_id_for_a_x509_ca_authent for index in devices_indices: count = count + 1 device_id = common_device_id + str(index) + device_key_input_file = common_device_key_input_file + str(index) + ".pem" device_cert_input_file = common_device_cert_input_file + str(index) + ".pem" device_inter_cert_chain_file = common_device_inter_cert_chain_file + str(index) + ".pem" @@ -249,7 +252,6 @@ async def test_group_of_devices_register_with_no_device_id_for_a_x509_ca_authent assert_device_provisioned(device_id=device_id, registration_result=registration_result) device_registry_helper.try_delete_device(device_id) - # Make sure space is okay. The following line must be outside for loop. 
assert count == device_count_in_group finally: service_client.delete_enrollment_group_by_param(group_id) @@ -271,24 +273,63 @@ def assert_device_provisioned(device_id, registration_result): assert device.device_id == device_id -def create_individual_enrollment_with_x509_client_certs(device_index, device_id=None): - registration_id = device_common_name + str(device_index) - reprovision_policy = ReprovisionPolicy(migrate_device_data=True) - - device_cert_input_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" - with open(device_cert_input_file, "r") as in_device_cert: - device_cert_content = in_device_cert.read() +def create_individual_enrollment_with_x509_client_certs( + registration_id, + primary_cert, + secondary_cert=None, + device_id=None, + client_ca_name=None, +): + print("primary cert") + print(primary_cert) + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) + x509 = create_x509_client_or_sign_certs( + is_client=True, primary_cert=primary_cert, secondary_cert=secondary_cert + ) + attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509) - attestation_mechanism = AttestationMechanism.create_with_x509_client_certs(device_cert_content) + client_certificate_issuance_policy = None + if client_ca_name: + client_certificate_issuance_policy = models.ClientCertificateIssuancePolicy( + certificate_authority_name=client_ca_name + ) - individual_provisioning_model = IndividualEnrollment.create( + individual_provisioning_model = models.IndividualEnrollment( attestation=attestation_mechanism, registration_id=registration_id, reprovision_policy=reprovision_policy, device_id=device_id, + client_certificate_issuance_policy=client_certificate_issuance_policy, ) - return service_client.create_or_update(individual_provisioning_model) + return service_client.create_or_update_individual_enrollment(individual_provisioning_model) + + +def create_x509_client_or_sign_certs(is_client, primary_cert, secondary_cert=None): + + primary = models.X509CertificateWithInfo(certificate=primary_cert) + secondary = None + if secondary_cert: + secondary = models.X509CertificateWithInfo(certificate=secondary_cert) + certs = models.X509Certificates(primary=primary, secondary=secondary) + if is_client: + x509_attestation = models.X509Attestation(client_certificates=certs) + else: + x509_attestation = models.X509Attestation(signing_certificates=certs) + return x509_attestation + + +def create_x509_ca_refs(primary_ref, secondary_ref=None): + ca_refs = models.X509CAReferences(primary=primary_ref, secondary=secondary_ref) + x509_attestation = models.X509Attestation(ca_references=ca_refs) + return x509_attestation + + +def read_cert_content_from_file(device_index): + device_cert_input_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" + with open(device_cert_input_file, "r") as in_device_cert: + device_cert_content = in_device_cert.read() + return device_cert_content async def result_from_register(registration_id, device_cert_file, device_key_file, protocol): diff --git a/tests/e2e/provisioning_e2e/tests/test_async_dps_cert_mgmt.py b/tests/e2e/provisioning_e2e/tests/test_async_dps_cert_mgmt.py new file mode 100644 index 000000000..e39b26fc0 --- /dev/null +++ b/tests/e2e/provisioning_e2e/tests/test_async_dps_cert_mgmt.py @@ -0,0 +1,590 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from provisioning_e2e.service_helper import Helper, connection_string_to_hostname +from azure.iot.device.aio import ProvisioningDeviceClient, IoTHubDeviceClient +from azure.iot.device.common import X509 +from dev_utils.provisioningservice.protocol import models +from dev_utils.provisioningservice.client import ProvisioningServiceClient + +import pytest +import logging +import os +import uuid +import base64 +import hmac +import hashlib + +from . import path_adjust # noqa: F401 + +# Refers to an item in "scripts" in the root. This is made to work via the above path_adjust +from create_x509_chain_crypto import ( + before_cert_creation_from_pipeline, + call_intermediate_cert_and_device_cert_creation_from_pipeline, + delete_directories_certs_created_from_pipeline, + create_private_key, + create_csr, +) + +pytestmark = pytest.mark.asyncio +logging.basicConfig(level=logging.DEBUG) + +intermediate_common_name = "e2edpshomenumdps" +intermediate_password = "revelio" +device_common_name = "e2edpslocomotor" + str(uuid.uuid4()) +device_password = "mortis" + +service_client = ProvisioningServiceClient.create_from_connection_string( + os.getenv("PROVISIONING_SERVICE_CONNECTION_STRING") +) +device_registry_helper = Helper(os.getenv("IOTHUB_CONNECTION_STRING")) +linked_iot_hub = connection_string_to_hostname(os.getenv("IOTHUB_CONNECTION_STRING")) + +PROVISIONING_HOST = os.getenv("PROVISIONING_DEVICE_ENDPOINT") +ID_SCOPE = os.getenv("PROVISIONING_DEVICE_IDSCOPE") +CLIENT_CERT_AUTH_NAME = os.getenv("CLIENT_CERTIFICATE_AUTHORITY_NAME") + +type_to_device_indices = { + "individual_with_device_id": [1], + "individual_no_device_id": [2], + "group_intermediate": [3, 4, 5], + "group_ca": [6, 7, 8], + "group_symmetric": [9, 10, 11], +} + + +@pytest.fixture(scope="module", autouse=True) +def before_all_tests(request): + logging.info("set up certificates before cert related tests") + before_cert_creation_from_pipeline() + call_intermediate_cert_and_device_cert_creation_from_pipeline( + intermediate_common_name=intermediate_common_name, + device_common_name=device_common_name, + ca_password=os.getenv("PROVISIONING_ROOT_PASSWORD"), + intermediate_password=intermediate_password, + device_password=device_password, + device_count=8, + ) + + def after_module(): + logging.info("tear down certificates after cert related tests") + delete_directories_certs_created_from_pipeline() + + request.addfinalizer(after_module) + + +@pytest.mark.it( + "A device requests a client cert by sending a certificate signing request " + "while being provisioned to the linked IoTHub with the device_id equal to the registration_id" + "of the individual enrollment that has been created with a symmetric key authentication" +) +@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) +async def test_device_register_with_client_cert_issuance_for_a_symmetric_key_individual_enrollment( + protocol, +): + registration_id = "e2e-dps-locomotor" + str(uuid.uuid4()) + key_file = "key.pem" + csr_file = "request.pem" + issued_cert_file = "cert.pem" + try: + attestation_mechanism = models.AttestationMechanism(type="symmetricKey") + individual_enrollment_record = create_individual_enrollment( + registration_id=registration_id, + attestation_mechanism=attestation_mechanism, + client_ca_name=CLIENT_CERT_AUTH_NAME, + ) + symmetric_key = individual_enrollment_record.attestation.symmetric_key.primary_key + private_key = 
create_private_key(key_file)
+        create_csr(private_key, csr_file, registration_id)
+
+        registration_result = await register_via_symmetric_key(
+            registration_id, symmetric_key, protocol, csr_file=csr_file
+        )
+
+        assert_device_provisioned(
+            device_id=registration_id, registration_result=registration_result
+        )
+        await connect_device_with_operational_cert(
+            registration_result=registration_result,
+            issued_cert_file=issued_cert_file,
+            key_file=key_file,
+        )
+        device_registry_helper.try_delete_device(registration_id)
+    finally:
+        service_client.delete_individual_enrollment_by_param(registration_id)
+        delete_client_certs(key_file, csr_file, issued_cert_file)
+
+
+@pytest.mark.it(
+    "A group of devices request client certs by sending certificate signing requests while being provisioned"
+    " to the linked IoTHub inside a group enrollment that has been created with a symmetric key authentication"
+)
+@pytest.mark.parametrize("protocol", ["mqtt"])
+async def test_device_register_with_client_cert_issuance_for_a_symmetric_key_group_enrollment(
+    protocol,
+):
+    group_id = "e2e-symmetric-group" + str(uuid.uuid4())
+    devices_indices = type_to_device_indices.get("group_symmetric")
+    device_count_in_group = len(devices_indices)
+    common_device_id = "e2edpsgroupsymmetric"
+    try:
+        attestation_mechanism = models.AttestationMechanism(type="symmetricKey")
+        eg = create_enrollment_group(group_id=group_id, attestation_mechanism=attestation_mechanism)
+        master_key = eg.attestation.symmetric_key.primary_key
+        count = 0
+        for index in devices_indices:
+            count = count + 1
+            device_id = common_device_id + str(index)
+            device_key = derive_device_key(device_id, master_key)
+
+            key_file = "key" + str(index) + ".pem"
+            csr_file = "request" + str(index) + ".pem"
+
+            private_key = create_private_key(key_file)
+            create_csr(private_key, csr_file, device_id)
+            registration_result = await register_via_symmetric_key(
+                registration_id=device_id,
+                symmetric_key=device_key,
+                protocol=protocol,
+                csr_file=csr_file,
+            )
+
+            assert_device_provisioned(device_id=device_id, registration_result=registration_result)
+            issued_cert_file = "cert" + str(index) + ".pem"
+            await connect_device_with_operational_cert(
+                registration_result=registration_result,
+                issued_cert_file=issued_cert_file,
+                key_file=key_file,
+            )
+            device_registry_helper.try_delete_device(device_id)
+        assert count == device_count_in_group
+    finally:
+        for index in devices_indices:
+            key_file = "key" + str(index) + ".pem"
+            csr_file = "request" + str(index) + ".pem"
+            issued_cert_file = "cert" + str(index) + ".pem"
+            delete_client_certs(key_file, csr_file, issued_cert_file)
+        service_client.delete_enrollment_group_by_param(group_id)
+
+
+@pytest.mark.it(
+    "A device gets provisioned to the linked IoTHub with the user supplied device_id different from the registration_id of the individual enrollment that has been created with a selfsigned X509 authentication"
+)
+@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"])
+async def test_device_register_with_device_id_for_a_x509_individual_enrollment(protocol):
+    device_id = "e2edpsthunderbolt"
+    device_index = type_to_device_indices.get("individual_with_device_id")[0]
+    registration_id = device_common_name + str(device_index)
+    try:
+        cert_content = read_cert_content_from_file(device_index=device_index)
+        x509 = create_x509_client_or_sign_certs(is_client=True, primary_cert=cert_content)
+        attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509)
+        individual_enrollment_record = 
create_individual_enrollment( + registration_id=registration_id, + attestation_mechanism=attestation_mechanism, + device_id=device_id, + client_ca_name=CLIENT_CERT_AUTH_NAME, + ) + registration_id = individual_enrollment_record.registration_id + + device_cert_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" + device_key_file = "demoCA/private/device_key" + str(device_index) + ".pem" + + key_file = "key.pem" + csr_file = "request.pem" + + private_key = create_private_key(key_file) + create_csr(private_key, csr_file, registration_id) + + registration_result = await register_via_x509( + registration_id, device_cert_file, device_key_file, protocol, csr_file=csr_file + ) + + assert device_id != registration_id + assert_device_provisioned(device_id=device_id, registration_result=registration_result) + issued_cert_file = "cert.pem" + await connect_device_with_operational_cert( + registration_result=registration_result, + issued_cert_file=issued_cert_file, + key_file=key_file, + ) + device_registry_helper.try_delete_device(device_id) + finally: + delete_client_certs(key_file, csr_file, issued_cert_file) + service_client.delete_individual_enrollment_by_param(registration_id) + + +@pytest.mark.it( + "A device gets provisioned to the linked IoTHub with device_id equal to the registration_id of the " + "individual enrollment that has been created with a selfsigned X509 authentication" +) +@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) +async def test_device_register_with_no_device_id_for_a_x509_individual_enrollment(protocol): + device_index = type_to_device_indices.get("individual_no_device_id")[0] + registration_id = device_common_name + str(device_index) + try: + cert_content = read_cert_content_from_file(device_index=device_index) + x509 = create_x509_client_or_sign_certs(is_client=True, primary_cert=cert_content) + attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509) + individual_enrollment_record = create_individual_enrollment( + registration_id=registration_id, + attestation_mechanism=attestation_mechanism, + client_ca_name=CLIENT_CERT_AUTH_NAME, + ) + + registration_id = individual_enrollment_record.registration_id + + device_cert_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" + device_key_file = "demoCA/private/device_key" + str(device_index) + ".pem" + + key_file = "key.pem" + csr_file = "request.pem" + + private_key = create_private_key(key_file) + create_csr(private_key, csr_file, registration_id) + + registration_result = await register_via_x509( + registration_id, device_cert_file, device_key_file, protocol, csr_file=csr_file + ) + + assert_device_provisioned( + device_id=registration_id, registration_result=registration_result + ) + + issued_cert_file = "cert.pem" + + await connect_device_with_operational_cert( + registration_result=registration_result, + issued_cert_file=issued_cert_file, + key_file=key_file, + ) + device_registry_helper.try_delete_device(registration_id) + finally: + delete_client_certs(key_file, csr_file, issued_cert_file) + service_client.delete_individual_enrollment_by_param(registration_id) + + +@pytest.mark.it( + "A group of devices get provisioned to the linked IoTHub with device_ids equal to the individual registration_ids " + "inside a group enrollment that has been created with intermediate X509 authentication" +) +async def test_group_of_devices_register_with_no_device_id_for_a_x509_intermediate_authentication_group_enrollment(): + protocol = "mqtt" + group_id = 
"e2e-intermediate-durmstrang" + str(uuid.uuid4()) + common_device_id = "e2edpsinterdevice" + devices_indices = type_to_device_indices.get("group_intermediate") + device_count_in_group = len(devices_indices) + + try: + intermediate_cert_filename = "demoCA/newcerts/intermediate_cert.pem" + with open(intermediate_cert_filename, "r") as intermediate_pem: + intermediate_cert_content = intermediate_pem.read() + + x509 = create_x509_client_or_sign_certs( + is_client=False, + primary_cert=intermediate_cert_content, + ) + attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509) + create_enrollment_group(group_id=group_id, attestation_mechanism=attestation_mechanism) + count = 0 + common_device_key_input_file = "demoCA/private/device_key" + common_device_cert_input_file = "demoCA/newcerts/device_cert" + common_device_inter_cert_chain_file = "demoCA/newcerts/out_inter_device_chain_cert" + for index in devices_indices: + count = count + 1 + device_id = common_device_id + str(index) + device_key_input_file = common_device_key_input_file + str(index) + ".pem" + device_cert_input_file = common_device_cert_input_file + str(index) + ".pem" + device_inter_cert_chain_file = common_device_inter_cert_chain_file + str(index) + ".pem" + filenames = [device_cert_input_file, intermediate_cert_filename] + with open(device_inter_cert_chain_file, "w") as outfile: + for fname in filenames: + with open(fname) as infile: + outfile.write(infile.read()) + + key_file = "key" + str(index) + ".pem" + csr_file = "request" + str(index) + ".pem" + + private_key = create_private_key(key_file) + create_csr(private_key, csr_file, device_id) + + registration_result = await register_via_x509( + registration_id=device_id, + device_cert_file=device_inter_cert_chain_file, + device_key_file=device_key_input_file, + protocol=protocol, + csr_file=csr_file, + ) + + assert_device_provisioned(device_id=device_id, registration_result=registration_result) + print("device was provisioned") + print(device_id) + issued_cert_file = "cert" + str(index) + ".pem" + + await connect_device_with_operational_cert( + registration_result=registration_result, + issued_cert_file=issued_cert_file, + key_file=key_file, + ) + device_registry_helper.try_delete_device(device_id) + + assert count == device_count_in_group + + finally: + for index in devices_indices: + key_file = "key" + str(index) + ".pem" + csr_file = "request" + str(index) + ".pem" + issued_cert_file = "cert" + str(index) + ".pem" + delete_client_certs(key_file, csr_file, issued_cert_file) + + service_client.delete_enrollment_group_by_param(group_id) + + +@pytest.mark.skip( + reason="The enrollment is never properly created on the pipeline and it is always created without any CA reference and eventually the registration fails" +) +@pytest.mark.it( + "A group of devices get provisioned to the linked IoTHub with device_ids equal to the individual registration_ids inside a group enrollment that has been created with an already uploaded ca cert X509 authentication" +) +async def test_group_of_devices_register_with_no_device_id_for_a_x509_ca_authentication_group_enrollment(): + protocol = "mqtt" + group_id = "e2e-ca-ilvermorny" + str(uuid.uuid4()) + common_device_id = "e2edpscadevice" + devices_indices = type_to_device_indices.get("group_ca") + device_count_in_group = len(devices_indices) + try: + DPS_GROUP_CA_CERT = os.getenv("PROVISIONING_ROOT_CERT") + x509 = create_x509_ca_refs(primary_ref=DPS_GROUP_CA_CERT) + attestation_mechanism = models.AttestationMechanism(type="x509", 
x509=x509) + create_enrollment_group(group_id=group_id, attestation_mechanism=attestation_mechanism) + count = 0 + intermediate_cert_filename = "demoCA/newcerts/intermediate_cert.pem" + common_device_key_input_file = "demoCA/private/device_key" + common_device_cert_input_file = "demoCA/newcerts/device_cert" + common_device_inter_cert_chain_file = "demoCA/newcerts/out_inter_device_chain_cert" + for index in devices_indices: + count = count + 1 + device_id = common_device_id + str(index) + device_key_input_file = common_device_key_input_file + str(index) + ".pem" + device_cert_input_file = common_device_cert_input_file + str(index) + ".pem" + device_inter_cert_chain_file = common_device_inter_cert_chain_file + str(index) + ".pem" + filenames = [device_cert_input_file, intermediate_cert_filename] + with open(device_inter_cert_chain_file, "w") as outfile: + for fname in filenames: + with open(fname) as infile: + logging.debug("Filename is {}".format(fname)) + content = infile.read() + logging.debug(content) + outfile.write(content) + + key_file = "key" + str(index) + ".pem" + csr_file = "request" + str(index) + ".pem" + + private_key = create_private_key(key_file) + create_csr(private_key, csr_file, device_id) + + registration_result = await register_via_x509( + registration_id=device_id, + device_cert_file=device_inter_cert_chain_file, + device_key_file=device_key_input_file, + protocol=protocol, + csr_file=csr_file, + ) + + assert_device_provisioned(device_id=device_id, registration_result=registration_result) + print("device was provisioned for ca") + print(device_id) + + issued_cert_file = "cert" + str(index) + ".pem" + + await connect_device_with_operational_cert( + registration_result=registration_result, + issued_cert_file=issued_cert_file, + key_file=key_file, + ) + device_registry_helper.try_delete_device(device_id) + + assert count == device_count_in_group + finally: + for index in devices_indices: + key_file = "key" + str(index) + ".pem" + csr_file = "request" + str(index) + ".pem" + issued_cert_file = "cert" + str(index) + ".pem" + delete_client_certs(key_file, csr_file, issued_cert_file) + service_client.delete_enrollment_group_by_param(group_id) + + +def assert_device_provisioned(device_id, registration_result): + """ + Assert that the device has been provisioned correctly to iothub from the registration result as well as from the device registry + :param device_id: The device id + :param registration_result: The registration result + """ + assert registration_result.status == "assigned" + assert registration_result.registration_state.device_id == device_id + assert registration_result.registration_state.assigned_hub == linked_iot_hub + + device = device_registry_helper.get_device(device_id) + assert device is not None + assert device.authentication.type == "selfSigned" + print("assertions") + print(device_id) + print(device.authentication.type) + assert device.device_id == device_id + + +def create_individual_enrollment( + registration_id, + attestation_mechanism, + device_id=None, + client_ca_name=None, +): + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) + client_certificate_issuance_policy = None + if client_ca_name: + client_certificate_issuance_policy = models.ClientCertificateIssuancePolicy( + certificate_authority_name=client_ca_name + ) + + individual_provisioning_model = models.IndividualEnrollment( + attestation=attestation_mechanism, + registration_id=registration_id, + reprovision_policy=reprovision_policy, + device_id=device_id, + 
client_certificate_issuance_policy=client_certificate_issuance_policy, + ) + + return service_client.create_or_update_individual_enrollment(individual_provisioning_model) + + +def create_x509_client_or_sign_certs(is_client, primary_cert, secondary_cert=None): + + primary = models.X509CertificateWithInfo(certificate=primary_cert) + secondary = None + if secondary_cert: + secondary = models.X509CertificateWithInfo(certificate=secondary_cert) + certs = models.X509Certificates(primary=primary, secondary=secondary) + if is_client: + x509_attestation = models.X509Attestation(client_certificates=certs) + else: + x509_attestation = models.X509Attestation(signing_certificates=certs) + return x509_attestation + + +def create_x509_ca_refs(primary_ref, secondary_ref=None): + ca_refs = models.X509CAReferences(primary=primary_ref, secondary=secondary_ref) + x509_attestation = models.X509Attestation(ca_references=ca_refs) + return x509_attestation + + +def read_cert_content_from_file(device_index): + device_cert_input_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" + with open(device_cert_input_file, "r") as in_device_cert: + device_cert_content = in_device_cert.read() + return device_cert_content + + +def delete_client_certs(key_file, csr_file, issued_cert_file): + if os.path.exists(key_file): + os.remove(key_file) + if os.path.exists(csr_file): + os.remove(csr_file) + if os.path.exists(issued_cert_file): + os.remove(issued_cert_file) + + +async def register_via_x509( + registration_id, device_cert_file, device_key_file, protocol, csr_file=None +): + x509 = X509(cert_file=device_cert_file, key_file=device_key_file, pass_phrase=device_password) + protocol_boolean_mapping = {"mqtt": False, "mqttws": True} + provisioning_device_client = ProvisioningDeviceClient.create_from_x509_certificate( + provisioning_host=PROVISIONING_HOST, + registration_id=registration_id, + id_scope=ID_SCOPE, + x509=x509, + websockets=protocol_boolean_mapping[protocol], + ) + + if csr_file: + with open(csr_file, "r") as csr: + csr_data = csr.read() + # Set the CSR on the client to send it to DPS + provisioning_device_client.client_certificate_signing_request = str(csr_data) + + return await provisioning_device_client.register() + + +async def register_via_symmetric_key(registration_id, symmetric_key, protocol, csr_file=None): + # We have this mapping because the pytest logs look better with "mqtt" and "mqttws" + # instead of just "True" and "False". 
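+    # The mapped boolean is what gets passed as the "websockets" keyword argument below:
+    # False keeps plain MQTT over TCP, True switches the transport to MQTT over WebSockets.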
+    protocol_boolean_mapping = {"mqtt": False, "mqttws": True}
+    provisioning_device_client = ProvisioningDeviceClient.create_from_symmetric_key(
+        provisioning_host=PROVISIONING_HOST,
+        registration_id=registration_id,
+        id_scope=ID_SCOPE,
+        symmetric_key=symmetric_key,
+        websockets=protocol_boolean_mapping[protocol],
+    )
+    if csr_file:
+        with open(csr_file, "r") as csr:
+            csr_data = csr.read()
+            # Set the CSR on the client to send it to DPS
+            provisioning_device_client.client_certificate_signing_request = str(csr_data)
+    return await provisioning_device_client.register()
+
+
+def create_enrollment_group(group_id, attestation_mechanism):
+
+    reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True)
+    client_certificate_issuance_policy = models.ClientCertificateIssuancePolicy(
+        certificate_authority_name=CLIENT_CERT_AUTH_NAME
+    )
+    enrollment_group_provisioning_model = models.EnrollmentGroup(
+        enrollment_group_id=group_id,
+        attestation=attestation_mechanism,
+        reprovision_policy=reprovision_policy,
+        client_certificate_issuance_policy=client_certificate_issuance_policy,
+    )
+    return service_client.create_or_update_enrollment_group(enrollment_group_provisioning_model)
+
+
+def derive_device_key(device_id, group_symmetric_key):
+    """
+    The unique device ID and the group master key should be encoded into "utf-8".
+    After this, the encoded group master key must be used to compute an HMAC-SHA256 of the encoded device ID.
+    Finally, the result must be converted into Base64 format.
+    The device key is the "utf-8" decoding of the above result.
+    """
+    message = device_id.encode("utf-8")
+    signing_key = base64.b64decode(group_symmetric_key.encode("utf-8"))
+    signed_hmac = hmac.HMAC(signing_key, message, hashlib.sha256)
+    device_key_encoded = base64.b64encode(signed_hmac.digest())
+    return device_key_encoded.decode("utf-8")
+
+
+async def connect_device_with_operational_cert(registration_result, issued_cert_file, key_file):
+
+    with open(issued_cert_file, "w") as out_ca_pem:
+        # Write the issued certificate to the file. This forms the certificate portion of the X509 object.
+        cert_data = registration_result.registration_state.issued_client_certificate
+        out_ca_pem.write(cert_data)
+
+    x509 = X509(
+        cert_file=issued_cert_file,
+        key_file=key_file,
+    )
+
+    device_client = IoTHubDeviceClient.create_from_x509_certificate(
+        hostname=registration_result.registration_state.assigned_hub,
+        device_id=registration_result.registration_state.device_id,
+        x509=x509,
+    )
+    # Connect the client.
+    await device_client.connect()
+    # Assert that this X509 was able to connect.
+ assert device_client.connected + await device_client.disconnect() diff --git a/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py index 2074c520f..cce06dd3a 100644 --- a/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_async_symmetric_enrollments.py @@ -6,24 +6,16 @@ from provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device.aio import ProvisioningDeviceClient -from azure.iot.device.aio import IoTHubDeviceClient -from azure.iot.device.common import X509 -from provisioningserviceclient import ProvisioningServiceClient, IndividualEnrollment -from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy +from dev_utils.provisioningservice.protocol import models +from dev_utils.provisioningservice.client import ProvisioningServiceClient import pytest import logging import os import uuid -from . import path_adjust # noqa: F401 -from create_x509_chain_crypto import ( - create_private_key, - create_csr, -) pytestmark = pytest.mark.asyncio logging.basicConfig(level=logging.DEBUG) - PROVISIONING_HOST = os.getenv("PROVISIONING_DEVICE_ENDPOINT") ID_SCOPE = os.getenv("PROVISIONING_DEVICE_IDSCOPE") conn_str = os.getenv("PROVISIONING_SERVICE_CONNECTION_STRING") @@ -33,8 +25,7 @@ service_client = ProvisioningServiceClient.create_from_connection_string(conn_str) device_registry_helper = Helper(os.getenv("IOTHUB_CONNECTION_STRING")) linked_iot_hub = connection_string_to_hostname(os.getenv("IOTHUB_CONNECTION_STRING")) -# TODO Delete this line. This is a pre created variable in key vault now. -symmetric_key_for_cert_management = os.getenv("DPS_CERT_ISSUANCE_SYM_KEY_AIO") +CLIENT_CERT_AUTH_NAME = os.getenv("CLIENT_CERTIFICATE_AUTHORITY_NAME") logger = logging.getLogger(__name__) @@ -47,6 +38,7 @@ async def test_device_register_with_no_device_id_for_a_symmetric_key_individual_enrollment( protocol, ): + registration_id = "" try: individual_enrollment_record = create_individual_enrollment( "e2e-dps-legilimens" + str(uuid.uuid4()) @@ -70,7 +62,7 @@ async def test_device_register_with_no_device_id_for_a_symmetric_key_individual_ ) @pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) async def test_device_register_with_device_id_for_a_symmetric_key_individual_enrollment(protocol): - + registration_id = "" device_id = "e2edpsgoldensnitch" try: individual_enrollment_record = create_individual_enrollment( @@ -89,97 +81,29 @@ async def test_device_register_with_device_id_for_a_symmetric_key_individual_enr service_client.delete_individual_enrollment_by_param(registration_id) -@pytest.mark.it( - "A device requests a client cert by sending a certificate signing request " - "while being provisioned to the linked IoTHub with the device_id equal to the registration_id" - "of the individual enrollment that has been created with a symmetric key authentication" -) -@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) -async def test_device_register_with_client_cert_issuance_for_a_symmetric_key_individual_enrollment( - protocol, -): - key_file = "key.pem" - csr_file = "request.pem" - issued_cert_file = "cert.pem" - - try: - # TODO Uncomment lines when service releases. Can not create enrollment record now as it - # TODO involves manual steps to associate the enrollment with Client and Server Profiles. 
- # individual_enrollment_record = create_individual_enrollment( - # "e2e-dps-avis" + str(uuid.uuid4()) - # ) - - # registration_id = individual_enrollment_record.registration_id - # symmetric_key = individual_enrollment_record.attestation.symmetric_key.primary_key - - registration_id = "e2e-dps-avis" - symmetric_key = symmetric_key_for_cert_management - - logger.debug("the symmetric key for e2e-dps-avis") - logger.debug(symmetric_key_for_cert_management) - private_key = create_private_key(key_file) - create_csr(private_key, csr_file, registration_id) - - registration_result = await result_from_register( - registration_id, symmetric_key, protocol, csr_file=csr_file - ) - - assert_device_provisioned( - device_id=registration_id, registration_result=registration_result, client_cert=True - ) - with open(issued_cert_file, "w") as out_ca_pem: - # Write the issued certificate on the file. This forms the certificate portion of the X509 object. - cert_data = registration_result.registration_state.issued_client_certificate - out_ca_pem.write(cert_data) - - x509 = X509( - cert_file=issued_cert_file, - key_file=key_file, - ) - - device_client = IoTHubDeviceClient.create_from_x509_certificate( - hostname=registration_result.registration_state.assigned_hub, - device_id=registration_result.registration_state.device_id, - x509=x509, - ) - # Connect the client. - await device_client.connect() - # Assert that this X509 was able to connect. - assert device_client.connected - await device_client.disconnect() - - # TODO Uncomment this line. Right now do not delete the enrollment as it is not created on the fly. - # device_registry_helper.try_delete_device(registration_id) - finally: - # TODO Uncomment this line. Right now do not delete the enrollment as it is not created on the fly. - # TODO This is a previously created enrollment record. 
- # service_client.delete_individual_enrollment_by_param(registration_id) - if os.path.exists(key_file): - os.remove(key_file) - if os.path.exists(csr_file): - os.remove(csr_file) - if os.path.exists(issued_cert_file): - os.remove(issued_cert_file) - - -def create_individual_enrollment(registration_id, device_id=None): +def create_individual_enrollment(registration_id, device_id=None, client_ca_name=None): """ Create an individual enrollment record using the service client :param registration_id: The registration id of the enrollment :param device_id: Optional device id :return: And individual enrollment record """ - reprovision_policy = ReprovisionPolicy(migrate_device_data=True) - attestation_mechanism = AttestationMechanism(type="symmetricKey") - - individual_provisioning_model = IndividualEnrollment.create( + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) + attestation_mechanism = models.AttestationMechanism(type="symmetricKey") + client_certificate_issuance_policy = None + if client_ca_name: + client_certificate_issuance_policy = models.ClientCertificateIssuancePolicy( + certificate_authority_name=client_ca_name + ) + individual_provisioning_model = models.IndividualEnrollment( attestation=attestation_mechanism, registration_id=registration_id, device_id=device_id, reprovision_policy=reprovision_policy, + client_certificate_issuance_policy=client_certificate_issuance_policy, ) - return service_client.create_or_update(individual_provisioning_model) + return service_client.create_or_update_individual_enrollment(individual_provisioning_model) def assert_device_provisioned(device_id, registration_result, client_cert=False): diff --git a/tests/e2e/provisioning_e2e/tests/test_sync_certificate_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_sync_certificate_enrollments.py index e8d039e19..6553058ca 100644 --- a/tests/e2e/provisioning_e2e/tests/test_sync_certificate_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_sync_certificate_enrollments.py @@ -7,12 +7,8 @@ from provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device import ProvisioningDeviceClient from azure.iot.device.common import X509 -from provisioningserviceclient import ( - ProvisioningServiceClient, - IndividualEnrollment, - EnrollmentGroup, -) -from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy +from dev_utils.provisioningservice.protocol import models +from dev_utils.provisioningservice.client import ProvisioningServiceClient import pytest import logging import os @@ -27,7 +23,6 @@ delete_directories_certs_created_from_pipeline, ) - logging.basicConfig(level=logging.DEBUG) @@ -82,9 +77,12 @@ def test_device_register_with_device_id_for_a_x509_individual_enrollment(protoco device_id = "e2edpsflyingfeather" device_index = type_to_device_indices.get("individual_with_device_id")[0] + registration_id = device_common_name + str(device_index) try: + cert_content = read_cert_content_from_file(device_index=device_index) + individual_enrollment_record = create_individual_enrollment_with_x509_client_certs( - device_index=device_index, device_id=device_id + registration_id=registration_id, primary_cert=cert_content, device_id=device_id ) registration_id = individual_enrollment_record.registration_id @@ -107,11 +105,14 @@ def test_device_register_with_device_id_for_a_x509_individual_enrollment(protoco @pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) def 
test_device_register_with_no_device_id_for_a_x509_individual_enrollment(protocol): device_index = type_to_device_indices.get("individual_no_device_id")[0] - + registration_id = device_common_name + str(device_index) try: + cert_content = read_cert_content_from_file(device_index=device_index) + individual_enrollment_record = create_individual_enrollment_with_x509_client_certs( - device_index=device_index + registration_id=registration_id, primary_cert=cert_content ) + registration_id = individual_enrollment_record.registration_id device_cert_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" @@ -139,21 +140,24 @@ def test_group_of_devices_register_with_no_device_id_for_a_x509_intermediate_aut common_device_id = device_common_name devices_indices = type_to_device_indices.get("group_intermediate") device_count_in_group = len(devices_indices) - reprovision_policy = ReprovisionPolicy(migrate_device_data=True) + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) try: intermediate_cert_filename = "demoCA/newcerts/intermediate_cert.pem" with open(intermediate_cert_filename, "r") as intermediate_pem: intermediate_cert_content = intermediate_pem.read() - attestation_mechanism = AttestationMechanism.create_with_x509_signing_certs( - intermediate_cert_content + x509 = create_x509_client_or_sign_certs( + is_client=False, primary_cert=intermediate_cert_content ) - enrollment_group_provisioning_model = EnrollmentGroup.create( - group_id, attestation=attestation_mechanism, reprovision_policy=reprovision_policy + attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509) + enrollment_group_provisioning_model = models.EnrollmentGroup( + enrollment_group_id=group_id, + attestation=attestation_mechanism, + reprovision_policy=reprovision_policy, ) - service_client.create_or_update(enrollment_group_provisioning_model) + service_client.create_or_update_enrollment_group(enrollment_group_provisioning_model) count = 0 common_device_key_input_file = "demoCA/private/device_key" @@ -203,18 +207,19 @@ def test_group_of_devices_register_with_no_device_id_for_a_x509_ca_authenticatio common_device_id = device_common_name devices_indices = type_to_device_indices.get("group_ca") device_count_in_group = len(devices_indices) - reprovision_policy = ReprovisionPolicy(migrate_device_data=True) + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) try: DPS_GROUP_CA_CERT = os.getenv("PROVISIONING_ROOT_CERT") - attestation_mechanism = AttestationMechanism.create_with_x509_ca_refs( - ref1=DPS_GROUP_CA_CERT - ) - enrollment_group_provisioning_model = EnrollmentGroup.create( - group_id, attestation=attestation_mechanism, reprovision_policy=reprovision_policy + x509 = create_x509_ca_refs(primary_ref=DPS_GROUP_CA_CERT) + attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509) + enrollment_group_provisioning_model = models.EnrollmentGroup( + enrollment_group_id=group_id, + attestation=attestation_mechanism, + reprovision_policy=reprovision_policy, ) - service_client.create_or_update(enrollment_group_provisioning_model) + service_client.create_or_update_enrollment_group(enrollment_group_provisioning_model) count = 0 intermediate_cert_filename = "demoCA/newcerts/intermediate_cert.pem" @@ -268,24 +273,50 @@ def assert_device_provisioned(device_id, registration_result): assert device.device_id == device_id -def create_individual_enrollment_with_x509_client_certs(device_index, device_id=None): - registration_id = device_common_name + str(device_index) 
- reprovision_policy = ReprovisionPolicy(migrate_device_data=True) - - device_cert_input_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" - with open(device_cert_input_file, "r") as in_device_cert: - device_cert_content = in_device_cert.read() - - attestation_mechanism = AttestationMechanism.create_with_x509_client_certs(device_cert_content) +def create_individual_enrollment_with_x509_client_certs( + registration_id, primary_cert, secondary_cert=None, device_id=None +): + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) + x509 = create_x509_client_or_sign_certs( + is_client=True, primary_cert=primary_cert, secondary_cert=secondary_cert + ) + attestation_mechanism = models.AttestationMechanism(type="x509", x509=x509) - individual_provisioning_model = IndividualEnrollment.create( + individual_provisioning_model = models.IndividualEnrollment( attestation=attestation_mechanism, registration_id=registration_id, reprovision_policy=reprovision_policy, device_id=device_id, ) - return service_client.create_or_update(individual_provisioning_model) + return service_client.create_or_update_individual_enrollment(individual_provisioning_model) + + +def create_x509_client_or_sign_certs(is_client, primary_cert, secondary_cert=None): + + primary = models.X509CertificateWithInfo(certificate=primary_cert) + secondary = None + if secondary_cert: + secondary = models.X509CertificateWithInfo(certificate=secondary_cert) + certs = models.X509Certificates(primary=primary, secondary=secondary) + if is_client: + x509_attestation = models.X509Attestation(client_certificates=certs) + else: + x509_attestation = models.X509Attestation(signing_certificates=certs) + return x509_attestation + + +def create_x509_ca_refs(primary_ref, secondary_ref=None): + ca_refs = models.X509CAReferences(primary=primary_ref, secondary=secondary_ref) + x509_attestation = models.X509Attestation(ca_references=ca_refs) + return x509_attestation + + +def read_cert_content_from_file(device_index): + device_cert_input_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem" + with open(device_cert_input_file, "r") as in_device_cert: + device_cert_content = in_device_cert.read() + return device_cert_content def result_from_register(registration_id, device_cert_file, device_key_file, protocol): diff --git a/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py b/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py index b5be87919..461e54761 100644 --- a/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py +++ b/tests/e2e/provisioning_e2e/tests/test_sync_symmetric_enrollments.py @@ -6,31 +6,23 @@ from provisioning_e2e.service_helper import Helper, connection_string_to_hostname from azure.iot.device import ProvisioningDeviceClient -from azure.iot.device.common import X509 -from provisioningserviceclient import ProvisioningServiceClient, IndividualEnrollment -from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy +from dev_utils.provisioningservice.protocol import models +from dev_utils.provisioningservice.client import ProvisioningServiceClient import pytest import logging import os import uuid -from . 
import path_adjust # noqa: F401 -from create_x509_chain_crypto import ( - create_private_key, - create_csr, -) -from azure.iot.device import IoTHubDeviceClient -logging.basicConfig(level=logging.DEBUG) PROVISIONING_HOST = os.getenv("PROVISIONING_DEVICE_ENDPOINT") ID_SCOPE = os.getenv("PROVISIONING_DEVICE_IDSCOPE") service_client = ProvisioningServiceClient.create_from_connection_string( os.getenv("PROVISIONING_SERVICE_CONNECTION_STRING") ) -device_registry_helper = Helper(os.getenv("IOTHUB_CONNECTION_STRING")) -linked_iot_hub = connection_string_to_hostname(os.getenv("IOTHUB_CONNECTION_STRING")) -# TODO Delete this line. This is a pre created variable in key vault now. -symmetric_key_for_cert_management = os.getenv("DPS_CERT_ISSUANCE_SYM_KEY_SYNC") +connection_string = os.getenv("IOTHUB_CONNECTION_STRING") +device_registry_helper = Helper(connection_string) +linked_iot_hub = connection_string_to_hostname(connection_string) +CLIENT_CERT_AUTH_NAME = os.getenv("CLIENT_CERTIFICATE_AUTHORITY_NAME") logger = logging.getLogger(__name__) @@ -40,6 +32,7 @@ ) @pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) def test_device_register_with_no_device_id_for_a_symmetric_key_individual_enrollment(protocol): + registration_id = "" try: individual_enrollment_record = create_individual_enrollment( "e2e-dps-underthewhompingwillow" + str(uuid.uuid4()) @@ -63,7 +56,7 @@ def test_device_register_with_no_device_id_for_a_symmetric_key_individual_enroll ) @pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) def test_device_register_with_device_id_for_a_symmetric_key_individual_enrollment(protocol): - + registration_id = "" device_id = "e2edpstommarvoloriddle" try: individual_enrollment_record = create_individual_enrollment( @@ -82,80 +75,6 @@ def test_device_register_with_device_id_for_a_symmetric_key_individual_enrollmen service_client.delete_individual_enrollment_by_param(registration_id) -@pytest.mark.it( - "A device requests a client cert by sending a certificate signing request " - "while being provisioned to the linked IoTHub with the device_id equal to the registration_id" - "of the individual enrollment that has been created with a symmetric key authentication" -) -@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"]) -def test_device_register_with_client_cert_issuance_for_a_symmetric_key_individual_enrollment( - protocol, -): - key_file = "key.pem" - csr_file = "request.pem" - issued_cert_file = "cert.pem" - try: - # individual_enrollment_record = create_individual_enrollment( - # "e2e-dps-avis" + str(uuid.uuid4()) - # ) - # - # registration_id = individual_enrollment_record.registration_id - # symmetric_key = individual_enrollment_record.attestation.symmetric_key.primary_key - - registration_id = "e2e-dps-ventus" - symmetric_key = symmetric_key_for_cert_management - logger.debug("the symmetric key for e2e-dps-ventus") - logger.debug(symmetric_key_for_cert_management) - - key_file = "key.pem" - csr_file = "request.pem" - issued_cert_file = "cert.pem" - - private_key = create_private_key(key_file) - create_csr(private_key, csr_file, registration_id) - - registration_result = result_from_register( - registration_id, symmetric_key, protocol, csr_file=csr_file - ) - - assert_device_provisioned( - device_id=registration_id, registration_result=registration_result, client_cert=True - ) - with open(issued_cert_file, "w") as out_ca_pem: - # Write the issued certificate on the file. This forms the certificate portion of the X509 object. 
- cert_data = registration_result.registration_state.issued_client_certificate - out_ca_pem.write(cert_data) - - x509 = X509( - cert_file=issued_cert_file, - key_file=key_file, - ) - - device_client = IoTHubDeviceClient.create_from_x509_certificate( - hostname=registration_result.registration_state.assigned_hub, - device_id=registration_result.registration_state.device_id, - x509=x509, - ) - # Connect the client. - device_client.connect() - # Assert that this X509 was able to connect. - assert device_client.connected - device_client.disconnect() - - # TODO Uncomment this line. Right now do not delete the enrollment as it is not created on the fly. - # device_registry_helper.try_delete_device(registration_id) - finally: - # TODO Uncomment this line. Right now do not delete the enrollment as it is not created on the fly. - # TODO This is a previously created enrollment record. - # service_client.delete_individual_enrollment_by_param(registration_id) - if os.path.exists(key_file): - os.remove(key_file) - if os.path.exists(csr_file): - os.remove(csr_file) - if os.path.exists(issued_cert_file): - os.remove(issued_cert_file) - - def create_individual_enrollment(registration_id, device_id=None): """ Create an individual enrollment record using the service client @@ -163,17 +82,19 @@ def create_individual_enrollment(registration_id, device_id=None): :param device_id: Optional device id :return: And individual enrollment record """ - reprovision_policy = ReprovisionPolicy(migrate_device_data=True) - attestation_mechanism = AttestationMechanism(type="symmetricKey") + reprovision_policy = models.ReprovisionPolicy(migrate_device_data=True) + attestation_mechanism = models.AttestationMechanism(type="symmetricKey") + client_certificate_issuance_policy = None - individual_provisioning_model = IndividualEnrollment.create( + individual_provisioning_model = models.IndividualEnrollment( attestation=attestation_mechanism, registration_id=registration_id, device_id=device_id, reprovision_policy=reprovision_policy, + client_certificate_issuance_policy=client_certificate_issuance_policy, ) - return service_client.create_or_update(individual_provisioning_model) + return service_client.create_or_update_individual_enrollment(individual_provisioning_model) def assert_device_provisioned(device_id, registration_result, client_cert=False): diff --git a/tests/unit/provisioning/pipeline/test_mqtt_pipeline.py b/tests/unit/provisioning/pipeline/test_mqtt_pipeline.py index 075050f15..e2278173c 100644 --- a/tests/unit/provisioning/pipeline/test_mqtt_pipeline.py +++ b/tests/unit/provisioning/pipeline/test_mqtt_pipeline.py @@ -8,8 +8,6 @@ import logging from azure.iot.device.common.models import X509 from azure.iot.device.provisioning.pipeline.mqtt_pipeline import MQTTPipeline -from tests.common.pipeline import helpers -import json from azure.iot.device.provisioning.pipeline import constant as dps_constants from azure.iot.device.provisioning.pipeline import ( pipeline_stages_provisioning, diff --git a/tests/unit/provisioning/pipeline/test_pipeline_stages_provisioning.py b/tests/unit/provisioning/pipeline/test_pipeline_stages_provisioning.py index 8b6e63eb4..073db9f65 100644 --- a/tests/unit/provisioning/pipeline/test_pipeline_stages_provisioning.py +++ b/tests/unit/provisioning/pipeline/test_pipeline_stages_provisioning.py @@ -13,13 +13,10 @@ pipeline_ops_provisioning, ) from azure.iot.device.common.pipeline import pipeline_ops_base -from tests.common.pipeline import pipeline_stage_test +from tests.unit.common.pipeline import 
pipeline_stage_test from azure.iot.device.exceptions import ServiceError -from azure.iot.device.provisioning.models.registration_result import ( - RegistrationResult, - RegistrationState, -) -from tests.common.pipeline.helpers import StageRunOpTestBase + +from tests.unit.common.pipeline.helpers import StageRunOpTestBase from azure.iot.device import exceptions from azure.iot.device.provisioning.pipeline import constant diff --git a/vsts/dps-e2e-cert-mgmt.yaml b/vsts/dps-e2e-cert-mgmt.yaml index d5dbc61a3..af0fe5b47 100644 --- a/vsts/dps-e2e-cert-mgmt.yaml +++ b/vsts/dps-e2e-cert-mgmt.yaml @@ -47,6 +47,7 @@ jobs: PYTHONUNBUFFERED: True # Extra variable manually created + CLIENT_CERTIFICATE_AUTHORITY_NAME: $(DPSCERT-MAC-DPS-CLIENT-CERTIFICATE-AUTHORITY-NAME) DPS_CERT_ISSUANCE_SYM_KEY_AIO: $(DPSCERT-MAC-DPS-CLIENT-CERT-ISSUE-SYM-KEY-ASYNC) DPS_CERT_ISSUANCE_SYM_KEY_SYNC: $(DPSCERT-MAC-DPS-CLIENT-CERT-ISSUE-SYM-KEY-SYNC) From d5ef10aa80361275b098b9da16cd5caef03d5956 Mon Sep 17 00:00:00 2001 From: Sebastian Metoyer Date: Thu, 23 May 2024 14:28:55 -0400 Subject: [PATCH 19/20] flake8 fix --- dev_utils/dev_utils/service_helper_sync.py | 2 +- samples/pnp/simple_thermostat.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev_utils/dev_utils/service_helper_sync.py b/dev_utils/dev_utils/service_helper_sync.py index 6861b8dc7..c35c33233 100644 --- a/dev_utils/dev_utils/service_helper_sync.py +++ b/dev_utils/dev_utils/service_helper_sync.py @@ -252,7 +252,7 @@ def on_event_batch(partition_context, events): if device_id == self.device_id and module_id == self.module_id: converted_event = self._convert_incoming_event(event) - if type(converted_event) == EventhubEvent: + if isinstance(converted_event, EventhubEvent): if "message-id" in converted_event.system_properties: logger.info( "Received event with msgid={}".format( diff --git a/samples/pnp/simple_thermostat.py b/samples/pnp/simple_thermostat.py index 2ea144447..f326b1232 100644 --- a/samples/pnp/simple_thermostat.py +++ b/samples/pnp/simple_thermostat.py @@ -40,7 +40,7 @@ async def reboot_handler(values): global min_temp global avg_temp_list global target_temperature - if values and type(values) == int: + if values and isinstance(values, int): print("Rebooting after delay of {delay} secs".format(delay=values)) asyncio.sleep(values) max_temp = None From a0025e17f0c8c816cd8aac26728725a36dc2d634 Mon Sep 17 00:00:00 2001 From: Sebastian Metoyer Date: Tue, 28 May 2024 11:34:07 -0400 Subject: [PATCH 20/20] Re-add setuptools --- vsts/build.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/vsts/build.yaml b/vsts/build.yaml index 48f263e84..816d454ec 100644 --- a/vsts/build.yaml +++ b/vsts/build.yaml @@ -85,6 +85,7 @@ jobs: - script: | pip install wheel + pip install setuptools python scripts/build_packages.py displayName: Build source and wheel distribution