diff --git a/benchmarking/VERSION b/benchmarking/VERSION index 7becae11..600e6fd3 100644 --- a/benchmarking/VERSION +++ b/benchmarking/VERSION @@ -1 +1 @@ -v0.3.2 +v0.3.3 diff --git a/deployment/entity-service/templates/configmap.yaml b/deployment/entity-service/templates/configmap.yaml index 434f82fd..3d588c53 100644 --- a/deployment/entity-service/templates/configmap.yaml +++ b/deployment/entity-service/templates/configmap.yaml @@ -5,8 +5,6 @@ metadata: labels: {{- include "es.release_labels" . | indent 4 }} data: - DEBUG: {{ required "workers.debug is required." .Values.workers.debug | quote }} - CONNEXION_STRICT_VALIDATION: "true" CONNEXION_RESPONSE_VALIDATION: "true" diff --git a/deployment/entity-service/values.yaml b/deployment/entity-service/values.yaml index e09ff675..5fcd4440 100644 --- a/deployment/entity-service/values.yaml +++ b/deployment/entity-service/values.yaml @@ -204,8 +204,6 @@ workers: ## Worker configuration ## These settings populate the deployment's configmap. - debug: false - ## Desired task size in "number of comparisons" ## Note there is some overhead creating a task and a single dedicated cpu core can do between 50M and 100M ## comparisons per second, so much lower that 100M isn't generally worth splitting across celery workers. 
diff --git a/deployment/jobs/benchmark/timing-test-job.yaml b/deployment/jobs/benchmark/timing-test-job.yaml index 43ebc7aa..928cc190 100644 --- a/deployment/jobs/benchmark/timing-test-job.yaml +++ b/deployment/jobs/benchmark/timing-test-job.yaml @@ -21,7 +21,7 @@ spec: mountPath: /cache containers: - name: entitytester - image: data61/anonlink-benchmark:v0.3.2 + image: data61/anonlink-benchmark:v0.3.3 env: - name: SERVER value: "https://anonlink.easd.data61.xyz" diff --git a/docs/changelog.rst b/docs/changelog.rst index a1f42744..44fcb668 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,10 +9,34 @@ Next Version Version 1.13.0-beta2 -------------------- +-------------------- -- Encodings are now stored in Postgres database instead of files in an object store. -- Initial support for user supplied blocking implemented. +Adds support for users to supply blocking information along with encodings. Data can now be uploaded to +an object store and pulled by the Anonlink Entity Service instead of uploaded via the REST API. +This release includes substantial internal changes as encodings are now stored in Postgres instead of +the object store. + +- Feature to pull data from an object store and create temporary upload credentials. #537, #544, #551 +- Blocking implementation #510, #527 +- Benchmarking #478, #541 +- Encodings are now stored in Postgres database instead of files in an object store. #516, #522 +- Start to add integration tests to complement our end to end tests. #520, #528 +- Use anonlink-client instead of clkhash #536 +- Use Python 3.8 in base image. #518 +- A base image is now used for all our Docker images. #506, #511, #517, #519 +- Binary encodings now stored internally with their encoding id. #505 +- REST API implementation for accepting clknblocks #503 +- Update Open API spec to version 3. 
Add Blocking API #479 +- CI Updates #476 +- Chart updates #496, #497, #539 +- Documentation updates (production deployment, debugging with PyCharm) #473, #504 +- Fix Jaeger #500, #523 + +Misc changes/fixes: +- Detect invalid encoding size as early as possible #507 +- Use local benchmark cache #531 +- Cleanup docker-compose #533, #534, #547 +- Calculate number of comparisons accounting for user supplied blocks. #543 Version 1.13.0-beta ------------------- diff --git a/e2etests/tests/test_project_uploads.py b/e2etests/tests/test_project_uploads.py index 4a7f7d94..e4968f08 100644 --- a/e2etests/tests/test_project_uploads.py +++ b/e2etests/tests/test_project_uploads.py @@ -133,7 +133,7 @@ def test_project_upload_external_encodings(requests, a_project, binary_test_file def test_project_upload_external_data(requests, a_blocking_project, binary_test_file_path): project = a_blocking_project blocking_data = json.dumps( - {str(encoding_id): [str(encoding_id % 2), str(encoding_id % 3)] for encoding_id in range(1000)}).encode() + {str(encoding_id): list({str(encoding_id % 3), str(encoding_id % 13)}) for encoding_id in range(1000)}).encode() mc, upload_info = get_temp_upload_client(project, requests, project['update_tokens'][0]) @@ -203,8 +203,8 @@ def test_project_upload_external_data(requests, a_blocking_project, binary_test_ } ) assert res3.status_code == 201 - run_id = post_run(requests, project, threshold=0.9) - result = get_run_result(requests, project, run_id, timeout=60) + run_id = post_run(requests, project, threshold=0.95) + result = get_run_result(requests, project, run_id, timeout=120) assert 'groups' in result diff --git a/e2etests/tests/test_uploads.py b/e2etests/tests/test_upload_credentials.py similarity index 100% rename from e2etests/tests/test_uploads.py rename to e2etests/tests/test_upload_credentials.py