Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
binary format for upload is now different from the internal binary format.
  • Loading branch information
wilko committed Feb 17, 2020
commit 6dc218ccabc6ea0a2d7cc98b1ca6f35d7c9dd8d6
7 changes: 3 additions & 4 deletions backend/entityservice/tests/test_project_uploads.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,12 @@
import os
import pytest

from entityservice.serialization import binary_pack_filters
from entityservice.tests.config import url
from entityservice.tests.util import (
create_project_upload_data, create_project_upload_fake_data,
generate_clks, generate_json_serialized_clks,
get_expected_number_parties, get_run_result, post_run,
upload_binary_data, upload_binary_data_from_file)
upload_binary_data, upload_binary_data_from_file, binary_pack_for_upload)


def test_project_single_party_data_uploaded(requests, valid_project_params):
Expand Down Expand Up @@ -73,13 +72,13 @@ def test_project_binary_data_upload_with_different_encoded_size(
**valid_project_params
}).json()

common = next(binary_pack_filters(generate_clks(1, encoding_size),
common = next(binary_pack_for_upload(generate_clks(1, encoding_size),
encoding_size))

data = []
for i in range(expected_number_parties):
generated_clks = generate_clks(499, encoding_size)
packed_clks = binary_pack_filters(generated_clks, encoding_size)
packed_clks = binary_pack_for_upload(generated_clks, encoding_size)
packed_joined = b''.join(packed_clks)
packed_with_common = (
packed_joined + common if i == 0 else common + packed_joined)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,8 @@

import anonlink

from entityservice.serialization import binary_pack_filters
from entityservice.tests.util import (
create_project_upload_data, delete_project, get_run_result, post_run)
create_project_upload_data, delete_project, get_run_result, post_run, binary_pack_for_upload)

DATA_FILENAME = 'test-multiparty-results-correctness-data.pkl'
DATA_PATH = pathlib.Path(__file__).parent / 'testdata' / DATA_FILENAME
Expand All @@ -29,7 +28,7 @@ def test_groups_correctness(requests):
filter_size = len(filters[0][0])
assert all(len(filter_) == filter_size
for dataset in filters for filter_ in dataset)
packed_filters = [b''.join(binary_pack_filters(f, filter_size))
packed_filters = [b''.join(binary_pack_for_upload(f, filter_size))
for f in filters]
project_data, _ = create_project_upload_data(
requests, packed_filters, result_type='groups',
Expand Down
17 changes: 17 additions & 0 deletions backend/entityservice/tests/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import math
import os
import random
import struct
import time
import tempfile
from contextlib import contextmanager
Expand Down Expand Up @@ -54,6 +55,10 @@ def generate_clks(count, size):
return res


def generate_clks_with_id(count, size):
    """Yield ``(index, clk)`` pairs for *count* randomly generated CLKs.

    Each CLK is ``size`` bytes; indices run from 0 to ``count - 1``.
    """
    yield from enumerate(generate_clks(count, size))


def generate_json_serialized_clks(count, size=128):
clks = generate_clks(count, size)
return [serialize_bytes(hash_bytes) for hash_bytes in clks]
Expand Down Expand Up @@ -436,3 +441,15 @@ def upload_binary_data_from_file(requests, file_path, project_id, token, count,

def get_expected_number_parties(project_params):
return project_params.get('number_parties', 2)


def binary_upload_format(encoding_size):
    """Return a compiled :class:`struct.Struct` for the upload wire format.

    The format is a single network-order (big-endian) byte string of
    exactly ``encoding_size`` bytes, i.e. ``"!{encoding_size}s"``.
    """
    return struct.Struct("!" + str(encoding_size) + "s")


def binary_pack_for_upload(filters, encoding_size):
    """Lazily pack each CLK in *filters* into the binary upload format.

    Every element of *filters* must be a bytes object of exactly
    ``encoding_size`` bytes; :class:`struct.error` is raised otherwise.
    Yields one packed bytes object per input CLK.
    """
    # Inline of binary_upload_format: fixed-width byte string, network order.
    packer = struct.Struct(f"!{encoding_size}s").pack
    return (packer(clk) for clk in filters)