diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000000..55395b7b952 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,718 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 4 +indent_style = space +insert_final_newline = true +max_line_length = 140 +tab_width = 4 +ij_continuation_indent_size = 8 +ij_formatter_off_tag = @formatter:off +ij_formatter_on_tag = @formatter:on +ij_formatter_tags_enabled = false +ij_smart_tabs = false +ij_visual_guides = none +ij_wrap_on_typing = true + +[*.java] +ij_java_align_consecutive_assignments = false +ij_java_align_consecutive_variable_declarations = false +ij_java_align_group_field_declarations = false +ij_java_align_multiline_annotation_parameters = false +ij_java_align_multiline_array_initializer_expression = false +ij_java_align_multiline_assignment = false +ij_java_align_multiline_binary_operation = false +ij_java_align_multiline_chained_methods = false +ij_java_align_multiline_extends_list = false +ij_java_align_multiline_for = true +ij_java_align_multiline_method_parentheses = false +ij_java_align_multiline_parameters = false +ij_java_align_multiline_parameters_in_calls = false +ij_java_align_multiline_parenthesized_expression = false +ij_java_align_multiline_records = true +ij_java_align_multiline_resources = true +ij_java_align_multiline_ternary_operation = false +ij_java_align_multiline_text_blocks = false +ij_java_align_multiline_throws_list = false +ij_java_align_subsequent_simple_methods = false +ij_java_align_throws_keyword = false +ij_java_annotation_parameter_wrap = off +ij_java_array_initializer_new_line_after_left_brace = false +ij_java_array_initializer_right_brace_on_new_line = false +ij_java_array_initializer_wrap = off +ij_java_assert_statement_colon_on_next_line = false +ij_java_assert_statement_wrap = off +ij_java_assignment_wrap = off +ij_java_binary_operation_sign_on_next_line = false +ij_java_binary_operation_wrap = off 
+ij_java_blank_lines_after_anonymous_class_header = 0 +ij_java_blank_lines_after_class_header = 0 +ij_java_blank_lines_after_imports = 1 +ij_java_blank_lines_after_package = 1 +ij_java_blank_lines_around_class = 1 +ij_java_blank_lines_around_field = 0 +ij_java_blank_lines_around_field_in_interface = 0 +ij_java_blank_lines_around_initializer = 1 +ij_java_blank_lines_around_method = 1 +ij_java_blank_lines_around_method_in_interface = 1 +ij_java_blank_lines_before_class_end = 0 +ij_java_blank_lines_before_imports = 1 +ij_java_blank_lines_before_method_body = 0 +ij_java_blank_lines_before_package = 0 +ij_java_block_brace_style = end_of_line +ij_java_block_comment_at_first_column = true +ij_java_call_parameters_new_line_after_left_paren = false +ij_java_call_parameters_right_paren_on_new_line = false +ij_java_call_parameters_wrap = off +ij_java_case_statement_on_separate_line = true +ij_java_catch_on_new_line = false +ij_java_class_annotation_wrap = split_into_lines +ij_java_class_brace_style = end_of_line +ij_java_class_count_to_use_import_on_demand = 100 +ij_java_class_names_in_javadoc = 1 +ij_java_do_not_indent_top_level_class_members = false +ij_java_do_not_wrap_after_single_annotation = false +ij_java_do_while_brace_force = never +ij_java_doc_add_blank_line_after_description = true +ij_java_doc_add_blank_line_after_param_comments = false +ij_java_doc_add_blank_line_after_return = false +ij_java_doc_add_p_tag_on_empty_lines = true +ij_java_doc_align_exception_comments = true +ij_java_doc_align_param_comments = true +ij_java_doc_do_not_wrap_if_one_line = false +ij_java_doc_enable_formatting = true +ij_java_doc_enable_leading_asterisks = true +ij_java_doc_indent_on_continuation = false +ij_java_doc_keep_empty_lines = true +ij_java_doc_keep_empty_parameter_tag = true +ij_java_doc_keep_empty_return_tag = true +ij_java_doc_keep_empty_throws_tag = true +ij_java_doc_keep_invalid_tags = true +ij_java_doc_param_description_on_new_line = false 
+ij_java_doc_preserve_line_breaks = false +ij_java_doc_use_throws_not_exception_tag = true +ij_java_else_on_new_line = false +ij_java_entity_dd_suffix = EJB +ij_java_entity_eb_suffix = Bean +ij_java_entity_hi_suffix = Home +ij_java_entity_lhi_prefix = Local +ij_java_entity_lhi_suffix = Home +ij_java_entity_li_prefix = Local +ij_java_entity_pk_class = java.lang.String +ij_java_entity_vo_suffix = VO +ij_java_enum_constants_wrap = off +ij_java_extends_keyword_wrap = off +ij_java_extends_list_wrap = off +ij_java_field_annotation_wrap = split_into_lines +ij_java_finally_on_new_line = false +ij_java_for_brace_force = never +ij_java_for_statement_new_line_after_left_paren = false +ij_java_for_statement_right_paren_on_new_line = false +ij_java_for_statement_wrap = off +ij_java_generate_final_locals = false +ij_java_generate_final_parameters = true +ij_java_if_brace_force = never +ij_java_imports_layout = *,|,javax.**,java.**,|,$* +ij_java_indent_case_from_switch = true +ij_java_insert_inner_class_imports = false +ij_java_insert_override_annotation = true +ij_java_keep_blank_lines_before_right_brace = 2 +ij_java_keep_blank_lines_between_package_declaration_and_header = 2 +ij_java_keep_blank_lines_in_code = 2 +ij_java_keep_blank_lines_in_declarations = 2 +ij_java_keep_control_statement_in_one_line = true +ij_java_keep_first_column_comment = true +ij_java_keep_indents_on_empty_lines = false +ij_java_keep_line_breaks = true +ij_java_keep_multiple_expressions_in_one_line = false +ij_java_keep_simple_blocks_in_one_line = false +ij_java_keep_simple_classes_in_one_line = false +ij_java_keep_simple_lambdas_in_one_line = false +ij_java_keep_simple_methods_in_one_line = false +ij_java_label_indent_absolute = false +ij_java_label_indent_size = 0 +ij_java_lambda_brace_style = end_of_line +ij_java_layout_static_imports_separately = true +ij_java_line_comment_add_space = false +ij_java_line_comment_at_first_column = true +ij_java_message_dd_suffix = EJB +ij_java_message_eb_suffix = Bean 
+ij_java_method_annotation_wrap = split_into_lines +ij_java_method_brace_style = end_of_line +ij_java_method_call_chain_wrap = off +ij_java_method_parameters_new_line_after_left_paren = false +ij_java_method_parameters_right_paren_on_new_line = false +ij_java_method_parameters_wrap = off +ij_java_modifier_list_wrap = false +ij_java_names_count_to_use_import_on_demand = 100 +ij_java_new_line_after_lparen_in_record_header = false +ij_java_packages_to_use_import_on_demand = java.awt.*,javax.swing.* +ij_java_parameter_annotation_wrap = off +ij_java_parentheses_expression_new_line_after_left_paren = false +ij_java_parentheses_expression_right_paren_on_new_line = false +ij_java_place_assignment_sign_on_next_line = false +ij_java_prefer_longer_names = true +ij_java_prefer_parameters_wrap = false +ij_java_record_components_wrap = normal +ij_java_repeat_synchronized = true +ij_java_replace_instanceof_and_cast = false +ij_java_replace_null_check = true +ij_java_replace_sum_lambda_with_method_ref = true +ij_java_resource_list_new_line_after_left_paren = false +ij_java_resource_list_right_paren_on_new_line = false +ij_java_resource_list_wrap = off +ij_java_rparen_on_new_line_in_record_header = false +ij_java_session_dd_suffix = EJB +ij_java_session_eb_suffix = Bean +ij_java_session_hi_suffix = Home +ij_java_session_lhi_prefix = Local +ij_java_session_lhi_suffix = Home +ij_java_session_li_prefix = Local +ij_java_session_si_suffix = Service +ij_java_space_after_closing_angle_bracket_in_type_argument = false +ij_java_space_after_colon = true +ij_java_space_after_comma = true +ij_java_space_after_comma_in_type_arguments = true +ij_java_space_after_for_semicolon = true +ij_java_space_after_quest = true +ij_java_space_after_type_cast = true +ij_java_space_before_annotation_array_initializer_left_brace = false +ij_java_space_before_annotation_parameter_list = false +ij_java_space_before_array_initializer_left_brace = false +ij_java_space_before_catch_keyword = true 
+ij_java_space_before_catch_left_brace = true +ij_java_space_before_catch_parentheses = true +ij_java_space_before_class_left_brace = true +ij_java_space_before_colon = true +ij_java_space_before_colon_in_foreach = true +ij_java_space_before_comma = false +ij_java_space_before_do_left_brace = true +ij_java_space_before_else_keyword = true +ij_java_space_before_else_left_brace = true +ij_java_space_before_finally_keyword = true +ij_java_space_before_finally_left_brace = true +ij_java_space_before_for_left_brace = true +ij_java_space_before_for_parentheses = true +ij_java_space_before_for_semicolon = false +ij_java_space_before_if_left_brace = true +ij_java_space_before_if_parentheses = true +ij_java_space_before_method_call_parentheses = false +ij_java_space_before_method_left_brace = true +ij_java_space_before_method_parentheses = false +ij_java_space_before_opening_angle_bracket_in_type_parameter = false +ij_java_space_before_quest = true +ij_java_space_before_switch_left_brace = true +ij_java_space_before_switch_parentheses = true +ij_java_space_before_synchronized_left_brace = true +ij_java_space_before_synchronized_parentheses = true +ij_java_space_before_try_left_brace = true +ij_java_space_before_try_parentheses = true +ij_java_space_before_type_parameter_list = false +ij_java_space_before_while_keyword = true +ij_java_space_before_while_left_brace = true +ij_java_space_before_while_parentheses = true +ij_java_space_inside_one_line_enum_braces = false +ij_java_space_within_empty_array_initializer_braces = false +ij_java_space_within_empty_method_call_parentheses = false +ij_java_space_within_empty_method_parentheses = false +ij_java_spaces_around_additive_operators = true +ij_java_spaces_around_assignment_operators = true +ij_java_spaces_around_bitwise_operators = true +ij_java_spaces_around_equality_operators = true +ij_java_spaces_around_lambda_arrow = true +ij_java_spaces_around_logical_operators = true +ij_java_spaces_around_method_ref_dbl_colon = false 
+ij_java_spaces_around_multiplicative_operators = true +ij_java_spaces_around_relational_operators = true +ij_java_spaces_around_shift_operators = true +ij_java_spaces_around_type_bounds_in_type_parameters = true +ij_java_spaces_around_unary_operator = false +ij_java_spaces_within_angle_brackets = false +ij_java_spaces_within_annotation_parentheses = false +ij_java_spaces_within_array_initializer_braces = false +ij_java_spaces_within_braces = false +ij_java_spaces_within_brackets = false +ij_java_spaces_within_cast_parentheses = false +ij_java_spaces_within_catch_parentheses = false +ij_java_spaces_within_for_parentheses = false +ij_java_spaces_within_if_parentheses = false +ij_java_spaces_within_method_call_parentheses = false +ij_java_spaces_within_method_parentheses = false +ij_java_spaces_within_parentheses = false +ij_java_spaces_within_record_header = false +ij_java_spaces_within_switch_parentheses = false +ij_java_spaces_within_synchronized_parentheses = false +ij_java_spaces_within_try_parentheses = false +ij_java_spaces_within_while_parentheses = false +ij_java_special_else_if_treatment = true +ij_java_subclass_name_suffix = Impl +ij_java_ternary_operation_signs_on_next_line = false +ij_java_ternary_operation_wrap = off +ij_java_test_name_suffix = Test +ij_java_throws_keyword_wrap = off +ij_java_throws_list_wrap = off +ij_java_use_external_annotations = false +ij_java_use_fq_class_names = false +ij_java_use_relative_indents = false +ij_java_use_single_class_imports = true +ij_java_variable_annotation_wrap = off +ij_java_visibility = public +ij_java_while_brace_force = never +ij_java_while_on_new_line = false +ij_java_wrap_comments = false +ij_java_wrap_first_method_in_call_chain = false +ij_java_wrap_long_lines = false + +[*.scala] +indent_size = 2 +tab_width = 2 +ij_continuation_indent_size = 2 +ij_scala_align_composite_pattern = true +ij_scala_align_extends_with = 0 +ij_scala_align_group_field_declarations = false +ij_scala_align_if_else = false 
+ij_scala_align_in_columns_case_branch = false +ij_scala_align_multiline_binary_operation = false +ij_scala_align_multiline_chained_methods = false +ij_scala_align_multiline_for = true +ij_scala_align_multiline_parameters = false +ij_scala_align_multiline_parameters_in_calls = false +ij_scala_align_multiline_parenthesized_expression = false +ij_scala_align_tuple_elements = false +ij_scala_align_types_in_multiline_declarations = false +ij_scala_alternate_continuation_indent_for_params = 4 +ij_scala_binary_operation_wrap = off +ij_scala_blank_lines_after_anonymous_class_header = 0 +ij_scala_blank_lines_after_class_header = 0 +ij_scala_blank_lines_after_imports = 1 +ij_scala_blank_lines_after_package = 1 +ij_scala_blank_lines_around_class = 1 +ij_scala_blank_lines_around_field = 0 +ij_scala_blank_lines_around_field_in_inner_scopes = 0 +ij_scala_blank_lines_around_field_in_interface = 0 +ij_scala_blank_lines_around_method = 1 +ij_scala_blank_lines_around_method_in_inner_scopes = 1 +ij_scala_blank_lines_around_method_in_interface = 1 +ij_scala_blank_lines_before_imports = 1 +ij_scala_blank_lines_before_method_body = 0 +ij_scala_blank_lines_before_package = 0 +ij_scala_block_brace_style = end_of_line +ij_scala_block_comment_at_first_column = true +ij_scala_call_parameters_new_line_after_lparen = 0 +ij_scala_call_parameters_right_paren_on_new_line = false +ij_scala_call_parameters_wrap = off +ij_scala_case_clause_brace_force = never +ij_scala_catch_on_new_line = false +ij_scala_class_annotation_wrap = split_into_lines +ij_scala_class_brace_style = end_of_line +ij_scala_closure_brace_force = never +ij_scala_do_not_align_block_expr_params = true +ij_scala_do_not_indent_case_clause_body = false +ij_scala_do_not_indent_tuples_close_brace = true +ij_scala_do_while_brace_force = never +ij_scala_else_on_new_line = false +ij_scala_enable_scaladoc_formatting = true +ij_scala_enforce_functional_syntax_for_unit = true +ij_scala_extends_keyword_wrap = off +ij_scala_extends_list_wrap 
= off +ij_scala_field_annotation_wrap = split_into_lines +ij_scala_finally_brace_force = never +ij_scala_finally_on_new_line = false +ij_scala_for_brace_force = never +ij_scala_for_statement_wrap = off +ij_scala_formatter = 0 +ij_scala_if_brace_force = never +ij_scala_implicit_value_class_suffix = Ops +ij_scala_indent_braced_function_args = true +ij_scala_indent_case_from_switch = true +ij_scala_indent_first_parameter = true +ij_scala_indent_first_parameter_clause = false +ij_scala_indent_type_arguments = true +ij_scala_indent_type_parameters = true +ij_scala_insert_whitespaces_in_simple_one_line_method = true +ij_scala_keep_blank_lines_before_right_brace = 2 +ij_scala_keep_blank_lines_in_code = 2 +ij_scala_keep_blank_lines_in_declarations = 2 +ij_scala_keep_comments_on_same_line = true +ij_scala_keep_first_column_comment = false +ij_scala_keep_indents_on_empty_lines = false +ij_scala_keep_line_breaks = true +ij_scala_keep_one_line_lambdas_in_arg_list = false +ij_scala_keep_simple_blocks_in_one_line = false +ij_scala_keep_simple_methods_in_one_line = false +ij_scala_keep_xml_formatting = false +ij_scala_line_comment_at_first_column = true +ij_scala_method_annotation_wrap = split_into_lines +ij_scala_method_brace_force = never +ij_scala_method_brace_style = end_of_line +ij_scala_method_call_chain_wrap = off +ij_scala_method_parameters_new_line_after_left_paren = false +ij_scala_method_parameters_right_paren_on_new_line = false +ij_scala_method_parameters_wrap = off +ij_scala_modifier_list_wrap = false +ij_scala_multiline_string_align_dangling_closing_quotes = false +ij_scala_multiline_string_closing_quotes_on_new_line = false +ij_scala_multiline_string_insert_margin_on_enter = true +ij_scala_multiline_string_margin_char = | +ij_scala_multiline_string_margin_indent = 2 +ij_scala_multiline_string_opening_quotes_on_new_line = true +ij_scala_multiline_string_process_margin_on_copy_paste = true +ij_scala_newline_after_annotations = false 
+ij_scala_not_continuation_indent_for_params = false +ij_scala_parameter_annotation_wrap = off +ij_scala_parentheses_expression_new_line_after_left_paren = false +ij_scala_parentheses_expression_right_paren_on_new_line = false +ij_scala_place_closure_parameters_on_new_line = false +ij_scala_place_self_type_on_new_line = true +ij_scala_prefer_parameters_wrap = false +ij_scala_preserve_space_after_method_declaration_name = false +ij_scala_reformat_on_compile = false +ij_scala_replace_case_arrow_with_unicode_char = false +ij_scala_replace_for_generator_arrow_with_unicode_char = false +ij_scala_replace_lambda_with_greek_letter = false +ij_scala_replace_map_arrow_with_unicode_char = false +ij_scala_scalafmt_fallback_to_default_settings = false +ij_scala_scalafmt_reformat_on_files_save = false +ij_scala_scalafmt_show_invalid_code_warnings = true +ij_scala_scalafmt_use_intellij_formatter_for_range_format = true +ij_scala_sd_align_exception_comments = true +ij_scala_sd_align_list_item_content = true +ij_scala_sd_align_other_tags_comments = true +ij_scala_sd_align_parameters_comments = true +ij_scala_sd_align_return_comments = true +ij_scala_sd_blank_line_after_parameters_comments = false +ij_scala_sd_blank_line_after_return_comments = false +ij_scala_sd_blank_line_before_parameters = false +ij_scala_sd_blank_line_before_tags = true +ij_scala_sd_blank_line_between_parameters = false +ij_scala_sd_keep_blank_lines_between_tags = false +ij_scala_sd_preserve_spaces_in_tags = false +ij_scala_space_after_comma = true +ij_scala_space_after_for_semicolon = true +ij_scala_space_after_modifiers_constructor = false +ij_scala_space_after_type_colon = true +ij_scala_space_before_brace_method_call = true +ij_scala_space_before_class_left_brace = true +ij_scala_space_before_for_parentheses = true +ij_scala_space_before_if_parentheses = true +ij_scala_space_before_infix_like_method_parentheses = false +ij_scala_space_before_infix_method_call_parentheses = false 
+ij_scala_space_before_infix_operator_like_method_call_parentheses = true +ij_scala_space_before_method_call_parentheses = false +ij_scala_space_before_method_left_brace = true +ij_scala_space_before_method_parentheses = false +ij_scala_space_before_type_colon = false +ij_scala_space_before_type_parameter_in_def_list = false +ij_scala_space_before_type_parameter_leading_context_bound_colon = false +ij_scala_space_before_type_parameter_leading_context_bound_colon_hk = true +ij_scala_space_before_type_parameter_list = false +ij_scala_space_before_type_parameter_rest_context_bound_colons = true +ij_scala_space_before_while_parentheses = true +ij_scala_space_inside_closure_braces = true +ij_scala_space_inside_self_type_braces = true +ij_scala_space_within_empty_method_call_parentheses = false +ij_scala_spaces_around_at_in_patterns = false +ij_scala_spaces_in_imports = false +ij_scala_spaces_in_one_line_blocks = false +ij_scala_spaces_within_brackets = false +ij_scala_spaces_within_for_parentheses = false +ij_scala_spaces_within_if_parentheses = false +ij_scala_spaces_within_method_call_parentheses = false +ij_scala_spaces_within_method_parentheses = false +ij_scala_spaces_within_parentheses = false +ij_scala_spaces_within_while_parentheses = false +ij_scala_special_else_if_treatment = true +ij_scala_trailing_comma_arg_list_enabled = true +ij_scala_trailing_comma_import_selector_enabled = false +ij_scala_trailing_comma_mode = trailing_comma_keep +ij_scala_trailing_comma_params_enabled = true +ij_scala_trailing_comma_pattern_arg_list_enabled = false +ij_scala_trailing_comma_tuple_enabled = false +ij_scala_trailing_comma_tuple_type_enabled = false +ij_scala_trailing_comma_type_params_enabled = false +ij_scala_try_brace_force = never +ij_scala_type_annotation_exclude_constant = true +ij_scala_type_annotation_exclude_in_dialect_sources = true +ij_scala_type_annotation_exclude_in_test_sources = false +ij_scala_type_annotation_exclude_member_of_anonymous_class = false 
+ij_scala_type_annotation_exclude_member_of_private_class = false +ij_scala_type_annotation_exclude_when_type_is_stable = true +ij_scala_type_annotation_function_parameter = false +ij_scala_type_annotation_implicit_modifier = true +ij_scala_type_annotation_local_definition = false +ij_scala_type_annotation_private_member = false +ij_scala_type_annotation_protected_member = true +ij_scala_type_annotation_public_member = true +ij_scala_type_annotation_structural_type = true +ij_scala_type_annotation_underscore_parameter = false +ij_scala_type_annotation_unit_type = true +ij_scala_use_alternate_continuation_indent_for_params = false +ij_scala_use_scaladoc2_formatting = false +ij_scala_variable_annotation_wrap = off +ij_scala_while_brace_force = never +ij_scala_while_on_new_line = false +ij_scala_wrap_before_with_keyword = false +ij_scala_wrap_first_method_in_call_chain = false +ij_scala_wrap_long_lines = false + +[.editorconfig] +ij_editorconfig_align_group_field_declarations = false +ij_editorconfig_space_after_colon = false +ij_editorconfig_space_after_comma = true +ij_editorconfig_space_before_colon = false +ij_editorconfig_space_before_comma = false +ij_editorconfig_spaces_around_assignment_operators = true + +[{*.ant,*.fxml,*.jhm,*.jnlp,*.jrxml,*.pom,*.rng,*.tld,*.wadl,*.wsdd,*.wsdl,*.xjb,*.xml,*.xsd,*.xsl,*.xslt,*.xul}] +ij_xml_align_attributes = true +ij_xml_align_text = false +ij_xml_attribute_wrap = normal +ij_xml_block_comment_at_first_column = true +ij_xml_keep_blank_lines = 2 +ij_xml_keep_indents_on_empty_lines = false +ij_xml_keep_line_breaks = true +ij_xml_keep_line_breaks_in_text = true +ij_xml_keep_whitespaces = false +ij_xml_keep_whitespaces_around_cdata = preserve +ij_xml_keep_whitespaces_inside_cdata = false +ij_xml_line_comment_at_first_column = true +ij_xml_space_after_tag_name = false +ij_xml_space_around_equals_in_attribute = false +ij_xml_space_inside_empty_tag = false +ij_xml_text_wrap = normal +ij_xml_use_custom_settings = false + 
+[{*.bash,*.sh,*.zsh}] +indent_size = 2 +tab_width = 2 +ij_shell_binary_ops_start_line = false +ij_shell_keep_column_alignment_padding = false +ij_shell_minify_program = false +ij_shell_redirect_followed_by_space = false +ij_shell_switch_cases_indented = false + +[{*.gant,*.gradle,*.groovy,*.gson,*.gy}] +ij_groovy_align_group_field_declarations = false +ij_groovy_align_multiline_array_initializer_expression = false +ij_groovy_align_multiline_assignment = false +ij_groovy_align_multiline_binary_operation = false +ij_groovy_align_multiline_chained_methods = false +ij_groovy_align_multiline_extends_list = false +ij_groovy_align_multiline_for = true +ij_groovy_align_multiline_list_or_map = true +ij_groovy_align_multiline_method_parentheses = false +ij_groovy_align_multiline_parameters = false +ij_groovy_align_multiline_parameters_in_calls = false +ij_groovy_align_multiline_resources = true +ij_groovy_align_multiline_ternary_operation = false +ij_groovy_align_multiline_throws_list = false +ij_groovy_align_named_args_in_map = true +ij_groovy_align_throws_keyword = false +ij_groovy_array_initializer_new_line_after_left_brace = false +ij_groovy_array_initializer_right_brace_on_new_line = false +ij_groovy_array_initializer_wrap = off +ij_groovy_assert_statement_wrap = off +ij_groovy_assignment_wrap = off +ij_groovy_binary_operation_wrap = off +ij_groovy_blank_lines_after_class_header = 0 +ij_groovy_blank_lines_after_imports = 1 +ij_groovy_blank_lines_after_package = 1 +ij_groovy_blank_lines_around_class = 1 +ij_groovy_blank_lines_around_field = 0 +ij_groovy_blank_lines_around_field_in_interface = 0 +ij_groovy_blank_lines_around_method = 1 +ij_groovy_blank_lines_around_method_in_interface = 1 +ij_groovy_blank_lines_before_imports = 1 +ij_groovy_blank_lines_before_method_body = 0 +ij_groovy_blank_lines_before_package = 0 +ij_groovy_block_brace_style = end_of_line +ij_groovy_block_comment_at_first_column = true +ij_groovy_call_parameters_new_line_after_left_paren = false 
+ij_groovy_call_parameters_right_paren_on_new_line = false +ij_groovy_call_parameters_wrap = off +ij_groovy_catch_on_new_line = false +ij_groovy_class_annotation_wrap = split_into_lines +ij_groovy_class_brace_style = end_of_line +ij_groovy_class_count_to_use_import_on_demand = 100 +ij_groovy_do_while_brace_force = never +ij_groovy_else_on_new_line = false +ij_groovy_enum_constants_wrap = off +ij_groovy_extends_keyword_wrap = off +ij_groovy_extends_list_wrap = off +ij_groovy_field_annotation_wrap = split_into_lines +ij_groovy_finally_on_new_line = false +ij_groovy_for_brace_force = never +ij_groovy_for_statement_new_line_after_left_paren = false +ij_groovy_for_statement_right_paren_on_new_line = false +ij_groovy_for_statement_wrap = off +ij_groovy_if_brace_force = never +ij_groovy_import_annotation_wrap = 2 +ij_groovy_imports_layout = *,|,javax.**,java.**,|,$* +ij_groovy_indent_case_from_switch = true +ij_groovy_indent_label_blocks = true +ij_groovy_insert_inner_class_imports = false +ij_groovy_keep_blank_lines_before_right_brace = 2 +ij_groovy_keep_blank_lines_in_code = 2 +ij_groovy_keep_blank_lines_in_declarations = 2 +ij_groovy_keep_control_statement_in_one_line = true +ij_groovy_keep_first_column_comment = true +ij_groovy_keep_indents_on_empty_lines = false +ij_groovy_keep_line_breaks = true +ij_groovy_keep_multiple_expressions_in_one_line = false +ij_groovy_keep_simple_blocks_in_one_line = false +ij_groovy_keep_simple_classes_in_one_line = true +ij_groovy_keep_simple_lambdas_in_one_line = true +ij_groovy_keep_simple_methods_in_one_line = true +ij_groovy_label_indent_absolute = false +ij_groovy_label_indent_size = 0 +ij_groovy_lambda_brace_style = end_of_line +ij_groovy_layout_static_imports_separately = true +ij_groovy_line_comment_add_space = false +ij_groovy_line_comment_at_first_column = true +ij_groovy_method_annotation_wrap = split_into_lines +ij_groovy_method_brace_style = end_of_line +ij_groovy_method_call_chain_wrap = off 
+ij_groovy_method_parameters_new_line_after_left_paren = false +ij_groovy_method_parameters_right_paren_on_new_line = false +ij_groovy_method_parameters_wrap = off +ij_groovy_modifier_list_wrap = false +ij_groovy_names_count_to_use_import_on_demand = 100 +ij_groovy_parameter_annotation_wrap = off +ij_groovy_parentheses_expression_new_line_after_left_paren = false +ij_groovy_parentheses_expression_right_paren_on_new_line = false +ij_groovy_prefer_parameters_wrap = false +ij_groovy_resource_list_new_line_after_left_paren = false +ij_groovy_resource_list_right_paren_on_new_line = false +ij_groovy_resource_list_wrap = off +ij_groovy_space_after_assert_separator = true +ij_groovy_space_after_colon = true +ij_groovy_space_after_comma = true +ij_groovy_space_after_comma_in_type_arguments = true +ij_groovy_space_after_for_semicolon = true +ij_groovy_space_after_quest = true +ij_groovy_space_after_type_cast = true +ij_groovy_space_before_annotation_parameter_list = false +ij_groovy_space_before_array_initializer_left_brace = false +ij_groovy_space_before_assert_separator = false +ij_groovy_space_before_catch_keyword = true +ij_groovy_space_before_catch_left_brace = true +ij_groovy_space_before_catch_parentheses = true +ij_groovy_space_before_class_left_brace = true +ij_groovy_space_before_closure_left_brace = true +ij_groovy_space_before_colon = true +ij_groovy_space_before_comma = false +ij_groovy_space_before_do_left_brace = true +ij_groovy_space_before_else_keyword = true +ij_groovy_space_before_else_left_brace = true +ij_groovy_space_before_finally_keyword = true +ij_groovy_space_before_finally_left_brace = true +ij_groovy_space_before_for_left_brace = true +ij_groovy_space_before_for_parentheses = true +ij_groovy_space_before_for_semicolon = false +ij_groovy_space_before_if_left_brace = true +ij_groovy_space_before_if_parentheses = true +ij_groovy_space_before_method_call_parentheses = false +ij_groovy_space_before_method_left_brace = true 
+ij_groovy_space_before_method_parentheses = false +ij_groovy_space_before_quest = true +ij_groovy_space_before_switch_left_brace = true +ij_groovy_space_before_switch_parentheses = true +ij_groovy_space_before_synchronized_left_brace = true +ij_groovy_space_before_synchronized_parentheses = true +ij_groovy_space_before_try_left_brace = true +ij_groovy_space_before_try_parentheses = true +ij_groovy_space_before_while_keyword = true +ij_groovy_space_before_while_left_brace = true +ij_groovy_space_before_while_parentheses = true +ij_groovy_space_in_named_argument = true +ij_groovy_space_in_named_argument_before_colon = false +ij_groovy_space_within_empty_array_initializer_braces = false +ij_groovy_space_within_empty_method_call_parentheses = false +ij_groovy_spaces_around_additive_operators = true +ij_groovy_spaces_around_assignment_operators = true +ij_groovy_spaces_around_bitwise_operators = true +ij_groovy_spaces_around_equality_operators = true +ij_groovy_spaces_around_lambda_arrow = true +ij_groovy_spaces_around_logical_operators = true +ij_groovy_spaces_around_multiplicative_operators = true +ij_groovy_spaces_around_regex_operators = true +ij_groovy_spaces_around_relational_operators = true +ij_groovy_spaces_around_shift_operators = true +ij_groovy_spaces_within_annotation_parentheses = false +ij_groovy_spaces_within_array_initializer_braces = false +ij_groovy_spaces_within_braces = true +ij_groovy_spaces_within_brackets = false +ij_groovy_spaces_within_cast_parentheses = false +ij_groovy_spaces_within_catch_parentheses = false +ij_groovy_spaces_within_for_parentheses = false +ij_groovy_spaces_within_gstring_injection_braces = false +ij_groovy_spaces_within_if_parentheses = false +ij_groovy_spaces_within_list_or_map = false +ij_groovy_spaces_within_method_call_parentheses = false +ij_groovy_spaces_within_method_parentheses = false +ij_groovy_spaces_within_parentheses = false +ij_groovy_spaces_within_switch_parentheses = false 
+ij_groovy_spaces_within_synchronized_parentheses = false +ij_groovy_spaces_within_try_parentheses = false +ij_groovy_spaces_within_tuple_expression = false +ij_groovy_spaces_within_while_parentheses = false +ij_groovy_special_else_if_treatment = true +ij_groovy_ternary_operation_wrap = off +ij_groovy_throws_keyword_wrap = off +ij_groovy_throws_list_wrap = off +ij_groovy_use_flying_geese_braces = false +ij_groovy_use_fq_class_names = false +ij_groovy_use_fq_class_names_in_javadoc = true +ij_groovy_use_relative_indents = false +ij_groovy_use_single_class_imports = true +ij_groovy_variable_annotation_wrap = off +ij_groovy_while_brace_force = never +ij_groovy_while_on_new_line = false +ij_groovy_wrap_long_lines = false + +[{*.har,*.jsb2,*.jsb3,*.json,.babelrc,.eslintrc,.stylelintrc,bowerrc,jest.config}] +indent_size = 2 +ij_json_keep_blank_lines_in_code = 0 +ij_json_keep_indents_on_empty_lines = false +ij_json_keep_line_breaks = true +ij_json_space_after_colon = true +ij_json_space_after_comma = true +ij_json_space_before_colon = true +ij_json_space_before_comma = false +ij_json_spaces_within_braces = false +ij_json_spaces_within_brackets = false +ij_json_wrap_long_lines = false + +[{*.markdown,*.md}] +ij_markdown_force_one_space_after_blockquote_symbol = true +ij_markdown_force_one_space_after_header_symbol = true +ij_markdown_force_one_space_after_list_bullet = true +ij_markdown_force_one_space_between_words = true +ij_markdown_keep_indents_on_empty_lines = false +ij_markdown_max_lines_around_block_elements = 1 +ij_markdown_max_lines_around_header = 1 +ij_markdown_max_lines_between_paragraphs = 1 +ij_markdown_min_lines_around_block_elements = 1 +ij_markdown_min_lines_around_header = 1 +ij_markdown_min_lines_between_paragraphs = 1 + +[{*.properties,spring.handlers,spring.schemas}] +ij_properties_align_group_field_declarations = false +ij_properties_keep_blank_lines = false +ij_properties_key_value_delimiter = equals +ij_properties_spaces_around_key_value_delimiter = 
false + +[{*.yaml,*.yml}] +indent_size = 2 +ij_yaml_align_values_properties = do_not_align +ij_yaml_autoinsert_sequence_marker = true +ij_yaml_block_mapping_on_new_line = false +ij_yaml_indent_sequence_value = true +ij_yaml_keep_indents_on_empty_lines = false +ij_yaml_keep_line_breaks = true +ij_yaml_sequence_on_new_line = false +ij_yaml_space_before_colon = false +ij_yaml_spaces_within_braces = true +ij_yaml_spaces_within_brackets = true diff --git a/.evergreen/.evg.yml b/.evergreen/.evg.yml index 6b4f074e595..e3bf87d49de 100644 --- a/.evergreen/.evg.yml +++ b/.evergreen/.evg.yml @@ -9,12 +9,11 @@ stepback: true # Mark a failure as a system/bootstrap failure (purple box) rather then a task # failure by default. -# Actual testing tasks are marked with `type: test` -command_type: system +# Actual testing tasks are marked with `type: "test"` +command_type: "system" -# Protect ourself against rogue test case, or curl gone wild, that runs forever -# 12 minutes is the longest we'll ever run -exec_timeout_secs: 3600 # 12 minutes is the longest we'll ever run +# Protect ourselves against rogue test case, or curl gone wild, that runs forever +exec_timeout_secs: 3600 # What to do when evergreen hits the timeout (`post:` tasks are run automatically) timeout: @@ -24,7 +23,12 @@ timeout: ls -la functions: - "fetch source": + + # + # Start up and teardown functions + # + + "fetch-source": # Executes git clone and applies the submitted patch, if any - command: git.get_project params: @@ -32,61 +36,74 @@ functions: # Applies the subitted patch, if any # Deprecated. Should be removed. 
But still needed for certain agents (ZAP) - command: git.apply_patch - # Make an evergreen exapanstion file with dynamic values - - command: shell.exec - params: - working_dir: "src" - script: | - # Get the current unique version of this checkout - if [ "${is_patch}" = "true" ]; then - CURRENT_VERSION=$(git describe)-patch-${version_id} - else - CURRENT_VERSION=latest - fi - - export DRIVERS_TOOLS="$(pwd)/../drivers-tools" - - # Python has cygwin path problems on Windows. Detect prospective mongo-orchestration home directory - if [ "Windows_NT" == "$OS" ]; then # Magic variable in cygwin - export DRIVERS_TOOLS=$(cygpath -m $DRIVERS_TOOLS) - fi - - export MONGO_ORCHESTRATION_HOME="$DRIVERS_TOOLS/.evergreen/orchestration" - export MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" - export UPLOAD_BUCKET="${project}" - export PROJECT_DIRECTORY="$(pwd)" - - cat < expansion.yml - CURRENT_VERSION: "$CURRENT_VERSION" - DRIVERS_TOOLS: "$DRIVERS_TOOLS" - MONGO_ORCHESTRATION_HOME: "$MONGO_ORCHESTRATION_HOME" - MONGODB_BINARIES: "$MONGODB_BINARIES" - UPLOAD_BUCKET: "$UPLOAD_BUCKET" - PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" - PREPARE_SHELL: | - set -o errexit - set -o xtrace - export DRIVERS_TOOLS="$DRIVERS_TOOLS" - export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" - export MONGODB_BINARIES="$MONGODB_BINARIES" - export UPLOAD_BUCKET="$UPLOAD_BUCKET" - export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" - - export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" - export PATH="$MONGODB_BINARIES:$PATH" - export PROJECT="${project}" - EOT - # See what we've done - cat expansion.yml + # Fetch the specifications submodule + - command: shell.exec + params: + working_dir: "src" + script: | + git submodule update --init + # Make an evergreen expansion file with dynamic values + - command: shell.exec + params: + working_dir: "src" + shell: "bash" + script: | + # Get the current unique version of this checkout + if [ "${is_patch}" = "true" ]; then + CURRENT_VERSION=$(git 
describe)-patch-${version_id} + else + CURRENT_VERSION=latest + fi + + export DRIVERS_TOOLS="$(pwd)/../drivers-tools" + + # Python has cygwin path problems on Windows. Detect prospective mongo-orchestration home directory + if [ "Windows_NT" == "$OS" ]; then # Magic variable in cygwin + export DRIVERS_TOOLS=$(cygpath -m $DRIVERS_TOOLS) + fi + + export MONGO_ORCHESTRATION_HOME="$DRIVERS_TOOLS/.evergreen/orchestration" + export MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" + export UPLOAD_BUCKET="${project}" + export PROJECT_DIRECTORY="$(pwd)" + export ARCHIVE_FILE_NAME="mongo-java-driver.tgz" + export ARCHIVE_FILE_PATH="/tmp/$ARCHIVE_FILE_NAME" + + cat < expansion.yml + CURRENT_VERSION: "$CURRENT_VERSION" + DRIVERS_TOOLS: "$DRIVERS_TOOLS" + MONGO_ORCHESTRATION_HOME: "$MONGO_ORCHESTRATION_HOME" + MONGODB_BINARIES: "$MONGODB_BINARIES" + UPLOAD_BUCKET: "$UPLOAD_BUCKET" + PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" + ARCHIVE_FILE_NAME: "$ARCHIVE_FILE_NAME" + ARCHIVE_FILE_PATH: "$ARCHIVE_FILE_PATH" + PREPARE_SHELL: | + set -o errexit + set -o xtrace + export DRIVERS_TOOLS="$DRIVERS_TOOLS" + export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" + export MONGODB_BINARIES="$MONGODB_BINARIES" + export UPLOAD_BUCKET="$UPLOAD_BUCKET" + export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" + export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" + export PATH="$MONGODB_BINARIES:$PATH" + export PROJECT="${project}" + export ARCHIVE_FILE_NAME="$ARCHIVE_FILE_NAME" + export ARCHIVE_FILE_PATH="$ARCHIVE_FILE_PATH" + EOT + # See what we've done + cat expansion.yml # Load the expansion file to make an evergreen variable with the current unique version - command: expansions.update params: file: src/expansion.yml - "prepare resources": + "prepare-resources": - command: shell.exec params: + shell: "bash" script: | ${PREPARE_SHELL} rm -rf $DRIVERS_TOOLS @@ -94,510 +111,2425 @@ functions: # If this was a patch build, doing a fresh clone would not actually test the patch cp -R ${PROJECT_DIRECTORY}/ 
$DRIVERS_TOOLS else - git clone git://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS + git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS fi echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" > $MONGO_ORCHESTRATION_HOME/orchestration.config - # Upload build artifacts that other tasks may depend on - # Note this URL needs to be totally unique, while predictable for the next task - # so it can automatically download the artifacts - "upload build": - # Compress and upload the entire build directory - - command: archive.targz_pack - params: - # Example: mongo_c_driver_releng_9dfb7d741efbca16faa7859b9349d7a942273e43_16_11_08_19_29_52.tar.gz - target: "${build_id}.tar.gz" - source_dir: ${PROJECT_DIRECTORY}/ - include: - - "./**" - - command: s3.put + "fix-absolute-paths": + - command: shell.exec params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} - local_file: ${build_id}.tar.gz - # Example: /mciuploads/${UPLOAD_BUCKET}/gcc49/9dfb7d741efbca16faa7859b9349d7a942273e43/debug-compile-nosasl-nossl/mongo_c_driver_releng_9dfb7d741efbca16faa7859b9349d7a942273e43_16_11_08_19_29_52.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${task_name}/${build_id}.tar.gz - bucket: mciuploads - permissions: public-read - content_type: ${content_type|application/x-gzip} + script: | + ${PREPARE_SHELL} + for filename in $(find ${DRIVERS_TOOLS} -name \*.json); do + perl -p -i -e "s|ABSOLUTE_PATH_REPLACEMENT_TOKEN|${DRIVERS_TOOLS}|g" $filename + done + + "assume-aws-test-secrets-role": + - command: ec2.assume_role + params: + role_arn: ${aws_test_secrets_role} - "exec script" : + "gradle-cache": - command: shell.exec - type: test params: working_dir: "src" + script: | + export GRADLE_RO_DEP_CACHE="$(pwd)/build/gradle-cache" + .evergreen/gradle-cache.sh + + "create-archive-tar-file": + - command: shell.exec + params: + working_dir: "src" + script: | + echo "Creating archive tar 
file at ${ARCHIVE_FILE_PATH}" + tar --exclude-vcs -czf "${ARCHIVE_FILE_PATH}" . + echo "Created archive tar file at ${ARCHIVE_FILE_PATH}" + + "start-mongo-orchestration": + - command: shell.exec + params: script: | ${PREPARE_SHELL} - ${PROJECT_DIRECTORY}/${file} + REQUIRE_API_VERSION=${REQUIRE_API_VERSION} LOAD_BALANCER=${LOAD_BALANCER} MONGODB_VERSION=${VERSION} TOPOLOGY=${TOPOLOGY} \ + AUTH=${AUTH} SSL=${SSL} STORAGE_ENGINE=${STORAGE_ENGINE} ORCHESTRATION_FILE=${ORCHESTRATION_FILE} \ + bash ${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh + # run-orchestration generates expansion file with the MONGODB_URI for the cluster + - command: expansions.update + params: + file: mo-expansion.yml + "stop-mongo-orchestration": + - command: shell.exec + params: + shell: "bash" + script: | + ${PREPARE_SHELL} + bash ${DRIVERS_TOOLS}/.evergreen/stop-orchestration.sh || true - "upload mo artifacts": + "start-mongohoused": + - command: shell.exec + params: + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + script: | + DRIVERS_TOOLS="${DRIVERS_TOOLS}" bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/pull-mongohouse-image.sh - command: shell.exec params: + script: | + DRIVERS_TOOLS="${DRIVERS_TOOLS}" bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/run-mongohouse-image.sh + + "stop-mongohoused": + - command: shell.exec + params: + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + script: | + DRIVERS_TOOLS="${DRIVERS_TOOLS}" bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/teardown.sh || true + + + "start-load-balancer": + - command: shell.exec + params: + script: | + DRIVERS_TOOLS=${DRIVERS_TOOLS} MONGODB_URI=${MONGODB_URI} bash ${DRIVERS_TOOLS}/.evergreen/run-load-balancer.sh start + - command: expansions.update + params: + file: lb-expansion.yml + "stop-load-balancer": + - command: shell.exec + params: + script: | + cd ${DRIVERS_TOOLS}/.evergreen + DRIVERS_TOOLS=${DRIVERS_TOOLS} bash 
${DRIVERS_TOOLS}/.evergreen/run-load-balancer.sh stop || true + + "stop-aws": + - command: shell.exec + params: + shell: "bash" + script: | + ${PREPARE_SHELL} + cd "${DRIVERS_TOOLS}/.evergreen/auth_aws" + if [ -f "./aws_e2e_setup.json" ]; then + . ./activate-authawsvenv.sh + python ./lib/aws_assign_instance_profile.py + fi + + "cleanup": + - command: shell.exec + params: + shell: "bash" + script: | + ${PREPARE_SHELL} + rm -rf $DRIVERS_TOOLS || true + + "add-aws-auth-variables-to-file": + - command: shell.exec + type: "test" + params: + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + shell: "bash" + working_dir: "src" + script: | + ${PREPARE_SHELL} + cd $DRIVERS_TOOLS/.evergreen/auth_aws + ./setup_secrets.sh drivers/aws_auth + + "add-atlas-connect-variables-to-file": + - command: shell.exec + type: "test" + params: + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + shell: "bash" + working_dir: "src" + script: | + ${PREPARE_SHELL} + ${DRIVERS_TOOLS}/.evergreen/secrets_handling/setup-secrets.sh drivers/atlas_connect + + "start-csfle-servers": + - command: ec2.assume_role + params: + role_arn: ${aws_test_secrets_role} + - command: subprocess.exec + params: + working_dir: "src" + binary: "bash" + include_expansions_in_env: ["AWS_SECRET_ACCESS_KEY", "AWS_ACCESS_KEY_ID", "AWS_SESSION_TOKEN"] + args: + - ${DRIVERS_TOOLS}/.evergreen/csfle/setup.sh + + "stop-csfle-servers": + - command: shell.exec + params: + shell: "bash" + script: | + ${PREPARE_SHELL} + bash ${DRIVERS_TOOLS}/.evergreen/csfle/teardown.sh || true + + # + # Publishing / uploading functions + # + + "upload-mo-artifacts": + - command: ec2.assume_role + params: + role_arn: ${UPLOAD_MO_ARTIFACTS_ROLE_ARN} + - command: shell.exec + params: + shell: "bash" script: | ${PREPARE_SHELL} find $MONGO_ORCHESTRATION_HOME -name \*.log | xargs tar czf mongodb-logs.tar.gz - command: s3.put params: - aws_key: ${aws_key} - 
aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: mongodb-logs.tar.gz remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-mongodb-logs.tar.gz bucket: mciuploads permissions: public-read - content_type: ${content_type|application/x-gzip} + content_type: "${content_type|application/x-gzip}" display_name: "mongodb-logs.tar.gz" - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: drivers-tools/.evergreen/orchestration/server.log remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-orchestration.log bucket: mciuploads permissions: public-read - content_type: ${content_type|text/plain} + content_type: "${content_type|text/plain}" display_name: "orchestration.log" - "upload working dir": - - command: archive.targz_pack + "create-and-upload-SSDLC-release-assets": + - command: shell.exec + params: + shell: "bash" + working_dir: "src" + env: + PRODUCT_NAME: ${product_name} + PRODUCT_VERSION: ${product_version} + EVERGREEN_VERSION_ID: ${version_id} + script: .evergreen/ssdlc-report.sh + - command: ec2.assume_role params: - target: "working-dir.tar.gz" - source_dir: ${PROJECT_DIRECTORY}/ - include: - - "./**" + role_arn: ${UPLOAD_SSDLC_RELEASE_ASSETS_ROLE_ARN} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} - local_file: working-dir.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-working-dir.tar.gz - bucket: mciuploads - permissions: public-read - content_type: ${content_type|application/x-gzip} - display_name: "working-dir.tar.gz" - - command: archive.targz_pack - params: - target: "drivers-dir.tar.gz" - 
source_dir: ${DRIVERS_TOOLS} - include: - - "./**" + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} + local_file: ./src/build/ssdlc/ssdlc_compliance_report.md + remote_file: ${product_name}/${product_version}/ssdlc_compliance_report.md + bucket: java-driver-release-assets + region: us-west-1 + permissions: private + content_type: "text/markdown" + display_name: "ssdlc_compliance_report.md" - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} - local_file: drivers-dir.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-drivers-dir.tar.gz - bucket: mciuploads - permissions: public-read - content_type: ${content_type|application/x-gzip} - display_name: "drivers-dir.tar.gz" + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} + local_files_include_filter: + - build/ssdlc/static-analysis-reports/*.sarif + local_files_include_filter_prefix: ./src/ + remote_file: ${product_name}/${product_version}/static-analysis-reports/ + bucket: java-driver-release-assets + region: us-west-1 + permissions: private + content_type: "application/sarif+json" + display_name: - "upload test results": + "upload-test-results": - command: attach.xunit_results params: - file: ./src/*/build/test-results/TEST-*.xml + file: ./src/*/build/test-results/*/TEST-*.xml - "bootstrap mongo-orchestration": + "trace-artifacts": - command: shell.exec params: + working_dir: "src" script: | - ${PREPARE_SHELL} - MONGODB_VERSION=${VERSION} TOPOLOGY=${TOPOLOGY} AUTH=${AUTH} SSL=${SSL} sh ${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh - # run-orchestration generates expansion file with the MONGODB_URI for the cluster + PRODUCT_VERSION="$(echo -n "$(git describe --tags --always --dirty)" | cut -c 2-)" + cat > ssdlc-expansions.yml < setup.js + const mongo_binaries = "$MONGODB_BINARIES"; + const 
project_dir = "$PROJECT_DIRECTORY"; + EOF + + mongo --nodb setup.js aws_e2e_ecs.js cd - - rm -rf $DRIVERS_TOOLS || true - "fix absolute paths": + "run-ocsp-test": - command: shell.exec + type: "test" params: + working_dir: "src" script: | ${PREPARE_SHELL} - for filename in $(find ${DRIVERS_TOOLS} -name \*.json); do - perl -p -i -e "s|ABSOLUTE_PATH_REPLACEMENT_TOKEN|${DRIVERS_TOOLS}|g" $filename - done + CA_FILE="${DRIVERS_TOOLS}/.evergreen/ocsp/${OCSP_ALGORITHM}/ca.pem" \ + OCSP_TLS_SHOULD_SUCCEED="${OCSP_TLS_SHOULD_SUCCEED}" \ + OCSP_MUST_STAPLE="${OCSP_MUST_STAPLE}" \ + JAVA_VERSION="${JAVA_VERSION}" \ + bash ${PROJECT_DIRECTORY}/.evergreen/run-ocsp-test.sh - "windows fix": + "run-valid-ocsp-server-ca-responder-test": - command: shell.exec params: + background: true + shell: "bash" script: | ${PREPARE_SHELL} - for i in $(find ${DRIVERS_TOOLS}/.evergreen ${PROJECT_DIRECTORY}/.evergreen -name \*.sh); do - cat $i | tr -d '\r' > $i.new - mv $i.new $i - done + cd ${DRIVERS_TOOLS}/.evergreen/ocsp + . ./activate-ocspvenv.sh + nohup python ocsp_mock.py \ + --ca_file ${OCSP_ALGORITHM}/ca.pem \ + --ocsp_responder_cert ${OCSP_ALGORITHM}/ca.crt \ + --ocsp_responder_key ${OCSP_ALGORITHM}/ca.key \ + -p 8100 -v - "make files executable": + "run-revoked-ocsp-server-ca-responder-test": - command: shell.exec params: + background: true + shell: "bash" script: | ${PREPARE_SHELL} - for i in $(find ${DRIVERS_TOOLS}/.evergreen ${PROJECT_DIRECTORY}/.evergreen -name \*.sh); do - chmod +x $i - done + cd ${DRIVERS_TOOLS}/.evergreen/ocsp + . 
./activate-ocspvenv.sh + nohup python ocsp_mock.py \ + --ca_file ${OCSP_ALGORITHM}/ca.pem \ + --ocsp_responder_cert ${OCSP_ALGORITHM}/ca.crt \ + --ocsp_responder_key ${OCSP_ALGORITHM}/ca.key \ + -p 8100 \ + -v \ + --fault revoked - "init test-results": + "run-valid-ocsp-server-delegate-responder-test": - command: shell.exec params: + background: true + shell: "bash" script: | ${PREPARE_SHELL} - echo '{"results": [{ "status": "FAIL", "test_file": "Build", "log_raw": "No test-results.json found was created" } ]}' > ${PROJECT_DIRECTORY}/test-results.json + cd ${DRIVERS_TOOLS}/.evergreen/ocsp + . ./activate-ocspvenv.sh + nohup python ocsp_mock.py \ + --ca_file ${OCSP_ALGORITHM}/ca.pem \ + --ocsp_responder_cert ${OCSP_ALGORITHM}/ocsp-responder.crt \ + --ocsp_responder_key ${OCSP_ALGORITHM}/ocsp-responder.key \ + -p 8100 -v - "install dependencies": - type: test - params: - working_dir: "src" - script: | - ${PREPARE_SHELL} - file="${PROJECT_DIRECTORY}/.evergreen/install-dependencies.sh" - [ -f ${file} ] && sh ${file} || echo "${file} not available, skipping" + "run-revoked-ocsp-server-delegate-responder-test": + - command: shell.exec + params: + background: true + shell: "bash" + script: | + ${PREPARE_SHELL} + cd ${DRIVERS_TOOLS}/.evergreen/ocsp + . 
./activate-ocspvenv.sh + nohup python ocsp_mock.py \ + --ca_file ${OCSP_ALGORITHM}/ca.pem \ + --ocsp_responder_cert ${OCSP_ALGORITHM}/ocsp-responder.crt \ + --ocsp_responder_key ${OCSP_ALGORITHM}/ocsp-responder.key \ + -p 8100 \ + -v \ + --fault revoked -# Anchors + "run-gssapi-auth-test": + - command: shell.exec + type: "test" + params: + silent: true + working_dir: "src" + script: | + # DO NOT ECHO WITH XTRACE (which PREPARE_SHELL does) + PROJECT_DIRECTORY=${PROJECT_DIRECTORY} JAVA_VERSION=${JAVA_VERSION} MONGODB_URI=${gssapi_auth_mongodb_uri} \ + KDC=${gssapi_auth_kdc} REALM=${gssapi_auth_realm} KEYTAB_BASE64=${gssapi_auth_keytab_base64} \ + LOGIN_CONTEXT_NAME=${LOGIN_CONTEXT_NAME} \ + .evergreen/run-gssapi-auth-test.sh + + "run-socks5-tests": + - command: shell.exec + type: "test" + params: + working_dir: "src" + script: | + ${PREPARE_SHELL} + SOCKS_AUTH="${SOCKS_AUTH}" \ + SSL="${SSL}" MONGODB_URI="${MONGODB_URI}" \ + JAVA_VERSION="${JAVA_VERSION}" \ + .evergreen/run-socks5-tests.sh + + "run-kms-tls-test": + - command: shell.exec + type: "test" + params: + working_dir: "src" + script: | + ${PREPARE_SHELL} + set +o xtrace + MONGODB_URI="${MONGODB_URI}" KMS_TLS_ERROR_TYPE=${KMS_TLS_ERROR_TYPE} .evergreen/run-kms-tls-tests.sh + + "run-csfle-aws-from-environment-test": + - command: shell.exec + type: "test" + params: + working_dir: "src" + script: | + ${PREPARE_SHELL} + set +o xtrace + MONGODB_URI="${MONGODB_URI}" .evergreen/run-csfle-aws-from-environment.sh -hosts: &hosts - - rhel62-small + "run-csfle-tests-with-mongocryptd": + - command: shell.exec + type: "test" + params: + working_dir: "src" + env: + AZUREKMS_KEY_VAULT_ENDPOINT: ${testazurekms_keyvaultendpoint} + AZUREKMS_KEY_NAME: ${testazurekms_keyname} + script: | + ${PREPARE_SHELL} + MONGODB_URI="${MONGODB_URI}" JAVA_VERSION="${JAVA_VERSION}" .evergreen/run-csfle-tests-with-mongocryptd.sh + + "run-perf-tests": + - command: shell.exec + type: "test" + params: + working_dir: "src" + env: + PROVIDER: 
${PROVIDER} + script: | + ${PREPARE_SHELL} + PROJECT_DIRECTORY=${PROJECT_DIRECTORY} .evergreen/run-perf-tests.sh + + "run-graalvm-native-image-app": + - command: shell.exec + type: "test" + params: + working_dir: "src" + script: | + ${PREPARE_SHELL} + MONGODB_URI="${MONGODB_URI}" JAVA_VERSION="${JAVA_VERSION}" .evergreen/run-graalvm-native-image-app.sh + + "run-oidc-auth-test-k8s-test": + - command: shell.exec + type: "test" + params: + shell: "bash" + working_dir: "src" + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "ARCHIVE_FILE_PATH" ] + script: |- + set -o errexit + ${PREPARE_SHELL} + export K8S_VARIANT=${VARIANT} + export K8S_DRIVERS_TAR_FILE=$ARCHIVE_FILE_PATH + export K8S_TEST_CMD="GRADLE_RO_DEP_CACHE='/tmp/test/build/gradle-cache' OIDC_ENV=k8s VARIANT=${VARIANT} ./.evergreen/run-mongodb-oidc-test.sh" + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/setup-pod.sh + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/run-self-test.sh + source $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/secrets-export.sh + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/run-driver-test.sh + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/teardown-pod.sh + +# Anchors pre: - - func: "fetch source" - - func: "prepare resources" - - func: "windows fix" - - func: "fix absolute paths" - - func: "init test-results" - - func: "make files executable" - - func: "install dependencies" + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" post: - # Removed, causing timeouts - # - func: "upload working dir" - - func: "upload mo artifacts" - - func: "upload test results" + - func: "stop-mongo-orchestration" + - func: "upload-mo-artifacts" + - func: "upload-test-results" + - func: "assume-aws-test-secrets-role" + - func: "stop-load-balancer" + - func: "stop-aws" + - func: "stop-mongohoused" + - func: "stop-csfle-servers" - func: "cleanup" tasks: - # Compile / check build variant - - name: static-analysis - commands: - - func: 
"exec script" - vars: - file: ".evergreen/compile.sh" - - func: "upload build" - - - name: "test" - depends_on: - - variant: "static-checks" - name: "static-analysis" - commands: - - func: "bootstrap mongo-orchestration" - - func: "run tests" - - - name: "plain-auth-test" - depends_on: - - variant: "static-checks" - name: "static-analysis" - commands: - - func: "run plain auth test" - - - name: "gssapi-auth-test" - depends_on: - - variant: "static-checks" - name: "static-analysis" - commands: - - func: "run gssapi auth test" - - - name: "socket-test" - depends_on: - - variant: "static-checks" - name: "static-analysis" - commands: - - func: "bootstrap mongo-orchestration" - - func: "run socket tests" - - - name: "atlas-test" - depends_on: - - variant: "static-checks" - name: "static-analysis" - commands: - - func: "run atlas test" - - - name: publish-snapshot - depends_on: - - variant: ".tests-variant" - name: "test" - - variant: ".test-gssapi-variant" - name: "gssapi-auth-test" - - variant: "plain-auth-test" - name: "plain-auth-test" - commands: - - func: "publish snapshot" + # Compile / check build variant + - name: "static-analysis-task" + commands: + - command: subprocess.exec + type: "test" + params: + working_dir: "src" + binary: bash + args: + - ".evergreen/static-checks.sh" + + - name: "test-bson-and-crypt-task" + commands: + - func: "run-tests" + vars: + TESTS: 'bson:test bson-record-codec:test mongodb-crypt:test' + + - name: "test-core-task" + commands: + - func: "start-mongo-orchestration" + - func: "run-tests" + vars: + TESTS: 'driver-core:test' + + - name: "test-legacy-task" + commands: + - func: "start-csfle-servers" + - func: "start-mongo-orchestration" + - func: "run-tests" + vars: + TESTS: 'driver-legacy:test' + + - name: "test-sync-task" + commands: + - func: "start-csfle-servers" + - func: "start-mongo-orchestration" + - func: "run-tests" + vars: + TESTS: 'driver-sync:test' + + - name: "test-reactive-task" + commands: + - func: 
"start-csfle-servers" + - func: "start-mongo-orchestration" + - func: "run-tests" + vars: + TESTS: 'driver-reactive-streams:test' + + - name: "scala-test-task" + commands: + - func: "start-mongo-orchestration" + - func: "run-scala-tests" + + - name: "kotlin-test-task" + commands: + - func: "start-mongo-orchestration" + - func: "run-kotlin-tests" + + - name: "reactive-streams-tck-test-task" + commands: + - func: "start-mongo-orchestration" + vars: + VERSION: "6.0" + TOPOLOGY: "server" + - func: "run-reactive-streams-tck-tests" + + - name: "load-balancer-test-task" + commands: + - func: "start-mongo-orchestration" + vars: + LOAD_BALANCER: 'true' + - func: "start-load-balancer" + - func: "run-load-balancer-tests" + + - name: "oidc-auth-test-task" + commands: + - command: subprocess.exec + type: "test" + params: + working_dir: "src" + binary: bash + include_expansions_in_env: [ "DRIVERS_TOOLS", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + env: + OIDC_ENV: "test" + args: + - .evergreen/run-mongodb-oidc-test.sh + + - name: "oidc-auth-test-azure-task" + # Might exceed 1 hour of execution. + exec_timeout_secs: 7200 + commands: + - command: shell.exec + params: + shell: "bash" + working_dir: "src" + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "ARCHIVE_FILE_PATH" ] + env: + JAVA_HOME: ${JAVA_HOME} + script: |- + set -o errexit + ${PREPARE_SHELL} + export AZUREOIDC_DRIVERS_TAR_FILE=$ARCHIVE_FILE_PATH + export AZUREOIDC_TEST_CMD="GRADLE_RO_DEP_CACHE='/home/azureuser/build/gradle-cache' OIDC_ENV=azure ./.evergreen/run-mongodb-oidc-test.sh" + tar --exclude-vcs -czf $AZUREOIDC_DRIVERS_TAR_FILE . + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/run-driver-test.sh + + - name: "oidc-auth-test-gcp-task" + # Might exceed 1 hour of execution. 
+ exec_timeout_secs: 7200 + commands: + - command: shell.exec + params: + shell: "bash" + working_dir: "src" + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "ARCHIVE_FILE_PATH" ] + script: |- + set -o errexit + ${PREPARE_SHELL} + export GCPOIDC_DRIVERS_TAR_FILE=$ARCHIVE_FILE_PATH + # Define the command to run on the VM. + # Ensure that we source the environment file created for us, set up any other variables we need, + # and then run our test suite on the vm. + export GCPOIDC_TEST_CMD="GRADLE_RO_DEP_CACHE='./build/gradle-cache' OIDC_ENV=gcp ./.evergreen/run-mongodb-oidc-test.sh" + tar --exclude-vcs -czf $GCPOIDC_DRIVERS_TAR_FILE . + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/run-driver-test.sh + + - name: "oidc-auth-test-k8s-eks-task" + # Might exceed 1 hour of execution. + exec_timeout_secs: 7200 + commands: + - func: "assume-aws-test-secrets-role" + - func: "run-oidc-auth-test-k8s-test" + vars: + VARIANT: eks + + - name: "oidc-auth-test-k8s-aks-task" + # Might exceed 1 hour of execution. + exec_timeout_secs: 7200 + commands: + - func: "assume-aws-test-secrets-role" + - func: "run-oidc-auth-test-k8s-test" + vars: + VARIANT: aks + + - name: "oidc-auth-test-k8s-gke-task" + # Might exceed 1 hour of execution. + exec_timeout_secs: 7200 + commands: + - func: "assume-aws-test-secrets-role" + - func: "run-oidc-auth-test-k8s-test" + vars: + VARIANT: gke + + + - name: "accept-api-version-2-test-task" + commands: + - func: "start-csfle-servers" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "versioned-api-testing.json" + - func: "run-tests" + + - name: "plain-auth-test-task" + commands: + - func: "run-plain-auth-test" + + # Test that x509 auth using server with OpenSSL 3 succeeds. 
+ - name: "atlas-x509-auth-test-task" + commands: + - func: "assume-aws-test-secrets-role" + - func: "add-atlas-connect-variables-to-file" + - func: "run-x509-auth-test" + + - name: "aws-auth-test-with-regular-aws-credentials-task" + commands: + - func: "start-mongo-orchestration" + vars: + AUTH: "auth" + ORCHESTRATION_FILE: "auth-aws.json" + TOPOLOGY: "server" + - func: "assume-aws-test-secrets-role" + - func: "add-aws-auth-variables-to-file" + - func: "run-aws-auth-test-with-regular-aws-credentials" + + - name: "aws-auth-test-with-assume-role-credentials-task" + commands: + - func: "start-mongo-orchestration" + vars: + AUTH: "auth" + ORCHESTRATION_FILE: "auth-aws.json" + TOPOLOGY: "server" + - func: "assume-aws-test-secrets-role" + - func: "add-aws-auth-variables-to-file" + - func: "run-aws-auth-test-with-assume-role-credentials" + + - name: "aws-auth-test-with-aws-credentials-as-environment-variables-task" + commands: + - func: "start-mongo-orchestration" + vars: + AUTH: "auth" + ORCHESTRATION_FILE: "auth-aws.json" + TOPOLOGY: "server" + - func: "assume-aws-test-secrets-role" + - func: "add-aws-auth-variables-to-file" + - func: "run-aws-auth-test-with-aws-credentials-as-environment-variables" + + - name: "aws-auth-test-with-aws-credentials-and-session-token-as-environment-variables-task" + commands: + - func: "start-mongo-orchestration" + vars: + AUTH: "auth" + ORCHESTRATION_FILE: "auth-aws.json" + TOPOLOGY: "server" + - func: "assume-aws-test-secrets-role" + - func: "add-aws-auth-variables-to-file" + - func: "run-aws-auth-test-with-aws-credentials-and-session-token-as-environment-variables" + + - name: "aws-auth-test-with-aws-EC2-credentials-task" + commands: + - func: "start-mongo-orchestration" + vars: + AUTH: "auth" + ORCHESTRATION_FILE: "auth-aws.json" + TOPOLOGY: "server" + - func: "assume-aws-test-secrets-role" + - func: "add-aws-auth-variables-to-file" + - func: "run-aws-auth-test-with-aws-EC2-credentials" + + - name: 
"aws-auth-test-with-web-identity-credentials-task" + commands: + - func: "start-mongo-orchestration" + vars: + AUTH: "auth" + ORCHESTRATION_FILE: "auth-aws.json" + TOPOLOGY: "server" + - func: "assume-aws-test-secrets-role" + - func: "add-aws-auth-variables-to-file" + - func: "run-aws-auth-test-with-web-identity-credentials" + + - name: "test-ocsp-rsa-valid-cert-server-staples-task" + tags: [ "ocsp" ] + commands: + - func: "run-valid-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "true" + OCSP_TLS_SHOULD_SUCCEED: "1" + + - name: "test-ocsp-rsa-invalid-cert-server-staples-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "true" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-rsa-valid-cert-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "run-valid-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "1" + + - name: "test-ocsp-rsa-invalid-cert-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: 
"rsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-rsa-valid-cert-server-staples-delegate-responder-task" + tags: [ "ocsp" ] + commands: + - func: "run-valid-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "true" + OCSP_TLS_SHOULD_SUCCEED: "1" + + - name: "test-ocsp-rsa-invalid-cert-server-staples-delegate-responder-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "true" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-rsa-valid-cert-server-does-not-staple-delegate-responder-task" + tags: [ "ocsp" ] + commands: + - func: "run-valid-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "1" + + - name: "test-ocsp-rsa-invalid-cert-server-does-not-staple-delegate-responder-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + 
VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-rsa-soft-fail-task" + tags: [ "ocsp" ] + commands: + - func: "start-mongo-orchestration" + vars: + OCSP_ALGORITHM: "rsa" + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-rsa-malicious-invalid-cert-mustStaple-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-rsa-malicious-delegate-responder-invalid-cert-mustStaple-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "rsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-rsa-malicious-no-responder-mustStaple-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "rsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: 
"test-ocsp-ecdsa-valid-cert-server-staples-task" + tags: [ "ocsp" ] + commands: + - func: "run-valid-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "true" + OCSP_TLS_SHOULD_SUCCEED: "1" + + - name: "test-ocsp-ecdsa-invalid-cert-server-staples-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "true" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-ecdsa-valid-cert-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "run-valid-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "1" + + - name: "test-ocsp-ecdsa-invalid-cert-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-ecdsa-soft-fail-task" + tags: [ "ocsp" ] + commands: + - func: "start-mongo-orchestration" + vars: + 
ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-ecdsa-malicious-invalid-cert-mustStaple-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-ca-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-ecdsa-valid-cert-server-staples-delegate-responder-task" + tags: [ "ocsp" ] + commands: + - func: "run-valid-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "true" + OCSP_TLS_SHOULD_SUCCEED: "1" + + - name: "test-ocsp-ecdsa-invalid-cert-server-staples-delegate-responder-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "true" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-ecdsa-valid-cert-server-does-not-staple-delegate-responder-task" + tags: [ "ocsp" ] + commands: + - func: "run-valid-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: 
"ecdsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "1" + + - name: "test-ocsp-ecdsa-invalid-cert-server-does-not-staple-delegate-responder-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-ecdsa-malicious-delegate-responder-invalid-cert-mustStaple-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "run-revoked-ocsp-server-delegate-responder-test" + vars: + OCSP_ALGORITHM: "ecdsa" + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "test-ocsp-ecdsa-malicious-no-responder-mustStaple-server-does-not-staple-task" + tags: [ "ocsp" ] + commands: + - func: "start-mongo-orchestration" + vars: + ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-disableStapling-singleEndpoint.json" + VERSION: "latest" + TOPOLOGY: "server" + - func: "run-ocsp-test" + vars: + OCSP_ALGORITHM: "ecdsa" + OCSP_MUST_STAPLE: "false" + OCSP_TLS_SHOULD_SUCCEED: "0" + + - name: "atlas-search-task" + commands: + - command: shell.exec + type: "test" + params: + working_dir: "src" + script: | + ${PREPARE_SHELL} + MONGODB_URI="${atlas_search_uri}" .evergreen/run-atlas-search-tests.sh + + - name: "atlas-connectivity-task" + commands: + - command: shell.exec + type: "test" + params: + 
silent: true + working_dir: "src" + script: | + # DO NOT ECHO WITH XTRACE (which PREPARE_SHELL does) + # The connection strings are pipe-delimited + MONGODB_URIS="${atlas_free_tier_uri}|${atlas_replica_set_uri}|${atlas_sharded_uri}|${atlas_tls_v11_uri}|${atlas_tls_v12_uri}|${atlas_free_tier_uri_srv}|${atlas_replica_set_uri_srv}|${atlas_sharded_uri_srv}|${atlas_tls_v11_uri_srv}|${atlas_tls_v12_uri_srv}" \ + JAVA_VERSION="8" \ + .evergreen/run-connectivity-tests.sh + + - name: "atlas-search-index-management-task" + commands: + - command: subprocess.exec + params: + working_dir: "src" + binary: bash + add_expansions_to_env: true + args: + - .evergreen/run-atlas-search-index-management-tests.sh + + - name: "gssapi-auth-test-task" + commands: + - func: "run-gssapi-auth-test" + + - name: "slow-test-task" + commands: + - func: "start-mongo-orchestration" + - func: "run-slow-tests" + + - name: "socket-test-task" + commands: + - func: "start-mongo-orchestration" + - func: "run-socket-tests" + + - name: "publish-snapshot-task" + depends_on: + - variant: "static-checks" + name: "static-analysis-task" + commands: + - func: "publish-snapshot" + - func: "trace-artifacts" + vars: + product_name: mongo-java-driver-snapshot + - func: "create-and-upload-SSDLC-release-assets" + + - name: "publish-release-task" + git_tag_only: true + commands: + - func: "publish-release" + - func: "trace-artifacts" + vars: + product_name: mongo-java-driver + - func: "create-and-upload-SSDLC-release-assets" + + # Do not rename this task – renaming resets the performance time series + - name: "perf-task" + tags: [ "perf" ] + # Benchmark could exceed 1 hour of execution. + exec_timeout_secs: 7200 + commands: + - func: "start-mongo-orchestration" + vars: + VERSION: "v8.0-perf" + TOPOLOGY: "server" + SSL: "nossl" + AUTH: "noauth" + - func: "run-perf-tests" + - func: "send-dashboard-data" + + - name: "perf-netty-task" + tags: [ "perf" ] + # Benchmark could exceed 1 hour of execution. 
+ exec_timeout_secs: 7200 + commands: + - func: "start-mongo-orchestration" + vars: + VERSION: "v8.0-perf" + TOPOLOGY: "server" + SSL: "nossl" + AUTH: "noauth" + - func: "run-perf-tests" + vars: + PROVIDER: "Netty" + - func: "send-dashboard-data" + + - name: "aws-lambda-deployed-task" + commands: + - command: ec2.assume_role + params: + role_arn: ${LAMBDA_AWS_ROLE_ARN} + duration_seconds: 3600 + - command: subprocess.exec + params: + working_dir: "src" + binary: bash + add_expansions_to_env: true + args: + - .evergreen/run-deployed-lambda-aws-tests.sh + env: + TEST_LAMBDA_DIRECTORY: ${PROJECT_DIRECTORY}/driver-lambda/ + AWS_REGION: us-east-1 + + - name: "test-kms-tls-invalid-cert-task" + tags: [ "kms-tls" ] + commands: + - func: "start-mongo-orchestration" + vars: + TOPOLOGY: "server" + AUTH: "noauth" + SSL: "nossl" + - func: "start-csfle-servers" + - func: "run-kms-tls-test" + vars: + KMS_TLS_ERROR_TYPE: "expired" + TOPOLOGY: "server" + AUTH: "noauth" + SSL: "nossl" + + - name: "test-kms-tls-invalid-hostname-task" + tags: [ "kms-tls" ] + commands: + - func: "start-mongo-orchestration" + vars: + TOPOLOGY: "server" + AUTH: "noauth" + SSL: "nossl" + - func: "start-csfle-servers" + vars: + CERT_FILE: "wrong-host.pem" + - func: "run-kms-tls-test" + vars: + KMS_TLS_ERROR_TYPE: "invalidHostname" + TOPOLOGY: "server" + AUTH: "noauth" + SSL: "nossl" + + - name: "test-csfle-aws-from-environment-task" + tags: [ "csfle-aws-from-environment" ] + commands: + - func: "start-csfle-servers" + - func: "start-mongo-orchestration" + vars: + TOPOLOGY: "server" + AUTH: "noauth" + SSL: "nossl" + - func: "run-csfle-aws-from-environment-test" + + - name: "csfle-tests-with-mongocryptd-task" + commands: + - func: "start-csfle-servers" + - func: "start-mongo-orchestration" + - func: "run-csfle-tests-with-mongocryptd" + + - name: "test-gcp-kms-task" + commands: + - command: shell.exec + type: "setup" + params: + working_dir: "src" + shell: "bash" + script: | + ${PREPARE_SHELL} + echo "Copying 
files ... begin" + export GCPKMS_GCLOUD=${GCPKMS_GCLOUD} + export GCPKMS_PROJECT=${GCPKMS_PROJECT} + export GCPKMS_ZONE=${GCPKMS_ZONE} + export GCPKMS_INSTANCENAME=${GCPKMS_INSTANCENAME} + GCPKMS_SRC=$ARCHIVE_FILE_PATH GCPKMS_DST=$GCPKMS_INSTANCENAME: $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/copy-file.sh + echo "Copying files ... end" + echo "Untarring file ... begin" + GCPKMS_CMD="tar xf $ARCHIVE_FILE_NAME" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh + echo "Untarring file ... end" + - command: shell.exec + type: "test" + params: + working_dir: "src" + shell: "bash" + script: | + ${PREPARE_SHELL} + export GCPKMS_GCLOUD=${GCPKMS_GCLOUD} + export GCPKMS_PROJECT=${GCPKMS_PROJECT} + export GCPKMS_ZONE=${GCPKMS_ZONE} + export GCPKMS_INSTANCENAME=${GCPKMS_INSTANCENAME} + GCPKMS_CMD="MONGODB_URI=mongodb://localhost:27017 PROVIDER=gcp ./.evergreen/run-fle-on-demand-credential-test.sh" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh + + - name: "test-azure-kms-task" + # Might exceed 1 hour of execution. + exec_timeout_secs: 7200 + commands: + - command: shell.exec + type: "setup" + params: + working_dir: "src" + shell: "bash" + script: | + ${PREPARE_SHELL} + echo "Copying files ... begin" + export AZUREKMS_RESOURCEGROUP=${testazurekms_resourcegroup} + export AZUREKMS_VMNAME=${AZUREKMS_VMNAME} + export AZUREKMS_PRIVATEKEYPATH=/tmp/testazurekms_privatekey + tar --exclude-vcs -czf $ARCHIVE_FILE_PATH . + AZUREKMS_SRC=$ARCHIVE_FILE_PATH AZUREKMS_DST="~/" $DRIVERS_TOOLS/.evergreen/csfle/azurekms/copy-file.sh + echo "Copying files ... end" + echo "Untarring file ... begin" + AZUREKMS_CMD="tar xf $ARCHIVE_FILE_NAME" $DRIVERS_TOOLS/.evergreen/csfle/azurekms/run-command.sh + echo "Untarring file ... 
end" + - command: shell.exec + type: "test" + params: + working_dir: "src" + shell: "bash" + script: | + ${PREPARE_SHELL} + export AZUREKMS_RESOURCEGROUP=${testazurekms_resourcegroup} + export AZUREKMS_VMNAME=${AZUREKMS_VMNAME} + export AZUREKMS_PRIVATEKEYPATH=/tmp/testazurekms_privatekey + AZUREKMS_CMD="GRADLE_RO_DEP_CACHE='/home/azureuser/build/gradle-cache' MONGODB_URI=mongodb://localhost:27017 PROVIDER=azure AZUREKMS_KEY_VAULT_ENDPOINT=${testazurekms_keyvaultendpoint} AZUREKMS_KEY_NAME=${testazurekms_keyname} ./.evergreen/run-fle-on-demand-credential-test.sh" $DRIVERS_TOOLS/.evergreen/csfle/azurekms/run-command.sh + + - name: "test-socks5-task" + tags: [ ] + commands: + - func: "start-mongo-orchestration" + vars: + VERSION: "latest" + TOPOLOGY: "replica_set" + - func: "run-socks5-tests" + + - name: "graalvm-native-image-app-task" + commands: + - func: "start-mongo-orchestration" + - func: "run-graalvm-native-image-app" axes: - - id: version - display_name: MongoDB Version + - id: "version" + display_name: "MongoDB Version" values: - id: "latest" display_name: "latest" variables: - VERSION: "latest" - - id: "4.0" - display_name: "4.0" + VERSION: "latest" + - id: "8.0" + display_name: "8.0" variables: - VERSION: "4.0" - - id: "3.6" - display_name: "3.6" + VERSION: "8.0" + - id: "7.0" + display_name: "7.0" variables: - VERSION: "3.6" - - id: "3.4" - display_name: "3.4" + VERSION: "7.0" + - id: "6.0" + display_name: "6.0" variables: - VERSION: "3.4" - - id: "3.2" - display_name: "3.2" + VERSION: "6.0" + - id: "5.0" + display_name: "5.0" variables: - VERSION: "3.2" - - id: "3.0" - display_name: "3.0" + VERSION: "5.0" + - id: "4.4" + display_name: "4.4" variables: - VERSION: "3.0" - - id: "2.6" - display_name: "2.6" + VERSION: "4.4" + - id: "4.2" + display_name: "4.2" variables: - VERSION: "2.6" - - id: os - display_name: OS + VERSION: "4.2" + + - id: "os" + display_name: "OS" values: - id: "linux" display_name: "Linux" - run_on: *hosts + run_on: "rhel80-small" + - 
id: "ubuntu" + display_name: "Ubuntu" + run_on: "ubuntu2004-small" - - id: topology - display_name: Topology + - id: "topology" + display_name: "Topology" values: - id: "standalone" - display_name: Standalone + display_name: "Standalone" variables: - TOPOLOGY: "server" + TOPOLOGY: "server" - id: "replicaset" - display_name: Replica Set + display_name: "Replica Set" variables: - TOPOLOGY: "replica_set" + TOPOLOGY: "replica_set" - id: "sharded-cluster" - display_name: Sharded Cluster + display_name: "Sharded Cluster" variables: - TOPOLOGY: "sharded_cluster" - - id: auth - display_name: Authentication + TOPOLOGY: "sharded_cluster" + + - id: "auth" + display_name: "Authentication" values: - id: "auth" - display_name: Auth + display_name: "Auth" variables: - AUTH: "auth" + AUTH: "auth" - id: "noauth" - display_name: NoAuth + display_name: "NoAuth" variables: - AUTH: "noauth" - - id: ssl - display_name: SSL + AUTH: "noauth" + + - id: "socks-auth" + display_name: "Socks Proxy Authentication" + values: + - id: "auth" + display_name: "Auth" + variables: + SOCKS_AUTH: "auth" + - id: "noauth" + display_name: "NoAuth" + variables: + SOCKS_AUTH: "noauth" + + - id: "ssl" + display_name: "SSL" values: - id: "ssl" - display_name: SSL + display_name: "SSL" variables: - SSL: "ssl" + SSL: "ssl" - id: "nossl" - display_name: NoSSL + display_name: "NoSSL" variables: - SSL: "nossl" - - id: compressor - display_name: Compressor + SSL: "nossl" + + - id: "async-transport" + display_name: Async Transport + values: + - id: "netty" + display_name: Netty + variables: + ASYNC_TRANSPORT: "netty" + + - id: "netty-ssl-provider" + display_name: "Netty TLS/SSL protocol provider" + values: + - id: "jdk" + display_name: "JDK" + variables: + NETTY_SSL_PROVIDER: "JDK" + - id: "openssl" + display_name: "OpenSSL" + variables: + NETTY_SSL_PROVIDER: "OPENSSL" + + - id: "compressor" + display_name: "Compressor" values: - id: "snappy" - display_name: Snappy + display_name: "Snappy" variables: - COMPRESSOR: 
"snappy" + COMPRESSOR: "snappy" - id: "zlib" - display_name: Zlib + display_name: "Zlib" variables: - COMPRESSOR: "zlib" - - id: jdk - display_name: JDK + COMPRESSOR: "zlib" + - id: "zstd" + display_name: "Zstd" + variables: + COMPRESSOR: "zstd" + + - id: "jdk" + display_name: "JDK" values: - - id: "jdk9" - display_name: JDK9 + - id: "jdk21" + display_name: "JDK21" + variables: + JAVA_VERSION: "21" + - id: "jdk17" + display_name: "JDK17" variables: - JDK: "jdk9" + JAVA_VERSION: "17" + - id: "jdk11" + display_name: "JDK11" + variables: + JAVA_VERSION: "11" - id: "jdk8" - display_name: JDK8 + display_name: "JDK8" + variables: + JAVA_VERSION: "8" + + - id: "scala" + display_name: "SCALA" + values: + - id: "2.11" + display_name: "Scala 2.11" + variables: + SCALA: "2.11" + - id: "2.12" + display_name: "Scala 2.12" + variables: + SCALA: "2.12" + - id: "2.13" + display_name: "Scala 2.13" + variables: + SCALA: "2.13" + + - id: "api-version" + display_name: "API Version" + values: + - id: "required" + display_name: "API Version Required" + variables: + REQUIRE_API_VERSION: "true" + + - id: "gssapi-login-context-name" + display_name: "GSSAPI Login Context Name" + values: + - id: "standard" + display_name: "standard" + variables: + LOGIN_CONTEXT_NAME: "com.sun.security.jgss.krb5.initiate" + - id: "fallback" + display_name: "fallback" variables: - JDK: "jdk8" - - id: "jdk7" - display_name: JDK7 + LOGIN_CONTEXT_NAME: "com.sun.security.jgss.initiate" + + - id: "aws-credential-provider" + display_name: "AWS Credential Provider" + values: + - id: "aws_sdk_v2" + display_name: "AWS SDK V2" variables: - JDK: "jdk7" - - id: "jdk6" - display_name: JDK6 + AWS_CREDENTIAL_PROVIDER: "awsSdkV2" + - id: "aws_sdk_v1" + display_name: "AWS SDK V1" variables: - JDK: "jdk6" + AWS_CREDENTIAL_PROVIDER: "awsSdkV1" + - id: "built_in" + display_name: "Built-In" + variables: + AWS_CREDENTIAL_PROVIDER: "builtIn" + +task_groups: + - name: "atlas-deployed-task-group" + max_hosts: -1 + 
setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - command: subprocess.exec + type: "setup" + params: + working_dir: "src" + binary: bash + add_expansions_to_env: true + env: + MONGODB_VERSION: "8.0" + args: + - ${DRIVERS_TOOLS}/.evergreen/atlas/setup-atlas-cluster.sh + - command: expansions.update + params: + file: src/atlas-expansion.yml + teardown_group: + - command: subprocess.exec + type: "setup" + params: + working_dir: "src" + binary: bash + add_expansions_to_env: true + args: + - ${DRIVERS_TOOLS}/.evergreen/atlas/teardown-atlas-cluster.sh + tasks: + - "atlas-search-index-management-task" + - "aws-lambda-deployed-task" + + - name: "test-gcp-kms-task-group" + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 # 30 minutes + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" + - func: "gradle-cache" + - func: "create-archive-tar-file" + - command: shell.exec + params: + shell: "bash" + script: | + ${PREPARE_SHELL} + echo '${testgcpkms_key_file}' > /tmp/testgcpkms_key_file.json + export GCPKMS_KEYFILE=/tmp/testgcpkms_key_file.json + export GCPKMS_DRIVERS_TOOLS=$DRIVERS_TOOLS + export GCPKMS_SERVICEACCOUNT="${testgcpkms_service_account}" + export GCPKMS_MACHINETYPE="e2-standard-4" + $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/create-and-setup-instance.sh + # Load the GCPKMS_GCLOUD, GCPKMS_INSTANCE, GCPKMS_REGION, and GCPKMS_ZONE expansions. 
+ - command: expansions.update + params: + file: testgcpkms-expansions.yml + teardown_group: + - command: shell.exec + params: + shell: "bash" + script: | + ${PREPARE_SHELL} + export GCPKMS_GCLOUD=${GCPKMS_GCLOUD} + export GCPKMS_PROJECT=${GCPKMS_PROJECT} + export GCPKMS_ZONE=${GCPKMS_ZONE} + export GCPKMS_INSTANCENAME=${GCPKMS_INSTANCENAME} + $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/delete-instance.sh || true + tasks: + - "test-gcp-kms-task" + + - name: "test-azure-kms-task-group" + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 # 30 minutes + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" + - func: "gradle-cache" + - func: "create-archive-tar-file" + - func: "assume-aws-test-secrets-role" + - command: shell.exec + params: + shell: "bash" + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + script: | + ${PREPARE_SHELL} + echo '${testazurekms_publickey}' > /tmp/testazurekms_publickey + echo '${testazurekms_privatekey}' > /tmp/testazurekms_privatekey + # Set 600 permissions on private key file. Otherwise ssh / scp may error with permissions "are too open". + chmod 600 /tmp/testazurekms_privatekey + export AZUREKMS_CLIENTID=${testazurekms_clientid} + export AZUREKMS_TENANTID=${testazurekms_tenantid} + export AZUREKMS_SECRET=${testazurekms_secret} + export AZUREKMS_DRIVERS_TOOLS=$DRIVERS_TOOLS + export AZUREKMS_RESOURCEGROUP=${testazurekms_resourcegroup} + export AZUREKMS_PUBLICKEYPATH=/tmp/testazurekms_publickey + export AZUREKMS_PRIVATEKEYPATH=/tmp/testazurekms_privatekey + export AZUREKMS_SCOPE=${testazurekms_scope} + export AZUREKMS_VMNAME_PREFIX=JAVADRIVER + export AZUREKMS_MACHINESIZE="Standard_DS3_v2" + $DRIVERS_TOOLS/.evergreen/csfle/azurekms/create-and-setup-vm.sh + - command: expansions.update + params: + file: testazurekms-expansions.yml + teardown_group: + # Load expansions again. 
The setup task may have failed before running `expansions.update`. + - command: expansions.update + params: + file: testazurekms-expansions.yml + - command: shell.exec + params: + shell: "bash" + script: | + ${PREPARE_SHELL} + export AZUREKMS_VMNAME=${AZUREKMS_VMNAME} + export AZUREKMS_RESOURCEGROUP=${testazurekms_resourcegroup} + $DRIVERS_TOOLS/.evergreen/csfle/azurekms/delete-vm.sh + tasks: + - "test-azure-kms-task" + + - name: "test-oidc-task-group" + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + teardown_task_can_fail_task: true + teardown_task_timeout_secs: 1800 + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" + - func: "assume-aws-test-secrets-role" + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/setup.sh + teardown_task: + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/teardown.sh + tasks: + - "oidc-auth-test-task" + + - name: "test-oidc-azure-task-group" + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + teardown_task_can_fail_task: true + teardown_task_timeout_secs: 1800 + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" + - func: "gradle-cache" + - func: "create-archive-tar-file" + - func: "assume-aws-test-secrets-role" + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + env: + AZUREOIDC_VMNAME_PREFIX: "JAVA_DRIVER" + AZUREKMS_MACHINESIZE: "Standard_DS3_v2" + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/azure/create-and-setup-vm.sh + teardown_task: + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/azure/delete-vm.sh + tasks: + - 
"oidc-auth-test-azure-task" + + - name: "test-oidc-gcp-task-group" + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + teardown_task_can_fail_task: true + teardown_task_timeout_secs: 1800 + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" + - func: "gradle-cache" + - func: "create-archive-tar-file" + - func: "assume-aws-test-secrets-role" + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + env: + GCPOIDC_VMNAME_PREFIX: "JAVA_DRIVER" + GCPKMS_MACHINETYPE: "e2-standard-4" + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/gcp/setup.sh + teardown_task: + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/gcp/teardown.sh + tasks: + - "oidc-auth-test-gcp-task" + + - name: "test-oidc-k8s-eks-task-group" + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + teardown_task_can_fail_task: true + teardown_task_timeout_secs: 1800 + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" + - func: "gradle-cache" + - func: "create-archive-tar-file" + - func: "assume-aws-test-secrets-role" + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/setup.sh + teardown_task: + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/teardown.sh + tasks: + - "oidc-auth-test-k8s-eks-task" + + - name: "test-oidc-k8s-aks-task-group" + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + teardown_task_can_fail_task: true + teardown_task_timeout_secs: 1800 + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" + - func: "gradle-cache" + - func: 
"create-archive-tar-file" + - func: "assume-aws-test-secrets-role" + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/setup.sh + teardown_task: + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/teardown.sh + tasks: + - "oidc-auth-test-k8s-aks-task" + + - name: "test-oidc-k8s-gke-task-group" + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + teardown_task_can_fail_task: true + teardown_task_timeout_secs: 1800 + setup_group: + - func: "fetch-source" + - func: "prepare-resources" + - func: "fix-absolute-paths" + - func: "gradle-cache" + - func: "create-archive-tar-file" + - func: "assume-aws-test-secrets-role" + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/setup.sh + teardown_task: + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/teardown.sh + tasks: + - "oidc-auth-test-k8s-gke-task" buildvariants: -# Test packaging and other release related routines -- name: static-checks - display_name: "Static Checks" - run_on: *hosts - tasks: - - name: "static-analysis" - -- matrix_name: "tests-zlib-compression" - matrix_spec: { compressor : "zlib", auth: "noauth", ssl: "nossl", jdk: "jdk6", version: ["3.6", "4.0", "latest"], topology: "standalone", os: "linux" } - display_name: "${version} ${compressor} ${topology} ${auth} ${ssl} ${jdk} ${os} " - tags: ["tests-variant"] - tasks: - - name: "test" - -- matrix_name: "tests-snappy-compression" - matrix_spec: { compressor : "snappy", auth: "noauth", ssl: "nossl", jdk: "jdk7", version: ["3.4", "3.6", "4.0", "latest"], topology: "standalone", os: "linux" } - display_name: "${version} 
${compressor} ${topology} ${auth} ${ssl} ${jdk} ${os} " - tags: ["tests-variant"] - tasks: - - name: "test" - -- matrix_name: "tests-jdk6-unsecure" - matrix_spec: { auth: "noauth", ssl: "nossl", jdk: "jdk6", version: "*", topology: "*", os: "*" } - display_name: "${version} ${topology} ${auth} ${ssl} ${jdk} ${os} " - tags: ["tests-variant"] - tasks: - - name: "test" - -- matrix_name: "tests-jdk6-secure" - matrix_spec: { auth: "auth", ssl: "ssl", jdk: "jdk6", version: "*", topology: "*", os: "*" } - exclude_spec: { auth: "auth", ssl: "ssl", jdk: "jdk6", version: ["4.0", "latest"], topology: "*", os: "*" } - display_name: "${version} ${topology} ${auth} ${ssl} ${jdk} ${os} " - tags: ["tests-variant"] - tasks: - - name: "test" - -- matrix_name: "tests-jdk8-secure" - matrix_spec: { auth: "auth", ssl: "ssl", jdk: "jdk8", version: ["4.0", "latest"], topology: "*", os: "*" } - display_name: "${version} ${topology} ${auth} ${ssl} ${jdk} ${os} " - tags: ["tests-variant"] - tasks: - - name: "test" - -- matrix_name: "tests-jdk7-secure" - matrix_spec: { auth: "auth", ssl: "ssl", jdk: "jdk7", version: "3.6", topology: "standalone", os: "linux" } - display_name: "${version} ${topology} ${auth} ${ssl} ${jdk} ${os} " - tags: ["tests-variant"] - tasks: - - name: "test" - -- matrix_name: "tests-socket" - matrix_spec: { auth: "*", ssl: "nossl", jdk: "jdk8", version: ["4.0"], topology: "standalone", os: "linux" } - display_name: "Socket: ${version} ${topology} ${auth} ${jdk} ${os} " - tags: ["tests-socket-variant"] - tasks: - - name: "socket-test" - -- matrix_name: "tests-socket-snappy-compression" - matrix_spec: { compressor : "snappy", auth: "noauth", ssl: "nossl", jdk: "jdk7", version: ["4.0"], topology: "standalone", os: "linux" } - display_name: "Socket: ${version} ${compressor} ${topology} ${auth} ${jdk} ${os} " - tags: ["tests-socket-variant"] - tasks: - - name: "socket-test" - -- matrix_name: "test-gssapi" - matrix_spec: { jdk: "*", os: "linux" } - display_name: "GSSAPI 
(Kerberos) Auth test ${jdk} ${os} " - tags: ["test-gssapi-variant"] - tasks: - - name: "gssapi-auth-test" - -- name: plain-auth-test - display_name: "PLAIN (LDAP) Auth test" - run_on: *hosts - tasks: - - name: "plain-auth-test" - -- name: atlas-test - display_name: "Atlas test" - run_on: *hosts - tasks: - - name: "atlas-test" - -- name: publish-snapshot - display_name: "Publish Snapshot" - run_on: *hosts - tasks: - - name: "publish-snapshot" + # + # Name based variants + # + + # Test packaging and other release related routines + - name: "static-checks" + display_name: "Static Checks" + run_on: rhel80-small + tasks: + - name: "static-analysis-task" + + - name: "perf" + display_name: "Performance Tests" + tags: [ "perf-variant" ] + run_on: rhel90-dbx-perf-large + tasks: + - name: "perf-task" + - name: "perf-netty-task" + + - name: plain-auth-test + display_name: "PLAIN (LDAP) Auth test" + run_on: rhel80-small + tasks: + - name: "plain-auth-test-task" + + - name: "oidc-auth-test" + display_name: "OIDC Auth" + run_on: ubuntu2204-small + tasks: + - name: "test-oidc-task-group" + batchtime: 20160 # 14 days + + - name: "test-oidc-azure-variant" + display_name: "OIDC Auth Azure" + run_on: ubuntu2204-small + tasks: + - name: "test-oidc-azure-task-group" + batchtime: 20160 # 14 days + + - name: "test-oidc-gcp-variant" + display_name: "OIDC Auth GCP" + run_on: ubuntu2204-small + tasks: + - name: "test-oidc-gcp-task-group" + batchtime: 20160 # 14 days + + - name: "test-oidc-k8s-variant" + display_name: "OIDC Auth K8S" + run_on: ubuntu2204-small + tasks: + - name: "test-oidc-k8s-eks-task-group" + batchtime: 20160 # 14 days + - name: "test-oidc-k8s-aks-task-group" + batchtime: 20160 # 14 days + - name: "test-oidc-k8s-gke-task-group" + batchtime: 20160 # 14 days + + - name: "atlas-search-variant" + display_name: "Atlas Tests" + run_on: rhel80-small + tasks: + - name: "atlas-deployed-task-group" + - name: "atlas-search-task" + - name: "atlas-connectivity-task" + - name: 
"atlas-x509-auth-test-task" + + - name: "reactive-streams-tck-test" + display_name: "Reactive Streams TCK tests" + run_on: rhel80-small + tasks: + - name: "reactive-streams-tck-test-task" + + - name: "publish-snapshot" + display_name: "Publish Snapshot" + run_on: "ubuntu2204-small" + tasks: + - name: "publish-snapshot-task" + + - name: "publish-release" + display_name: "Publish Release" + run_on: "ubuntu2204-small" + tasks: + - name: "publish-release-task" + + - name: "test-gcp-kms-variant" + display_name: "GCP KMS" + run_on: + - ubuntu2204-small + tasks: + - name: "test-gcp-kms-task-group" + batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README + + - name: "test-azure-kms-variant" + display_name: "Azure KMS" + run_on: + - ubuntu2204-small + tasks: + - name: "test-azure-kms-task-group" + batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README + + # + # Matrix based variants + # + + - matrix_name: "tests-zlib-compression" + matrix_spec: { compressor: "zlib", auth: "noauth", ssl: "nossl", jdk: "jdk8", version: "*", topology: "standalone", os: "linux" } + display_name: "${version} ${compressor} ${topology} ${auth} ${ssl} ${jdk} ${os} " + tags: [ "tests-variant" ] + tasks: + - name: "test-sync-task" + - name: "test-reactive-task" + - name: "test-core-task" + - name: "test-legacy-task" + + - matrix_name: "tests-snappy-compression" + matrix_spec: { compressor: "snappy", auth: "noauth", ssl: "nossl", jdk: "jdk8", version: "*", topology: "standalone", os: "linux" } + display_name: "${version} ${compressor} ${topology} ${auth} ${ssl} ${jdk} ${os} " + tags: [ "tests-variant" ] + tasks: + - name: "test-sync-task" + - name: "test-reactive-task" + - name: "test-core-task" + - name: "test-legacy-task" + + - matrix_name: "tests-zstd-compression" + matrix_spec: { compressor: "zstd", auth: "noauth", ssl: "nossl", jdk: "jdk8", + version: [ "4.2", "4.4", "5.0", "6.0", "7.0", "8.0", "latest" ], + topology: "standalone", os: 
"linux" } + display_name: "${version} ${compressor} ${topology} ${auth} ${ssl} ${jdk} ${os} " + tags: [ "tests-variant" ] + tasks: + - name: "test-sync-task" + - name: "test-reactive-task" + - name: "test-core-task" + - name: "test-legacy-task" + + - matrix_name: "tests-unit" + matrix_spec: { jdk: [ "jdk8", "jdk11", "jdk17", "jdk21" ], os: "linux" } + display_name: "${jdk} ${os} Unit" + tags: [ "tests-variant" ] + tasks: + - name: "test-bson-and-crypt-task" + + - matrix_name: "tests-jdk8-unsecure" + matrix_spec: { auth: "noauth", ssl: "nossl", jdk: "jdk8", version: [ "4.2", "4.4", "5.0", "6.0", "7.0", "8.0", "latest" ], + topology: "*", os: "linux" } + display_name: "${version} ${topology} ${auth} ${ssl} ${jdk} ${os} " + tags: [ "tests-variant" ] + tasks: + - name: "test-sync-task" + - name: "test-reactive-task" + - name: "test-core-task" + - name: "test-legacy-task" + + - matrix_name: "tests-jdk-secure" + matrix_spec: { auth: "auth", ssl: "ssl", jdk: [ "jdk8", "jdk17", "jdk21" ], + version: [ "4.2", "4.4", "5.0", "6.0", "7.0", "8.0", "latest" ], + topology: "*", os: "linux" } + display_name: "${version} ${topology} ${auth} ${ssl} ${jdk} ${os} " + tags: [ "tests-variant" ] + tasks: + - name: "test-sync-task" + - name: "test-reactive-task" + - name: "test-core-task" + - name: "test-legacy-task" + + - matrix_name: "tests-jdk-secure-jdk11" + matrix_spec: { auth: "auth", ssl: "ssl", jdk: [ "jdk11" ], version: [ "7.0" ], topology: [ "replicaset" ], os: "linux" } + display_name: "${version} ${topology} ${auth} ${ssl} ${jdk} ${os} " + tags: [ "tests-variant" ] + tasks: + - name: "test-sync-task" + - name: "test-reactive-task" + - name: "test-core-task" + - name: "test-legacy-task" + + - matrix_name: "tests-require-api-version" + matrix_spec: { api-version: "required", auth: "auth", ssl: "nossl", jdk: [ "jdk21" ], version: [ "5.0", "6.0", "7.0", "8.0", "latest" ], + topology: "standalone", os: "linux" } + display_name: "${version} ${topology} ${api-version} " + tags: [ 
"tests-variant" ] + tasks: + - name: "test-sync-task" + - name: "test-reactive-task" + - name: "test-core-task" + - name: "test-legacy-task" + + - matrix_name: "tests-load-balancer-secure" + matrix_spec: { auth: "auth", ssl: "ssl", jdk: [ "jdk21" ], version: [ "5.0", "6.0", "7.0", "8.0", "latest" ], topology: "sharded-cluster", + os: "ubuntu" } + display_name: "Load Balancer ${version} ${auth} ${ssl} ${jdk} ${os}" + tasks: + - name: "load-balancer-test-task" + + - matrix_name: "tests-slow-task" + matrix_spec: { auth: "noauth", ssl: "nossl", jdk: "jdk21", version: [ "7.0" ], topology: "standalone", os: "linux" } + display_name: "Slow: ${version} ${topology} ${ssl} ${jdk} ${os} " + tags: [ "tests-slow-variant" ] + tasks: + - name: "slow-test-task" + + - matrix_name: "tests-socket-task" + matrix_spec: { auth: "*", ssl: "nossl", jdk: "jdk8", version: [ "4.2" ], topology: "standalone", os: "linux" } + display_name: "Socket: ${version} ${topology} ${auth} ${jdk} ${os} " + tags: [ "tests-socket-variant" ] + tasks: + - name: "socket-test-task" + + - matrix_name: "tests-netty" + matrix_spec: { auth: "noauth", ssl: "*", jdk: "jdk8", version: [ "7.0" ], topology: "replicaset", os: "linux", + async-transport: "netty" } + display_name: "Netty: ${version} ${topology} ${ssl} ${auth} ${jdk} ${os} " + tags: [ "tests-netty-variant" ] + tasks: + - name: "test-reactive-task" + - name: "test-core-task" + + - matrix_name: "tests-netty-ssl-provider" + matrix_spec: { auth: "auth", ssl: "ssl", jdk: "jdk8", version: [ "7.0" ], topology: "replicaset", os: "linux", + async-transport: "netty", netty-ssl-provider: "*" } + display_name: "Netty SSL provider: ${version} ${topology} ${ssl} SslProvider.${netty-ssl-provider} ${auth} ${jdk} ${os} " + tags: [ "tests-netty-variant" ] + tasks: + - name: "test-reactive-task" + - name: "test-core-task" + + - matrix_name: "tests-socket-snappy-compression" + matrix_spec: { compressor: "snappy", auth: "noauth", ssl: "nossl", jdk: "jdk8", version: [ "4.2" ], 
topology: "standalone", os: "linux" } + display_name: "Socket: ${version} ${compressor} ${topology} ${auth} ${jdk} ${os} " + tags: [ "tests-socket-variant" ] + tasks: + - name: "socket-test-task" + + - matrix_name: "tests-socket-zstd-compression" + matrix_spec: { compressor: "zstd", auth: "noauth", ssl: "nossl", jdk: "jdk8", version: [ "4.2" ], topology: "standalone", os: "linux" } + display_name: "Socket: ${version} ${compressor} ${topology} ${auth} ${jdk} ${os} " + tags: [ "tests-socket-variant" ] + tasks: + - name: "socket-test-task" + + - matrix_name: "test-gssapi" + matrix_spec: { jdk: [ "jdk8", "jdk17", "jdk21" ], os: "linux", gssapi-login-context-name: "*" } + display_name: "GSSAPI (Kerberos) Auth test ${jdk} ${os} ${gssapi-login-context-name}" + tags: [ "test-gssapi-variant" ] + tasks: + - name: "gssapi-auth-test-task" + + - matrix_name: "aws-auth-test" + matrix_spec: { ssl: "nossl", jdk: [ "jdk8", "jdk17", "jdk21" ], version: [ "4.4", "5.0", "6.0", "7.0", "8.0", "latest" ], os: "ubuntu", + aws-credential-provider: "*" } + display_name: "MONGODB-AWS Basic Auth test ${version} ${jdk} ${aws-credential-provider}" + run_on: "ubuntu2204-small" + tasks: + - name: "aws-auth-test-with-regular-aws-credentials-task" + + - matrix_name: "aws-ec2-auth-test" + matrix_spec: { ssl: "nossl", jdk: [ "jdk21" ], version: [ "7.0" ], os: "ubuntu", aws-credential-provider: "*" } + display_name: "MONGODB-AWS Advanced Auth test ${version} ${jdk} ${aws-credential-provider}" + run_on: "ubuntu2204-small" + tasks: + - name: "aws-auth-test-with-aws-EC2-credentials-task" + - name: "aws-auth-test-with-assume-role-credentials-task" + - name: "aws-auth-test-with-aws-credentials-as-environment-variables-task" + - name: "aws-auth-test-with-aws-credentials-and-session-token-as-environment-variables-task" + - name: "aws-auth-test-with-web-identity-credentials-task" + + - matrix_name: "accept-api-version-2-test" + matrix_spec: { ssl: "nossl", auth: "noauth", jdk: "jdk21", version: [ "5.0", 
"6.0", "7.0", "8.0", "latest" ], topology: "standalone", + os: "linux" } + display_name: "Accept API Version 2 ${version}" + run_on: "ubuntu2204-small" + tasks: + - name: "accept-api-version-2-test-task" + + - matrix_name: "ocsp-test" + matrix_spec: { auth: "noauth", ssl: "ssl", jdk: "jdk21", version: [ "4.4", "5.0", "6.0", "7.0", "8.0", "latest" ], os: "ubuntu" } + display_name: "OCSP test ${version} ${os}" + tasks: + - name: ".ocsp" + + - matrix_name: "scala-tests" + matrix_spec: { auth: "noauth", ssl: "nossl", jdk: [ "jdk8", "jdk17", "jdk21" ], version: [ "7.0" ], topology: "replicaset", + scala: "*", os: "ubuntu" } + display_name: "${scala} ${jdk} ${version} ${topology} ${os}" + tags: [ "test-scala-variant" ] + tasks: + - name: "scala-test-task" + + - matrix_name: "kotlin-tests" + matrix_spec: { auth: "noauth", ssl: "nossl", jdk: [ "jdk8", "jdk17", "jdk21" ], version: [ "7.0" ], topology: "replicaset", os: "ubuntu" } + display_name: "Kotlin: ${jdk} ${version} ${topology} ${os}" + tags: [ "test-kotlin-variant" ] + tasks: + - name: "kotlin-test-task" + + - matrix_name: "kms-tls-test" + matrix_spec: { os: "linux", version: [ "5.0" ], topology: [ "standalone" ] } + display_name: "CSFLE KMS TLS" + tasks: + - name: ".kms-tls" + + - matrix_name: "csfle-aws-from-environment-test" + matrix_spec: { os: "linux", version: [ "5.0" ], topology: [ "standalone" ] } + display_name: "CSFLE AWS From Environment" + tasks: + - name: ".csfle-aws-from-environment" + + - matrix_name: "csfle-tests-with-mongocryptd" + matrix_spec: { os: "linux", version: [ "4.2", "4.4", "5.0", "6.0", "7.0", "8.0", "latest" ], topology: [ "replicaset" ] } + display_name: "CSFLE with mongocryptd: ${version}" + tasks: + - name: "csfle-tests-with-mongocryptd-task" + + - matrix_name: "socks5-tests" + matrix_spec: { os: "linux", ssl: [ "nossl", "ssl" ], version: [ "latest" ], topology: [ "replicaset" ], socks-auth: [ "auth", "noauth" ] } + display_name: "SOCKS5 proxy ${socks-auth} : ${version} ${topology} 
${ssl} ${jdk} ${os}" + tasks: + - name: "test-socks5-task" + + - matrix_name: "graalvm-native-image-app" + matrix_spec: { version: [ "7.0" ], topology: [ "replicaset" ], auth: [ "noauth" ], ssl: [ "nossl" ], + jdk: [ "jdk21" ], os: [ "linux" ] } + display_name: "GraalVM native image app: ${version} ${topology} ${auth} ${ssl} ${jdk} ${os}" + tasks: + - name: "graalvm-native-image-app-task" diff --git a/.evergreen/compile.sh b/.evergreen/compile.sh deleted file mode 100755 index fca76701503..00000000000 --- a/.evergreen/compile.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -set -o xtrace # Write all commands first to stderr -set -o errexit # Exit the script with error if any of the commands fail - -############################################ -# Main Program # -############################################ - -echo "Compiling java driver with jdk9" - -# We always compile with the latest version of java -export JAVA_HOME="/opt/java/jdk9" -./gradlew -version -./gradlew -PxmlReports.enabled=true --info -x test clean check jar testClasses docs diff --git a/.evergreen/gradle-cache.sh b/.evergreen/gradle-cache.sh new file mode 100755 index 00000000000..2ffa72908fc --- /dev/null +++ b/.evergreen/gradle-cache.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE[0]:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +echo "Enable caching" +echo "org.gradle.caching=true" >> gradle.properties +echo "kotlin.caching.enabled=true" >> gradle.properties + +echo "Compiling JVM drivers" +./gradlew -version +./gradlew classes --parallel + +# Copy the Gradle dependency cache to the gradle read only dependency cache directory. 
+if [ -n "$GRADLE_RO_DEP_CACHE" ];then + echo "Copying Gradle dependency cache to $GRADLE_RO_DEP_CACHE" + mkdir -p $GRADLE_RO_DEP_CACHE + + # https://docs.gradle.org/current/userguide/dependency_caching.html#sec:cache-copy + # Gradle suggests removing the "*.lock" files and the `gc.properties` file for saving/restoring cache + cp -r $HOME/.gradle/caches/modules-2 "$GRADLE_RO_DEP_CACHE" + find "$GRADLE_RO_DEP_CACHE" -name "*.lock" -type f | xargs rm -f + find "$GRADLE_RO_DEP_CACHE" -name "gc.properties" -type f | xargs rm -f + + echo "Copied Gradle dependency cache to $GRADLE_RO_DEP_CACHE" +fi diff --git a/.evergreen/prepare-oidc-get-tokens-docker.sh b/.evergreen/prepare-oidc-get-tokens-docker.sh new file mode 100755 index 00000000000..e904d5d2b89 --- /dev/null +++ b/.evergreen/prepare-oidc-get-tokens-docker.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +set -o xtrace +set -o errexit # Exit the script with error if any of the commands fail + +############################################ +# Main Program # +############################################ + +# Supported/used environment variables: +# DRIVERS_TOOLS The path to evergreeen tools +# OIDC_AWS_* Required OIDC_AWS_* env variables must be configured +# +# Environment variables used as output: +# OIDC_TESTS_ENABLED Allows running OIDC tests +# OIDC_TOKEN_DIR The path to generated OIDC AWS tokens +# AWS_WEB_IDENTITY_TOKEN_FILE The path to AWS token for device workflow + +if [ -z ${DRIVERS_TOOLS+x} ]; then + echo "DRIVERS_TOOLS. is not set"; + exit 1 +fi + +if [ -z ${OIDC_AWS_ROLE_ARN+x} ]; then + echo "OIDC_AWS_ROLE_ARN. is not set"; + exit 1 +fi + +if [ -z ${OIDC_AWS_SECRET_ACCESS_KEY+x} ]; then + echo "OIDC_AWS_SECRET_ACCESS_KEY. is not set"; + exit 1 +fi + +if [ -z ${OIDC_AWS_ACCESS_KEY_ID+x} ]; then + echo "OIDC_AWS_ACCESS_KEY_ID. 
is not set"; + exit 1 +fi + +export AWS_ROLE_ARN=${OIDC_AWS_ROLE_ARN} +export AWS_SECRET_ACCESS_KEY=${OIDC_AWS_SECRET_ACCESS_KEY} +export AWS_ACCESS_KEY_ID=${OIDC_AWS_ACCESS_KEY_ID} +export OIDC_FOLDER=${DRIVERS_TOOLS}/.evergreen/auth_oidc +export OIDC_TOKEN_DIR=${OIDC_FOLDER}/test_tokens +export AWS_WEB_IDENTITY_TOKEN_FILE=${OIDC_TOKEN_DIR}/test1 +export OIDC_TESTS_ENABLED=true + +echo "Configuring OIDC server for local authentication tests" + +cd ${OIDC_FOLDER} +DRIVERS_TOOLS=${DRIVERS_TOOLS} ./oidc_get_tokens.sh \ No newline at end of file diff --git a/.evergreen/prepare-oidc-server-docker.sh b/.evergreen/prepare-oidc-server-docker.sh new file mode 100755 index 00000000000..0fcd1ed4194 --- /dev/null +++ b/.evergreen/prepare-oidc-server-docker.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +set -o xtrace +set -o errexit # Exit the script with error if any of the commands fail + +############################################ +# Main Program # +############################################ + +# Supported/used environment variables: +# DRIVERS_TOOLS The path to evergreeen tools +# OIDC_AWS_* OIDC_AWS_* env variables must be configured +# +# Environment variables used as output: +# OIDC_TESTS_ENABLED Allows running OIDC tests +# OIDC_TOKEN_DIR The path to generated tokens +# AWS_WEB_IDENTITY_TOKEN_FILE The path to AWS token for device workflow + +if [ -z ${DRIVERS_TOOLS+x} ]; then + echo "DRIVERS_TOOLS. is not set"; + exit 1 +fi + +if [ -z ${OIDC_AWS_ROLE_ARN+x} ]; then + echo "OIDC_AWS_ROLE_ARN. is not set"; + exit 1 +fi + +if [ -z ${OIDC_AWS_SECRET_ACCESS_KEY+x} ]; then + echo "OIDC_AWS_SECRET_ACCESS_KEY. is not set"; + exit 1 +fi + +if [ -z ${OIDC_AWS_ACCESS_KEY_ID+x} ]; then + echo "OIDC_AWS_ACCESS_KEY_ID. 
is not set"; + exit 1 +fi + +export AWS_ROLE_ARN=${OIDC_AWS_ROLE_ARN} +export AWS_SECRET_ACCESS_KEY=${OIDC_AWS_SECRET_ACCESS_KEY} +export AWS_ACCESS_KEY_ID=${OIDC_AWS_ACCESS_KEY_ID} +export OIDC_FOLDER=${DRIVERS_TOOLS}/.evergreen/auth_oidc +export OIDC_TOKEN_DIR=${OIDC_FOLDER}/test_tokens +export AWS_WEB_IDENTITY_TOKEN_FILE=${OIDC_TOKEN_DIR}/test1 +export OIDC_TESTS_ENABLED=true + +echo "Configuring OIDC server for local authentication tests" + +cd ${OIDC_FOLDER} +DRIVERS_TOOLS=${DRIVERS_TOOLS} ./start_local_server.sh \ No newline at end of file diff --git a/.evergreen/publish.sh b/.evergreen/publish.sh index 1d718369682..d1182c2f42d 100755 --- a/.evergreen/publish.sh +++ b/.evergreen/publish.sh @@ -7,20 +7,25 @@ set -o errexit # Exit the script with error if any of the commands fail ############################################ # Main Program # ############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" -echo ${RING_FILE_GPG_BASE64} | base64 -d > ${PROJECT_DIRECTORY}/secring.gpg - -trap "rm ${PROJECT_DIRECTORY}/secring.gpg; exit" EXIT HUP - -echo "Publishing snapshot with jdk9" - -export JAVA_HOME="/opt/java/jdk9" +RELEASE=${RELEASE:false} export ORG_GRADLE_PROJECT_nexusUsername=${NEXUS_USERNAME} export ORG_GRADLE_PROJECT_nexusPassword=${NEXUS_PASSWORD} -export ORG_GRADLE_PROJECT_signing_keyId=${SIGNING_KEY_ID} -export ORG_GRADLE_PROJECT_signing_password=${SIGNING_PASSWORD} -export ORG_GRADLE_PROJECT_signing_secretKeyRingFile=${PROJECT_DIRECTORY}/secring.gpg +export ORG_GRADLE_PROJECT_signingKey="${SIGNING_KEY}" +export ORG_GRADLE_PROJECT_signingPassword=${SIGNING_PASSWORD} + +if [ "$RELEASE" == "true" ]; then + TASK="publishArchives closeAndReleaseSonatypeStagingRepository" +else + TASK="publishSnapshots" +fi + +SYSTEM_PROPERTIES="-Dorg.gradle.internal.publish.checksums.insecure=true" ./gradlew -version -./gradlew uploadSnapshotArchives +./gradlew ${SYSTEM_PROPERTIES} --stacktrace 
--info ${TASK} # Scala 2.13 is published as result of this gradle execution. +./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info :bson-scala:${TASK} :driver-scala:${TASK} -PscalaVersion=2.12 +./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info :bson-scala:${TASK} :driver-scala:${TASK} -PscalaVersion=2.11 diff --git a/.evergreen/run-atlas-search-index-management-tests.sh b/.evergreen/run-atlas-search-index-management-tests.sh new file mode 100755 index 00000000000..784a9b45a0d --- /dev/null +++ b/.evergreen/run-atlas-search-index-management-tests.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +set -o errexit + +# Supported/used environment variables: +# MONGODB_URI Set the connection to an Atlas cluster + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE[0]:-$0}")" +source "${RELATIVE_DIR_PATH}/setup-env.bash" + +echo "Running Atlas Search tests" +./gradlew -version +./gradlew --stacktrace --info \ + -Dorg.mongodb.test.atlas.search.index.helpers=true \ + -Dorg.mongodb.test.uri=${MONGODB_URI} \ + driver-sync:test --tests AtlasSearchIndexManagementProseTest \ + driver-reactive-streams:test --tests AtlasSearchIndexManagementProseTest \ diff --git a/.evergreen/run-atlas-search-tests.sh b/.evergreen/run-atlas-search-tests.sh new file mode 100755 index 00000000000..01f6bc78b42 --- /dev/null +++ b/.evergreen/run-atlas-search-tests.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +set -o errexit + +# Supported/used environment variables: +# MONGODB_URI Set the connection to an Atlas cluster + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE[0]:-$0}")" +source "${RELATIVE_DIR_PATH}/setup-env.bash" + +echo "Running Atlas Search tests" +./gradlew -version +./gradlew --stacktrace --info \ + -Dorg.mongodb.test.atlas.search=true \ + -Dorg.mongodb.test.uri=${MONGODB_URI} \ + driver-core:test --tests 
AggregatesSearchIntegrationTest \ + --tests AggregatesBinaryVectorSearchIntegrationTest \ + --tests AggregatesSearchTest \ diff --git a/.evergreen/run-connectivity-tests.sh b/.evergreen/run-connectivity-tests.sh index bcfe0800ec7..38ccfaaf763 100755 --- a/.evergreen/run-connectivity-tests.sh +++ b/.evergreen/run-connectivity-tests.sh @@ -1,7 +1,7 @@ #!/bin/bash -# Don't trace since the URI contains a password that shouldn't show up in the logs -set -o errexit # Exit the script with error if any of the commands fail +# Exit the script with error if any of the commands fail +set -o errexit # Supported/used environment variables: # JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java @@ -9,19 +9,15 @@ set -o errexit # Exit the script with error if any of the commands fail # Support arguments: # Pass as many MongoDB URIS as arguments to this script as required -JDK=${JDK:-jdk} - ############################################ # Main Program # ############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. 
"${RELATIVE_DIR_PATH}/setup-env.bash" -echo "Running connectivity tests with ${JDK}" - -# We always compile with the latest version of java -export JAVA_HOME="/opt/java/jdk9" - -./gradlew -version +echo "Running connectivity tests with Java ${JAVA_VERSION}" -for MONGODB_URI in $@; do - ./gradlew -PjdkHome=/opt/java/${JDK} -Dorg.mongodb.test.uri=${MONGODB_URI} --stacktrace --info -Dtest.single=ConnectivityTest --rerun-tasks driver-sync:test -done +./gradlew -PjavaVersion=${JAVA_VERSION} -Dorg.mongodb.test.connectivity.uris="${MONGODB_URIS}" --info --continue \ + driver-sync:test --tests ConnectivityTest \ + driver-legacy:test --tests ConnectivityTest \ + driver-reactive-streams:test --tests ConnectivityTest diff --git a/.evergreen/run-csfle-aws-from-environment.sh b/.evergreen/run-csfle-aws-from-environment.sh new file mode 100755 index 00000000000..a3c7b8fa106 --- /dev/null +++ b/.evergreen/run-csfle-aws-from-environment.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +# Don't trace since the URI contains a password that shouldn't show up in the logs +set -o errexit # Exit the script with error if any of the commands fail + +# Supported/used environment variables: +# MONGODB_URI Set the suggested connection MONGODB_URI (including credentials and topology info) +# AWS_ACCESS_KEY_ID The AWS access key identifier for client-side encryption +# AWS_SECRET_ACCESS_KEY The AWS secret access key for client-side encryption + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +echo "Running CSFLE AWS from environment tests" + +./gradlew -version + +export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} +export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} + +./gradlew --stacktrace --info -Dorg.mongodb.test.uri=${MONGODB_URI} \ + driver-sync:cleanTest driver-sync:test --tests ClientSideEncryptionAwsCredentialFromEnvironmentTest +first=$? 
+echo $first + +./gradlew --stacktrace --info -Dorg.mongodb.test.uri=${MONGODB_URI} \ + driver-reactive-streams:cleanTest driver-reactive-streams:test --tests ClientSideEncryptionAwsCredentialFromEnvironmentTest +second=$? +echo $second + +if [ $first -ne 0 ]; then + exit $first +elif [ $second -ne 0 ]; then + exit $second +else + exit 0 +fi diff --git a/.evergreen/run-csfle-tests-with-mongocryptd.sh b/.evergreen/run-csfle-tests-with-mongocryptd.sh new file mode 100755 index 00000000000..4e320c32178 --- /dev/null +++ b/.evergreen/run-csfle-tests-with-mongocryptd.sh @@ -0,0 +1,68 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +# Supported/used environment variables: +# MONGODB_URI Set the suggested connection MONGODB_URI (including credentials and topology info) +# JAVA_VERSION Set the version of java to be used. Java versions can be set from the java toolchain /opt/java +# AWS_ACCESS_KEY_ID The AWS access key identifier for client-side encryption +# AWS_SECRET_ACCESS_KEY The AWS secret access key for client-side encryption +# AWS_ACCESS_KEY_ID_AWS_KMS_NAMED The AWS access key identifier for client-side encryption's named KMS provider. +# AWS_SECRET_ACCESS_KEY_AWS_KMS_NAMED The AWS secret access key for client-side encryption's named KMS provider. 
+# AWS_TEMP_ACCESS_KEY_ID The temporary AWS access key identifier for client-side encryption +# AWS_TEMP_SECRET_ACCESS_KEY The temporary AWS secret access key for client-side encryption +# AWS_TEMP_SESSION_TOKEN The temporary AWS session token for client-side encryption +# AZURE_TENANT_ID The Azure tenant identifier for client-side encryption +# AZURE_CLIENT_ID The Azure client identifier for client-side encryption +# AZURE_CLIENT_SECRET The Azure client secret for client-side encryption +# GCP_EMAIL The GCP email for client-side encryption +# GCP_PRIVATE_KEY The GCP private key for client-side encryption +# AZUREKMS_KEY_VAULT_ENDPOINT The Azure key vault endpoint for integration tests +# AZUREKMS_KEY_NAME The Azure key name endpoint for integration tests + +MONGODB_URI=${MONGODB_URI:-} + +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +############################################ +# Functions # +############################################ + +provision_ssl () { + # We generate the keystore and truststore on every run with the certs in the drivers-tools repo + if [ ! 
-f client.pkc ]; then + openssl pkcs12 -CAfile ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -export -in ${DRIVERS_TOOLS}/.evergreen/x509gen/client.pem -out client.pkc -password pass:bithere + fi + + cp ${JAVA_HOME}/lib/security/cacerts mongo-truststore + ${JAVA_HOME}/bin/keytool -importcert -trustcacerts -file ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -keystore mongo-truststore -storepass changeit -storetype JKS -noprompt + + # We add extra gradle arguments for SSL + export GRADLE_EXTRA_VARS="-Pssl.enabled=true -Pssl.keyStoreType=pkcs12 -Pssl.keyStore=`pwd`/client.pkc -Pssl.keyStorePassword=bithere -Pssl.trustStoreType=jks -Pssl.trustStore=`pwd`/mongo-truststore -Pssl.trustStorePassword=changeit" +} + +############################################ +# Main Program # +############################################ + +# Set up keystore/truststore regardless, as they are required for testing KMIP +provision_ssl + +echo "Running tests with Java ${JAVA_VERSION}" + +./gradlew -version + +# By not specifying the path to the `crypt_shared` via the `CRYPT_SHARED_LIB_PATH` Java system property, +# we force the driver to start `mongocryptd` instead of loading and using `crypt_shared`. 
+./gradlew -PjavaVersion=${JAVA_VERSION} -Dorg.mongodb.test.uri=${MONGODB_URI} \ + ${GRADLE_EXTRA_VARS} \ + -Dorg.mongodb.test.fle.on.demand.credential.test.failure.enabled=true \ + --stacktrace --info --continue \ + driver-sync:test \ + --tests "*.Client*Encryption*" \ + driver-reactive-streams:test \ + --tests "*.Client*Encryption*" \ + driver-scala:integrationTest \ + --tests "*.Client*Encryption*" diff --git a/.evergreen/run-deployed-lambda-aws-tests.sh b/.evergreen/run-deployed-lambda-aws-tests.sh new file mode 100755 index 00000000000..57c23aa7f0b --- /dev/null +++ b/.evergreen/run-deployed-lambda-aws-tests.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE[0]:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +# compiled outside of lambda workflow. Note "SkipBuild: True" in template.yaml +./gradlew -version +./gradlew --info driver-lambda:shadowJar + +. 
${DRIVERS_TOOLS}/.evergreen/aws_lambda/run-deployed-lambda-aws-tests.sh diff --git a/.evergreen/run-fle-on-demand-credential-test.sh b/.evergreen/run-fle-on-demand-credential-test.sh new file mode 100755 index 00000000000..6445b53c666 --- /dev/null +++ b/.evergreen/run-fle-on-demand-credential-test.sh @@ -0,0 +1,52 @@ +#!/bin/bash + +set -o xtrace +set -o errexit # Exit the script with error if any of the commands fail + +# Supported/used environment variables: +# MONGODB_URI Set the URI, including an optional username/password to use to connect to the server +# PROVIDER Which KMS provider to test (either "gcp" or "azure") +# AZUREKMS_KEY_VAULT_ENDPOINT The Azure key vault endpoint for Azure integration tests +# AZUREKMS_KEY_NAME The Azure key name endpoint for Azure integration tests + +############################################ +# Main Program # +############################################ + +echo "Running ${PROVIDER}} Credential Acquisition Test" + +if ! which java ; then + echo "Installing java..." + sudo apt install openjdk-17-jdk -y +fi + +export PROVIDER=${PROVIDER} + +echo "Running gradle version" +./gradlew -version + +echo "Running gradle classes compile for driver-sync and driver-reactive-streams" +./gradlew --parallel --build-cache --stacktrace --info \ + driver-sync:classes driver-reactive-streams:classes + +echo "Running driver-sync tests" +./gradlew -Dorg.mongodb.test.uri="${MONGODB_URI}" \ + -Dorg.mongodb.test.fle.on.demand.credential.test.success.enabled=true \ + --build-cache --stacktrace --info driver-sync:test --tests ClientSideEncryptionOnDemandCredentialsTest +first=$? +echo $first + +echo "Running driver-reactive-streams tests" +./gradlew -Dorg.mongodb.test.uri="${MONGODB_URI}" \ + -Dorg.mongodb.test.fle.on.demand.credential.test.success.enabled=true \ + --build-cache --stacktrace --info driver-reactive-streams:test --tests ClientSideEncryptionOnDemandCredentialsTest +second=$? 
+echo $second + +if [ $first -ne 0 ]; then + exit $first +elif [ $second -ne 0 ]; then + exit $second +else + exit 0 +fi diff --git a/.evergreen/run-graalvm-native-image-app.sh b/.evergreen/run-graalvm-native-image-app.sh new file mode 100755 index 00000000000..e39b9b3a179 --- /dev/null +++ b/.evergreen/run-graalvm-native-image-app.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Supported/used environment variables: +# MONGODB_URI The connection string to use, including credentials and topology info. +# JAVA_VERSION The Java SE version for Gradle toolchain. + +set -o errexit + +readonly RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE[0]:-$0}")" +source "${RELATIVE_DIR_PATH}/setup-env.bash" + +echo "MONGODB_URI: ${MONGODB_URI}" +echo "JAVA_HOME: ${JAVA_HOME}" +readonly JDK_GRAALVM_VAR_NAME="JDK${JAVA_VERSION}_GRAALVM" +readonly JDK_GRAALVM="${!JDK_GRAALVM_VAR_NAME}" +echo "The JDK distribution for running Gradle is" +echo "$("${JAVA_HOME}"/bin/java --version)" +echo "The Java SE version for the Gradle toolchain is ${JAVA_VERSION}" +echo "The GraalVM JDK distribution expected to be found at \`${JDK_GRAALVM}\` by the Gradle toolchain functionality is" +echo "$("${JDK_GRAALVM}"/bin/java --version)" +echo "The Gradle version is" +./gradlew --version + +echo "Building and running the GraalVM native image app" +./gradlew -PincludeGraalvm -PjavaVersion=${JAVA_VERSION} -Dorg.mongodb.test.uri=${MONGODB_URI} :graalvm-native-image-app:nativeRun diff --git a/.evergreen/run-gssapi-auth-test.sh b/.evergreen/run-gssapi-auth-test.sh index e14595d17c2..aa131daeee0 100755 --- a/.evergreen/run-gssapi-auth-test.sh +++ b/.evergreen/run-gssapi-auth-test.sh @@ -6,40 +6,35 @@ set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: # MONGODB_URI Set the URI, including username/password to use to connect to the server via PLAIN authentication mechanism # JDK Set the version of java to be used. 
Java versions can be set from the java toolchain /opt/java -# "jdk5", "jdk6", "jdk7", "jdk8", "jdk9" +# "jdk5", "jdk6", "jdk7", "jdk8", "jdk9", "jdk11" # KDC The KDC # REALM The realm # KEYTAB_BASE64 The BASE64-encoded keytab # PROJECT_DIRECTORY The project directory - -JDK=${JDK:-jdk} +# LOGIN_CONTEXT_NAME The login context name to use to look up the GSSAPI Subject ############################################ # Main Program # ############################################ - -echo "Running GSSAPI authentication tests" +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" +echo "Running GSSAPI authentication tests with login context name '${LOGIN_CONTEXT_NAME}'" echo ${KEYTAB_BASE64} | base64 -d > ${PROJECT_DIRECTORY}/.evergreen/drivers.keytab trap "rm ${PROJECT_DIRECTORY}/.evergreen/drivers.keytab; exit" EXIT HUP cat << EOF > .evergreen/java.login.drivers.config -com.sun.security.jgss.krb5.initiate { +${LOGIN_CONTEXT_NAME} { com.sun.security.auth.module.Krb5LoginModule required doNotPrompt=true useKeyTab=true keyTab="${PROJECT_DIRECTORY}/.evergreen/drivers.keytab" principal=drivers; }; EOF -echo "Compiling java driver with jdk9" - -# We always compile with the latest version of java -export JAVA_HOME="/opt/java/jdk9" - -echo "Running tests with ${JDK}" +echo "Running tests with Java ${JAVA_VERSION}" ./gradlew -version -./gradlew -PjdkHome=/opt/java/${JDK} --stacktrace --info \ --Dorg.mongodb.test.uri=${MONGODB_URI} \ +./gradlew -PjavaVersion=${JAVA_VERSION} --stacktrace --info \ +-Dorg.mongodb.test.uri=${MONGODB_URI} -Dorg.mongodb.test.gssapi.login.context.name=${LOGIN_CONTEXT_NAME} \ -Pgssapi.enabled=true -Psun.security.krb5.debug=true -Pauth.login.config=file://${PROJECT_DIRECTORY}/.evergreen/java.login.drivers.config \ -Pkrb5.kdc=${KDC} -Pkrb5.realm=${REALM} -Psun.security.krb5.debug=true \ --Dtest.single=GSSAPIAuthenticationSpecification driver-core:test +driver-core:test --tests GSSAPIAuthenticationSpecification --tests 
GSSAPIAuthenticatorSpecification --tests KerberosSubjectProviderTest diff --git a/.evergreen/run-kms-tls-tests.sh b/.evergreen/run-kms-tls-tests.sh new file mode 100755 index 00000000000..df3a38c0eec --- /dev/null +++ b/.evergreen/run-kms-tls-tests.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# Don't trace since the URI contains a password that shouldn't show up in the logs +set -o errexit # Exit the script with error if any of the commands fail + +# Supported/used environment variables: +# MONGODB_URI Set the suggested connection MONGODB_URI (including credentials and topology info) +# KMS_TLS_ERROR_TYPE Either "expired" or "invalidHostname" + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" +echo "Running KMS TLS tests" + +cp ${JAVA_HOME}/lib/security/cacerts mongo-truststore +${JAVA_HOME}/bin/keytool -importcert -trustcacerts -file ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -keystore mongo-truststore -storepass changeit -storetype JKS -noprompt + +export GRADLE_EXTRA_VARS="-Pssl.enabled=true -Pssl.trustStoreType=jks -Pssl.trustStore=`pwd`/mongo-truststore -Pssl.trustStorePassword=changeit" +export KMS_TLS_ERROR_TYPE=${KMS_TLS_ERROR_TYPE} + +./gradlew -version + +./gradlew --stacktrace --info ${GRADLE_EXTRA_VARS} -Dorg.mongodb.test.uri=${MONGODB_URI} \ + -Dorg.mongodb.test.kms.tls.error.type=${KMS_TLS_ERROR_TYPE} \ + driver-sync:cleanTest driver-sync:test --tests ClientSideEncryptionKmsTlsTest +first=$? +echo $first + +./gradlew --stacktrace --info ${GRADLE_EXTRA_VARS} -Dorg.mongodb.test.uri=${MONGODB_URI} \ + -Dorg.mongodb.test.kms.tls.error.type=${KMS_TLS_ERROR_TYPE} \ + driver-reactive-streams:cleanTest driver-reactive-streams:test --tests ClientSideEncryptionKmsTlsTest +second=$? 
+echo $second + +if [ $first -ne 0 ]; then + exit $first +elif [ $second -ne 0 ]; then + exit $second +else + exit 0 +fi diff --git a/.evergreen/run-kotlin-tests.sh b/.evergreen/run-kotlin-tests.sh new file mode 100755 index 00000000000..66acf68809a --- /dev/null +++ b/.evergreen/run-kotlin-tests.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + + +AUTH=${AUTH:-noauth} +SSL=${SSL:-nossl} +MONGODB_URI=${MONGODB_URI:-} +TOPOLOGY=${TOPOLOGY:-standalone} + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + + +if [ "$SSL" != "nossl" ]; then + echo -e "\nSSL support not configured for Kotlin tests" + exit 1 +fi + +if [ "$AUTH" != "noauth" ]; then + echo -e "\nAuth support not configured for Kotlin tests" + exit 1 +fi + +export MULTI_MONGOS_URI_SYSTEM_PROPERTY="-Dorg.mongodb.test.multi.mongos.uri=${MONGODB_URI}" + +./gradlew -version + +echo "Running Kotlin Unit Tests" +./gradlew :bson-kotlin:test :bson-kotlinx:test :driver-kotlin-sync:test :driver-kotlin-coroutine:test :driver-kotlin-extensions:test + +echo "Running Kotlin Integration Tests" +./gradlew :driver-kotlin-sync:integrationTest :driver-kotlin-coroutine:integrationTest -Dorg.mongodb.test.uri=${MONGODB_URI} ${MULTI_MONGOS_URI_SYSTEM_PROPERTY} diff --git a/.evergreen/run-load-balancer-tests.sh b/.evergreen/run-load-balancer-tests.sh new file mode 100755 index 00000000000..4ee1510a901 --- /dev/null +++ b/.evergreen/run-load-balancer-tests.sh @@ -0,0 +1,95 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +# Supported/used environment variables: +# AUTH Set to enable authentication. Values are: "auth" / "noauth" (default) +# SSL Set to enable SSL. 
Values are "ssl" / "nossl" (default) +# JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java +# SINGLE_MONGOS_LB_URI Set the URI pointing to a load balancer configured with a single mongos server +# MULTI_MONGOS_LB_URI Set the URI pointing to a load balancer configured with multiple mongos servers + +AUTH=${AUTH:-noauth} +SSL=${SSL:-nossl} +MONGODB_URI=${MONGODB_URI:-} + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +if [ "$SSL" != "nossl" ]; then + # We generate the keystore and truststore on every run with the certs in the drivers-tools repo + if [ ! -f client.pkc ]; then + openssl pkcs12 -CAfile ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -export -in ${DRIVERS_TOOLS}/.evergreen/x509gen/client.pem -out client.pkc -password pass:bithere + fi + + cp ${JAVA_HOME}/lib/security/cacerts mongo-truststore + ${JAVA_HOME}/bin/keytool -importcert -trustcacerts -file ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -keystore mongo-truststore -storepass changeit -storetype JKS -noprompt + + # We add extra gradle arguments for SSL + GRADLE_EXTRA_VARS="-Pssl.enabled=true -Pssl.keyStoreType=pkcs12 -Pssl.keyStore=$(pwd)/client.pkc -Pssl.keyStorePassword=bithere -Pssl.trustStoreType=jks -Pssl.trustStore=$(pwd)/mongo-truststore -Pssl.trustStorePassword=changeit" + SINGLE_MONGOS_LB_URI="${SINGLE_MONGOS_LB_URI}&ssl=true&sslInvalidHostNameAllowed=true" + MULTI_MONGOS_LB_URI="${MULTI_MONGOS_LB_URI}&ssl=true&sslInvalidHostNameAllowed=true" +fi + +echo "Running $AUTH tests over $SSL and connecting to $SINGLE_MONGOS_LB_URI" + +echo "Running tests with Java ${JAVA_VERSION}" +./gradlew -version + +# Disabling errexit so that both gradle command will run. 
+# Then we exit with non-zero if either of them exited with non-zero + +set +o errexit + +./gradlew -PjavaVersion=${JAVA_VERSION} \ + -Dorg.mongodb.test.uri=${SINGLE_MONGOS_LB_URI} \ + -Dorg.mongodb.test.multi.mongos.uri=${MULTI_MONGOS_LB_URI} \ + ${GRADLE_EXTRA_VARS} --stacktrace --info --continue driver-sync:test \ + --tests LoadBalancerTest \ + --tests RetryableReadsTest \ + --tests RetryableWritesTest \ + --tests VersionedApiTest \ + --tests ChangeStreamsTest \ + --tests UnifiedCrudTest \ + --tests UnifiedTransactionsTest \ + --tests InitialDnsSeedlistDiscoveryTest +first=$? +echo $first + +./gradlew -PjavaVersion=${JAVA_VERSION} \ + -Dorg.mongodb.test.uri=${SINGLE_MONGOS_LB_URI} \ + -Dorg.mongodb.test.multi.mongos.uri=${MULTI_MONGOS_LB_URI} \ + ${GRADLE_EXTRA_VARS} --stacktrace --info --continue driver-reactive-stream:test \ + --tests LoadBalancerTest \ + --tests RetryableReadsTest \ + --tests RetryableWritesTest \ + --tests VersionedApiTest \ + --tests ChangeStreamsTest \ + --tests UnifiedCrudTest \ + --tests UnifiedTransactionsTest \ + --tests InitialDnsSeedlistDiscoveryTest +second=$? +echo $second + +./gradlew -PjavaVersion=${JAVA_VERSION} \ + -Dorg.mongodb.test.uri=${SINGLE_MONGOS_LB_URI} \ + -Dorg.mongodb.test.multi.mongos.uri=${MULTI_MONGOS_LB_URI} \ + ${GRADLE_EXTRA_VARS} --stacktrace --info --continue driver-core:test \ + --tests CommandBatchCursorFunctionalTest \ + --tests AsyncCommandBatchCursorFunctionalTest +third=$? 
+echo $third + +if [ $first -ne 0 ]; then + exit $first +elif [ $second -ne 0 ]; then + exit $second +elif [ $third -ne 0 ]; then + exit $third +else + exit 0 +fi diff --git a/.evergreen/run-mongodb-aws-ecs-test.sh b/.evergreen/run-mongodb-aws-ecs-test.sh new file mode 100755 index 00000000000..63e4232839b --- /dev/null +++ b/.evergreen/run-mongodb-aws-ecs-test.sh @@ -0,0 +1,73 @@ +#!/bin/bash + +set -o xtrace +set -o errexit # Exit the script with error if any of the commands fail + +# Supported/used environment variables: +# MONGODB_URI Set the URI, including an optional username/password to use to connect to the server via MONGODB-AWS +# authentication mechanism +# JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java +# "jdk5", "jdk6", "jdk7", "jdk8", "jdk9", "jdk11" + +############################################ +# Main Program # +############################################ + +if [[ -z "$1" ]]; then + echo "usage: $0 " + exit 1 +fi +MONGODB_URI="$1" + +echo "Running MONGODB-AWS ECS authentication tests" + +apt update + +if ! which java ; then + echo "Installing java..." + # Ubuntu 18.04 ca-certificates-java and opendjdk-17 bug work around + dpkg --purge --force-depends ca-certificates-java + apt install ca-certificates-java -y + apt install openjdk-17-jdk -y +fi + +if ! which git ; then + echo "installing git..." + apt install git -y +fi + +cd src + +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +./gradlew -version + +echo "Running tests..." +./gradlew -Dorg.mongodb.test.uri=${MONGODB_URI} -Dorg.mongodb.test.aws.credential.provider=awsSdkV2 --stacktrace --debug --info \ + driver-core:test --tests AwsAuthenticationSpecification +first=$? +echo $first + +./gradlew -Dorg.mongodb.test.uri=${MONGODB_URI} -Dorg.mongodb.test.aws.credential.provider=awsSdkV1 --stacktrace --debug --info \ + driver-core:test --tests AwsAuthenticationSpecification +second=$? 
+echo $second + +./gradlew -Dorg.mongodb.test.uri=${MONGODB_URI} -Dorg.mongodb.test.aws.credential.provider=builtIn --stacktrace --debug --info \ + driver-core:test --tests AwsAuthenticationSpecification +third=$? +echo $third + +if [ $first -ne 0 ]; then + exit $first +elif [ $second -ne 0 ]; then + exit $second +elif [ $third -ne 0 ]; then + exit $third +else + exit 0 +fi + + +cd - diff --git a/.evergreen/run-mongodb-aws-test.sh b/.evergreen/run-mongodb-aws-test.sh new file mode 100755 index 00000000000..3e1e6c3cf5d --- /dev/null +++ b/.evergreen/run-mongodb-aws-test.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +set -o xtrace +set -o errexit # Exit the script with error if any of the commands fail + +# Supported/used environment variables: +# JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java +# "jdk5", "jdk6", "jdk7", "jdk8", "jdk9", "jdk11" +# AWS_CREDENTIAL_PROVIDER "builtIn", 'awsSdkV1', 'awsSdkV2' +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +echo "Running MONGODB-AWS authentication tests" + +# Handle credentials and environment setup. +. 
$DRIVERS_TOOLS/.evergreen/auth_aws/aws_setup.sh $1 + +# show test output +set -x + +echo "Running tests with Java ${JAVA_VERSION}" +./gradlew -version + +# As this script may be executed multiple times in a single task, with different values for MONGODB_URI, it's necessary +# to run cleanTest to ensure that the test actually executes each run +./gradlew -PjavaVersion="${JAVA_VERSION}" -Dorg.mongodb.test.uri="${MONGODB_URI}" \ +-Dorg.mongodb.test.aws.credential.provider="${AWS_CREDENTIAL_PROVIDER}" \ +--stacktrace --debug --info driver-core:cleanTest driver-core:test --tests AwsAuthenticationSpecification diff --git a/.evergreen/run-mongodb-oidc-test.sh b/.evergreen/run-mongodb-oidc-test.sh new file mode 100755 index 00000000000..778b8962c09 --- /dev/null +++ b/.evergreen/run-mongodb-oidc-test.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +set +x # Disable debug trace +set -eu + +echo "Running MONGODB-OIDC authentication tests" +echo "OIDC_ENV $OIDC_ENV" +FULL_DESCRIPTION=$OIDC_ENV +if [ $OIDC_ENV == "test" ]; then + if [ -z "$DRIVERS_TOOLS" ]; then + echo "Must specify DRIVERS_TOOLS" + exit 1 + fi + source ${DRIVERS_TOOLS}/.evergreen/auth_oidc/secrets-export.sh + # java will not need to be installed, but we need to config + RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" + source "${RELATIVE_DIR_PATH}/setup-env.bash" +elif [ $OIDC_ENV == "azure" ]; then + source ./env.sh +elif [ $OIDC_ENV == "gcp" ]; then + source ./secrets-export.sh +elif [ $OIDC_ENV == "k8s" ]; then + # Make sure K8S_VARIANT is set. + if [ -z "$K8S_VARIANT" ]; then + echo "Must specify K8S_VARIANT" + popd + exit 1 + fi + + FULL_DESCRIPTION="${OIDC_ENV} - ${K8S_VARIANT}" + # fix for git permissions issue: + git config --global --add safe.directory /tmp/test +else + echo "Unrecognized OIDC_ENV $OIDC_ENV" + exit 1 +fi + +if ! which java ; then + echo "Installing java..." + sudo apt install openjdk-17-jdk -y + echo "Installed java." 
+fi + +which java +export OIDC_TESTS_ENABLED=true + +# use admin credentials for tests +TO_REPLACE="mongodb://" +REPLACEMENT="mongodb://$OIDC_ADMIN_USER:$OIDC_ADMIN_PWD@" +ADMIN_URI=${MONGODB_URI/$TO_REPLACE/$REPLACEMENT} + +echo "Running gradle version" +./gradlew -version + +echo "Running gradle classes compile for driver-sync and driver-reactive-streams: ${FULL_DESCRIPTION}" +./gradlew --parallel --stacktrace --info \ + driver-sync:classes driver-reactive-streams:classes + +echo "Running OIDC authentication tests against driver-sync: ${FULL_DESCRIPTION}" +./gradlew -Dorg.mongodb.test.uri="$ADMIN_URI" \ + --stacktrace --debug --info \ + driver-sync:test --tests OidcAuthenticationProseTests --tests UnifiedAuthTest + +echo "Running OIDC authentication tests against driver-reactive-streams: ${FULL_DESCRIPTION}" +./gradlew -Dorg.mongodb.test.uri="$ADMIN_URI" \ + --stacktrace --debug --info driver-reactive-streams:test --tests OidcAuthenticationAsyncProseTests diff --git a/.evergreen/run-ocsp-test.sh b/.evergreen/run-ocsp-test.sh new file mode 100755 index 00000000000..048935926aa --- /dev/null +++ b/.evergreen/run-ocsp-test.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash + +set -o xtrace +set -o errexit # Exit the script with error if any of the commands fail + +# Supported/used environment variables: +# JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java +# "jdk5", "jdk6", "jdk7", "jdk8", "jdk9", "jdk11" + +OCSP_MUST_STAPLE=${OCSP_MUST_STAPLE:-} +OCSP_TLS_SHOULD_SUCCEED=${OCSP_TLS_SHOULD_SUCCEED:-} +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +############################################ +# Functions # +############################################ + +provision_ssl () { + echo "SSL !" 
+ + cp ${JAVA_HOME}/lib/security/cacerts mongo-truststore + ${JAVA_HOME}/bin/keytool -import -trustcacerts -file ${CA_FILE} -keystore mongo-truststore -alias ca_ocsp -storepass changeit -noprompt + + # We add extra gradle arguments for SSL + export GRADLE_EXTRA_VARS="-Pssl.enabled=true -Pocsp.property=`pwd`/java-security-ocsp-property -Pssl.trustStoreType=jks -Pssl.trustStore=`pwd`/mongo-truststore -Pssl.trustStorePassword=changeit -Pssl.checkRevocation=true -Pclient.enableStatusRequestExtension=${OCSP_MUST_STAPLE} -Pclient.protocols=TLSv1.2 -Pocsp.tls.should.succeed=${OCSP_TLS_SHOULD_SUCCEED}" +} + +############################################ +# Main Program # +############################################ + +echo "Running OCSP tests" + + +# show test output +set -x + +provision_ssl + +echo "Running OCSP tests with Java ${JAVA_VERSION}" +./gradlew -version +./gradlew -PjavaVersion=${JAVA_VERSION} ${GRADLE_EXTRA_VARS} --stacktrace --debug --info driver-sync:test --tests OcspTest + diff --git a/.evergreen/run-perf-tests.sh b/.evergreen/run-perf-tests.sh new file mode 100755 index 00000000000..472e4348135 --- /dev/null +++ b/.evergreen/run-perf-tests.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +set -o xtrace +set -o errexit + +rm -rf driver-performance-test-data +git clone https://github.com/mongodb-labs/driver-performance-test-data.git +cd driver-performance-test-data +tar xf extended_bson.tgz +tar xf parallel.tgz +tar xf single_and_multi_document.tgz +cd .. + +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. 
"${RELATIVE_DIR_PATH}/setup-env.bash" + +export TEST_PATH="${PROJECT_DIRECTORY}/driver-performance-test-data/" +export OUTPUT_FILE="${PROJECT_DIRECTORY}/results.json" + +if [ "${PROVIDER}" = "Netty" ]; then + TASK="driver-benchmarks:runNetty" +else + TASK="driver-benchmarks:run" +fi + +start_time=$(date +%s) +./gradlew -Dorg.mongodb.benchmarks.data=${TEST_PATH} -Dorg.mongodb.benchmarks.output=${OUTPUT_FILE} ${TASK} +end_time=$(date +%s) +elapsed_secs=$((end_time-start_time)) + diff --git a/.evergreen/run-plain-auth-test.sh b/.evergreen/run-plain-auth-test.sh index b6a1631e9d8..fbc965df4b4 100755 --- a/.evergreen/run-plain-auth-test.sh +++ b/.evergreen/run-plain-auth-test.sh @@ -8,17 +8,16 @@ set -o errexit # Exit the script with error if any of the commands fail # JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java # "jdk5", "jdk6", "jdk7", "jdk8", "jdk9" -JDK=${JDK:-jdk} ############################################ # Main Program # ############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. 
"${RELATIVE_DIR_PATH}/setup-env.bash" echo "Running PLAIN authentication tests" -# We always compile with the latest version of java -export JAVA_HOME="/opt/java/jdk9" -echo "Running tests with ${JDK}" +echo "Running tests with Java ${JAVA_VERSION}" ./gradlew -version -./gradlew -PjdkHome=/opt/java/${JDK} -Dorg.mongodb.test.uri=${MONGODB_URI} --stacktrace --info -Dtest.single=PlainAuthenticationSpecification driver-core:test +./gradlew -PjavaVersion=${JAVA_VERSION} -Dorg.mongodb.test.uri=${MONGODB_URI} --stacktrace --info driver-core:test --tests PlainAuthenticationSpecification diff --git a/.evergreen/run-reactive-streams-tck-tests.sh b/.evergreen/run-reactive-streams-tck-tests.sh new file mode 100755 index 00000000000..6bd5e91a4ec --- /dev/null +++ b/.evergreen/run-reactive-streams-tck-tests.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +echo "Running Reactive Streams TCK tests with Java ${JAVA_VERSION}" + +./gradlew -version +./gradlew --stacktrace --info driver-reactive-streams:tckTest diff --git a/.evergreen/run-scala-tests.sh b/.evergreen/run-scala-tests.sh new file mode 100755 index 00000000000..02fd240d7c5 --- /dev/null +++ b/.evergreen/run-scala-tests.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + + +AUTH=${AUTH:-noauth} +SSL=${SSL:-nossl} +MONGODB_URI=${MONGODB_URI:-} +TOPOLOGY=${TOPOLOGY:-standalone} + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. 
"${RELATIVE_DIR_PATH}/setup-env.bash" + + +if [ "$SSL" != "nossl" ]; then + echo -e "\nSSL support not configured for Scala tests" + exit 1 +fi + +if [ "$AUTH" != "noauth" ]; then + echo -e "\nAuth support not configured for Scala tests" + exit 1 +fi + +export MULTI_MONGOS_URI_SYSTEM_PROPERTY="-Dorg.mongodb.test.multi.mongos.uri=${MONGODB_URI}" + +echo "Running scala tests with Scala $SCALA" + +./gradlew -version +./gradlew -PjavaVersion=${JAVA_VERSION} -PscalaVersion=$SCALA --stacktrace --info scalaCheck \ + -Dorg.mongodb.test.uri=${MONGODB_URI} ${MULTI_MONGOS_URI_SYSTEM_PROPERTY} diff --git a/.evergreen/run-socket-tests.sh b/.evergreen/run-socket-tests.sh index b09bc1bcf57..df215c953e5 100755 --- a/.evergreen/run-socket-tests.sh +++ b/.evergreen/run-socket-tests.sh @@ -10,17 +10,18 @@ set -o errexit # Exit the script with error if any of the commands fail # Supported values: "server", "replica_set", "sharded_cluster" # COMPRESSOR Set to enable compression. Values are "snappy" and "zlib" (default is no compression) # JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java -# "jdk5", "jdk6", "jdk7", "jdk8", "jdk9" +# "jdk5", "jdk6", "jdk7", "jdk8", "jdk9", "jdk11" AUTH=${AUTH:-noauth} MONGODB_URI=${MONGODB_URI:-} -JDK=${JDK:-jdk} TOPOLOGY=${TOPOLOGY:-server} COMPRESSOR=${COMPRESSOR:-} ############################################ # Main Program # ############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. 
"${RELATIVE_DIR_PATH}/setup-env.bash" SOCKET_REGEX='(.*)localhost:([0-9]+)?(.*)' while [[ $MONGODB_URI =~ $SOCKET_REGEX ]]; do @@ -47,9 +48,6 @@ fi echo "Running $AUTH tests over for $TOPOLOGY and connecting to $MONGODB_URI" -# We always compile with the latest version of java -export JAVA_HOME="/opt/java/jdk9" - -echo "Running tests with ${JDK}" +echo "Running tests with Java ${JAVA_VERSION}" ./gradlew -version -./gradlew -PjdkHome=/opt/java/${JDK} -Dorg.mongodb.test.uri=${MONGODB_URI} ${GRADLE_EXTRA_VARS} --stacktrace --info :driver-legacy:test :driver-sync:test +./gradlew -PjavaVersion=${JAVA_VERSION} -Dorg.mongodb.test.uri=${MONGODB_URI} ${GRADLE_EXTRA_VARS} --stacktrace --info :driver-legacy:test :driver-sync:test diff --git a/.evergreen/run-socks5-tests.sh b/.evergreen/run-socks5-tests.sh new file mode 100755 index 00000000000..c6cbb812b86 --- /dev/null +++ b/.evergreen/run-socks5-tests.sh @@ -0,0 +1,87 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +SSL=${SSL:-nossl} +SOCKS_AUTH=${SOCKS_AUTH:-noauth} +MONGODB_URI=${MONGODB_URI:-} +SOCKS5_SERVER_SCRIPT="$DRIVERS_TOOLS/.evergreen/socks5srv.py" +PYTHON_BINARY=${PYTHON_BINARY:-python3} +# Grab a connection string that only refers to *one* of the hosts in MONGODB_URI +FIRST_HOST=$(echo "$MONGODB_URI" | awk -F[/:,] '{print $4":"$5}') +# Use 127.0.0.1:12345 as the URL for the single host that we connect to, +# we configure the Socks5 proxy server script to redirect from this to FIRST_HOST +export MONGODB_URI_SINGLEHOST="mongodb://127.0.0.1:12345" + +if [ "${SSL}" = "ssl" ]; then + MONGODB_URI="${MONGODB_URI}&ssl=true&sslInvalidHostNameAllowed=true" + MONGODB_URI_SINGLEHOST="${MONGODB_URI_SINGLEHOST}/?ssl=true&sslInvalidHostNameAllowed=true" +fi + +# Compute path to socks5 fake server script in a way that works on Windows +if [ "Windows_NT" == "$OS" ]; then + SOCKS5_SERVER_SCRIPT=$(cygpath -m $DRIVERS_TOOLS) +fi + 
+RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +############################################ +# Functions # +############################################ + +provision_ssl () { + # We generate the keystore and truststore on every run with the certs in the drivers-tools repo + if [ ! -f client.pkc ]; then + openssl pkcs12 -CAfile ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -export -in ${DRIVERS_TOOLS}/.evergreen/x509gen/client.pem -out client.pkc -password pass:bithere + fi + + cp ${JAVA_HOME}/lib/security/cacerts mongo-truststore + ${JAVA_HOME}/bin/keytool -importcert -trustcacerts -file ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -keystore mongo-truststore -storepass changeit -storetype JKS -noprompt + + # We add extra gradle arguments for SSL + export GRADLE_SSL_VARS="-Pssl.enabled=true -Pssl.keyStoreType=pkcs12 -Pssl.keyStore=`pwd`/client.pkc -Pssl.keyStorePassword=bithere -Pssl.trustStoreType=jks -Pssl.trustStore=`pwd`/mongo-truststore -Pssl.trustStorePassword=changeit" +} + + +run_socks5_proxy () { +if [ "$SOCKS_AUTH" == "auth" ]; then + "$PYTHON_BINARY" "$SOCKS5_SERVER_SCRIPT" --port 1080 --auth username:p4ssw0rd --map "127.0.0.1:12345 to $FIRST_HOST" & + SOCKS5_SERVER_PID_1=$! + trap "kill $SOCKS5_SERVER_PID_1" EXIT + else + "$PYTHON_BINARY" "$SOCKS5_SERVER_SCRIPT" --port 1080 --map "127.0.0.1:12345 to $FIRST_HOST" & + SOCKS5_SERVER_PID_1=$! 
+ trap "kill $SOCKS5_SERVER_PID_1" EXIT +fi +} + +run_socks5_prose_tests () { +if [ "$SOCKS_AUTH" == "auth" ]; then + local AUTH_ENABLED="true" +else + local AUTH_ENABLED="false" +fi + +echo "Running Socks5 tests with Java ${JAVA_VERSION} over $SSL for $TOPOLOGY and connecting to $MONGODB_URI with socks auth enabled: $AUTH_ENABLED" +./gradlew -PjavaVersion=${JAVA_VERSION} -Dorg.mongodb.test.uri=${MONGODB_URI} \ + -Dorg.mongodb.test.uri.singleHost=${MONGODB_URI_SINGLEHOST} \ + -Dorg.mongodb.test.uri.proxyHost="127.0.0.1" \ + -Dorg.mongodb.test.uri.proxyPort="1080" \ + -Dorg.mongodb.test.uri.socks.auth.enabled=${AUTH_ENABLED} \ + ${GRADLE_SSL_VARS} \ + --stacktrace --info --continue \ + driver-sync:test \ + --tests "com.mongodb.client.Socks5ProseTest*" +} + +############################################ +# Main Program # +############################################ + +# Set up keystore/truststore +provision_ssl +./gradlew -version +run_socks5_proxy +run_socks5_prose_tests diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 01fd36f1f28..10bd5bc107d 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -2,59 +2,78 @@ set -o xtrace # Write all commands first to stderr set -o errexit # Exit the script with error if any of the commands fail +set -o pipefail # Exit if any command in a pipe fails # Supported/used environment variables: -# AUTH Set to enable authentication. Values are: "auth" / "noauth" (default) -# SSL Set to enable SSL. Values are "ssl" / "nossl" (default) -# MONGODB_URI Set the suggested connection MONGODB_URI (including credentials and topology info) -# TOPOLOGY Allows you to modify variables and the MONGODB_URI based on test topology -# Supported values: "server", "replica_set", "sharded_cluster" -# COMPRESSOR Set to enable compression. Values are "snappy" and "zlib" (default is no compression) -# JDK Set the version of java to be used. 
Java versions can be set from the java toolchain /opt/java -# "jdk5", "jdk6", "jdk7", "jdk8", "jdk9" +# AUTH Set to enable authentication. Values are: "auth" / "noauth" (default) +# SSL Set to enable SSL. Values are "ssl" / "nossl" (default) +# NETTY_SSL_PROVIDER The Netty TLS/SSL protocol provider. Ignored unless SSL is "ssl" and ASYNC_TRANSPORT is "netty". Values are "JDK", "OPENSSL", null (a.k.a. "" or '') (default). +# MONGODB_URI Set the suggested connection MONGODB_URI (including credentials and topology info) +# TOPOLOGY Allows you to modify variables and the MONGODB_URI based on test topology +# Supported values: "server", "replica_set", "sharded_cluster" +# COMPRESSOR Set to enable compression. Values are "snappy" and "zlib" (default is no compression) +# ASYNC_TRANSPORT Set the async transport. Values are "nio2" or "netty". +# JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java +# SLOW_TESTS_ONLY Set to true to only run the slow tests +# AWS_ACCESS_KEY_ID The AWS access key identifier for client-side encryption +# AWS_SECRET_ACCESS_KEY The AWS secret access key for client-side encryption +# AWS_ACCESS_KEY_ID_AWS_KMS_NAMED The AWS access key identifier for client-side encryption's named KMS provider. +# AWS_SECRET_ACCESS_KEY_AWS_KMS_NAMED The AWS secret access key for client-side encryption's named KMS provider. 
+# AWS_TEMP_ACCESS_KEY_ID The temporary AWS access key identifier for client-side encryption +# AWS_TEMP_SECRET_ACCESS_KEY The temporary AWS secret access key for client-side encryption +# AWS_TEMP_SESSION_TOKEN The temporary AWS session token for client-side encryption +# AZURE_TENANT_ID The Azure tenant identifier for client-side encryption +# AZURE_CLIENT_ID The Azure client identifier for client-side encryption +# AZURE_CLIENT_SECRET The Azure client secret for client-side encryption +# GCP_EMAIL The GCP email for client-side encryption +# GCP_PRIVATE_KEY The GCP private key for client-side encryption +# AZUREKMS_KEY_VAULT_ENDPOINT The Azure key vault endpoint for integration tests +# AZUREKMS_KEY_NAME The Azure key name endpoint for integration tests AUTH=${AUTH:-noauth} SSL=${SSL:-nossl} MONGODB_URI=${MONGODB_URI:-} -JDK=${JDK:-jdk} TOPOLOGY=${TOPOLOGY:-server} COMPRESSOR=${COMPRESSOR:-} +TESTS=${TESTS:-test} +SLOW_TESTS_ONLY=${SLOW_TESTS_ONLY:-false} -# JDK6 needs async.type=netty -if [ "$JDK" == "jdk6" ]; then - export ASYNC_TYPE="-Dorg.mongodb.async.type=netty" -else - export ASYNC_TYPE="-Dorg.mongodb.async.type=nio2" +if [ -n "${ASYNC_TRANSPORT}" ]; then + readonly JAVA_SYSPROP_ASYNC_TRANSPORT="-Dorg.mongodb.test.async.transport=${ASYNC_TRANSPORT}" fi -# We always compile with the latest version of java -export JAVA_HOME="/opt/java/jdk9" +if [ "${SSL}" = "ssl" ] && [ "${ASYNC_TRANSPORT}" = "netty" ] && [ -n "${NETTY_SSL_PROVIDER}" ]; then + readonly JAVA_SYSPROP_NETTY_SSL_PROVIDER="-Dorg.mongodb.test.netty.ssl.provider=${NETTY_SSL_PROVIDER}" +fi +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" ############################################ # Functions # ############################################ provision_ssl () { - echo "SSL !" - # We generate the keystore and truststore on every run with the certs in the drivers-tools repo if [ ! 
-f client.pkc ]; then openssl pkcs12 -CAfile ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -export -in ${DRIVERS_TOOLS}/.evergreen/x509gen/client.pem -out client.pkc -password pass:bithere fi - if [ ! -f mongo-truststore ]; then - ${JAVA_HOME}/bin/keytool -importcert -trustcacerts -file ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -keystore mongo-truststore -storepass hithere -storetype JKS -noprompt - fi + + cp ${JAVA_HOME}/lib/security/cacerts mongo-truststore + ${JAVA_HOME}/bin/keytool -importcert -trustcacerts -file ${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem -keystore mongo-truststore -storepass changeit -storetype JKS -noprompt # We add extra gradle arguments for SSL - export GRADLE_EXTRA_VARS="-Pssl.enabled=true -Pssl.keyStoreType=pkcs12 -Pssl.keyStore=`pwd`/client.pkc -Pssl.keyStorePassword=bithere -Pssl.trustStoreType=jks -Pssl.trustStore=`pwd`/mongo-truststore -Pssl.trustStorePassword=hithere" - export ASYNC_TYPE="-Dorg.mongodb.async.type=netty" + export GRADLE_EXTRA_VARS="-Pssl.enabled=true -Pssl.keyStoreType=pkcs12 -Pssl.keyStore=`pwd`/client.pkc -Pssl.keyStorePassword=bithere -Pssl.trustStoreType=jks -Pssl.trustStore=`pwd`/mongo-truststore -Pssl.trustStorePassword=changeit" +} +provision_multi_mongos_uri_for_ssl () { # Arguments for auth + SSL if [ "$AUTH" != "noauth" ] || [ "$TOPOLOGY" == "replica_set" ]; then export MONGODB_URI="${MONGODB_URI}&ssl=true&sslInvalidHostNameAllowed=true" + export MULTI_MONGOS_URI="${MULTI_MONGOS_URI}&ssl=true&sslInvalidHostNameAllowed=true" else export MONGODB_URI="${MONGODB_URI}/?ssl=true&sslInvalidHostNameAllowed=true" + export MULTI_MONGOS_URI="${MULTI_MONGOS_URI}/?ssl=true&sslInvalidHostNameAllowed=true" fi } @@ -64,6 +83,11 @@ provision_ssl () { # Provision the correct connection string and set up SSL if needed if [ "$TOPOLOGY" == "sharded_cluster" ]; then + if [ "$AUTH" = "auth" ]; then + export MULTI_MONGOS_URI="mongodb://bob:pwd123@localhost:27017,localhost:27018/?authSource=admin" + else + export 
MULTI_MONGOS_URI="${MONGODB_URI}" + fi if [ "$AUTH" = "auth" ]; then export MONGODB_URI="mongodb://bob:pwd123@localhost:27017/?authSource=admin" @@ -78,13 +102,40 @@ if [ "$COMPRESSOR" != "" ]; then else export MONGODB_URI="${MONGODB_URI}/?compressors=${COMPRESSOR}" fi + + if [[ "$MULTI_MONGOS_URI" == *"?"* ]]; then + export MULTI_MONGOS_URI="${MULTI_MONGOS_URI}&compressors=${COMPRESSOR}" + else + export MULTI_MONGOS_URI="${MULTI_MONGOS_URI}/?compressors=${COMPRESSOR}" + fi fi +# Set up keystore/truststore regardless, as they are required for testing KMIP +provision_ssl + if [ "$SSL" != "nossl" ]; then - provision_ssl + provision_multi_mongos_uri_for_ssl +fi + +export MULTI_MONGOS_URI_SYSTEM_PROPERTY="-Dorg.mongodb.test.multi.mongos.uri=${MULTI_MONGOS_URI}" + +# For now it's sufficient to hard-code the API version to "1", since it's the only API version +if [ ! -z "$REQUIRE_API_VERSION" ]; then + export API_VERSION="-Dorg.mongodb.test.api.version=1" fi + echo "Running $AUTH tests over $SSL for $TOPOLOGY and connecting to $MONGODB_URI" -echo "Running tests with ${JDK}" +echo "Running tests with Java ${JAVA_VERSION}" ./gradlew -version -./gradlew -PjdkHome=/opt/java/${JDK} -Dorg.mongodb.test.uri=${MONGODB_URI} ${GRADLE_EXTRA_VARS} ${ASYNC_TYPE} --stacktrace --info test + +./gradlew -PjavaVersion=${JAVA_VERSION} -Dorg.mongodb.test.uri=${MONGODB_URI} \ + ${MULTI_MONGOS_URI_SYSTEM_PROPERTY} ${API_VERSION} ${GRADLE_EXTRA_VARS} \ + ${JAVA_SYSPROP_ASYNC_TRANSPORT} ${JAVA_SYSPROP_NETTY_SSL_PROVIDER} \ + -Dorg.mongodb.test.fle.on.demand.credential.test.failure.enabled=true \ + --stacktrace --info --continue ${TESTS} | tee -a logs.txt + +if grep -q 'LEAK:' logs.txt ; then + echo "Netty Leak detected, please inspect build log" + exit 1 +fi diff --git a/.evergreen/run-x509-auth-tests.sh b/.evergreen/run-x509-auth-tests.sh new file mode 100755 index 00000000000..93b23fca1ca --- /dev/null +++ b/.evergreen/run-x509-auth-tests.sh @@ -0,0 +1,56 @@ +#!/bin/bash + +# Exit the script 
with error if any of the commands fail +set -o errexit + +# Supported/used environment variables: +# JDK Set the version of java to be used. Java versions can be set from the java toolchain /opt/java +# ATLAS_X509_DEV Set the connection string for the Atlas X509 development cluster. +# ATLAS_X509_DEV_CERT_BASE64 Set the base64 encoded contents of a PEM file containing the client certificate (signed by the mongodb dev CA) and client private key for the X509 authentication on development cluster. +# ATLAS_X509_DEV_CERT_NOUSER_BASE64 Set the base64 encoded contents of a PEM file containing the client certificate (signed by the mongodb dev CA) and client private key for the X509 authentication on development cluster with the subject name that does not exist on the server/cluster. + +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE:-$0}")" +. "${RELATIVE_DIR_PATH}/setup-env.bash" + +MONGODB_URI=${ATLAS_X509_DEV:-} +echo "$MONGODB_URI" +ATLAS_X509_DEV_CERT_BASE64=${ATLAS_X509_DEV_CERT_BASE64:-} +ATLAS_X509_DEV_CERT_NOUSER_BASE64=${ATLAS_X509_DEV_CERT_NOUSER_BASE64:-} + +############################################ +# Functions # +############################################ + +provision_keystores () { + # Base64 decode contents of a PEM holder for client certificate (signed by the mongodb dev CA) and private key + echo "${ATLAS_X509_DEV_CERT_BASE64}" | base64 --decode > ca_and_pk.pem + echo "${ATLAS_X509_DEV_CERT_NOUSER_BASE64}" | base64 --decode > ca_and_pk_no_user.pem + + # Build the pkcs12 (keystore). We include the leaf-only certificate (with public key) and private key in the keystore, + # assuming the signed certificate is already trusted by the Atlas as issuer is MongoDB dev CA. 
+ echo "Creating PKCS12 keystore from ca_and_pk.pem" + openssl pkcs12 -export \ + -in ca_and_pk.pem \ + -out existing_user.p12 \ + -password pass:test + + echo "Creating PKCS12 keystore from ca_and_pk_no_user.pem" + openssl pkcs12 -export \ + -in ca_and_pk_no_user.pem \ + -out non_existing_user.p12 \ + -password pass:test +} + +############################################ +# Main Program # +############################################ +echo "Running X509 Authentication tests with Java ${JAVA_VERSION}" + +# Set up keystores for x509 authentication. +provision_keystores + +./gradlew -PjavaVersion=${JAVA_VERSION} -Dorg.mongodb.test.uri=${MONGODB_URI} --info --continue \ + -Dorg.mongodb.test.x509.auth.enabled=true \ + -Dorg.mongodb.test.x509.auth.keystore.location="$(pwd)" \ + driver-sync:test --tests X509AuthenticationTest \ + driver-reactive-streams:test --tests X509AuthenticationTest \ No newline at end of file diff --git a/.evergreen/setup-env.bash b/.evergreen/setup-env.bash new file mode 100644 index 00000000000..cae67cd65eb --- /dev/null +++ b/.evergreen/setup-env.bash @@ -0,0 +1,51 @@ +# Java configurations for evergreen + +export JDK8="/opt/java/jdk8" +export JDK11="/opt/java/jdk11" +export JDK17="/opt/java/jdk17" +export JDK21="/opt/java/jdk21" +# note that `JDK21_GRAALVM` is used in `run-graalvm-native-image-app.sh` +# by dynamically constructing the variable name +export JDK21_GRAALVM="/opt/java/jdk21-graalce" + +if [ -d "$JDK17" ]; then + export JAVA_HOME=$JDK17 +fi + +export JAVA_VERSION=${JAVA_VERSION:-17} + +echo "Java Configs:" +echo "Java Home: ${JAVA_HOME}" +echo "Java test version: ${JAVA_VERSION}" + +# Rename environment variables for AWS, Azure, and GCP +if [ -f secrets-export.sh ]; then + echo "Renaming secrets env variables" + . 
secrets-export.sh + + export AWS_ACCESS_KEY_ID=$FLE_AWS_ACCESS_KEY_ID + export AWS_SECRET_ACCESS_KEY=$FLE_AWS_SECRET_ACCESS_KEY + export AWS_DEFAULT_REGION=$FLE_AWS_DEFAULT_REGION + + export AWS_ACCESS_KEY_ID_AWS_KMS_NAMED=$FLE_AWS_KEY2 + export AWS_SECRET_ACCESS_KEY_AWS_KMS_NAMED=$FLE_AWS_SECRET2 + + export AWS_TEMP_ACCESS_KEY_ID=$CSFLE_AWS_TEMP_ACCESS_KEY_ID + export AWS_TEMP_SECRET_ACCESS_KEY=$CSFLE_AWS_TEMP_SECRET_ACCESS_KEY + export AWS_TEMP_SESSION_TOKEN=$CSFLE_AWS_TEMP_SESSION_TOKEN + + export AZURE_CLIENT_ID=$FLE_AZURE_CLIENTID + export AZURE_TENANT_ID=$FLE_AZURE_TENANTID + export AZURE_CLIENT_SECRET=$FLE_AZURE_CLIENTSECRET + + export GCP_EMAIL=$FLE_GCP_EMAIL + export GCP_PRIVATE_KEY=$FLE_GCP_PRIVATEKEY + + # Unset AWS_SESSION_TOKEN if it is empty + if [ -z "$AWS_SESSION_TOKEN" ];then + unset AWS_SESSION_TOKEN + fi + +else + echo "No secrets env variables found to rename" +fi diff --git a/.evergreen/ssdlc-report.sh b/.evergreen/ssdlc-report.sh new file mode 100755 index 00000000000..56d5957f5ab --- /dev/null +++ b/.evergreen/ssdlc-report.sh @@ -0,0 +1,100 @@ +#!/usr/bin/env bash + +set -eu + +# Supported/used environment variables: +# PRODUCT_NAME +# PRODUCT_VERSION +# EVERGREEN_VERSION_ID + +if [ -z "${PRODUCT_NAME}" ]; then + printf "\nPRODUCT_NAME must be set to a non-empty string\n" + exit 1 +fi +if [ -z "${PRODUCT_VERSION}" ]; then + printf "\nPRODUCT_VERSION must be set to a non-empty string\n" + exit 1 +fi +if [ -z "${EVERGREEN_VERSION_ID}" ]; then + printf "\nEVERGREEN_VERSION_ID must be set to a non-empty string\n" + exit 1 +fi + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE[0]:-$0}")" +source "${RELATIVE_DIR_PATH}/setup-env.bash" + +printf "\nCreating SSDLC reports\n" +printf "\nProduct name: %s\n" "${PRODUCT_NAME}" +printf "\nProduct version: %s\n" "${PRODUCT_VERSION}" + +declare -r SSDLC_PATH="${RELATIVE_DIR_PATH}/../build/ssdlc" +declare
-r SSDLC_STATIC_ANALYSIS_REPORTS_PATH="${SSDLC_PATH}/static-analysis-reports" +mkdir "${SSDLC_PATH}" +mkdir "${SSDLC_STATIC_ANALYSIS_REPORTS_PATH}" + +declare -r EVERGREEN_PROJECT_NAME_PREFIX="${PRODUCT_NAME//-/_}" +declare -r EVERGREEN_BUILD_URL_PREFIX="https://spruce.mongodb.com/version" +declare -r GIT_TAG="r${PRODUCT_VERSION}" +GIT_COMMIT_HASH="$(git rev-list --ignore-missing -n 1 "${GIT_TAG}")" +set +e + GIT_BRANCH_DEFAULT="$(git branch -a --contains "${GIT_TAG}" | grep 'main$')" + GIT_BRANCH_PATCH="$(git branch -a --contains "${GIT_TAG}" | grep '\.x$')" +set -e +if [ -n "${GIT_BRANCH_DEFAULT}" ]; then + declare -r EVERGREEN_BUILD_URL="${EVERGREEN_BUILD_URL_PREFIX}/${EVERGREEN_PROJECT_NAME_PREFIX}_${GIT_COMMIT_HASH}" +elif [ -n "${GIT_BRANCH_PATCH}" ]; then + # strip out the patch version + declare -r EVERGREEN_PROJECT_NAME_SUFFIX="${PRODUCT_VERSION%.*}" + declare -r EVERGREEN_BUILD_URL="${EVERGREEN_BUILD_URL_PREFIX}/${EVERGREEN_PROJECT_NAME_PREFIX}_${EVERGREEN_PROJECT_NAME_SUFFIX}_${GIT_COMMIT_HASH}" +elif [[ "${PRODUCT_NAME}" == *'-snapshot' ]]; then + declare -r EVERGREEN_BUILD_URL="${EVERGREEN_BUILD_URL_PREFIX}/${EVERGREEN_VERSION_ID}" +else + printf "\nFailed to compute EVERGREEN_BUILD_URL\n" + exit 1 +fi +printf "\nEvergreen build URL: %s\n" "${EVERGREEN_BUILD_URL}" + +PRODUCT_RELEASE_CREATOR="$(git log --ignore-missing "${GIT_TAG}"^.."${GIT_TAG}" --simplify-by-decoration --pretty='format:%aN')" +printf "\nProduct release creator: %s\n" "${PRODUCT_RELEASE_CREATOR}" + +printf "\nCreating SpotBugs SARIF reports\n" +./gradlew -version +set +e + # This `gradlew` command is expected to exit with a non-zero exit status, + # because it reports all the findings that we normally explicitly exclude as "No Fix Needed"/"False Positive". 
+ ./gradlew -PssdlcReport.enabled=true --continue -x test -x integrationTest -x spotlessApply check scalaCheck +set -e +printf "\nSpotBugs created the following SARIF reports\n" +IFS=$'\n' +declare -a SARIF_PATHS=($(find "${RELATIVE_DIR_PATH}/.." -path "*/spotbugs/*.sarif")) +unset IFS +for SARIF_PATH in "${SARIF_PATHS[@]}"; do + GRADLE_PROJECT_NAME="$(basename "$(dirname "$(dirname "$(dirname "$(dirname "${SARIF_PATH}")")")")")" + NEW_SARIF_PATH="${SSDLC_STATIC_ANALYSIS_REPORTS_PATH}/${GRADLE_PROJECT_NAME}_$(basename "${SARIF_PATH}")" + cp "${SARIF_PATH}" "${NEW_SARIF_PATH}" + printf "%s\n" "${NEW_SARIF_PATH}" +done + +printf "\nCreating SSDLC compliance report\n" +declare -r TEMPLATE_SSDLC_REPORT_PATH="${RELATIVE_DIR_PATH}/template_ssdlc_compliance_report.md" +declare -r SSDLC_REPORT_PATH="${SSDLC_PATH}/ssdlc_compliance_report.md" +cp "${TEMPLATE_SSDLC_REPORT_PATH}" "${SSDLC_REPORT_PATH}" +declare -a SED_EDIT_IN_PLACE_OPTION +if [[ "$OSTYPE" == "darwin"* ]]; then + SED_EDIT_IN_PLACE_OPTION=(-i '') +else + SED_EDIT_IN_PLACE_OPTION=(-i) +fi +sed "${SED_EDIT_IN_PLACE_OPTION[@]}" \ + -e "s/\${product_name}/${PRODUCT_NAME}/g" \ + -e "s/\${product_version}/${PRODUCT_VERSION}/g" \ + -e "s/\${report_date_utc}/$(date -u +%Y-%m-%d)/g" \ + -e "s/\${product_release_creator}/${PRODUCT_RELEASE_CREATOR}/g" \ + -e "s>\${evergreen_build_url}>${EVERGREEN_BUILD_URL}>g" \ + "${SSDLC_REPORT_PATH}" +printf "%s\n" "${SSDLC_REPORT_PATH}" + +printf "\n" diff --git a/.evergreen/static-checks.sh b/.evergreen/static-checks.sh new file mode 100755 index 00000000000..1accf5c1684 --- /dev/null +++ b/.evergreen/static-checks.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +############################################ +# Main Program # +############################################ +RELATIVE_DIR_PATH="$(dirname "${BASH_SOURCE[0]:-$0}")" +. 
"${RELATIVE_DIR_PATH}/setup-env.bash" + +echo "Compiling JVM drivers" + +./gradlew -version +./gradlew -PxmlReports.enabled=true --info -x test -x integrationTest -x spotlessApply clean check scalaCheck jar testClasses docs diff --git a/.evergreen/template_ssdlc_compliance_report.md b/.evergreen/template_ssdlc_compliance_report.md new file mode 100644 index 00000000000..adadc60fd71 --- /dev/null +++ b/.evergreen/template_ssdlc_compliance_report.md @@ -0,0 +1,67 @@ +# ${product_name} SSDLC compliance report + +This report is available at +. + + + + + + + + + + + + + + + + + + +
Product name${product_name}
Product version${product_version}
Release creator + ${product_release_creator} +

+ Refer to data in Papertrail for more details. + There is currently no official way to serve that data. +

+
Report date, UTC${report_date_utc}
+ +## Process document + +Blocked on . + +The MongoDB SSDLC policy is available at +. + +## Third-party dependency information + +There are no dependencies to report vulnerabilities of. +Our [SBOM](https://docs.devprod.prod.corp.mongodb.com/mms/python/src/sbom/silkbomb/docs/CYCLONEDX/) lite +is . + +## Static analysis findings + +The static analysis findings are available at +. +All the findings in the aforementioned reports +are either of the MongoDB status "False Positive" or "No Fix Needed", +because code that has any other findings cannot technically get into the product. + + may also be of interest. + +## Security testing results + +The testing results are available at +<${evergreen_build_url}>. + +See the driver security testing summary + +for the description of what is tested. + +## Signature information + +The product artifacts are signed. +The signatures can be verified by following instructions at +. diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000000..e829944e910 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,11 @@ +# .git-blame-ignore-revs +# Checkstyle fixes +94780bc8b72c62d9bc09beaa9ac62b942debab5f +# Copyright fixes +0aa2ec20d5215c0ac727602dd2cd891c22c69ba8 +# Scala spotless changes +fd21430c967571ed172259cc4100f291257a9a01 +# IntelliJ automated code cleanup +d9aa6044e1a6b440bcb013c330497f2813484050 +# Remove `final` in catch clauses +4b3b48546fb0457e5c515ccfe8780e373ad7de5f diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000000..28c26f58754 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,2 @@ +# Listing code owners is required by DRIVERS-3098 +* @mongodb/dbx-java \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000000..80bd0c9bf01 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,9 @@ +version: 2 +updates: + - package-ecosystem: "gitsubmodule" + directory: "/" + schedule: + 
interval: "weekly" + commit-message: + prefix: "build" + include: "scope" diff --git a/.github/workflows/bump-and-tag.sh b/.github/workflows/bump-and-tag.sh new file mode 100755 index 00000000000..0875db6bc59 --- /dev/null +++ b/.github/workflows/bump-and-tag.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +set -e + +if [ "$#" -ne 3 ]; then + echo "Usage: $0 " >&2 + exit 1 +fi + +CURRENT_VERSION=$1 +RELEASE_VERSION=$2 +NEXT_VERSION=$3 + +SCRIPT_DIR=$(dirname ${BASH_SOURCE[0]}) + +echo "Bump version in gradle.properties to ${RELEASE_VERSION}" +${SCRIPT_DIR}/bump-version.sh "${CURRENT_VERSION}-SNAPSHOT" "${RELEASE_VERSION}" + +echo "Create release tag for ${RELEASE_VERSION}" +git tag -a -m "${RELEASE_VERSION}" r${RELEASE_VERSION} + +echo "Bump to snapshot version for ${NEXT_VERSION}" +${SCRIPT_DIR}/bump-version.sh "${RELEASE_VERSION}" "${NEXT_VERSION}-SNAPSHOT" diff --git a/.github/workflows/bump-version.sh b/.github/workflows/bump-version.sh new file mode 100755 index 00000000000..eaa92163bfa --- /dev/null +++ b/.github/workflows/bump-version.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +set -e + +if [ "$#" -ne 2 ]; then + echo "Usage: $0 " >&2 + exit 1 +fi + +FROM_VERSION=$1 +TO_VERSION=$2 + +sed --in-place "s/version=${FROM_VERSION}/version=${TO_VERSION}/g" gradle.properties +git commit -m "Version: bump ${TO_VERSION}" gradle.properties diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000000..005ac768f3b --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,139 @@ +name: "Release New Version" +run-name: "Release ${{ inputs.version }}" + +on: + workflow_dispatch: + inputs: + version: + description: "The version to be released (e.g.
1.2.3)" + required: true + type: "string" + +jobs: + prepare-release: + environment: release + name: "Prepare release" + runs-on: ubuntu-latest + permissions: + # Write permission for id-token is necessary to generate a new token for the GitHub App + id-token: write + # Write permission for contents is to ensure we're allowed to push to the repository + contents: write + + steps: + - name: "Create release output" + run: echo '🎬 Release process for version ${{ env.RELEASE_VERSION }} started by @${{ github.triggering_actor }}' >> $GITHUB_STEP_SUMMARY + + - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2 + with: + app_id: ${{ vars.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + + - name: "Store version numbers in env variables" + # The awk command to increase the version number was copied from + # StackOverflow: https://stackoverflow.com/a/61921674/3959933 + # Variables set here: + # RELEASE_VERSION: The version the deployment is expected to create + # RELEASE_VERSION_WITHOUT_SUFFIX: The version without any stability + # suffixes. Example: 5.2.0-beta0 => 5.2.0 + # NEXT_VERSION: The next version to be released. For pre-releases, the + # next version is a snapshot of the pre-release version. Examples: + # 5.2.0 => 5.2.1; 5.2.0-beta0 => 5.2.0 + # RELEASE_BRANCH: The name of the stable branch for this release series + # Example: 5.2.0 => 5.2.x + # Example: 5.2.0-beta1 => + run: | + echo RELEASE_VERSION=${{ inputs.version }} >> $GITHUB_ENV + echo RELEASE_VERSION_WITHOUT_SUFFIX=$(echo ${{ inputs.version }} | awk -F- '{print $1}') >> $GITHUB_ENV + if [[ "${{ inputs.version }}" =~ (alpha|beta|rc)[0-9]+$ ]]; then + echo NEXT_VERSION=$(echo ${{ inputs.version }} | awk -F- '{print $1}') >> $GITHUB_ENV + echo RELEASE_BRANCH=${{ github.ref_name }} >> $GITHUB_ENV + else + echo NEXT_VERSION=$(echo ${{ inputs.version }} | awk -F. -v OFS=. '{$NF += 1 ; print}') >> $GITHUB_ENV + echo RELEASE_BRANCH=$(echo ${{ inputs.version }} | awk -F. -v OFS=. 
'{$NF = "x" ; print}') >> $GITHUB_ENV + fi + + - name: "Ensure current snapshot version matches release version" + run: | + grep -q "version=${{ env.RELEASE_VERSION_WITHOUT_SUFFIX }}-SNAPSHOT" gradle.properties + if [[ $? != 0 ]]; then + echo '❌ Release failed: version in gradle.properties is not a snapshot for release version ${{ inputs.version }}' >> $GITHUB_STEP_SUMMARY + exit 1 + fi + + - name: "Ensure release tag does not already exist" + run: | + if [[ $(git tag -l r${{ env.RELEASE_VERSION }}) == r${{ env.RELEASE_VERSION }} ]]; then + echo '❌ Release failed: tag for version ${{ inputs.version }} already exists' >> $GITHUB_STEP_SUMMARY + exit 1 + fi + + # For patch releases (A.B.C where C != 0), we expect the release to be + # triggered from the A.B.x maintenance branch. We use the release version + # without suffixes to avoid mistakes when making pre-releases + - name: "Fail if patch release is created from wrong release branch" + if: ${{ !endsWith(env.RELEASE_VERSION_WITHOUT_SUFFIX, '.0') && env.RELEASE_BRANCH != github.ref_name }} + run: | + echo '❌ Release failed due to branch mismatch: expected ${{ inputs.version }} to be released from ${{ env.RELEASE_BRANCH }}, got ${{ github.ref_name }}' >> $GITHUB_STEP_SUMMARY + exit 1 + + # For non-patch releases (A.B.C where C == 0), we expect the release to + # be triggered from main or the A.B.x maintenance branch. This includes + # pre-releases for any non-patch releases, e.g. 
5.2.0-beta1 + - name: "Fail if non-patch release is created from wrong release branch" + if: ${{ endsWith(env.RELEASE_VERSION_WITHOUT_SUFFIX, '.0') && env.RELEASE_BRANCH != github.ref_name && github.ref_name != 'main' }} + run: | + echo '❌ Release failed due to branch mismatch: expected ${{ inputs.version }} to be released from ${{ env.RELEASE_BRANCH }} or main, got ${{ github.ref_name }}' >> $GITHUB_STEP_SUMMARY + exit 1 + + # Set commit author information to the user that triggered the release workflow + - name: "Set git author information" + run: | + GITHUB_USER_NAME=$(gh api users/${{ github.actor }} --jq '.name') + GITHUB_USER_ID=$(gh api users/${{ github.actor }} --jq '.id') + git config user.name "${GITHUB_USER_NAME}" + git config user.email "${GITHUB_USER_ID}+${{ github.actor }}@users.noreply.github.com" + + # If a non-patch release is created from a branch other than its + # maintenance branch, create that branch from the current one and push it + # Pre-releases don't have this behaviour, so we can check the full release + # version including stability suffixes to exclude those + - name: "Create new release branch for non-patch release" + if: ${{ endsWith(env.RELEASE_VERSION, '.0') && env.RELEASE_BRANCH != github.ref_name }} + run: | + echo '🆕 Creating new release branch ${{ env.RELEASE_BRANCH }} from ${{ github.ref_name }}' >> $GITHUB_STEP_SUMMARY + git checkout -b ${{ env.RELEASE_BRANCH }} + NEXT_MINOR_VERSION=$(echo "${{ env.RELEASE_VERSION }}" | awk -F. -v OFS=. 
'{$2 += 1 ; $NF = 0 ; print}') + echo "➡️ Bumping version for ${{ github.ref_name }} branch to ${NEXT_MINOR_VERSION}" >> $GITHUB_STEP_SUMMARY + git checkout ${{ github.ref_name }} + .github/workflows/bump-version.sh "${{ env.RELEASE_VERSION_WITHOUT_SUFFIX }}-SNAPSHOT" "${NEXT_MINOR_VERSION}-SNAPSHOT" + git push origin ${{ github.ref_name }} + git checkout ${{ env.RELEASE_BRANCH }} + + # This step bumps version numbers in gradle.properties and creates git artifacts for the release + - name: "Bump version numbers and create release tag" + run: .github/workflows/bump-and-tag.sh "${{ env.RELEASE_VERSION_WITHOUT_SUFFIX }}" "${{ env.RELEASE_VERSION }}" "${{ env.NEXT_VERSION }}" + + - name: "Push release branch and tag" + run: | + git push origin ${{ env.RELEASE_BRANCH }} + git push origin r${{ env.RELEASE_VERSION }} + + - name: "Create draft release with generated changelog" + run: | + if [[ "${{ inputs.version }}" =~ (alpha|beta|rc) ]]; then + PRERELEASE="--prerelease --latest=false" + fi + echo "RELEASE_URL=$(\ + gh release create r${RELEASE_VERSION} \ + ${PRERELEASE} \ + --target ${{ env.RELEASE_BRANCH }} \ + --title "Java Driver ${{ env.RELEASE_VERSION }} ($(date '+%B %d, %Y'))" \ + --generate-notes \ + --draft\ + )" >> "$GITHUB_ENV" + + - name: "Set summary" + run: | + echo '🚀 Created tag and drafted release for version [${{ env.RELEASE_VERSION }}](${{ env.RELEASE_URL }})' >> $GITHUB_STEP_SUMMARY + echo '✍️ You may now update the release notes and publish the release when ready' >> $GITHUB_STEP_SUMMARY diff --git a/.gitignore b/.gitignore index 74cf015aa2c..6398e8490e8 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ # Build artifacts build +target out mongo*.jar @@ -31,13 +32,29 @@ atlassian-ide-plugin.xml # code review codereview.rc +# evergreen +expansion.yml + # local settings **/gradle.properties - -# doc settings -docs/reference/public -docs/landing/public -docs/hugo* +local.properties # jenv -.java-version \ No newline at end of file +.java-version + 
+#sdkman +.sdkmanrc + +# mongocryptd +**/mongocryptd*.pid + +# shell scripts +*.sh +!.evergreen/*.sh + +# security-sensitive files +*.gpg + +# bin build directories +**/bin + diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000000..a9ac62f04bb --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "specifications"] + path = driver-core/src/test/resources/specifications + url = https://github.com/mongodb/specifications diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 596a6c1c5ba..00000000000 --- a/.travis.yml +++ /dev/null @@ -1,44 +0,0 @@ -dist: trusty -sudo: true -language: java -jdk: -- oraclejdk9 - -notifications: - email: - recipients: - - jeff.yemin@mongodb.com - - ross@mongodb.com - on_success: change - on_failure: always - -branches: - only: - - master - -env: - global: - - MONGODB_FILE_NAME=mongodb-linux-x86_64-enterprise-ubuntu1404 - - MONGODB=3.6.1 - -addons: - apt: - packages: - - libsnmp-dev - -install: - - wget http://downloads.mongodb.com/linux/${MONGODB_FILE_NAME}-${MONGODB}.tgz - - tar xzf ${MONGODB_FILE_NAME}-${MONGODB}.tgz - - ${PWD}/${MONGODB_FILE_NAME}-${MONGODB}/bin/mongod --version - -before_script: - - mkdir ${PWD}/${MONGODB_FILE_NAME}-${MONGODB}/data - - ${PWD}/${MONGODB_FILE_NAME}-${MONGODB}/bin/mongod --dbpath ${PWD}/${MONGODB_FILE_NAME}-${MONGODB}/data --logpath ${PWD}/${MONGODB_FILE_NAME}-${MONGODB}/mongodb.log --setParameter enableTestCommands=1 --fork --smallfiles --nojournal - -script: - - ./gradlew -q assemble - - ./gradlew check -Ptravistest=true - - ./gradlew docs - -after_script: - - pkill mongod diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9a2ecd97527..88827db052f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,9 +18,9 @@ Pull Requests Pull requests should generally be made against the master (default) branch and include relevant tests, if applicable. 
Code should compile with the Java 9 compiler and tests should pass under all Java versions which the driver currently -supports. Currently the Java driver supports a minimum version of Java 6. Please run './gradlew test' to confirm. By default, running the +supports. Currently the Java driver supports a minimum version of Java 8. Please run './gradlew test' to confirm. By default, running the tests requires that you start a mongod server on localhost, listening on the default port and configured to run with -[`enableTestCommands`](http://docs.mongodb.org/manual/reference/parameters/#param.enableTestCommands), which may be set with the +[`enableTestCommands`](https://www.mongodb.com/docs/manual/reference/parameters/#param.enableTestCommands), which may be set with the `--setParameter enableTestCommands=1` command-line parameter. At minimum, please test against the latest release version of the MongoDB server. @@ -30,5 +30,11 @@ pull request will not be considered. Talk To Us ---------- -If you want to work on something or have questions / complaints please reach out to us by creating a Question issue at -(https://jira.mongodb.org/secure/CreateIssue!default.jspa). +If you have questions about using the driver, please reach out on the +[MongoDB Community Forums](https://www.mongodb.com/community/forums/tags/c/data/drivers/7/java). + +Thanks to all the people who have already contributed! + + + + diff --git a/README.md b/README.md index e54fe3b533b..ef32f93306b 100644 --- a/README.md +++ b/README.md @@ -2,16 +2,24 @@ Release notes are available [here](https://github.com/mongodb/mongo-java-driver/releases). -## API Documentation: +## Documentation -Javadoc for all major and minor releases is available [here](http://api.mongodb.com/java/). +Reference and API documentation for the Java driver is available [here](https://www.mongodb.com/docs/drivers/java/sync/current/). 
+ +Reference and API documentation for the Kotlin driver is available [here](https://www.mongodb.com/docs/drivers/kotlin/coroutine/current/). + +Reference and API documentation for the Scala driver is available [here](https://www.mongodb.com/docs/languages/scala/scala-driver/current/). + +## Tutorials / Training + +For tutorials on how to use the MongoDB JVM Drivers, please reference [MongoDB University](https://learn.mongodb.com/). Additional tutorials, videos, and code examples using both the Java Driver and the Kotlin Driver can also be found in the [MongoDB Developer Center](https://www.mongodb.com/developer/). ## Support / Feedback -For issues with, questions about, or feedback for the MongoDB Java driver, please look into -our [support channels](http://www.mongodb.org/about/support). Please -do not email any of the Java driver developers directly with issues or -questions - you're more likely to get an answer on the [mongodb-user](http://groups.google.com/group/mongodb-user) list on Google Groups. +For issues with, questions about, or feedback for the MongoDB Java, Kotlin, and Scala drivers, please look into +our [support channels](https://www.mongodb.com/docs/manual/support/). Please +do not email any of the driver developers directly with issues or +questions - you're more likely to get an answer on the [MongoDB Community Forums](https://community.mongodb.com/tags/c/drivers-odms-connectors/7/java-driver) or [StackOverflow](https://stackoverflow.com/questions/tagged/mongodb+java). At a minimum, please include in your description the exact version of the driver that you are using. If you are having connectivity issues, it's often also useful to paste in the line of code where you construct the MongoClient instance, @@ -20,31 +28,32 @@ any connectivity-related exceptions and post those as well. ## Bugs / Feature Requests -Think you’ve found a bug? Want to see a new feature in the Java driver? 
Please open a +Think you’ve found a bug in the Java, Kotlin, or Scala drivers? Want to see a new feature in the drivers? Please open a case in our issue management tool, JIRA: - [Create an account and login](https://jira.mongodb.org). - Navigate to [the JAVA project](https://jira.mongodb.org/browse/JAVA). -- Click **Create Issue** - Please provide as much information as possible about the issue type and how to reproduce it. +- Click **Create Issue** - Please provide as much information as possible about the issue type, which driver you are using, and how to reproduce your issue. Bug reports in JIRA for the driver and the Core Server (i.e. SERVER) project are **public**. If you’ve identified a security vulnerability in a driver or any other -MongoDB project, please report it according to the [instructions here](http://docs.mongodb.org/manual/tutorial/create-a-vulnerability-report). +MongoDB project, please report it according to the [instructions here](https://www.mongodb.com/docs/manual/tutorial/create-a-vulnerability-report). ## Versioning -Major increments (such as 2.x -> 3.x) will occur when break changes are being made to the public API. All methods and -classes removed in a major release will have been deprecated in a prior release of the previous major release branch, and/or otherwise -called out in the release notes. +We follow [semantic versioning](https://semver.org/spec/v2.0.0.html) when releasing. + +#### @Alpha -Minor 3.x increments (such as 3.1, 3.2, etc) will occur when non-trivial new functionality is added or significant enhancements or bug -fixes occur that may have behavioral changes that may affect some edge cases (such as dependence on behavior resulting from a bug). An -example of an enhancement is a method or class added to support new functionality added to the MongoDB server. Minor releases will -almost always be binary compatible with prior minor releases from the same major release branch, exept as noted below. 
+APIs marked with the `@Alpha` annotation are in the early stages of development, subject to incompatible changes, +or even removal, in a future release and may lack some intended features. An API bearing the `@Alpha` annotation may +contain known issues affecting functionality, performance, and stability. They are also exempt from any compatibility +guarantees made by their containing library. -Patch 3.x.y increments (such as 3.0.0 -> 3.0.1, 3.1.1 -> 3.1.2, etc) will occur for bug fixes only and will always be binary compitible +It is inadvisable for applications to use Alpha APIs in production environments or for libraries +(which get included on users' CLASSPATHs, outside the library developers' control) to depend on these APIs. Alpha APIs +are intended for experimental purposes only. #### @Beta @@ -65,84 +74,72 @@ time. ## Binaries Binaries and dependency information for Maven, Gradle, Ivy and others can be found at -[http://search.maven.org](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.mongodb%22%20AND%20a%3A%22mongo-java-driver%22). +[https://central.sonatype.com/search](https://central.sonatype.com/search?namespace=org.mongodb&name=mongodb-driver-sync). Example for Maven: ```xml org.mongodb - mongodb-driver + mongodb-driver-sync x.y.z ``` - -For an all-in-one jar (which embeds the core driver and bson): - -```xml - - org.mongodb - mongo-java-driver - x.y.z - -``` - Snapshot builds are also published regulary via Sonatype. Example for Maven: ```xml - - - sonatype-snapshot - https://oss.sonatype.org/content/repositories/snapshots/ - - + + + Central Portal Snapshots + central-portal-snapshots + https://central.sonatype.com/repository/maven-snapshots/ + + false + + + true + + + ``` -For binaries containing the asynchronous API, see the [driver-async README](driver-async/#binaries). - ## Build -To build and test the driver: +Java 17+ and git are required to build and compile the source.
To build and test the driver: ``` -$ git clone https://github.com/mongodb/mongo-java-driver.git +$ git clone --recurse-submodules https://github.com/mongodb/mongo-java-driver.git $ cd mongo-java-driver $ ./gradlew check ``` -The test suite requires mongod to be running with [`enableTestCommands`](http://docs.mongodb.org/manual/reference/parameters/#param.enableTestCommands), which may be set with the `--setParameter enableTestCommands=1` +The test suite requires mongod to be running with [`enableTestCommands`](https://www.mongodb.com/docs/manual/reference/parameters/#param.enableTestCommands), which may be set with the `--setParameter enableTestCommands=1` command-line parameter: ``` +$ mkdir -p data/db $ mongod --dbpath ./data/db --logpath ./data/mongod.log --port 27017 --logappend --fork --setParameter enableTestCommands=1 ``` If you encounter `"Too many open files"` errors when running the tests then you will need to increase -the number of available file descriptors prior to starting mongod as described in [https://docs.mongodb.com/manual/reference/ulimit/](https://docs.mongodb.com/manual/reference/ulimit/) - -### Build status: - -[![Build Status](https://travis-ci.org/mongodb/mongo-java-driver.svg?branch=master)](https://travis-ci.org/mongodb/mongo-java-driver) - -## Maintainers - -* Jeff Yemin jeff.yemin@mongodb.com -* Ross Lawley ross@mongodb.com - -## Contributors: -* Trisha Gee trisha.gee@gmail.com -* Uladzmir Mihura trnl.me@gmail.com -* Justin Lee justin.lee@mongodb.com -* Craig Wilson craig.wilson@mongodb.com - -Additional contributors can be found [here](https://github.com/mongodb/mongo-java-driver/graphs/contributors). 
+the number of available file descriptors prior to starting mongod as described in [https://www.mongodb.com/docs/manual/reference/ulimit/](https://www.mongodb.com/docs/manual/reference/ulimit/) -## Supporters +## IntelliJ IDEA -YourKit is supporting this open source project with its [YourKit Java Profiler](http://www.yourkit.com/java/profiler/index.jsp). +A couple of manual configuration steps are required to run the code in IntelliJ: -JetBrains is supporting this open source project with: +- Java 17+ is required to build and compile the source. -[![Intellij IDEA](http://www.jetbrains.com/img/logos/logo_intellij_idea.png)](http://www.jetbrains.com/idea/) +- **Error:** `java: cannot find symbol: class SNIHostName location: package javax.net.ssl`
+ **Fix:** Settings/Preferences > Build, Execution, Deployment > Compiler > Java Compiler - untick "Use '--release' option for + cross-compilation (Java 9 and later)" +- **Error:** `java: package com.mongodb.internal.build does not exist`
+ **Fixes:** Any of the following:
+ - Run the `generateBuildConfig` task: eg: `./gradlew generateBuildConfig` or via Gradle > driver-core > Tasks > buildconfig > + generateBuildConfig + - Set `generateBuildConfig` to execute Before Build. via Gradle > Tasks > buildconfig > right click generateBuildConfig - click on + "Execute Before Build" + - Delegate all build actions to Gradle: Settings/Preferences > Build, Execution, Deployment > Build Tools > Gradle > Build and run + using/Run tests using - select "Gradle" diff --git a/THIRD-PARTY-NOTICES b/THIRD-PARTY-NOTICES index a142ed6344f..acca60ca973 100644 --- a/THIRD-PARTY-NOTICES +++ b/THIRD-PARTY-NOTICES @@ -4,7 +4,7 @@ be distributed under licenses different than the MongoDB Java Driver software. In the event that we accidentally failed to list a required notice, please bring it to our attention through any of the ways detailed here: - mongodb-dev@googlegroups.com + https://jira.mongodb.org/browse/JAVA The attached notices are provided for information only. @@ -21,7 +21,7 @@ https://github.com/mongodb/mongo-java-driver. Any republication or derived work distributed in source code form must include this copyright and license notice. -2) The following files: Assertions.java, AbstractCopyOnWriteMap.java, CopyOnWriteMap.java +2) The following files: Assertions.java Copyright (c) 2008-2014 Atlassian Pty Ltd @@ -37,7 +37,10 @@ https://github.com/mongodb/mongo-java-driver. See the License for the specific language governing permissions and limitations under the License. -3) The following files: Beta.java, UnsignedLongs.java, UnsignedLongsTest.java +3) The following files: + + Alpha.java (formerly Beta.java) + Beta.java Copyright 2010 The Guava Authors Copyright 2011 The Guava Authors @@ -54,10 +57,10 @@ https://github.com/mongodb/mongo-java-driver. See the License for the specific language governing permissions and limitations under the License. 
-4) The following files: ReadTimeoutHandler.java +4) The following files: InstantCodec.java, Jsr310CodecProvider.java, LocalDateCodec.java, LocalDateTimeCodec.java, LocalTimeCodec.java Copyright 2008-present MongoDB, Inc. - Copyright 2012 The Netty Project + Copyright 2018 Cezary Bartosiak Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -71,16 +74,86 @@ https://github.com/mongodb/mongo-java-driver. See the License for the specific language governing permissions and limitations under the License. -5) The following files: InstantCodec.java, Jsr310CodecProvider.java, LocalDateCodec.java, LocalDateTimeCodec.java, LocalTimeCodec.java +5) The following files: SaslPrep.java + + Copyright 2008-present MongoDB, Inc. + Copyright 2017 Tom Bentley + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +6) The following files (originally from https://github.com/marianobarrios/tls-channel): + + AsynchronousTlsChannel.java + AsynchronousTlsChannelGroup.java + BufferAllocator.java + BufferHolder.java + ByteBufferSet.java + ByteBufferUtil.java + ClientTlsChannel.java + DirectBufferAllocator.java + DirectBufferDeallocator.java + ExtendedAsynchronousByteChannel.java + HeapBufferAllocator.java + NeedsReadException.java + NeedsTaskException.java + NeedsWriteException.java + ServerTlsChannel.java + SniSslContextFactory.java + TlsChannel.java + TlsChannelBuilder.java + TlsChannelCallbackException.java + TlsChannelFlowControlException.java + TlsChannelImpl.java + TlsExplorer.java + TrackingAllocator.java + Util.java + WouldBlockException.java + + Copyright (c) [2015-2020] all contributors + + MIT License + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+ +7) The following files (originally from https://github.com/google/guava): + + InetAddressUtils.java (formerly InetAddresses.java) + InetAddressUtilsTest.java (formerly InetAddressesTest.java) Copyright 2008-present MongoDB, Inc. - Copyright 2018 Cezary Bartosiak + Copyright (C) 2008 The Guava Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, @@ -88,10 +161,33 @@ https://github.com/mongodb/mongo-java-driver. See the License for the specific language governing permissions and limitations under the License. -6) The following files: SaslPrep.java +8) The following files (originally from https://github.com/Litote/kmongo): + + Filters.kt + Properties.kt + KPropertyPath.kt + FiltersTest.kt + KPropertiesTest.kt + + Copyright 2008-present MongoDB, Inc. + Copyright (C) 2016/2022 Litote + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +9) The following files: BsonCodecUtils.kt Copyright 2008-present MongoDB, Inc. - Copyright 2017 Tom Bentley + Copyright 2017-2021 JetBrains s.r.o. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/bom/build.gradle.kts b/bom/build.gradle.kts new file mode 100644 index 00000000000..806c4f20950 --- /dev/null +++ b/bom/build.gradle.kts @@ -0,0 +1,155 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import ProjectExtensions.configureMavenPublication +import groovy.util.Node +import groovy.util.NodeList + +plugins { + id("java-platform") + id("project.base") + id("conventions.publishing") + id("conventions.spotless") +} + +base.archivesName.set("mongodb-driver-bom") + +dependencies { + constraints { + api(project(":mongodb-crypt")) + api(project(":driver-core")) + api(project(":bson")) + api(project(":bson-record-codec")) + + api(project(":driver-sync")) + api(project(":driver-reactive-streams")) + + api(project(":bson-kotlin")) + api(project(":bson-kotlinx")) + api(project(":driver-kotlin-coroutine")) + api(project(":driver-kotlin-sync")) + api(project(":driver-kotlin-extensions")) + + api(project(":bson-scala")) + api(project(":driver-scala")) + } +} + +/* + * Handle the multiple versions of Scala we support as defined in `gradle.properties` + */ +val defaultScalaVersion: String = project.findProperty("defaultScalaVersion")!!.toString() +val scalaVersions: List? 
= project.findProperty("supportedScalaVersions")?.toString()?.split(",") + +require(!scalaVersions.isNullOrEmpty()) { + "Scala versions must be provided as a comma-separated list in the 'supportedScalaVersions' project property" +} + +scalaVersions?.forEach { version -> + require(version.matches(Regex("\\d\\.\\d{2}"))) { "Scala version '$version' must be in the format X.YY" } +} +/* + * Apply the Java Platform plugin to create the BOM + * Modify the generated POM to include all supported versions of Scala for driver-scala or bson-scala. + */ +configureMavenPublication { + components.findByName("javaPlatform")?.let { from(it) } + + pom { + name.set("bom") + description.set( + "This Bill of Materials POM simplifies dependency management when referencing multiple MongoDB Java Driver artifacts in projects using Gradle or Maven.") + + withXml { + val pomXml: Node = asNode() + + val dependencyManagementNode = pomXml.getNode("dependencyManagement") + require(dependencyManagementNode != null) { + " node not found in the generated BOM POM" + } + val dependenciesNode = dependencyManagementNode.getNode("dependencies") + require(dependenciesNode != null) { " node not found in the generated BOM POM" } + + val existingScalaDeps = + dependenciesNode + .children() + .map { it as Node } + .filter { it.getNode("artifactId")?.text()?.contains("scala") ?: false } + + existingScalaDeps.forEach { + val groupId: String = it.getNode("groupId")!!.text() + val originalArtifactId: String = it.getNode("artifactId")!!.text() + val artifactVersion: String = it.getNode("version")!!.text() + + // Add multiple versions with Scala suffixes for each Scala-related dependency. 
+ scalaVersions!!.forEach { scalaVersion -> + if (scalaVersion != defaultScalaVersion) { + // Replace scala version suffix + val newArtifactId: String = originalArtifactId.replace(defaultScalaVersion, scalaVersion) + val dependencyNode = dependenciesNode.appendNode("dependency") + dependencyNode.appendNode("groupId", groupId) + dependencyNode.appendNode("artifactId", newArtifactId) + dependencyNode.appendNode("version", artifactVersion) + } + } + } + } + } +} + +/* + * Validate the BOM file. + */ +tasks.withType { + pom.withXml { + val pomXml: Node = asNode() + val dependenciesNode = pomXml.getNode("dependencyManagement").getNode("dependencies") + require(dependenciesNode!!.children().isNotEmpty()) { + "BOM must contain more then one element:\n$destination" + } + + dependenciesNode + .children() + .map { it as Node } + .forEach { + val groupId: String = it.getNode("groupId")!!.text() + require(groupId.startsWith("org.mongodb")) { + "BOM must contain only 'org.mongodb' dependencies, but found '$groupId':\n$destination" + } + + /* + * The and tags should be omitted in BOM dependencies. + * This ensures that consuming projects have the flexibility to decide whether a dependency is optional in their context. + * + * The BOM's role is to provide version information, not to dictate inclusion or exclusion of dependencies. + */ + require(it.getNode("scope") == null) { + "BOM must not contain elements in dependency:\n$destination" + } + require(it.getNode("optional") == null) { + "BOM must not contain elements in dependency:\n$destination" + } + } + } +} + +/** A node lookup helper. */ +private fun Node?.getNode(nodeName: String): Node? 
{ + val found = this?.get(nodeName) + if (found is NodeList && found.isNotEmpty()) { + return found[0] as Node + } + return null +} diff --git a/bson-kotlin/build.gradle.kts b/bson-kotlin/build.gradle.kts new file mode 100644 index 00000000000..2cfd4413637 --- /dev/null +++ b/bson-kotlin/build.gradle.kts @@ -0,0 +1,39 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import ProjectExtensions.configureJarManifest +import ProjectExtensions.configureMavenPublication + +plugins { id("project.kotlin") } + +base.archivesName.set("bson-kotlin") + +dependencies { + api(project(path = ":bson", configuration = "default")) + implementation(libs.kotlin.reflect) + + // Test case checks MongoClientSettings.getDefaultCodecRegistry() support + testImplementation(project(path = ":driver-core", configuration = "default")) +} + +configureMavenPublication { + pom { + name.set("BSON Kotlin") + description.set("The BSON Codec for Kotlin") + url.set("https://bsonspec.org") + } +} + +configureJarManifest { attributes["Automatic-Module-Name"] = "org.mongodb.bson.kotlin" } diff --git a/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/ArrayCodec.kt b/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/ArrayCodec.kt new file mode 100644 index 00000000000..10ea90aee1b --- /dev/null +++ b/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/ArrayCodec.kt @@ -0,0 +1,128 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlin + +import java.lang.reflect.ParameterizedType +import java.lang.reflect.Type +import kotlin.reflect.KClass +import org.bson.BsonReader +import org.bson.BsonType +import org.bson.BsonWriter +import org.bson.codecs.Codec +import org.bson.codecs.DecoderContext +import org.bson.codecs.EncoderContext +import org.bson.codecs.configuration.CodecRegistry + +@Suppress("UNCHECKED_CAST") +internal data class ArrayCodec(private val kClass: KClass, private val codec: Codec) : Codec { + + companion object { + internal fun create( + kClass: KClass, + typeArguments: List, + codecRegistry: CodecRegistry + ): Codec { + assert(kClass.javaObjectType.isArray) { "$kClass must be an array type" } + val (valueClass, nestedTypes) = + if (typeArguments.isEmpty()) { + Pair(kClass.java.componentType.kotlin.javaObjectType as Class, emptyList()) + } else { + // Unroll the actual class and any type arguments + when (val pType = typeArguments[0]) { + is Class<*> -> Pair(pType as Class, emptyList()) + is ParameterizedType -> Pair(pType.rawType as Class, pType.actualTypeArguments.toList()) + else -> Pair(Object::class.java as Class, emptyList()) + } + } + val codec = + if (nestedTypes.isEmpty()) codecRegistry.get(valueClass) else codecRegistry.get(valueClass, nestedTypes) + return ArrayCodec(kClass, codec) + } + } + + private val isPrimitiveArray = kClass.java.componentType != 
kClass.java.componentType.kotlin.javaObjectType + + override fun encode(writer: BsonWriter, arrayValue: R, encoderContext: EncoderContext) { + writer.writeStartArray() + + boxed(arrayValue).forEach { + if (it == null) writer.writeNull() else encoderContext.encodeWithChildContext(codec, writer, it) + } + + writer.writeEndArray() + } + + override fun getEncoderClass(): Class = kClass.java + + override fun decode(reader: BsonReader, decoderContext: DecoderContext): R { + reader.readStartArray() + val data = ArrayList() + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + if (reader.currentBsonType == BsonType.NULL) { + reader.readNull() + data.add(null) + } else { + data.add(decoderContext.decodeWithChildContext(codec, reader)) + } + } + reader.readEndArray() + return unboxed(data) + } + + fun boxed(arrayValue: R): Iterable { + val boxedValue = + if (!isPrimitiveArray) { + (arrayValue as Array).asIterable() + } else if (arrayValue is BooleanArray) { + arrayValue.asIterable() + } else if (arrayValue is ByteArray) { + arrayValue.asIterable() + } else if (arrayValue is CharArray) { + arrayValue.asIterable() + } else if (arrayValue is DoubleArray) { + arrayValue.asIterable() + } else if (arrayValue is FloatArray) { + arrayValue.asIterable() + } else if (arrayValue is IntArray) { + arrayValue.asIterable() + } else if (arrayValue is LongArray) { + arrayValue.asIterable() + } else if (arrayValue is ShortArray) { + arrayValue.asIterable() + } else { + throw IllegalArgumentException("Unsupported array type ${arrayValue.javaClass}") + } + return boxedValue as Iterable + } + + private fun unboxed(data: ArrayList): R { + return when (kClass) { + BooleanArray::class -> (data as ArrayList).toBooleanArray() as R + ByteArray::class -> (data as ArrayList).toByteArray() as R + CharArray::class -> (data as ArrayList).toCharArray() as R + DoubleArray::class -> (data as ArrayList).toDoubleArray() as R + FloatArray::class -> (data as ArrayList).toFloatArray() as R + 
IntArray::class -> (data as ArrayList).toIntArray() as R + LongArray::class -> (data as ArrayList).toLongArray() as R + ShortArray::class -> (data as ArrayList).toShortArray() as R + else -> data.toArray(arrayOfNulls(data.size)) as R + } + } + + private fun arrayOfNulls(size: Int): Array { + return java.lang.reflect.Array.newInstance(codec.encoderClass, size) as Array + } +} diff --git a/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/ArrayCodecProvider.kt b/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/ArrayCodecProvider.kt new file mode 100644 index 00000000000..eccb5b88b27 --- /dev/null +++ b/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/ArrayCodecProvider.kt @@ -0,0 +1,31 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlin + +import java.lang.reflect.Type +import org.bson.codecs.Codec +import org.bson.codecs.configuration.CodecProvider +import org.bson.codecs.configuration.CodecRegistry + +/** A Kotlin reflection based Codec Provider for data classes */ +public class ArrayCodecProvider : CodecProvider { + override fun get(clazz: Class, registry: CodecRegistry): Codec? = get(clazz, emptyList(), registry) + + override fun get(clazz: Class, typeArguments: List, registry: CodecRegistry): Codec? 
= + if (clazz.isArray) { + ArrayCodec.create(clazz.kotlin, typeArguments, registry) + } else null +} diff --git a/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/DataClassCodec.kt b/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/DataClassCodec.kt new file mode 100644 index 00000000000..85e705cb8c0 --- /dev/null +++ b/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/DataClassCodec.kt @@ -0,0 +1,263 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlin + +import java.lang.reflect.ParameterizedType +import java.lang.reflect.Type +import kotlin.reflect.KClass +import kotlin.reflect.KClassifier +import kotlin.reflect.KFunction +import kotlin.reflect.KParameter +import kotlin.reflect.KProperty1 +import kotlin.reflect.KTypeParameter +import kotlin.reflect.KTypeProjection +import kotlin.reflect.full.createType +import kotlin.reflect.full.findAnnotation +import kotlin.reflect.full.findAnnotations +import kotlin.reflect.full.hasAnnotation +import kotlin.reflect.full.primaryConstructor +import kotlin.reflect.jvm.javaType +import kotlin.reflect.jvm.jvmErasure +import org.bson.BsonReader +import org.bson.BsonType +import org.bson.BsonWriter +import org.bson.codecs.Codec +import org.bson.codecs.DecoderContext +import org.bson.codecs.EncoderContext +import org.bson.codecs.RepresentationConfigurable +import org.bson.codecs.configuration.CodecConfigurationException +import org.bson.codecs.configuration.CodecRegistry +import org.bson.codecs.pojo.annotations.BsonCreator +import org.bson.codecs.pojo.annotations.BsonDiscriminator +import org.bson.codecs.pojo.annotations.BsonExtraElements +import org.bson.codecs.pojo.annotations.BsonId +import org.bson.codecs.pojo.annotations.BsonIgnore +import org.bson.codecs.pojo.annotations.BsonProperty +import org.bson.codecs.pojo.annotations.BsonRepresentation +import org.bson.diagnostics.Loggers + +internal data class DataClassCodec( + private val kClass: KClass, + private val primaryConstructor: KFunction, + private val propertyModels: List, +) : Codec { + + private val fieldNamePropertyModelMap = propertyModels.associateBy { it.fieldName } + private val propertyModelId: PropertyModel? 
= fieldNamePropertyModelMap[idFieldName] + + data class PropertyModel(val param: KParameter, val fieldName: String, val codec: Codec) + + override fun encode(writer: BsonWriter, value: T, encoderContext: EncoderContext) { + writer.writeStartDocument() + if (propertyModelId != null) { + encodeProperty(propertyModelId, value, writer, encoderContext) + } + propertyModels + .filter { it != propertyModelId } + .forEach { propertyModel -> encodeProperty(propertyModel, value, writer, encoderContext) } + writer.writeEndDocument() + } + + override fun getEncoderClass(): Class = kClass.java + + @Suppress("TooGenericExceptionCaught") + override fun decode(reader: BsonReader, decoderContext: DecoderContext): T { + val args: MutableMap = mutableMapOf() + fieldNamePropertyModelMap.values.forEach { args[it.param] = null } + + reader.readStartDocument() + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + val fieldName = reader.readName() + val propertyModel = fieldNamePropertyModelMap[fieldName] + if (propertyModel == null) { + reader.skipValue() + if (logger.isTraceEnabled) { + logger.trace("Found property not present in the DataClass: $fieldName") + } + } else if (propertyModel.param.type.isMarkedNullable && reader.currentBsonType == BsonType.NULL) { + reader.readNull() + } else { + try { + args[propertyModel.param] = decoderContext.decodeWithChildContext(propertyModel.codec, reader) + } catch (e: Exception) { + throw CodecConfigurationException( + "Unable to decode $fieldName for ${kClass.simpleName} data class.", e) + } + } + } + reader.readEndDocument() + + try { + return primaryConstructor.callBy(args) + } catch (e: Exception) { + throw CodecConfigurationException( + "Unable to invoke primary constructor of ${kClass.simpleName} data class", e) + } + } + + @Suppress("UNCHECKED_CAST") + private fun encodeProperty( + propertyModel: PropertyModel, + value: T, + writer: BsonWriter, + encoderContext: EncoderContext + ) { + value::class + .members + .firstOrNull { 
it.name == propertyModel.param.name } + ?.let { + val propertyValue = (it as KProperty1).get(value) + propertyValue?.let { pValue -> + writer.writeName(propertyModel.fieldName) + encoderContext.encodeWithChildContext(propertyModel.codec, writer, pValue) + } + } + } + + companion object { + + internal val logger = Loggers.getLogger("DataClassCodec") + private const val idFieldName = "_id" + + internal fun create( + kClass: KClass, + codecRegistry: CodecRegistry, + types: List = emptyList() + ): Codec? { + return if (kClass.isData) { + validateAnnotations(kClass) + val primaryConstructor = + kClass.primaryConstructor ?: throw CodecConfigurationException("No primary constructor for $kClass") + val typeMap = + types + .mapIndexed { i, k -> primaryConstructor.typeParameters[i].createType().classifier!! to k } + .toMap() + + val propertyModels = + primaryConstructor.parameters.map { kParameter -> + PropertyModel( + kParameter, computeFieldName(kParameter), getCodec(kParameter, typeMap, codecRegistry)) + } + return DataClassCodec(kClass, primaryConstructor, propertyModels) + } else { + null + } + } + + private fun validateAnnotations(kClass: KClass) { + codecConfigurationRequires(kClass.findAnnotation() == null) { + """Annotation 'BsonDiscriminator' is not supported on kotlin data classes, + | but found on ${kClass.simpleName}.""" + .trimMargin() + } + + codecConfigurationRequires(kClass.constructors.all { it.findAnnotations().isEmpty() }) { + """Annotation 'BsonCreator' is not supported on kotlin data classes, + | but found in ${kClass.simpleName}.""" + .trimMargin() + } + + kClass.primaryConstructor?.parameters?.map { param -> + codecConfigurationRequires(param.findAnnotations().isEmpty()) { + """Annotation 'BsonIgnore' is not supported in kotlin data classes, + | found on the parameter for ${param.name}.""" + .trimMargin() + } + codecConfigurationRequires(param.findAnnotations().isEmpty()) { + """Annotation 'BsonExtraElements' is not supported in kotlin data classes, + 
| found on the parameter for ${param.name}.""" + .trimMargin() + } + } + } + + private fun computeFieldName(parameter: KParameter): String { + return if (parameter.hasAnnotation()) { + idFieldName + } else { + parameter.findAnnotation()?.value ?: requireNotNull(parameter.name) + } + } + + @Suppress("UNCHECKED_CAST") + private fun getCodec( + kParameter: KParameter, + typeMap: Map, + codecRegistry: CodecRegistry + ): Codec { + return when (kParameter.type.classifier) { + is KClass<*> -> { + codecRegistry.getCodec( + kParameter, + (kParameter.type.classifier as KClass).javaObjectType, + kParameter.type.arguments + .mapNotNull { typeMap[it.type?.classifier] ?: computeJavaType(it) } + .toList()) + } + is KTypeParameter -> { + when (val pType = typeMap[kParameter.type.classifier] ?: kParameter.type.javaType) { + is Class<*> -> + codecRegistry.getCodec(kParameter, (pType as Class).kotlin.java, emptyList()) + is ParameterizedType -> + codecRegistry.getCodec( + kParameter, + (pType.rawType as Class).kotlin.javaObjectType, + pType.actualTypeArguments.toList()) + else -> null + } + } + else -> null + } + ?: throw CodecConfigurationException( + "Could not find codec for ${kParameter.name} with type ${kParameter.type}") + } + + private fun computeJavaType(kTypeProjection: KTypeProjection): Type? { + val javaType: Type = kTypeProjection.type?.javaType!! 
+ return if (javaType == Any::class.java) { + kTypeProjection.type?.jvmErasure?.javaObjectType + } else javaType + } + + @Suppress("UNCHECKED_CAST") + private fun CodecRegistry.getCodec(kParameter: KParameter, clazz: Class, types: List): Codec { + val codec = + if (clazz.isArray) { + ArrayCodec.create(clazz.kotlin, types, this) + } else if (types.isEmpty()) { + this.get(clazz) + } else { + this.get(clazz, types) + } + + return kParameter.findAnnotation()?.let { + if (codec !is RepresentationConfigurable<*>) { + throw CodecConfigurationException( + "Codec for `${kParameter.name}` must implement RepresentationConfigurable" + + " to supportBsonRepresentation") + } + codec.withRepresentation(it.value) as Codec + } + ?: codec + } + + private fun codecConfigurationRequires(value: Boolean, lazyMessage: () -> String) { + if (!value) { + throw CodecConfigurationException(lazyMessage.invoke()) + } + } + } +} diff --git a/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/DataClassCodecProvider.kt b/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/DataClassCodecProvider.kt new file mode 100644 index 00000000000..962741033e1 --- /dev/null +++ b/bson-kotlin/src/main/kotlin/org/bson/codecs/kotlin/DataClassCodecProvider.kt @@ -0,0 +1,29 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlin + +import java.lang.reflect.Type +import org.bson.codecs.Codec +import org.bson.codecs.configuration.CodecProvider +import org.bson.codecs.configuration.CodecRegistry + +/** A Kotlin reflection based Codec Provider for data classes */ +public class DataClassCodecProvider : CodecProvider { + override fun get(clazz: Class, registry: CodecRegistry): Codec? = get(clazz, emptyList(), registry) + + override fun get(clazz: Class, typeArguments: List, registry: CodecRegistry): Codec? = + DataClassCodec.create(clazz.kotlin, registry, typeArguments) +} diff --git a/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/DataClassCodecProviderTest.kt b/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/DataClassCodecProviderTest.kt new file mode 100644 index 00000000000..7b9e0bbb2ba --- /dev/null +++ b/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/DataClassCodecProviderTest.kt @@ -0,0 +1,92 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlin + +import com.mongodb.MongoClientSettings +import kotlin.test.assertEquals +import kotlin.test.assertNotNull +import kotlin.test.assertNull +import kotlin.test.assertTrue +import kotlin.time.Duration +import org.bson.BsonReader +import org.bson.BsonWriter +import org.bson.codecs.Codec +import org.bson.codecs.DecoderContext +import org.bson.codecs.EncoderContext +import org.bson.codecs.configuration.CodecConfigurationException +import org.bson.codecs.configuration.CodecRegistries.fromCodecs +import org.bson.codecs.configuration.CodecRegistries.fromProviders +import org.bson.codecs.configuration.CodecRegistries.fromRegistries +import org.bson.codecs.kotlin.samples.DataClassParameterized +import org.bson.codecs.kotlin.samples.DataClassWithJVMErasure +import org.bson.codecs.kotlin.samples.DataClassWithSimpleValues +import org.bson.conversions.Bson +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows + +class DataClassCodecProviderTest { + + @Test + fun shouldReturnNullForNonDataClass() { + assertNull(DataClassCodecProvider().get(String::class.java, Bson.DEFAULT_CODEC_REGISTRY)) + } + + @Test + fun shouldReturnDataClassCodecForDataClass() { + val provider = DataClassCodecProvider() + val codec = provider.get(DataClassWithSimpleValues::class.java, Bson.DEFAULT_CODEC_REGISTRY) + + assertNotNull(codec) + assertTrue { codec is DataClassCodec } + assertEquals(DataClassWithSimpleValues::class.java, codec.encoderClass) + } + + @Test + fun shouldRequireTypeArgumentsForDataClassParameterized() { + assertThrows { + DataClassCodecProvider().get(DataClassParameterized::class.java, Bson.DEFAULT_CODEC_REGISTRY) + } + } + + @Test + fun shouldReturnDataClassCodecUsingDefaultRegistry() { + val codec = MongoClientSettings.getDefaultCodecRegistry().get(DataClassWithSimpleValues::class.java) + + assertNotNull(codec) + assertTrue { codec is DataClassCodec } + 
assertEquals(DataClassWithSimpleValues::class.java, codec.encoderClass) + } + + @Test + fun shouldBeAbleHandleDataClassWithJVMErasure() { + + class DurationCodec : Codec { + override fun encode(writer: BsonWriter, value: Duration, encoderContext: EncoderContext) = TODO() + override fun getEncoderClass(): Class = Duration::class.java + override fun decode(reader: BsonReader, decoderContext: DecoderContext): Duration = TODO() + } + + val registry = + fromRegistries( + fromCodecs(DurationCodec()), fromProviders(DataClassCodecProvider()), Bson.DEFAULT_CODEC_REGISTRY) + + val codec = assertDoesNotThrow { registry.get(DataClassWithJVMErasure::class.java) } + assertNotNull(codec) + assertTrue { codec is DataClassCodec } + assertEquals(DataClassWithJVMErasure::class.java, codec.encoderClass) + } +} diff --git a/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/DataClassCodecTest.kt b/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/DataClassCodecTest.kt new file mode 100644 index 00000000000..c203a5d2358 --- /dev/null +++ b/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/DataClassCodecTest.kt @@ -0,0 +1,593 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlin + +import kotlin.test.assertEquals +import org.bson.BsonDocument +import org.bson.BsonDocumentReader +import org.bson.BsonDocumentWriter +import org.bson.codecs.DecoderContext +import org.bson.codecs.EncoderContext +import org.bson.codecs.configuration.CodecConfigurationException +import org.bson.codecs.configuration.CodecRegistries.fromProviders +import org.bson.codecs.kotlin.samples.Box +import org.bson.codecs.kotlin.samples.DataClassEmbedded +import org.bson.codecs.kotlin.samples.DataClassLastItemDefaultsToNull +import org.bson.codecs.kotlin.samples.DataClassListOfDataClasses +import org.bson.codecs.kotlin.samples.DataClassListOfListOfDataClasses +import org.bson.codecs.kotlin.samples.DataClassListOfSealed +import org.bson.codecs.kotlin.samples.DataClassMapOfDataClasses +import org.bson.codecs.kotlin.samples.DataClassMapOfListOfDataClasses +import org.bson.codecs.kotlin.samples.DataClassNestedParameterizedTypes +import org.bson.codecs.kotlin.samples.DataClassParameterized +import org.bson.codecs.kotlin.samples.DataClassSealedA +import org.bson.codecs.kotlin.samples.DataClassSealedB +import org.bson.codecs.kotlin.samples.DataClassSealedC +import org.bson.codecs.kotlin.samples.DataClassSelfReferential +import org.bson.codecs.kotlin.samples.DataClassWithArrays +import org.bson.codecs.kotlin.samples.DataClassWithBooleanMapKey +import org.bson.codecs.kotlin.samples.DataClassWithBsonConstructor +import org.bson.codecs.kotlin.samples.DataClassWithBsonDiscriminator +import org.bson.codecs.kotlin.samples.DataClassWithBsonExtraElements +import org.bson.codecs.kotlin.samples.DataClassWithBsonId +import org.bson.codecs.kotlin.samples.DataClassWithBsonIgnore +import org.bson.codecs.kotlin.samples.DataClassWithBsonProperty +import org.bson.codecs.kotlin.samples.DataClassWithCollections +import org.bson.codecs.kotlin.samples.DataClassWithDataClassMapKey +import org.bson.codecs.kotlin.samples.DataClassWithDefaults +import 
org.bson.codecs.kotlin.samples.DataClassWithEmbedded +import org.bson.codecs.kotlin.samples.DataClassWithEnum +import org.bson.codecs.kotlin.samples.DataClassWithEnumMapKey +import org.bson.codecs.kotlin.samples.DataClassWithFailingInit +import org.bson.codecs.kotlin.samples.DataClassWithInvalidBsonRepresentation +import org.bson.codecs.kotlin.samples.DataClassWithListThatLastItemDefaultsToNull +import org.bson.codecs.kotlin.samples.DataClassWithMutableList +import org.bson.codecs.kotlin.samples.DataClassWithMutableMap +import org.bson.codecs.kotlin.samples.DataClassWithMutableSet +import org.bson.codecs.kotlin.samples.DataClassWithNativeArrays +import org.bson.codecs.kotlin.samples.DataClassWithNestedParameterized +import org.bson.codecs.kotlin.samples.DataClassWithNestedParameterizedDataClass +import org.bson.codecs.kotlin.samples.DataClassWithNullableGeneric +import org.bson.codecs.kotlin.samples.DataClassWithNulls +import org.bson.codecs.kotlin.samples.DataClassWithObjectIdAndBsonDocument +import org.bson.codecs.kotlin.samples.DataClassWithPair +import org.bson.codecs.kotlin.samples.DataClassWithParameterizedDataClass +import org.bson.codecs.kotlin.samples.DataClassWithSequence +import org.bson.codecs.kotlin.samples.DataClassWithSimpleValues +import org.bson.codecs.kotlin.samples.DataClassWithTriple +import org.bson.codecs.kotlin.samples.Key +import org.bson.conversions.Bson +import org.bson.types.ObjectId +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows + +class DataClassCodecTest { + private val numberLong = "\$numberLong" + private val emptyDocument = "{}" + + @Test + fun testDataClassWithSimpleValues() { + val expected = + """{"char": "c", "byte": 0, "short": 1, "int": 22, "long": {"$numberLong": "42"}, "float": 4.0, + | "double": 4.2, "boolean": true, "string": "String"}""" + .trimMargin() + val dataClass = DataClassWithSimpleValues('c', 0, 1, 22, 42L, 4.0f, 4.2, true, "String") + + assertRoundTrips(expected, dataClass) + } + + 
@Test + fun testDataClassWithComplexTypes() { + val expected = + """{ + | "listSimple": ["a", "b", "c", "d"], + | "listList": [["a", "b"], [], ["c", "d"]], + | "listMap": [{"a": 1, "b": 2}, {}, {"c": 3, "d": 4}], + | "mapSimple": {"a": 1, "b": 2, "c": 3, "d": 4}, + | "mapList": {"a": ["a", "b"], "b": [], "c": ["c", "d"]}, + | "mapMap" : {"a": {"a": 1, "b": 2}, "b": {}, "c": {"c": 3, "d": 4}} + |}""" + .trimMargin() + + val dataClass = + DataClassWithCollections( + listOf("a", "b", "c", "d"), + listOf(listOf("a", "b"), emptyList(), listOf("c", "d")), + listOf(mapOf("a" to 1, "b" to 2), emptyMap(), mapOf("c" to 3, "d" to 4)), + mapOf("a" to 1, "b" to 2, "c" to 3, "d" to 4), + mapOf("a" to listOf("a", "b"), "b" to emptyList(), "c" to listOf("c", "d")), + mapOf("a" to mapOf("a" to 1, "b" to 2), "b" to emptyMap(), "c" to mapOf("c" to 3, "d" to 4))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithArrays() { + val expected = + """{ + | "arraySimple": ["a", "b", "c", "d"], + | "nestedArrays": [["e", "f"], [], ["g", "h"]], + | "arrayOfMaps": [{"A": ["aa"], "B": ["bb"]}, {}, {"C": ["cc", "ccc"]}], + |}""" + .trimMargin() + + val dataClass = + DataClassWithArrays( + arrayOf("a", "b", "c", "d"), + arrayOf(arrayOf("e", "f"), emptyArray(), arrayOf("g", "h")), + arrayOf( + mapOf("A" to arrayOf("aa"), "B" to arrayOf("bb")), emptyMap(), mapOf("C" to arrayOf("cc", "ccc")))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithNativeArrays() { + val expected = + """{ + | "booleanArray": [true, false], + | "byteArray": [1, 2], + | "charArray": ["a", "b"], + | "doubleArray": [ 1.1, 2.2, 3.3], + | "floatArray": [1.0, 2.0, 3.0], + | "intArray": [10, 20, 30, 40], + | "longArray": [{ "$numberLong": "111" }, { "$numberLong": "222" }, { "$numberLong": "333" }], + | "shortArray": [1, 2, 3], + | "listOfArrays": [[true, false], [false, true]], + | "mapOfArrays": {"A": [1, 2], "B":[], "C": [3, 4]} + |}""" + .trimMargin() + + val dataClass 
= + DataClassWithNativeArrays( + booleanArrayOf(true, false), + byteArrayOf(1, 2), + charArrayOf('a', 'b'), + doubleArrayOf(1.1, 2.2, 3.3), + floatArrayOf(1.0f, 2.0f, 3.0f), + intArrayOf(10, 20, 30, 40), + longArrayOf(111, 222, 333), + shortArrayOf(1, 2, 3), + listOf(booleanArrayOf(true, false), booleanArrayOf(false, true)), + mapOf(Pair("A", intArrayOf(1, 2)), Pair("B", intArrayOf()), Pair("C", intArrayOf(3, 4)))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithDefaults() { + val expectedDefault = + """{ + | "boolean": false, + | "string": "String", + | "listSimple": ["a", "b", "c"] + |}""" + .trimMargin() + + val defaultDataClass = DataClassWithDefaults() + assertRoundTrips(expectedDefault, defaultDataClass) + } + + @Test + fun testDataClassWithNulls() { + val dataClass = DataClassWithNulls(null, null, null) + assertRoundTrips(emptyDocument, dataClass) + + val withStoredNulls = BsonDocument.parse("""{"boolean": null, "string": null, "listSimple": null}""") + assertDecodesTo(withStoredNulls, dataClass) + } + + @Test + fun testDataClassWithListThatLastItemDefaultsToNull() { + val expected = + """{ + | "elements": [{"required": "required"}, {"required": "required"}], + |}""" + .trimMargin() + + val dataClass = + DataClassWithListThatLastItemDefaultsToNull( + listOf(DataClassLastItemDefaultsToNull("required"), DataClassLastItemDefaultsToNull("required"))) + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithNullableGenericsNotNull() { + val expected = + """{ + | "box": {"boxed": "String"} + |}""" + .trimMargin() + + val dataClass = DataClassWithNullableGeneric(Box("String")) + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithNullableGenericsNull() { + val expected = """{"box": {}}""" + val dataClass = DataClassWithNullableGeneric(Box(null)) + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassSelfReferential() { + val expected = + """{"name": "tree", + | "left": 
{"name": "L", "left": {"name": "LL"}, "right": {"name": "LR"}}, + | "right": {"name": "R", + | "left": {"name": "RL", + | "left": {"name": "RLL"}, + | "right": {"name": "RLR"}}, + | "right": {"name": "RR"}} + |}""" + .trimMargin() + val dataClass = + DataClassSelfReferential( + "tree", + DataClassSelfReferential("L", DataClassSelfReferential("LL"), DataClassSelfReferential("LR")), + DataClassSelfReferential( + "R", + DataClassSelfReferential("RL", DataClassSelfReferential("RLL"), DataClassSelfReferential("RLR")), + DataClassSelfReferential("RR"))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithEmbedded() { + val expected = """{"id": "myId", "embedded": {"name": "embedded1"}}""" + val dataClass = DataClassWithEmbedded("myId", DataClassEmbedded("embedded1")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassListOfDataClasses() { + val expected = """{"id": "myId", "nested": [{"name": "embedded1"}, {"name": "embedded2"}]}""" + val dataClass = + DataClassListOfDataClasses("myId", listOf(DataClassEmbedded("embedded1"), DataClassEmbedded("embedded2"))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassListOfListOfDataClasses() { + val expected = """{"id": "myId", "nested": [[{"name": "embedded1"}], [{"name": "embedded2"}]]}""" + val dataClass = + DataClassListOfListOfDataClasses( + "myId", listOf(listOf(DataClassEmbedded("embedded1")), listOf(DataClassEmbedded("embedded2")))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassMapOfDataClasses() { + val expected = """{"id": "myId", "nested": {"first": {"name": "embedded1"}, "second": {"name": "embedded2"}}}""" + val dataClass = + DataClassMapOfDataClasses( + "myId", mapOf("first" to DataClassEmbedded("embedded1"), "second" to DataClassEmbedded("embedded2"))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassMapOfListOfDataClasses() { + val expected = + """{"id": "myId", "nested": {"first": 
[{"name": "embedded1"}], "second": [{"name": "embedded2"}]}}""" + val dataClass = + DataClassMapOfListOfDataClasses( + "myId", + mapOf( + "first" to listOf(DataClassEmbedded("embedded1")), + "second" to listOf(DataClassEmbedded("embedded2")))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithParameterizedDataClass() { + val expected = + """{"id": "myId", + | "parameterizedDataClass": {"number": 2.0, "string": "myString", + | "parameterizedList": [{"name": "embedded1"}]} + |}""" + .trimMargin() + val dataClass = + DataClassWithParameterizedDataClass( + "myId", DataClassParameterized(2.0, "myString", listOf(DataClassEmbedded("embedded1")))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithNestedParameterizedDataClass() { + val expected = + """{"id": "myId", + |"nestedParameterized": { + | "parameterizedDataClass": + | {"number": 4.2, "string": "myString", "parameterizedList": [{"name": "embedded1"}]}, + | "other": "myOtherString", "optionalOther": "myOptionalOtherString" + | } + |}""" + .trimMargin() + val dataClass = + DataClassWithNestedParameterizedDataClass( + "myId", + DataClassWithNestedParameterized( + DataClassParameterized(4.2, "myString", listOf(DataClassEmbedded("embedded1"))), + "myOtherString", + "myOptionalOtherString")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithPair() { + val expected = """{"pair": {"first": "a", "second": 1}}""" + val dataClass = DataClassWithPair("a" to 1) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithTriple() { + val expected = """{"triple": {"first": "a", "second": 1, "third": 2.1}}""" + val dataClass = DataClassWithTriple(Triple("a", 1, 2.1)) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassNestedParameterizedTypes() { + val expected = + """{ + |"triple": { + | "first": "0", + | "second": {"first": 1, "second": {"first": 1.2, "second": {"first": "1.3", "second": 1.3}}}, + | 
"third": {"first": 2, "second": {"first": 2.1, "second": "two dot two"}, + | "third": {"first": "3.1", "second": {"first": 3.2, "second": "three dot two" }, + | "third": 3.3 }} + | } + |}""" + .trimMargin() + val dataClass = + DataClassNestedParameterizedTypes( + Triple( + "0", + Pair(1, Pair(1.2, Pair("1.3", 1.3))), + Triple(2, Pair(2.1, "two dot two"), Triple("3.1", Pair(3.2, "three dot two"), 3.3)))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithMutableList() { + val expected = """{"value": ["A", "B", "C"]}""" + val dataClass = DataClassWithMutableList(mutableListOf("A", "B", "C")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithMutableSet() { + val expected = """{"value": ["A", "B", "C"]}""" + val dataClass = DataClassWithMutableSet(mutableSetOf("A", "B", "C")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithMutableMap() { + val expected = """{"value": {"a": "A", "b": "B", "c": "C"}}""" + val dataClass = DataClassWithMutableMap(mutableMapOf("a" to "A", "b" to "B", "c" to "C")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithEnum() { + val expected = """{"value": "A"}""" + + val dataClass = DataClassWithEnum(Key.A) + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithEnumKeyMap() { + assertThrows("Unsupported map key") { + DataClassCodec.create(DataClassWithEnumMapKey::class, registry()) + } + } + + @Test + fun testDataClassWithSequence() { + assertThrows("Unsupported type Sequence") { + DataClassCodec.create(DataClassWithSequence::class, registry()) + } + } + + @Test + fun testDataClassWithBooleanKeyMap() { + assertThrows("Unsupported Map key type") { + DataClassCodec.create(DataClassWithBooleanMapKey::class, registry()) + } + } + + @Test + fun testDataClassWithDataClassKeyMap() { + assertThrows("Unsupported Map key type") { + DataClassCodec.create(DataClassWithDataClassMapKey::class, registry()) + } + } + + @Test 
+ fun testDataClassEmbeddedWithExtraData() { + val expected = + """{ + | "extraA": "extraA", + | "name": "NAME", + | "extraB": "extraB" + |}""" + .trimMargin() + + val dataClass = DataClassEmbedded("NAME") + assertDecodesTo(BsonDocument.parse(expected), dataClass) + } + + @Test + fun testDataClassWithObjectIdAndBsonDocument() { + val subDocument = + """{ + | "_id": 1, + | "arrayEmpty": [], + | "arraySimple": [{"${'$'}numberInt": "1"}, {"${'$'}numberInt": "2"}, {"${'$'}numberInt": "3"}], + | "arrayComplex": [{"a": {"${'$'}numberInt": "1"}}, {"a": {"${'$'}numberInt": "2"}}], + | "arrayMixedTypes": [{"${'$'}numberInt": "1"}, {"${'$'}numberInt": "2"}, true, + | [{"${'$'}numberInt": "1"}, {"${'$'}numberInt": "2"}, {"${'$'}numberInt": "3"}], + | {"a": {"${'$'}numberInt": "2"}}], + | "arrayComplexMixedTypes": [{"a": {"${'$'}numberInt": "1"}}, {"a": "a"}], + | "binary": {"${'$'}binary": {"base64": "S2Fma2Egcm9ja3Mh", "subType": "00"}}, + | "boolean": true, + | "code": {"${'$'}code": "int i = 0;"}, + | "codeWithScope": {"${'$'}code": "int x = y", "${'$'}scope": {"y": {"${'$'}numberInt": "1"}}}, + | "dateTime": {"${'$'}date": {"${'$'}numberLong": "1577836801000"}}, + | "decimal128": {"${'$'}numberDecimal": "1.0"}, + | "documentEmpty": {}, + | "document": {"a": {"${'$'}numberInt": "1"}}, + | "double": {"${'$'}numberDouble": "62.0"}, + | "int32": {"${'$'}numberInt": "42"}, + | "int64": {"${'$'}numberLong": "52"}, + | "maxKey": {"${'$'}maxKey": 1}, + | "minKey": {"${'$'}minKey": 1}, + | "null": null, + | "objectId": {"${'$'}oid": "5f3d1bbde0ca4d2829c91e1d"}, + | "regex": {"${'$'}regularExpression": {"pattern": "^test.*regex.*xyz$", "options": "i"}}, + | "string": "the fox ...", + | "symbol": {"${'$'}symbol": "ruby stuff"}, + | "timestamp": {"${'$'}timestamp": {"t": 305419896, "i": 5}}, + | "undefined": {"${'$'}undefined": true} + | }""" + .trimMargin() + val expected = """{"objectId": {"${'$'}oid": "111111111111111111111111"}, "bsonDocument": $subDocument}""" + + val dataClass 
= + DataClassWithObjectIdAndBsonDocument(ObjectId("111111111111111111111111"), BsonDocument.parse(subDocument)) + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassSealed() { + val dataClassA = DataClassSealedA("string") + val dataClassB = DataClassSealedB(1) + val dataClassC = DataClassSealedC("String") + + val expectedDataClassSealedA = """{"a": "string"}""" + assertRoundTrips(expectedDataClassSealedA, dataClassA) + + val expectedDataClassSealedB = """{"b": 1}""" + assertRoundTrips(expectedDataClassSealedB, dataClassB) + + val expectedDataClassSealedC = """{"c": "String"}""" + assertRoundTrips(expectedDataClassSealedC, dataClassC) + + assertThrows("No Codec for DataClassSealed") { + DataClassCodec.create(DataClassListOfSealed::class, registry()) + } + } + + @Test + fun testDataFailures() { + assertThrows("Missing data") { + val codec = DataClassCodec.create(DataClassWithSimpleValues::class, registry()) + codec?.decode(BsonDocumentReader(BsonDocument()), DecoderContext.builder().build()) + } + + assertThrows("Invalid types") { + val data = + BsonDocument.parse( + """{"char": 123, "short": "2", "int": 22, "long": "ok", "float": true, "double": false, + | "boolean": "true", "string": 99}""" + .trimMargin()) + val codec = DataClassCodec.create(DataClassWithSimpleValues::class, registry()) + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + + assertThrows("Invalid complex types") { + val data = BsonDocument.parse("""{"_id": "myId", "embedded": 123}""") + val codec = DataClassCodec.create(DataClassWithEmbedded::class, registry()) + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + + assertThrows("Failing init") { + val data = BsonDocument.parse("""{"id": "myId"}""") + val codec = DataClassCodec.create(DataClassWithFailingInit::class, registry()) + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + } + + @Test + fun testSupportedAnnotations() { + 
assertRoundTrips("""{"_id": "a"}""", DataClassWithBsonId("a")) + assertRoundTrips("""{"_id": "a"}""", DataClassWithBsonProperty("a")) + } + + @Test + fun testInvalidAnnotations() { + assertThrows { + DataClassCodec.create(DataClassWithBsonDiscriminator::class, registry()) + } + assertThrows { + DataClassCodec.create(DataClassWithBsonConstructor::class, registry()) + } + assertThrows { DataClassCodec.create(DataClassWithBsonIgnore::class, registry()) } + assertThrows { + DataClassCodec.create(DataClassWithBsonExtraElements::class, registry()) + } + assertThrows { + DataClassCodec.create(DataClassWithInvalidBsonRepresentation::class, registry()) + } + } + + private fun assertRoundTrips(expected: String, value: T) { + assertDecodesTo(assertEncodesTo(expected, value), value) + } + + @Suppress("UNCHECKED_CAST") + private fun assertEncodesTo(json: String, value: T): BsonDocument { + val expected = BsonDocument.parse(json) + val codec: DataClassCodec = DataClassCodec.create(value::class, registry()) as DataClassCodec + val document = BsonDocument() + val writer = BsonDocumentWriter(document) + + codec.encode(writer, value, EncoderContext.builder().build()) + assertEquals(expected, document) + if (expected.contains("_id")) { + assertEquals("_id", document.firstKey) + } + return document + } + + @Suppress("UNCHECKED_CAST") + private fun assertDecodesTo(value: BsonDocument, expected: T) { + val codec: DataClassCodec = DataClassCodec.create(expected::class, registry()) as DataClassCodec + val decoded: T = codec.decode(BsonDocumentReader(value), DecoderContext.builder().build()) + + assertEquals(expected, decoded) + } + + private fun registry() = fromProviders(ArrayCodecProvider(), DataClassCodecProvider(), Bson.DEFAULT_CODEC_REGISTRY) +} diff --git a/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/samples/DataClasses.kt b/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/samples/DataClasses.kt new file mode 100644 index 00000000000..77483cc9ee7 --- /dev/null +++ 
b/bson-kotlin/src/test/kotlin/org/bson/codecs/kotlin/samples/DataClasses.kt @@ -0,0 +1,258 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlin.samples + +import kotlin.time.Duration +import org.bson.BsonDocument +import org.bson.BsonMaxKey +import org.bson.BsonType +import org.bson.codecs.pojo.annotations.BsonCreator +import org.bson.codecs.pojo.annotations.BsonDiscriminator +import org.bson.codecs.pojo.annotations.BsonExtraElements +import org.bson.codecs.pojo.annotations.BsonId +import org.bson.codecs.pojo.annotations.BsonIgnore +import org.bson.codecs.pojo.annotations.BsonProperty +import org.bson.codecs.pojo.annotations.BsonRepresentation +import org.bson.types.ObjectId + +data class DataClassWithSimpleValues( + val char: Char, + val byte: Byte, + val short: Short, + val int: Int, + val long: Long, + val float: Float, + val double: Double, + val boolean: Boolean, + val string: String +) + +data class DataClassWithCollections( + val listSimple: List, + val listList: List>, + val listMap: List>, + val mapSimple: Map, + val mapList: Map>, + val mapMap: Map> +) + +data class DataClassWithArrays( + val arraySimple: Array, + val nestedArrays: Array>, + val arrayOfMaps: Array>> +) { + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (javaClass != other?.javaClass) return false + + other as DataClassWithArrays + + if 
(!arraySimple.contentEquals(other.arraySimple)) return false + if (!nestedArrays.contentDeepEquals(other.nestedArrays)) return false + + if (arrayOfMaps.size != other.arrayOfMaps.size) return false + arrayOfMaps.forEachIndexed { i, map -> + val otherMap = other.arrayOfMaps[i] + if (map.keys != otherMap.keys) return false + map.keys.forEach { key -> if (!map[key].contentEquals(otherMap[key])) return false } + } + + return true + } + + override fun hashCode(): Int { + var result = arraySimple.contentHashCode() + result = 31 * result + nestedArrays.contentDeepHashCode() + result = 31 * result + arrayOfMaps.contentHashCode() + return result + } +} + +data class DataClassWithNativeArrays( + val booleanArray: BooleanArray, + val byteArray: ByteArray, + val charArray: CharArray, + val doubleArray: DoubleArray, + val floatArray: FloatArray, + val intArray: IntArray, + val longArray: LongArray, + val shortArray: ShortArray, + val listOfArrays: List, + val mapOfArrays: Map +) { + + @SuppressWarnings("ComplexMethod") + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (javaClass != other?.javaClass) return false + + other as DataClassWithNativeArrays + + if (!booleanArray.contentEquals(other.booleanArray)) return false + if (!byteArray.contentEquals(other.byteArray)) return false + if (!charArray.contentEquals(other.charArray)) return false + if (!doubleArray.contentEquals(other.doubleArray)) return false + if (!floatArray.contentEquals(other.floatArray)) return false + if (!intArray.contentEquals(other.intArray)) return false + if (!longArray.contentEquals(other.longArray)) return false + if (!shortArray.contentEquals(other.shortArray)) return false + + if (listOfArrays.size != other.listOfArrays.size) return false + listOfArrays.forEachIndexed { i, value -> if (!value.contentEquals(other.listOfArrays[i])) return false } + + if (mapOfArrays.keys != other.mapOfArrays.keys) return false + mapOfArrays.keys.forEach { key -> if 
(!mapOfArrays[key].contentEquals(other.mapOfArrays[key])) return false } + + return true + } + + override fun hashCode(): Int { + var result = booleanArray.contentHashCode() + result = 31 * result + byteArray.contentHashCode() + result = 31 * result + charArray.contentHashCode() + result = 31 * result + doubleArray.contentHashCode() + result = 31 * result + floatArray.contentHashCode() + result = 31 * result + intArray.contentHashCode() + result = 31 * result + longArray.contentHashCode() + result = 31 * result + shortArray.contentHashCode() + result = 31 * result + listOfArrays.hashCode() + result = 31 * result + mapOfArrays.hashCode() + return result + } +} + +data class DataClassWithDefaults( + val boolean: Boolean = false, + val string: String = "String", + val listSimple: List = listOf("a", "b", "c") +) + +data class DataClassWithNulls(val boolean: Boolean?, val string: String?, val listSimple: List?) + +data class DataClassWithListThatLastItemDefaultsToNull(val elements: List) + +data class DataClassLastItemDefaultsToNull(val required: String, val optional: String? = null) + +data class DataClassSelfReferential( + val name: String, + val left: DataClassSelfReferential? = null, + val right: DataClassSelfReferential? 
= null +) + +data class DataClassEmbedded(val name: String) + +data class DataClassWithEmbedded(val id: String, val embedded: DataClassEmbedded) + +data class DataClassListOfDataClasses(val id: String, val nested: List) + +data class DataClassListOfListOfDataClasses(val id: String, val nested: List>) + +data class DataClassMapOfDataClasses(val id: String, val nested: Map) + +data class DataClassMapOfListOfDataClasses(val id: String, val nested: Map>) + +data class DataClassWithParameterizedDataClass( + val id: String, + val parameterizedDataClass: DataClassParameterized +) + +data class DataClassParameterized(val number: N, val string: String, val parameterizedList: List) + +data class DataClassWithNestedParameterizedDataClass( + val id: String, + val nestedParameterized: DataClassWithNestedParameterized +) + +data class DataClassWithNestedParameterized( + val parameterizedDataClass: DataClassParameterized, + val other: B, + val optionalOther: B? +) + +data class DataClassWithPair(val pair: Pair) + +data class DataClassWithTriple(val triple: Triple) + +data class DataClassNestedParameterizedTypes( + val triple: + Triple< + String, + Pair>>, + Triple, Triple, Double>>> +) + +data class DataClassWithMutableList(val value: MutableList) + +data class DataClassWithMutableSet(val value: MutableSet) + +data class DataClassWithMutableMap(val value: MutableMap) + +data class DataClassWithBooleanMapKey(val map: Map) + +enum class Key { + A, + B +} + +data class DataClassWithEnum(val value: Key) + +data class DataClassWithEnumMapKey(val map: Map) + +data class DataClassKey(val value: String) + +data class DataClassWithDataClassMapKey(val map: Map) + +data class DataClassWithObjectIdAndBsonDocument(val objectId: ObjectId, val bsonDocument: BsonDocument) + +sealed class DataClassSealed + +data class DataClassSealedA(val a: String) : DataClassSealed() + +data class DataClassSealedB(val b: Int) : DataClassSealed() + +data class DataClassSealedC(val c: String) : DataClassSealed() 
+ +data class DataClassListOfSealed(val items: List) + +data class DataClassWithBsonId(@BsonId val id: String) + +data class DataClassWithBsonProperty(@BsonProperty("_id") val id: String) + +@BsonDiscriminator data class DataClassWithBsonDiscriminator(val id: String) + +data class DataClassWithBsonIgnore(val id: String, @BsonIgnore val ignored: String) + +data class DataClassWithBsonExtraElements(val id: String, @BsonExtraElements val extraElements: Map) + +data class DataClassWithBsonConstructor(val id: String, val count: Int) { + @BsonCreator constructor(id: String) : this(id, -1) +} + +data class DataClassWithInvalidBsonRepresentation(@BsonRepresentation(BsonType.STRING) val id: BsonMaxKey) + +data class DataClassWithFailingInit(val id: String) { + init { + require(false) + } +} + +data class DataClassWithSequence(val value: Sequence) + +data class DataClassWithJVMErasure(val duration: Duration, val ints: List) + +data class Box(val boxed: T) + +data class DataClassWithNullableGeneric(val box: Box) diff --git a/bson-kotlinx/build.gradle.kts b/bson-kotlinx/build.gradle.kts new file mode 100644 index 00000000000..1671a876edf --- /dev/null +++ b/bson-kotlinx/build.gradle.kts @@ -0,0 +1,47 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import ProjectExtensions.configureJarManifest +import ProjectExtensions.configureMavenPublication + +plugins { + id("project.kotlin") + alias(libs.plugins.kotlin.serialization) +} + +base.archivesName.set("bson-kotlinx") + +dependencies { + api(project(path = ":bson", configuration = "default")) + implementation(platform(libs.kotlinx.serialization)) + implementation(libs.kotlinx.serialization.core) + implementation(libs.kotlin.reflect) + + optionalApi(libs.kotlinx.serialization.datetime) + optionalApi(libs.kotlinx.serialization.json) + + // Test case checks MongoClientSettings.getDefaultCodecRegistry() support + testImplementation(project(path = ":driver-core", configuration = "default")) +} + +configureMavenPublication { + pom { + name.set("BSON Kotlinx") + description.set("The BSON Codec for Kotlinx serialization") + url.set("https://bsonspec.org") + } +} + +configureJarManifest { attributes["Automatic-Module-Name"] = "org.mongodb.bson.kotlinx" } diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonConfiguration.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonConfiguration.kt new file mode 100644 index 00000000000..8a163f42f83 --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonConfiguration.kt @@ -0,0 +1,49 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlinx + +/** + * Bson Configuration for serialization + * + * Usage example with codecs: + * ``` + * val codec = KotlinSerializerCodec.create(mykClass, bsonConfiguration = BsonConfiguration(encodeDefaults = false)) + * ``` + * + * @property encodeDefaults encode default values, defaults to true + * @property explicitNulls encode null values, defaults to false + * @property classDiscriminator class discriminator to use when encoding polymorphic types + */ +public data class BsonConfiguration( + val encodeDefaults: Boolean = true, + val explicitNulls: Boolean = false, + val classDiscriminator: String = "_t", + val bsonNamingStrategy: BsonNamingStrategy? = null +) + +/** + * Optional BSON naming strategy for a field. + * + * @since 5.4 + */ +public enum class BsonNamingStrategy { + + /** + * A strategy that transforms serial names from camel case to snake case — lowercase characters with words separated + * by underscores. + */ + SNAKE_CASE, +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonDecoder.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonDecoder.kt new file mode 100644 index 00000000000..c00d09345d0 --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonDecoder.kt @@ -0,0 +1,342 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlinx + +import kotlinx.serialization.DeserializationStrategy +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.SerializationException +import kotlinx.serialization.descriptors.PolymorphicKind +import kotlinx.serialization.descriptors.PrimitiveKind +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.SerialKind +import kotlinx.serialization.descriptors.StructureKind +import kotlinx.serialization.encoding.AbstractDecoder +import kotlinx.serialization.encoding.CompositeDecoder +import kotlinx.serialization.encoding.CompositeDecoder.Companion.DECODE_DONE +import kotlinx.serialization.encoding.CompositeDecoder.Companion.UNKNOWN_NAME +import kotlinx.serialization.encoding.Decoder +import kotlinx.serialization.modules.SerializersModule +import org.bson.AbstractBsonReader +import org.bson.BsonInvalidOperationException +import org.bson.BsonReader +import org.bson.BsonReaderMark +import org.bson.BsonType +import org.bson.BsonValue +import org.bson.codecs.BsonValueCodec +import org.bson.codecs.DecoderContext +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.cacheElementNamesByDescriptor +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonArrayDecoder +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonDecoder +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonDocumentDecoder +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonMapDecoder +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonPolymorphicDecoder +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.getCachedElementNamesByDescriptor +import org.bson.internal.NumberCodecHelper +import org.bson.internal.StringCodecHelper +import org.bson.types.ObjectId + +/** + * The BsonDecoder interface + * + * For custom serialization handlers + */ +@ExperimentalSerializationApi +public sealed interface BsonDecoder : Decoder, CompositeDecoder { + + /** 
@return the decoded ObjectId */ + public fun decodeObjectId(): ObjectId + /** @return the decoded BsonValue */ + public fun decodeBsonValue(): BsonValue +} + +@OptIn(ExperimentalSerializationApi::class) +internal sealed class AbstractBsonDecoder( + val reader: AbstractBsonReader, + override val serializersModule: SerializersModule, + val configuration: BsonConfiguration +) : BsonDecoder, AbstractDecoder() { + + companion object { + + val bsonValueCodec = BsonValueCodec() + const val UNKNOWN_INDEX = -10 + val validKeyKinds = setOf(PrimitiveKind.STRING, PrimitiveKind.CHAR, SerialKind.ENUM) + + fun validateCurrentBsonType( + reader: BsonReader, + expectedType: BsonType, + descriptor: SerialDescriptor, + actualType: (descriptor: SerialDescriptor) -> String = { it.kind.toString() } + ) { + reader.currentBsonType?.let { + if (it != expectedType) { + throw SerializationException( + "Invalid data for `${actualType(descriptor)}` expected a bson " + + "${expectedType.name.lowercase()} found: ${reader.currentBsonType}") + } + } + } + } + + private data class ElementMetadata(val name: String, val nullable: Boolean, var processed: Boolean = false) + private var elementsMetadata: Array? 
= null + private var currentIndex: Int = UNKNOWN_INDEX + + private fun initElementMetadata(descriptor: SerialDescriptor) { + if (this.elementsMetadata != null) return + val elementsMetadata = + Array(descriptor.elementsCount) { + val elementDescriptor = descriptor.getElementDescriptor(it) + ElementMetadata( + elementDescriptor.serialName, elementDescriptor.isNullable && !descriptor.isElementOptional(it)) + } + this.elementsMetadata = elementsMetadata + cacheElementNamesByDescriptor(descriptor, configuration) + } + + override fun decodeElementIndex(descriptor: SerialDescriptor): Int { + initElementMetadata(descriptor) + currentIndex = decodeElementIndexImpl(descriptor) + elementsMetadata?.getOrNull(currentIndex)?.processed = true + return currentIndex + } + + @Suppress("ReturnCount", "ComplexMethod") + private fun decodeElementIndexImpl(descriptor: SerialDescriptor): Int { + val elementMetadata = elementsMetadata ?: error("elementsMetadata may not be null.") + val name: String? = + when (reader.state ?: error("State of reader may not be null.")) { + AbstractBsonReader.State.NAME -> reader.readName() + AbstractBsonReader.State.VALUE -> reader.currentName + AbstractBsonReader.State.TYPE -> { + reader.readBsonType() + return decodeElementIndexImpl(descriptor) + } + AbstractBsonReader.State.END_OF_DOCUMENT, + AbstractBsonReader.State.END_OF_ARRAY -> + return elementMetadata.indexOfFirst { it.nullable && !it.processed } + else -> null + } + + return name?.let { + val index = + if (configuration.bsonNamingStrategy == BsonNamingStrategy.SNAKE_CASE) { + getCachedElementNamesByDescriptor(descriptor)[it]?.let { name -> descriptor.getElementIndex(name) } + ?: UNKNOWN_NAME + } else { + descriptor.getElementIndex(it) + } + return if (index == UNKNOWN_NAME) { + reader.skipValue() + decodeElementIndexImpl(descriptor) + } else { + index + } + } + ?: UNKNOWN_NAME + } + + override fun beginStructure(descriptor: SerialDescriptor): CompositeDecoder { + return when (descriptor.kind) { + 
is PolymorphicKind -> createBsonPolymorphicDecoder(descriptor, reader, serializersModule, configuration) + is StructureKind.LIST -> createBsonArrayDecoder(descriptor, reader, serializersModule, configuration) + is StructureKind.CLASS, + StructureKind.OBJECT -> createBsonDocumentDecoder(descriptor, reader, serializersModule, configuration) + is StructureKind.MAP -> createBsonMapDecoder(descriptor, reader, serializersModule, configuration) + else -> throw SerializationException("Primitives are not supported at top-level") + } + } + + override fun endStructure(descriptor: SerialDescriptor) { + when (descriptor.kind) { + is StructureKind.LIST -> reader.readEndArray() + is StructureKind.MAP, + StructureKind.CLASS, + StructureKind.OBJECT -> reader.readEndDocument() + else -> {} + } + } + + override fun decodeByte(): Byte = NumberCodecHelper.decodeByte(reader) + override fun decodeChar(): Char = StringCodecHelper.decodeChar(reader) + override fun decodeFloat(): Float = NumberCodecHelper.decodeFloat(reader) + override fun decodeShort(): Short = NumberCodecHelper.decodeShort(reader) + override fun decodeBoolean(): Boolean = reader.readBoolean() + override fun decodeDouble(): Double = NumberCodecHelper.decodeDouble(reader) + override fun decodeInt(): Int = NumberCodecHelper.decodeInt(reader) + override fun decodeLong(): Long = NumberCodecHelper.decodeLong(reader) + override fun decodeString(): String = readOrThrow({ reader.readString() }, BsonType.STRING) + + override fun decodeNull(): Nothing? 
{ + if (reader.state == AbstractBsonReader.State.VALUE) { + readOrThrow({ reader.readNull() }, BsonType.NULL) + } + return null + } + + override fun decodeEnum(enumDescriptor: SerialDescriptor): Int = enumDescriptor.getElementIndex(decodeString()) + override fun decodeNotNullMark(): Boolean { + return reader.state != AbstractBsonReader.State.END_OF_DOCUMENT && reader.currentBsonType != BsonType.NULL + } + + override fun decodeObjectId(): ObjectId = readOrThrow({ reader.readObjectId() }, BsonType.OBJECT_ID) + override fun decodeBsonValue(): BsonValue = bsonValueCodec.decode(reader, DecoderContext.builder().build()) + + private inline fun readOrThrow(action: () -> T, bsonType: BsonType): T { + return try { + action() + } catch (e: BsonInvalidOperationException) { + throw BsonInvalidOperationException( + "Reading field '${reader.currentName}' failed expected $bsonType type but found:" + + " ${reader.currentBsonType}.", + e) + } + } +} + +/** The default Bson Decoder implementation */ +internal open class BsonDecoderImpl( + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : AbstractBsonDecoder(reader, serializersModule, configuration) + +/** The Bson array decoder */ +internal open class BsonArrayDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : AbstractBsonDecoder(reader, serializersModule, configuration) { + + init { + validateCurrentBsonType(reader, BsonType.ARRAY, descriptor) + reader.readStartArray() + } + + private var index = 0 + override fun decodeElementIndex(descriptor: SerialDescriptor): Int { + val nextType = reader.readBsonType() + if (nextType == BsonType.END_OF_DOCUMENT) return DECODE_DONE + return index++ + } +} + +/** The Bson document decoder */ +@OptIn(ExperimentalSerializationApi::class) +internal open class BsonDocumentDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + 
serializersModule: SerializersModule, + configuration: BsonConfiguration +) : AbstractBsonDecoder(reader, serializersModule, configuration) { + + init { + validateCurrentBsonType(reader, BsonType.DOCUMENT, descriptor) { it.serialName } + reader.readStartDocument() + } +} + +/** The Bson polymorphic class decoder */ +@OptIn(ExperimentalSerializationApi::class) +internal open class BsonPolymorphicDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : AbstractBsonDecoder(reader, serializersModule, configuration) { + private var index = 0 + private var mark: BsonReaderMark? + + init { + mark = reader.mark + validateCurrentBsonType(reader, BsonType.DOCUMENT, descriptor) { it.serialName } + reader.readStartDocument() + } + + override fun decodeSerializableValue(deserializer: DeserializationStrategy): T { + mark?.let { + it.reset() + mark = null + } + return deserializer.deserialize(createBsonDecoder(reader, serializersModule, configuration)) + } + + override fun decodeElementIndex(descriptor: SerialDescriptor): Int { + var found = false + return when (index) { + 0 -> { + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + if (reader.readName() == configuration.classDiscriminator) { + found = true + break + } + reader.skipValue() + } + if (!found) { + throw SerializationException( + "Missing required discriminator field `${configuration.classDiscriminator}` " + + "for polymorphic class: `${descriptor.serialName}`.") + } + index++ + } + 1 -> index++ + else -> DECODE_DONE + } + } +} + +/** The Bson map decoder */ +@OptIn(ExperimentalSerializationApi::class) +internal open class BsonMapDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : AbstractBsonDecoder(reader, serializersModule, configuration) { + private var index = 0 + private var isKey = false + + init { + 
validateCurrentBsonType(reader, BsonType.DOCUMENT, descriptor) + reader.readStartDocument() + } + + override fun decodeString(): String { + return if (isKey) { + reader.readName() + } else { + super.decodeString() + } + } + + override fun decodeElementIndex(descriptor: SerialDescriptor): Int { + val keyKind = descriptor.getElementDescriptor(0).kind + if (!validKeyKinds.contains(keyKind)) { + throw SerializationException( + "Invalid key type for ${descriptor.serialName}. Expected STRING or ENUM but found: `${keyKind}`") + } + + if (!isKey) { + isKey = true + val nextType = reader.readBsonType() + if (nextType == BsonType.END_OF_DOCUMENT) return DECODE_DONE + } else { + isKey = false + } + return index++ + } +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonEncoder.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonEncoder.kt new file mode 100644 index 00000000000..8a34bccdb36 --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonEncoder.kt @@ -0,0 +1,258 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlinx + +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.SerializationException +import kotlinx.serialization.SerializationStrategy +import kotlinx.serialization.descriptors.PolymorphicKind +import kotlinx.serialization.descriptors.PrimitiveKind +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.SerialKind +import kotlinx.serialization.descriptors.StructureKind +import kotlinx.serialization.encoding.AbstractEncoder +import kotlinx.serialization.encoding.CompositeEncoder +import kotlinx.serialization.encoding.Encoder +import kotlinx.serialization.modules.SerializersModule +import org.bson.BsonValue +import org.bson.BsonWriter +import org.bson.codecs.BsonValueCodec +import org.bson.codecs.EncoderContext +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.convertCamelCase +import org.bson.types.ObjectId + +/** + * The BsonEncoder interface + * + * For custom serialization handlers + */ +@ExperimentalSerializationApi +public sealed interface BsonEncoder : Encoder, CompositeEncoder { + + /** + * Encodes an ObjectId + * + * @param value the ObjectId + */ + public fun encodeObjectId(value: ObjectId) + + /** + * Encodes a BsonValue + * + * @param value the BsonValue + */ + public fun encodeBsonValue(value: BsonValue) +} + +/** + * The default BsonEncoder implementation + * + * Unlike BsonDecoder implementations, state is shared when encoding, so a single class is used to encode Bson Arrays, + * Documents, Polymorphic types and Maps. 
+ */ +@OptIn(ExperimentalSerializationApi::class) +internal open class BsonEncoderImpl( + val writer: BsonWriter, + override val serializersModule: SerializersModule, + val configuration: BsonConfiguration +) : BsonEncoder, AbstractEncoder() { + + companion object { + val validKeyKinds = setOf(PrimitiveKind.STRING, PrimitiveKind.CHAR, SerialKind.ENUM) + val bsonValueCodec = BsonValueCodec() + } + + private var isPolymorphic = false + private var state = STATE.VALUE + private var mapState = MapState() + internal val deferredElementHandler: DeferredElementHandler = DeferredElementHandler() + + override fun shouldEncodeElementDefault(descriptor: SerialDescriptor, index: Int): Boolean = + configuration.encodeDefaults + + override fun beginStructure(descriptor: SerialDescriptor): CompositeEncoder { + when (descriptor.kind) { + is PolymorphicKind -> { + writer.writeStartDocument() + writer.writeName(configuration.classDiscriminator) + isPolymorphic = true + } + is StructureKind.LIST -> writer.writeStartArray() + is StructureKind.CLASS, + StructureKind.OBJECT -> { + if (isPolymorphic) { + isPolymorphic = false + } else { + writer.writeStartDocument() + } + } + is StructureKind.MAP -> { + writer.writeStartDocument() + mapState = MapState() + } + else -> throw SerializationException("Primitives are not supported at top-level") + } + return this + } + + override fun endStructure(descriptor: SerialDescriptor) { + when (descriptor.kind) { + is StructureKind.LIST -> writer.writeEndArray() + StructureKind.MAP, + StructureKind.CLASS, + StructureKind.OBJECT -> writer.writeEndDocument() + else -> {} + } + } + + override fun encodeElement(descriptor: SerialDescriptor, index: Int): Boolean { + when (descriptor.kind) { + is StructureKind.CLASS -> { + val elementName = descriptor.getElementName(index) + if (descriptor.getElementDescriptor(index).isNullable) { + deferredElementHandler.set(elementName) + } else { + encodeName(elementName) + } + } + is StructureKind.MAP -> { + if (index 
== 0) { + val keyKind = descriptor.getElementDescriptor(index).kind + if (!validKeyKinds.contains(keyKind)) { + throw SerializationException( + """Invalid key type for ${descriptor.serialName}. + | Expected STRING or ENUM but found: `${keyKind}`.""" + .trimMargin()) + } + } + state = mapState.nextState() + } + else -> {} + } + return true + } + + override fun encodeSerializableValue(serializer: SerializationStrategy, value: T) { + deferredElementHandler.with( + { + // When using generics its possible for `value` to be null + // See: https://youtrack.jetbrains.com/issue/KT-66206 + if (value != null || configuration.explicitNulls) { + encodeName(it) + super.encodeSerializableValue(serializer, value) + } + }, + { super.encodeSerializableValue(serializer, value) }) + } + + override fun encodeNullableSerializableValue(serializer: SerializationStrategy, value: T?) { + deferredElementHandler.with( + { + if (value != null || configuration.explicitNulls) { + encodeName(it) + super.encodeNullableSerializableValue(serializer, value) + } + }, + { super.encodeNullableSerializableValue(serializer, value) }) + } + + override fun encodeByte(value: Byte) = encodeInt(value.toInt()) + override fun encodeChar(value: Char) = encodeString(value.toString()) + override fun encodeFloat(value: Float) = encodeDouble(value.toDouble()) + override fun encodeShort(value: Short) = encodeInt(value.toInt()) + + override fun encodeBoolean(value: Boolean) = writer.writeBoolean(value) + override fun encodeDouble(value: Double) = writer.writeDouble(value) + override fun encodeInt(value: Int) = writer.writeInt32(value) + override fun encodeLong(value: Long) = writer.writeInt64(value) + override fun encodeNull() = writer.writeNull() + + override fun encodeString(value: String) { + when (state) { + STATE.NAME -> deferredElementHandler.set(value) + STATE.VALUE -> writer.writeString(value) + } + } + + override fun encodeEnum(enumDescriptor: SerialDescriptor, index: Int) { + val value = 
enumDescriptor.getElementName(index) + when (state) { + STATE.NAME -> encodeName(value) + STATE.VALUE -> writer.writeString(value) + } + } + + override fun encodeObjectId(value: ObjectId) { + writer.writeObjectId(value) + } + + override fun encodeBsonValue(value: BsonValue) { + bsonValueCodec.encode(writer, value, EncoderContext.builder().build()) + } + + internal fun encodeName(value: Any) { + val name = + value.toString().let { + if (configuration.bsonNamingStrategy == BsonNamingStrategy.SNAKE_CASE) { + convertCamelCase(it, '_') + } else { + it + } + } + writer.writeName(name) + state = STATE.VALUE + } + + private enum class STATE { + NAME, + VALUE + } + + private class MapState { + var currentState: STATE = STATE.VALUE + fun getState(): STATE = currentState + + fun nextState(): STATE { + currentState = + when (currentState) { + STATE.VALUE -> STATE.NAME + STATE.NAME -> STATE.VALUE + } + return getState() + } + } + + internal class DeferredElementHandler { + private var deferredElementName: String? = null + + fun set(name: String) { + assert(deferredElementName == null) { "Overwriting an existing deferred name" } + deferredElementName = name + } + + fun with(actionWithDeferredElement: (String) -> Unit, actionWithoutDeferredElement: () -> Unit) { + deferredElementName?.let { + reset() + actionWithDeferredElement(it) + } + ?: actionWithoutDeferredElement() + } + + private fun reset() { + deferredElementName = null + } + } +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonSerializers.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonSerializers.kt new file mode 100644 index 00000000000..26c19c0fe17 --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonSerializers.kt @@ -0,0 +1,136 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlinx + +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.KSerializer +import kotlinx.serialization.SerializationException +import kotlinx.serialization.Serializer +import kotlinx.serialization.descriptors.PrimitiveKind +import kotlinx.serialization.descriptors.PrimitiveSerialDescriptor +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.encoding.Decoder +import kotlinx.serialization.encoding.Encoder +import kotlinx.serialization.modules.SerializersModule +import kotlinx.serialization.modules.plus +import org.bson.BsonArray +import org.bson.BsonBinary +import org.bson.BsonBoolean +import org.bson.BsonDateTime +import org.bson.BsonDbPointer +import org.bson.BsonDecimal128 +import org.bson.BsonDocument +import org.bson.BsonDouble +import org.bson.BsonInt32 +import org.bson.BsonInt64 +import org.bson.BsonJavaScript +import org.bson.BsonJavaScriptWithScope +import org.bson.BsonMaxKey +import org.bson.BsonMinKey +import org.bson.BsonNull +import org.bson.BsonObjectId +import org.bson.BsonRegularExpression +import org.bson.BsonString +import org.bson.BsonSymbol +import org.bson.BsonTimestamp +import org.bson.BsonUndefined +import org.bson.BsonValue +import org.bson.RawBsonArray +import org.bson.RawBsonDocument +import org.bson.types.ObjectId + +/** + * The default serializers module + * + * Handles: + * - ObjectId serialization + * - BsonValue serialization + */ +@ExperimentalSerializationApi +public val defaultSerializersModule: SerializersModule 
= + ObjectIdSerializer.serializersModule + BsonValueSerializer.serializersModule + dateTimeSerializersModule + +@ExperimentalSerializationApi +@Serializer(forClass = ObjectId::class) +public object ObjectIdSerializer : KSerializer { + override val descriptor: SerialDescriptor = PrimitiveSerialDescriptor("ObjectIdSerializer", PrimitiveKind.STRING) + + override fun serialize(encoder: Encoder, value: ObjectId) { + when (encoder) { + is BsonEncoder -> encoder.encodeObjectId(value) + else -> throw SerializationException("ObjectId is not supported by ${encoder::class}") + } + } + + override fun deserialize(decoder: Decoder): ObjectId { + return when (decoder) { + is BsonDecoder -> decoder.decodeObjectId() + else -> throw SerializationException("ObjectId is not supported by ${decoder::class}") + } + } + + public val serializersModule: SerializersModule = SerializersModule { + contextual(ObjectId::class, ObjectIdSerializer) + } +} + +@ExperimentalSerializationApi +@Serializer(forClass = BsonValue::class) +public object BsonValueSerializer : KSerializer { + override val descriptor: SerialDescriptor = PrimitiveSerialDescriptor("BsonValueSerializer", PrimitiveKind.STRING) + + override fun serialize(encoder: Encoder, value: BsonValue) { + when (encoder) { + is BsonEncoder -> encoder.encodeBsonValue(value) + else -> throw SerializationException("BsonValues are not supported by ${encoder::class}") + } + } + + override fun deserialize(decoder: Decoder): BsonValue { + return when (decoder) { + is BsonDecoder -> decoder.decodeBsonValue() + else -> throw SerializationException("BsonValues are not supported by ${decoder::class}") + } + } + + @Suppress("UNCHECKED_CAST") + public val serializersModule: SerializersModule = SerializersModule { + contextual(BsonNull::class, BsonValueSerializer as KSerializer) + contextual(BsonArray::class, BsonValueSerializer as KSerializer) + contextual(BsonBinary::class, BsonValueSerializer as KSerializer) + contextual(BsonBoolean::class, 
BsonValueSerializer as KSerializer) + contextual(BsonDateTime::class, BsonValueSerializer as KSerializer) + contextual(BsonDbPointer::class, BsonValueSerializer as KSerializer) + contextual(BsonDocument::class, BsonValueSerializer as KSerializer) + contextual(BsonDouble::class, BsonValueSerializer as KSerializer) + contextual(BsonInt32::class, BsonValueSerializer as KSerializer) + contextual(BsonInt64::class, BsonValueSerializer as KSerializer) + contextual(BsonDecimal128::class, BsonValueSerializer as KSerializer) + contextual(BsonMaxKey::class, BsonValueSerializer as KSerializer) + contextual(BsonMinKey::class, BsonValueSerializer as KSerializer) + contextual(BsonJavaScript::class, BsonValueSerializer as KSerializer) + contextual(BsonJavaScriptWithScope::class, BsonValueSerializer as KSerializer) + contextual(BsonObjectId::class, BsonValueSerializer as KSerializer) + contextual(BsonRegularExpression::class, BsonValueSerializer as KSerializer) + contextual(BsonString::class, BsonValueSerializer as KSerializer) + contextual(BsonSymbol::class, BsonValueSerializer as KSerializer) + contextual(BsonTimestamp::class, BsonValueSerializer as KSerializer) + contextual(BsonUndefined::class, BsonValueSerializer as KSerializer) + contextual(BsonDocument::class, BsonValueSerializer as KSerializer) + contextual(RawBsonDocument::class, BsonValueSerializer as KSerializer) + contextual(RawBsonArray::class, BsonValueSerializer as KSerializer) + } +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/DateTimeSerializers.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/DateTimeSerializers.kt new file mode 100644 index 00000000000..7b597135d4f --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/DateTimeSerializers.kt @@ -0,0 +1,221 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlinx + +import java.time.ZoneOffset +import kotlinx.datetime.Instant +import kotlinx.datetime.LocalDate +import kotlinx.datetime.LocalDateTime +import kotlinx.datetime.LocalTime +import kotlinx.datetime.TimeZone +import kotlinx.datetime.UtcOffset +import kotlinx.datetime.atDate +import kotlinx.datetime.atStartOfDayIn +import kotlinx.datetime.toInstant +import kotlinx.datetime.toLocalDateTime +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.KSerializer +import kotlinx.serialization.SerializationException +import kotlinx.serialization.descriptors.PrimitiveKind +import kotlinx.serialization.descriptors.PrimitiveSerialDescriptor +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.encoding.Decoder +import kotlinx.serialization.encoding.Encoder +import kotlinx.serialization.modules.SerializersModule +import kotlinx.serialization.modules.plus +import org.bson.BsonDateTime +import org.bson.codecs.kotlinx.utils.SerializationModuleUtils.isClassAvailable + +/** + * The default serializers module + * + * Handles: + * - ObjectId serialization + * - BsonValue serialization + * - Instant serialization + * - LocalDate serialization + * - LocalDateTime serialization + * - LocalTime serialization + */ +@ExperimentalSerializationApi +public val dateTimeSerializersModule: SerializersModule by lazy { + var module = SerializersModule {} + if (isClassAvailable("kotlinx.datetime.Instant")) { + module += InstantAsBsonDateTime.serializersModule + } + if 
(isClassAvailable("kotlinx.datetime.LocalDate")) { + module += LocalDateAsBsonDateTime.serializersModule + } + if (isClassAvailable("kotlinx.datetime.LocalDateTime")) { + module += LocalDateTimeAsBsonDateTime.serializersModule + } + if (isClassAvailable("kotlinx.datetime.LocalTime")) { + module += LocalTimeAsBsonDateTime.serializersModule + } + module +} + +/** + * Instant KSerializer. + * + * Encodes and decodes `Instant` objects to and from `BsonDateTime`. Data is extracted via + * [kotlinx.datetime.Instant.fromEpochMilliseconds] and stored to millisecond accuracy. + * + * @since 5.2 + */ +@ExperimentalSerializationApi +public object InstantAsBsonDateTime : KSerializer { + override val descriptor: SerialDescriptor = PrimitiveSerialDescriptor("InstantAsBsonDateTime", PrimitiveKind.STRING) + + override fun serialize(encoder: Encoder, value: Instant) { + when (encoder) { + is BsonEncoder -> encoder.encodeBsonValue(BsonDateTime(value.toEpochMilliseconds())) + else -> throw SerializationException("Instant is not supported by ${encoder::class}") + } + } + + override fun deserialize(decoder: Decoder): Instant { + return when (decoder) { + is BsonDecoder -> Instant.fromEpochMilliseconds(decoder.decodeBsonValue().asDateTime().value) + else -> throw SerializationException("Instant is not supported by ${decoder::class}") + } + } + + @Suppress("UNCHECKED_CAST") + public val serializersModule: SerializersModule = SerializersModule { + contextual(Instant::class, InstantAsBsonDateTime as KSerializer) + } +} + +/** + * LocalDate KSerializer. + * + * Encodes and decodes `LocalDate` objects to and from `BsonDateTime`. + * + * Converts the `LocalDate` values to and from `UTC`. 
+ * + * @since 5.2 + */ +@ExperimentalSerializationApi +public object LocalDateAsBsonDateTime : KSerializer { + override val descriptor: SerialDescriptor = + PrimitiveSerialDescriptor("LocalDateAsBsonDateTime", PrimitiveKind.STRING) + + override fun serialize(encoder: Encoder, value: LocalDate) { + when (encoder) { + is BsonEncoder -> { + val epochMillis = value.atStartOfDayIn(TimeZone.UTC).toEpochMilliseconds() + encoder.encodeBsonValue(BsonDateTime(epochMillis)) + } + else -> throw SerializationException("LocalDate is not supported by ${encoder::class}") + } + } + + override fun deserialize(decoder: Decoder): LocalDate { + return when (decoder) { + is BsonDecoder -> + Instant.fromEpochMilliseconds(decoder.decodeBsonValue().asDateTime().value) + .toLocalDateTime(TimeZone.UTC) + .date + else -> throw SerializationException("LocalDate is not supported by ${decoder::class}") + } + } + + @Suppress("UNCHECKED_CAST") + public val serializersModule: SerializersModule = SerializersModule { + contextual(LocalDate::class, LocalDateAsBsonDateTime as KSerializer) + } +} + +/** + * LocalDateTime KSerializer. + * + * Encodes and decodes `LocalDateTime` objects to and from `BsonDateTime`. Data is stored to millisecond accuracy. + * + * Converts the `LocalDateTime` values to and from `UTC`. 
+ * + * @since 5.2 + */ +@ExperimentalSerializationApi +public object LocalDateTimeAsBsonDateTime : KSerializer { + override val descriptor: SerialDescriptor = + PrimitiveSerialDescriptor("LocalDateTimeAsBsonDateTime", PrimitiveKind.STRING) + + override fun serialize(encoder: Encoder, value: LocalDateTime) { + when (encoder) { + is BsonEncoder -> { + val epochMillis = value.toInstant(UtcOffset(ZoneOffset.UTC)).toEpochMilliseconds() + encoder.encodeBsonValue(BsonDateTime(epochMillis)) + } + else -> throw SerializationException("LocalDateTime is not supported by ${encoder::class}") + } + } + + override fun deserialize(decoder: Decoder): LocalDateTime { + return when (decoder) { + is BsonDecoder -> + Instant.fromEpochMilliseconds(decoder.decodeBsonValue().asDateTime().value) + .toLocalDateTime(TimeZone.UTC) + else -> throw SerializationException("LocalDateTime is not supported by ${decoder::class}") + } + } + + @Suppress("UNCHECKED_CAST") + public val serializersModule: SerializersModule = SerializersModule { + contextual(LocalDateTime::class, LocalDateTimeAsBsonDateTime as KSerializer) + } +} + +/** + * LocalTime KSerializer. + * + * Encodes and decodes `LocalTime` objects to and from `BsonDateTime`. Data is stored to millisecond accuracy. + * + * Converts the `LocalTime` values to and from EpochDay at `UTC`. 
+ * + * @since 5.2 + */ +@ExperimentalSerializationApi +public object LocalTimeAsBsonDateTime : KSerializer { + override val descriptor: SerialDescriptor = + PrimitiveSerialDescriptor("LocalTimeAsBsonDateTime", PrimitiveKind.STRING) + + override fun serialize(encoder: Encoder, value: LocalTime) { + when (encoder) { + is BsonEncoder -> { + val epochMillis = + value.atDate(LocalDate.fromEpochDays(0)).toInstant(UtcOffset(ZoneOffset.UTC)).toEpochMilliseconds() + encoder.encodeBsonValue(BsonDateTime(epochMillis)) + } + else -> throw SerializationException("LocalTime is not supported by ${encoder::class}") + } + } + + override fun deserialize(decoder: Decoder): LocalTime { + return when (decoder) { + is BsonDecoder -> + Instant.fromEpochMilliseconds(decoder.decodeBsonValue().asDateTime().value) + .toLocalDateTime(TimeZone.UTC) + .time + else -> throw SerializationException("LocalTime is not supported by ${decoder::class}") + } + } + + @Suppress("UNCHECKED_CAST") + public val serializersModule: SerializersModule = SerializersModule { + contextual(LocalTime::class, LocalTimeAsBsonDateTime as KSerializer) + } +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonDecoder.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonDecoder.kt new file mode 100644 index 00000000000..bd8b6739958 --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonDecoder.kt @@ -0,0 +1,154 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlinx + +import java.util.Base64 +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.json.Json +import kotlinx.serialization.json.JsonArray +import kotlinx.serialization.json.JsonDecoder +import kotlinx.serialization.json.JsonElement +import kotlinx.serialization.json.JsonObject +import kotlinx.serialization.json.JsonPrimitive +import kotlinx.serialization.json.buildJsonArray +import kotlinx.serialization.json.buildJsonObject +import kotlinx.serialization.modules.SerializersModule +import org.bson.AbstractBsonReader +import org.bson.BsonBinarySubType +import org.bson.BsonType +import org.bson.UuidRepresentation +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.toJsonNamingStrategy +import org.bson.internal.UuidHelper + +@OptIn(ExperimentalSerializationApi::class) +internal interface JsonBsonDecoder : BsonDecoder, JsonDecoder { + val reader: AbstractBsonReader + val configuration: BsonConfiguration + + fun json(): Json = Json { + explicitNulls = configuration.explicitNulls + encodeDefaults = configuration.encodeDefaults + classDiscriminator = configuration.classDiscriminator + namingStrategy = configuration.bsonNamingStrategy.toJsonNamingStrategy() + serializersModule = this@JsonBsonDecoder.serializersModule + } + + @Suppress("ComplexMethod") + override fun decodeJsonElement(): JsonElement = + reader.run { + when (currentBsonType) { + BsonType.DOCUMENT -> readJsonObject() + BsonType.ARRAY -> readJsonArray() + BsonType.NULL -> JsonPrimitive(decodeNull()) + BsonType.STRING -> JsonPrimitive(decodeString()) + BsonType.BOOLEAN -> JsonPrimitive(decodeBoolean()) + BsonType.INT32 -> JsonPrimitive(decodeInt()) + BsonType.INT64 -> JsonPrimitive(decodeLong()) + BsonType.DOUBLE -> JsonPrimitive(decodeDouble()) + BsonType.DECIMAL128 -> 
JsonPrimitive(reader.readDecimal128()) + BsonType.OBJECT_ID -> JsonPrimitive(decodeObjectId().toHexString()) + BsonType.DATE_TIME -> JsonPrimitive(reader.readDateTime()) + BsonType.TIMESTAMP -> JsonPrimitive(reader.readTimestamp().value) + BsonType.BINARY -> { + val subtype = reader.peekBinarySubType() + val data = reader.readBinaryData().data + when (subtype) { + BsonBinarySubType.UUID_LEGACY.value -> + JsonPrimitive( + UuidHelper.decodeBinaryToUuid(data, subtype, UuidRepresentation.JAVA_LEGACY).toString()) + BsonBinarySubType.UUID_STANDARD.value -> + JsonPrimitive( + UuidHelper.decodeBinaryToUuid(data, subtype, UuidRepresentation.STANDARD).toString()) + else -> JsonPrimitive(Base64.getEncoder().encodeToString(data)) + } + } + else -> error("Unsupported json type: $currentBsonType") + } + } + + private fun readJsonObject(): JsonObject { + reader.readStartDocument() + val obj = buildJsonObject { + var type = reader.readBsonType() + while (type != BsonType.END_OF_DOCUMENT) { + put(reader.readName(), decodeJsonElement()) + type = reader.readBsonType() + } + } + + reader.readEndDocument() + return obj + } + + private fun readJsonArray(): JsonArray { + reader.readStartArray() + val array = buildJsonArray { + var type = reader.readBsonType() + while (type != BsonType.END_OF_DOCUMENT) { + add(decodeJsonElement()) + type = reader.readBsonType() + } + } + + reader.readEndArray() + return array + } +} + +internal class JsonBsonDecoderImpl( + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : BsonDecoderImpl(reader, serializersModule, configuration), JsonBsonDecoder { + override val json = json() +} + +internal class JsonBsonArrayDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : BsonArrayDecoder(descriptor, reader, serializersModule, configuration), JsonBsonDecoder { + override val json = json() +} + +internal class 
JsonBsonDocumentDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : BsonDocumentDecoder(descriptor, reader, serializersModule, configuration), JsonBsonDecoder { + override val json = json() +} + +internal class JsonBsonPolymorphicDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : BsonPolymorphicDecoder(descriptor, reader, serializersModule, configuration), JsonBsonDecoder { + override val json = json() +} + +internal class JsonBsonMapDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration +) : BsonMapDecoder(descriptor, reader, serializersModule, configuration), JsonBsonDecoder { + override val json = json() +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonEncoder.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonEncoder.kt new file mode 100644 index 00000000000..4a754834e6d --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonEncoder.kt @@ -0,0 +1,134 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlinx + +import java.math.BigDecimal +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.SerializationStrategy +import kotlinx.serialization.json.Json +import kotlinx.serialization.json.JsonArray +import kotlinx.serialization.json.JsonElement +import kotlinx.serialization.json.JsonEncoder +import kotlinx.serialization.json.JsonNull +import kotlinx.serialization.json.JsonObject +import kotlinx.serialization.json.JsonPrimitive +import kotlinx.serialization.json.double +import kotlinx.serialization.json.int +import kotlinx.serialization.json.long +import kotlinx.serialization.modules.SerializersModule +import org.bson.BsonWriter +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.toJsonNamingStrategy +import org.bson.types.Decimal128 + +@OptIn(ExperimentalSerializationApi::class) +internal class JsonBsonEncoder( + writer: BsonWriter, + override val serializersModule: SerializersModule, + configuration: BsonConfiguration, +) : BsonEncoderImpl(writer, serializersModule, configuration), JsonEncoder { + + companion object { + private val DOUBLE_MIN_VALUE = BigDecimal.valueOf(Double.MIN_VALUE) + private val DOUBLE_MAX_VALUE = BigDecimal.valueOf(Double.MAX_VALUE) + private val INT_MIN_VALUE = BigDecimal.valueOf(Int.MIN_VALUE.toLong()) + private val INT_MAX_VALUE = BigDecimal.valueOf(Int.MAX_VALUE.toLong()) + private val LONG_MIN_VALUE = BigDecimal.valueOf(Long.MIN_VALUE) + private val LONG_MAX_VALUE = BigDecimal.valueOf(Long.MAX_VALUE) + } + + override val json = Json { + explicitNulls = configuration.explicitNulls + encodeDefaults = configuration.encodeDefaults + classDiscriminator = configuration.classDiscriminator + namingStrategy = configuration.bsonNamingStrategy.toJsonNamingStrategy() + serializersModule = this@JsonBsonEncoder.serializersModule + } + + override fun encodeSerializableValue(serializer: SerializationStrategy, value: T) { + if (value is JsonElement) encodeJsonElement(value) + else 
super.encodeSerializableValue(serializer, value) + } + + override fun encodeJsonElement(element: JsonElement) { + deferredElementHandler.with( + { + when (element) { + is JsonNull -> + if (configuration.explicitNulls) { + encodeName(it) + encodeNull() + } + is JsonPrimitive -> { + encodeName(it) + encodeJsonPrimitive(element) + } + is JsonObject -> { + encodeName(it) + encodeJsonObject(element) + } + is JsonArray -> { + encodeName(it) + encodeJsonArray(element) + } + } + }, + { + when (element) { + is JsonNull -> if (configuration.explicitNulls) encodeNull() + is JsonPrimitive -> encodeJsonPrimitive(element) + is JsonObject -> encodeJsonObject(element) + is JsonArray -> encodeJsonArray(element) + } + }) + } + + private fun encodeJsonPrimitive(primitive: JsonPrimitive) { + val content = primitive.content + when { + primitive.isString -> encodeString(content) + content == "true" || content == "false" -> encodeBoolean(content.toBooleanStrict()) + else -> { + val decimal = BigDecimal(content) + when { + decimal.scale() != 0 -> + if (DOUBLE_MIN_VALUE <= decimal && decimal <= DOUBLE_MAX_VALUE) { + encodeDouble(primitive.double) + } else { + writer.writeDecimal128(Decimal128(decimal)) + } + INT_MIN_VALUE <= decimal && decimal <= INT_MAX_VALUE -> encodeInt(primitive.int) + LONG_MIN_VALUE <= decimal && decimal <= LONG_MAX_VALUE -> encodeLong(primitive.long) + else -> writer.writeDecimal128(Decimal128(decimal)) + } + } + } + } + + private fun encodeJsonObject(obj: JsonObject) { + writer.writeStartDocument() + obj.forEach { k, v -> + deferredElementHandler.set(k) + encodeJsonElement(v) + } + writer.writeEndDocument() + } + + private fun encodeJsonArray(array: JsonArray) { + writer.writeStartArray() + array.forEach(::encodeJsonElement) + writer.writeEndArray() + } +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodec.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodec.kt new file mode 100644 index 
00000000000..0c7491b2278
--- /dev/null
+++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodec.kt
@@ -0,0 +1,186 @@
/*
 * Copyright 2008-present MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.bson.codecs.kotlinx

import kotlin.reflect.KClass
import kotlin.reflect.full.findAnnotation
import kotlin.reflect.full.findAnnotations
import kotlin.reflect.full.hasAnnotation
import kotlin.reflect.full.primaryConstructor
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.InternalSerializationApi
import kotlinx.serialization.KSerializer
import kotlinx.serialization.Serializable
import kotlinx.serialization.SerializationException
import kotlinx.serialization.modules.SerializersModule
import kotlinx.serialization.serializer
import org.bson.AbstractBsonReader
import org.bson.BsonReader
import org.bson.BsonWriter
import org.bson.codecs.Codec
import org.bson.codecs.DecoderContext
import org.bson.codecs.EncoderContext
import org.bson.codecs.configuration.CodecConfigurationException
import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonDecoder
import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonEncoder
import org.bson.codecs.pojo.annotations.BsonCreator
import org.bson.codecs.pojo.annotations.BsonDiscriminator
import org.bson.codecs.pojo.annotations.BsonExtraElements
import org.bson.codecs.pojo.annotations.BsonId
import org.bson.codecs.pojo.annotations.BsonIgnore
import org.bson.codecs.pojo.annotations.BsonProperty
import org.bson.codecs.pojo.annotations.BsonRepresentation

/**
 * The Kotlin serializer codec which utilizes the kotlinx serialization module.
 *
 * Use the [create] method to create the codec
 */
@OptIn(ExperimentalSerializationApi::class, InternalSerializationApi::class)
public class KotlinSerializerCodec<T : Any>
private constructor(
    private val kClass: KClass<T>,
    private val serializer: KSerializer<T>,
    private val serializersModule: SerializersModule,
    private val bsonConfiguration: BsonConfiguration
) : Codec<T> {

    /** KotlinSerializerCodec companion object */
    public companion object {

        /**
         * Creates a `Codec<T>` for the kClass or returns null if there is no serializer available.
         *
         * @param T The codec type
         * @param serializersModule the serializers module to use
         * @param bsonConfiguration the bson configuration for serializing
         * @return the codec
         */
        public inline fun <reified T : Any> create(
            serializersModule: SerializersModule = defaultSerializersModule,
            bsonConfiguration: BsonConfiguration = BsonConfiguration()
        ): Codec<T>? = create(T::class, serializersModule, bsonConfiguration)

        /**
         * Creates a `Codec<T>` for the kClass or returns null if there is no serializer available.
         *
         * @param T The codec type
         * @param kClass the KClass for the codec
         * @param serializersModule the serializers module to use
         * @param bsonConfiguration the bson configuration for serializing
         * @return the codec
         */
        @Suppress("SwallowedException")
        public fun <T : Any> create(
            kClass: KClass<T>,
            serializersModule: SerializersModule = defaultSerializersModule,
            bsonConfiguration: BsonConfiguration = BsonConfiguration()
        ): Codec<T>? {
            return if (kClass.hasAnnotation<Serializable>()) {
                try {
                    create(kClass, kClass.serializer(), serializersModule, bsonConfiguration)
                } catch (exception: SerializationException) {
                    // No serializer could be produced for kClass: deliberately signal
                    // "not supported" with null rather than propagate.
                    null
                }
            } else {
                null
            }
        }

        /**
         * Creates a `Codec<T>` for the kClass using the supplied serializer
         *
         * @param T The codec type
         * @param kClass the KClass for the codec
         * @param serializer the KSerializer to use
         * @param serializersModule the serializers module to use
         * @param bsonConfiguration the bson configuration for serializing
         * @return the codec
         */
        public fun <T : Any> create(
            kClass: KClass<T>,
            serializer: KSerializer<T>,
            serializersModule: SerializersModule,
            bsonConfiguration: BsonConfiguration
        ): Codec<T> {
            validateAnnotations(kClass)
            return KotlinSerializerCodec(kClass, serializer, serializersModule, bsonConfiguration)
        }

        /** Rejects bson POJO annotations that have no effect with kotlinx serialization. */
        private fun validateAnnotations(kClass: KClass<*>) {
            codecConfigurationRequires(kClass.findAnnotation<BsonDiscriminator>() == null) {
                """Annotation 'BsonDiscriminator' is not supported with kotlin serialization,
                    | but found on ${kClass.simpleName}. Use `BsonConfiguration` with `KotlinSerializerCodec.create`
                    | to configure a discriminator."""
                    .trimMargin()
            }

            codecConfigurationRequires(kClass.constructors.all { it.findAnnotations<BsonCreator>().isEmpty() }) {
                """Annotation 'BsonCreator' is not supported with kotlin serialization,
                    | but found in ${kClass.simpleName}."""
                    .trimMargin()
            }

            kClass.primaryConstructor?.parameters?.forEach { param ->
                codecConfigurationRequires(param.findAnnotations<BsonId>().isEmpty()) {
                    """Annotation 'BsonId' is not supported with kotlin serialization,
                        | found on the parameter for ${param.name}. Use `@SerialName("_id")` instead."""
                        .trimMargin()
                }

                codecConfigurationRequires(param.findAnnotations<BsonProperty>().isEmpty()) {
                    """Annotation 'BsonProperty' is not supported with kotlin serialization,
                        | found on the parameter for ${param.name}. Use `@SerialName` instead."""
                        .trimMargin()
                }

                codecConfigurationRequires(param.findAnnotations<BsonIgnore>().isEmpty()) {
                    """Annotation 'BsonIgnore' is not supported with kotlinx serialization,
                        | found on the parameter for ${param.name}. Use `@Transient` annotation to ignore a property."""
                        .trimMargin()
                }

                codecConfigurationRequires(param.findAnnotations<BsonExtraElements>().isEmpty()) {
                    """Annotation 'BsonExtraElements' is not supported with kotlinx serialization,
                        | found on the parameter for ${param.name}."""
                        .trimMargin()
                }

                codecConfigurationRequires(param.findAnnotations<BsonRepresentation>().isEmpty()) {
                    """Annotation 'BsonRepresentation' is not supported with kotlinx serialization,
                        | found on the parameter for ${param.name}."""
                        .trimMargin()
                }
            }
        }

        /** Throws a [CodecConfigurationException] with the lazily-built message when `value` is false. */
        private fun codecConfigurationRequires(value: Boolean, lazyMessage: () -> String) {
            if (!value) {
                throw CodecConfigurationException(lazyMessage.invoke())
            }
        }
    }

    override fun encode(writer: BsonWriter, value: T, encoderContext: EncoderContext) {
        serializer.serialize(createBsonEncoder(writer, serializersModule, bsonConfiguration), value)
    }

    override fun getEncoderClass(): Class<T> = kClass.java

    override fun decode(reader: BsonReader, decoderContext: DecoderContext): T {
        // Decoding requires mark/peek support only AbstractBsonReader provides.
        require(reader is AbstractBsonReader)
        return serializer.deserialize(createBsonDecoder(reader, serializersModule, bsonConfiguration))
    }
}
diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecProvider.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecProvider.kt
new file mode 100644
index 00000000000..1ae5353dbaa
--- /dev/null
+++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecProvider.kt
@@ -0,0 +1,37 @@
/*
 * Copyright 2008-present MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlinx + +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.modules.SerializersModule +import org.bson.codecs.Codec +import org.bson.codecs.configuration.CodecProvider +import org.bson.codecs.configuration.CodecRegistry + +/** + * A Kotlin Serialization based Codec Provider + * + * The underlying class must be annotated with the `@Serializable`. + */ +@OptIn(ExperimentalSerializationApi::class) +public class KotlinSerializerCodecProvider( + private val serializersModule: SerializersModule = defaultSerializersModule, + private val bsonConfiguration: BsonConfiguration = BsonConfiguration() +) : CodecProvider { + + override fun get(clazz: Class, registry: CodecRegistry): Codec? = + KotlinSerializerCodec.create(clazz.kotlin, serializersModule, bsonConfiguration) +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/BsonCodecUtils.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/BsonCodecUtils.kt new file mode 100644 index 00000000000..daf6c7df6f9 --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/BsonCodecUtils.kt @@ -0,0 +1,194 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlinx.utils + +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.SerializationException +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.elementNames +import kotlinx.serialization.json.JsonNamingStrategy +import kotlinx.serialization.modules.SerializersModule +import org.bson.AbstractBsonReader +import org.bson.BsonWriter +import org.bson.codecs.kotlinx.BsonArrayDecoder +import org.bson.codecs.kotlinx.BsonConfiguration +import org.bson.codecs.kotlinx.BsonDecoder +import org.bson.codecs.kotlinx.BsonDecoderImpl +import org.bson.codecs.kotlinx.BsonDocumentDecoder +import org.bson.codecs.kotlinx.BsonEncoder +import org.bson.codecs.kotlinx.BsonEncoderImpl +import org.bson.codecs.kotlinx.BsonMapDecoder +import org.bson.codecs.kotlinx.BsonNamingStrategy +import org.bson.codecs.kotlinx.BsonPolymorphicDecoder +import org.bson.codecs.kotlinx.JsonBsonArrayDecoder +import org.bson.codecs.kotlinx.JsonBsonDecoderImpl +import org.bson.codecs.kotlinx.JsonBsonDocumentDecoder +import org.bson.codecs.kotlinx.JsonBsonEncoder +import org.bson.codecs.kotlinx.JsonBsonMapDecoder +import org.bson.codecs.kotlinx.JsonBsonPolymorphicDecoder + +@ExperimentalSerializationApi +internal object BsonCodecUtils { + + @Suppress("SwallowedException") + private val hasJsonEncoder: Boolean by lazy { + try { + Class.forName("kotlinx.serialization.json.JsonEncoder") + true + } catch (e: ClassNotFoundException) { + false + } + } + + @Suppress("SwallowedException") + 
private val hasJsonDecoder: Boolean by lazy { + try { + Class.forName("kotlinx.serialization.json.JsonDecoder") + true + } catch (e: ClassNotFoundException) { + false + } + } + + private val cachedElementNamesByDescriptor: MutableMap> = mutableMapOf() + + internal fun createBsonEncoder( + writer: BsonWriter, + serializersModule: SerializersModule, + configuration: BsonConfiguration + ): BsonEncoder { + return if (hasJsonEncoder) JsonBsonEncoder(writer, serializersModule, configuration) + else BsonEncoderImpl(writer, serializersModule, configuration) + } + + internal fun createBsonDecoder( + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration + ): BsonDecoder { + return if (hasJsonDecoder) JsonBsonDecoderImpl(reader, serializersModule, configuration) + else BsonDecoderImpl(reader, serializersModule, configuration) + } + + internal fun createBsonArrayDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration + ): BsonArrayDecoder { + return if (hasJsonDecoder) JsonBsonArrayDecoder(descriptor, reader, serializersModule, configuration) + else BsonArrayDecoder(descriptor, reader, serializersModule, configuration) + } + + internal fun createBsonDocumentDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration + ): BsonDocumentDecoder { + return if (hasJsonDecoder) JsonBsonDocumentDecoder(descriptor, reader, serializersModule, configuration) + else BsonDocumentDecoder(descriptor, reader, serializersModule, configuration) + } + + internal fun createBsonPolymorphicDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration + ): BsonPolymorphicDecoder { + return if (hasJsonDecoder) JsonBsonPolymorphicDecoder(descriptor, reader, serializersModule, configuration) + else 
BsonPolymorphicDecoder(descriptor, reader, serializersModule, configuration) + } + + internal fun createBsonMapDecoder( + descriptor: SerialDescriptor, + reader: AbstractBsonReader, + serializersModule: SerializersModule, + configuration: BsonConfiguration + ): BsonMapDecoder { + return if (hasJsonDecoder) JsonBsonMapDecoder(descriptor, reader, serializersModule, configuration) + else BsonMapDecoder(descriptor, reader, serializersModule, configuration) + } + + internal fun cacheElementNamesByDescriptor(descriptor: SerialDescriptor, configuration: BsonConfiguration) { + val convertedNameMap = + when (configuration.bsonNamingStrategy) { + BsonNamingStrategy.SNAKE_CASE -> { + val snakeCasedNames = descriptor.elementNames.associateWith { name -> convertCamelCase(name, '_') } + + snakeCasedNames.entries + .groupBy { entry -> entry.value } + .filter { group -> group.value.size > 1 } + .entries + .fold(StringBuilder("")) { acc, group -> + val keys = group.value.joinToString(", ") { entry -> entry.key } + acc.append("$keys in ${descriptor.serialName} generate same name: ${group.key}.\n") + } + .toString() + .takeIf { it.trim().isNotEmpty() } + ?.let { errorMessage: String -> throw SerializationException(errorMessage) } + + snakeCasedNames.entries.associate { it.value to it.key } + } + else -> emptyMap() + } + + cachedElementNamesByDescriptor[descriptor.serialName] = convertedNameMap + } + + internal fun getCachedElementNamesByDescriptor(descriptor: SerialDescriptor): Map { + return cachedElementNamesByDescriptor[descriptor.serialName] ?: emptyMap() + } + + // https://github.com/Kotlin/kotlinx.serialization/blob/f9f160a680da9f92c3bb121ae3644c96e57ba42e/formats/json/commonMain/src/kotlinx/serialization/json/JsonNamingStrategy.kt#L142-L174 + internal fun convertCamelCase(value: String, delimiter: Char) = + buildString(value.length * 2) { + var bufferedChar: Char? 
= null + var previousUpperCharsCount = 0 + + value.forEach { c -> + if (c.isUpperCase()) { + if (previousUpperCharsCount == 0 && isNotEmpty() && last() != delimiter) append(delimiter) + + bufferedChar?.let(::append) + + previousUpperCharsCount++ + bufferedChar = c.lowercaseChar() + } else { + if (bufferedChar != null) { + if (previousUpperCharsCount > 1 && c.isLetter()) { + append(delimiter) + } + append(bufferedChar) + previousUpperCharsCount = 0 + bufferedChar = null + } + append(c) + } + } + + if (bufferedChar != null) { + append(bufferedChar) + } + } + + internal fun BsonNamingStrategy?.toJsonNamingStrategy(): JsonNamingStrategy? { + return when (this) { + BsonNamingStrategy.SNAKE_CASE -> JsonNamingStrategy.SnakeCase + else -> null + } + } +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/SerializationModuleUtils.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/SerializationModuleUtils.kt new file mode 100644 index 00000000000..306644c81ad --- /dev/null +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/SerializationModuleUtils.kt @@ -0,0 +1,28 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlinx.utils + +internal object SerializationModuleUtils { + @Suppress("SwallowedException") + fun isClassAvailable(className: String): Boolean { + return try { + Class.forName(className) + true + } catch (e: ClassNotFoundException) { + false + } + } +} diff --git a/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecProviderTest.kt b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecProviderTest.kt new file mode 100644 index 00000000000..5a912e7bb3a --- /dev/null +++ b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecProviderTest.kt @@ -0,0 +1,149 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlinx + +import com.mongodb.MongoClientSettings +import kotlin.test.assertEquals +import kotlin.test.assertNotNull +import kotlin.test.assertNull +import kotlin.test.assertTrue +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.modules.SerializersModule +import kotlinx.serialization.modules.plus +import kotlinx.serialization.modules.polymorphic +import kotlinx.serialization.modules.subclass +import org.bson.BsonDocument +import org.bson.BsonDocumentReader +import org.bson.BsonDocumentWriter +import org.bson.codecs.DecoderContext +import org.bson.codecs.EncoderContext +import org.bson.codecs.kotlinx.samples.DataClassContainsOpen +import org.bson.codecs.kotlinx.samples.DataClassOpen +import org.bson.codecs.kotlinx.samples.DataClassOpenA +import org.bson.codecs.kotlinx.samples.DataClassOpenB +import org.bson.codecs.kotlinx.samples.DataClassParameterized +import org.bson.codecs.kotlinx.samples.DataClassSealedInterface +import org.bson.codecs.kotlinx.samples.DataClassWithSimpleValues +import org.bson.codecs.kotlinx.samples.SealedInterface +import org.bson.conversions.Bson +import org.bson.json.JsonReader +import org.bson.types.ObjectId +import org.junit.jupiter.api.Test + +class KotlinSerializerCodecProviderTest { + + data class NotMarkedSerializable(val t: String) + + @Test + fun shouldReturnNullForNonSerializableClass() { + assertNull(KotlinSerializerCodecProvider().get(NotMarkedSerializable::class.java, Bson.DEFAULT_CODEC_REGISTRY)) + assertNull(KotlinSerializerCodecProvider().get(DoubleArray::class.java, Bson.DEFAULT_CODEC_REGISTRY)) + assertNull(KotlinSerializerCodecProvider().get(CharSequence::class.java, Bson.DEFAULT_CODEC_REGISTRY)) + } + + @Test + fun shouldReturnKotlinSerializerCodecForDataClass() { + val provider = KotlinSerializerCodecProvider() + val codec = provider.get(DataClassWithSimpleValues::class.java, Bson.DEFAULT_CODEC_REGISTRY) + + assertNotNull(codec) + assertTrue { codec is 
KotlinSerializerCodec } + assertEquals(DataClassWithSimpleValues::class.java, codec.encoderClass) + } + + @Test + fun shouldReturnNullFoRawParameterizedDataClass() { + val codec = KotlinSerializerCodecProvider().get(DataClassParameterized::class.java, Bson.DEFAULT_CODEC_REGISTRY) + assertNull(codec) + } + + @Test + fun shouldReturnKotlinSerializerCodecUsingDefaultRegistry() { + val codec = MongoClientSettings.getDefaultCodecRegistry().get(DataClassWithSimpleValues::class.java) + + assertNotNull(codec) + assertTrue { codec is KotlinSerializerCodec } + assertEquals(DataClassWithSimpleValues::class.java, codec.encoderClass) + } + + @Test + fun testDataClassWithSimpleValuesFieldOrdering() { + val codec = MongoClientSettings.getDefaultCodecRegistry().get(DataClassWithSimpleValues::class.java) + val expected = DataClassWithSimpleValues('c', 0, 1, 22, 42L, 4.0f, 4.2, true, "String") + + val numberLong = "\$numberLong" + val actual = + codec.decode( + JsonReader( + """{"boolean": true, "byte": 0, "char": "c", "double": 4.2, "float": 4.0, "int": 22, + |"long": {"$numberLong": "42"}, "short": 1, "string": "String"}""" + .trimMargin()), + DecoderContext.builder().build()) + + assertEquals(expected, actual) + } + + @Test + fun testDataClassSealedFieldOrdering() { + val codec = MongoClientSettings.getDefaultCodecRegistry().get(SealedInterface::class.java) + + val objectId = ObjectId("111111111111111111111111") + val oid = "\$oid" + val expected = DataClassSealedInterface(objectId, "string") + val actual = + codec.decode( + JsonReader( + """{"name": "string", "_id": {$oid: "${objectId.toHexString()}"}, + |"_t": "org.bson.codecs.kotlinx.samples.DataClassSealedInterface"}""" + .trimMargin()), + DecoderContext.builder().build()) + + assertEquals(expected, actual) + } + + @OptIn(ExperimentalSerializationApi::class) + @Test + fun shouldAllowOverridingOfSerializersModuleAndBsonConfigurationInConstructor() { + + val serializersModule = + SerializersModule { + 
this.polymorphic(DataClassOpen::class) { + this.subclass(DataClassOpenA::class) + this.subclass(DataClassOpenB::class) + } + } + defaultSerializersModule + + val bsonConfiguration = BsonConfiguration(classDiscriminator = "__type") + val dataClassContainsOpenB = DataClassContainsOpen(DataClassOpenB(1)) + + val codec = + KotlinSerializerCodecProvider(serializersModule, bsonConfiguration) + .get(DataClassContainsOpen::class.java, Bson.DEFAULT_CODEC_REGISTRY)!! + + assertTrue { codec is KotlinSerializerCodec } + val encodedDocument = BsonDocument() + val writer = BsonDocumentWriter(encodedDocument) + codec.encode(writer, dataClassContainsOpenB, EncoderContext.builder().build()) + writer.flush() + + assertEquals( + BsonDocument.parse("""{"open": {"__type": "org.bson.codecs.kotlinx.samples.DataClassOpenB", "b": 1}}"""), + encodedDocument) + + assertEquals( + dataClassContainsOpenB, codec.decode(BsonDocumentReader(encodedDocument), DecoderContext.builder().build())) + } +} diff --git a/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecTest.kt b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecTest.kt new file mode 100644 index 00000000000..f9b3eb753c5 --- /dev/null +++ b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecTest.kt @@ -0,0 +1,1227 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.kotlinx + +import java.math.BigDecimal +import java.util.Base64 +import java.util.stream.Stream +import kotlin.test.assertEquals +import kotlinx.datetime.Instant +import kotlinx.datetime.LocalDate +import kotlinx.datetime.LocalDateTime +import kotlinx.datetime.LocalTime +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.MissingFieldException +import kotlinx.serialization.SerializationException +import kotlinx.serialization.json.JsonPrimitive +import kotlinx.serialization.json.buildJsonArray +import kotlinx.serialization.json.buildJsonObject +import kotlinx.serialization.json.put +import kotlinx.serialization.modules.SerializersModule +import kotlinx.serialization.modules.plus +import kotlinx.serialization.modules.polymorphic +import kotlinx.serialization.modules.subclass +import org.bson.BsonBoolean +import org.bson.BsonDocument +import org.bson.BsonDocumentReader +import org.bson.BsonDocumentWriter +import org.bson.BsonDouble +import org.bson.BsonInt32 +import org.bson.BsonInt64 +import org.bson.BsonInvalidOperationException +import org.bson.BsonMaxKey +import org.bson.BsonMinKey +import org.bson.BsonString +import org.bson.BsonUndefined +import org.bson.codecs.DecoderContext +import org.bson.codecs.EncoderContext +import org.bson.codecs.configuration.CodecConfigurationException +import org.bson.codecs.kotlinx.samples.Box +import org.bson.codecs.kotlinx.samples.DataClassBsonValues +import org.bson.codecs.kotlinx.samples.DataClassContainsOpen +import org.bson.codecs.kotlinx.samples.DataClassContainsValueClass +import org.bson.codecs.kotlinx.samples.DataClassEmbedded +import org.bson.codecs.kotlinx.samples.DataClassKey +import org.bson.codecs.kotlinx.samples.DataClassLastItemDefaultsToNull +import org.bson.codecs.kotlinx.samples.DataClassListOfDataClasses +import org.bson.codecs.kotlinx.samples.DataClassListOfListOfDataClasses +import org.bson.codecs.kotlinx.samples.DataClassListOfSealed +import 
org.bson.codecs.kotlinx.samples.DataClassMapOfDataClasses +import org.bson.codecs.kotlinx.samples.DataClassMapOfListOfDataClasses +import org.bson.codecs.kotlinx.samples.DataClassNestedParameterizedTypes +import org.bson.codecs.kotlinx.samples.DataClassOpen +import org.bson.codecs.kotlinx.samples.DataClassOpenA +import org.bson.codecs.kotlinx.samples.DataClassOpenB +import org.bson.codecs.kotlinx.samples.DataClassOptionalBsonValues +import org.bson.codecs.kotlinx.samples.DataClassParameterized +import org.bson.codecs.kotlinx.samples.DataClassSealed +import org.bson.codecs.kotlinx.samples.DataClassSealedA +import org.bson.codecs.kotlinx.samples.DataClassSealedB +import org.bson.codecs.kotlinx.samples.DataClassSealedC +import org.bson.codecs.kotlinx.samples.DataClassSelfReferential +import org.bson.codecs.kotlinx.samples.DataClassWithAnnotations +import org.bson.codecs.kotlinx.samples.DataClassWithBooleanMapKey +import org.bson.codecs.kotlinx.samples.DataClassWithBsonConstructor +import org.bson.codecs.kotlinx.samples.DataClassWithBsonDiscriminator +import org.bson.codecs.kotlinx.samples.DataClassWithBsonExtraElements +import org.bson.codecs.kotlinx.samples.DataClassWithBsonId +import org.bson.codecs.kotlinx.samples.DataClassWithBsonIgnore +import org.bson.codecs.kotlinx.samples.DataClassWithBsonProperty +import org.bson.codecs.kotlinx.samples.DataClassWithBsonRepresentation +import org.bson.codecs.kotlinx.samples.DataClassWithCamelCase +import org.bson.codecs.kotlinx.samples.DataClassWithCollections +import org.bson.codecs.kotlinx.samples.DataClassWithContextualDateValues +import org.bson.codecs.kotlinx.samples.DataClassWithDataClassMapKey +import org.bson.codecs.kotlinx.samples.DataClassWithDateValues +import org.bson.codecs.kotlinx.samples.DataClassWithDefaults +import org.bson.codecs.kotlinx.samples.DataClassWithEmbedded +import org.bson.codecs.kotlinx.samples.DataClassWithEncodeDefault +import org.bson.codecs.kotlinx.samples.DataClassWithEnum +import 
org.bson.codecs.kotlinx.samples.DataClassWithEnumMapKey +import org.bson.codecs.kotlinx.samples.DataClassWithFailingInit +import org.bson.codecs.kotlinx.samples.DataClassWithJsonElement +import org.bson.codecs.kotlinx.samples.DataClassWithJsonElements +import org.bson.codecs.kotlinx.samples.DataClassWithJsonElementsNullable +import org.bson.codecs.kotlinx.samples.DataClassWithKotlinAllowedName +import org.bson.codecs.kotlinx.samples.DataClassWithListThatLastItemDefaultsToNull +import org.bson.codecs.kotlinx.samples.DataClassWithMutableList +import org.bson.codecs.kotlinx.samples.DataClassWithMutableMap +import org.bson.codecs.kotlinx.samples.DataClassWithMutableSet +import org.bson.codecs.kotlinx.samples.DataClassWithNestedParameterized +import org.bson.codecs.kotlinx.samples.DataClassWithNestedParameterizedDataClass +import org.bson.codecs.kotlinx.samples.DataClassWithNullableGeneric +import org.bson.codecs.kotlinx.samples.DataClassWithNulls +import org.bson.codecs.kotlinx.samples.DataClassWithPair +import org.bson.codecs.kotlinx.samples.DataClassWithParameterizedDataClass +import org.bson.codecs.kotlinx.samples.DataClassWithRequired +import org.bson.codecs.kotlinx.samples.DataClassWithSameSnakeCaseName +import org.bson.codecs.kotlinx.samples.DataClassWithSequence +import org.bson.codecs.kotlinx.samples.DataClassWithSimpleValues +import org.bson.codecs.kotlinx.samples.DataClassWithTriple +import org.bson.codecs.kotlinx.samples.Key +import org.bson.codecs.kotlinx.samples.SealedInterface +import org.bson.codecs.kotlinx.samples.ValueClass +import org.bson.json.JsonMode +import org.bson.json.JsonWriterSettings +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.MethodSource + +@OptIn(ExperimentalSerializationApi::class) +@Suppress("LargeClass") +class KotlinSerializerCodecTest { + private val oid = "\$oid" + private val numberLong = "\$numberLong" + 
private val numberDecimal = "\$numberDecimal" + private val emptyDocument = "{}" + private val altConfiguration = + BsonConfiguration(encodeDefaults = false, classDiscriminator = "_t", explicitNulls = true) + + private val allBsonTypesJson = + """{ + | "id": {"$oid": "111111111111111111111111"}, + | "arrayEmpty": [], + | "arraySimple": [{"${'$'}numberInt": "1"}, {"${'$'}numberInt": "2"}, {"${'$'}numberInt": "3"}], + | "arrayComplex": [{"a": {"${'$'}numberInt": "1"}}, {"a": {"${'$'}numberInt": "2"}}], + | "arrayMixedTypes": [{"${'$'}numberInt": "1"}, {"${'$'}numberInt": "2"}, true, + | [{"${'$'}numberInt": "1"}, {"${'$'}numberInt": "2"}, {"${'$'}numberInt": "3"}], + | {"a": {"${'$'}numberInt": "2"}}], + | "arrayComplexMixedTypes": [{"a": {"${'$'}numberInt": "1"}}, {"a": "a"}], + | "binary": {"${'$'}binary": {"base64": "S2Fma2Egcm9ja3Mh", "subType": "00"}}, + | "boolean": true, + | "code": {"${'$'}code": "int i = 0;"}, + | "codeWithScope": {"${'$'}code": "int x = y", "${'$'}scope": {"y": 1}}, + | "dateTime": {"${'$'}date": {"${'$'}numberLong": "1577836801000"}}, + | "decimal128": {"${'$'}numberDecimal": "1.0"}, + | "documentEmpty": {}, + | "document": {"a": {"${'$'}numberInt": "1"}}, + | "double": {"${'$'}numberDouble": "62.0"}, + | "int32": {"${'$'}numberInt": "42"}, + | "int64": {"${'$'}numberLong": "52"}, + | "maxKey": {"${'$'}maxKey": 1}, + | "minKey": {"${'$'}minKey": 1}, + | "objectId": {"${'$'}oid": "211111111111111111111112"}, + | "regex": {"${'$'}regularExpression": {"pattern": "^test.*regex.*xyz$", "options": "i"}}, + | "string": "the fox ...", + | "symbol": {"${'$'}symbol": "ruby stuff"}, + | "timestamp": {"${'$'}timestamp": {"t": 305419896, "i": 5}}, + | "undefined": {"${'$'}undefined": true} + | }""" + .trimMargin() + + private val allBsonTypesDocument = BsonDocument.parse(allBsonTypesJson) + private val jsonAllSupportedTypesDocument: BsonDocument by + lazy { + val doc = BsonDocument.parse(allBsonTypesJson) + listOf("minKey", "maxKey", "code", 
"codeWithScope", "regex", "symbol", "undefined").forEach { + doc.remove(it) + } + doc + } + + companion object { + @JvmStatic + fun testTypesCastingDataClassWithSimpleValues(): Stream { + return Stream.of( + BsonDocument() + .append("char", BsonString("c")) + .append("byte", BsonInt32(1)) + .append("short", BsonInt32(2)) + .append("int", BsonInt32(10)) + .append("long", BsonInt32(10)) + .append("float", BsonInt32(2)) + .append("double", BsonInt32(3)) + .append("boolean", BsonBoolean.TRUE) + .append("string", BsonString("String")), + BsonDocument() + .append("char", BsonString("c")) + .append("byte", BsonDouble(1.0)) + .append("short", BsonDouble(2.0)) + .append("int", BsonDouble(9.9999999999999992)) + .append("long", BsonDouble(9.9999999999999992)) + .append("float", BsonDouble(2.0)) + .append("double", BsonDouble(3.0)) + .append("boolean", BsonBoolean.TRUE) + .append("string", BsonString("String")), + BsonDocument() + .append("char", BsonString("c")) + .append("byte", BsonDouble(1.0)) + .append("short", BsonDouble(2.0)) + .append("int", BsonDouble(10.0)) + .append("long", BsonDouble(10.0)) + .append("float", BsonDouble(2.0)) + .append("double", BsonDouble(3.0)) + .append("boolean", BsonBoolean.TRUE) + .append("string", BsonString("String")), + BsonDocument() + .append("char", BsonString("c")) + .append("byte", BsonInt64(1)) + .append("short", BsonInt64(2)) + .append("int", BsonInt64(10)) + .append("long", BsonInt64(10)) + .append("float", BsonInt64(2)) + .append("double", BsonInt64(3)) + .append("boolean", BsonBoolean.TRUE) + .append("string", BsonString("String"))) + } + } + + @ParameterizedTest + @MethodSource("testTypesCastingDataClassWithSimpleValues") + fun testTypesCastingDataClassWithSimpleValues(data: BsonDocument) { + val expectedDataClass = DataClassWithSimpleValues('c', 1, 2, 10, 10L, 2.0f, 3.0, true, "String") + + assertDecodesTo(data, expectedDataClass) + } + + @Test + fun testDataClassWithDateValuesContextualSerialization() { + val expected = + "{\n" 
+ + " \"instant\": {\"\$date\": \"2001-09-09T01:46:40Z\"}, \n" + + " \"localTime\": {\"\$date\": \"1970-01-01T00:00:10Z\"}, \n" + + " \"localDateTime\": {\"\$date\": \"2021-01-01T00:00:04Z\"}, \n" + + " \"localDate\": {\"\$date\": \"1970-10-28T00:00:00Z\"}\n" + + "}".trimMargin() + + val expectedDataClass = + DataClassWithContextualDateValues( + Instant.fromEpochMilliseconds(10_000_000_000_00), + LocalTime.fromMillisecondOfDay(10_000), + LocalDateTime.parse("2021-01-01T00:00:04"), + LocalDate.fromEpochDays(300)) + + assertRoundTrips(expected, expectedDataClass) + } + + @Test + fun testDataClassWithDateValuesStandard() { + val expected = + "{\n" + + " \"instant\": \"1970-01-01T00:00:01Z\", \n" + + " \"localTime\": \"00:00:01\", \n" + + " \"localDateTime\": \"2021-01-01T00:00:04\", \n" + + " \"localDate\": \"1970-01-02\"\n" + + "}".trimMargin() + + val expectedDataClass = + DataClassWithDateValues( + Instant.fromEpochMilliseconds(1000), + LocalTime.fromMillisecondOfDay(1000), + LocalDateTime.parse("2021-01-01T00:00:04"), + LocalDate.fromEpochDays(1)) + + assertRoundTrips(expected, expectedDataClass) + } + + @Test + fun testDataClassWithComplexTypes() { + val expected = + """{ + | "listSimple": ["a", "b", "c", "d"], + | "listList": [["a", "b"], [], ["c", "d"]], + | "listMap": [{"a": 1, "b": 2}, {}, {"c": 3, "d": 4}], + | "mapSimple": {"a": 1, "b": 2, "c": 3, "d": 4}, + | "mapList": {"a": ["a", "b"], "b": [], "c": ["c", "d"]}, + | "mapMap" : {"a": {"a": 1, "b": 2}, "b": {}, "c": {"c": 3, "d": 4}} + |}""" + .trimMargin() + + val dataClass = + DataClassWithCollections( + listOf("a", "b", "c", "d"), + listOf(listOf("a", "b"), emptyList(), listOf("c", "d")), + listOf(mapOf("a" to 1, "b" to 2), emptyMap(), mapOf("c" to 3, "d" to 4)), + mapOf("a" to 1, "b" to 2, "c" to 3, "d" to 4), + mapOf("a" to listOf("a", "b"), "b" to emptyList(), "c" to listOf("c", "d")), + mapOf("a" to mapOf("a" to 1, "b" to 2), "b" to emptyMap(), "c" to mapOf("c" to 3, "d" to 4))) + + 
assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithDefaults() { + val expectedDefault = + """{ + | "boolean": false, + | "string": "String", + | "listSimple": ["a", "b", "c"] + |}""" + .trimMargin() + + val defaultDataClass = DataClassWithDefaults() + assertRoundTrips(expectedDefault, defaultDataClass) + assertRoundTrips(emptyDocument, defaultDataClass, altConfiguration) + + val expectedSomeOverrides = """{"boolean": true, "listSimple": ["a"]}""" + val someOverridesDataClass = DataClassWithDefaults(boolean = true, listSimple = listOf("a")) + assertRoundTrips(expectedSomeOverrides, someOverridesDataClass, altConfiguration) + } + + @Test + fun testDataClassWithNulls() { + val expectedNulls = + """{ + | "boolean": null, + | "string": null, + | "listSimple": null + |}""" + .trimMargin() + + val dataClass = DataClassWithNulls(null, null, null) + assertRoundTrips(emptyDocument, dataClass) + assertRoundTrips(expectedNulls, dataClass, altConfiguration) + } + + @Test + fun testDataClassWithListThatLastItemDefaultsToNull() { + val expectedWithOutNulls = + """{ + | "elements": [{"required": "required"}, {"required": "required"}], + |}""" + .trimMargin() + + val dataClass = + DataClassWithListThatLastItemDefaultsToNull( + listOf(DataClassLastItemDefaultsToNull("required"), DataClassLastItemDefaultsToNull("required"))) + assertRoundTrips(expectedWithOutNulls, dataClass) + + val expectedWithNulls = + """{ + | "elements": [{"required": "required", "optional": null}, {"required": "required", "optional": null}], + |}""" + .trimMargin() + assertRoundTrips(expectedWithNulls, dataClass, BsonConfiguration(explicitNulls = true)) + } + + @Test + fun testDataClassWithNullableGenericsNotNull() { + val expected = + """{ + | "box": {"boxed": "String"} + |}""" + .trimMargin() + + val dataClass = DataClassWithNullableGeneric(Box("String")) + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithNullableGenericsNull() { + val expectedDefault = 
"""{"box": {}}""" + val dataClass = DataClassWithNullableGeneric(Box(null)) + assertRoundTrips(expectedDefault, dataClass) + val expectedNull = """{"box": {"boxed": null}}""" + assertRoundTrips(expectedNull, dataClass, altConfiguration) + } + + @Test + fun testDataClassSelfReferential() { + val expected = + """{"name": "tree", + | "left": {"name": "L", "left": {"name": "LL"}, "right": {"name": "LR"}}, + | "right": {"name": "R", + | "left": {"name": "RL", + | "left": {"name": "RLL"}, + | "right": {"name": "RLR"}}, + | "right": {"name": "RR"}} + |}""" + .trimMargin() + val dataClass = + DataClassSelfReferential( + "tree", + DataClassSelfReferential("L", DataClassSelfReferential("LL"), DataClassSelfReferential("LR")), + DataClassSelfReferential( + "R", + DataClassSelfReferential("RL", DataClassSelfReferential("RLL"), DataClassSelfReferential("RLR")), + DataClassSelfReferential("RR"))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithEmbedded() { + val expected = """{"id": "myId", "embedded": {"name": "embedded1"}}""" + val dataClass = DataClassWithEmbedded("myId", DataClassEmbedded("embedded1")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassListOfDataClasses() { + val expected = """{"id": "myId", "nested": [{"name": "embedded1"}, {"name": "embedded2"}]}""" + val dataClass = + DataClassListOfDataClasses("myId", listOf(DataClassEmbedded("embedded1"), DataClassEmbedded("embedded2"))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassListOfListOfDataClasses() { + val expected = """{"id": "myId", "nested": [[{"name": "embedded1"}], [{"name": "embedded2"}]]}""" + val dataClass = + DataClassListOfListOfDataClasses( + "myId", listOf(listOf(DataClassEmbedded("embedded1")), listOf(DataClassEmbedded("embedded2")))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassMapOfDataClasses() { + val expected = """{"id": "myId", "nested": {"first": {"name": "embedded1"}, "second": 
{"name": "embedded2"}}}""" + val dataClass = + DataClassMapOfDataClasses( + "myId", mapOf("first" to DataClassEmbedded("embedded1"), "second" to DataClassEmbedded("embedded2"))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassMapOfListOfDataClasses() { + val expected = + """{"id": "myId", "nested": {"first": [{"name": "embedded1"}], "second": [{"name": "embedded2"}]}}""" + val dataClass = + DataClassMapOfListOfDataClasses( + "myId", + mapOf( + "first" to listOf(DataClassEmbedded("embedded1")), + "second" to listOf(DataClassEmbedded("embedded2")))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithParameterizedDataClass() { + val expected = + """{"id": "myId", + | "parameterizedDataClass": {"number": 2.0, "string": "myString", + | "parameterizedList": [{"name": "embedded1"}]} + |}""" + .trimMargin() + val dataClass = + DataClassWithParameterizedDataClass( + "myId", DataClassParameterized(2.0, "myString", listOf(DataClassEmbedded("embedded1")))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithNestedParameterizedDataClass() { + val expected = + """{"id": "myId", + |"nestedParameterized": { + | "parameterizedDataClass": + | {"number": 4.2, "string": "myString", "parameterizedList": [{"name": "embedded1"}]}, + | "other": "myOtherString", "optionalOther": "myOptionalOtherString" + | } + |}""" + .trimMargin() + val dataClass = + DataClassWithNestedParameterizedDataClass( + "myId", + DataClassWithNestedParameterized( + DataClassParameterized(4.2, "myString", listOf(DataClassEmbedded("embedded1"))), + "myOtherString", + "myOptionalOtherString")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithPair() { + val expected = """{"pair": {"first": "a", "second": 1}}""" + val dataClass = DataClassWithPair("a" to 1) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithTriple() { + val expected = """{"triple": {"first": "a", "second": 1, 
"third": 2.1}}""" + val dataClass = DataClassWithTriple(Triple("a", 1, 2.1)) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassNestedParameterizedTypes() { + val expected = + """{ + |"triple": { + | "first": "0", + | "second": {"first": 1, "second": {"first": 1.2, "second": {"first": "1.3", "second": 1.3}}}, + | "third": {"first": 2, "second": {"first": 2.1, "second": "two dot two"}, + | "third": {"first": "3.1", "second": {"first": 3.2, "second": "three dot two" }, + | "third": 3.3 }} + | } + |}""" + .trimMargin() + val dataClass = + DataClassNestedParameterizedTypes( + Triple( + "0", + Pair(1, Pair(1.2, Pair("1.3", 1.3))), + Triple(2, Pair(2.1, "two dot two"), Triple("3.1", Pair(3.2, "three dot two"), 3.3)))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithMutableList() { + val expected = """{"value": ["A", "B", "C"]}""" + val dataClass = DataClassWithMutableList(mutableListOf("A", "B", "C")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithMutableSet() { + val expected = """{"value": ["A", "B", "C"]}""" + val dataClass = DataClassWithMutableSet(mutableSetOf("A", "B", "C")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithMutableMap() { + val expected = """{"value": {"a": "A", "b": "B", "c": "C"}}""" + val dataClass = DataClassWithMutableMap(mutableMapOf("a" to "A", "b" to "B", "c" to "C")) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithAnnotations() { + val expected = """{"_id": "id", "nom": "name", "string": "string"}""" + val dataClass = DataClassWithAnnotations("id", "name", "string") + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithEncodeDefault() { + val expectedDefault = + """{ + | "boolean": false, + | "listSimple": ["a", "b", "c"] + |}""" + .trimMargin() + + val defaultDataClass = DataClassWithEncodeDefault() + assertRoundTrips(expectedDefault, defaultDataClass) + 
assertRoundTrips("""{"listSimple": ["a", "b", "c"]}""", defaultDataClass, altConfiguration) + + val expectedSomeOverrides = """{"string": "STRING", "listSimple": ["a"]}""" + val someOverridesDataClass = DataClassWithEncodeDefault(string = "STRING", listSimple = listOf("a")) + assertRoundTrips(expectedSomeOverrides, someOverridesDataClass, altConfiguration) + } + + @Test + fun testDataClassWithRequired() { + val expectedDefault = + """{ + | "boolean": false, + | "string": "String", + | "listSimple": ["a", "b", "c"] + |}""" + .trimMargin() + + val defaultDataClass = DataClassWithRequired() + assertRoundTrips(expectedDefault, defaultDataClass) + + assertThrows { deserialize(BsonDocument()) } + } + + @Test + fun testDataClassWithEnum() { + val expected = """{"value": "A"}""" + + val dataClass = DataClassWithEnum(Key.A) + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithEnumKeyMap() { + val expected = """{"map": {"A": true, "B": false}}""" + + val dataClass = DataClassWithEnumMapKey(mapOf(Key.A to true, Key.B to false)) + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithSequence() { + val dataClass = DataClassWithSequence(listOf("A", "B", "C").asSequence()) + assertThrows { serialize(dataClass) } + } + + @Test + fun testDataClassWithBooleanKeyMap() { + val dataClass = DataClassWithBooleanMapKey(mapOf(true to true, false to false)) + assertThrows { serialize(dataClass) } + assertThrows { + deserialize(BsonDocument.parse("""{"map": {"true": true}}""")) + } + } + + @Test + fun testDataClassWithDataClassKeyMap() { + val dataClass = DataClassWithDataClassMapKey(mapOf(DataClassKey("A") to true, DataClassKey("A") to false)) + assertThrows { serialize(dataClass) } + assertThrows { + deserialize(BsonDocument.parse("""{"map": {"A": true}}""")) + } + } + + @Test + fun testDataClassEmbeddedWithExtraData() { + val expected = + """{ + | "extraA": "extraA", + | "name": "NAME", + | "extraB": "extraB" + |}""" + .trimMargin() + + val 
dataClass = DataClassEmbedded("NAME") + assertDecodesTo(BsonDocument.parse(expected), dataClass) + } + + @Test + fun testDataClassBsonValues() { + + val dataClass = + DataClassBsonValues( + allBsonTypesDocument["id"]!!.asObjectId().value, + allBsonTypesDocument["arrayEmpty"]!!.asArray(), + allBsonTypesDocument["arraySimple"]!!.asArray(), + allBsonTypesDocument["arrayComplex"]!!.asArray(), + allBsonTypesDocument["arrayMixedTypes"]!!.asArray(), + allBsonTypesDocument["arrayComplexMixedTypes"]!!.asArray(), + allBsonTypesDocument["binary"]!!.asBinary(), + allBsonTypesDocument["boolean"]!!.asBoolean(), + allBsonTypesDocument["code"]!!.asJavaScript(), + allBsonTypesDocument["codeWithScope"]!!.asJavaScriptWithScope(), + allBsonTypesDocument["dateTime"]!!.asDateTime(), + allBsonTypesDocument["decimal128"]!!.asDecimal128(), + allBsonTypesDocument["documentEmpty"]!!.asDocument(), + allBsonTypesDocument["document"]!!.asDocument(), + allBsonTypesDocument["double"]!!.asDouble(), + allBsonTypesDocument["int32"]!!.asInt32(), + allBsonTypesDocument["int64"]!!.asInt64(), + allBsonTypesDocument["maxKey"]!! as BsonMaxKey, + allBsonTypesDocument["minKey"]!! as BsonMinKey, + allBsonTypesDocument["objectId"]!!.asObjectId(), + allBsonTypesDocument["regex"]!!.asRegularExpression(), + allBsonTypesDocument["string"]!!.asString(), + allBsonTypesDocument["symbol"]!!.asSymbol(), + allBsonTypesDocument["timestamp"]!!.asTimestamp(), + allBsonTypesDocument["undefined"]!! 
as BsonUndefined) + + assertRoundTrips(allBsonTypesJson, dataClass) + } + + @Test + fun testDataClassOptionalBsonValues() { + val dataClass = + DataClassOptionalBsonValues( + allBsonTypesDocument["id"]!!.asObjectId().value, + allBsonTypesDocument["arrayEmpty"]!!.asArray(), + allBsonTypesDocument["arraySimple"]!!.asArray(), + allBsonTypesDocument["arrayComplex"]!!.asArray(), + allBsonTypesDocument["arrayMixedTypes"]!!.asArray(), + allBsonTypesDocument["arrayComplexMixedTypes"]!!.asArray(), + allBsonTypesDocument["binary"]!!.asBinary(), + allBsonTypesDocument["boolean"]!!.asBoolean(), + allBsonTypesDocument["code"]!!.asJavaScript(), + allBsonTypesDocument["codeWithScope"]!!.asJavaScriptWithScope(), + allBsonTypesDocument["dateTime"]!!.asDateTime(), + allBsonTypesDocument["decimal128"]!!.asDecimal128(), + allBsonTypesDocument["documentEmpty"]!!.asDocument(), + allBsonTypesDocument["document"]!!.asDocument(), + allBsonTypesDocument["double"]!!.asDouble(), + allBsonTypesDocument["int32"]!!.asInt32(), + allBsonTypesDocument["int64"]!!.asInt64(), + allBsonTypesDocument["maxKey"]!! as BsonMaxKey, + allBsonTypesDocument["minKey"]!! as BsonMinKey, + allBsonTypesDocument["objectId"]!!.asObjectId(), + allBsonTypesDocument["regex"]!!.asRegularExpression(), + allBsonTypesDocument["string"]!!.asString(), + allBsonTypesDocument["symbol"]!!.asSymbol(), + allBsonTypesDocument["timestamp"]!!.asTimestamp(), + allBsonTypesDocument["undefined"]!! 
as BsonUndefined) + + assertRoundTrips(allBsonTypesJson, dataClass) + + val emptyDataClass = + DataClassOptionalBsonValues( + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null) + + assertRoundTrips("{}", emptyDataClass) + assertRoundTrips( + """{ "id": null, "arrayEmpty": null, "arraySimple": null, "arrayComplex": null, "arrayMixedTypes": null, + | "arrayComplexMixedTypes": null, "binary": null, "boolean": null, "code": null, "codeWithScope": null, + | "dateTime": null, "decimal128": null, "documentEmpty": null, "document": null, "double": null, + | "int32": null, "int64": null, "maxKey": null, "minKey": null, "objectId": null, "regex": null, + | "string": null, "symbol": null, "timestamp": null, "undefined": null }""" + .trimMargin(), + emptyDataClass, + BsonConfiguration(explicitNulls = true)) + } + + @Test + fun testDataClassSealed() { + val expectedA = """{"a": "string"}""" + val dataClassA = DataClassSealedA("string") + assertRoundTrips(expectedA, dataClassA) + + val expectedB = """{"b": 1}""" + val dataClassB = DataClassSealedB(1) + assertRoundTrips(expectedB, dataClassB) + + val expectedC = """{"c": "String"}""" + val dataClassC = DataClassSealedC("String") + assertRoundTrips(expectedC, dataClassC) + + val expectedDataClassSealedA = """{"_t": "org.bson.codecs.kotlinx.samples.DataClassSealedA", "a": "string"}""" + val dataClassSealedA = DataClassSealedA("string") as DataClassSealed + assertRoundTrips(expectedDataClassSealedA, dataClassSealedA) + + val expectedDataClassSealedB = """{"_t": "org.bson.codecs.kotlinx.samples.DataClassSealedB", "b": 1}""" + val dataClassSealedB = DataClassSealedB(1) as DataClassSealed + assertRoundTrips(expectedDataClassSealedB, dataClassSealedB) + + val expectedDataClassSealedC = """{"_t": "C", "c": "String"}""" + val dataClassSealedC = DataClassSealedC("String") as DataClassSealed + 
assertRoundTrips(expectedDataClassSealedC, dataClassSealedC) + + val dataClassListOfSealed = DataClassListOfSealed(listOf(dataClassA, dataClassB, dataClassC)) + val expectedListOfSealed = + """{"items": [$expectedDataClassSealedA, $expectedDataClassSealedB, $expectedDataClassSealedC]}""" + assertRoundTrips(expectedListOfSealed, dataClassListOfSealed) + + val expectedListOfSealedDiscriminator = expectedListOfSealed.replace("_t", "#class") + assertRoundTrips( + expectedListOfSealedDiscriminator, dataClassListOfSealed, BsonConfiguration(classDiscriminator = "#class")) + } + + @Test + fun testDataClassOpen() { + val expectedA = """{"a": "string"}""" + val dataClassA = DataClassOpenA("string") + assertRoundTrips(expectedA, dataClassA) + + val expectedB = """{"b": 1}""" + val dataClassB = DataClassOpenB(1) + assertRoundTrips(expectedB, dataClassB) + + val serializersModule = + SerializersModule { + this.polymorphic(DataClassOpen::class) { + this.subclass(DataClassOpenA::class) + this.subclass(DataClassOpenB::class) + } + } + defaultSerializersModule + + val dataClassContainsOpenA = DataClassContainsOpen(dataClassA) + val expectedOpenA = """{"open": {"_t": "org.bson.codecs.kotlinx.samples.DataClassOpenA", "a": "string"}}""" + assertRoundTrips(expectedOpenA, dataClassContainsOpenA, serializersModule = serializersModule) + + val dataClassContainsOpenB = DataClassContainsOpen(dataClassB) + val expectedOpenB = """{"open": {"#class": "org.bson.codecs.kotlinx.samples.DataClassOpenB", "b": 1}}""" + assertRoundTrips( + expectedOpenB, + dataClassContainsOpenB, + configuration = BsonConfiguration(classDiscriminator = "#class"), + serializersModule = serializersModule) + } + + @Test + fun testValueClasses() { + val expected = """{"value": "valueString"}""" + val valueClass = ValueClass("valueString") + val dataClass = DataClassContainsValueClass(valueClass) + + assertThrows() { serialize(valueClass) } + assertRoundTrips(expected, dataClass) + } + + @Test + fun 
testDataClassWithJsonElement() { + val expected = + """{"value": { + |"char": "c", + |"byte": 0, + |"short": 1, + |"int": 22, + |"long": {"$numberLong": "3000000000"}, + |"decimal": {"$numberDecimal": "10000000000000000000"} + |"decimal2": {"$numberDecimal": "3.1230E+700"} + |"float": 4.0, + |"double": 4.2, + |"boolean": true, + |"string": "String" + |}}""" + .trimMargin() + + val dataClass = + DataClassWithJsonElement( + buildJsonObject { + put("char", "c") + put("byte", 0) + put("short", 1) + put("int", 22) + put("long", 3_000_000_000) + put("decimal", BigDecimal("10000000000000000000")) + put("decimal2", BigDecimal("3.1230E+700")) + put("float", 4.0) + put("double", 4.2) + put("boolean", true) + put("string", "String") + }) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithJsonElements() { + val expected = + """{ + | "jsonElement": {"string": "String"}, + | "jsonArray": [1, 2], + | "jsonElements": [{"string": "String"}, {"int": 42}], + | "jsonNestedMap": {"nestedString": {"string": "String"}, + | "nestedLong": {"long": {"$numberLong": "3000000000"}}} + |}""" + .trimMargin() + + val dataClass = + DataClassWithJsonElements( + buildJsonObject { put("string", "String") }, + buildJsonArray { + add(JsonPrimitive(1)) + add(JsonPrimitive(2)) + }, + listOf(buildJsonObject { put("string", "String") }, buildJsonObject { put("int", 42) }), + mapOf( + Pair("nestedString", buildJsonObject { put("string", "String") }), + Pair("nestedLong", buildJsonObject { put("long", 3000000000L) }))) + + assertRoundTrips(expected, dataClass) + } + + @Test + fun testDataClassWithJsonElementsNullable() { + val expected = + """{ + | "jsonElement": {"null": null}, + | "jsonArray": [1, 2, null], + | "jsonElements": [{"null": null}], + | "jsonNestedMap": {"nestedNull": null} + |}""" + .trimMargin() + + val dataClass = + DataClassWithJsonElementsNullable( + buildJsonObject { put("null", null) }, + buildJsonArray { + add(JsonPrimitive(1)) + add(JsonPrimitive(2)) + 
add(JsonPrimitive(null)) + }, + listOf(buildJsonObject { put("null", null) }), + mapOf(Pair("nestedNull", null))) + + assertRoundTrips(expected, dataClass, altConfiguration) + + val expectedNoNulls = + """{ + | "jsonElement": {}, + | "jsonArray": [1, 2], + | "jsonElements": [{}], + | "jsonNestedMap": {} + |}""" + .trimMargin() + + val dataClassNoNulls = + DataClassWithJsonElementsNullable( + buildJsonObject {}, + buildJsonArray { + add(JsonPrimitive(1)) + add(JsonPrimitive(2)) + }, + listOf(buildJsonObject {}), + mapOf()) + assertEncodesTo(expectedNoNulls, dataClass) + assertDecodesTo(expectedNoNulls, dataClassNoNulls) + } + + @Test + fun testDataClassWithJsonElementNullSupport() { + val expected = + """{"jsonElement": {"null": null}, + | "jsonArray": [1, 2, null], + | "jsonElements": [{"null": null}], + | "jsonNestedMap": {"nestedNull": null} + | } + | """ + .trimMargin() + + val dataClass = + DataClassWithJsonElements( + buildJsonObject { put("null", null) }, + buildJsonArray { + add(JsonPrimitive(1)) + add(JsonPrimitive(2)) + add(JsonPrimitive(null)) + }, + listOf(buildJsonObject { put("null", null) }), + mapOf(Pair("nestedNull", JsonPrimitive(null)))) + + assertRoundTrips(expected, dataClass, altConfiguration) + + val expectedNoNulls = + """{"jsonElement": {}, + | "jsonArray": [1, 2], + | "jsonElements": [{}], + | "jsonNestedMap": {} + | } + | """ + .trimMargin() + + val dataClassNoNulls = + DataClassWithJsonElements( + buildJsonObject {}, + buildJsonArray { + add(JsonPrimitive(1)) + add(JsonPrimitive(2)) + }, + listOf(buildJsonObject {}), + mapOf()) + assertEncodesTo(expectedNoNulls, dataClass) + assertDecodesTo(expectedNoNulls, dataClassNoNulls) + } + + @Test + @Suppress("LongMethod") + fun testDataClassWithJsonElementBsonSupport() { + val dataClassWithAllSupportedJsonTypes = + DataClassWithJsonElement( + buildJsonObject { + put("id", "111111111111111111111111") + put("arrayEmpty", buildJsonArray {}) + put( + "arraySimple", + buildJsonArray { + 
add(JsonPrimitive(1)) + add(JsonPrimitive(2)) + add(JsonPrimitive(3)) + }) + put( + "arrayComplex", + buildJsonArray { + add(buildJsonObject { put("a", JsonPrimitive(1)) }) + add(buildJsonObject { put("a", JsonPrimitive(2)) }) + }) + put( + "arrayMixedTypes", + buildJsonArray { + add(JsonPrimitive(1)) + add(JsonPrimitive(2)) + add(JsonPrimitive(true)) + add( + buildJsonArray { + add(JsonPrimitive(1)) + add(JsonPrimitive(2)) + add(JsonPrimitive(3)) + }) + add(buildJsonObject { put("a", JsonPrimitive(2)) }) + }) + put( + "arrayComplexMixedTypes", + buildJsonArray { + add(buildJsonObject { put("a", JsonPrimitive(1)) }) + add(buildJsonObject { put("a", JsonPrimitive("a")) }) + }) + put("binary", JsonPrimitive("S2Fma2Egcm9ja3Mh")) + put("boolean", JsonPrimitive(true)) + put("dateTime", JsonPrimitive(1577836801000)) + put("decimal128", JsonPrimitive(1.0)) + put("documentEmpty", buildJsonObject {}) + put("document", buildJsonObject { put("a", JsonPrimitive(1)) }) + put("double", JsonPrimitive(62.0)) + put("int32", JsonPrimitive(42)) + put("int64", JsonPrimitive(52)) + put("objectId", JsonPrimitive("211111111111111111111112")) + put("string", JsonPrimitive("the fox ...")) + put("timestamp", JsonPrimitive(1311768464867721221)) + }) + + val jsonWriterSettings = + JsonWriterSettings.builder() + .outputMode(JsonMode.RELAXED) + .objectIdConverter { oid, writer -> writer.writeString(oid.toHexString()) } + .dateTimeConverter { d, writer -> writer.writeNumber(d.toString()) } + .timestampConverter { ts, writer -> writer.writeNumber(ts.value.toString()) } + .binaryConverter { b, writer -> writer.writeString(Base64.getEncoder().encodeToString(b.data)) } + .decimal128Converter { d, writer -> writer.writeNumber(d.toDouble().toString()) } + .build() + val dataClassWithAllSupportedJsonTypesSimpleJson = jsonAllSupportedTypesDocument.toJson(jsonWriterSettings) + + assertEncodesTo( + """{"value": $dataClassWithAllSupportedJsonTypesSimpleJson }""", dataClassWithAllSupportedJsonTypes) + 
assertDecodesTo("""{"value": $jsonAllSupportedTypesDocument}""", dataClassWithAllSupportedJsonTypes) + } + + @Test + fun testDataFailures() { + assertThrows("Missing data") { + val codec = KotlinSerializerCodec.create(DataClassWithSimpleValues::class) + codec?.decode(BsonDocumentReader(BsonDocument()), DecoderContext.builder().build()) + } + + assertThrows("Invalid types") { + val data = + BsonDocument.parse( + """{"char": 123, "short": "2", "int": 22, "long": "ok", "float": true, "double": false, + | "boolean": "true", "string": 99}""" + .trimMargin()) + val codec = KotlinSerializerCodec.create() + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + + assertThrows("Failing init") { + val data = BsonDocument.parse("""{"id": "myId"}""") + val codec = KotlinSerializerCodec.create() + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + + var exception = + assertThrows("Invalid complex types - document") { + val data = BsonDocument.parse("""{"_id": "myId", "embedded": 123}""") + val codec = KotlinSerializerCodec.create() + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + assertEquals( + "Invalid data for `org.bson.codecs.kotlinx.samples.DataClassEmbedded` " + + "expected a bson document found: INT32", + exception.message) + + exception = + assertThrows("Invalid complex types - list") { + val data = BsonDocument.parse("""{"_id": "myId", "nested": 123}""") + val codec = KotlinSerializerCodec.create() + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + assertEquals("Invalid data for `LIST` expected a bson array found: INT32", exception.message) + + exception = + assertThrows("Invalid complex types - map") { + val data = BsonDocument.parse("""{"_id": "myId", "nested": 123}""") + val codec = KotlinSerializerCodec.create() + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + assertEquals("Invalid data for `MAP` expected a bson 
document found: INT32", exception.message) + + exception = + assertThrows("Missing discriminator") { + val data = BsonDocument.parse("""{"_id": {"$oid": "111111111111111111111111"}, "name": "string"}""") + val codec = KotlinSerializerCodec.create() + codec?.decode(BsonDocumentReader(data), DecoderContext.builder().build()) + } + assertEquals( + "Missing required discriminator field `_t` for polymorphic class: " + + "`org.bson.codecs.kotlinx.samples.SealedInterface`.", + exception.message) + } + + @Test + fun testInvalidAnnotations() { + assertThrows { KotlinSerializerCodec.create(DataClassWithBsonId::class) } + assertThrows { KotlinSerializerCodec.create(DataClassWithBsonProperty::class) } + assertThrows { + KotlinSerializerCodec.create(DataClassWithBsonDiscriminator::class) + } + assertThrows { KotlinSerializerCodec.create(DataClassWithBsonConstructor::class) } + assertThrows { KotlinSerializerCodec.create(DataClassWithBsonIgnore::class) } + assertThrows { + KotlinSerializerCodec.create(DataClassWithBsonExtraElements::class) + } + assertThrows { + KotlinSerializerCodec.create(DataClassWithBsonRepresentation::class) + } + } + + @Test + fun testSnakeCaseNamingStrategy() { + val expected = + """{"two_words": "", "my_property": "", "camel_case_underscores": "", "url_mapping": "", + | "my_http_auth": "", "my_http2_api_key": "", "my_http2fast_api_key": ""}""" + .trimMargin() + val dataClass = DataClassWithCamelCase() + assertRoundTrips(expected, dataClass, BsonConfiguration(bsonNamingStrategy = BsonNamingStrategy.SNAKE_CASE)) + } + + @Test + fun testSameSnakeCaseName() { + val expected = """{"my_http_auth": "", "my_http_auth1": ""}""" + val dataClass = DataClassWithSameSnakeCaseName() + val exception = + assertThrows { + assertRoundTrips( + expected, dataClass, BsonConfiguration(bsonNamingStrategy = BsonNamingStrategy.SNAKE_CASE)) + } + assertEquals( + "myHTTPAuth, myHttpAuth in org.bson.codecs.kotlinx.samples.DataClassWithSameSnakeCaseName" + + " generate same name: 
my_http_auth.\n" + + "myHTTPAuth1, myHttpAuth1 in org.bson.codecs.kotlinx.samples.DataClassWithSameSnakeCaseName" + + " generate same name: my_http_auth1.\n", + exception.message) + } + + @Test + fun testKotlinAllowedName() { + val expected = """{"имя_переменной": "", "variable _name": ""}""" + val dataClass = DataClassWithKotlinAllowedName() + assertRoundTrips(expected, dataClass, BsonConfiguration(bsonNamingStrategy = BsonNamingStrategy.SNAKE_CASE)) + } + + private inline fun assertRoundTrips( + expected: String, + value: T, + configuration: BsonConfiguration = BsonConfiguration(), + serializersModule: SerializersModule = defaultSerializersModule + ) { + assertDecodesTo( + assertEncodesTo(expected, value, serializersModule, configuration), value, serializersModule, configuration) + } + + private inline fun assertEncodesTo( + json: String, + value: T, + serializersModule: SerializersModule = defaultSerializersModule, + configuration: BsonConfiguration = BsonConfiguration() + ): BsonDocument { + val expected = BsonDocument.parse(json) + val actual = serialize(value, serializersModule, configuration) + println(actual.toJson()) + assertEquals(expected, actual) + return actual + } + + private inline fun serialize( + value: T, + serializersModule: SerializersModule = defaultSerializersModule, + configuration: BsonConfiguration = BsonConfiguration() + ): BsonDocument { + val document = BsonDocument() + val writer = BsonDocumentWriter(document) + val codec = KotlinSerializerCodec.create(T::class, serializersModule, configuration)!! 
+ codec.encode(writer, value, EncoderContext.builder().build()) + writer.flush() + return document + } + + private inline fun assertDecodesTo( + value: String, + expected: T, + serializersModule: SerializersModule = defaultSerializersModule, + configuration: BsonConfiguration = BsonConfiguration() + ) { + assertDecodesTo(BsonDocument.parse(value), expected, serializersModule, configuration) + } + + private inline fun assertDecodesTo( + value: BsonDocument, + expected: T, + serializersModule: SerializersModule = defaultSerializersModule, + configuration: BsonConfiguration = BsonConfiguration() + ) { + assertEquals(expected, deserialize(value, serializersModule, configuration)) + } + private inline fun deserialize( + value: BsonDocument, + serializersModule: SerializersModule = defaultSerializersModule, + configuration: BsonConfiguration = BsonConfiguration() + ): T { + val codec = KotlinSerializerCodec.create(T::class, serializersModule, configuration)!! + return codec.decode(BsonDocumentReader(value), DecoderContext.builder().build()) + } +} diff --git a/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/samples/DataClasses.kt b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/samples/DataClasses.kt new file mode 100644 index 00000000000..773af52cd96 --- /dev/null +++ b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/samples/DataClasses.kt @@ -0,0 +1,371 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.kotlinx.samples + +import kotlinx.datetime.Instant +import kotlinx.datetime.LocalDate +import kotlinx.datetime.LocalDateTime +import kotlinx.datetime.LocalTime +import kotlinx.serialization.Contextual +import kotlinx.serialization.EncodeDefault +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.Required +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable +import kotlinx.serialization.json.JsonArray +import kotlinx.serialization.json.JsonElement +import org.bson.BsonArray +import org.bson.BsonBinary +import org.bson.BsonBoolean +import org.bson.BsonDateTime +import org.bson.BsonDecimal128 +import org.bson.BsonDocument +import org.bson.BsonDouble +import org.bson.BsonInt32 +import org.bson.BsonInt64 +import org.bson.BsonJavaScript +import org.bson.BsonJavaScriptWithScope +import org.bson.BsonMaxKey +import org.bson.BsonMinKey +import org.bson.BsonObjectId +import org.bson.BsonRegularExpression +import org.bson.BsonString +import org.bson.BsonSymbol +import org.bson.BsonTimestamp +import org.bson.BsonType +import org.bson.BsonUndefined +import org.bson.codecs.pojo.annotations.BsonCreator +import org.bson.codecs.pojo.annotations.BsonDiscriminator +import org.bson.codecs.pojo.annotations.BsonExtraElements +import org.bson.codecs.pojo.annotations.BsonId +import org.bson.codecs.pojo.annotations.BsonIgnore +import org.bson.codecs.pojo.annotations.BsonProperty +import org.bson.codecs.pojo.annotations.BsonRepresentation +import org.bson.types.ObjectId + +@Serializable +data class DataClassWithSimpleValues( + val char: Char, + val byte: Byte, + val short: Short, + val int: Int, + val long: Long, + val float: Float, + val double: Double, + val boolean: Boolean, + val string: String +) + +@Serializable +data class DataClassWithContextualDateValues( + @Contextual val instant: 
Instant, + @Contextual val localTime: LocalTime, + @Contextual val localDateTime: LocalDateTime, + @Contextual val localDate: LocalDate, +) + +@Serializable +data class DataClassWithDateValues( + val instant: Instant, + val localTime: LocalTime, + val localDateTime: LocalDateTime, + val localDate: LocalDate, +) + +@Serializable +data class DataClassWithCollections( + val listSimple: List, + val listList: List>, + val listMap: List>, + val mapSimple: Map, + val mapList: Map>, + val mapMap: Map> +) + +@Serializable +data class DataClassWithDefaults( + val boolean: Boolean = false, + val string: String = "String", + val listSimple: List = listOf("a", "b", "c") +) + +@Serializable +data class DataClassWithCamelCase( + val twoWords: String = "", + @Suppress("ConstructorParameterNaming") val MyProperty: String = "", + @Suppress("ConstructorParameterNaming") val camel_Case_Underscores: String = "", + @Suppress("ConstructorParameterNaming") val URLMapping: String = "", + val myHTTPAuth: String = "", + val myHTTP2ApiKey: String = "", + val myHTTP2fastApiKey: String = "", +) + +@Serializable +data class DataClassWithSameSnakeCaseName( + val myHTTPAuth: String = "", + val myHttpAuth: String = "", + val myHTTPAuth1: String = "", + val myHttpAuth1: String = "", +) + +@Serializable +data class DataClassWithKotlinAllowedName( + @Suppress("ConstructorParameterNaming") val имяПеременной: String = "", + @Suppress("ConstructorParameterNaming") val `variable Name`: String = "", +) + +@Serializable data class DataClassWithNulls(val boolean: Boolean?, val string: String?, val listSimple: List?) + +@Serializable +data class DataClassWithListThatLastItemDefaultsToNull(val elements: List) + +@Serializable data class DataClassLastItemDefaultsToNull(val required: String, val optional: String? = null) + +@Serializable +data class DataClassSelfReferential( + val name: String, + val left: DataClassSelfReferential? = null, + val right: DataClassSelfReferential? 
= null +) + +@Serializable data class DataClassEmbedded(val name: String) + +@Serializable data class DataClassWithEmbedded(val id: String, val embedded: DataClassEmbedded) + +@Serializable data class DataClassListOfDataClasses(val id: String, val nested: List) + +@Serializable data class DataClassListOfListOfDataClasses(val id: String, val nested: List>) + +@Serializable data class DataClassMapOfDataClasses(val id: String, val nested: Map) + +@Serializable +data class DataClassMapOfListOfDataClasses(val id: String, val nested: Map>) + +@Serializable +data class DataClassWithParameterizedDataClass( + val id: String, + val parameterizedDataClass: DataClassParameterized +) + +@Serializable +data class DataClassParameterized(val number: N, val string: String, val parameterizedList: List) + +@Serializable +data class DataClassWithNestedParameterizedDataClass( + val id: String, + val nestedParameterized: DataClassWithNestedParameterized +) + +@Serializable +data class DataClassWithNestedParameterized( + val parameterizedDataClass: DataClassParameterized, + val other: B, + val optionalOther: B? 
+) + +@Serializable data class DataClassWithPair(val pair: Pair) + +@Serializable data class DataClassWithTriple(val triple: Triple) + +@Serializable +data class DataClassNestedParameterizedTypes( + val triple: + Triple< + String, + Pair>>, + Triple, Triple, Double>>> +) + +@Serializable data class DataClassWithMutableList(val value: MutableList) + +@Serializable data class DataClassWithMutableSet(val value: MutableSet) + +@Serializable data class DataClassWithMutableMap(val value: MutableMap) + +@Serializable +data class DataClassWithAnnotations( + @SerialName("_id") val id: String, + @SerialName("nom") val name: String, + val string: String +) + +@OptIn(ExperimentalSerializationApi::class) +@Serializable +data class DataClassWithEncodeDefault( + val boolean: Boolean = false, + @EncodeDefault(EncodeDefault.Mode.NEVER) val string: String = "String", + @EncodeDefault(EncodeDefault.Mode.ALWAYS) val listSimple: List = listOf("a", "b", "c") +) + +@Serializable +data class DataClassWithRequired( + val boolean: Boolean = false, + @Required val string: String = "String", + @Required val listSimple: List = listOf("a", "b", "c") +) + +@Serializable data class DataClassWithBooleanMapKey(val map: Map) + +enum class Key { + A, + B +} + +@Serializable data class DataClassWithEnum(val value: Key) + +@Serializable data class DataClassWithEnumMapKey(val map: Map) + +@Serializable data class DataClassKey(val value: String) + +@Serializable data class DataClassWithDataClassMapKey(val map: Map) + +@Serializable +data class DataClassBsonValues( + @Contextual val id: ObjectId, + @Contextual val arrayEmpty: BsonArray, + @Contextual val arraySimple: BsonArray, + @Contextual val arrayComplex: BsonArray, + @Contextual val arrayMixedTypes: BsonArray, + @Contextual val arrayComplexMixedTypes: BsonArray, + @Contextual val binary: BsonBinary, + @Contextual val boolean: BsonBoolean, + @Contextual val code: BsonJavaScript, + @Contextual val codeWithScope: BsonJavaScriptWithScope, + @Contextual 
val dateTime: BsonDateTime, + @Contextual val decimal128: BsonDecimal128, + @Contextual val documentEmpty: BsonDocument, + @Contextual val document: BsonDocument, + @Contextual val double: BsonDouble, + @Contextual val int32: BsonInt32, + @Contextual val int64: BsonInt64, + @Contextual val maxKey: BsonMaxKey, + @Contextual val minKey: BsonMinKey, + @Contextual val objectId: BsonObjectId, + @Contextual val regex: BsonRegularExpression, + @Contextual val string: BsonString, + @Contextual val symbol: BsonSymbol, + @Contextual val timestamp: BsonTimestamp, + @Contextual val undefined: BsonUndefined, +) + +@Serializable +data class DataClassOptionalBsonValues( + @Contextual val id: ObjectId?, + @Contextual val arrayEmpty: BsonArray?, + @Contextual val arraySimple: BsonArray?, + @Contextual val arrayComplex: BsonArray?, + @Contextual val arrayMixedTypes: BsonArray?, + @Contextual val arrayComplexMixedTypes: BsonArray?, + @Contextual val binary: BsonBinary?, + @Contextual val boolean: BsonBoolean?, + @Contextual val code: BsonJavaScript?, + @Contextual val codeWithScope: BsonJavaScriptWithScope?, + @Contextual val dateTime: BsonDateTime?, + @Contextual val decimal128: BsonDecimal128?, + @Contextual val documentEmpty: BsonDocument?, + @Contextual val document: BsonDocument?, + @Contextual val double: BsonDouble?, + @Contextual val int32: BsonInt32?, + @Contextual val int64: BsonInt64?, + @Contextual val maxKey: BsonMaxKey?, + @Contextual val minKey: BsonMinKey?, + @Contextual val objectId: BsonObjectId?, + @Contextual val regex: BsonRegularExpression?, + @Contextual val string: BsonString?, + @Contextual val symbol: BsonSymbol?, + @Contextual val timestamp: BsonTimestamp?, + @Contextual val undefined: BsonUndefined?, +) + +@Serializable sealed class DataClassSealed + +@Serializable data class DataClassSealedA(val a: String) : DataClassSealed() + +@Serializable data class DataClassSealedB(val b: Int) : DataClassSealed() + +@Serializable @SerialName("C") data class 
DataClassSealedC(val c: String) : DataClassSealed() + +@Serializable +sealed interface SealedInterface { + val name: String +} + +@Serializable +data class DataClassSealedInterface(@Contextual @SerialName("_id") val id: ObjectId, override val name: String) : + SealedInterface + +@Serializable data class DataClassListOfSealed(val items: List) + +interface DataClassOpen + +@Serializable data class DataClassOpenA(val a: String) : DataClassOpen + +@Serializable data class DataClassOpenB(val b: Int) : DataClassOpen + +@Serializable data class DataClassContainsOpen(val open: DataClassOpen) + +@JvmInline @Serializable value class ValueClass(val s: String) + +@Serializable data class DataClassContainsValueClass(val value: ValueClass) + +@Serializable data class DataClassWithBsonId(@BsonId val id: String) + +@Serializable data class DataClassWithBsonProperty(@BsonProperty("_id") val id: String) + +@BsonDiscriminator @Serializable data class DataClassWithBsonDiscriminator(val id: String) + +@Serializable data class DataClassWithBsonIgnore(val id: String, @BsonIgnore val ignored: String) + +@Serializable +data class DataClassWithBsonExtraElements(val id: String, @BsonExtraElements val extraElements: Map) + +@Serializable +data class DataClassWithBsonConstructor(val id: String, val count: Int) { + @BsonCreator constructor(id: String) : this(id, -1) +} + +@Serializable data class DataClassWithBsonRepresentation(@BsonRepresentation(BsonType.STRING) val id: Int) + +@Serializable +data class DataClassWithFailingInit(val id: String) { + init { + require(false) + } +} + +@Serializable data class DataClassWithSequence(val value: Sequence) + +@Serializable data class Box(val boxed: T) + +@Serializable data class DataClassWithNullableGeneric(val box: Box) + +@Serializable data class DataClassWithJsonElement(val value: JsonElement) + +@Serializable +data class DataClassWithJsonElements( + val jsonElement: JsonElement, + val jsonArray: JsonArray, + val jsonElements: List, + val 
jsonNestedMap: Map +) + +@Serializable +data class DataClassWithJsonElementsNullable( + val jsonElement: JsonElement?, + val jsonArray: JsonArray?, + val jsonElements: List?, + val jsonNestedMap: Map? +) diff --git a/bson-record-codec/build.gradle.kts b/bson-record-codec/build.gradle.kts new file mode 100644 index 00000000000..5165679c06b --- /dev/null +++ b/bson-record-codec/build.gradle.kts @@ -0,0 +1,53 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import ProjectExtensions.configureJarManifest +import ProjectExtensions.configureMavenPublication + +plugins { + id("project.java") + id("conventions.test-artifacts") +} + +base.archivesName.set("bson-record-codec") + +dependencies { + api(project(path = ":bson", configuration = "default")) + + // Test case checks MongoClientSettings.getDefaultCodecRegistry() support + testImplementation(project(path = ":driver-core", configuration = "default")) +} + +configureMavenPublication { + pom { + name.set("BSON Record Codec") + description.set("The BSON Codec for Java records") + url.set("https://bsonspec.org") + } +} + +configureJarManifest { + attributes["Automatic-Module-Name"] = "org.mongodb.bson.record.codec" + attributes["Bundle-SymbolicName"] = "org.mongodb.bson-record-codec" +} + +java { + sourceCompatibility = JavaVersion.VERSION_17 + targetCompatibility = JavaVersion.VERSION_17 +} + +tasks.withType { options.release.set(17) } + +tasks.withType().configureEach { onlyIf { javaVersion.isCompatibleWith(JavaVersion.VERSION_17) } } diff --git a/bson-record-codec/src/main/org/bson/codecs/record/RecordCodec.java b/bson-record-codec/src/main/org/bson/codecs/record/RecordCodec.java new file mode 100644 index 00000000000..01b59f35265 --- /dev/null +++ b/bson-record-codec/src/main/org/bson/codecs/record/RecordCodec.java @@ -0,0 +1,382 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record; + +import org.bson.BsonInvalidOperationException; +import org.bson.BsonReader; +import org.bson.BsonType; +import org.bson.BsonWriter; +import org.bson.codecs.Codec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.EncoderContext; +import org.bson.codecs.RepresentationConfigurable; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.codecs.pojo.annotations.BsonCreator; +import org.bson.codecs.pojo.annotations.BsonDiscriminator; +import org.bson.codecs.pojo.annotations.BsonExtraElements; +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.codecs.pojo.annotations.BsonProperty; +import org.bson.codecs.pojo.annotations.BsonRepresentation; +import org.bson.diagnostics.Logger; +import org.bson.diagnostics.Loggers; + +import javax.annotation.Nullable; +import java.lang.annotation.Annotation; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.RecordComponent; +import java.lang.reflect.Type; +import java.lang.reflect.TypeVariable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static java.lang.String.format; +import static org.bson.assertions.Assertions.notNull; + +final class RecordCodec implements Codec { + private static final Logger LOGGER = Loggers.getLogger("RecordCodec"); + private final Class clazz; + private final Constructor canonicalConstructor; + private final List componentModels; + private final ComponentModel componentModelForId; + private final Map fieldNameToComponentModel; + + private static final class ComponentModel { + private final RecordComponent component; + private final Codec codec; + private final 
int index; + private final String fieldName; + private final boolean isNullable; + + private ComponentModel(final List typeParameters, final RecordComponent component, final CodecRegistry codecRegistry, + final int index) { + validateAnnotations(component, index); + this.component = component; + this.codec = computeCodec(typeParameters, component, codecRegistry); + this.index = index; + this.fieldName = computeFieldName(component); + this.isNullable = !component.getType().isPrimitive(); + } + + String getComponentName() { + return component.getName(); + } + + String getFieldName() { + return fieldName; + } + + Object getValue(final Record record) throws InvocationTargetException, IllegalAccessException { + return component.getAccessor().invoke(record); + } + + private static Codec computeCodec(final List typeParameters, final RecordComponent component, + final CodecRegistry codecRegistry) { + var rawType = toWrapper(resolveComponentType(typeParameters, component)); + var codec = component.getGenericType() instanceof ParameterizedType parameterizedType + ? 
codecRegistry.get(rawType, + resolveActualTypeArguments(typeParameters, component.getDeclaringRecord(), parameterizedType)) + : codecRegistry.get(rawType); + BsonType bsonRepresentationType = null; + + if (isAnnotationPresentOnField(component, BsonRepresentation.class)) { + bsonRepresentationType = getAnnotationOnField(component, + BsonRepresentation.class).value(); + } + if (bsonRepresentationType != null) { + if (codec instanceof RepresentationConfigurable representationConfigurable) { + codec = representationConfigurable.withRepresentation(bsonRepresentationType); + } else { + throw new CodecConfigurationException( + format("Codec for %s must implement RepresentationConfigurable to support BsonRepresentation", + codec.getEncoderClass())); + } + } + return codec; + } + + private static Class resolveComponentType(final List typeParameters, final RecordComponent component) { + Type resolvedType = resolveType(component.getGenericType(), typeParameters, component.getDeclaringRecord()); + return resolvedType instanceof Class clazz ? clazz : component.getType(); + } + + private static List resolveActualTypeArguments(final List typeParameters, final Class recordClass, + final ParameterizedType parameterizedType) { + return Arrays.stream(parameterizedType.getActualTypeArguments()) + .map(type -> resolveType(type, typeParameters, recordClass)) + .toList(); + } + + private static Type resolveType(final Type type, final List typeParameters, final Class recordClass) { + return type instanceof TypeVariable typeVariable + ? 
typeParameters.get(getIndexOfTypeParameter(typeVariable.getName(), recordClass)) + : type; + } + + // Gets the index of the type parameter with the given name on the record class. + private static int getIndexOfTypeParameter(final String typeParameterName, final Class recordClass) { + var typeParameters = recordClass.getTypeParameters(); + for (int i = 0; i < typeParameters.length; i++) { + if (typeParameters[i].getName().equals(typeParameterName)) { + return i; + } + } + throw new CodecConfigurationException(format("Could not find type parameter on record %s with name %s", + recordClass.getName(), typeParameterName)); + } + + private static String computeFieldName(final RecordComponent component) { + if (isAnnotationPresentOnField(component, BsonId.class)) { + return "_id"; + } else if (isAnnotationPresentOnField(component, BsonProperty.class)) { + return getAnnotationOnField(component, BsonProperty.class).value(); + } + return component.getName(); + } + + private static boolean isAnnotationPresentOnField(final RecordComponent component, + final Class annotation) { + try { + return component.getDeclaringRecord().getDeclaredField(component.getName()).isAnnotationPresent(annotation); + } catch (NoSuchFieldException e) { + throw new AssertionError(format("Unexpectedly missing the declared field for record component %s", component), e); + } + } + + private static boolean isAnnotationPresentOnCanonicalConstructorParameter(final RecordComponent component, + final int index, final Class annotation) { + return getCanonicalConstructor(component.getDeclaringRecord()).getParameters()[index].isAnnotationPresent(annotation); + } + + private static T getAnnotationOnField(final RecordComponent component, final Class annotation) { + try { + return component.getDeclaringRecord().getDeclaredField(component.getName()).getAnnotation(annotation); + } catch (NoSuchFieldException e) { + throw new AssertionError(format("Unexpectedly missing the declared field for recordComponent %s", component), e); + } + } + + private static void validateAnnotations(final
RecordComponent component, final int index) { + validateAnnotationNotPresentOnType(component.getDeclaringRecord(), BsonDiscriminator.class); + validateAnnotationNotPresentOnConstructor(component.getDeclaringRecord(), BsonCreator.class); + validateAnnotationNotPresentOnMethod(component.getDeclaringRecord(), BsonCreator.class); + validateAnnotationNotPresentOnFieldOrAccessor(component, BsonIgnore.class); + validateAnnotationNotPresentOnFieldOrAccessor(component, BsonExtraElements.class); + validateAnnotationOnlyOnField(component, index, BsonId.class); + validateAnnotationOnlyOnField(component, index, BsonProperty.class); + validateAnnotationOnlyOnField(component, index, BsonRepresentation.class); + } + + private static void validateAnnotationNotPresentOnType(final Class clazz, + @SuppressWarnings("SameParameterValue") final Class annotation) { + if (clazz.isAnnotationPresent(annotation)) { + throw new CodecConfigurationException(format("Annotation '%s' not supported on records, but found on '%s'", + annotation, clazz.getName())); + } + } + + private static void validateAnnotationNotPresentOnConstructor(final Class clazz, + @SuppressWarnings("SameParameterValue") final Class annotation) { + for (var constructor : clazz.getConstructors()) { + if (constructor.isAnnotationPresent(annotation)) { + throw new CodecConfigurationException( + format("Annotation '%s' not supported on record constructors, but found on constructor of '%s'", + annotation, clazz.getName())); + } + } + } + + private static void validateAnnotationNotPresentOnMethod(final Class clazz, + @SuppressWarnings("SameParameterValue") final Class annotation) { + for (var method : clazz.getMethods()) { + if (method.isAnnotationPresent(annotation)) { + throw new CodecConfigurationException( + format("Annotation '%s' not supported on methods, but found on method '%s' of '%s'", + annotation, method.getName(), clazz.getName())); + } + } + } + + private static void validateAnnotationNotPresentOnFieldOrAccessor(final 
RecordComponent component, + final Class annotation) { + if (isAnnotationPresentOnField(component, annotation)) { + throw new CodecConfigurationException( + format("Annotation '%s' is not supported on records, but found on component '%s' of record '%s'", + annotation.getName(), component, component.getDeclaringRecord())); + } + if (component.getAccessor().isAnnotationPresent(annotation)) { + throw new CodecConfigurationException( + format("Annotation '%s' is not supported on records, but found on accessor for component '%s' of record '%s'", + annotation.getName(), component, component.getDeclaringRecord())); + } + } + + private static void validateAnnotationOnlyOnField(final RecordComponent component, final int index, + final Class annotation) { + if (!isAnnotationPresentOnField(component, annotation)) { + if (component.getAccessor().isAnnotationPresent(annotation)) { + throw new CodecConfigurationException(format("Annotation %s present on accessor but not component '%s' of record '%s'", + annotation.getName(), component, component.getDeclaringRecord())); + } + if (isAnnotationPresentOnCanonicalConstructorParameter(component, index, annotation)) { + throw new CodecConfigurationException( + format("Annotation %s present on canonical constructor parameter but not component '%s' of record '%s'", + annotation.getName(), component, component.getDeclaringRecord())); + } + } + } + } + + RecordCodec(final Class clazz, final List types, final CodecRegistry codecRegistry) { + if (types.size() != clazz.getTypeParameters().length) { + throw new CodecConfigurationException("Unexpected number of type parameters for record class " + clazz); + } + this.clazz = notNull("class", clazz); + canonicalConstructor = notNull("canonicalConstructor", getCanonicalConstructor(clazz)); + componentModels = getComponentModels(clazz, codecRegistry, types); + fieldNameToComponentModel = componentModels.stream() + .collect(Collectors.toMap(ComponentModel::getFieldName, Function.identity())); + 
componentModelForId = getComponentModelForId(clazz, componentModels); + } + + @SuppressWarnings("unchecked") + @Override + public T decode(final BsonReader reader, final DecoderContext decoderContext) { + reader.readStartDocument(); + + Object[] constructorArguments = new Object[componentModels.size()]; + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + var fieldName = reader.readName(); + var componentModel = fieldNameToComponentModel.get(fieldName); + if (componentModel == null) { + reader.skipValue(); + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(format("Found property not present in the ClassModel: %s", fieldName)); + } + } else if (reader.getCurrentBsonType() == BsonType.NULL) { + if (!componentModel.isNullable) { + throw new BsonInvalidOperationException(format("Null value on primitive field: %s", componentModel.fieldName)); + } + reader.readNull(); + } else { + constructorArguments[componentModel.index] = decoderContext.decodeWithChildContext(componentModel.codec, reader); + } + } + reader.readEndDocument(); + + try { + return (T) canonicalConstructor.newInstance(constructorArguments); + } catch (ReflectiveOperationException e) { + throw new CodecConfigurationException(format("Unable to invoke canonical constructor of record class %s", clazz.getName()), e); + } + } + + @Override + public void encode(final BsonWriter writer, final T record, final EncoderContext encoderContext) { + writer.writeStartDocument(); + if (componentModelForId != null) { + writeComponent(writer, record, componentModelForId); + } + for (var componentModel : componentModels) { + if (componentModel == componentModelForId) { + continue; + } + writeComponent(writer, record, componentModel); + } + writer.writeEndDocument(); + + } + + @Override + public Class getEncoderClass() { + return clazz; + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private void writeComponent(final BsonWriter writer, final T record, final ComponentModel componentModel) { + try { + Object 
componentValue = componentModel.getValue(record); + if (componentValue != null) { + writer.writeName(componentModel.getFieldName()); + ((Codec) componentModel.codec).encode(writer, componentValue, EncoderContext.builder().build()); + } + } catch (ReflectiveOperationException e) { + throw new CodecConfigurationException( + format("Unable to access value of component %s for record %s", componentModel.getComponentName(), clazz.getName()), e); + } + } + + private static List getComponentModels(final Class clazz, final CodecRegistry codecRegistry, + final List typeParameters) { + var recordComponents = clazz.getRecordComponents(); + var componentModels = new ArrayList(recordComponents.length); + for (int i = 0; i < recordComponents.length; i++) { + componentModels.add(new ComponentModel(typeParameters, recordComponents[i], codecRegistry, i)); + } + return componentModels; + } + + @Nullable + private static ComponentModel getComponentModelForId(final Class clazz, final List componentModels) { + List componentModelsForId = componentModels.stream() + .filter(componentModel -> componentModel.getFieldName().equals("_id")).toList(); + if (componentModelsForId.size() > 1) { + throw new CodecConfigurationException(format("Record %s has more than one _id component", clazz.getName())); + } else { + return componentModelsForId.stream().findFirst().orElse(null); + } + } + + private static Constructor getCanonicalConstructor(final Class clazz) { + try { + return clazz.getDeclaredConstructor(Arrays.stream(clazz.getRecordComponents()) + .map(RecordComponent::getType) + .toArray(Class[]::new)); + } catch (NoSuchMethodException e) { + throw new AssertionError(format("Could not find canonical constructor for record %s", clazz.getName())); + } + } + + private static Class toWrapper(final Class clazz) { + if (clazz == Integer.TYPE) { + return Integer.class; + } else if (clazz == Long.TYPE) { + return Long.class; + } else if (clazz == Boolean.TYPE) { + return Boolean.class; + } else if 
(clazz == Byte.TYPE) { + return Byte.class; + } else if (clazz == Character.TYPE) { + return Character.class; + } else if (clazz == Float.TYPE) { + return Float.class; + } else if (clazz == Double.TYPE) { + return Double.class; + } else if (clazz == Short.TYPE) { + return Short.class; + } else { + return clazz; + } + } +} diff --git a/bson-record-codec/src/main/org/bson/codecs/record/RecordCodecProvider.java b/bson-record-codec/src/main/org/bson/codecs/record/RecordCodecProvider.java new file mode 100644 index 00000000000..e41f1fb2bb1 --- /dev/null +++ b/bson-record-codec/src/main/org/bson/codecs/record/RecordCodecProvider.java @@ -0,0 +1,49 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record; + +import org.bson.codecs.Codec; +import org.bson.codecs.configuration.CodecProvider; +import org.bson.codecs.configuration.CodecRegistry; + +import java.lang.reflect.Type; +import java.util.List; + +import static org.bson.assertions.Assertions.assertNotNull; + +/** + * Provides Codec instances for Java records. 
+ * + * @since 4.6 + * @see Record + */ +public final class RecordCodecProvider implements CodecProvider { + @Override + public Codec get(final Class clazz, final CodecRegistry registry) { + return get(clazz, List.of(), registry); + } + + @Override + public Codec get(final Class clazz, final List typeArguments, final CodecRegistry registry) { + if (!assertNotNull(clazz).isRecord()) { + return null; + } + @SuppressWarnings({"unchecked", "rawtypes"}) + Codec result = new RecordCodec(clazz, assertNotNull(typeArguments), registry); + return result; + } +} diff --git a/bson-record-codec/src/main/org/bson/codecs/record/package-info.java b/bson-record-codec/src/main/org/bson/codecs/record/package-info.java new file mode 100644 index 00000000000..b1d179eb3f6 --- /dev/null +++ b/bson-record-codec/src/main/org/bson/codecs/record/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * This package contains classes for encoding and decoding Java records. 
+ */ +package org.bson.codecs.record; diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/RecordCodecProviderTest.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/RecordCodecProviderTest.java new file mode 100644 index 00000000000..5f71169f0c8 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/RecordCodecProviderTest.java @@ -0,0 +1,62 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record; + +import com.mongodb.MongoClientSettings; +import org.bson.codecs.record.samples.TestRecordWithPojoAnnotations; +import org.bson.conversions.Bson; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + + +public class RecordCodecProviderTest { + + @Test + public void shouldReturnNullForNonRecord() { + var provider = new RecordCodecProvider(); + + // expect + assertNull(provider.get(String.class, Bson.DEFAULT_CODEC_REGISTRY)); + } + + @Test + public void shouldReturnRecordCodecForRecord() { + var provider = new RecordCodecProvider(); + + // when + var codec = provider.get(TestRecordWithPojoAnnotations.class, Bson.DEFAULT_CODEC_REGISTRY); + + // then + assertTrue(codec instanceof RecordCodec); + var recordCodec = (RecordCodec) codec; + assertEquals(TestRecordWithPojoAnnotations.class, recordCodec.getEncoderClass()); + } + + @Test + public void shouldReturnRecordCodecForRecordUsingDefaultRegistry() { + // when + var codec = MongoClientSettings.getDefaultCodecRegistry().get(TestRecordWithPojoAnnotations.class); + + // then + assertTrue(codec instanceof RecordCodec); + var recordCodec = (RecordCodec) codec; + assertEquals(TestRecordWithPojoAnnotations.class, recordCodec.getEncoderClass()); + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/RecordCodecTest.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/RecordCodecTest.java new file mode 100644 index 00000000000..c7adef45bc8 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/RecordCodecTest.java @@ -0,0 +1,419 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record; + +import org.bson.BsonArray; +import org.bson.BsonDocument; +import org.bson.BsonDocumentReader; +import org.bson.BsonDocumentWriter; +import org.bson.BsonDouble; +import org.bson.BsonInt32; +import org.bson.BsonInvalidOperationException; +import org.bson.BsonNull; +import org.bson.BsonObjectId; +import org.bson.BsonString; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.EncoderContext; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.codecs.record.samples.TestRecordEmbedded; +import org.bson.codecs.record.samples.TestRecordParameterized; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonCreatorOnConstructor; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonCreatorOnMethod; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonDiscriminatorOnRecord; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonExtraElementsOnAccessor; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonExtraElementsOnComponent; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonIdOnAccessor; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonIdOnCanonicalConstructor; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonIgnoreOnAccessor; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonIgnoreOnComponent; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonPropertyOnAccessor; +import 
org.bson.codecs.record.samples.TestRecordWithIllegalBsonPropertyOnCanonicalConstructor; +import org.bson.codecs.record.samples.TestRecordWithIllegalBsonRepresentationOnAccessor; +import org.bson.codecs.record.samples.TestRecordWithListOfListOfRecords; +import org.bson.codecs.record.samples.TestRecordWithListOfRecords; +import org.bson.codecs.record.samples.TestRecordWithMapOfListOfRecords; +import org.bson.codecs.record.samples.TestRecordWithMapOfRecords; +import org.bson.codecs.record.samples.TestRecordWithNestedParameterized; +import org.bson.codecs.record.samples.TestRecordWithNestedParameterizedRecord; +import org.bson.codecs.record.samples.TestRecordWithNullableField; +import org.bson.codecs.record.samples.TestRecordWithParameterizedRecord; +import org.bson.codecs.record.samples.TestRecordWithPojoAnnotations; +import org.bson.codecs.record.samples.TestSelfReferentialHolderRecord; +import org.bson.codecs.record.samples.TestSelfReferentialRecord; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.Map; + +import static org.bson.codecs.configuration.CodecRegistries.fromProviders; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +public class RecordCodecTest { + + @Test + public void testRecordWithPojoAnnotations() { + var codec = createRecordCodec(TestRecordWithPojoAnnotations.class, Bson.DEFAULT_CODEC_REGISTRY); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithPojoAnnotations("Lucas", 14, List.of("soccer", "basketball"), identifier.toHexString()); + + var document = new BsonDocument(); + var writer = new BsonDocumentWriter(document); + + // when + codec.encode(writer, testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new BsonObjectId(identifier)) + .append("name", new BsonString("Lucas")) + .append("hobbies", new 
BsonArray(List.of(new BsonString("soccer"), new BsonString("basketball")))) + .append("a", new BsonInt32(14)), + document); + assertEquals("_id", document.getFirstKey()); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testRecordWithNestedListOfRecords() { + var codec = createRecordCodec(TestRecordWithListOfRecords.class, + fromProviders(new RecordCodecProvider(), Bson.DEFAULT_CODEC_REGISTRY)); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithListOfRecords(identifier, List.of(new TestRecordEmbedded("embedded"))); + + var document = new BsonDocument(); + var writer = new BsonDocumentWriter(document); + + // when + codec.encode(writer, testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new BsonObjectId(identifier)) + .append("nestedRecords", new BsonArray(List.of(new BsonDocument("name", new BsonString("embedded"))))), + document); + assertEquals("_id", document.getFirstKey()); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testRecordWithNestedListOfListOfRecords() { + var codec = createRecordCodec(TestRecordWithListOfListOfRecords.class, + fromProviders(new RecordCodecProvider(), Bson.DEFAULT_CODEC_REGISTRY)); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithListOfListOfRecords(identifier, List.of(List.of(new TestRecordEmbedded("embedded")))); + + var document = new BsonDocument(); + var writer = new BsonDocumentWriter(document); + + // when + codec.encode(writer, testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new BsonObjectId(identifier)) + .append("nestedRecords", + new BsonArray(List.of(new BsonArray(List.of(new BsonDocument("name", new 
BsonString("embedded"))))))), + document); + assertEquals("_id", document.getFirstKey()); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testRecordWithNestedMapOfRecords() { + var codec = createRecordCodec(TestRecordWithMapOfRecords.class, + fromProviders(new RecordCodecProvider(), Bson.DEFAULT_CODEC_REGISTRY)); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithMapOfRecords(identifier, + Map.of("first", new TestRecordEmbedded("embedded"))); + + var document = new BsonDocument(); + var writer = new BsonDocumentWriter(document); + + // when + codec.encode(writer, testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new BsonObjectId(identifier)) + .append("nestedRecords", new BsonDocument("first", new BsonDocument("name", new BsonString("embedded")))), + document); + assertEquals("_id", document.getFirstKey()); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testRecordWithNestedMapOfListRecords() { + var codec = createRecordCodec(TestRecordWithMapOfListOfRecords.class, + fromProviders(new RecordCodecProvider(), Bson.DEFAULT_CODEC_REGISTRY)); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithMapOfListOfRecords(identifier, + Map.of("first", List.of(new TestRecordEmbedded("embedded")))); + + var document = new BsonDocument(); + var writer = new BsonDocumentWriter(document); + + // when + codec.encode(writer, testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new BsonObjectId(identifier)) + .append("nestedRecords", + new BsonDocument("first", + new BsonArray(List.of(new BsonDocument("name", new BsonString("embedded")))))), + document); + assertEquals("_id", 
document.getFirstKey()); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testRecordWithNestedParameterizedRecord() { + var codec = createRecordCodec(TestRecordWithParameterizedRecord.class, + fromProviders(new RecordCodecProvider(), Bson.DEFAULT_CODEC_REGISTRY)); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithParameterizedRecord(identifier, + new TestRecordParameterized<>(42.0, List.of(new TestRecordEmbedded("embedded")))); + + var document = new BsonDocument(); + var writer = new BsonDocumentWriter(document); + + // when + codec.encode(writer, testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new BsonObjectId(identifier)) + .append("parameterizedRecord", + new BsonDocument("number", new BsonDouble(42.0)) + .append("parameterizedList", + new BsonArray(List.of(new BsonDocument("name", new BsonString("embedded")))))), + document); + assertEquals("_id", document.getFirstKey()); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testRecordWithNestedParameterizedRecordWithDifferentlyOrderedTypeParameters() { + var codec = createRecordCodec(TestRecordWithNestedParameterizedRecord.class, + fromProviders(new RecordCodecProvider(), Bson.DEFAULT_CODEC_REGISTRY)); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithNestedParameterizedRecord(identifier, + new TestRecordWithNestedParameterized<>( + new TestRecordParameterized<>(42.0, List.of(new TestRecordEmbedded("p"))), + "o")); + + var document = new BsonDocument(); + var writer = new BsonDocumentWriter(document); + + // when + codec.encode(writer, testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new 
BsonObjectId(identifier)) + .append("nestedParameterized", + new BsonDocument("parameterizedRecord", + new BsonDocument("number", new BsonDouble(42.0)) + .append("parameterizedList", + new BsonArray(List.of(new BsonDocument("name", new BsonString("p")))))) + .append("other", new BsonString("o"))), + document); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testRecordWithNulls() { + var codec = createRecordCodec(TestRecordWithPojoAnnotations.class, Bson.DEFAULT_CODEC_REGISTRY); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithPojoAnnotations(null, 14, null, identifier.toHexString()); + + var document = new BsonDocument(); + var writer = new BsonDocumentWriter(document); + + // when + codec.encode(writer, testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new BsonObjectId(identifier)) + .append("a", new BsonInt32(14)), + document); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testRecordWithStoredNulls() { + var codec = createRecordCodec(TestRecordWithNullableField.class, Bson.DEFAULT_CODEC_REGISTRY); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithNullableField(identifier, null, 42); + + var document = new BsonDocument("_id", new BsonObjectId(identifier)) + .append("name", new BsonNull()) + .append("age", new BsonInt32(42)); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testExceptionsWithStoredNullsOnPrimitiveField() { + var codec = createRecordCodec(TestRecordWithNullableField.class, Bson.DEFAULT_CODEC_REGISTRY); + + var document = new BsonDocument("_id", 
new BsonObjectId(new ObjectId())) + .append("name", new BsonString("Felix")) + .append("age", new BsonNull()); + + assertThrows(BsonInvalidOperationException.class, () -> + codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build())); + } + + @Test + public void testRecordWithExtraData() { + var codec = createRecordCodec(TestRecordWithPojoAnnotations.class, Bson.DEFAULT_CODEC_REGISTRY); + var identifier = new ObjectId(); + var testRecord = new TestRecordWithPojoAnnotations("Felix", 13, List.of("rugby", "badminton"), identifier.toHexString()); + + var document = new BsonDocument("_id", new BsonObjectId(identifier)) + .append("nationality", new BsonString("British")) + .append("name", new BsonString("Felix")) + .append("hobbies", new BsonArray(List.of(new BsonString("rugby"), new BsonString("badminton")))) + .append("a", new BsonInt32(13)); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testSelfReferentialRecords() { + var registry = fromProviders(new RecordCodecProvider(), Bson.DEFAULT_CODEC_REGISTRY); + var codec = registry.get(TestSelfReferentialHolderRecord.class); + var testRecord = new TestSelfReferentialHolderRecord("0", + new TestSelfReferentialRecord<>("1", + new TestSelfReferentialRecord<>("2", null, null), + new TestSelfReferentialRecord<>("3", null, null))); + + var document = new BsonDocument(); + + // when + codec.encode(new BsonDocumentWriter(document), testRecord, EncoderContext.builder().build()); + + // then + assertEquals( + new BsonDocument("_id", new BsonString("0")) + .append("selfReferentialRecord", + new BsonDocument("name", new BsonString("1")) + .append("left", new BsonDocument("name", new BsonString("2"))) + .append("right", new BsonDocument("name", new BsonString("3")))), + document); + + // when + var decoded = codec.decode(new BsonDocumentReader(document), 
DecoderContext.builder().build()); + + // then + assertEquals(testRecord, decoded); + } + + @Test + public void testExceptionsForAnnotationsNotOnRecordComponent() { + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonIdOnAccessor.class, Bson.DEFAULT_CODEC_REGISTRY)); + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonIdOnCanonicalConstructor.class, Bson.DEFAULT_CODEC_REGISTRY)); + + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonPropertyOnAccessor.class, Bson.DEFAULT_CODEC_REGISTRY)); + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonPropertyOnCanonicalConstructor.class, Bson.DEFAULT_CODEC_REGISTRY)); + + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonRepresentationOnAccessor.class, Bson.DEFAULT_CODEC_REGISTRY)); + } + + @Test + public void testExceptionsForUnsupportedAnnotations() { + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonDiscriminatorOnRecord.class, Bson.DEFAULT_CODEC_REGISTRY)); + + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonCreatorOnConstructor.class, Bson.DEFAULT_CODEC_REGISTRY)); + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonCreatorOnMethod.class, Bson.DEFAULT_CODEC_REGISTRY)); + + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonIgnoreOnComponent.class, Bson.DEFAULT_CODEC_REGISTRY)); + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonIgnoreOnAccessor.class, Bson.DEFAULT_CODEC_REGISTRY)); + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonExtraElementsOnComponent.class, 
Bson.DEFAULT_CODEC_REGISTRY)); + assertThrows(CodecConfigurationException.class, () -> + createRecordCodec(TestRecordWithIllegalBsonExtraElementsOnAccessor.class, Bson.DEFAULT_CODEC_REGISTRY)); + } + + private static RecordCodec createRecordCodec(final Class clazz, final CodecRegistry registry) { + return new RecordCodec<>(clazz, List.of(), registry); + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordEmbedded.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordEmbedded.java new file mode 100644 index 00000000000..b83f6bde2e2 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordEmbedded.java @@ -0,0 +1,20 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +public record TestRecordEmbedded(String name) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordParameterized.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordParameterized.java new file mode 100644 index 00000000000..91f0c051b33 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordParameterized.java @@ -0,0 +1,22 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import java.util.List; + +public record TestRecordParameterized(N number, List parameterizedList) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonCreatorOnConstructor.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonCreatorOnConstructor.java new file mode 100644 index 00000000000..d9db5adb149 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonCreatorOnConstructor.java @@ -0,0 +1,27 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonCreator; + +public record TestRecordWithIllegalBsonCreatorOnConstructor(String name) { + @SuppressWarnings("RedundantRecordConstructor") + @BsonCreator + public TestRecordWithIllegalBsonCreatorOnConstructor(final String name) { + this.name = name; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonCreatorOnMethod.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonCreatorOnMethod.java new file mode 100644 index 00000000000..707162ced9c --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonCreatorOnMethod.java @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonCreator; + +public record TestRecordWithIllegalBsonCreatorOnMethod(String name) { + @BsonCreator + public static TestRecordWithIllegalBsonCreatorOnMethod create(final String name) { + return new TestRecordWithIllegalBsonCreatorOnMethod(name); + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonDiscriminatorOnRecord.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonDiscriminatorOnRecord.java new file mode 100644 index 00000000000..4d6fa0995b9 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonDiscriminatorOnRecord.java @@ -0,0 +1,23 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonDiscriminator; + +@BsonDiscriminator +public record TestRecordWithIllegalBsonDiscriminatorOnRecord(String name) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonExtraElementsOnAccessor.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonExtraElementsOnAccessor.java new file mode 100644 index 00000000000..54f8489e388 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonExtraElementsOnAccessor.java @@ -0,0 +1,27 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonExtraElements; + +public record TestRecordWithIllegalBsonExtraElementsOnAccessor(String name) { + @Override + @BsonExtraElements + public String name() { + return name; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonExtraElementsOnComponent.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonExtraElementsOnComponent.java new file mode 100644 index 00000000000..e7f919c90fc --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonExtraElementsOnComponent.java @@ -0,0 +1,22 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonExtraElements; + +public record TestRecordWithIllegalBsonExtraElementsOnComponent(@BsonExtraElements String name) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIdOnAccessor.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIdOnAccessor.java new file mode 100644 index 00000000000..b61162d6489 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIdOnAccessor.java @@ -0,0 +1,28 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; + +public record TestRecordWithIllegalBsonIdOnAccessor(String name) { + + @Override + @BsonId + public String name() { + return name; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIdOnCanonicalConstructor.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIdOnCanonicalConstructor.java new file mode 100644 index 00000000000..2c4d5bc18d7 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIdOnCanonicalConstructor.java @@ -0,0 +1,25 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; + +public record TestRecordWithIllegalBsonIdOnCanonicalConstructor(String name) { + public TestRecordWithIllegalBsonIdOnCanonicalConstructor(@BsonId final String name) { + this.name = name; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIgnoreOnAccessor.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIgnoreOnAccessor.java new file mode 100644 index 00000000000..12a04cbc724 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIgnoreOnAccessor.java @@ -0,0 +1,28 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonIgnore; + +public record TestRecordWithIllegalBsonIgnoreOnAccessor(String name) { + + @Override + @BsonIgnore + public String name() { + return name; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIgnoreOnComponent.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIgnoreOnComponent.java new file mode 100644 index 00000000000..5d18e3fe48d --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonIgnoreOnComponent.java @@ -0,0 +1,22 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonIgnore; + +public record TestRecordWithIllegalBsonIgnoreOnComponent(@BsonIgnore String name) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonPropertyOnAccessor.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonPropertyOnAccessor.java new file mode 100644 index 00000000000..e4a2ad204ad --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonPropertyOnAccessor.java @@ -0,0 +1,28 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonProperty; + +public record TestRecordWithIllegalBsonPropertyOnAccessor(String name) { + + @Override + @BsonProperty("n") + public String name() { + return name; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonPropertyOnCanonicalConstructor.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonPropertyOnCanonicalConstructor.java new file mode 100644 index 00000000000..9465505bf29 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonPropertyOnCanonicalConstructor.java @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonProperty; + +public record TestRecordWithIllegalBsonPropertyOnCanonicalConstructor(String name) { + + public TestRecordWithIllegalBsonPropertyOnCanonicalConstructor(@BsonProperty("n") final String name) { + this.name = name; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonRepresentationOnAccessor.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonRepresentationOnAccessor.java new file mode 100644 index 00000000000..d453f4e1e85 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithIllegalBsonRepresentationOnAccessor.java @@ -0,0 +1,29 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.BsonType; +import org.bson.codecs.pojo.annotations.BsonRepresentation; + +public record TestRecordWithIllegalBsonRepresentationOnAccessor(String name) { + + @Override + @BsonRepresentation(value = BsonType.INT32) + public String name() { + return name; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithListOfListOfRecords.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithListOfListOfRecords.java new file mode 100644 index 00000000000..65012c32fbb --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithListOfListOfRecords.java @@ -0,0 +1,25 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.types.ObjectId; + +import java.util.List; + +public record TestRecordWithListOfListOfRecords(@BsonId ObjectId id, List> nestedRecords) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithListOfRecords.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithListOfRecords.java new file mode 100644 index 00000000000..459186e863c --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithListOfRecords.java @@ -0,0 +1,25 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.types.ObjectId; + +import java.util.List; + +public record TestRecordWithListOfRecords(@BsonId ObjectId id, List nestedRecords) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithMapOfListOfRecords.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithMapOfListOfRecords.java new file mode 100644 index 00000000000..b9b220b9579 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithMapOfListOfRecords.java @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.types.ObjectId; + +import java.util.List; +import java.util.Map; + +public record TestRecordWithMapOfListOfRecords(@BsonId ObjectId id, Map> nestedRecords) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithMapOfRecords.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithMapOfRecords.java new file mode 100644 index 00000000000..5989fdbb085 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithMapOfRecords.java @@ -0,0 +1,25 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.types.ObjectId; + +import java.util.Map; + +public record TestRecordWithMapOfRecords(@BsonId ObjectId id, Map nestedRecords) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNestedParameterized.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNestedParameterized.java new file mode 100644 index 00000000000..c760e2f7f73 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNestedParameterized.java @@ -0,0 +1,22 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +public record TestRecordWithNestedParameterized( + TestRecordParameterized parameterizedRecord, + B other) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNestedParameterizedRecord.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNestedParameterizedRecord.java new file mode 100644 index 00000000000..8a992f13a18 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNestedParameterizedRecord.java @@ -0,0 +1,25 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.types.ObjectId; + +public record TestRecordWithNestedParameterizedRecord( + @BsonId ObjectId id, + TestRecordWithNestedParameterized nestedParameterized) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNullableField.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNullableField.java new file mode 100644 index 00000000000..f2329c8170e --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithNullableField.java @@ -0,0 +1,23 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.types.ObjectId; + +public record TestRecordWithNullableField(@BsonId ObjectId id, String name, int age) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithParameterizedRecord.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithParameterizedRecord.java new file mode 100644 index 00000000000..fc8d1feee39 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithParameterizedRecord.java @@ -0,0 +1,24 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.types.ObjectId; + +public record TestRecordWithParameterizedRecord(@BsonId ObjectId id, + TestRecordParameterized parameterizedRecord) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithPojoAnnotations.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithPojoAnnotations.java new file mode 100644 index 00000000000..650d3b8de0d --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestRecordWithPojoAnnotations.java @@ -0,0 +1,57 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.BsonType; +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.codecs.pojo.annotations.BsonProperty; +import org.bson.codecs.pojo.annotations.BsonRepresentation; + +import java.util.List; + +public record TestRecordWithPojoAnnotations(String name, + @BsonProperty("a") int age, + List hobbies, + @BsonRepresentation(BsonType.OBJECT_ID) @BsonId String identifier) { + + public TestRecordWithPojoAnnotations(final String name, final int age, final List hobbies, final String identifier) { + this.name = name; + this.age = age; + this.hobbies = hobbies; + this.identifier = identifier; + } + + @Override + public String name() { + return name; + } + + @Override + public int age() { + return age; + } + + @Override + public List hobbies() { + return hobbies; + } + + @Override + public String identifier() { + return identifier; + } +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestSelfReferentialHolderRecord.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestSelfReferentialHolderRecord.java new file mode 100644 index 00000000000..557243d3f50 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestSelfReferentialHolderRecord.java @@ -0,0 +1,23 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.record.samples; + +import org.bson.codecs.pojo.annotations.BsonId; + +public record TestSelfReferentialHolderRecord(@BsonId String id, + TestSelfReferentialRecord selfReferentialRecord) { +} diff --git a/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestSelfReferentialRecord.java b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestSelfReferentialRecord.java new file mode 100644 index 00000000000..5f097854670 --- /dev/null +++ b/bson-record-codec/src/test/unit/org/bson/codecs/record/samples/TestSelfReferentialRecord.java @@ -0,0 +1,24 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.record.samples; + +import com.mongodb.lang.Nullable; + +public record TestSelfReferentialRecord(T name, + @Nullable TestSelfReferentialRecord left, + @Nullable TestSelfReferentialRecord right) { +} diff --git a/bson-scala/build.gradle.kts b/bson-scala/build.gradle.kts new file mode 100644 index 00000000000..e23087ae314 --- /dev/null +++ b/bson-scala/build.gradle.kts @@ -0,0 +1,37 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import ProjectExtensions.configureJarManifest +import ProjectExtensions.configureMavenPublication + +plugins { id("project.scala") } + +base.archivesName.set("mongo-scala-bson") + +dependencies { api(project(path = ":bson", configuration = "default")) } + +configureMavenPublication { + pom { + name.set("Mongo Scala BSON Library") + description.set("A Scala wrapper / extension to the BSON library") + url.set("https://bsonspec.org") + } +} + +configureJarManifest { + attributes["Automatic-Module-Name"] = "org.mongodb.bson.scala" + attributes["Bundle-SymbolicName"] = "org.mongodb.scala.mongo-scala-bson" + attributes["Import-Package"] = "!scala.*,*" +} diff --git a/bson-scala/src/main/scala-2.13+/org/mongodb/scala/bson/collection/immutable/Document.scala b/bson-scala/src/main/scala-2.13+/org/mongodb/scala/bson/collection/immutable/Document.scala new file mode 100644 index 00000000000..31afbf30059 --- /dev/null +++ b/bson-scala/src/main/scala-2.13+/org/mongodb/scala/bson/collection/immutable/Document.scala @@ -0,0 +1,141 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.collection.immutable + +import scala.jdk.CollectionConverters._ +import scala.collection.mutable.ListBuffer +import scala.collection.{ mutable, Iterable, IterableOps, SpecificIterableFactory, StrictOptimizedIterableOps } +import org.mongodb.scala.bson._ +import org.mongodb.scala.bson.collection.BaseDocument + +/** + * The immutable [[Document]] companion object for easy creation. + */ +object Document extends SpecificIterableFactory[(String, BsonValue), Document] { + + import BsonMagnets._ + + /** + * Create a new empty Document + * @return a new Document + */ + def empty: Document = apply() + + /** + * Create a new Document + * @return a new Document + */ + def apply(): Document = new Document(new BsonDocument()) + + /** + * Parses a string in MongoDB Extended JSON format to a `Document` + * + * @param json the JSON stringN + * @return a corresponding `Document` object + * @see org.bson.json.JsonReader + * @see [[https://www.mongodb.com/docs/manual/reference/mongodb-extended-json/ MongoDB Extended JSON]] + */ + def apply(json: String): Document = new Document(BsonDocument(json)) + + /** + * Create a new document from the elems + * @param elems the key/value pairs that make up the Document. This can be any valid `(String, BsonValue)` pair that can be + * transformed into a [[BsonElement]] via [[BsonMagnets.CanBeBsonElement]] implicits and any [[BsonTransformer]]s that + * are in scope. + * @return a new Document consisting key/value pairs given by `elems`. + */ + def apply(elems: CanBeBsonElement*): Document = { + val underlying = new BsonDocument() + elems.foreach(elem => underlying.put(elem.key, elem.value)) + new Document(underlying) + } + + /** + * Create a new document from the elems + * @param elems a sequence of key/values that make up the Document. 
This can be any valid sequence of `(String, BsonValue)` pairs that + * can be transformed into a sequence of [[BsonElement]]s via [[BsonMagnets.CanBeBsonElements]] implicits and any + * [[BsonTransformer]]s + * that are in scope. + * @return a new Document consisting key/value pairs given by `elems`. + */ + def apply(elems: CanBeBsonElements): Document = { + val underlying = new BsonDocument() + elems.values.foreach(el => underlying.put(el.key, el.value)) + new Document(underlying) + } + + def builder: mutable.Builder[(String, BsonValue), Document] = ListBuffer[(String, BsonValue)]() mapResult fromSeq + + def fromSeq(ts: Seq[(String, BsonValue)]): Document = { + val underlying = new BsonDocument() + ts.foreach(kv => underlying.put(kv._1, kv._2)) + apply(underlying) + } + + override def newBuilder: mutable.Builder[(String, BsonValue), Document] = builder + override def fromSpecific(it: IterableOnce[(String, BsonValue)]): Document = fromSeq(it.iterator.toSeq) +} + +/** + * An immutable Document implementation. + * + * A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there + * is no variance in the value type and it always has to be a `BsonValue`. + * + * @param underlying the underlying BsonDocument which stores the data. + * + */ +case class Document(protected[scala] val underlying: BsonDocument) + extends BaseDocument[Document] + with IterableOps[(String, BsonValue), Iterable, Document] + with StrictOptimizedIterableOps[(String, BsonValue), Iterable, Document] { + + /** + * Creates a new immutable document + * @param underlying the underlying BsonDocument + * @return a new document + */ + protected[scala] def apply(underlying: BsonDocument) = new Document(underlying) + + /** + * Applies a function `f` to all elements of this document. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. 
+ * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + * + */ + override def foreach[U](f: ((String, BsonValue)) => U): Unit = underlying.asScala foreach f + + // Mandatory overrides of `fromSpecific`, `newSpecificBuilder`, + // and `empty`, from `IterableOps` + override protected def fromSpecific(coll: IterableOnce[(String, BsonValue)]): Document = Document.fromSpecific(coll) + override protected def newSpecificBuilder: mutable.Builder[(String, BsonValue), Document] = Document.newBuilder + override def empty: Document = Document.empty + + // Overloading of `appended`, `prepended`, `appendedAll`, `prependedAll`, + // `map`, `flatMap` and `concat` to return an `RNA` when possible + def concat(suffix: IterableOnce[(String, BsonValue)]): Document = strictOptimizedConcat(suffix, newSpecificBuilder) + // scalastyle:off method.name + @inline final def ++(suffix: IterableOnce[(String, BsonValue)]): Document = concat(suffix) + // scalastyle:on method.name + def map[B](f: ((String, BsonValue)) => (String, BsonValue)): Document = strictOptimizedMap(newSpecificBuilder, f) + +} diff --git a/bson-scala/src/main/scala-2.13+/org/mongodb/scala/bson/collection/mutable/Document.scala b/bson-scala/src/main/scala-2.13+/org/mongodb/scala/bson/collection/mutable/Document.scala new file mode 100644 index 00000000000..86f11c5a8f7 --- /dev/null +++ b/bson-scala/src/main/scala-2.13+/org/mongodb/scala/bson/collection/mutable/Document.scala @@ -0,0 +1,284 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.collection.mutable + +import org.mongodb.scala.bson._ +import org.mongodb.scala.bson.collection.BaseDocument + +import scala.jdk.CollectionConverters._ +import scala.collection._ +import scala.collection.mutable.ListBuffer + +/** + * Mutable [[Document]] companion object for easy creation. + */ +object Document extends SpecificIterableFactory[(String, BsonValue), Document] { + + import BsonMagnets._ + + /** + * Create a new empty Document + * @return a new Document + */ + def empty: Document = apply() + + /** + * Create a new Document + * @return a new Document + */ + def apply(): Document = Document(BsonDocument()) + + /** + * Parses a string in MongoDB Extended JSON format to a `Document` + * + * @param json the JSON string + * @return a corresponding `Document` object + * @see org.bson.json.JsonReader + * @see [[https://www.mongodb.com/docs/manual/reference/mongodb-extended-json/ MongoDB Extended JSON]] + */ + def apply(json: String): Document = Document(BsonDocument(json)) + + /** + * Create a new document from the elems + * @param elems the key/value pairs that make up the Document. This can be any valid `(String, BsonValue)` pair that can be + * transformed into a [[BsonElement]] via [[BsonMagnets.CanBeBsonElement]] implicits and any [[BsonTransformer]]s that are + * in scope. + * @return a new Document consisting key/value pairs given by `elems`. 
+ */ + def apply(elems: CanBeBsonElement*): Document = { + val underlying = new BsonDocument() + elems.foreach(elem => underlying.put(elem.key, elem.value)) + new Document(underlying) + } + + /** + * Create a new document from the elems + * @param elem a sequence of key/values that make up the Document. This can be any valid sequence of `(String, BsonValue)` pairs that + * can be transformed into a sequence of [[BsonElement]]s via [[BsonMagnets.CanBeBsonElements]] implicits and any + * [[BsonTransformer]]s + * that are in scope. + * @return a new Document consisting key/value pairs given by `elems`. + */ + def apply(elem: CanBeBsonElements): Document = { + val underlying = new BsonDocument() + elem.values.foreach(kv => underlying.put(kv.key, kv.value)) + new Document(underlying) + } + + private def builder: mutable.Builder[(String, BsonValue), Document] = + ListBuffer[(String, BsonValue)]() mapResult fromSeq + + private def fromSeq(ts: Seq[(String, BsonValue)]): Document = { + val underlying = new BsonDocument() + ts.foreach(kv => underlying.put(kv._1, kv._2)) + apply(underlying) + } + + override def newBuilder: mutable.Builder[(String, BsonValue), Document] = builder + override def fromSpecific(it: IterableOnce[(String, BsonValue)]): Document = fromSeq(it.iterator.toSeq) +} + +/** + * An mutable Document implementation. + * + * A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there + * is no variance in the value type and it always has to be a `BsonValue`. + * + * @param underlying the underlying BsonDocument which stores the data. 
+ */ +case class Document(protected[scala] val underlying: BsonDocument) + extends BaseDocument[Document] + with IterableOps[(String, BsonValue), Iterable, Document] + with StrictOptimizedIterableOps[(String, BsonValue), Iterable, Document] { + + import BsonMagnets._ + + /** + * Creates a new immutable document + * @param underlying the underlying BsonDocument + * @return a new document + */ + protected[scala] def apply(underlying: BsonDocument) = new Document(underlying) + + /** + * Applies a function `f` to all elements of this document. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + * + */ + override def foreach[U](f: ((String, BsonValue)) => U): Unit = underlying.asScala foreach f + + // Mandatory overrides of `fromSpecific`, `newSpecificBuilder`, + // and `empty`, from `IterableOps` + override protected def fromSpecific(coll: IterableOnce[(String, BsonValue)]): Document = Document.fromSpecific(coll) + override protected def newSpecificBuilder: mutable.Builder[(String, BsonValue), Document] = Document.newBuilder + override def empty: Document = Document.empty + + // Overloading of `appended`, `prepended`, `appendedAll`, `prependedAll`, + // `map`, `flatMap` and `concat` to return an `Document` when possible + def concat(suffix: IterableOnce[(String, BsonValue)]): Document = strictOptimizedConcat(suffix, newSpecificBuilder) + // scalastyle:off method.name + @inline final def ++(suffix: IterableOnce[(String, BsonValue)]): Document = concat(suffix) + // scalastyle:on method.name + def map[B](f: ((String, BsonValue)) => (String, BsonValue)): Document = strictOptimizedMap(newSpecificBuilder, f) + // TODO other operations + + // scalastyle:off method.name + /** + * Adds a new key/value pair to this document. 
+ * If the document already contains a mapping for the key, it will be overridden by the new value. + * + * @param elems the key/value pair. This can be any valid `(String, BsonValue)` pair that can be transformed into a [[BsonElement]] + * via [[BsonMagnets.CanBeBsonElement]] implicits and any [[BsonTransformer]]s that are in scope. + * @return the document itself + */ + def +=(elems: CanBeBsonElement*): Document = { + elems.foreach(elem => underlying.put(elem.key, elem.value)) + this + } + + /** + * Adds all elements produced by a TraversableOnce to this document. + * + * @param elems a sequence of key/values that make up the Document. This can be any valid sequence of `(String, BsonValue)` pairs that + * can be transformed into a sequence of [[BsonElement]]s via [[BsonMagnets.CanBeBsonElements]] implicits and + * any [[BsonTransformer]]s + * that are in scope. + * @return the document itself. + */ + def ++=(elems: CanBeBsonElements): Document = { + elems.values.foreach(elem => underlying.put(elem.key, elem.value)) + this + } + // scalastyle:on method.name + + /** + * Adds a new key/value pair to this map. + * If the document already contains a mapping for the key, it will be overridden by the new value. + * + * @param key The key to update + * @param value The new value + */ + def update[B](key: String, value: B)(implicit transformer: BsonTransformer[B]): Unit = { this += ((key, value)) } + + /** + * Adds a new key/value pair to this document and optionally returns previously bound value. + * If the document already contains a mapping for the key, it will be overridden by the new value. + * + * @param key the key to update + * @param value the new value + * @return an option value containing the value associated with the key before the `put` operation was executed, or + * `None` if `key` was not defined in the document before. 
+ */ + def put[B](key: String, value: B)(implicit transformer: BsonTransformer[B]): Option[BsonValue] = { + val r = get(key) + update(key, value) + r + } + + /** + * If given key is already in this document, returns associated value. + * + * Otherwise, computes value from given expression `op`, stores with key in document and returns that value. + * @param key the key to test + * @param op the computation yielding the value to associate with `key`, if `key` is previously unbound. + * @return the value associated with key (either previously or as a result of executing the method). + */ + def getOrElseUpdate[B](key: String, op: => B)(implicit transformer: BsonTransformer[B]): BsonValue = { + if (get(key).isEmpty) this += ((key, op)) + this(key) + } + + // scalastyle:off method.name + /** + * Removes a key from this document. + * @param key the key to be removed + * @return the document itself. + */ + def -=(key: String): Document = { underlying.remove(key); this } + + /** + * Removes two or more elements from this document. + * + * @param elems the remaining elements to remove. + * @return the document itself + */ + def -=(elems: String*): Document = { + this --= elems + } + + /** + * Removes all elements produced by an iterator from this document. + * + * @param xs the iterator producing the elements to remove. + * @return the document itself + */ + def --=(xs: IterableOnce[String]): Document = { xs foreach -=; this } + // scalastyle:on method.name + + /** + * Removes a key from this document, returning the value associated previously with that key as an option. + * @param key the key to be removed + * @return an option value containing the value associated previously with `key`, + * or `None` if `key` was not defined in the document before. + */ + def remove(key: String): Option[BsonValue] = { + val r = get(key) + this -= key + r + } + + /** + * Retains only those mappings for which the predicate `p` returns `true`. 
+ * + * @param p The test predicate + */ + def retain(p: (String, BsonValue) => Boolean): Document = { + for ((k, v) <- this) + if (!p(k, v)) underlying.remove(k) + this + } + + /** + * Removes all bindings from the document. After this operation has completed the document will be empty. + */ + def clear(): Unit = underlying.clear() + + /** + * Applies a transformation function to all values contained in this document. + * The transformation function produces new values from existing keys associated values. + * + * @param f the transformation to apply + * @return the document itself. + */ + def transform[B](f: (String, BsonValue) => B)(implicit transformer: BsonTransformer[B]): Document = { + this.foreach(kv => update(kv._1, f(kv._1, kv._2))) + this + } + + /** + * Copies the document and creates a new one + * + * @return a new document with a copy of the underlying BsonDocument + */ + def copy(): Document = Document(copyBsonDocument()) +} diff --git a/bson-scala/src/main/scala-2.13-/org/mongodb/scala/bson/collection/immutable/Document.scala b/bson-scala/src/main/scala-2.13-/org/mongodb/scala/bson/collection/immutable/Document.scala new file mode 100644 index 00000000000..4ed15c16b25 --- /dev/null +++ b/bson-scala/src/main/scala-2.13-/org/mongodb/scala/bson/collection/immutable/Document.scala @@ -0,0 +1,142 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.collection.immutable + +import scala.collection.JavaConverters._ +import scala.collection.generic.CanBuildFrom +import scala.collection.mutable.ListBuffer +import scala.collection.{ mutable, Traversable, TraversableLike } + +import org.mongodb.scala.bson._ +import org.mongodb.scala.bson.collection.BaseDocument + +/** + * The immutable [[Document]] companion object for easy creation. + */ +object Document { + + import BsonMagnets._ + + /** + * Create a new empty Document + * @return a new Document + */ + def empty: Document = apply() + + /** + * Create a new Document + * @return a new Document + */ + def apply(): Document = new Document(new BsonDocument()) + + /** + * Parses a string in MongoDB Extended JSON format to a `Document` + * + * @param json the JSON string + * @return a corresponding `Document` object + * @see org.bson.json.JsonReader + * @see [[https://www.mongodb.com/docs/manual/reference/mongodb-extended-json/ MongoDB Extended JSON]] + */ + def apply(json: String): Document = new Document(BsonDocument(json)) + + /** + * Create a new document from the elems + * @param elems the key/value pairs that make up the Document. This can be any valid `(String, BsonValue)` pair that can be + * transformed into a [[BsonElement]] via [[BsonMagnets.CanBeBsonElement]] implicits and any [[BsonTransformer]]s that + * are in scope. + * @return a new Document consisting key/value pairs given by `elems`. + */ + def apply(elems: CanBeBsonElement*): Document = { + val underlying = new BsonDocument() + elems.foreach(elem => underlying.put(elem.key, elem.value)) + new Document(underlying) + } + + /** + * Create a new document from the elems + * @param elems a sequence of key/values that make up the Document. This can be any valid sequence of `(String, BsonValue)` pairs that + * can be transformed into a sequence of [[BsonElement]]s via [[BsonMagnets.CanBeBsonElements]] implicits and any + * [[BsonTransformer]]s + * that are in scope. 
+ * @return a new Document consisting key/value pairs given by `elems`. + */ + def apply(elems: CanBeBsonElements): Document = { + val underlying = new BsonDocument() + elems.values.foreach(el => underlying.put(el.key, el.value)) + new Document(underlying) + } + + /** + * A implicit builder factory. + * + * @return a builder factory. + */ + implicit def canBuildFrom: CanBuildFrom[Traversable[(String, BsonValue)], (String, BsonValue), Document] = { + new CanBuildFrom[Traversable[(String, BsonValue)], (String, BsonValue), Document] { + def apply(): mutable.Builder[(String, BsonValue), Document] = builder + def apply(from: Traversable[(String, BsonValue)]): mutable.Builder[(String, BsonValue), Document] = builder + } + } + + def builder: mutable.Builder[(String, BsonValue), Document] = ListBuffer[(String, BsonValue)]() mapResult fromSeq + + def fromSeq(ts: Seq[(String, BsonValue)]): Document = { + val underlying = new BsonDocument() + ts.foreach(kv => underlying.put(kv._1, kv._2)) + apply(underlying) + } +} + +/** + * An immutable Document implementation. + * + * A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there + * is no variance in the value type and it always has to be a `BsonValue`. + * + * @param underlying the underlying BsonDocument which stores the data. + * + */ +case class Document(protected[scala] val underlying: BsonDocument) + extends BaseDocument[Document] + with TraversableLike[(String, BsonValue), Document] { + + /** + * Creates a new immutable document + * @param underlying the underlying BsonDocument + * @return a new document + */ + protected[scala] def apply(underlying: BsonDocument) = new Document(underlying) + + /** + * Applies a function `f` to all elements of this document. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. 
+ * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + * + */ + override def foreach[U](f: ((String, BsonValue)) => U): Unit = underlying.asScala foreach f + + /** + * Creates a new builder for this collection type. + */ + override def newBuilder: mutable.Builder[(String, BsonValue), Document] = Document.builder + +} diff --git a/bson-scala/src/main/scala-2.13-/org/mongodb/scala/bson/collection/mutable/Document.scala b/bson-scala/src/main/scala-2.13-/org/mongodb/scala/bson/collection/mutable/Document.scala new file mode 100644 index 00000000000..714ef73583f --- /dev/null +++ b/bson-scala/src/main/scala-2.13-/org/mongodb/scala/bson/collection/mutable/Document.scala @@ -0,0 +1,284 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.collection.mutable + +import scala.collection.JavaConverters._ +import scala.collection._ +import scala.collection.generic.CanBuildFrom +import scala.collection.mutable.ListBuffer + +import org.mongodb.scala.bson._ +import org.mongodb.scala.bson.collection.BaseDocument + +/** + * Mutable [[Document]] companion object for easy creation. 
+ */ +object Document { + + import BsonMagnets._ + + /** + * Create a new empty Document + * @return a new Document + */ + def empty: Document = apply() + + /** + * Create a new Document + * @return a new Document + */ + def apply(): Document = Document(BsonDocument()) + + /** + * Parses a string in MongoDB Extended JSON format to a `Document` + * + * @param json the JSON string + * @return a corresponding `Document` object + * @see org.bson.json.JsonReader + * @see [[https://www.mongodb.com/docs/manual/reference/mongodb-extended-json/ MongoDB Extended JSON]] + */ + def apply(json: String): Document = Document(BsonDocument(json)) + + /** + * Create a new document from the elems + * @param elems the key/value pairs that make up the Document. This can be any valid `(String, BsonValue)` pair that can be + * transformed into a [[BsonElement]] via [[BsonMagnets.CanBeBsonElement]] implicits and any [[BsonTransformer]]s that are + * in scope. + * @return a new Document consisting key/value pairs given by `elems`. + */ + def apply(elems: CanBeBsonElement*): Document = { + val underlying = new BsonDocument() + elems.foreach(elem => underlying.put(elem.key, elem.value)) + new Document(underlying) + } + + /** + * Create a new document from the elems + * @param elem a sequence of key/values that make up the Document. This can be any valid sequence of `(String, BsonValue)` pairs that + * can be transformed into a sequence of [[BsonElement]]s via [[BsonMagnets.CanBeBsonElements]] implicits and any + * [[BsonTransformer]]s + * that are in scope. + * @return a new Document consisting key/value pairs given by `elems`. + */ + def apply(elem: CanBeBsonElements): Document = { + val underlying = new BsonDocument() + elem.values.foreach(kv => underlying.put(kv.key, kv.value)) + new Document(underlying) + } + + /** + * A implicit builder factory. + * + * @return a builder factory. 
+ */ + implicit def canBuildFrom: CanBuildFrom[Traversable[(String, BsonValue)], (String, BsonValue), Document] = { + new CanBuildFrom[Traversable[(String, BsonValue)], (String, BsonValue), Document] { + def apply(): mutable.Builder[(String, BsonValue), Document] = builder + def apply(from: Traversable[(String, BsonValue)]): mutable.Builder[(String, BsonValue), Document] = builder + } + } + + private def builder: mutable.Builder[(String, BsonValue), Document] = + ListBuffer[(String, BsonValue)]() mapResult fromSeq + + private def fromSeq(ts: Seq[(String, BsonValue)]): Document = { + val underlying = new BsonDocument() + ts.foreach(kv => underlying.put(kv._1, kv._2)) + apply(underlying) + } +} + +/** + * A mutable Document implementation. + * + * A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there + * is no variance in the value type and it always has to be a `BsonValue`. + * + * @param underlying the underlying BsonDocument which stores the data. + */ +case class Document(protected[scala] val underlying: BsonDocument) + extends BaseDocument[Document] + with TraversableLike[(String, BsonValue), Document] + with Mutable { + + import BsonMagnets._ + + /** + * Creates a new mutable document + * @param underlying the underlying BsonDocument + * @return a new document + */ + protected[scala] def apply(underlying: BsonDocument) = new Document(underlying) + + /** + * Applies a function `f` to all elements of this document. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + * + */ + override def foreach[U](f: ((String, BsonValue)) => U): Unit = underlying.asScala foreach f + + /** + * Creates a new builder for this collection type. 
+ */ + override def newBuilder: mutable.Builder[(String, BsonValue), Document] = Document.builder + + // scalastyle:off method.name + /** + * Adds a new key/value pair to this document. + * If the document already contains a mapping for the key, it will be overridden by the new value. + * + * @param elems the key/value pair. This can be any valid `(String, BsonValue)` pair that can be transformed into a [[BsonElement]] + * via [[BsonMagnets.CanBeBsonElement]] implicits and any [[BsonTransformer]]s that are in scope. + * @return the document itself + */ + def +=(elems: CanBeBsonElement*): Document = { + elems.foreach(elem => underlying.put(elem.key, elem.value)) + this + } + + /** + * Adds all elements produced by a TraversableOnce to this document. + * + * @param elems a sequence of key/values that make up the Document. This can be any valid sequence of `(String, BsonValue)` pairs that + * can be transformed into a sequence of [[BsonElement]]s via [[BsonMagnets.CanBeBsonElements]] implicits and + * any [[BsonTransformer]]s + * that are in scope. + * @return the document itself. + */ + def ++=(elems: CanBeBsonElements): Document = { + elems.values.foreach(elem => underlying.put(elem.key, elem.value)) + this + } + // scalastyle:on method.name + + /** + * Adds a new key/value pair to this map. + * If the document already contains a mapping for the key, it will be overridden by the new value. + * + * @param key The key to update + * @param value The new value + */ + def update[B](key: String, value: B)(implicit transformer: BsonTransformer[B]): Unit = { this += ((key, value)) } + + /** + * Adds a new key/value pair to this document and optionally returns previously bound value. + * If the document already contains a mapping for the key, it will be overridden by the new value. 
+ * + * @param key the key to update + * @param value the new value + * @return an option value containing the value associated with the key before the `put` operation was executed, or + * `None` if `key` was not defined in the document before. + */ + def put[B](key: String, value: B)(implicit transformer: BsonTransformer[B]): Option[BsonValue] = { + val r = get(key) + update(key, value) + r + } + + /** + * If given key is already in this document, returns associated value. + * + * Otherwise, computes value from given expression `op`, stores with key in document and returns that value. + * @param key the key to test + * @param op the computation yielding the value to associate with `key`, if `key` is previously unbound. + * @return the value associated with key (either previously or as a result of executing the method). + */ + def getOrElseUpdate[B](key: String, op: => B)(implicit transformer: BsonTransformer[B]): BsonValue = { + if (get(key).isEmpty) this += ((key, op)) + this(key) + } + + // scalastyle:off method.name + /** + * Removes a key from this document. + * @param key the key to be removed + * @return the document itself. + */ + def -=(key: String): Document = { underlying.remove(key); this } + + /** + * Removes two or more elements from this document. + * + * @param elems the remaining elements to remove. + * @return the document itself + */ + def -=(elems: String*): Document = { + this --= elems + } + + /** + * Removes all elements produced by an iterator from this document. + * + * @param xs the iterator producing the elements to remove. + * @return the document itself + */ + def --=(xs: TraversableOnce[String]): Document = { xs foreach -=; this } + // scalastyle:on method.name + + /** + * Removes a key from this document, returning the value associated previously with that key as an option. 
+ * @param key the key to be removed + * @return an option value containing the value associated previously with `key`, + * or `None` if `key` was not defined in the document before. + */ + def remove(key: String): Option[BsonValue] = { + val r = get(key) + this -= key + r + } + + /** + * Retains only those mappings for which the predicate `p` returns `true`. + * + * @param p The test predicate + */ + def retain(p: (String, BsonValue) => Boolean): Document = { + for ((k, v) <- this) + if (!p(k, v)) underlying.remove(k) + this + } + + /** + * Removes all bindings from the document. After this operation has completed the document will be empty. + */ + def clear(): Unit = underlying.clear() + + /** + * Applies a transformation function to all values contained in this document. + * The transformation function produces new values from existing keys associated values. + * + * @param f the transformation to apply + * @return the document itself. + */ + def transform[B](f: (String, BsonValue) => B)(implicit transformer: BsonTransformer[B]): Document = { + this.foreach(kv => update(kv._1, f(kv._1, kv._2))) + this + } + + /** + * Copies the document and creates a new one + * + * @return a new document with a copy of the underlying BsonDocument + */ + def copy(): Document = Document(copyBsonDocument()) +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonElement.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonElement.scala new file mode 100644 index 00000000000..41d3112b1f1 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonElement.scala @@ -0,0 +1,28 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson + +/** + * A companion helper for a `BsonElement` - the mapping from a name to a BsonValue. + * + * Used by the [[BsonMagnets]] and polices valid key/value pair types for [[Document]]. + * + * @since 1.0 + */ +object BsonElement { + def apply(key: String, value: BsonValue): BsonElement = new BsonElement(key, value) +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonMagnets.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonMagnets.scala new file mode 100644 index 00000000000..f74e64c15f1 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonMagnets.scala @@ -0,0 +1,118 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson + +import scala.language.implicitConversions + +/** + * A magnet pattern implementation enforcing the validity of user provided native values being able to be converted into [[BsonValue]]s. 
+ * + * @since 1.0 + */ +protected[bson] object BsonMagnets { + + /** + * Represents any single [[BsonValue]] + * + * This is a `BsonValue` or any type of `T` that has a [[BsonTransformer]] in scope for the given type. + */ + sealed trait CanBeBsonValue { + val value: BsonValue + } + + /** + * Implicitly converts type `T` to a [[BsonValue]] as long as there is a [[BsonTransformer]] in scope for the given type. + * + * @param v the initial value + * @param transformer implicitly provided [[BsonTransformer]] that needs to be in scope for type `T` to be transformed into a [[BsonValue]] + * @tparam T the type of the initial value + * @return A CanBeBsonValue that holds the transformed [[BsonValue]] + */ + implicit def singleToCanBeBsonValue[T](v: T)(implicit transformer: BsonTransformer[T]): CanBeBsonValue = { + new CanBeBsonValue { + override val value: BsonValue = transformer(v) + } + } + + /** + * Represents a single [[BsonElement]] + * + * This is essentially a `(String, BsonValue)` key value pair. Any pair of `(String, T)` where type `T` has a [[BsonTransformer]] in + * scope into a [[BsonValue]] is also a valid pair. 
+ */ + sealed trait CanBeBsonElement { + val bsonElement: BsonElement + + /** + * The key of the [[BsonElement]] + * @return the key + */ + def key: String = bsonElement.getName + + /** + * The value of the [[BsonElement]] + * @return the BsonValue + */ + def value: BsonValue = bsonElement.getValue + } + + /** + * Implicitly converts key/value tuple of type (String, T) into a `CanBeBsonElement` + * + * @param kv the key value pair + * @param transformer the implicit [[BsonTransformer]] for the value + * @tparam T the type of the value + * @return a CanBeBsonElement representing the key/value pair + */ + implicit def tupleToCanBeBsonElement[T]( + kv: (String, T) + )(implicit transformer: BsonTransformer[T]): CanBeBsonElement = { + new CanBeBsonElement { + override val bsonElement: BsonElement = BsonElement(kv._1, transformer(kv._2)) + } + } + + /** + * Represents a sequence of [[BsonElement]]s + * + * This is essentially a `Iterable[(String, BsonValue)]` of key value pairs. Any pair of `(String, T)` where type `T` has a + * [[BsonTransformer]] in scope into a [[BsonValue]] is also a valid pair. + */ + sealed trait CanBeBsonElements { + + /** + * The `BsonElement` sequence + */ + val values: Iterable[BsonElement] + } + + /** + * Implicitly converts any iterable of key/value pairs into a [[CanBeBsonElements]]. 
+ * + * @param elems the iterable of key/value pairs + * @param transformer the implicit transformer for the values + * @tparam T the type of the values + * @return CanBeBsonElements representing the key/value pairs + */ + implicit def iterableToCanBeBsonElements[T]( + elems: Iterable[(String, T)] + )(implicit transformer: BsonTransformer[T]): CanBeBsonElements = + new CanBeBsonElements { + override val values: Iterable[BsonElement] = elems.map(kv => BsonElement(kv._1, transformer(kv._2))) + } + +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonTransformer.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonTransformer.scala new file mode 100644 index 00000000000..c3a0a64dc45 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonTransformer.scala @@ -0,0 +1,228 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson + +import java.util.Date + +import scala.annotation.implicitNotFound +import scala.collection.JavaConverters._ +import scala.util.matching.Regex + +import org.mongodb.scala.bson.collection.immutable.{ Document => IDocument } +import org.mongodb.scala.bson.collection.mutable.{ Document => MDocument } + +/** + * BsonTransformers allow the transformation of type `T` to their corresponding [[BsonValue]]. 
+ * + * Custom implementations can be written to implicitly convert a `T` into a [[BsonValue]] so it can be stored in a `Document`. + * + * @tparam T the type of value to be transformed into a [[BsonValue]]. + * @since 1.0 + */ +@implicitNotFound( + "No bson implicit transformer found for type ${T}. Implement or import an implicit BsonTransformer for this type." +) +trait BsonTransformer[-T] { + + /** + * Convert the object into a [[BsonValue]] + */ + def apply(value: T): BsonValue +} + +/** + * Maps the following native scala types to BsonValues: + * + * - `BsonValue => BsonValue` + * - `BigDecimal` => BsonDecimal128 + * - `Boolean => BsonBoolean` + * - `String => BsonString` + * - `Array[Byte] => BsonBinary` + * - `Regex => BsonRegex` + * - `Date => BsonDateTime` + * - `Decimal128` => BsonDecimal128 + * - `ObjectId => BsonObjectId` + * - `Int => BsonInt32` + * - `Long => BsonInt64` + * - `Double => BsonDouble` + * - `None => BsonNull` + * - `immutable.Document => BsonDocument` + * - `mutable.Document => BsonDocument` + * - `Option[T] => BsonValue` where `T` is one of the above types + * - `Seq[(String, T)] => BsonDocument` where `T` is one of the above types + * - `Seq[T] => BsonArray` where `T` is one of the above types + */ +object BsonTransformer extends DefaultBsonTransformers {} + +/** + * Default BsonTransformers for native types. 
+ */ +trait DefaultBsonTransformers extends LowPrio { + + /** + * Noop transformer for `BsonValue`s + */ + implicit object TransformBsonValue extends BsonTransformer[BsonValue] { + def apply(value: BsonValue): BsonValue = value + } + + /** + * Transforms `BigDecimal` to `BsonDecimal128` + */ + implicit object TransformBigDecimal extends BsonTransformer[BigDecimal] { + def apply(value: BigDecimal): BsonDecimal128 = BsonDecimal128(value) + } + + /** + * Transforms `Boolean` to `BsonBoolean` + */ + implicit object TransformBoolean extends BsonTransformer[Boolean] { + def apply(value: Boolean): BsonBoolean = BsonBoolean(value) + } + + /** + * Transforms `String` to `BsonString` + */ + implicit object TransformString extends BsonTransformer[String] { + def apply(value: String): BsonString = BsonString(value) + } + + /** + * Transforms `Array[Byte]` to `BsonBinary` + */ + implicit object TransformBinary extends BsonTransformer[Array[Byte]] { + def apply(value: Array[Byte]): BsonBinary = BsonBinary(value) + } + + /** + * Transforms `Regex` to `BsonRegex` + */ + implicit object TransformRegex extends BsonTransformer[Regex] { + def apply(value: Regex): BsonRegularExpression = BsonRegularExpression(value) + } + + /** + * Transforms `Date` to `BsonDateTime` + */ + implicit object TransformDateTime extends BsonTransformer[Date] { + def apply(value: Date): BsonDateTime = BsonDateTime(value) + } + + /** + * Transforms `Decimal128` to `BsonDecimal128` + */ + implicit object TransformDecimal128 extends BsonTransformer[Decimal128] { + def apply(value: Decimal128): BsonDecimal128 = BsonDecimal128(value) + } + + /** + * Transforms `ObjectId` to `BsonObjectId` + */ + implicit object TransformObjectId extends BsonTransformer[ObjectId] { + def apply(value: ObjectId): BsonObjectId = BsonObjectId(value) + } + + /** + * Transforms `Int` to `BsonInt32` + */ + implicit object TransformInt extends BsonTransformer[Int] { + def apply(value: Int): BsonInt32 = BsonInt32(value) + } + + /** + * 
Transforms `Long` to `BsonInt64` + */ + implicit object TransformLong extends BsonTransformer[Long] { + def apply(value: Long): BsonInt64 = BsonInt64(value) + } + + /** + * Transforms `Double` to `BsonDouble` + */ + implicit object TransformDouble extends BsonTransformer[Double] { + def apply(value: Double): BsonDouble = BsonDouble(value) + } + + /** + * Transforms `None` to `BsonNull` + */ + implicit object TransformNone extends BsonTransformer[Option[Nothing]] { + def apply(value: Option[Nothing]): BsonNull = BsonNull() + } + + /** + * Transforms `Option[T]` to `BsonValue` + */ + implicit def transformOption[T](implicit transformer: BsonTransformer[T]): BsonTransformer[Option[T]] = { + new BsonTransformer[Option[T]] { + def apply(value: Option[T]): BsonValue = value match { + case Some(transformable) => transformer(transformable) + case None => BsonNull() + } + } + } + +} + +trait LowPrio { + + /** + * Transforms `immutable.Document` to `BsonDocument` + */ + implicit object TransformImmutableDocument extends BsonTransformer[IDocument] { + def apply(value: IDocument): BsonDocument = value.toBsonDocument + } + + /** + * Transforms `mutable.Document` to `BsonDocument` + */ + implicit object TransformMutableDocument extends BsonTransformer[MDocument] { + def apply(value: MDocument): BsonDocument = value.underlying + } + + /** + * Transforms `Seq[(String, T)]` to `BsonDocument` + * + * @param transformer implicit transformer for type `T` + * @tparam T the type of the values + * @return a BsonDocument containing the values + */ + implicit def transformKeyValuePairs[T]( + implicit transformer: BsonTransformer[T] + ): BsonTransformer[Seq[(String, T)]] = { + new BsonTransformer[Seq[(String, T)]] { + def apply(values: Seq[(String, T)]): BsonDocument = { + BsonDocument(values.map(kv => (kv._1, transformer(kv._2))).toList) + } + } + } + + /** + * Transforms `Seq[T]` to `BsonArray` + * + * @param transformer implicit transformer for type `T` + * @tparam T the type of the 
values + * @return a BsonArray containing all the values + */ + implicit def transformSeq[T](implicit transformer: BsonTransformer[T]): BsonTransformer[Seq[T]] = { + new BsonTransformer[Seq[T]] { + def apply(values: Seq[T]): BsonValue = { + new BsonArray(values.map(transformer.apply).toList.asJava) + } + } + } +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonValue.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonValue.scala new file mode 100644 index 00000000000..8fdd797369c --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/BsonValue.scala @@ -0,0 +1,497 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson + +import java.util.Date + +import scala.collection.JavaConverters._ +import scala.util.matching.Regex + +import org.bson.{ BsonDocument => JBsonDocument } + +/** + * Companion helper for a BsonArray + * + * @since 1.0 + */ +object BsonArray { + import BsonMagnets._ + + /** + * Create an empty BsonArray + * @return the BsonArray + */ + def apply(): BsonArray = new BsonArray() + + /** + * Create a BsonArray from the provided values + * + * @param elems the `BsonValues` to become the `BsonArray` + * @return the BsonArray + */ + @deprecated("Use `fromIterable` instead", "2.7.0") + def apply(elems: Iterable[BsonValue]): BsonArray = fromIterable(elems) + + /** + * Create a BsonArray from the provided values + * + * @param elems the `BsonValues` to become the `BsonArray` + * @return the BsonArray + */ + def fromIterable(elems: Iterable[BsonValue]): BsonArray = new BsonArray(elems.toList.asJava) + + /** + * Creates a BsonArray from the provided values + * + * + * @param elems the values that can be transformed into a `BsonValue` + * @return the BsonArray + */ + def apply(elems: CanBeBsonValue*): BsonArray = new BsonArray(elems.map(_.value).asJava) +} + +/** + * Companion helper for a BsonBinary + * @since 1.0 + */ +object BsonBinary { + + /** + * Creates the BsonBinary form the provided bytes + * + * @param value the bytes + * @return the BsonBinary + */ + def apply(value: Array[Byte]): BsonBinary = new BsonBinary(value) +} + +/** + * Companion helper for a BsonBoolean + * @since 1.0 + */ +object BsonBoolean { + + /** + * Creates a `BsonBoolean` + * + * @param value the value + * @return the BsonBoolean + */ + def apply(value: Boolean): BsonBoolean = new BsonBoolean(value) +} + +/** + * Companion helper for a BsonDateTime + * @since 1.0 + */ +object BsonDateTime { + + /** + * Creates a BsonDateTime + * + * @param value the number of milliseconds since the Unix epoch + * @return the BsonDateTime + */ + def apply(value: Long): 
BsonDateTime = new BsonDateTime(value) + + /** + * Creates a BsonDateTime + * + * @param date a `java.util.Date` + * @return the BsonDateTime + */ + def apply(date: Date): BsonDateTime = new BsonDateTime(date.getTime) +} + +/** + * Companion helper for a BsonDecimal128 + * @since 1.2 + */ +object BsonDecimal128 { + + /** + * Creates a `BsonDecimal128` + * + * @param value the `Decimal128` + * @return the BsonDecimal128 + */ + def apply(value: Decimal128): BsonDecimal128 = new BsonDecimal128(value) + + /** + * Creates a `BsonDecimal128` + * + * @param value the `BigDecimal` + * @return the BsonDecimal128 + */ + def apply(value: BigDecimal): BsonDecimal128 = apply(new Decimal128(value.bigDecimal)) + + /** + * Creates a `BsonDecimal128` + * + * @param value the long value to convert + * @return the BsonDecimal128 + */ + def apply(value: Long): BsonDecimal128 = apply(new Decimal128(value)) + + /** + * Creates a `BsonDecimal128` + * + * @param value the string value to convert + * @return the BsonDecimal128 + */ + def apply(value: String): BsonDecimal128 = apply(org.bson.types.Decimal128.parse(value)) +} + +/** + * Companion helper for a BsonDocument + * @since 1.0 + */ +object BsonDocument { + + import BsonMagnets._ + + /** + * Creates an empty `BsonDocument` + * @return the BsonDocument + */ + def apply(): BsonDocument = new JBsonDocument() + + /** + * Creates a `BsonDocument` from the key value pairs + * + * @param elems a traversable of key, value pairs + * @return the BsonDocument + */ + def apply(elems: Traversable[(String, BsonValue)]): BsonDocument = { + val bsonDocument = new JBsonDocument() + elems.foreach(kv => bsonDocument.put(kv._1, kv._2)) + bsonDocument + } + + /** + * Creates a `BsonDocument` from key value pairs + * + * @param elems the key, value pairs + * @return the BsonDocument + */ + def apply(elems: CanBeBsonElement*): BsonDocument = { + val bsonDocument = new JBsonDocument() + elems.foreach(elem => bsonDocument.put(elem.key, elem.value)) + bsonDocument + } + + 
/** + * Creates a `BsonDocument` from a json String + * + * @param json the json string + * @return the BsonDocument + */ + def apply(json: String): BsonDocument = JBsonDocument.parse(json) +} + +/** + * Companion helper for a BsonDouble + * @since 1.0 + */ +object BsonDouble { + + /** + * Creates a `BsonDouble` + * + * @param value the BsonDouble value + * @return the BsonDouble + */ + def apply(value: Double): BsonDouble = new BsonDouble(value) +} + +/** + * Companion helper for a BsonInt32 + * @since 1.0 + */ +object BsonInt32 { + + /** + * Creates a `BsonInt32` + * + * @param value the BsonInt32 value + * @return the BsonInt32 + */ + def apply(value: Int): BsonInt32 = new BsonInt32(value) +} + +/** + * Companion helper for a BsonInt64 + * @since 1.0 + */ +object BsonInt64 { + + /** + * Creates a `BsonInt64` + * + * @param value the BsonInt64 value + * @return the BsonInt64 + */ + def apply(value: Long): BsonInt64 = new BsonInt64(value) +} + +/** + * Companion helper for a BsonJavaScript + * @since 1.0 + */ +object BsonJavaScript { + + /** + * Creates a `BsonJavaScript` + * + * @param value the javascript function + * @return the BsonJavaScript + */ + def apply(value: String): BsonJavaScript = new BsonJavaScript(value) +} + +/** + * Companion helper for a BsonJavaScriptWithScope + * @since 1.0 + */ +object BsonJavaScriptWithScope { + import BsonMagnets._ + + /** + * Creates a `BsonJavaScriptWithScope` + * + * @param value the javascript function + * @param scope the function scope + * @return the BsonJavaScriptWithScope + */ + def apply(value: String, scope: BsonDocument): BsonJavaScriptWithScope = new BsonJavaScriptWithScope(value, scope) + + /** + * Creates a `BsonJavaScriptWithScope` + * + * @param value the javascript function + * @param scope the function scope + * @return the BsonJavaScriptWithScope + */ + def apply(value: String, scope: CanBeBsonElement*): BsonJavaScriptWithScope = + new BsonJavaScriptWithScope(value, BsonDocument(scope: _*)) + + /** + * Creates a `BsonJavaScriptWithScope` + * + * 
@param value the javascript function + * @param scope the function scope + * @return the BsonJavaScript + */ + def apply(value: String, scope: Traversable[(String, BsonValue)]): BsonJavaScriptWithScope = + new BsonJavaScriptWithScope(value, BsonDocument(scope)) +} + +/** + * Companion helper for a BsonMaxKey + * @since 1.0 + */ +object BsonMaxKey { + + /** + * Creates a `BsonMaxKey` + * @return the BsonMaxKey + */ + def apply(): BsonMaxKey = new BsonMaxKey() +} + +/** + * Companion helper for a BsonMinKey + * @since 1.0 + */ +object BsonMinKey { + + /** + * Creates a `BsonMinKey` + * @return the BsonMinKey + */ + def apply(): BsonMinKey = new BsonMinKey() +} + +/** + * Companion helper for a BsonNull + * @since 1.0 + */ +object BsonNull { + + /** + * Creates a `BsonNull` + * @return the BsonNull + */ + def apply(): BsonNull = new BsonNull() +} + +/** + * Companion helper for a BsonNumber + * @since 1.0 + */ +object BsonNumber { + + /** + * Creates a `BsonNumber` + * + * @param value the value + * @return the BsonNumber + */ + def apply(value: Int): BsonNumber = new BsonInt32(value) + + /** + * Creates a `BsonNumber` + * + * @param value the value + * @return the BsonNumber + */ + def apply(value: Long): BsonNumber = new BsonInt64(value) + + /** + * Creates a `BsonNumber` + * + * @param value the value + * @return the BsonNumber + */ + def apply(value: Double): BsonNumber = new BsonDouble(value) +} + +/** + * Companion helper for a BsonObjectId + * @since 1.0 + */ +object BsonObjectId { + + /** + * Creates a new `BsonObjectId` + * + * @return the BsonObjectId + */ + def apply(): BsonObjectId = new BsonObjectId(new ObjectId()) + + /** + * Creates a new `BsonObjectId` + * + * @param value the 24-byte hexadecimal string representation of an `ObjectId`. + * @return the BsonObjectId + */ + def apply(value: String): BsonObjectId = new BsonObjectId(new ObjectId(value)) + + /** + * Creates a new `BsonObjectId` + * + * @param value the `ObjectId`. 
+ * @return the BsonObjectId + */ + def apply(value: ObjectId): BsonObjectId = new BsonObjectId(value) +} + +/** + * Companion helper for a BsonRegularExpression + * @since 1.0 + */ +object BsonRegularExpression { + + /** + * Creates a new `BsonRegularExpression` + * + * @param value the `Regex`. + * @return the BsonRegularExpression + */ + def apply(value: Regex): BsonRegularExpression = new BsonRegularExpression(value.regex) + + /** + * Creates a new `BsonRegularExpression` + * + * @param value the Regex string. + * @return the BsonRegularExpression + */ + def apply(value: String): BsonRegularExpression = new BsonRegularExpression(value) + + /** + * Creates a new `BsonRegularExpression` + * + * @param value the Regex string. + * @param options the regex options string + * @return the BsonRegularExpression + */ + def apply(value: String, options: String): BsonRegularExpression = new BsonRegularExpression(value, options) +} + +/** + * Companion helper for a BsonString + * @since 1.0 + */ +object BsonString { + + /** + * Creates a new `BsonString` + * + * @param value the string. + * @return the BsonString + */ + def apply(value: String): BsonString = new BsonString(value) +} + +/** + * Companion helper for a BsonSymbol + * @since 1.0 + */ +object BsonSymbol { + + /** + * Creates a new `BsonSymbol` + * + * @param value the Symbol. 
+ * @return the BsonSymbol + */ + def apply(value: Symbol): BsonSymbol = new BsonSymbol(value.name) +} + +/** + * Companion helper for a BsonTimestamp + * @since 1.0 + */ +object BsonTimestamp { + + /** + * Creates a new `BsonTimestamp` + * @return the BsonTimestamp + */ + def apply(): BsonTimestamp = new BsonTimestamp(0, 0) + + /** + * Creates a new `BsonTimestamp` + * @param time the time in seconds since epoch + * @param inc an incrementing ordinal for operations within a given second + * @return the BsonTimestamp + */ + def apply(time: Int, inc: Int): BsonTimestamp = new BsonTimestamp(time, inc) +} + +/** + * Companion helper for a BsonUndefined + * @since 1.0 + */ +object BsonUndefined { + + /** + * Creates a new `BsonUndefined` + * @return the BsonUndefined + */ + def apply(): BsonUndefined = new BsonUndefined() +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/DefaultHelper.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/DefaultHelper.scala new file mode 100644 index 00000000000..b8664276eae --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/DefaultHelper.scala @@ -0,0 +1,84 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson + +/** + * A helper containing the sealed `DefaultsTo` trait which is used to determine the default type for a given method. 
+ * + * @since 1.0 + */ +protected[scala] object DefaultHelper { + + /** + * Neat helper to obtain a default type should one not be given eg: + * + * {{{ + * def find[T]()(implicit e: T DefaultsTo Document) { ... } + * }}} + * + * The signature of the `find` method ensures that it can only be called if the caller can supply an object of type + * `DefaultsTo[T, Document]`. Of course, the [[DefaultsTo.default]] and [[DefaultsTo.overrideDefault]] methods make it easy to create + * such an object for any type `T`. Since these methods are implicit, the compiler automatically handles the business of calling one of + * them and passing the result into `find`. + * + * ''But how does the compiler know which method to call?'' It uses its type inference and implicit resolution rules to determine the + * appropriate method. There are three cases to consider: + * + * 1. `find` is called with no type parameter. In this case, type T must be inferred. Searching for an implicit method that can provide + * an object of type `DefaultsTo[T, Document]`, the compiler finds `default` and `overrideDefault`. `default` is chosen since it has + * priority (because it's defined in a proper subclass of the trait that defines overrideDefault). As a result, T must be bound to + * `Document`. + * + * 2. `find` is called with a non-Document type parameter (e.g., `find[BsonDocument]()`). In this case, an object of type + * `DefaultsTo[BsonDocument, Document]` must be supplied. Only the `overrideDefault` method can supply it, so the compiler inserts the + * appropriate call. + * + * 3. `find` is called with `Document` as the type parameter. Again, either method is applicable, but default wins due to its higher + * priority. + * + */ + sealed class DefaultsTo[A, B] + + /** + * Companion object for [[DefaultsTo]] + */ + object DefaultsTo extends LowPriorityDefaultsTo { + + /** + * Implicitly sets a default type of B. 
See [[DefaultsTo]] + * + * @tparam B the default type + * @return DefaultsTo[B, B] instance + */ + implicit def default[B]: DefaultsTo[B, B] = new DefaultsTo[B, B] + } + + /** + * Lower priority defaultsTo implicit helper + */ + trait LowPriorityDefaultsTo { + + /** + * Overrides the default with the set type of A. See [[DefaultsTo]] + * + * @tparam A The type to use + * @tparam B The default type in case type A was missing + * @return DefaultsTo[A, B] instance + */ + implicit def overrideDefault[A, B]: DefaultsTo[A, B] = new DefaultsTo[A, B] + } +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/annotations/BsonIgnore.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/annotations/BsonIgnore.scala new file mode 100644 index 00000000000..f1300a90713 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/annotations/BsonIgnore.scala @@ -0,0 +1,10 @@ +package org.mongodb.scala.bson.annotations + +import scala.annotation.StaticAnnotation + +/** + * Annotation to ignore a property. + * + * @since 4.2 + */ +case class BsonIgnore() extends StaticAnnotation diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/annotations/BsonProperty.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/annotations/BsonProperty.scala new file mode 100644 index 00000000000..28fe469e5f8 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/annotations/BsonProperty.scala @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.annotations + +import scala.annotation.StaticAnnotation + +/** + * Annotation to change the stored key of a property + * + * @param key the key for the stored property + */ +case class BsonProperty(key: String) extends StaticAnnotation diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/DocumentCodecProvider.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/DocumentCodecProvider.scala new file mode 100644 index 00000000000..b60032961af --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/DocumentCodecProvider.scala @@ -0,0 +1,42 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs + +import org.bson.codecs.Codec +import org.bson.codecs.configuration.{ CodecProvider, CodecRegistry } +import org.mongodb.scala.bson.collection.{ immutable, mutable } + +/** + * A [[http://api.mongodb.org/java/current/org/bson/codecs/configuration/CodecProvider.html CodecProvider]] for the Document + * class and all the default Codec implementations on which it depends. 
+ */ +case class DocumentCodecProvider() extends CodecProvider { + + val IMMUTABLE: Class[immutable.Document] = classOf[immutable.Document] + val MUTABLE: Class[mutable.Document] = classOf[mutable.Document] + + // scalastyle:off null + @SuppressWarnings(Array("unchecked")) + def get[T](clazz: Class[T], registry: CodecRegistry): Codec[T] = { + clazz match { + case IMMUTABLE => ImmutableDocumentCodec(registry).asInstanceOf[Codec[T]] + case MUTABLE => MutableDocumentCodec(registry).asInstanceOf[Codec[T]] + case _ => null + } + } + // scalastyle:on null +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/ImmutableDocumentCodec.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/ImmutableDocumentCodec.scala new file mode 100644 index 00000000000..a82300b3e0d --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/ImmutableDocumentCodec.scala @@ -0,0 +1,63 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs + +import org.bson.codecs.configuration.CodecRegistry +import org.bson.codecs.{ BsonDocumentCodec, CollectibleCodec, DecoderContext, EncoderContext } +import org.bson.{ BsonReader, BsonValue, BsonWriter } +import org.mongodb.scala.bson.collection.immutable.Document + +/** + * Companion helper for immutable Document instances. 
+ */ +object ImmutableDocumentCodec { + def apply(): ImmutableDocumentCodec = ImmutableDocumentCodec(None) + def apply(registry: CodecRegistry): ImmutableDocumentCodec = ImmutableDocumentCodec(Some(registry)) +} + +/** + * A Codec for immutable Document instances. + * + * As the underlying driver expects documents to be mutable the driver has direct access to the Documents underlying + * mutable `BsonDocument` instance and therefore will mutate the document when adding an `_id` + */ +case class ImmutableDocumentCodec(registry: Option[CodecRegistry]) extends CollectibleCodec[Document] { + + lazy val underlying: BsonDocumentCodec = { + registry.map(new BsonDocumentCodec(_)).getOrElse(new BsonDocumentCodec) + } + + override def generateIdIfAbsentFromDocument(document: Document): Document = { + if (!underlying.documentHasId(document.underlying)) { + Document(underlying.generateIdIfAbsentFromDocument(document.toBsonDocument.clone)) + } else { + document + } + } + + override def documentHasId(document: Document): Boolean = underlying.documentHasId(document.underlying) + + override def getDocumentId(document: Document): BsonValue = underlying.getDocumentId(document.underlying) + + override def encode(writer: BsonWriter, value: Document, encoderContext: EncoderContext): Unit = + underlying.encode(writer, value.underlying, encoderContext) + + override def getEncoderClass: Class[Document] = classOf[Document] + + override def decode(reader: BsonReader, decoderContext: DecoderContext): Document = + Document(underlying.decode(reader, decoderContext)) +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/IterableCodec.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/IterableCodec.scala new file mode 100644 index 00000000000..c6d98d78ba0 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/IterableCodec.scala @@ -0,0 +1,130 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs + +import java.util.UUID + +import scala.collection.mutable + +import org.bson._ +import org.bson.codecs._ +import org.bson.codecs.configuration.CodecRegistry + +/** + * IterableCodec companion object + * + * @since 1.2 + */ +object IterableCodec { + + def apply(registry: CodecRegistry, bsonTypeClassMap: BsonTypeClassMap): IterableCodec = + apply(registry, bsonTypeClassMap, None) + + def apply( + registry: CodecRegistry, + bsonTypeClassMap: BsonTypeClassMap, + valueTransformer: Option[Transformer] + ): IterableCodec = { + new IterableCodec(registry, bsonTypeClassMap, valueTransformer.getOrElse(DEFAULT_TRANSFORMER)) + } + + private val DEFAULT_TRANSFORMER = new Transformer() { + def transform(objectToTransform: Object): Object = objectToTransform + } +} + +/** + * Encodes and decodes `Iterable` objects. 
+ * + * @since 1.2 + */ +@SuppressWarnings(Array("rawtypes")) +case class IterableCodec(registry: CodecRegistry, bsonTypeClassMap: BsonTypeClassMap, valueTransformer: Transformer) + extends Codec[Iterable[_ <: Any]] { + lazy val bsonTypeCodecMap = new BsonTypeCodecMap(bsonTypeClassMap, registry) + + override def decode(reader: BsonReader, decoderContext: DecoderContext): Iterable[_] = + readValue(reader, decoderContext).asInstanceOf[Iterable[_]] + + override def encode(writer: BsonWriter, value: Iterable[_ <: Any], encoderContext: EncoderContext): Unit = + writeValue(writer, encoderContext, value) + + override def getEncoderClass: Class[Iterable[_]] = classOf[Iterable[_]] + + @SuppressWarnings(Array("unchecked", "rawtypes")) + private def writeValue[T](writer: BsonWriter, encoderContext: EncoderContext, value: T): Unit = { + value match { + case isNull if value == null => writer.writeNull() // scalastyle:ignore + case map: Map[_, _] => + writeMap(writer, map.asInstanceOf[Map[String, Any]], encoderContext.getChildContext) + case list: Iterable[_] => + writeIterable(writer, list, encoderContext.getChildContext) + case _ => + val codec = registry.get(value.getClass).asInstanceOf[Encoder[T]] + encoderContext.encodeWithChildContext(codec, writer, value) + } + } + + private def writeMap(writer: BsonWriter, map: Map[String, Any], encoderContext: EncoderContext): Unit = { + writer.writeStartDocument() + map.foreach(kv => { + writer.writeName(kv._1) + writeValue(writer, encoderContext, kv._2) + }) + writer.writeEndDocument() + } + + private def writeIterable(writer: BsonWriter, list: Iterable[_], encoderContext: EncoderContext): Unit = { + writer.writeStartArray() + list.foreach(value => writeValue(writer, encoderContext, value)) + writer.writeEndArray() + } + + private def readValue(reader: BsonReader, decoderContext: DecoderContext): Any = { + reader.getCurrentBsonType match { + case BsonType.NULL => + reader.readNull() + null // scalastyle:ignore + case BsonType.ARRAY => 
readList(reader, decoderContext) + case BsonType.DOCUMENT => readMap(reader, decoderContext) + case BsonType.BINARY if BsonBinarySubType.isUuid(reader.peekBinarySubType) && reader.peekBinarySize == 16 => + registry.get(classOf[UUID]).decode(reader, decoderContext) + case bsonType: BsonType => + valueTransformer.transform(bsonTypeCodecMap.get(bsonType).decode(reader, decoderContext)) + } + } + + private def readMap(reader: BsonReader, decoderContext: DecoderContext): Map[String, _] = { + val map = mutable.Map[String, Any]() + reader.readStartDocument() + while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { + map += (reader.readName -> readValue(reader, decoderContext)) + } + reader.readEndDocument() + map.toMap + } + + private def readList(reader: BsonReader, decoderContext: DecoderContext): List[_] = { + reader.readStartArray() + val list = mutable.ListBuffer[Any]() + while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { + list.append(readValue(reader, decoderContext)) + } + reader.readEndArray() + list.toList + } +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/IterableCodecProvider.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/IterableCodecProvider.scala new file mode 100644 index 00000000000..eeb0d4ee440 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/IterableCodecProvider.scala @@ -0,0 +1,74 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs + +import org.bson.Transformer +import org.bson.codecs.Codec +import org.bson.codecs.configuration.{ CodecProvider, CodecRegistry } + +/** + * IterableCodecProvider companion object + * + * @since 1.2 + */ +object IterableCodecProvider { + + /** + * Create a `IterableCodecProvider` with the default `BsonTypeClassMap` and `Transformer`. + * @return the new instance + */ + def apply(): IterableCodecProvider = new IterableCodecProvider(BsonTypeClassMap(), None) + + /** + * Create a `IterableCodecProvider` with the given `BsonTypeClassMap` and the default `Transformer`. + * + * @param bsonTypeClassMap the bson type class map + * @return the new instance + */ + def apply(bsonTypeClassMap: BsonTypeClassMap): IterableCodecProvider = + new IterableCodecProvider(bsonTypeClassMap, None) + + /** + * Create a `IterableCodecProvider` with the default `BsonTypeClassMap` and the given `Transformer`. + * + * @param valueTransformer the value transformer for decoded values + * @return the new instance + */ + def apply(valueTransformer: Transformer): IterableCodecProvider = + new IterableCodecProvider(BsonTypeClassMap(), Option(valueTransformer)) +} + +/** + * A `CodecProvider` for classes than implement the `Iterable` interface. + * + * @param bsonTypeClassMap the non-null `BsonTypeClassMap` with which to construct instances of `DocumentCodec` and `ListCodec`. 
+ * @param valueTransformer the value transformer for decoded values + * + * @since 1.2 + */ +case class IterableCodecProvider(bsonTypeClassMap: BsonTypeClassMap, valueTransformer: Option[Transformer]) + extends CodecProvider { + + @SuppressWarnings(Array("unchecked")) + def get[T](clazz: Class[T], registry: CodecRegistry): Codec[T] = { + if (classOf[Iterable[_]].isAssignableFrom(clazz)) { + IterableCodec(registry, bsonTypeClassMap, valueTransformer).asInstanceOf[Codec[T]] + } else { + null // scalastyle:ignore + } + } +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/Macros.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/Macros.scala new file mode 100644 index 00000000000..f667342c91a --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/Macros.scala @@ -0,0 +1,130 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs + +import scala.annotation.compileTimeOnly +import scala.language.experimental.macros +import scala.language.implicitConversions + +import org.bson.codecs.Codec +import org.bson.codecs.configuration.{ CodecProvider, CodecRegistry } + +import org.mongodb.scala.bson.codecs.macrocodecs.{ CaseClassCodec, CaseClassProvider } + +/** + * Macro based Codecs + * + * Allows the compile time creation of Codecs for case classes. 
+ * + * The recommended approach is to use the implicit [[Macros.createCodecProvider[T](clazz:Class[T])*]] method to help build a codecRegistry: + * ``` + * import org.mongodb.scala.bson.codecs.Macros.createCodecProvider + * import org.bson.codecs.configuration.CodecRegistries.{fromRegistries, fromProviders} + * + * case class Contact(phone: String) + * case class User(_id: Int, username: String, age: Int, hobbies: List[String], contacts: List[Contact]) + * + * val codecRegistry = fromRegistries(fromProviders(classOf[User], classOf[Contact]), MongoClient.DEFAULT_CODEC_REGISTRY) + * ``` + * + * @since 2.0 + */ +object Macros { + + /** + * Creates a CodecProvider for a case class + * + * @tparam T the case class to create a Codec from + * @return the CodecProvider for the case class + */ + @compileTimeOnly("Creating a CodecProvider utilises Macros and must be run at compile time.") + def createCodecProvider[T](): CodecProvider = macro CaseClassProvider.createCodecProviderEncodeNone[T] + + /** + * Creates a CodecProvider for a case class using the given class to represent the case class + * + * @param clazz the clazz that is the case class + * @tparam T the case class to create a Codec from + * @return the CodecProvider for the case class + */ + @compileTimeOnly("Creating a CodecProvider utilises Macros and must be run at compile time.") + implicit def createCodecProvider[T](clazz: Class[T]): CodecProvider = + macro CaseClassProvider.createCodecProviderWithClassEncodeNone[T] + + /** + * Creates a CodecProvider for a case class that ignores any `None` values + * + * @tparam T the case class to create a Codec from + * @return the CodecProvider for the case class + * @since 2.1 + */ + @compileTimeOnly("Creating a CodecProvider utilises Macros and must be run at compile time.") + def createCodecProviderIgnoreNone[T](): CodecProvider = macro CaseClassProvider.createCodecProviderIgnoreNone[T] + + /** + * Creates a CodecProvider for a case class that ignores any `None` 
values, using the given class to represent the case class + * + * @param clazz the clazz that is the case class + * @tparam T the case class to create a Codec from + * @return the CodecProvider for the case class + * @since 2.1 + */ + @compileTimeOnly("Creating a CodecProvider utilises Macros and must be run at compile time.") + def createCodecProviderIgnoreNone[T](clazz: Class[T]): CodecProvider = + macro CaseClassProvider.createCodecProviderWithClassIgnoreNone[T] + + /** + * Creates a Codec for a case class + * + * @tparam T the case class to create a Codec from + * @return the Codec for the case class + */ + @compileTimeOnly("Creating a Codec utilises Macros and must be run at compile time.") + def createCodec[T](): Codec[T] = macro CaseClassCodec.createCodecBasicCodecRegistryEncodeNone[T] + + /** + * Creates a Codec for a case class + * + * @param codecRegistry the Codec Registry to use + * @tparam T the case class to create a codec from + * @return the Codec for the case class + */ + @compileTimeOnly("Creating a Codec utilises Macros and must be run at compile time.") + def createCodec[T](codecRegistry: CodecRegistry): Codec[T] = macro CaseClassCodec.createCodecEncodeNone[T] + + /** + * Creates a Codec for a case class + * + * @tparam T the case class to create a Codec from + * @return the Codec for the case class + * @since 2.1 + */ + @compileTimeOnly("Creating a Codec utilises Macros and must be run at compile time.") + def createCodecIgnoreNone[T](): Codec[T] = macro CaseClassCodec.createCodecBasicCodecRegistryIgnoreNone[T] + + /** + * Creates a Codec for a case class + * + * @param codecRegistry the Codec Registry to use + * @tparam T the case class to create a codec from + * @return the Codec for the case class + * @since 2.1 + */ + @compileTimeOnly("Creating a Codec utilises Macros and must be run at compile time.") + def createCodecIgnoreNone[T](codecRegistry: CodecRegistry): Codec[T] = macro CaseClassCodec.createCodecIgnoreNone[T] + +} diff --git 
a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/MutableDocumentCodec.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/MutableDocumentCodec.scala new file mode 100644 index 00000000000..c9a28fd5aeb --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/MutableDocumentCodec.scala @@ -0,0 +1,57 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs + +import org.bson.codecs.configuration.CodecRegistry +import org.bson.codecs.{ BsonDocumentCodec, CollectibleCodec, DecoderContext, EncoderContext } +import org.bson.{ BsonReader, BsonValue, BsonWriter } +import org.mongodb.scala.bson.collection.mutable.Document + +/** + * Companion helper for mutable Document instances. + */ +object MutableDocumentCodec { + def apply(): MutableDocumentCodec = MutableDocumentCodec(None) + def apply(registry: CodecRegistry): MutableDocumentCodec = MutableDocumentCodec(Some(registry)) +} + +/** + * A Codec for mutable Document instances. 
+ */ +case class MutableDocumentCodec(registry: Option[CodecRegistry]) extends CollectibleCodec[Document] { + + lazy val underlying: BsonDocumentCodec = { + registry.map(new BsonDocumentCodec(_)).getOrElse(new BsonDocumentCodec) + } + + override def generateIdIfAbsentFromDocument(document: Document): Document = { + underlying.generateIdIfAbsentFromDocument(document.underlying) + document + } + + override def documentHasId(document: Document): Boolean = underlying.documentHasId(document.underlying) + + override def getDocumentId(document: Document): BsonValue = underlying.getDocumentId(document.underlying) + + override def encode(writer: BsonWriter, value: Document, encoderContext: EncoderContext): Unit = + underlying.encode(writer, value.underlying, encoderContext) + + override def getEncoderClass: Class[Document] = classOf[Document] + + override def decode(reader: BsonReader, decoderContext: DecoderContext): Document = + Document(underlying.decode(reader, decoderContext)) +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/CaseClassCodec.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/CaseClassCodec.scala new file mode 100644 index 00000000000..a5e61754f1f --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/CaseClassCodec.scala @@ -0,0 +1,417 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.codecs.macrocodecs + +import scala.reflect.macros.whitebox +import org.bson.codecs.Codec +import org.bson.codecs.configuration.CodecRegistry +import org.mongodb.scala.bson.annotations.{ BsonIgnore, BsonProperty } + +private[codecs] object CaseClassCodec { + + def createCodecBasicCodecRegistryEncodeNone[T: c.WeakTypeTag](c: whitebox.Context)(): c.Expr[Codec[T]] = { + import c.universe._ + createCodecBasicCodecRegistry[T](c)(c.Expr[Boolean](q"true")) + } + + def createCodecEncodeNone[T: c.WeakTypeTag]( + c: whitebox.Context + )(codecRegistry: c.Expr[CodecRegistry]): c.Expr[Codec[T]] = { + import c.universe._ + createCodec[T](c)(codecRegistry, c.Expr[Boolean](q"true")) + } + + def createCodecBasicCodecRegistryIgnoreNone[T: c.WeakTypeTag](c: whitebox.Context)(): c.Expr[Codec[T]] = { + import c.universe._ + createCodecBasicCodecRegistry[T](c)(c.Expr[Boolean](q"false")) + } + + def createCodecIgnoreNone[T: c.WeakTypeTag]( + c: whitebox.Context + )(codecRegistry: c.Expr[CodecRegistry]): c.Expr[Codec[T]] = { + import c.universe._ + createCodec[T](c)(codecRegistry, c.Expr[Boolean](q"false")) + } + + def createCodecBasicCodecRegistry[T: c.WeakTypeTag]( + c: whitebox.Context + )(encodeNone: c.Expr[Boolean]): c.Expr[Codec[T]] = { + import c.universe._ + createCodec[T](c)( + c.Expr[CodecRegistry]( + q""" + import org.bson.codecs.{ BsonValueCodecProvider, ValueCodecProvider } + import org.bson.codecs.configuration.CodecRegistries.fromProviders + fromProviders( + DocumentCodecProvider(), + IterableCodecProvider(), + new ValueCodecProvider(), + new BsonValueCodecProvider() + ) + """ + ), + encodeNone + ) + } + + // scalastyle:off method.length + def createCodec[T: c.WeakTypeTag]( + c: whitebox.Context + )(codecRegistry: c.Expr[CodecRegistry], encodeNone: c.Expr[Boolean]): c.Expr[Codec[T]] = { + import c.universe._ + + // Declared types + val mainType = weakTypeOf[T] + + val stringType = typeOf[String] + val mapTypeSymbol = 
typeOf[collection.Map[_, _]].typeSymbol + + // Names + val classTypeName = mainType.typeSymbol.name.toTypeName + val codecName = TypeName(s"${classTypeName}MacroCodec") + + // Type checkers + def isCaseClass(t: Type): Boolean = { + // https://github.com/scala/bug/issues/7755 + val _ = t.typeSymbol.typeSignature + t.typeSymbol.isClass && t.typeSymbol.asClass.isCaseClass && !t.typeSymbol.isModuleClass + } + + def isCaseObject(t: Type): Boolean = t.typeSymbol.isModuleClass && t.typeSymbol.asClass.isCaseClass + def isMap(t: Type): Boolean = t.baseClasses.contains(mapTypeSymbol) + def isOption(t: Type): Boolean = t.typeSymbol == definitions.OptionClass + def isTuple(t: Type): Boolean = definitions.TupleClass.seq.contains(t.typeSymbol) + def isSealed(t: Type): Boolean = t.typeSymbol.isClass && t.typeSymbol.asClass.isSealed + def isAbstractSealed(t: Type): Boolean = isSealed(t) && t.typeSymbol.isAbstract + + def allSubclasses(s: Symbol): Set[Symbol] = { + val directSubClasses = s.asClass.knownDirectSubclasses + directSubClasses ++ directSubClasses.flatMap({ s: Symbol => + allSubclasses(s) + }) + } + val subClasses: List[Type] = + allSubclasses(mainType.typeSymbol).map(_.asClass.toType).filter(t => isCaseClass(t) || isCaseObject(t)).toList + if (isSealed(mainType) && subClasses.isEmpty) { + c.abort( + c.enclosingPosition, + s"No known subclasses of the sealed ${if (mainType.typeSymbol.asClass.isTrait) "trait" else "class"}" + ) + } + val knownTypes: List[Type] = (mainType +: subClasses).filterNot(_.typeSymbol.isAbstract).reverse + + def createTerms(t: Type): List[TermSymbol] = { + if (!isAbstractSealed(t)) { + val constructor = t.decl(termNames.CONSTRUCTOR) + if (!constructor.isMethod) c.abort(c.enclosingPosition, "No constructor, unsupported class type") + constructor.asMethod.paramLists match { + case h :: _ => h.map(_.asTerm) + case _ => List.empty + } + } else { + List.empty + } + } + + val terms = knownTypes.flatMap(t => createTerms(t)) + + val fields: 
Map[Type, List[(TermName, Type)]] = { + knownTypes + .map(t => + ( + t, + t.members.sorted + .filter(_.isMethod) + .map(_.asMethod) + .filter(m => m.isGetter && m.isParamAccessor) + .map(m => (m.name, m.returnType.asSeenFrom(t, t.typeSymbol))) + ) + ) + .toMap + } + + val classAnnotatedFieldsMap: Map[TermName, Constant] = { + terms + .flatMap(t => { + t.annotations + .find(a => a.tree.tpe eq typeOf[BsonProperty]) + .flatMap(_.tree.children.lastOption) + .map(tree => { + t.name -> tree.productElement(0).asInstanceOf[Constant] + }) + }) + .toMap + } + + val ignoredFields: Map[Type, Seq[(TermName, Tree)]] = { + knownTypes.map { tpe => + if (!isCaseClass(tpe)) { + (tpe, Nil) + } else { + val constructor = tpe.decl(termNames.CONSTRUCTOR) + if (!constructor.isMethod) c.abort(c.enclosingPosition, "No constructor, unsupported class type") + + val defaults = constructor.asMethod.paramLists.head + .map(_.asTerm) + .zipWithIndex + .filter(_._1.annotations.exists(_.tree.tpe == typeOf[BsonIgnore])) + .map { + case (p, i) => + if (p.isParamWithDefault) { + val getterName = TermName("apply$default$" + (i + 1)) + p.name -> q"${tpe.typeSymbol.companion}.$getterName" + } else { + c.abort( + c.enclosingPosition, + s"Field [${p.name}] with BsonIgnore annotation must have a default value" + ) + } + } + + tpe -> defaults + } + }.toMap + } + + // Data converters + def keyName(t: Type): Literal = Literal(Constant(t.typeSymbol.name.decodedName.toString)) + def keyNameTerm(t: TermName): Literal = Literal(classAnnotatedFieldsMap.getOrElse(t, Constant(t.toString))) + + // Primitives type map + val primitiveTypesMap: Map[Type, Type] = Map( + typeOf[Boolean] -> typeOf[java.lang.Boolean], + typeOf[Byte] -> typeOf[java.lang.Byte], + typeOf[Char] -> typeOf[java.lang.Character], + typeOf[Double] -> typeOf[java.lang.Double], + typeOf[Float] -> typeOf[java.lang.Float], + typeOf[Int] -> typeOf[java.lang.Integer], + typeOf[Long] -> typeOf[java.lang.Long], + typeOf[Short] -> typeOf[java.lang.Short] + ) 
+ + /* + * Flattens the type args for any given type. + * + * Removes the key field from Maps as they have to be strings. + * Removes Option type as the Option value is wrapped automatically below. + * Throws if the case class contains a Tuple + * + * @param at the type to flatten the arguments for + * @return a list of the type arguments for the type + */ + def flattenTypeArgs(at: Type): List[c.universe.Type] = { + val t = at.dealias + val typeArgs = t.typeArgs match { + case head :: _ if isMap(t) && !(head.erasure =:= stringType) => + c.abort(c.enclosingPosition, "Maps must contain string types for keys") + case _ :: tail if isMap(t) /* head.erasure =:= stringType */ => tail + case args => args + } + val types = t +: typeArgs.flatMap(x => flattenTypeArgs(x)) + if (types.exists(isTuple)) c.abort(c.enclosingPosition, "Tuples currently aren't supported in case classes") + types.filterNot(isOption).map(x => if (isCaseClass(x)) x else primitiveTypesMap.getOrElse(x.erasure, x)) + } + + /* + * Maps the given field names to type args for the values in the field + * + * ``` + * addresses: Seq[Address] => (addresses, List[classOf[Seq], classOf[Address]]) + * nestedAddresses: Seq[Seq[Address]] => (addresses, List[classOf[Seq], classOf[Seq], classOf[Address]]) + * ``` + * + * @return a map of the field names with a list of the contain types + */ + def createFieldTypeArgsMap(fields: List[(TermName, Type)]) = { + val setTypeArgs = fields.map({ + case (name, f) => + val key = keyNameTerm(name) + q""" + typeArgs += ($key -> { + val tpeArgs = mutable.ListBuffer.empty[Class[_]] + ..${flattenTypeArgs(f).map(t => + q"tpeArgs += classOf[${if (isCaseClass(t)) t.finalResultType else t.finalResultType.erasure}]" + )} + tpeArgs.toList + })""" + }) + + q""" + val typeArgs = mutable.Map[String, List[Class[_]]]() + ..$setTypeArgs + typeArgs.toMap + """ + } + + /* + * For each case class sets the Map of the given field names and their field types. 
+ */ + def createClassFieldTypeArgsMap = { + val setClassFieldTypeArgs = fields.map(field => q""" + classFieldTypeArgs += (${keyName(field._1)} -> ${createFieldTypeArgsMap(field._2)}) + """) + + q""" + val classFieldTypeArgs = mutable.Map[String, Map[String, List[Class[_]]]]() + ..$setClassFieldTypeArgs + classFieldTypeArgs.toMap + """ + } + + /* + * Creates a `Map[String, Class[_]]` mapping the case class name and the type. + * + * @return the case classes map + */ + def caseClassesMap = { + val setSubClasses = + knownTypes.map(t => q"caseClassesMap += (${keyName(t)} -> classOf[${t.finalResultType.erasure}])") + q""" + val caseClassesMap = mutable.Map[String, Class[_]]() + ..$setSubClasses + caseClassesMap.toMap + """ + } + + /* + * Creates a `Map[Class[_], Boolean]` mapping field types to a boolean representing if they are a case class. + * + * @return the class to case classes map + */ + def classToCaseClassMap = { + val flattenedFieldTypes = fields.flatMap({ case (t, types) => types.map(f => f._2) :+ t }) + val setClassToCaseClassMap = flattenedFieldTypes.map(t => + q"""classToCaseClassMap ++= ${flattenTypeArgs(t).map(t => + q"(classOf[${t.finalResultType.erasure}], ${isCaseClass(t) || isCaseObject(t) || isSealed(t)})" + )}""" + ) + + q""" + val classToCaseClassMap = mutable.Map[Class[_], Boolean]() + ..$setClassToCaseClassMap + classToCaseClassMap.toMap + """ + } + + /* + * Handles the writing of case class fields. 
+ * + * @param fields the list of fields + * @return the tree that writes the case class fields + */ + def writeClassValues(fields: List[(TermName, Type)], ignoredFields: Seq[(TermName, Tree)]): List[Tree] = { + fields + .filterNot { case (name, _) => ignoredFields.exists { case (iname, _) => name == iname } } + .map({ + case (name, f) => + val key = keyNameTerm(name) + f match { + case optional if isOption(optional) => q""" + val localVal = instanceValue.$name + if (localVal.isDefined) { + writer.writeName($key) + this.writeFieldValue($key, writer, localVal.get, encoderContext) + } else if ($encodeNone) { + writer.writeName($key) + this.writeFieldValue($key, writer, this.bsonNull, encoderContext) + }""" + case _ => q""" + val localVal = instanceValue.$name + writer.writeName($key) + this.writeFieldValue($key, writer, localVal, encoderContext) + """ + } + }) + } + + /* + * Writes the Case Class fields and values to the BsonWriter + */ + def writeValue: Tree = { + val cases: Seq[Tree] = { + fields.map { + case (classType, _) if isCaseObject(classType) => cq""" ${keyName(classType)} =>""" + case (classType, fields) => + cq""" ${keyName(classType)} => + val instanceValue = value.asInstanceOf[${classType}] + ..${writeClassValues(fields, ignoredFields(classType))}""" + }.toSeq + } :+ cq"""_ => throw new BsonInvalidOperationException("Unexpected class type: " + className)""" + q""" + writer.writeStartDocument() + this.writeClassFieldName(writer, className, encoderContext) + className match { case ..$cases } + writer.writeEndDocument() + """ + } + + def fieldSetters(fields: List[(TermName, Type)], ignoredFields: Seq[(TermName, Tree)]) = { + fields.map({ + case (name, f) => + val key = keyNameTerm(name) + val missingField = Literal(Constant(s"Missing field: $key")) + + ignoredFields.find { case (iname, _) => name == iname }.map(_._2) match { + case Some(default) => + q"$name = $default" + case None => + f match { + case optional if isOption(optional) => + q"$name = (if 
(fieldData.contains($key)) Option(fieldData($key)) else None).asInstanceOf[$f]" + case _ => + q"""$name = fieldData.getOrElse($key, throw new BsonInvalidOperationException($missingField)).asInstanceOf[$f]""" + } + } + }) + } + + def getInstance = { + val cases = knownTypes.map { st => + if (isCaseObject(st)) { + val instance = st.typeSymbol.asClass.module + cq"${keyName(st)} => $instance" + } else { + cq"${keyName(st)} => new $st(..${fieldSetters(fields(st), ignoredFields(st))})" + } + } :+ cq"""_ => throw new BsonInvalidOperationException("Unexpected class type: " + className)""" + q"className match { case ..$cases }" + } + + c.Expr[Codec[T]]( + q""" + import scala.collection.mutable + import org.bson.{ BsonInvalidOperationException, BsonWriter } + import org.bson.codecs.EncoderContext + import org.bson.codecs.configuration.CodecRegistry + import org.mongodb.scala.bson.codecs.macrocodecs.MacroCodec + + final case class $codecName(codecRegistry: CodecRegistry) extends { + val encoderClass = classOf[$classTypeName] + } with MacroCodec[$classTypeName] { + val caseClassesMap = $caseClassesMap + val classToCaseClassMap = $classToCaseClassMap + val classFieldTypeArgsMap = $createClassFieldTypeArgsMap + def getInstance(className: String, fieldData: Map[String, Any]) = $getInstance + def writeCaseClassData(className: String, writer: BsonWriter, value: $mainType, encoderContext: EncoderContext) = $writeValue + } + + ${codecName.toTermName}($codecRegistry).asInstanceOf[Codec[$mainType]] + """ + ) + } + // scalastyle:on method.length +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/CaseClassProvider.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/CaseClassProvider.scala new file mode 100644 index 00000000000..1eac1f3afd3 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/CaseClassProvider.scala @@ -0,0 +1,77 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs.macrocodecs + +import scala.reflect.macros.whitebox + +import org.bson.codecs.configuration.{ CodecProvider, CodecRegistry } + +private[codecs] object CaseClassProvider { + + def createCodecProviderEncodeNone[T: c.WeakTypeTag](c: whitebox.Context)(): c.Expr[CodecProvider] = { + import c.universe._ + createCodecProvider[T](c)(c.Expr[Boolean](q"true")) + } + + def createCodecProviderWithClassEncodeNone[T: c.WeakTypeTag]( + c: whitebox.Context + )(clazz: c.Expr[Class[T]]): c.Expr[CodecProvider] = { + import c.universe._ + createCodecProvider[T](c)(c.Expr[Boolean](q"true")) + } + + def createCodecProviderWithClassIgnoreNone[T: c.WeakTypeTag]( + c: whitebox.Context + )(clazz: c.Expr[Class[T]]): c.Expr[CodecProvider] = { + import c.universe._ + createCodecProvider[T](c)(c.Expr[Boolean](q"false")) + } + + def createCodecProviderIgnoreNone[T: c.WeakTypeTag](c: whitebox.Context)(): c.Expr[CodecProvider] = { + import c.universe._ + createCodecProvider[T](c)(c.Expr[Boolean](q"false")) + } + + def createCodecProvider[T: c.WeakTypeTag](c: whitebox.Context)(encodeNone: c.Expr[Boolean]): c.Expr[CodecProvider] = { + import c.universe._ + + // Declared type + val mainType = weakTypeOf[T] + + // Names + def exprCodecRegistry = c.Expr[CodecRegistry](q"codecRegistry") + def codec = CaseClassCodec.createCodec[T](c)(exprCodecRegistry, encodeNone) + + c.Expr[CodecProvider]( + q""" + 
import org.bson.codecs.Codec + import org.bson.codecs.configuration.{ CodecProvider, CodecRegistry } + + new CodecProvider { + @SuppressWarnings(Array("unchecked")) + def get[C](clazz: Class[C], codecRegistry: CodecRegistry): Codec[C] = { + if (classOf[$mainType].isAssignableFrom(clazz)) { + $codec.asInstanceOf[Codec[C]] + } else { + null + } + } + } + """ + ) + } +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/MacroCodec.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/MacroCodec.scala new file mode 100644 index 00000000000..e284647af87 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/MacroCodec.scala @@ -0,0 +1,254 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs.macrocodecs + +import scala.collection.JavaConverters._ +import scala.collection.mutable + +import org.bson._ +import org.bson.codecs.configuration.{ CodecRegistries, CodecRegistry } +import org.bson.codecs.{ Codec, DecoderContext, Encoder, EncoderContext } +import scala.collection.immutable.Vector + +import org.mongodb.scala.bson.BsonNull + +/** + * + * @tparam T the case class type for the codec + * @since 2.0 + */ +trait MacroCodec[T] extends Codec[T] { + + /** + * Creates a `Map[String, Class[_]]` mapping the case class name and the type. 
+ */ + val caseClassesMap: Map[String, Class[_]] + + /** + * Creates a `Map[Class[_], Boolean]` mapping field types to a boolean representing if they are a case class. + */ + val classToCaseClassMap: Map[Class[_], Boolean] + + /** + * A nested map of case class name to a Map of the given field names and a list of the field types. + */ + val classFieldTypeArgsMap: Map[String, Map[String, List[Class[_]]]] + + /** + * The case class type for the codec. + */ + val encoderClass: Class[T] + + /** + * The `CodecRegistry` for use with the codec. + */ + val codecRegistry: CodecRegistry + + /** + * Creates a new instance of the case class with the provided data + * + * @param className the name of the class to be instantiated + * @param fieldsData the Map of data for the class + * @return the new instance of the class + */ + def getInstance(className: String, fieldsData: Map[String, Any]): T + + /** + * The method that writes the data for the case class + * + * @param className the name of the current case class being written + * @param writer the `BsonWriter` + * @param value the value of the case class + * @param encoderContext the `EncoderContext` + */ + def writeCaseClassData(className: String, writer: BsonWriter, value: T, encoderContext: EncoderContext): Unit + + /** + * The field used to save the class name when saving sealed case classes. 
+ */ + val classFieldName = "_t" + lazy val hasClassFieldName: Boolean = caseClassesMapInv.keySet != Set(encoderClass) + lazy val caseClassesMapInv: Map[Class[_], String] = caseClassesMap.map(_.swap) + protected val registry: CodecRegistry = + CodecRegistries.fromRegistries(List(codecRegistry, CodecRegistries.fromCodecs(this)).asJava) + protected val bsonNull = BsonNull() + + override def encode(writer: BsonWriter, value: T, encoderContext: EncoderContext): Unit = { + if (value == null) { // scalastyle:ignore + throw new BsonInvalidOperationException(s"Invalid value for $encoderClass found a `null` value.") + } + writeValue(writer, value, encoderContext) + } + + override def decode(reader: BsonReader, decoderContext: DecoderContext): T = { + val className = getClassName(reader, decoderContext) + val fieldTypeArgsMap = classFieldTypeArgsMap(className) + val map = mutable.Map[String, Any]() + reader.readStartDocument() + while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { + val name = reader.readName + val typeArgs = if (name == classFieldName) List(classOf[String]) else fieldTypeArgsMap.getOrElse(name, List.empty) + if (typeArgs.isEmpty) { + reader.skipValue() + } else { + map += (name -> readValue(reader, decoderContext, typeArgs.head, typeArgs.tail)) + } + } + reader.readEndDocument() + getInstance(className, map.toMap) + } + + override def getEncoderClass: Class[T] = encoderClass + + protected def getClassName(reader: BsonReader, decoderContext: DecoderContext): String = { + if (hasClassFieldName) { + // Find the class name + @scala.annotation.tailrec + def readOptionalClassName(): Option[String] = { + if (reader.readBsonType == BsonType.END_OF_DOCUMENT) { + None + } else if (reader.readName == classFieldName) { + Some(codecRegistry.get(classOf[String]).decode(reader, decoderContext)) + } else { + reader.skipValue() + readOptionalClassName() + } + } + + val mark: BsonReaderMark = reader.getMark() + reader.readStartDocument() + val optionalClassName: 
Option[String] = readOptionalClassName() + mark.reset() + + val className = optionalClassName.getOrElse { + throw new BsonInvalidOperationException(s"Could not decode sealed case class. Missing '$classFieldName' field.") + } + + if (!caseClassesMap.contains(className)) { + throw new BsonInvalidOperationException(s"Could not decode sealed case class, unknown class $className.") + } + className + } else { + caseClassesMap.head._1 + } + } + + protected def writeClassFieldName(writer: BsonWriter, className: String, encoderContext: EncoderContext): Unit = { + if (hasClassFieldName) { + writer.writeName(classFieldName) + this.writeValue(writer, className, encoderContext) + } + } + + protected def writeFieldValue[V]( + fieldName: String, + writer: BsonWriter, + value: V, + encoderContext: EncoderContext + ): Unit = { + if (value == null) { // scalastyle:ignore + throw new BsonInvalidOperationException(s"Invalid value for $fieldName found a `null` value.") + } + writeValue(writer, value, encoderContext) + } + + protected def writeValue[V](writer: BsonWriter, value: V, encoderContext: EncoderContext): Unit = { + val clazz = value.getClass + caseClassesMapInv.get(clazz) match { + case Some(className) => + writeCaseClassData(className: String, writer: BsonWriter, value.asInstanceOf[T], encoderContext: EncoderContext) + case None => + val codec = registry.get(clazz).asInstanceOf[Encoder[V]] + encoderContext.encodeWithChildContext(codec, writer, value) + } + } + + protected def readValue[V]( + reader: BsonReader, + decoderContext: DecoderContext, + clazz: Class[V], + typeArgs: List[Class[_]] + ): V = { + val currentType = reader.getCurrentBsonType + currentType match { + case BsonType.DOCUMENT => readDocument(reader, decoderContext, clazz, typeArgs) + case BsonType.ARRAY => readArray(reader, decoderContext, clazz, typeArgs) + case BsonType.NULL => + reader.readNull() + null.asInstanceOf[V] // scalastyle:ignore + case _ => registry.get(clazz).decode(reader, decoderContext) + } + 
} + + protected def readArray[V]( + reader: BsonReader, + decoderContext: DecoderContext, + clazz: Class[V], + typeArgs: List[Class[_]] + ): V = { + + if (typeArgs.isEmpty) { + throw new BsonInvalidOperationException( + s"Invalid Bson format for '${clazz.getSimpleName}'. Found a list but there is no type data." + ) + } + reader.readStartArray() + val list = mutable.ListBuffer[Any]() + while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { + list.append(readValue(reader, decoderContext, typeArgs.head, typeArgs.tail)) + } + reader.readEndArray() + if (classOf[Set[_]].isAssignableFrom(clazz)) { + list.toSet.asInstanceOf[V] + } else if (classOf[Vector[_]].isAssignableFrom(clazz)) { + list.toVector.asInstanceOf[V] + } else if (classOf[Stream[_]].isAssignableFrom(clazz)) { + list.toStream.asInstanceOf[V] + } else { + list.toList.asInstanceOf[V] + } + } + + protected def readDocument[V]( + reader: BsonReader, + decoderContext: DecoderContext, + clazz: Class[V], + typeArgs: List[Class[_]] + ): V = { + if (classToCaseClassMap.getOrElse(clazz, false) || typeArgs.isEmpty) { + registry.get(clazz).decode(reader, decoderContext) + } else { + val map = mutable.Map[String, Any]() + reader.readStartDocument() + while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { + val name = reader.readName + if (typeArgs.isEmpty) { + reader.skipValue() + } else { + map += (name -> readValue( + reader, + decoderContext, + typeArgs.head, + typeArgs.tail + )) + } + } + reader.readEndDocument() + map.toMap.asInstanceOf[V] + } + } +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/package.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/package.scala new file mode 100644 index 00000000000..0ce606ad1cb --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/codecs/package.scala @@ -0,0 +1,38 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson + +package object codecs { + + /** + * Type alias to the `BsonTypeClassMap` + */ + type BsonTypeClassMap = org.bson.codecs.BsonTypeClassMap + + /** + * Companion to return the default `BsonTypeClassMap` + */ + object BsonTypeClassMap { + def apply(): BsonTypeClassMap = new BsonTypeClassMap() + } + + /** + * Type alias to the `BsonTypeCodecMap` + */ + type BsonTypeCodecMap = org.bson.codecs.BsonTypeCodecMap + +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/collection/BaseDocument.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/collection/BaseDocument.scala new file mode 100644 index 00000000000..69d25ca69d9 --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/collection/BaseDocument.scala @@ -0,0 +1,257 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.collection + +import scala.collection.JavaConverters._ +import scala.collection.{ GenTraversableOnce, Traversable } +import scala.reflect.ClassTag +import scala.util.{ Failure, Success, Try } + +import org.bson.json.JsonWriterSettings + +import org.mongodb.scala.bson.DefaultHelper._ +import org.mongodb.scala.bson._ +import org.bson.codecs.configuration.CodecRegistry +import org.mongodb.scala.bson.conversions.Bson + +import org.mongodb.scala.bson.BsonMagnets + +/** + * Base Document trait. + * + * A strictly typed `Traversable[(String, BsonValue)]` and provides the underlying immutable document behaviour. + * See [[immutable.Document]] or [[mutable.Document]] for the concrete implementations. + * + * @tparam T The concrete Document implementation + */ +private[bson] trait BaseDocument[T] extends Traversable[(String, BsonValue)] with Bson { + + import BsonMagnets._ + + /** + * The underlying bson document + * + * Restricted access to the underlying BsonDocument + */ + protected[scala] val underlying: BsonDocument + + /** + * Create a concrete document instance + * + * @param underlying the underlying BsonDocument + * @return a concrete document instance + */ + protected[scala] def apply(underlying: BsonDocument): T + + /** + * Retrieves the value which is associated with the given key or throws a `NoSuchElementException`. + * + * @param key the key + * @return the value associated with the given key, or throws `NoSuchElementException`. + */ + def apply[TResult <: BsonValue]( + key: String + )(implicit e: TResult DefaultsTo BsonValue, ct: ClassTag[TResult]): TResult = { + get[TResult](key) match { + case Some(value) => value + case None => throw new NoSuchElementException("key not found: " + key) + } + } + + /** + * Returns the value associated with a key, or a default value if the key is not contained in the map. + * @param key the key. + * @param default The default value in case no binding for `key` is found in the Document. 
+ * This can be any [[BsonValue]] type or any native type that has an implicit [[BsonTransformer]] in scope. + * @tparam B the result type of the default computation. + * @return the value associated with `key` if it exists, + * otherwise the result of the `default` computation. + */ + def getOrElse[B >: BsonValue](key: String, default: CanBeBsonValue): B = get(key) match { + case Some(v) => v + case None => default.value + } + + // scalastyle:off spaces.after.plus method.name + /** + * Creates a new document containing a new key/value and all the existing key/values. + * + * Mapping `kv` will override existing mappings from this document with the same key. + * + * @param elems the key/value mapping to be added. This can be any valid `(String, BsonValue)` pair that can be transformed into a + * [[BsonElement]] via [[BsonMagnets.CanBeBsonElement]] implicits and any [[BsonTransformer]]s that are in scope. + * @return a new document containing mappings of this document and the mapping `kv`. + */ + def +(elems: CanBeBsonElement*): T = { + val bsonDocument: BsonDocument = copyBsonDocument() + elems.foreach(elem => bsonDocument.put(elem.key, elem.value)) + apply(bsonDocument) + } + // scalastyle:on spaces.after.plus + + /** + * Removes one or more elements from this document and returns a new document. + * + * @param elems the remaining elements to remove. + * @return A new document with the keys removed. + */ + def -(elems: String*): T = --(elems) + + /** + * Removes a number of elements provided by a traversable object and returns a new document without the removed elements. + * + * @param xs the traversable object consisting of the keys to remove. + * @return a new document with the bindings of this document minus the keys in `xs`. 
+ */ + def --(xs: GenTraversableOnce[String]): T = { + val keysToIgnore = xs.toList + val newUnderlying = new BsonDocument() + for ((k, v) <- iterator if !keysToIgnore.contains(k)) { + newUnderlying.put(k, v) + } + apply(newUnderlying) + } + // scalastyle:on method.name + + /** + * Creates a new Document consisting of all key/value pairs of the current document + * plus a new pair of a given key and value. + * + * @param key The key to add + * @param value The new value + * @return A fresh immutable document with the binding from `key` to `value` added to the new document. + */ + def updated[B](key: String, value: B)(implicit transformer: BsonTransformer[B]): T = this + ((key, value)) + + /** + * Creates a new Document consisting of all key/value pairs of the current document + * plus a new pair of a given key and value. + * + * @param elems The key/values to add. This can be any valid `(String, BsonValue)` pair that can be transformed into a + * [[BsonElement]] via [[BsonMagnets.CanBeBsonElement]] implicits and any [[BsonTransformer]]s that are in scope. + * @return A fresh immutable document with the binding from `key` to `value` added to the new document. + */ + def updated(elems: CanBeBsonElement*): T = this + (elems: _*) + + /** + * Optionally returns the value associated with a key. + * + * @param key the key we want to lookup + * @return an option value containing the value associated with `key` in this document, + * or `None` if none exists. 
+ */ + def get[TResult <: BsonValue]( + key: String + )(implicit e: TResult DefaultsTo BsonValue, ct: ClassTag[TResult]): Option[TResult] = { + underlying.containsKey(key) match { + case true => + Try(ct.runtimeClass.cast(underlying.get(key))) match { + case Success(v) => Some(v.asInstanceOf[TResult]) + case Failure(ex) => None + } + case false => None + } + } + + /** + * Creates a new iterator over all key/value pairs in this document + * + * @return the new iterator + */ + def iterator: Iterator[(String, BsonValue)] = underlying.asScala.iterator + + /** + * Filters this document by retaining only keys satisfying a predicate. + * @param p the predicate used to test keys + * @return a new document consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. + */ + def filterKeys(p: String => Boolean): T = this -- keys.filterNot(p) + + /** + * Tests whether this map contains a binding for a key + * + * @param key the key + * @return true if there is a binding for key in this document, false otherwise. + */ + def contains(key: String): Boolean = underlying.containsKey(key) + + /** + * Collects all keys of this document in a set. + * + * @return a set containing all keys of this document. + */ + def keySet: Set[String] = underlying.keySet().asScala.toSet + + /** + * Collects all keys of this document in an iterable collection. + * + * @return the keys of this document as an iterable. + */ + def keys: Iterable[String] = keySet.toIterable + + /** + * Creates an iterator for all keys. + * + * @return an iterator over all keys. + */ + def keysIterator: Iterator[String] = keySet.toIterator + + /** + * Collects all values of this document in an iterable collection. + * + * @return the values of this document as an iterable. + */ + def values: Iterable[BsonValue] = underlying.values().asScala + + /** + * Creates an iterator for all values in this document. 
+ * + * @return an iterator over all values that are associated with some key in this document. + */ + def valuesIterator: Iterator[BsonValue] = values.toIterator + + /** + * Gets a JSON representation of this document + * + * @return a JSON representation of this document + */ + def toJson(): String = underlying.toJson + + /** + * Gets a JSON representation of this document using the given `JsonWriterSettings`. + * @param settings the JSON writer settings + * @return a JSON representation of this document + */ + def toJson(settings: JsonWriterSettings): String = underlying.toJson(settings) + + override def toBsonDocument: BsonDocument = underlying + + override def toBsonDocument[TDocument](documentClass: Class[TDocument], codecRegistry: CodecRegistry): BsonDocument = + underlying + + /** + * Copies the BsonDocument + * @return the copied BsonDocument + */ + private[collection] def copyBsonDocument(): BsonDocument = { + val bsonDocument = BsonDocument() + for (entry <- underlying.entrySet().asScala) bsonDocument.put(entry.getKey, entry.getValue) + bsonDocument + } + +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/collection/package.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/collection/package.scala new file mode 100644 index 00000000000..7ea56e96b0c --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/collection/package.scala @@ -0,0 +1,40 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson + +/** + * The collection package. + */ +package object collection { + + /** + * An immutable Document implementation. + * + * A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there + * is no variance in the value type and it always has to be a `BsonValue`. + */ + type Document = immutable.Document + + /** + * An immutable Document implementation. + * + * A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there + * is no variance in the value type and it always has to be a `BsonValue`. + */ + val Document = immutable.Document + +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/conversions/package.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/conversions/package.scala new file mode 100644 index 00000000000..977fbfd89fb --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/conversions/package.scala @@ -0,0 +1,28 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson + +/** + * The conversions package. 
+ */ +package object conversions { + + /** + * Type alias to the Bson interface - an interface for types that are able to render themselves into a `BsonDocument`. + */ + type Bson = org.bson.conversions.Bson +} diff --git a/bson-scala/src/main/scala/org/mongodb/scala/bson/package.scala b/bson-scala/src/main/scala/org/mongodb/scala/bson/package.scala new file mode 100644 index 00000000000..1da0979f1fd --- /dev/null +++ b/bson-scala/src/main/scala/org/mongodb/scala/bson/package.scala @@ -0,0 +1,178 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala + +/** + * The bson package, contains mirrors and companion objects for `Bson` values. + */ +package object bson { + + /** + * An immutable Document implementation. + * + * A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there + * is no variance in the value type and it always has to be a `BsonValue`. + */ + type Document = collection.Document + + /** + * An immutable Document implementation. + * + * A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there + * is no variance in the value type and it always has to be a `BsonValue`. 
+ */ + val Document = collection.Document + + /** + * Alias to `org.bson.BsonArray` + */ + type BsonArray = org.bson.BsonArray + + /** + * Alias to `org.bson.BsonBinary` + */ + type BsonBinary = org.bson.BsonBinary + + /** + * Alias to `org.bson.BsonBoolean` + */ + type BsonBoolean = org.bson.BsonBoolean + + /** + * Alias to `org.bson.BsonDateTime` + */ + type BsonDateTime = org.bson.BsonDateTime + + /** + * Alias to `org.bson.BsonDecimal128` + * @since 1.2 + */ + type BsonDecimal128 = org.bson.BsonDecimal128 + + /** + * Alias to `org.bson.BsonDocument` + */ + type BsonDocument = org.bson.BsonDocument + + /** + * Alias to `org.bson.BsonDouble` + */ + type BsonDouble = org.bson.BsonDouble + + /** + * Alias to `org.bson.BsonInt32` + */ + type BsonInt32 = org.bson.BsonInt32 + + /** + * Alias to `org.bson.BsonInt64` + */ + type BsonInt64 = org.bson.BsonInt64 + + /** + * Alias to `org.bson.BsonJavaScript` + */ + type BsonJavaScript = org.bson.BsonJavaScript + + /** + * Alias to `org.bson.BsonJavaScriptWithScope` + */ + type BsonJavaScriptWithScope = org.bson.BsonJavaScriptWithScope + + /** + * Alias to `org.bson.BsonMaxKey` + */ + type BsonMaxKey = org.bson.BsonMaxKey + + /** + * Alias to `org.bson.BsonMinKey` + */ + type BsonMinKey = org.bson.BsonMinKey + + /** + * Alias to `org.bson.BsonNull` + */ + type BsonNull = org.bson.BsonNull + + /** + * Alias to `org.bson.BsonNumber` + */ + type BsonNumber = org.bson.BsonNumber + + /** + * Alias to `org.bson.BsonObjectId` + */ + type BsonObjectId = org.bson.BsonObjectId + + /** + * Alias to `org.bson.BsonRegularExpression` + */ + type BsonRegularExpression = org.bson.BsonRegularExpression + + /** + * Alias to `org.bson.BsonString` + */ + type BsonString = org.bson.BsonString + + /** + * Alias to `org.bson.BsonSymbol` + */ + type BsonSymbol = org.bson.BsonSymbol + + /** + * Alias to `org.bson.BsonTimestamp` + */ + type BsonTimestamp = org.bson.BsonTimestamp + + /** + * Alias to `org.bson.BsonUndefined` + */ + type BsonUndefined 
= org.bson.BsonUndefined + + /** + * Alias to `org.bson.BsonValue` + */ + type BsonValue = org.bson.BsonValue + + /** + * Alias to `org.bson.BsonElement` + */ + type BsonElement = org.bson.BsonElement + + /** + * Alias to `org.bson.ObjectId` + * @since 1.2 + */ + type ObjectId = org.bson.types.ObjectId + + /** + * Alias to `org.bson.Decimal128` + * @since 1.2 + */ + type Decimal128 = org.bson.types.Decimal128 + + /** + * Implicit value class for a [[BsonElement]] allowing easy access to the key/value pair + * + * @param self the bsonElement + */ + implicit class RichBsonElement(val self: BsonElement) extends AnyVal { + def key: String = self.getName + def value: BsonValue = self.getValue + } + +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/BaseSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/BaseSpec.scala new file mode 100644 index 00000000000..4ac18c5b31c --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/BaseSpec.scala @@ -0,0 +1,21 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.mongodb.scala.bson + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +abstract class BaseSpec extends AnyFlatSpec with Matchers {} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/BsonTransformerSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/BsonTransformerSpec.scala new file mode 100644 index 00000000000..79629d04151 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/BsonTransformerSpec.scala @@ -0,0 +1,94 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson + +import java.util.Date + +import org.mongodb.scala.bson.collection.{ immutable, mutable } + +import scala.language.implicitConversions + +class BsonTransformerSpec extends BaseSpec { + + "The BsonTransformer companion" should "not transform BsonValues" in { + transform(BsonString("abc")) should equal(BsonString("abc")) + } + it should "transform Binary" in { + transform(Array[Byte](128.toByte)) should equal(BsonBinary(Array[Byte](128.toByte))) + } + it should "transform BigDecimal" in { + transform(BigDecimal(100)) should equal(BsonDecimal128(100)) + } + it should "transform Boolean" in { + transform(true) should equal(BsonBoolean(true)) + } + it should "transform DateTime" in { + transform(new Date(100)) should equal(BsonDateTime(100)) + } + it should "transform Decimal128" in { + transform(new Decimal128(100)) should equal(BsonDecimal128(100)) + } + it should "transform Double" in { + transform(2.0) should equal(BsonDouble(2.0)) + } + it should "transform ImmutableDocument" in { + transform(immutable.Document("a" -> 1, "b" -> "two", "c" -> false)) should equal( + BsonDocument("a" -> 1, "b" -> "two", "c" -> false) + ) + } + + it should "transform Int" in { + transform(1) should equal(BsonInt32(1)) + } + it should "transform KeyValuePairs[T]" in { + transform(Seq("a" -> "a", "b" -> "b", "c" -> "c")) should equal(BsonDocument("a" -> "a", "b" -> "b", "c" -> "c")) + } + it should "transform Long" in { + transform(1L) should equal(BsonInt64(1)) + } + it should "transform MutableDocument" in { + transform(mutable.Document("a" -> 1, "b" -> "two", "c" -> false)) should equal( + BsonDocument("a" -> 1, "b" -> "two", "c" -> false) + ) + } + it should "transform None" in { + transform(None) should equal(BsonNull()) + } + it should "transform ObjectId" in { + val objectId = new ObjectId() + transform(objectId) should equal(BsonObjectId(objectId)) + } + it should "transform Option[T]" in { + transform(Some(1)) should equal(new 
BsonInt32(1)) + } + it should "transform Regex" in { + transform("/.*/".r) should equal(BsonRegularExpression("/.*/")) + } + it should "transform Seq[T]" in { + transform(Seq("a", "b", "c")) should equal(BsonArray("a", "b", "c")) + } + it should "transform String" in { + transform("abc") should equal(BsonString("abc")) + } + + it should "throw a runtime exception when no transformer available" in { + "transform(BigInt(12))" shouldNot compile + } + + implicit def transform[T](v: T)(implicit transformer: BsonTransformer[T]): BsonValue = transformer(v) + +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/BsonValueSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/BsonValueSpec.scala new file mode 100644 index 00000000000..f7040afc427 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/BsonValueSpec.scala @@ -0,0 +1,152 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson + +import java.util.Date + +import scala.collection.JavaConverters._ + +class BsonValueSpec extends BaseSpec { + + "BsonArray companion" should "create a BsonArray" in { + BsonArray() should equal(new BsonArray()) + + val values: List[BsonNumber] = List(BsonInt32(1), BsonInt64(2), new BsonDouble(3.0)) + val bsonArray = BsonArray.fromIterable(values) + val expected = new BsonArray(values.asJava) + + bsonArray should equal(expected) + + val implicitBsonArray = BsonArray(1, 2L, 3.0) + implicitBsonArray should equal(expected) + } + + "BsonBinary companion" should "create a BsonBinary" in { + val byteArray = Array[Byte](80.toByte, 5, 4, 3, 2, 1) + BsonBinary(byteArray) should equal(new BsonBinary(byteArray)) + } + + "BsonBoolean companion" should "create a BsonBoolean" in { + BsonBoolean(false) should equal(new BsonBoolean(false)) + BsonBoolean(true) should equal(new BsonBoolean(true)) + } + + "BsonDateTime companion" should "create a BsonDateTime" in { + val date = new Date() + + BsonDateTime(date) should equal(new BsonDateTime(date.getTime)) + BsonDateTime(1000) should equal(new BsonDateTime(1000)) + } + + "BsonDecimal128 companion" should "create a BsonDecimal128" in { + val expected = new BsonDecimal128(new Decimal128(100)) + + BsonDecimal128(100) should equal(expected) + BsonDecimal128("100") should equal(expected) + BsonDecimal128(BigDecimal(100)) should equal(expected) + BsonDecimal128(new Decimal128(100)) should equal(expected) + } + + "BsonDocument companion" should "create a BsonDocument" in { + val expected = new BsonDocument("a", BsonInt32(1)) + expected.put("b", BsonDouble(2.0)) + + BsonDocument() should equal(new BsonDocument()) + BsonDocument("a" -> 1, "b" -> 2.0) should equal(expected) + BsonDocument(Seq(("a", BsonInt32(1)), ("b", BsonDouble(2.0)))) should equal(expected) + BsonDocument("{a: 1, b: 2.0}") should equal(expected) + } + + "BsonDouble companion" should "create a BsonDouble" in { + BsonDouble(2.0) should 
equal(new BsonDouble(2.0)) + } + + "BsonInt32 companion" should "create a BsonInt32" in { + BsonInt32(1) should equal(new BsonInt32(1)) + } + + "BsonInt64 companion" should "create a BsonInt64" in { + BsonInt64(1) should equal(new BsonInt64(1)) + } + + "BsonJavaScript companion" should "create a BsonJavaScript" in { + BsonJavaScript("function(){}") should equal(new BsonJavaScript("function(){}")) + } + + "BsonJavaScriptWithScope companion" should "create a BsonJavaScriptWithScope" in { + val function = "function(){}" + val scope = new BsonDocument("a", new BsonInt32(1)) + val expected = new BsonJavaScriptWithScope(function, scope) + + BsonJavaScriptWithScope(function, scope) should equal(expected) + BsonJavaScriptWithScope(function, "a" -> 1) should equal(expected) + BsonJavaScriptWithScope(function, Document("a" -> 1)) should equal(expected) + } + + "BsonMaxKey companion" should "create a BsonMaxKey" in { + BsonMaxKey() should equal(new BsonMaxKey()) + } + + "BsonMinKey companion" should "create a BsonMinKey" in { + BsonMinKey() should equal(new BsonMinKey()) + } + + "BsonNull companion" should "create a BsonNull" in { + BsonNull() should equal(new BsonNull()) + } + + "BsonNumber companion" should "create a BsonNumber" in { + BsonNumber(1) should equal(BsonInt32(1)) + BsonNumber(1L) should equal(BsonInt64(1)) + BsonNumber(1.0) should equal(BsonDouble(1.0)) + } + + "BsonObjectId companion" should "create a BsonObjectId" in { + val bsonObjectId = BsonObjectId() + val objectId = bsonObjectId.getValue + val hexString = objectId.toHexString + val expected = new BsonObjectId(bsonObjectId.getValue) + + bsonObjectId should equal(expected) + BsonObjectId(hexString) should equal(expected) + BsonObjectId(objectId) should equal(expected) + } + + "BsonRegularExpression companion" should "create a BsonRegularExpression" in { + BsonRegularExpression("/(.*)/") should equal(new BsonRegularExpression("/(.*)/")) + BsonRegularExpression("/(.*)/".r) should equal(new 
BsonRegularExpression("/(.*)/")) + BsonRegularExpression("/(.*)/", "?i") should equal(new BsonRegularExpression("/(.*)/", "?i")) + } + + "BsonString companion" should "create a BsonString" in { + BsonString("aBc") should equal(new BsonString("aBc")) + } + + "BsonSymbol companion" should "create a BsonSymbol" in { + BsonSymbol(Symbol("sym")) should equal(new BsonSymbol("sym")) + } + + "BsonTimestamp companion" should "create a BsonTimestamp" in { + BsonTimestamp() should equal(new BsonTimestamp(0, 0)) + BsonTimestamp(10, 1) should equal(new BsonTimestamp(10, 1)) + } + + "BsonUndefined companion" should "create a BsonUndefined" in { + BsonUndefined() should equal(new BsonUndefined()) + } + +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/DocumentCodecProviderSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/DocumentCodecProviderSpec.scala new file mode 100644 index 00000000000..7fb08f842c1 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/DocumentCodecProviderSpec.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.codecs + +import org.bson.codecs.configuration.CodecRegistries.fromProviders +import org.mongodb.scala.bson.BaseSpec +import org.mongodb.scala.bson.collection.{ immutable, mutable, Document } + +class DocumentCodecProviderSpec extends BaseSpec { + + "DocumentCodecProvider" should "get the correct codec" in { + + val provider = DocumentCodecProvider() + val registry = fromProviders(provider) + + provider.get[Document](classOf[Document], registry) shouldBe a[ImmutableDocumentCodec] + provider.get[immutable.Document](classOf[immutable.Document], registry) shouldBe a[ImmutableDocumentCodec] + provider.get[mutable.Document](classOf[mutable.Document], registry) shouldBe a[MutableDocumentCodec] + Option(provider.get[String](classOf[String], registry)) shouldBe None + } +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/ImmutableDocumentCodecSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/ImmutableDocumentCodecSpec.scala new file mode 100644 index 00000000000..74c6436f5bc --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/ImmutableDocumentCodecSpec.scala @@ -0,0 +1,130 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.codecs + +import java.nio.ByteBuffer +import java.util.Date + +import org.bson._ +import org.bson.codecs.configuration.CodecRegistry +import org.bson.codecs.{ DecoderContext, EncoderContext } +import org.bson.io.{ BasicOutputBuffer, ByteBufferBsonInput } +import org.bson.types.ObjectId +import org.mongodb.scala.bson.BaseSpec +import org.mongodb.scala.bson.codecs.Registry.DEFAULT_CODEC_REGISTRY +import org.mongodb.scala.bson.collection.immutable.Document + +import scala.collection.JavaConverters._ + +class ImmutableDocumentCodecSpec extends BaseSpec { + + val registry: CodecRegistry = DEFAULT_CODEC_REGISTRY + + "MutableDocumentCodec" should "encode and decode all default types with readers and writers" in { + val original: Document = Document( + "binary" -> new BsonBinary("bson".toCharArray map (_.toByte)), + "boolean" -> new BsonBoolean(true), + "dateTime" -> new BsonDateTime(new Date().getTime), + "double" -> new BsonDouble(1.0), + "int" -> new BsonInt32(1), + "long" -> new BsonInt64(1L), + "null" -> new BsonNull(), + "objectId" -> new BsonObjectId(new ObjectId()), + "regEx" -> new BsonRegularExpression("^bson".r.regex), + "string" -> new BsonString("string"), + "symbol" -> new BsonSymbol(Symbol("bson").name), + "bsonDocument" -> new BsonDocument("a", new BsonString("string")), + "array" -> new BsonArray(List(new BsonString("string"), new BsonBoolean(false)).asJava) + ) + + info("encoding") + val writer: BsonBinaryWriter = new BsonBinaryWriter(new BasicOutputBuffer()) + ImmutableDocumentCodec(registry).encode(writer, original, EncoderContext.builder().build()) + + info("decoding") + val buffer: BasicOutputBuffer = writer.getBsonOutput().asInstanceOf[BasicOutputBuffer]; + val reader: BsonBinaryReader = new BsonBinaryReader( + new ByteBufferBsonInput( + new ByteBufNIO(ByteBuffer.wrap(buffer.toByteArray)) + ) + ) + + val decodedDocument = ImmutableDocumentCodec().decode(reader, DecoderContext.builder().build()) + + 
decodedDocument shouldBe a[Document] + original should equal(decodedDocument) + } + + it should "respect encodeIdFirst property in encoder context" in { + val original: Document = Document( + "a" -> new BsonString("string"), + "_id" -> new BsonInt32(1), + "nested" -> Document("a" -> new BsonString("string"), "_id" -> new BsonInt32(1)).toBsonDocument + ) + + info("encoding") + val writer: BsonBinaryWriter = new BsonBinaryWriter(new BasicOutputBuffer()) + ImmutableDocumentCodec(registry).encode( + writer, + original, + EncoderContext.builder().isEncodingCollectibleDocument(true).build() + ) + + info("decoding") + val buffer: BasicOutputBuffer = writer.getBsonOutput().asInstanceOf[BasicOutputBuffer]; + val reader: BsonBinaryReader = + new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(buffer.toByteArray)))) + + val decodedDocument = ImmutableDocumentCodec().decode(reader, DecoderContext.builder().build()) + + decodedDocument shouldBe a[Document] + original should equal(decodedDocument) + decodedDocument.keys.toList should contain theSameElementsInOrderAs (List("_id", "a", "nested")) + + Document(decodedDocument[BsonDocument]("nested")).keys.toList should contain theSameElementsInOrderAs (List( + "a", + "_id" + )) + } + + it should "encoder class should work as expected" in { + ImmutableDocumentCodec().getEncoderClass should equal(classOf[Document]) + } + + it should "determine if document has an _id" in { + ImmutableDocumentCodec().documentHasId(Document()) should be(false) + ImmutableDocumentCodec().documentHasId(Document("_id" -> new BsonInt32(1))) should be(true) + } + + it should "get the document_id" in { + ImmutableDocumentCodec().getDocumentId(Document()) should be(null) + ImmutableDocumentCodec().getDocumentId(Document("_id" -> new BsonInt32(1))) should be(new BsonInt32(1)) + } + + it should "generate document id if absent but not mutate original document" in { + val document = Document() + val document2 = 
ImmutableDocumentCodec().generateIdIfAbsentFromDocument(document) + document.contains("_id") shouldBe false + document2("_id") shouldBe a[BsonObjectId] + } + + it should "not generate document id if present" in { + val document = Document("_id" -> new BsonInt32(1)) + ImmutableDocumentCodec().generateIdIfAbsentFromDocument(document) + document("_id") should equal(new BsonInt32(1)) + } +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/IterableCodecProviderSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/IterableCodecProviderSpec.scala new file mode 100644 index 00000000000..2e9fb983a90 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/IterableCodecProviderSpec.scala @@ -0,0 +1,36 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.codecs + +import org.bson.codecs.configuration.CodecRegistries.fromProviders +import org.mongodb.scala.bson.BaseSpec + +class IterableCodecProviderSpec extends BaseSpec { + + "IterableCodecProvider" should "get the correct codec" in { + + val provider = IterableCodecProvider() + val registry = fromProviders(provider) + + provider.get[Iterable[Any]](classOf[Iterable[Any]], registry) shouldBe a[IterableCodec] + provider.get[List[String]](classOf[List[String]], registry) shouldBe a[IterableCodec] + provider.get[Seq[Integer]](classOf[Seq[Integer]], registry) shouldBe a[IterableCodec] + provider.get[Map[String, Integer]](classOf[Map[String, Integer]], registry) shouldBe a[IterableCodec] + Option(provider.get[String](classOf[String], registry)) shouldBe None + } + +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/IterableCodecSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/IterableCodecSpec.scala new file mode 100644 index 00000000000..644d6fc068f --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/IterableCodecSpec.scala @@ -0,0 +1,122 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.codecs + +import org.bson.codecs.{ DecoderContext, EncoderContext } +import org.bson.{ BsonDocumentReader, BsonDocumentWriter, Transformer } +import org.mongodb.scala.bson.codecs.Registry.DEFAULT_CODEC_REGISTRY +import org.mongodb.scala.bson.{ BaseSpec, BsonDocument } + +class IterableCodecSpec extends BaseSpec { + + "IterableCodec" should "have the correct encoding class" in { + val codec = IterableCodec(DEFAULT_CODEC_REGISTRY, BsonTypeClassMap()) + codec.getEncoderClass() should equal(classOf[Iterable[_]]) + } + + it should "encode an Iterable to a BSON array" in { + val codec = IterableCodec(DEFAULT_CODEC_REGISTRY, BsonTypeClassMap()) + val writer = new BsonDocumentWriter(new BsonDocument()) + + writer.writeStartDocument() + writer.writeName("array") + codec.encode(writer, List(1, 2, 3), EncoderContext.builder().build()) + writer.writeEndDocument() + writer.getDocument should equal(BsonDocument("{array : [1, 2, 3]}")) + } + + it should "decode a BSON array to an Iterable" in { + val codec = IterableCodec(DEFAULT_CODEC_REGISTRY, BsonTypeClassMap()) + val reader = new BsonDocumentReader(BsonDocument("{array : [1, 2, 3]}")) + + reader.readStartDocument() + reader.readName("array") + val iterable = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + iterable should equal(List(1, 2, 3)) + } + + it should "encode an Iterable containing Maps to a BSON array" in { + val codec = IterableCodec(DEFAULT_CODEC_REGISTRY, BsonTypeClassMap()) + val writer = new BsonDocumentWriter(new BsonDocument()) + + writer.writeStartDocument() + writer.writeName("array") + codec.encode( + writer, + List(Map("a" -> 1, "b" -> 2, "c" -> null)), + EncoderContext.builder().build() + ) // scalastyle:ignore + writer.writeEndDocument() + writer.getDocument should equal(BsonDocument("{array : [{a: 1, b: 2, c: null}]}")) + } + + it should "decode a BSON array containing maps to an Iterable" in { + val codec = 
IterableCodec(DEFAULT_CODEC_REGISTRY, BsonTypeClassMap()) + val reader = new BsonDocumentReader(BsonDocument("{array : [{a: 1, b: 2, c: null}]}")) + + reader.readStartDocument() + reader.readName("array") + val iterable = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + iterable should equal(List(Map("a" -> 1, "b" -> 2, "c" -> null))) // scalastyle:ignore + } + + it should "encode a Map to a BSON document" in { + val codec = IterableCodec(DEFAULT_CODEC_REGISTRY, BsonTypeClassMap()) + val writer = new BsonDocumentWriter(new BsonDocument()) + + writer.writeStartDocument() + writer.writeName("document") + codec.encode(writer, Map("a" -> 1, "b" -> 2), EncoderContext.builder().build()) + writer.writeEndDocument() + writer.getDocument should equal(BsonDocument("{document : {a: 1, b: 2}}")) + } + + it should "decode a BSON Document to a Map" in { + val codec = IterableCodec(DEFAULT_CODEC_REGISTRY, BsonTypeClassMap()) + val reader = new BsonDocumentReader(BsonDocument("{document : {a: 1, b: 2}}")) + + reader.readStartDocument() + reader.readName("document") + val iterable = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + iterable should equal(Map("a" -> 1, "b" -> 2)) + } + + it should "use the provided transformer" in { + val codec = IterableCodec( + DEFAULT_CODEC_REGISTRY, + BsonTypeClassMap(), + new Transformer { + override def transform(objectToTransform: Any): AnyRef = s"$objectToTransform" + } + ) + val reader = new BsonDocumentReader(BsonDocument("{array : [1, 2, 3]}")) + + reader.readStartDocument() + reader.readName("array") + val iterable = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + iterable.toList should contain theSameElementsInOrderAs List("1", "2", "3") + } + +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/MacrosSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/MacrosSpec.scala new file mode 100644 
index 00000000000..e3c8ded2d89 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/MacrosSpec.scala @@ -0,0 +1,727 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.codecs + +import java.nio.ByteBuffer +import java.util +import java.util.Date + +import org.bson._ +import org.bson.codecs.configuration.{ CodecProvider, CodecRegistries, CodecRegistry } +import org.bson.codecs.{ Codec, DecoderContext, EncoderContext } +import org.bson.io.{ BasicOutputBuffer, ByteBufferBsonInput, OutputBuffer } +import org.bson.types.ObjectId +import org.mongodb.scala.bson.BaseSpec +import org.mongodb.scala.bson.annotations.{ BsonIgnore, BsonProperty } +import org.mongodb.scala.bson.codecs.Macros.{ createCodecProvider, createCodecProviderIgnoreNone } +import org.mongodb.scala.bson.codecs.Registry.DEFAULT_CODEC_REGISTRY +import org.mongodb.scala.bson.collection.immutable.Document +import scala.collection.immutable.Vector + +import scala.collection.JavaConverters._ +import scala.reflect.ClassTag + +//scalastyle:off +class MacrosSpec extends BaseSpec { + + case class Empty() + case class Person(firstName: String, lastName: String) + case class DefaultValue(name: String, active: Boolean = false) + case class SeqOfStrings(name: String, value: Seq[String]) + case class RecursiveSeq(name: String, value: Seq[RecursiveSeq]) + case class 
AnnotatedClass(@BsonProperty("annotated_name") name: String) + case class IgnoredFieldClass(name: String, @BsonIgnore meta: String = "ignored_default") + + case class Binary(binary: Array[Byte]) { + + /** + * Custom equals + * + * Because `Array[Byte]` only does equality based on identity we use toSeq helper to compare the actual values. + * + * @param arg the other value + * @return true if equal else false + */ + override def equals(arg: Any): Boolean = arg match { + case that: Binary => that.binary.toSeq == binary.toSeq + case _ => false + } + } + case class AllTheBsonTypes( + documentMap: Map[String, String], + array: Seq[String], + date: Date, + boolean: Boolean, + double: Double, + int32: Int, + int64: Long, + string: String, + binary: Binary, + none: Option[String] + ) + + case class MapOfStrings(name: String, value: Map[String, String]) + case class SeqOfMapOfStrings(name: String, value: Seq[Map[String, String]]) + case class RecursiveMapOfStrings(name: String, value: Seq[Map[String, RecursiveMapOfStrings]]) + + type StringAlias = String + case class MapOfStringAliases(name: String, value: Map[StringAlias, StringAlias]) + + case class ContainsCaseClass(name: String, friend: Person) + case class ContainsSeqCaseClass(name: String, friends: Seq[Person]) + case class ContainsNestedSeqCaseClass(name: String, friends: Seq[Seq[Person]]) + case class ContainsMapOfCaseClasses(name: String, friends: Map[String, Person]) + case class ContainsMapOfMapOfCaseClasses(name: String, friends: Map[String, Map[String, Person]]) + case class ContainsCaseClassWithDefault(name: String, friend: Person = Person("Frank", "Sinatra")) + + case class ContainsSet(name: String, friends: Set[String]) + case class ContainsVector(name: String, friends: Vector[String]) + case class ContainsList(name: String, friends: List[String]) + case class ContainsStream(name: String, friends: Stream[String]) + + case class CaseClassWithVal(_id: ObjectId, name: String) { + val id: String = _id.toString + 
} + + case class OptionalValue(name: String, value: Option[String]) + case class OptionalCaseClass(name: String, value: Option[Person]) + case class OptionalRecursive(name: String, value: Option[OptionalRecursive]) + + sealed class Tree + case class Branch(@BsonProperty("l1") b1: Tree, @BsonProperty("r1") b2: Tree, value: Int) extends Tree + case class Leaf(value: Int) extends Tree + + sealed trait WithIgnored + case class MetaIgnoredField(data: String, @BsonIgnore meta: Seq[String] = Vector("ignore_me")) extends WithIgnored + case class LeafCountIgnoredField(branchCount: Int, @BsonIgnore leafCount: Int = 100) extends WithIgnored + case class ContainsIgnoredField(list: Seq[WithIgnored]) + + case class ContainsADT(name: String, tree: Tree) + case class ContainsSeqADT(name: String, trees: Seq[Tree]) + case class ContainsNestedSeqADT(name: String, trees: Seq[Seq[Tree]]) + + sealed class Graph + case class Node(name: String, value: Option[Graph]) extends Graph + + sealed class NotImplementedSealedClass + sealed trait NotImplementedSealedTrait + case class UnsupportedTuple(value: (String, String)) + case class UnsupportedMap(value: Map[Int, Int]) + + type SimpleTypeAlias = Map[String, String] + case class ContainsSimpleTypeAlias(a: String, b: SimpleTypeAlias = Map.empty) + type CaseClassTypeAlias = Person + case class ContainsCaseClassTypeAlias(a: String, b: CaseClassTypeAlias) + type ADTCaseClassTypeAlias = ContainsADT + case class ContainsADTCaseClassTypeAlias(a: String, b: ADTCaseClassTypeAlias) + + trait Tag + case class ContainsTaggedTypes( + a: Int with Tag, + b: String with Tag, + c: Map[String with Tag, Int with Tag] with Tag, + d: Empty with Tag + ) extends Tag + + case class ContainsTypeLessMap(a: BsonDocument) + + sealed class SealedClassCaseObject + object SealedClassCaseObject { + case object Alpha extends SealedClassCaseObject + } + + sealed trait CaseObjectEnum + case object Alpha extends CaseObjectEnum + case object Bravo extends CaseObjectEnum + case 
object Charlie extends CaseObjectEnum + + case class ContainsEnumADT(name: String, enum: CaseObjectEnum) + + sealed class SealedClass + case class SealedClassA(stringField: String) extends SealedClass + case class SealedClassB(intField: Int) extends SealedClass + case class ContainsSealedClass(list: List[SealedClass]) + + sealed abstract class SealedAbstractClass + case class SealedAbstractClassA(stringField: String) extends SealedAbstractClass + case class SealedAbstractClassB(intField: Int) extends SealedAbstractClass + case class ContainsSealedAbstractClass(list: List[SealedAbstractClass]) + + sealed class SealedClassWithParams(val superField: String) + case class SealedClassWithParamsA(stringField: String, override val superField: String) + extends SealedClassWithParams(superField) + case class SealedClassWithParamsB(intField: Int, override val superField: String) + extends SealedClassWithParams(superField) + case class ContainsSealedClassWithParams(list: List[SealedClassWithParams]) + + sealed abstract class SealedAbstractClassWithParams(val superField: String) + case class SealedAbstractClassWithParamsA(stringField: String, override val superField: String) + extends SealedAbstractClassWithParams(superField) + case class SealedAbstractClassWithParamsB(intField: Int, override val superField: String) + extends SealedAbstractClassWithParams(superField) + case class ContainsSealedAbstractClassWithParams(list: List[SealedAbstractClassWithParams]) + + sealed trait SealedTrait + case class SealedTraitA(stringField: String) extends SealedTrait + case class SealedTraitB(intField: Int) extends SealedTrait + case class ContainsSealedTrait(list: List[SealedTrait]) + + sealed class SingleSealedClass + case class SingleSealedClassImpl() extends SingleSealedClass + + sealed abstract class SingleSealedAbstractClass + case class SingleSealedAbstractClassImpl() extends SingleSealedAbstractClass + + sealed trait SingleSealedTrait + case class SingleSealedTraitImpl() extends 
SingleSealedTrait + + "Macros" should "be able to round trip simple case classes" in { + roundTrip(Empty(), "{}", classOf[Empty]) + roundTrip(Person("Bob", "Jones"), """{firstName: "Bob", lastName: "Jones"}""", classOf[Person]) + roundTrip(DefaultValue(name = "Bob"), """{name: "Bob", active: false}""", classOf[DefaultValue]) + roundTrip( + SeqOfStrings("Bob", Seq("scala", "jvm")), + """{name: "Bob", value: ["scala", "jvm"]}""", + classOf[SeqOfStrings] + ) + roundTrip( + RecursiveSeq("Bob", Seq(RecursiveSeq("Charlie", Seq.empty[RecursiveSeq]))), + """{name: "Bob", value: [{name: "Charlie", value: []}]}""", + classOf[RecursiveSeq] + ) + roundTrip(AnnotatedClass("Bob"), """{annotated_name: "Bob"}""", classOf[AnnotatedClass]) + roundTrip( + MapOfStrings("Bob", Map("brother" -> "Tom Jones")), + """{name: "Bob", value: {brother: "Tom Jones"}}""", + classOf[MapOfStrings] + ) + roundTrip( + MapOfStringAliases("Bob", Map("brother" -> "Tom Jones")), + """{name: "Bob", value: {brother: "Tom Jones"}}""", + classOf[MapOfStringAliases] + ) + roundTrip( + SeqOfMapOfStrings("Bob", Seq(Map("brother" -> "Tom Jones"))), + """{name: "Bob", value: [{brother: "Tom Jones"}]}""", + classOf[SeqOfMapOfStrings] + ) + roundTrip( + ContainsSet("Bob", Set("Tom", "Charlie")), + """{name: "Bob", friends: ["Tom","Charlie"]}""", + Macros.createCodecProvider(classOf[ContainsSet]) + ) + roundTrip( + ContainsVector("Bob", Vector("Tom", "Charlie")), + """{name: "Bob", friends: ["Tom","Charlie"]}""", + Macros.createCodecProvider(classOf[ContainsVector]) + ) + roundTrip( + ContainsList("Bob", List("Tom", "Charlie")), + """{name: "Bob", friends: ["Tom","Charlie"]}""", + Macros.createCodecProvider(classOf[ContainsList]) + ) + roundTrip( + ContainsStream("Bob", Stream("Tom", "Charlie")), + """{name: "Bob", friends: ["Tom","Charlie"]}""", + Macros.createCodecProvider(classOf[ContainsStream]) + ) + } + + it should "be able to ignore fields" in { + roundTrip( + IgnoredFieldClass("Bob", "singer"), + 
IgnoredFieldClass("Bob"), + """{name: "Bob"}""", + classOf[IgnoredFieldClass] + ) + + roundTrip( + ContainsIgnoredField(Vector(MetaIgnoredField("Bob", List("singer")), LeafCountIgnoredField(1, 10))), + ContainsIgnoredField(Vector(MetaIgnoredField("Bob"), LeafCountIgnoredField(1))), + """{"list" : [{"_t" : "MetaIgnoredField", "data" : "Bob" }, {"_t" : "LeafCountIgnoredField", "branchCount": 1}]}""", + classOf[ContainsIgnoredField], + classOf[WithIgnored] + ) + } + + it should "be able to round trip polymorphic nested case classes in a sealed class" in { + roundTrip( + ContainsSealedClass(List(SealedClassA("test"), SealedClassB(12))), + """{"list" : [{"_t" : "SealedClassA", "stringField" : "test"}, {"_t" : "SealedClassB", "intField" : 12}]}""", + classOf[ContainsSealedClass], + classOf[SealedClass] + ) + } + + it should "be able to round trip polymorphic nested case classes in a sealed abstract class" in { + roundTrip( + ContainsSealedAbstractClass(List(SealedAbstractClassA("test"), SealedAbstractClassB(12))), + """{"list" : [{"_t" : "SealedAbstractClassA", "stringField" : "test"}, {"_t" : "SealedAbstractClassB", "intField" : 12}]}""", + classOf[ContainsSealedAbstractClass], + classOf[SealedAbstractClass] + ) + } + + it should "be able to round trip polymorphic nested case classes in a sealed class with parameters" in { + roundTrip( + ContainsSealedClassWithParams( + List(SealedClassWithParamsA("test", "tested1"), SealedClassWithParamsB(12, "tested2")) + ), + """{"list" : [{"_t" : "SealedClassWithParamsA", "stringField" : "test", "superField" : "tested1"}, {"_t" : "SealedClassWithParamsB", "intField" : 12, "superField" : "tested2"}]}""", + classOf[ContainsSealedClassWithParams], + classOf[SealedClassWithParams] + ) + } + + it should "be able to round trip polymorphic nested case classes in a sealed abstract class with parameters" in { + roundTrip( + ContainsSealedAbstractClassWithParams( + List(SealedAbstractClassWithParamsA("test", "tested1"), 
SealedAbstractClassWithParamsB(12, "tested2")) + ), + """{"list" : [{"_t" : "SealedAbstractClassWithParamsA", "stringField" : "test", "superField" : "tested1"}, {"_t" : "SealedAbstractClassWithParamsB", "intField" : 12, "superField" : "tested2"}]}""", + classOf[ContainsSealedAbstractClassWithParams], + classOf[SealedAbstractClassWithParams] + ) + } + + it should "be able to round trip polymorphic nested case classes in a sealed trait" in { + roundTrip( + ContainsSealedTrait(List(SealedTraitA("test"), SealedTraitB(12))), + """{"list" : [{"_t" : "SealedTraitA", "stringField" : "test"}, {"_t" : "SealedTraitB", "intField" : 12}]}""", + classOf[ContainsSealedTrait], + classOf[SealedTrait] + ) + } + + it should "be able to round trip nested case classes" in { + roundTrip( + ContainsCaseClass("Charlie", Person("Bob", "Jones")), + """{name: "Charlie", friend: {firstName: "Bob", lastName: "Jones"}}""", + classOf[ContainsCaseClass], + classOf[Person] + ) + roundTrip( + ContainsSeqCaseClass("Charlie", Seq(Person("Bob", "Jones"))), + """{name: "Charlie", friends: [{firstName: "Bob", lastName: "Jones"}]}""", + classOf[ContainsSeqCaseClass], + classOf[Person] + ) + roundTrip( + ContainsNestedSeqCaseClass("Charlie", Seq(Seq(Person("Bob", "Jones")), Seq(Person("Tom", "Jones")))), + """{name: "Charlie", friends: [[{firstName: "Bob", lastName: "Jones"}], [{firstName: "Tom", lastName: "Jones"}]]}""", + classOf[ContainsNestedSeqCaseClass], + classOf[Person] + ) + } + + it should "be able to round trip nested case classes in maps" in { + roundTrip( + ContainsMapOfCaseClasses("Bob", Map("name" -> Person("Jane", "Jones"))), + """{name: "Bob", friends: {name: {firstName: "Jane", lastName: "Jones"}}}""", + classOf[ContainsMapOfCaseClasses], + classOf[Person] + ) + roundTrip( + ContainsMapOfMapOfCaseClasses("Bob", Map("maternal" -> Map("mother" -> Person("Jane", "Jones")))), + """{name: "Bob", friends: {maternal: {mother: {firstName: "Jane", lastName: "Jones"}}}}""", + 
classOf[ContainsMapOfMapOfCaseClasses], + classOf[Person] + ) + } + + it should "be able to round trip optional values" in { + roundTrip(OptionalValue("Bob", None), """{name: "Bob", value: null}""", classOf[OptionalValue]) + roundTrip(OptionalValue("Bob", Some("value")), """{name: "Bob", value: "value"}""", classOf[OptionalValue]) + roundTrip(OptionalCaseClass("Bob", None), """{name: "Bob", value: null}""", classOf[OptionalCaseClass]) + roundTrip( + OptionalCaseClass("Bob", Some(Person("Charlie", "Jones"))), + """{name: "Bob", value: {firstName: "Charlie", lastName: "Jones"}}""", + classOf[OptionalCaseClass], + classOf[Person] + ) + + roundTrip(OptionalRecursive("Bob", None), """{name: "Bob", value: null}""", classOf[OptionalRecursive]) + roundTrip( + OptionalRecursive("Bob", Some(OptionalRecursive("Charlie", None))), + """{name: "Bob", value: {name: "Charlie", value: null}}""", + classOf[OptionalRecursive] + ) + } + + it should "be able to round trip Map values where the top level implementations don't include type information" in { + roundTrip( + ContainsTypeLessMap(BsonDocument.parse("""{b: "c"}""")), + """{a: {b: "c"}}""", + classOf[ContainsTypeLessMap] + ) + } + + it should "be able to decode case classes missing optional values" in { + val registry = + CodecRegistries.fromRegistries(CodecRegistries.fromProviders(classOf[OptionalValue]), DEFAULT_CODEC_REGISTRY) + val buffer = encode(registry.get(classOf[Document]), Document("name" -> "Bob")) + + decode(registry.get(classOf[OptionalValue]), buffer) should equal(OptionalValue("Bob", None)) + } + + it should "be able to round trip default values" in { + roundTrip( + ContainsCaseClassWithDefault("Charlie"), + """{name: "Charlie", friend: { firstName: "Frank", lastName: "Sinatra"}}""", + classOf[ContainsCaseClassWithDefault], + classOf[Person] + ) + } + + it should "rountrip case classes containing vals" in { + val id = new ObjectId + roundTrip( + CaseClassWithVal(id, "Bob"), + s"""{"_id": {"$$oid": 
"${id.toHexString}" }, "name" : "Bob"}""", + classOf[CaseClassWithVal] + ) + } + + it should "be able to decode case class with vals" in { + val registry = CodecRegistries.fromRegistries( + CodecRegistries.fromProviders(classOf[CaseClassWithVal]), + DEFAULT_CODEC_REGISTRY + ) + + val id = new ObjectId + val buffer = encode( + registry.get(classOf[Document]), + Document("_id" -> id, "name" -> "Bob") + ) + + decode( + registry.get(classOf[CaseClassWithVal]), + buffer + ) should equal(CaseClassWithVal(id, "Bob")) + } + + it should "be able to round trip optional values, when None is ignored" in { + roundTrip(OptionalValue("Bob", None), """{name: "Bob"}""", createCodecProviderIgnoreNone[OptionalValue]()) + roundTrip( + OptionalValue("Bob", Some("value")), + """{name: "Bob", value: "value"}""", + createCodecProviderIgnoreNone[OptionalValue]() + ) + roundTrip(OptionalCaseClass("Bob", None), """{name: "Bob"}""", createCodecProviderIgnoreNone[OptionalCaseClass]()) + roundTrip( + OptionalCaseClass("Bob", Some(Person("Charlie", "Jones"))), + """{name: "Bob", value: {firstName: "Charlie", lastName: "Jones"}}""", + createCodecProviderIgnoreNone[OptionalCaseClass](), + createCodecProviderIgnoreNone[Person]() + ) + + roundTrip(OptionalRecursive("Bob", None), """{name: "Bob"}""", createCodecProviderIgnoreNone[OptionalRecursive]()) + roundTrip( + OptionalRecursive("Bob", Some(OptionalRecursive("Charlie", None))), + """{name: "Bob", value: {name: "Charlie"}}""", + createCodecProviderIgnoreNone[OptionalRecursive]() + ) + } + + it should "roundtrip all the supported bson types" in { + roundTrip( + AllTheBsonTypes( + Map("a" -> "b"), + Seq("a", "b", "c"), + new Date(123), + boolean = true, + 1.0, + 10, + 100L, + "string", + Binary(Array[Byte](123)), + None + ), + """{"documentMap" : { "a" : "b" }, "array" : ["a", "b", "c"], "date" : { "$date" : 123 }, "boolean" : true, + | "double" : 1.0, "int32" : 10, "int64" : { "$numberLong" : "100" }, "string" : "string", + | "binary" : { 
"binary": { "$binary" : "ew==", "$type" : "00" } }, "none" : null }""".stripMargin, + classOf[Binary], + classOf[AllTheBsonTypes] + ) + } + + it should "support ADT sealed case classes" in { + val leaf = Leaf(1) + val branch = Branch(Branch(Leaf(1), Leaf(2), 3), Branch(Leaf(4), Leaf(5), 6), 3) // scalastyle:ignore + val leafJson = createTreeJson(leaf) + val branchJson = createTreeJson(branch) + + roundTrip(leaf, leafJson, classOf[Tree]) + roundTrip(branch, branchJson, classOf[Tree]) + + roundTrip(ContainsADT("Bob", leaf), s"""{name: "Bob", tree: $leafJson}""", classOf[ContainsADT], classOf[Tree]) + roundTrip(ContainsADT("Bob", branch), s"""{name: "Bob", tree: $branchJson}""", classOf[ContainsADT], classOf[Tree]) + + roundTrip( + ContainsSeqADT("Bob", List(leaf, branch)), + s"""{name: "Bob", trees: [$leafJson, $branchJson]}""", + classOf[ContainsSeqADT], + classOf[Tree] + ) + roundTrip( + ContainsNestedSeqADT("Bob", List(List(leaf), List(branch))), + s"""{name: "Bob", trees: [[$leafJson], [$branchJson]]}""", + classOf[ContainsNestedSeqADT], + classOf[Tree] + ) + } + + it should "write the type of sealed classes and traits with only one subclass" in { + roundTrip(SingleSealedClassImpl(), """{ "_t" : "SingleSealedClassImpl" }""".stripMargin, classOf[SingleSealedClass]) + roundTrip( + SingleSealedAbstractClassImpl(), + """{ "_t" : "SingleSealedAbstractClassImpl" }""".stripMargin, + classOf[SingleSealedAbstractClass] + ) + roundTrip(SingleSealedTraitImpl(), """{ "_t" : "SingleSealedTraitImpl" }""".stripMargin, classOf[SingleSealedTrait]) + } + + it should "support optional values in ADT sealed classes" in { + val nodeA = Node("nodeA", None) + val nodeB = Node("nodeB", Some(nodeA)) + + val nodeAJson = """{_t: "Node", name: "nodeA", value: null}""" + val nodeBJson = s"""{_t: "Node", name: "nodeB", value: $nodeAJson}""" + + roundTrip(nodeA, nodeAJson, classOf[Graph]) + roundTrip(nodeB, nodeBJson, classOf[Graph]) + } + + it should "support type aliases in case classes" in { 
+ roundTrip( + ContainsSimpleTypeAlias("c", Map("d" -> "c")), + """{a: "c", b: {d: "c"}}""", + classOf[ContainsSimpleTypeAlias] + ) + roundTrip( + ContainsCaseClassTypeAlias("c", Person("Tom", "Jones")), + """{a: "c", b: {firstName: "Tom", lastName: "Jones"}}""", + classOf[ContainsCaseClassTypeAlias], + classOf[CaseClassTypeAlias] + ) + + val branch = Branch(Branch(Leaf(1), Leaf(2), 3), Branch(Leaf(4), Leaf(5), 6), 3) // scalastyle:ignore + val branchJson = createTreeJson(branch) + roundTrip( + ContainsADTCaseClassTypeAlias("c", ContainsADT("Tom", branch)), + s"""{a: "c", b: {name: "Tom", tree: $branchJson}}""", + classOf[ContainsADTCaseClassTypeAlias], + classOf[ADTCaseClassTypeAlias], + classOf[Tree] + ) + } + + it should "support tagged types in case classes" in { + assume(!scala.util.Properties.versionNumberString.startsWith("2.11")) + val a = 1.asInstanceOf[Int with Tag] + val b = "b".asInstanceOf[String with Tag] + val c = Map("c" -> 0).asInstanceOf[Map[String with Tag, Int with Tag] with Tag] + val d = Empty().asInstanceOf[Empty with Tag] + roundTrip( + ContainsTaggedTypes(a, b, c, d), + """{a: 1, b: "b", c: {c: 0}, d: {}}""", + classOf[ContainsTaggedTypes], + classOf[Empty] + ) + } + + it should "be able to support value classes" in { + val valueClassCodecProvider = new CodecProvider { + override def get[T](clazz: Class[T], registry: CodecRegistry): Codec[T] = { + if (clazz == classOf[IsValueClass]) { + new Codec[IsValueClass] { + override def encode(writer: BsonWriter, value: IsValueClass, encoderContext: EncoderContext): Unit = + writer.writeInt32(value.id) + + override def getEncoderClass: Class[IsValueClass] = classOf[IsValueClass] + + override def decode(reader: BsonReader, decoderContext: DecoderContext): IsValueClass = + IsValueClass(reader.readInt32()) + }.asInstanceOf[Codec[T]] + } else { + null // scalastyle:ignore + } + } + } + roundTrip( + ContainsValueClass(IsValueClass(1), "string value"), + """{id: 1, myString: 'string value'}""", + 
classOf[ContainsValueClass], + valueClassCodecProvider + ) + } + + it should "support case object enum types" in { + roundTrip(Alpha, """{_t:"Alpha"}""", classOf[CaseObjectEnum]) + roundTrip(Bravo, """{_t:"Bravo"}""", classOf[CaseObjectEnum]) + roundTrip(Charlie, """{_t:"Charlie"}""", classOf[CaseObjectEnum]) + + roundTrip( + ContainsEnumADT("Bob", Alpha), + """{name:"Bob", enum:{_t:"Alpha"}}""", + classOf[ContainsEnumADT], + classOf[CaseObjectEnum] + ) + } + + it should "support extra fields in the document" in { + val json = + """{firstName: "Bob", lastName: "Jones", address: {number: 1, street: "Acacia Avenue"}, aliases: ["Robert", "Rob"]}""" + decode(Person("Bob", "Jones"), json, Macros.createCodec[Person]()) + } + + it should "support throw a CodecConfigurationException missing _t field" in { + val missing_t = """{name: "nodeA", value: null}""" + val registry = CodecRegistries.fromRegistries(CodecRegistries.fromProviders(classOf[Graph]), DEFAULT_CODEC_REGISTRY) + + val buffer = encode(registry.get(classOf[Document]), Document(missing_t)) + + an[BsonInvalidOperationException] should be thrownBy { + decode(registry.get(classOf[Graph]), buffer) + } + } + + it should "support throw a CodecConfigurationException with an unknown class name in the _t field" in { + val missing_t = """{_t: "Wibble", name: "nodeA", value: null}""" + val registry = CodecRegistries.fromRegistries(CodecRegistries.fromProviders(classOf[Graph]), DEFAULT_CODEC_REGISTRY) + val buffer = encode(registry.get(classOf[Document]), Document(missing_t)) + + an[BsonInvalidOperationException] should be thrownBy { + decode(registry.get(classOf[Graph]), buffer) + } + } + + it should "throw a CodecConfigurationException when encountering null values in case classes" in { + val registry = + CodecRegistries.fromRegistries(CodecRegistries.fromProviders(classOf[Person]), DEFAULT_CODEC_REGISTRY) + an[BsonInvalidOperationException] should be thrownBy { + encode(registry.get(classOf[Person]), null) + } + + 
an[BsonInvalidOperationException] should be thrownBy { + encode(registry.get(classOf[Person]), Person(null, null)) + } + } + + it should "not compile case classes with unsupported values" in { + "Macros.createCodecProvider(classOf[UnsupportedTuple])" shouldNot compile + "Macros.createCodecProvider(classOf[UnsupportedMap])" shouldNot compile + } + + it should "not compile if there are no concrete implementations of a sealed class or trait" in { + "Macros.createCodecProvider(classOf[NotImplementedSealedClass])" shouldNot compile + "Macros.createCodecProvider(classOf[NotImplementedSealedTrait])" shouldNot compile + } + + it should "error when reading unexpected lists" in { + val registry = CodecRegistries.fromRegistries( + CodecRegistries.fromProviders(classOf[ContainsCaseClass], classOf[Person]), + DEFAULT_CODEC_REGISTRY + ) + an[BsonInvalidOperationException] should be thrownBy { + val json = """{name: "Bob", friend: [{firstName: "Jane", lastName: "Ada"}]}""" + decode(ContainsCaseClass("Bob", Person("Jane", "Ada")), json, registry.get(classOf[ContainsCaseClass])) + } + } + + it should "error when reading unexpected documents" in { + val registry = CodecRegistries.fromRegistries( + CodecRegistries.fromProviders(classOf[ContainsCaseClass], classOf[Person]), + DEFAULT_CODEC_REGISTRY + ) + an[BsonInvalidOperationException] should be thrownBy { + val json = """{name: "Bob", friend: {first: {firstName: "Jane", lastName: "Ada"}}}""" + decode(ContainsCaseClass("Bob", Person("Jane", "Ada")), json, registry.get(classOf[ContainsCaseClass])) + } + } + + def roundTrip[T](value: T, expected: String, provider: CodecProvider, providers: CodecProvider*)( + implicit ct: ClassTag[T] + ): Unit = { + val codecProviders: util.List[CodecProvider] = (provider +: providers).asJava + val registry = CodecRegistries.fromRegistries(CodecRegistries.fromProviders(codecProviders), DEFAULT_CODEC_REGISTRY) + val codec = registry.get(ct.runtimeClass).asInstanceOf[Codec[T]] + roundTripCodec(value, 
Document(expected), codec) + } + + def roundTrip[T](value: T, decodedValue: T, expected: String, provider: CodecProvider, providers: CodecProvider*)( + implicit ct: ClassTag[T] + ): Unit = { + val codecProviders: util.List[CodecProvider] = (provider +: providers).asJava + val registry = CodecRegistries.fromRegistries(CodecRegistries.fromProviders(codecProviders), DEFAULT_CODEC_REGISTRY) + val codec = registry.get(ct.runtimeClass).asInstanceOf[Codec[T]] + roundTripCodec(value, decodedValue, Document(expected), codec) + } + + def roundTripCodec[T](value: T, expected: Document, codec: Codec[T]): Unit = { + val encoded = encode(codec, value) + val actual = decode(documentCodec, encoded) + assert(expected == actual, s"Encoded document: (${actual.toJson()}) did not equal: (${expected.toJson()})") + + val roundTripped = decode(codec, encode(codec, value)) + assert(roundTripped == value, s"Round Tripped case class: ($roundTripped) did not equal the original: ($value)") + } + + def roundTripCodec[T](value: T, decodedValue: T, expected: Document, codec: Codec[T]): Unit = { + val encoded = encode(codec, value) + val actual = decode(documentCodec, encoded) + assert(expected == actual, s"Encoded document: (${actual.toJson()}) did not equal: (${expected.toJson()})") + + val roundTripped = decode(codec, encode(codec, value)) + assert( + roundTripped == decodedValue, + s"Round Tripped case class: ($roundTripped) did not equal the expected: ($decodedValue)" + ) + } + + def encode[T](codec: Codec[T], value: T): OutputBuffer = { + val buffer = new BasicOutputBuffer() + val writer = new BsonBinaryWriter(buffer) + codec.encode(writer, value, EncoderContext.builder.build) + buffer + } + + def decode[T](codec: Codec[T], buffer: OutputBuffer): T = { + val reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(buffer.toByteArray)))) + codec.decode(reader, DecoderContext.builder().build()) + } + + def decode[T](value: T, json: String, codec: Codec[T]): Unit = { 
+ val roundTripped = decode(codec, encode(documentCodec, Document(json))) + assert(roundTripped == value, s"Round Tripped case class: ($roundTripped) did not equal the original: ($value)") + } + + val documentCodec: Codec[Document] = DEFAULT_CODEC_REGISTRY.get(classOf[Document]) + + def createTreeJson(tree: Tree): String = { + tree match { + case l: Leaf => s"""{_t: "Leaf", value: ${l.value}}""" + case b: Branch => + s"""{_t: "Branch", l1: ${createTreeJson(b.b1)}, r1: ${createTreeJson(b.b2)}, value: ${b.value}}""" + case _ => "{}" + } + } + +} + +case class IsValueClass(id: Int) extends AnyVal +case class ContainsValueClass(id: IsValueClass, myString: String) diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/MutableDocumentCodecSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/MutableDocumentCodecSpec.scala new file mode 100644 index 00000000000..6a6b78580b1 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/MutableDocumentCodecSpec.scala @@ -0,0 +1,127 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.codecs + +import java.nio.ByteBuffer +import java.util.Date + +import org.bson._ +import org.bson.codecs.configuration.CodecRegistry +import org.bson.codecs.{ DecoderContext, EncoderContext } +import org.bson.io.{ BasicOutputBuffer, ByteBufferBsonInput } +import org.bson.types.ObjectId +import org.mongodb.scala.bson.BaseSpec +import org.mongodb.scala.bson.codecs.Registry.DEFAULT_CODEC_REGISTRY +import org.mongodb.scala.bson.collection.mutable +import org.mongodb.scala.bson.collection.mutable.Document + +import scala.collection.JavaConverters._ + +class MutableDocumentCodecSpec extends BaseSpec { + + val registry: CodecRegistry = DEFAULT_CODEC_REGISTRY + + "MutableDocumentCodec" should "encode and decode all default types with readers and writers" in { + val original: mutable.Document = Document( + "binary" -> new BsonBinary("bson".toCharArray map (_.toByte)), + "boolean" -> new BsonBoolean(true), + "dateTime" -> new BsonDateTime(new Date().getTime), + "double" -> new BsonDouble(1.0), + "int" -> new BsonInt32(1), + "long" -> new BsonInt64(1L), + "null" -> new BsonNull(), + "objectId" -> new BsonObjectId(new ObjectId()), + "regEx" -> new BsonRegularExpression("^bson".r.regex), + "string" -> new BsonString("string"), + "symbol" -> new BsonSymbol(Symbol("bson").name), + "bsonDocument" -> new BsonDocument("a", new BsonString("string")), + "array" -> new BsonArray(List(new BsonString("string"), new BsonBoolean(false)).asJava) + ) + + info("encoding") + val writer: BsonBinaryWriter = new BsonBinaryWriter(new BasicOutputBuffer()) + MutableDocumentCodec(registry).encode(writer, original, EncoderContext.builder().build()) + + info("decoding") + val buffer: BasicOutputBuffer = writer.getBsonOutput().asInstanceOf[BasicOutputBuffer]; + val reader: BsonBinaryReader = + new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(buffer.toByteArray)))) + + val decodedDocument = MutableDocumentCodec().decode(reader, 
DecoderContext.builder().build()) + + decodedDocument shouldBe a[mutable.Document] + original should equal(decodedDocument) + } + + it should "respect encodeIdFirst property in encoder context" in { + val original: mutable.Document = Document( + "a" -> new BsonString("string"), + "_id" -> new BsonInt32(1), + "nested" -> Document("a" -> new BsonString("string"), "_id" -> new BsonInt32(1)).toBsonDocument + ) + + info("encoding") + val writer: BsonBinaryWriter = new BsonBinaryWriter(new BasicOutputBuffer()) + MutableDocumentCodec(registry).encode( + writer, + original, + EncoderContext.builder().isEncodingCollectibleDocument(true).build() + ) + + info("decoding") + val buffer: BasicOutputBuffer = writer.getBsonOutput().asInstanceOf[BasicOutputBuffer]; + val reader: BsonBinaryReader = + new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(buffer.toByteArray)))) + + val decodedDocument = MutableDocumentCodec().decode(reader, DecoderContext.builder().build()) + + decodedDocument shouldBe a[mutable.Document] + original should equal(decodedDocument) + decodedDocument.keys.toList should contain theSameElementsInOrderAs (List("_id", "a", "nested")) + + Document(decodedDocument[BsonDocument]("nested")).keys.toList should contain theSameElementsInOrderAs (List( + "a", + "_id" + )) + } + + it should "encoder class should work as expected" in { + MutableDocumentCodec().getEncoderClass should equal(classOf[mutable.Document]) + } + + it should "determine if document has an _id" in { + MutableDocumentCodec().documentHasId(Document()) should be(false) + MutableDocumentCodec().documentHasId(Document("_id" -> new BsonInt32(1))) should be(true) + } + + it should "get the document_id" in { + MutableDocumentCodec().getDocumentId(Document()) should be(null) + MutableDocumentCodec().getDocumentId(Document("_id" -> new BsonInt32(1))) should be(new BsonInt32(1)) + } + + it should "generate document id if absent" in { + val document = Document() + 
MutableDocumentCodec().generateIdIfAbsentFromDocument(document) + document("_id") shouldBe a[BsonObjectId] + } + + it should "not generate document id if present" in { + val document = Document("_id" -> new BsonInt32(1)) + MutableDocumentCodec().generateIdIfAbsentFromDocument(document) + document("_id") should equal(new BsonInt32(1)) + } +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/Registry.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/Registry.scala new file mode 100644 index 00000000000..ae17988a770 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/codecs/Registry.scala @@ -0,0 +1,32 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.codecs + +import org.bson.codecs.configuration.CodecRegistries.fromProviders +import org.bson.codecs.configuration.CodecRegistry +import org.bson.codecs.{ BsonValueCodecProvider, ValueCodecProvider } + +object Registry { + + val DEFAULT_CODEC_REGISTRY: CodecRegistry = fromProviders( + DocumentCodecProvider(), + IterableCodecProvider(), + new ValueCodecProvider(), + new BsonValueCodecProvider() + ) + +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/DocumentImplicitTypeConversion.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/DocumentImplicitTypeConversion.scala new file mode 100644 index 00000000000..0afce3b596b --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/DocumentImplicitTypeConversion.scala @@ -0,0 +1,72 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.mongodb.scala.bson.collections + +import org.mongodb.scala.bson._ +import org.mongodb.scala.bson.collection.immutable.Document +import org.mongodb.scala.bson.collection.mutable + +class DocumentImplicitTypeConversion extends BaseSpec { + + val emptyDoc: Document = Document.empty + + "Document additions and updates" should "support simple additions" in { + val doc1: Document = Document() + ("key" -> "value") + doc1 should equal(Document("key" -> BsonString("value"))) + + val doc2: Document = doc1 + ("key2" -> 2) + doc2 should equal(Document("key" -> BsonString("value"), "key2" -> BsonInt32(2))) + } + + it should "support multiple additions" in { + val doc1: Document = emptyDoc + ("key" -> "value", "key2" -> 2, "key3" -> true, "key4" -> None) + doc1 should equal( + Document("key" -> BsonString("value"), "key2" -> BsonInt32(2), "key3" -> BsonBoolean(true), "key4" -> BsonNull()) + ) + } + + it should "support addition of a traversable" in { + val doc1: Document = emptyDoc ++ Document("key" -> "value", "key2" -> 2, "key3" -> true, "key4" -> None) + doc1 should equal( + Document("key" -> BsonString("value"), "key2" -> BsonInt32(2), "key3" -> BsonBoolean(true), "key4" -> BsonNull()) + ) + } + + it should "support updated" in { + val doc1: Document = emptyDoc.updated("key", "value") + emptyDoc should not be doc1 + doc1 should equal(Document("key" -> BsonString("value"))) + } + + it should "be creatable from mixed types" in { + val doc1: Document = Document( + "a" -> "string", + "b" -> true, + "c" -> List("a", "b", "c"), + "d" -> Document("a" -> "string", "b" -> true, "c" -> List("a", "b", "c")) + ) + + val doc2: mutable.Document = mutable.Document( + "a" -> "string", + "b" -> true, + "c" -> List("a", "b", "c"), + "d" -> + mutable.Document("a" -> "string", "b" -> true, "c" -> List("a", "b", "c")) + ) + doc1.toBsonDocument should equal(doc2.toBsonDocument) + } +} diff --git 
a/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/ImmutableDocumentSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/ImmutableDocumentSpec.scala new file mode 100644 index 00000000000..d24ff044516 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/ImmutableDocumentSpec.scala @@ -0,0 +1,216 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.collections + +import org.bson.json.JsonParseException +import org.mongodb.scala.bson._ +import org.mongodb.scala.bson.collection.immutable.Document + +import scala.collection.mutable + +class ImmutableDocumentSpec extends BaseSpec { + + val emptyDoc: Document = Document.empty + val doc: Document = Document("key" -> "value", "key2" -> "value2", "key3" -> "value3") + val docMap: Map[String, BsonValue] = doc.toMap + + "Document lookups" should "be the same as empty documents" in { + emptyDoc should equal(Document()) + } + + it should "support construction via json" in { + Document("{a: 1, b: true}") should equal(Document("a" -> 1, "b" -> true)) + + intercept[JsonParseException] { + Document("not Json") + } + } + + it should "support get()" in { + doc.get("key") should equal(Some(BsonString("value"))) + doc.get("nonexistent") should equal(None) + } + + it should "support direct lookup" in { + doc("key") should equal(BsonString("value")) + doc[BsonString]("key") should 
equal(BsonString("value")) + + // When the key doesn't exist + an[NoSuchElementException] should be thrownBy doc("nonexistent") + + // When the key exists but the type doesn't match" + an[NoSuchElementException] should be thrownBy doc[BsonArray]("key") + } + + it should "support getOrElse" in { + doc.getOrElse("key", BsonBoolean(false)) should equal(BsonString("value")) + doc.getOrElse("nonexistent", BsonBoolean(false)) should equal(BsonBoolean(false)) + } + + it should "support contains" in { + doc contains "key" should equal(true) + doc contains "nonexistent" should equal(false) + } + + "Document additions and updates" should "support simple additions" in { + val doc1: Document = emptyDoc + ("key" -> "value") + emptyDoc should not be doc1 + doc1 should equal(Document("key" -> "value")) + + val doc2: Document = doc1 + ("key2" -> "value2") + doc1 should not be doc2 + doc2 should equal(Document("key" -> "value", "key2" -> "value2")) + } + + it should "support multiple additions" in { + val doc1: Document = emptyDoc + ("key" -> "value", "key2" -> "value2", + "key3" -> "value3") + emptyDoc should not be doc1 + doc1 should equal(Document("key" -> "value", "key2" -> "value2", "key3" -> "value3")) + + val doc2: Document = doc1 + ("key4" -> "value4") + doc1 should not be doc2 + doc2 should equal(Document("key" -> "value", "key2" -> "value2", "key3" -> "value3", "key4" -> "value4")) + } + + it should "support addition of a traversable" in { + val doc1: Document = emptyDoc ++ Set("key" -> BsonString("value"), "key2" -> BsonString("value2")) + emptyDoc should not be doc1 + doc1 should equal(Document("key" -> BsonString("value"), "key2" -> BsonString("value2"))) + + val doc2: Document = doc1 ++ List("key3" -> BsonString("value3")) + doc1 should not be doc2 + doc2 should equal( + Document("key" -> BsonString("value"), "key2" -> BsonString("value2"), "key3" -> BsonString("value3")) + ) + } + + it should "support updated" in { + val doc1: Document = emptyDoc updated ("key", 
"value") + emptyDoc should not be doc1 + doc1 should equal(Document("key" -> "value")) + + val doc2: Document = doc1 updated ("key2" -> "value2") + doc1 should not be doc2 + doc2 should equal(Document("key" -> "value", "key2" -> "value2")) + } + + "Document removals" should "support subtractions" in { + val doc1: Document = doc - "nonexistent key" + doc1 should equal(doc) + + val doc2: Document = doc - "key" + doc1 should not be doc2 + doc2 should equal(Document("key2" -> "value2", "key3" -> "value3")) + } + + it should "support multiple subtractions" in { + val doc1: Document = doc - ("key", "key2") + doc should not be doc1 + doc1 should equal(Document("key3" -> "value3")) + + } + + it should "support subtraction of a traversable" in { + val doc1: Document = doc -- Set("key", "key2") + doc should not be doc1 + doc1 should equal(Document("key3" -> "value3")) + + val doc2: Document = doc -- List("key3") + doc1 should not be doc2 + doc2 should equal(Document("key" -> "value", "key2" -> "value2")) + + } + + "Document subcollections" should "provide keys in the order set" in { + doc.keys should equal(Set("key", "key2", "key3")) + + val doc1: Document = doc + ("aNewKey" -> "1") + doc1.keys should equal(Set("key", "key2", "key3", "aNewKey")) + } + + it should "provide a keySet in the order set" in { + doc.keySet should equal(Set("key", "key2", "key3")) + + val doc1: Document = doc + ("aNewKey" -> "1") + doc1.keySet should equal(Set("key", "key2", "key3", "aNewKey")) + } + + it should "provide a keysIterator in the order set" in { + doc.keysIterator.toSet should equal(Set("key", "key2", "key3")) + + val doc1: Document = doc + ("aNewKey" -> "1") + doc1.keysIterator.toSet should equal(Set("key", "key2", "key3", "aNewKey")) + } + + it should "provide values in the order set" in { + doc.values.toSet should equal(Set(BsonString("value"), BsonString("value2"), BsonString("value3"))) + + val doc1: Document = doc + ("aNewKey" -> 1) + doc1.values.toSet should 
equal(Set(BsonString("value"), BsonString("value2"), BsonString("value3"), BsonInt32(1))) + } + + it should "provide a valueSet in the order set" in { + doc.valuesIterator.toSet should equal(Set(BsonString("value"), BsonString("value2"), BsonString("value3"))) + + val doc1: Document = doc + ("aNewKey" -> 1) + doc1.valuesIterator.toSet should equal( + Set(BsonString("value"), BsonString("value2"), BsonString("value3"), BsonInt32(1)) + ) + } + + "Document transformations" should "be filterable by keys" in { + val doc1: Document = doc.filterKeys(k => k == "key") + + doc1 should equal(Document("key" -> "value")) + } + + "Traversable helpers" should "work as expected" in { + val map = mutable.Map[String, BsonValue]() + doc foreach (kv => map += kv) + + doc.toMap should equal(map) + } + + it should "be able to create new Documents from iterable" in { + val doc1 = Document(docMap) + doc should equal(doc1) + } + + // it should "be mappable thanks to CanBuildFrom" in { + // Document.empty.map({ kv => kv }) should equal(Document.empty) + // val doc1: Document = docMap.map(kv => kv).to(Document) + // + // doc1 should equal(doc) + // } + + it should "return a BsonDocument" in { + val bsonDoc: BsonDocument = doc.toBsonDocument + doc.underlying should equal(bsonDoc) + } + + it should "return a Json representation" in { + doc.toJson() should equal("""{"key": "value", "key2": "value2", "key3": "value3"}""") + } + + "Documents" should "support Traversable like builders" in { + val doc1 = doc.filter(kv => kv._1 == "key") + + doc1 should not equal doc + doc1 should equal(Document("key" -> "value")) + } +} diff --git a/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/MutableDocumentSpec.scala b/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/MutableDocumentSpec.scala new file mode 100644 index 00000000000..918b8f4c5f6 --- /dev/null +++ b/bson-scala/src/test/scala/org/mongodb/scala/bson/collections/MutableDocumentSpec.scala @@ -0,0 +1,341 @@ +/* + * Copyright 
2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.bson.collections + +import org.bson.json.JsonParseException +import org.bson.{ BsonArray, BsonDocument, BsonValue } +import org.mongodb.scala.bson.collection.mutable.Document +import org.mongodb.scala.bson.{ BaseSpec, BsonBoolean, BsonString } + +import scala.collection.mutable + +class MutableDocumentSpec extends BaseSpec { + + val emptyDoc: Document = Document.empty + val doc: Document = Document("key" -> "value", "key2" -> "value2", "key3" -> "value3") + val docMap: Map[String, BsonValue] = doc.toMap + + "Document lookups" should "be the same as empty documents" in { + emptyDoc should equal(Document()) + } + + it should "support construction via json" in { + Document("{a: 1, b: true}") should equal(Document("a" -> 1, "b" -> true)) + + intercept[JsonParseException] { + Document("not Json") + } + } + + it should "support get()" in { + doc.get("key") should equal(Some(BsonString("value"))) + doc.get("nonexistent") should equal(None) + } + + it should "support direct lookup" in { + doc("key") should equal(BsonString("value")) + doc[BsonString]("key") should equal(BsonString("value")) + + // When the key doesn't exist + an[NoSuchElementException] should be thrownBy doc("nonexistent") + + // When the key exists but the type doesn't match" + an[NoSuchElementException] should be thrownBy doc[BsonArray]("key") + } + + it should "support getOrElse" in { + 
doc.getOrElse("key", BsonBoolean(false)) should equal(BsonString("value")) + doc.getOrElse("nonexistent", BsonBoolean(false)) should equal(BsonBoolean(false)) + } + + it should "support contains" in { + doc contains "key" should equal(true) + doc contains "nonexistent" should equal(false) + } + + "Document additions and updates" should "support simple additions" in { + val doc1: Document = emptyDoc + ("key" -> BsonString("value")) + emptyDoc should not be doc1 + doc1 should equal(Document("key" -> BsonString("value"))) + + val doc2: Document = doc1 + ("key2" -> BsonString("value2")) + doc1 should not be doc2 + doc2 should equal(Document("key" -> BsonString("value"), "key2" -> BsonString("value2"))) + } + + it should "support multiple additions" in { + val doc1: Document = emptyDoc + ("key" -> BsonString("value"), "key2" -> BsonString("value2"), + "key3" -> BsonString("value3")) + emptyDoc should not be doc1 + doc1 should equal( + Document("key" -> BsonString("value"), "key2" -> BsonString("value2"), "key3" -> BsonString("value3")) + ) + + val doc2: Document = doc1 + ("key4" -> BsonString("value4")) + doc1 should not be doc2 + doc2 should equal( + Document( + "key" -> BsonString("value"), + "key2" -> BsonString("value2"), + "key3" -> BsonString("value3"), + "key4" -> BsonString("value4") + ) + ) + } + + it should "support addition of a traversable" in { + val doc1: Document = emptyDoc ++ Set("key" -> BsonString("value"), "key2" -> BsonString("value2")) + emptyDoc should not be doc1 + doc1 should equal(Document("key" -> BsonString("value"), "key2" -> BsonString("value2"))) + + val doc2: Document = doc1 ++ List("key3" -> BsonString("value3")) + doc1 should not be doc2 + doc2 should equal( + Document("key" -> BsonString("value"), "key2" -> BsonString("value2"), "key3" -> BsonString("value3")) + ) + } + + it should "support updated" in { + val doc1: Document = emptyDoc updated ("key", BsonString("value")) + emptyDoc should not be doc1 + doc1 should equal(Document("key" 
-> BsonString("value"))) + + val doc2: Document = doc1 updated ("key2" -> BsonString("value2")) + doc1 should not be doc2 + doc2 should equal(Document("key" -> BsonString("value"), "key2" -> BsonString("value2"))) + } + + "Document removals" should "support subtractions" in { + val doc1: Document = doc - "nonexistent key" + doc1 should equal(doc) + + val doc2: Document = doc - "key" + doc1 should not be doc2 + doc2 should equal(Document("key2" -> BsonString("value2"), "key3" -> BsonString("value3"))) + + } + + it should "support multiple subtractions" in { + val doc1: Document = doc - ("key", "key2") + doc should not be doc1 + doc1 should equal(Document("key3" -> BsonString("value3"))) + + } + + it should "support subtraction of a traversable" in { + val doc1: Document = doc -- Set("key", "key2") + doc should not be doc1 + doc1 should equal(Document("key3" -> BsonString("value3"))) + + val doc2: Document = doc -- List("key3") + doc1 should not be doc2 + doc2 should equal(Document("key" -> BsonString("value"), "key2" -> BsonString("value2"))) + + } + + "Document subcollections" should "provide keys in the order set" in { + doc.keys should equal(Set("key", "key2", "key3")) + + val doc1: Document = doc + ("aNewKey" -> BsonString("1")) + doc1.keys should equal(Set("key", "key2", "key3", "aNewKey")) + } + + it should "provide a keySet in the order set" in { + doc.keySet should equal(Set("key", "key2", "key3")) + + val doc1: Document = doc + ("aNewKey" -> BsonString("1")) + doc1.keySet should equal(Set("key", "key2", "key3", "aNewKey")) + } + + it should "provide a keysIterator in the order set" in { + doc.keysIterator.toSet should equal(Set("key", "key2", "key3")) + + val doc1: Document = doc + ("aNewKey" -> BsonString("1")) + doc1.keysIterator.toSet should equal(Set("key", "key2", "key3", "aNewKey")) + } + + it should "provide values in the order set" in { + doc.values.toSet should equal(Set(BsonString("value"), BsonString("value2"), BsonString("value3"))) + + val 
doc1: Document = doc + ("aNewKey" -> BsonString("1")) + doc1.values.toSet should equal( + Set(BsonString("value"), BsonString("value2"), BsonString("value3"), BsonString("1")) + ) + } + + it should "provide a valueSet in the order set" in { + doc.valuesIterator.toSet should equal(Set(BsonString("value"), BsonString("value2"), BsonString("value3"))) + + val doc1: Document = doc + ("aNewKey" -> BsonString("1")) + doc1.valuesIterator.toSet should equal( + Set(BsonString("value"), BsonString("value2"), BsonString("value3"), BsonString("1")) + ) + } + + "Document transformations" should "be filterable by keys" in { + val doc1: Document = doc.filterKeys(k => k == "key") + + doc1 should equal(Document("key" -> BsonString("value"))) + } + + "Traversable helpers" should "work as expected" in { + val map = mutable.Map[String, BsonValue]() + doc foreach (kv => map += kv) + + doc.toMap should equal(map) + } + + it should "be able to create new Documents from iterable" in { + val doc1 = Document(docMap) + doc should equal(doc1) + } + + // it should "be mappable thanks to CanBuildFrom" in { + // Document.empty.map({ kv => kv }) should equal(Document.empty) + // val doc1: Document = docMap.map(kv => kv).to(Document) + // + // doc1 should equal(doc) + // } + + it should "return a BsonDocument" in { + val bsonDoc: BsonDocument = doc.toBsonDocument + doc.underlying should equal(bsonDoc) + } + + it should "return a Json representation" in { + doc.toJson() should equal("""{"key": "value", "key2": "value2", "key3": "value3"}""") + } + + "Documents" should "support Traversable like builders" in { + val doc1 = doc.filter(kv => kv._1 == "key") + + doc1 should not equal (doc) + doc1 should equal(Document("key" -> BsonString("value"))) + } + + "Mutable Documents" should "have maplike mutability" in { + val doc1 = Document.empty + doc1 += (("x", BsonString("x"))) + + doc1 should equal(Document("x" -> BsonString("x"))) + } + + it should "support multiple inline additions" in { + val doc1: 
Document = Document.empty += ("key" -> BsonString("value"), "key2" -> BsonString("value2")) + doc1 should equal(Document("key" -> BsonString("value"), "key2" -> BsonString("value2"))) + + val doc2: Document = doc1 += ("key3" -> BsonString("value3")) + doc1 should equal(doc2) + doc2 should equal( + Document("key" -> BsonString("value"), "key2" -> BsonString("value2"), "key3" -> BsonString("value3")) + ) + } + + it should "support inline addition of a traversable" in { + val doc1: Document = Document.empty ++= Set("key" -> BsonString("value"), "key2" -> BsonString("value2")) + doc1 should equal(Document("key" -> BsonString("value"), "key2" -> BsonString("value2"))) + + val doc2: Document = doc1 ++= List("key3" -> BsonString("value3")) + doc1 should equal(doc2) + doc2 should equal( + Document("key" -> BsonString("value"), "key2" -> BsonString("value2"), "key3" -> BsonString("value3")) + ) + } + + it should "support put" in { + val doc1: Document = Document.empty + doc1.put("key", BsonString("value")) shouldBe None + doc1 should equal(Document("key" -> BsonString("value"))) + + doc1.put("key", BsonString("newValue")) shouldBe Some(BsonString("value")) + doc1 should equal(Document("key" -> BsonString("newValue"))) + } + + it should "support getOrElseUpdate" in { + val doc1: Document = Document.empty + doc1.getOrElseUpdate("key", BsonString("value")) shouldBe BsonString("value") + doc1 should equal(Document("key" -> BsonString("value"))) + + doc1.getOrElseUpdate("key", BsonString("newValue")) shouldBe BsonString("value") + doc1 should equal(Document("key" -> BsonString("value"))) + } + + it should "support inline update" in { + val doc1: Document = Document.empty + doc1 update ("key", BsonString("value")) + doc1 should equal(Document("key" -> BsonString("value"))) + + doc1 update ("key2", BsonString("value2")) + doc1 should equal(Document("key" -> BsonString("value"), "key2" -> BsonString("value2"))) + } + + "Document removals" should "support inline subtractions" in { + 
val doc1: Document = doc.copy() -= "nonexistent key" + doc1 should equal(doc) + + val doc2: Document = doc1 -= "key" + doc1 should not be equal(doc2) + doc2 should equal(Document("key2" -> BsonString("value2"), "key3" -> BsonString("value3"))) + } + + it should "support multiple inline subtractions" in { + val doc1: Document = doc.copy() -= ("key", "key2") + doc should not be doc1 + doc1 should equal(Document("key3" -> BsonString("value3"))) + } + + it should "support inline subtraction of a traversable" in { + val doc1: Document = doc.copy() --= Set("key", "key2") + doc should not be doc1 + doc1 should equal(Document("key3" -> BsonString("value3"))) + + val doc2: Document = doc1 --= List("key3") + doc1 should equal(doc2) + doc2 should equal(Document()) + } + + it should "support remove" in { + val doc1: Document = Document("key" -> BsonString("value")) + + doc1.remove("key") shouldBe Some(BsonString("value")) + doc1 should equal(Document()) + + doc1.remove("noKey") shouldBe None + doc1 should equal(Document()) + } + + it should "support retain" in { + val doc1: Document = Document("key" -> BsonString("value"), "key2" -> BsonString("value2")) + + doc1.retain((k, v) => k == "key") + doc1 should equal(Document("key" -> BsonString("value"))) + } + + it should "support clear" in { + val doc1: Document = Document("key" -> BsonString("value"), "key2" -> BsonString("value2")) + + doc1.clear() + doc1 should equal(Document()) + } + + it should "support transform" in { + val doc1: Document = Document("key" -> BsonString("value"), "key2" -> BsonString("value2")) + + doc1.transform((k, v) => BsonString(v.asString().getValue.toUpperCase)) + doc1 should equal(Document("key" -> BsonString("VALUE"), "key2" -> BsonString("VALUE2"))) + } +} diff --git a/bson/build.gradle b/bson/build.gradle deleted file mode 100644 index a61dd12aa70..00000000000 --- a/bson/build.gradle +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: 'osgi' -apply plugin: 'java' -apply plugin: 'org.kordamp.gradle.clirr' - -def configDir = new File(rootDir, 'config') -archivesBaseName = 'bson' - -clirr { - excludeFilter = new File("$configDir/clirr-exclude.yml") - baseline 'org.mongodb:bson:3.4.0' - failOnErrors = true -} - -jar { - manifest { - instruction 'Automatic-Module-Name', 'org.mongodb.bson' - instruction 'Build-Version', getGitVersion() - instruction 'Import-Package', - 'javax.xml.bind.*', - 'org.slf4j;resolution:=optional' - } -} - -modifyPom { - project { - name 'BSON' - description 'The BSON library' - url 'http://bsonspec.org' - } -} diff --git a/bson/build.gradle.kts b/bson/build.gradle.kts new file mode 100644 index 00000000000..fab3cdaacb5 --- /dev/null +++ b/bson/build.gradle.kts @@ -0,0 +1,39 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import ProjectExtensions.configureJarManifest +import ProjectExtensions.configureMavenPublication + +plugins { + id("project.java") + id("conventions.testing-junit") + id("conventions.testing-spock") + id("conventions.test-artifacts") +} + +base.archivesName.set("bson") + +configureMavenPublication { + pom { + name.set("BSON") + description.set("The BSON library") + url.set("https://bsonspec.org") + } +} + +configureJarManifest { + attributes["Automatic-Module-Name"] = "org.mongodb.bson" + attributes["Import-Package"] = "org.slf4j.*;resolution:=optional" +} diff --git a/bson/src/main/org/bson/AbstractBsonReader.java b/bson/src/main/org/bson/AbstractBsonReader.java index a7195344324..88c5fda5153 100644 --- a/bson/src/main/org/bson/AbstractBsonReader.java +++ b/bson/src/main/org/bson/AbstractBsonReader.java @@ -20,7 +20,6 @@ import org.bson.types.ObjectId; import static java.lang.String.format; -import static java.util.Arrays.asList; /** * Abstract base class for BsonReader implementations. 
@@ -181,14 +180,14 @@ protected boolean isClosed() { protected abstract Decimal128 doReadDecimal128(); /** - * Handles the logic to read Javascript functions + * Handles the logic to read JavaScript functions * * @return the String value */ protected abstract String doReadJavaScript(); /** - * Handles the logic to read scoped Javascript functions + * Handles the logic to read scoped JavaScript functions * * @return the String value */ @@ -322,7 +321,7 @@ public double readDouble() { @Override public void readEndArray() { if (isClosed()) { - throw new IllegalStateException("BSONBinaryWriter"); + throw new IllegalStateException("BsonReader is closed"); } if (getContext().getContextType() != BsonContextType.ARRAY) { throwInvalidContextType("readEndArray", getContext().getContextType(), BsonContextType.ARRAY); @@ -342,7 +341,7 @@ public void readEndArray() { @Override public void readEndDocument() { if (isClosed()) { - throw new IllegalStateException("BSONBinaryWriter"); + throw new IllegalStateException("BsonReader is closed"); } if (getContext().getContextType() != BsonContextType.DOCUMENT && getContext().getContextType() != BsonContextType.SCOPE_DOCUMENT) { throwInvalidContextType("readEndDocument", @@ -649,7 +648,7 @@ public void readUndefined(final String name) { */ protected void throwInvalidContextType(final String methodName, final BsonContextType actualContextType, final BsonContextType... 
validContextTypes) { - String validContextTypesString = StringUtils.join(" or ", asList(validContextTypes)); + String validContextTypesString = StringUtils.join(" or ", validContextTypes); String message = format("%s can only be called when ContextType is %s, not when ContextType is %s.", methodName, validContextTypesString, actualContextType); throw new BsonInvalidOperationException(message); @@ -663,7 +662,7 @@ protected void throwInvalidContextType(final String methodName, final BsonContex * @throws BsonInvalidOperationException when the method called is not valid for the current state. */ protected void throwInvalidState(final String methodName, final State... validStates) { - String validStatesString = StringUtils.join(" or ", asList(validStates)); + String validStatesString = StringUtils.join(" or ", validStates); String message = format("%s can only be called when State is %s, not when State is %s.", methodName, validStatesString, state); throw new BsonInvalidOperationException(message); @@ -716,7 +715,7 @@ protected void verifyName(final String expectedName) { */ protected void checkPreconditions(final String methodName, final BsonType type) { if (isClosed()) { - throw new IllegalStateException("BsonWriter is closed"); + throw new IllegalStateException("BsonReader is closed"); } verifyBSONType(methodName, type); @@ -772,6 +771,9 @@ private void setStateOnEnd() { } } + /** + * An implementation of {@code BsonReaderMark}. + */ protected class Mark implements BsonReaderMark { private final State state; private final Context parentContext; @@ -779,14 +781,27 @@ protected class Mark implements BsonReaderMark { private final BsonType currentBsonType; private final String currentName; + /** + * Gets the parent context. + * + * @return the parent context + */ protected Context getParentContext() { return parentContext; } + /** + * Gets the context type. 
+ * + * @return the context type + */ protected BsonContextType getContextType() { return contextType; } + /** + * Construct an instance. + */ protected Mark() { state = AbstractBsonReader.this.state; parentContext = AbstractBsonReader.this.context.parentContext; @@ -795,6 +810,7 @@ protected Mark() { currentName = AbstractBsonReader.this.currentName; } + @Override public void reset() { AbstractBsonReader.this.state = state; AbstractBsonReader.this.currentBsonType = currentBsonType; diff --git a/bson/src/main/org/bson/AbstractBsonWriter.java b/bson/src/main/org/bson/AbstractBsonWriter.java index 5dc229d7307..9d571862af0 100644 --- a/bson/src/main/org/bson/AbstractBsonWriter.java +++ b/bson/src/main/org/bson/AbstractBsonWriter.java @@ -20,10 +20,11 @@ import org.bson.types.ObjectId; import java.io.Closeable; +import java.util.ArrayDeque; import java.util.Arrays; +import java.util.Deque; import java.util.List; import java.util.Map; -import java.util.Stack; import static java.lang.String.format; import static org.bson.assertions.Assertions.notNull; @@ -35,7 +36,7 @@ */ public abstract class AbstractBsonWriter implements BsonWriter, Closeable { private final BsonWriterSettings settings; - private final Stack fieldNameValidatorStack = new Stack(); + private final Deque fieldNameValidatorStack = new ArrayDeque<>(); private State state; private Context context; private int serializationDepth; @@ -47,7 +48,7 @@ public abstract class AbstractBsonWriter implements BsonWriter, Closeable { * @param settings The writer settings. 
*/ protected AbstractBsonWriter(final BsonWriterSettings settings) { - this(settings, new NoOpFieldNameValidator()); + this(settings, NoOpFieldNameValidator.INSTANCE); } /** @@ -276,7 +277,9 @@ public void writeStartDocument(final String name) { public void writeStartDocument() { checkPreconditions("writeStartDocument", State.INITIAL, State.VALUE, State.SCOPE_DOCUMENT, State.DONE); if (context != null && context.name != null) { - fieldNameValidatorStack.push(fieldNameValidatorStack.peek().getValidatorForField(getName())); + FieldNameValidator validator = fieldNameValidatorStack.peek().getValidatorForField(getName()); + fieldNameValidatorStack.push(validator); + validator.start(); } serializationDepth++; if (serializationDepth > settings.getMaxSerializationDepth()) { @@ -298,7 +301,7 @@ public void writeEndDocument() { } if (context.getParentContext() != null && context.getParentContext().name != null) { - fieldNameValidatorStack.pop(); + fieldNameValidatorStack.pop().end(); } serializationDepth--; @@ -528,8 +531,9 @@ public void writeName(final String name) { if (state != State.NAME) { throwInvalidState("WriteName", State.NAME); } - if (!fieldNameValidatorStack.peek().validate(name)) { - throw new IllegalArgumentException(format("Invalid BSON field name %s", name)); + FieldNameValidator fieldNameValidator = fieldNameValidatorStack.peek(); + if (!fieldNameValidator.validate(name)) { + throw new IllegalArgumentException(fieldNameValidator.getValidationErrorMessage(name)); } doWriteName(name); context.name = name; @@ -710,7 +714,7 @@ protected void checkPreconditions(final String methodName, final State... validS */ protected void throwInvalidContextType(final String methodName, final BsonContextType actualContextType, final BsonContextType... 
validContextTypes) { - String validContextTypesString = StringUtils.join(" or ", Arrays.asList(validContextTypes)); + String validContextTypesString = StringUtils.join(" or ", validContextTypes); throw new BsonInvalidOperationException(format("%s can only be called when ContextType is %s, " + "not when ContextType is %s.", methodName, validContextTypesString, actualContextType)); @@ -740,11 +744,20 @@ protected void throwInvalidState(final String methodName, final State... validSt } } - String validStatesString = StringUtils.join(" or ", Arrays.asList(validStates)); + String validStatesString = StringUtils.join(" or ", validStates); throw new BsonInvalidOperationException(format("%s can only be called when State is %s, not when State is %s", methodName, validStatesString, state)); } + /** + * {@inheritDoc} + *

+ * The {@link #flush()} method of {@link AbstractBsonWriter} does nothing.

+ */ + @Override + public void flush() { + } + @Override public void close() { closed = true; diff --git a/bson/src/main/org/bson/BSON.java b/bson/src/main/org/bson/BSON.java index 1630a8f82d6..2496bbc2348 100644 --- a/bson/src/main/org/bson/BSON.java +++ b/bson/src/main/org/bson/BSON.java @@ -16,10 +16,6 @@ package org.bson; -import org.bson.util.ClassMap; - -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.regex.Pattern; /** @@ -28,42 +24,11 @@ * * @see org.bson.Transformer */ -public class BSON { - - public static final byte EOO = 0; - public static final byte NUMBER = 1; - public static final byte STRING = 2; - public static final byte OBJECT = 3; - public static final byte ARRAY = 4; - public static final byte BINARY = 5; - public static final byte UNDEFINED = 6; - public static final byte OID = 7; - public static final byte BOOLEAN = 8; - public static final byte DATE = 9; - public static final byte NULL = 10; - public static final byte REGEX = 11; - public static final byte REF = 12; - public static final byte CODE = 13; - public static final byte SYMBOL = 14; - public static final byte CODE_W_SCOPE = 15; - public static final byte NUMBER_INT = 16; - public static final byte TIMESTAMP = 17; - public static final byte NUMBER_LONG = 18; +class BSON { - public static final byte MINKEY = -1; - public static final byte MAXKEY = 127; - // --- binary types - /* - these are binary types - so the format would look like - <...> - */ - - public static final byte B_GENERAL = 0; - public static final byte B_FUNC = 1; - public static final byte B_BINARY = 2; - public static final byte B_UUID = 3; + static final byte B_GENERAL = 0; + static final byte B_BINARY = 2; // --- regex flags @@ -83,209 +48,6 @@ public class BSON { FLAG_LOOKUP['u'] = Pattern.UNICODE_CASE; } - private static volatile boolean encodeHooks = false; - private static volatile boolean decodeHooks = false; - private static final ClassMap> encodingHooks = new 
ClassMap>(); - private static final ClassMap> decodingHooks = new ClassMap>(); - - /** - * Gets whether any encoding transformers have been registered for any classes. - * - * @return true if any encoding hooks have been registered. - */ - public static boolean hasEncodeHooks() { - return encodeHooks; - } - - /** - * Gets whether any decoding transformers have been registered for any classes. - * - * @return true if any decoding hooks have been registered. - */ - public static boolean hasDecodeHooks() { - return decodeHooks; - } - - /** - * Registers a {@code Transformer} to use to encode a specific class into BSON. - * - * @param clazz the class to be transformed during encoding - * @param transformer the transformer to use during encoding - */ - public static void addEncodingHook(final Class clazz, final Transformer transformer) { - encodeHooks = true; - List transformersForClass = encodingHooks.get(clazz); - if (transformersForClass == null) { - transformersForClass = new CopyOnWriteArrayList(); - encodingHooks.put(clazz, transformersForClass); - } - transformersForClass.add(transformer); - } - - /** - * Registers a {@code Transformer} to use when decoding a specific class from BSON. This class will be one of the basic types supported - * by BSON. - * - * @param clazz the class to be transformed during decoding - * @param transformer the transformer to use during decoding - */ - public static void addDecodingHook(final Class clazz, final Transformer transformer) { - decodeHooks = true; - List transformersForClass = decodingHooks.get(clazz); - if (transformersForClass == null) { - transformersForClass = new CopyOnWriteArrayList(); - decodingHooks.put(clazz, transformersForClass); - } - transformersForClass.add(transformer); - } - - /** - * Transforms the {@code objectToEncode} using all transformers registered for the class of this object. - * - * @param objectToEncode the object being written to BSON. 
- * @return the transformed object - */ - public static Object applyEncodingHooks(final Object objectToEncode) { - Object transformedObject = objectToEncode; - if (!hasEncodeHooks() || objectToEncode == null || encodingHooks.size() == 0) { - return transformedObject; - } - List transformersForObject = encodingHooks.get(objectToEncode.getClass()); - if (transformersForObject != null) { - for (final Transformer transformer : transformersForObject) { - transformedObject = transformer.transform(objectToEncode); - } - } - return transformedObject; - } - - /** - * Transforms the {@code objectToDecode} using all transformers registered for the class of this object. - * - * @param objectToDecode the BSON object to decode - * @return the transformed object - */ - public static Object applyDecodingHooks(final Object objectToDecode) { - Object transformedObject = objectToDecode; - if (!hasDecodeHooks() || objectToDecode == null || decodingHooks.size() == 0) { - return transformedObject; - } - - List transformersForObject = decodingHooks.get(objectToDecode.getClass()); - if (transformersForObject != null) { - for (final Transformer transformer : transformersForObject) { - transformedObject = transformer.transform(objectToDecode); - } - } - return transformedObject; - } - - /** - * Returns the encoding hook(s) associated with the specified class. - * - * @param clazz the class to fetch the encoding hooks for - * @return a List of encoding transformers that apply to the given class - */ - public static List getEncodingHooks(final Class clazz) { - return encodingHooks.get(clazz); - } - - /** - * Clears all encoding hooks. - */ - public static void clearEncodingHooks() { - encodeHooks = false; - encodingHooks.clear(); - } - - /** - * Remove all encoding hooks for a specific class. 
- * - * @param clazz the class to remove all the decoding hooks for - */ - public static void removeEncodingHooks(final Class clazz) { - encodingHooks.remove(clazz); - } - - /** - * Remove a specific encoding hook for a specific class. The {@code transformer} passed as the parameter must be {@code equals} to the - * transformer to remove. - * - * @param clazz the class to remove the encoding hook for - * @param transformer the specific encoding hook to remove. - */ - public static void removeEncodingHook(final Class clazz, final Transformer transformer) { - getEncodingHooks(clazz).remove(transformer); - } - - /** - * Returns the decoding hook(s) associated with the specific class - * - * @param clazz the class to fetch the decoding hooks for - * @return a List of all the decoding Transformers that apply to the given class - */ - public static List getDecodingHooks(final Class clazz) { - return decodingHooks.get(clazz); - } - - /** - * Clears all decoding hooks. - */ - public static void clearDecodingHooks() { - decodeHooks = false; - decodingHooks.clear(); - } - - /** - * Remove all decoding hooks for a specific class. - * - * @param clazz the class to remove all the decoding hooks for - */ - public static void removeDecodingHooks(final Class clazz) { - decodingHooks.remove(clazz); - } - - /** - * Remove a specific encoding hook for a specific class. The {@code transformer} passed as the parameter must be {@code equals} to the - * transformer to remove. - * - * @param clazz the class to remove the decoding hook for - * @param transformer the specific decoding hook to remove. - */ - public static void removeDecodingHook(final Class clazz, final Transformer transformer) { - getDecodingHooks(clazz).remove(transformer); - } - - /** - * Remove all decoding and encoding hooks for all classes. 
- */ - public static void clearAllHooks() { - clearEncodingHooks(); - clearDecodingHooks(); - } - - // ----- static encode/decode ----- - - /** - * Encodes a DBObject as a BSON byte array. - * - * @param doc the document to encode - * @return the document encoded as BSON - */ - public static byte[] encode(final BSONObject doc) { - return new BasicBSONEncoder().encode(doc); - } - - /** - * Decodes a BSON byte array into a DBObject instance. - * - * @param bytes a document encoded as BSON - * @return the document as a DBObject - */ - public static BSONObject decode(final byte[] bytes) { - return new BasicBSONDecoder().readObject(bytes); - } - /** * Converts a sequence of regular expression modifiers from the database into Java regular expression flags. * @@ -293,7 +55,7 @@ public static BSONObject decode(final byte[] bytes) { * @return the Java flags * @throws IllegalArgumentException If sequence contains invalid flags. */ - public static int regexFlags(final String s) { + static int regexFlags(final String s) { int flags = 0; if (s == null) { @@ -314,7 +76,7 @@ public static int regexFlags(final String s) { * @return the Java flags * @throws IllegalArgumentException If sequence contains invalid flags. */ - public static int regexFlag(final char c) { + private static int regexFlag(final char c) { int flag = FLAG_LOOKUP[c]; @@ -332,7 +94,7 @@ public static int regexFlag(final char c) { * @return the Java flags * @throws IllegalArgumentException if some flags couldn't be recognized. */ - public static String regexFlags(final int flags) { + static String regexFlags(final int flags) { int processedFlags = flags; StringBuilder buf = new StringBuilder(); @@ -349,28 +111,4 @@ public static String regexFlags(final int flags) { return buf.toString(); } - - /** - * Provides an integer representation of Boolean or Number. If argument is {@link Boolean}, then {@code 1} for {@code true} will be - * returned or @{code 0} otherwise. 
If argument is {@code Number}, then {@link Number#intValue()} will be called. - * - * @param number the number to convert to an int - * @return integer value - * @throws IllegalArgumentException if the argument is {@code null} or not {@link Boolean} or {@link Number} - */ - public static int toInt(final Object number) { - if (number == null) { - throw new IllegalArgumentException("Argument shouldn't be null"); - } - - if (number instanceof Number) { - return ((Number) number).intValue(); - } - - if (number instanceof Boolean) { - return ((Boolean) number) ? 1 : 0; - } - - throw new IllegalArgumentException("Can't convert: " + number.getClass().getName() + " to int"); - } } diff --git a/bson/src/main/org/bson/BSONCallback.java b/bson/src/main/org/bson/BSONCallback.java index 0173d5538e2..007c34265a7 100644 --- a/bson/src/main/org/bson/BSONCallback.java +++ b/bson/src/main/org/bson/BSONCallback.java @@ -22,7 +22,7 @@ /** * A callback interface for describing the structure of a BSON document. Implementations of this define how to turn BSON read from MongoDB * into Java objects. - * + *

* See the BSON Spec. */ public interface BSONCallback { @@ -223,16 +223,6 @@ public interface BSONCallback { */ void gotDBRef(String name, String namespace, ObjectId id); - /** - * This method is not used. - * - * @param name the name of the field - * @param data the field's value - * @deprecated - */ - @Deprecated - void gotBinaryArray(String name, byte[] data); - /** * Called when reading a field with a {@link org.bson.BsonType#BINARY} value. Note that binary values have a subtype, which may * determine how the value is processed. diff --git a/bson/src/main/org/bson/BSONCallbackAdapter.java b/bson/src/main/org/bson/BSONCallbackAdapter.java index cc6d192a3db..1d8b5ffe746 100644 --- a/bson/src/main/org/bson/BSONCallbackAdapter.java +++ b/bson/src/main/org/bson/BSONCallbackAdapter.java @@ -16,10 +16,13 @@ package org.bson; +import org.bson.internal.UuidHelper; import org.bson.types.Decimal128; import org.bson.types.ObjectId; -import static org.bson.io.Bits.readLong; +import java.util.UUID; + +import static org.bson.BasicBSONDecoder.getDefaultUuidRepresentation; class BSONCallbackAdapter extends AbstractBsonWriter { @@ -36,11 +39,6 @@ protected BSONCallbackAdapter(final BsonWriterSettings settings, final BSONCallb this.bsonCallback = bsonCallback; } - @Override - public void flush() { - //Looks like should be no-op? 
- } - @Override public void doWriteStartDocument() { BsonContextType contextType = getState() == State.SCOPE_DOCUMENT @@ -83,10 +81,18 @@ protected void doWriteEndArray() { @Override protected void doWriteBinaryData(final BsonBinary value) { - if (value.getType() == BsonBinarySubType.UUID_LEGACY.getValue()) { - bsonCallback.gotUUID(getName(), - readLong(value.getData(), 0), - readLong(value.getData(), 8)); + if (BsonBinarySubType.isUuid(value.getType())) { + doWriteUuid(value); + } else { + bsonCallback.gotBinary(getName(), value.getType(), value.getData()); + } + } + + private void doWriteUuid(final BsonBinary value) { + UuidRepresentation defaultUuidRepresentation = getDefaultUuidRepresentation(); + if (value.getType() == defaultUuidRepresentation.getSubtype().getValue()) { + UUID uuid = UuidHelper.decodeBinaryToUuid(value.getData(), value.getType(), defaultUuidRepresentation); + bsonCallback.gotUUID(getName(), uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()); } else { bsonCallback.gotBinary(getName(), value.getType(), value.getData()); } diff --git a/bson/src/main/org/bson/BSONException.java b/bson/src/main/org/bson/BSONException.java index dd445268a03..6b53a6c8bd0 100644 --- a/bson/src/main/org/bson/BSONException.java +++ b/bson/src/main/org/bson/BSONException.java @@ -18,6 +18,7 @@ /** * A general runtime exception raised in BSON processing. + * @serial exclude */ public class BSONException extends RuntimeException { diff --git a/bson/src/main/org/bson/BSONObject.java b/bson/src/main/org/bson/BSONObject.java index 277cbe4e000..55863736793 100644 --- a/bson/src/main/org/bson/BSONObject.java +++ b/bson/src/main/org/bson/BSONObject.java @@ -30,8 +30,8 @@ public interface BSONObject { * * @param key Name to set * @param v Corresponding value - * @return the previous value associated with key, or null if there was no mapping for key. (A null - * return can also indicate that the map previously associated null with key.) 
+ * @return the previous value associated with {@code key}, or {@code null} if there was no mapping for {@code key}. (A + * {@code null} return can also indicate that the map previously associated {@code null} with {@code key}.) */ Object put(String key, Object v); @@ -72,16 +72,6 @@ public interface BSONObject { */ Object removeField(String key); - /** - * Deprecated - * - * @param key the key to check - * @return True if the key is present - * @deprecated Please use {@link #containsField(String)} instead - */ - @Deprecated - boolean containsKey(String key); - /** * Checks if this object contains a field with the given name. * diff --git a/bson/src/main/org/bson/BasicBSONCallback.java b/bson/src/main/org/bson/BasicBSONCallback.java index a05f31fe661..9990749a66f 100644 --- a/bson/src/main/org/bson/BasicBSONCallback.java +++ b/bson/src/main/org/bson/BasicBSONCallback.java @@ -47,8 +47,8 @@ public class BasicBSONCallback implements BSONCallback { * Creates a new instance. */ public BasicBSONCallback() { - stack = new LinkedList(); - nameStack = new LinkedList(); + stack = new LinkedList<>(); + nameStack = new LinkedList<>(); reset(); } @@ -117,7 +117,7 @@ public Object objectDone() { throw new IllegalStateException("Illegal object end in current context."); } - return !BSON.hasDecodeHooks() ? 
o : (BSONObject) BSON.applyDecodingHooks(o); + return o; } @Override @@ -218,12 +218,6 @@ public void gotDBRef(final String name, final String namespace, final ObjectId i _put(name, new BasicBSONObject("$ns", namespace).append("$id", id)); } - @Deprecated - @Override - public void gotBinaryArray(final String name, final byte[] data) { - gotBinary(name, BSON.B_GENERAL, data); - } - @Override public void gotBinary(final String name, final byte type, final byte[] data) { if (type == BSON.B_GENERAL || type == BSON.B_BINARY) { @@ -255,7 +249,7 @@ public void gotCodeWScope(final String name, final String code, final Object sco * @param value the value */ protected void _put(final String name, final Object value) { - cur().put(name, !BSON.hasDecodeHooks() ? value : BSON.applyDecodingHooks(value)); + cur().put(name, value); } /** diff --git a/bson/src/main/org/bson/BasicBSONDecoder.java b/bson/src/main/org/bson/BasicBSONDecoder.java index 7233aaf2396..35c44ea6033 100644 --- a/bson/src/main/org/bson/BasicBSONDecoder.java +++ b/bson/src/main/org/bson/BasicBSONDecoder.java @@ -16,18 +16,61 @@ package org.bson; -import org.bson.io.Bits; import org.bson.io.ByteBufferBsonInput; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; +import static org.bson.assertions.Assertions.notNull; + /** * Basic implementation of BSONDecoder interface that creates BasicBSONObject instances */ public class BasicBSONDecoder implements BSONDecoder { + /** + * Sets the global (JVM-wide) {@link UuidRepresentation} to use when decoding BSON binary values with subtypes of either + * {@link BsonBinarySubType#UUID_STANDARD} or {@link BsonBinarySubType#UUID_LEGACY}. + * + *

+ * If the {@link BsonBinarySubType} of the value to be decoded matches the binary subtype of the {@link UuidRepresentation}, + * then the value will be decoded to an instance of {@link java.util.UUID}, according to the semantics of the + * {@link UuidRepresentation}. Otherwise, it will be decoded to an instance of {@link org.bson.types.Binary}. + *

+ * + *

+ * Defaults to {@link UuidRepresentation#JAVA_LEGACY}. If set to {@link UuidRepresentation#UNSPECIFIED}, attempting to decode any + * UUID will throw a {@link BSONException}. + *

+ * + * @param uuidRepresentation the uuid representation, which may not be null + * @see BSONCallback#gotUUID(String, long, long) + * @see BasicBSONEncoder#setDefaultUuidRepresentation(UuidRepresentation) + * @since 4.7 + */ + public static void setDefaultUuidRepresentation(final UuidRepresentation uuidRepresentation) { + defaultUuidRepresentation = notNull("uuidRepresentation", uuidRepresentation); + } + + /** + * Gets the default {@link UuidRepresentation} to use when decoding BSON binary values. + * + *

+ * If unset, the default is {@link UuidRepresentation#JAVA_LEGACY}. + *

+ * + * @return the uuid representation, which may not be null + * @see BSONCallback#gotUUID(String, long, long) + * @see BasicBSONEncoder#setDefaultUuidRepresentation(UuidRepresentation) + * @since 4.7 + */ + public static UuidRepresentation getDefaultUuidRepresentation() { + return defaultUuidRepresentation; + } + + private static volatile UuidRepresentation defaultUuidRepresentation = UuidRepresentation.JAVA_LEGACY; + @Override public BSONObject readObject(final byte[] bytes) { BSONCallback bsonCallback = new BasicBSONCallback(); @@ -42,13 +85,10 @@ public BSONObject readObject(final InputStream in) throws IOException { @Override public int decode(final byte[] bytes, final BSONCallback callback) { - BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(bytes)))); - try { + try (BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(bytes))))) { BsonWriter writer = new BSONCallbackAdapter(new BsonWriterSettings(), callback); writer.pipe(reader); return reader.getBsonInput().getPosition(); //TODO check this. - } finally { - reader.close(); } } diff --git a/bson/src/main/org/bson/BasicBSONEncoder.java b/bson/src/main/org/bson/BasicBSONEncoder.java index a74ace47d30..d7a90afe480 100644 --- a/bson/src/main/org/bson/BasicBSONEncoder.java +++ b/bson/src/main/org/bson/BasicBSONEncoder.java @@ -16,6 +16,7 @@ package org.bson; +import org.bson.internal.UuidHelper; import org.bson.io.BasicOutputBuffer; import org.bson.io.OutputBuffer; import org.bson.types.BSONTimestamp; @@ -37,13 +38,48 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; -import static org.bson.BSON.regexFlags; +import static org.bson.assertions.Assertions.notNull; /** * This is meant to be pooled or cached. There is some per instance memory for string conversion, etc... 
*/ public class BasicBSONEncoder implements BSONEncoder { + /** + * Sets the global (JVM-wide) {@link UuidRepresentation} to use when encoding UUID values to BSON binary. + * + *

+ * Defaults to {@link UuidRepresentation#JAVA_LEGACY}. If set to {@link UuidRepresentation#UNSPECIFIED}, attempting to encode any + * UUID will throw a {@link BSONException}. + *

+ * @param uuidRepresentation the uuid representation, which may not be null + * @see #putUUID(String, UUID) + * @see BasicBSONDecoder#setDefaultUuidRepresentation(UuidRepresentation) + * @since 4.7 + */ + public static void setDefaultUuidRepresentation(final UuidRepresentation uuidRepresentation) { + defaultUuidRepresentation = notNull("uuidRepresentation", uuidRepresentation); + } + + /** + * Gets the default {@link UuidRepresentation} to use when encoding UUID values to BSON binary. + * + *

+ * If unset, the default is {@link UuidRepresentation#JAVA_LEGACY}. + *

+ * + * @return the uuid representation, which may not be null + * @see #putUUID(String, UUID) + * @see BasicBSONDecoder#setDefaultUuidRepresentation(UuidRepresentation) + * @since 4.7 + */ + public static UuidRepresentation getDefaultUuidRepresentation() { + return defaultUuidRepresentation; + } + + private static volatile UuidRepresentation defaultUuidRepresentation = UuidRepresentation.JAVA_LEGACY; + private BsonBinaryWriter bsonWriter; private OutputBuffer outputBuffer; @@ -133,9 +169,9 @@ protected void putName(final String name) { * Encodes any Object type * * @param name the field name - * @param initialValue the value to write + * @param value the value to write */ - protected void _putObjectField(final String name, final Object initialValue) { + protected void _putObjectField(final String name, final Object value) { if ("_transientFields".equals(name)) { return; } @@ -143,20 +179,18 @@ protected void _putObjectField(final String name, final Object initialValue) { throw new IllegalArgumentException("Document field names can't have a NULL character. 
(Bad Key: '" + name + "')"); } - if ("$where".equals(name) && initialValue instanceof String) { - putCode(name, new Code((String) initialValue)); + if ("$where".equals(name) && value instanceof String) { + putCode(name, new Code((String) value)); } - Object value = BSON.applyEncodingHooks(initialValue); - if (value == null) { putNull(name); } else if (value instanceof Date) { putDate(name, (Date) value); - } else if (value instanceof Number) { - putNumber(name, (Number) value); } else if (value instanceof Decimal128) { putDecimal128(name, (Decimal128) value); + } else if (value instanceof Number) { + putNumber(name, (Number) value); } else if (value instanceof Character) { putString(name, value.toString()); } else if (value instanceof String) { @@ -346,10 +380,11 @@ protected void putBinary(final String name, final Binary binary) { */ protected void putUUID(final String name, final UUID uuid) { putName(name); - byte[] bytes = new byte[16]; - writeLongToArrayLittleEndian(bytes, 0, uuid.getMostSignificantBits()); - writeLongToArrayLittleEndian(bytes, 8, uuid.getLeastSignificantBits()); - bsonWriter.writeBinaryData(new BsonBinary(BsonBinarySubType.UUID_LEGACY, bytes)); + UuidRepresentation uuidRepresentation = defaultUuidRepresentation; + byte[] bytes = UuidHelper.encodeUuidToBinary(uuid, uuidRepresentation); + bsonWriter.writeBinaryData(new BsonBinary( + uuidRepresentation == UuidRepresentation.STANDARD ? 
BsonBinarySubType.UUID_STANDARD : BsonBinarySubType.UUID_LEGACY, + bytes)); } /** @@ -386,7 +421,7 @@ protected void putString(final String name, final String value) { */ protected void putPattern(final String name, final Pattern value) { putName(name); - bsonWriter.writeRegularExpression(new BsonRegularExpression(value.pattern(), regexFlags(value.flags()))); + bsonWriter.writeRegularExpression(new BsonRegularExpression(value.pattern(), org.bson.BSON.regexFlags(value.flags()))); } /** diff --git a/bson/src/main/org/bson/BasicBSONObject.java b/bson/src/main/org/bson/BasicBSONObject.java index 2a354fc636c..d247bce7d77 100644 --- a/bson/src/main/org/bson/BasicBSONObject.java +++ b/bson/src/main/org/bson/BasicBSONObject.java @@ -78,7 +78,7 @@ public BasicBSONObject(final Map map) { * @return the DBObject */ public Map toMap() { - return new LinkedHashMap(this); + return new LinkedHashMap<>(this); } /** @@ -98,13 +98,7 @@ public Object removeField(final String key) { * @return if the field exists */ public boolean containsField(final String field) { - return super.containsKey(field); - } - - @Deprecated - @Override - public boolean containsKey(final String key) { - return containsField(key); + return containsKey(field); } /** @@ -388,8 +382,8 @@ private static Object canonicalize(final Object from) { } private static Map canonicalizeMap(final Map from) { - Map canonicalized = new LinkedHashMap(from.size()); - TreeSet keysInOrder = new TreeSet(from.keySet()); + Map canonicalized = new LinkedHashMap<>(from.size()); + TreeSet keysInOrder = new TreeSet<>(from.keySet()); for (String key : keysInOrder) { Object val = from.get(key); canonicalized.put(key, canonicalize(val)); @@ -399,7 +393,7 @@ private static Map canonicalizeMap(final Map fro private static BasicBSONObject canonicalizeBSONObject(final BSONObject from) { BasicBSONObject canonicalized = new BasicBSONObject(); - TreeSet keysInOrder = new TreeSet(from.keySet()); + TreeSet keysInOrder = new 
TreeSet<>(from.keySet()); for (String key : keysInOrder) { Object val = from.get(key); canonicalized.put(key, canonicalize(val)); @@ -408,7 +402,7 @@ private static BasicBSONObject canonicalizeBSONObject(final BSONObject from) { } private static List canonicalizeList(final List list) { - List canonicalized = new ArrayList(list.size()); + List canonicalized = new ArrayList<>(list.size()); for (Object cur : list) { canonicalized.add(canonicalize(cur)); } diff --git a/bson/src/main/org/bson/BinaryVector.java b/bson/src/main/org/bson/BinaryVector.java new file mode 100644 index 00000000000..273b4a0e5e9 --- /dev/null +++ b/bson/src/main/org/bson/BinaryVector.java @@ -0,0 +1,201 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson; + +import org.bson.annotations.Beta; +import org.bson.annotations.Reason; + +import static org.bson.assertions.Assertions.isTrueArgument; +import static org.bson.assertions.Assertions.notNull; + +/** + * Binary Vectors are densely packed arrays of numbers, all the same type, which are stored and retrieved efficiently using the BSON Binary + * Subtype 9 format. This class supports multiple vector {@link DataType}'s and provides static methods to create vectors. + *

+ * NOTE: This class should be treated as sealed: it must not be extended or implemented by consumers of the library. + * + * @mongodb.server.release 6.0 + * @see BsonBinary + * @since 5.3 + */ +public abstract class BinaryVector { + private final DataType dataType; + + BinaryVector(final DataType dataType) { + this.dataType = dataType; + } + + /** + * Creates a vector with the {@link DataType#PACKED_BIT} data type. + *

+ * A {@link DataType#PACKED_BIT} vector is a binary quantized vector where each element of a vector is represented by a single bit (0 or 1). Each byte + * can hold up to 8 bits (vector elements). The padding parameter is used to specify how many least-significant bits in the final byte + * should be ignored.

+ * + *

For example, a vector with two bytes and a padding of 4 would have the following structure:

+ *
+     * Byte 1: 238 (binary: 11101110)
+     * Byte 2: 224 (binary: 11100000)
+     * Padding: 4 (ignore the last 4 bits in Byte 2)
+     * Resulting vector: 12 bits: 111011101110
+     * 
+ *

+ * NOTE: The byte array `data` is not copied; changes to the provided array will be reflected + * in the created {@link PackedBitBinaryVector} instance. + * + * @param data The byte array representing the packed bit vector data. Each byte can store 8 bits. + * @param padding The number of least-significant bits (0 to 7) to ignore in the final byte of the vector data. + * @return A {@link PackedBitBinaryVector} instance with the {@link DataType#PACKED_BIT} data type. + * @throws IllegalArgumentException If the padding value is greater than 7. + */ + @Beta(Reason.SERVER) + public static PackedBitBinaryVector packedBitVector(final byte[] data, final byte padding) { + notNull("data", data); + isTrueArgument("Padding must be between 0 and 7 bits. Provided padding: " + padding, padding >= 0 && padding <= 7); + isTrueArgument("Padding must be 0 if vector is empty. Provided padding: " + padding, padding == 0 || data.length > 0); + return new PackedBitBinaryVector(data, padding); + } + + /** + * Creates a vector with the {@link DataType#INT8} data type. + * + *

A {@link DataType#INT8} vector is a vector of 8-bit signed integers where each byte in the vector represents an element of a vector, + * with values in the range [-128, 127].

+ *

+ * NOTE: The byte array `data` is not copied; changes to the provided array will be reflected + * in the created {@link Int8BinaryVector} instance. + * + * @param data The byte array representing the {@link DataType#INT8} vector data. + * @return A {@link Int8BinaryVector} instance with the {@link DataType#INT8} data type. + */ + public static Int8BinaryVector int8Vector(final byte[] data) { + notNull("data", data); + return new Int8BinaryVector(data); + } + + /** + * Creates a vector with the {@link DataType#FLOAT32} data type. + *

+ * A {@link DataType#FLOAT32} vector is a vector of floating-point numbers, where each element in the vector is a float.

+ *

+ * NOTE: The float array `data` is not copied; changes to the provided array will be reflected + * in the created {@link Float32BinaryVector} instance. + * + * @param data The float array representing the {@link DataType#FLOAT32} vector data. + * @return A {@link Float32BinaryVector} instance with the {@link DataType#FLOAT32} data type. + */ + public static Float32BinaryVector floatVector(final float[] data) { + notNull("data", data); + return new Float32BinaryVector(data); + } + + /** + * Returns the {@link PackedBitBinaryVector}. + * + * @return {@link PackedBitBinaryVector}. + * @throws IllegalStateException if this vector is not of type {@link DataType#PACKED_BIT}. Use {@link #getDataType()} to check the vector + * type before calling this method. + */ + public PackedBitBinaryVector asPackedBitVector() { + ensureType(DataType.PACKED_BIT); + return (PackedBitBinaryVector) this; + } + + /** + * Returns the {@link Int8BinaryVector}. + * + * @return {@link Int8BinaryVector}. + * @throws IllegalStateException if this vector is not of type {@link DataType#INT8}. Use {@link #getDataType()} to check the vector + * type before calling this method. + */ + public Int8BinaryVector asInt8Vector() { + ensureType(DataType.INT8); + return (Int8BinaryVector) this; + } + + /** + * Returns the {@link Float32BinaryVector}. + * + * @return {@link Float32BinaryVector}. + * @throws IllegalStateException if this vector is not of type {@link DataType#FLOAT32}. Use {@link #getDataType()} to check the vector + * type before calling this method. + */ + public Float32BinaryVector asFloat32Vector() { + ensureType(DataType.FLOAT32); + return (Float32BinaryVector) this; + } + + /** + * Returns {@link DataType} of the vector. + * + * @return the data type of the vector. 
+ */ + public DataType getDataType() { + return this.dataType; + } + + + private void ensureType(final DataType expected) { + if (this.dataType != expected) { + throw new IllegalStateException("Expected vector data type " + expected + ", but found " + this.dataType); + } + } + + /** + * Represents the data type (dtype) of a vector. + *

+ * Each dtype determines how the data in the vector is stored, including how many bits are used to represent each element + * in the vector. + * + * @mongodb.server.release 6.0 + * @since 5.3 + */ + public enum DataType { + /** + * An INT8 vector is a vector of 8-bit signed integers. The vector is stored as an array of bytes, where each byte + * represents a signed integer in the range [-128, 127]. + */ + INT8((byte) 0x03), + /** + * A FLOAT32 vector is a vector of 32-bit floating-point numbers, where each element in the vector is a float. + */ + FLOAT32((byte) 0x27), + /** + * A PACKED_BIT vector is a binary quantized vector where each element of a vector is represented by a single bit (0 or 1). + * Each byte can hold up to 8 bits (vector elements). + */ + PACKED_BIT((byte) 0x10); + + private final byte value; + + DataType(final byte value) { + this.value = value; + } + + /** + * Returns the byte value associated with this {@link DataType}. + * + *

This value is used in the BSON binary format to indicate the data type of the vector.

+ * + * @return the byte value representing the {@link DataType}. + */ + public byte getValue() { + return value; + } + } +} + diff --git a/bson/src/main/org/bson/Bits.java b/bson/src/main/org/bson/Bits.java new file mode 100644 index 00000000000..55c79222fc9 --- /dev/null +++ b/bson/src/main/org/bson/Bits.java @@ -0,0 +1,163 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; + +/** + * Utility class for reading values from an input stream. + */ +class Bits { + + /** + * Reads bytes from the input stream and puts them into the given byte buffer. The equivalent of calling + * {@link #readFully(java.io.InputStream, byte[], int, int)} with an offset of zero and a length equal to the length of the buffer. + * + * @param inputStream the input stream to read from + * @param buffer the buffer into which the data is read. + * @throws IOException if there's an error reading from the {@code inputStream} + */ + static void readFully(final InputStream inputStream, final byte[] buffer) + throws IOException { + readFully(inputStream, buffer, 0, buffer.length); + } + + /** + * Reads bytes from the input stream and puts them into the given byte buffer. + * + * @param inputStream the input stream to read from + * @param buffer the buffer into which the data is read. 
+ * @param offset the start offset in array {@code buffer} at which the data is written. + * @param length the maximum number of bytes to read. + * @throws IOException if there's an error reading from the {@code inputStream} + * @see java.io.InputStream#read(byte[], int, int) + */ + static void readFully(final InputStream inputStream, final byte[] buffer, final int offset, final int length) + throws IOException { + if (buffer.length < length + offset) { + throw new IllegalArgumentException("Buffer is too small"); + } + + int arrayOffset = offset; + int bytesToRead = length; + while (bytesToRead > 0) { + int bytesRead = inputStream.read(buffer, arrayOffset, bytesToRead); + if (bytesRead < 0) { + throw new EOFException(); + } + bytesToRead -= bytesRead; + arrayOffset += bytesRead; + } + } + + /** + * Reads and returns a single integer value from the input stream. + * + * @param inputStream the input stream to read from + * @param buffer the buffer to write the input stream bytes into + * @return the integer value + * @throws IOException if there's an error reading from the {@code inputStream} + */ + static int readInt(final InputStream inputStream, final byte[] buffer) throws IOException { + readFully(inputStream, buffer, 0, 4); + return readInt(buffer); + } + + /** + * Reads and returns a single integer value from the buffer. The equivalent of calling {@link #readInt(byte[], int)} + * with an offset of zero. + * + * @param buffer the buffer to read from + * @return the integer value + */ + static int readInt(final byte[] buffer) { + return readInt(buffer, 0); + } + + /** + * Reads and returns a single integer value from the buffer. 
+ * + * @param buffer the buffer to read from + * @param offset the position to start reading from the buffer + * @return the integer value + */ + static int readInt(final byte[] buffer, final int offset) { + int x = 0; + x |= (0xFF & buffer[offset]) << 0; + x |= (0xFF & buffer[offset + 1]) << 8; + x |= (0xFF & buffer[offset + 2]) << 16; + x |= (0xFF & buffer[offset + 3]) << 24; + return x; + } + + /** + * Reads and returns a single long value from the input stream. + * + * @param inputStream the input stream to read from + * @return the long value + * @throws IOException if there's an error reading from the {@code inputStream} + */ + static long readLong(final InputStream inputStream) throws IOException { + return readLong(inputStream, new byte[8]); + } + + /** + * Reads and returns a single long value from the input stream. + * + * @param inputStream the input stream to read from + * @param buffer the buffer to write the input stream bytes into + * @return the long value + * @throws IOException if there's an error reading from the {@code inputStream} + */ + static long readLong(final InputStream inputStream, final byte[] buffer) throws IOException { + readFully(inputStream, buffer, 0, 8); + return readLong(buffer); + } + + /** + * Reads and returns a single long value from the buffer. The equivalent of called {@link #readLong(byte[], int)} with an offset of + * zero. + * + * @param buffer the buffer to read from + * @return the long value + */ + static long readLong(final byte[] buffer) { + return readLong(buffer, 0); + } + + /** + * Reads and returns a single long value from the buffer. 
+ * + * @param buffer the buffer to read from + * @param offset the position to start reading from the buffer + * @return the long value + */ + static long readLong(final byte[] buffer, final int offset) { + long x = 0; + x |= (0xFFL & buffer[offset]) << 0; + x |= (0xFFL & buffer[offset + 1]) << 8; + x |= (0xFFL & buffer[offset + 2]) << 16; + x |= (0xFFL & buffer[offset + 3]) << 24; + x |= (0xFFL & buffer[offset + 4]) << 32; + x |= (0xFFL & buffer[offset + 5]) << 40; + x |= (0xFFL & buffer[offset + 6]) << 48; + x |= (0xFFL & buffer[offset + 7]) << 56; + return x; + } +} diff --git a/bson/src/main/org/bson/BsonArray.java b/bson/src/main/org/bson/BsonArray.java index f914e05567b..876858b01b0 100644 --- a/bson/src/main/org/bson/BsonArray.java +++ b/bson/src/main/org/bson/BsonArray.java @@ -49,13 +49,25 @@ public BsonArray(final List values) { * Construct an empty BsonArray */ public BsonArray() { - this(new ArrayList(), false); + this(new ArrayList<>(), false); + } + + /** + * Construct an empty BsonArray with the specified initial capacity. 
+ * + * @param initialCapacity the initial capacity of the BsonArray + * @throws IllegalArgumentException if the specified initial capacity + * is negative + * @since 4.3 + */ + public BsonArray(final int initialCapacity) { + this(new ArrayList<>(initialCapacity), false); } @SuppressWarnings("unchecked") BsonArray(final List values, final boolean copy) { if (copy) { - this.values = new ArrayList(values); + this.values = new ArrayList<>(values); } else { this.values = (List) values; } @@ -225,13 +237,13 @@ public int hashCode() { @Override public String toString() { return "BsonArray{" - + "values=" + values + + "values=" + getValues() + '}'; } @Override public BsonArray clone() { - BsonArray to = new BsonArray(); + BsonArray to = new BsonArray(this.size()); for (BsonValue cur : this) { switch (cur.getBsonType()) { case DOCUMENT: diff --git a/bson/src/main/org/bson/BsonBinary.java b/bson/src/main/org/bson/BsonBinary.java index eb4fabca8c2..833a1b5ad29 100644 --- a/bson/src/main/org/bson/BsonBinary.java +++ b/bson/src/main/org/bson/BsonBinary.java @@ -16,7 +16,14 @@ package org.bson; +import org.bson.assertions.Assertions; +import org.bson.internal.UuidHelper; +import org.bson.internal.vector.BinaryVectorHelper; + import java.util.Arrays; +import java.util.UUID; + +import static org.bson.internal.vector.BinaryVectorHelper.encodeVectorToBinary; /** * A representation of the BSON Binary type. Note that for performance reasons instances of this class are not immutable, @@ -75,6 +82,104 @@ public BsonBinary(final byte type, final byte[] data) { this.data = data; } + /** + * Construct a Type 4 BsonBinary from the given UUID. + * + * @param uuid the UUID + * @since 3.9 + */ + public BsonBinary(final UUID uuid) { + this(uuid, UuidRepresentation.STANDARD); + } + + /** + * Constructs a {@linkplain BsonBinarySubType#VECTOR subtype 9} {@link BsonBinary} from the given {@link BinaryVector}. 
+ * + * @param vector the {@link BinaryVector} + * @since 5.3 + */ + public BsonBinary(final BinaryVector vector) { + if (vector == null) { + throw new IllegalArgumentException("Vector must not be null"); + } + this.data = encodeVectorToBinary(vector); + type = BsonBinarySubType.VECTOR.getValue(); + } + + /** + * Construct a new instance from the given UUID and UuidRepresentation + * + * @param uuid the UUID + * @param uuidRepresentation the UUID representation + * @since 3.9 + */ + public BsonBinary(final UUID uuid, final UuidRepresentation uuidRepresentation) { + if (uuid == null) { + throw new IllegalArgumentException("uuid may not be null"); + } + if (uuidRepresentation == null) { + throw new IllegalArgumentException("uuidRepresentation may not be null"); + } + this.data = UuidHelper.encodeUuidToBinary(uuid, uuidRepresentation); + this.type = uuidRepresentation == UuidRepresentation.STANDARD + ? BsonBinarySubType.UUID_STANDARD.getValue() + : BsonBinarySubType.UUID_LEGACY.getValue(); + } + + /** + * Returns the binary as a UUID. The binary type must be 4. + * + * @return the uuid + * @since 3.9 + */ + public UUID asUuid() { + if (!BsonBinarySubType.isUuid(type)) { + throw new BsonInvalidOperationException("type must be a UUID subtype."); + } + + if (type != BsonBinarySubType.UUID_STANDARD.getValue()) { + throw new BsonInvalidOperationException("uuidRepresentation must be set to return the correct UUID."); + } + + return UuidHelper.decodeBinaryToUuid(this.data.clone(), this.type, UuidRepresentation.STANDARD); + } + + /** + * Returns the binary as a {@link BinaryVector}. The {@linkplain #getType() subtype} must be {@linkplain BsonBinarySubType#VECTOR 9}. + * + * @return the vector + * @throws BsonInvalidOperationException if the binary subtype is not {@link BsonBinarySubType#VECTOR}. 
+ * @since 5.3 + */ + public BinaryVector asVector() { + if (type != BsonBinarySubType.VECTOR.getValue()) { + throw new BsonInvalidOperationException("type must be a Vector subtype."); + } + + return BinaryVectorHelper.decodeBinaryToVector(this.data); + } + + /** + * Returns the binary as a UUID. + * + * @param uuidRepresentation the UUID representation + * @return the uuid + * @since 3.9 + */ + public UUID asUuid(final UuidRepresentation uuidRepresentation) { + Assertions.notNull("uuidRepresentation", uuidRepresentation); + + byte uuidType = uuidRepresentation == UuidRepresentation.STANDARD + ? BsonBinarySubType.UUID_STANDARD.getValue() + : BsonBinarySubType.UUID_LEGACY.getValue(); + + if (type != uuidType) { + throw new BsonInvalidOperationException("uuidRepresentation does not match current uuidRepresentation."); + } + + return UuidHelper.decodeBinaryToUuid(data.clone(), type, uuidRepresentation); + } + @Override public BsonType getBsonType() { return BsonType.BINARY; @@ -91,6 +196,9 @@ public byte getType() { /** * Gets the data of this Binary. + *

+ * This method returns the internal copy of the byte array, so only modify the contents of the returned array if the intention is to + * change the state of this instance. * * @return the data */ @@ -121,7 +229,7 @@ public boolean equals(final Object o) { @Override public int hashCode() { - int result = (int) type; + int result = type; result = 31 * result + Arrays.hashCode(data); return result; } diff --git a/bson/src/main/org/bson/BsonBinaryReader.java b/bson/src/main/org/bson/BsonBinaryReader.java index ce1f5c5930f..5fff43beefe 100644 --- a/bson/src/main/org/bson/BsonBinaryReader.java +++ b/bson/src/main/org/bson/BsonBinaryReader.java @@ -35,7 +35,6 @@ public class BsonBinaryReader extends AbstractBsonReader { private final BsonInput bsonInput; - private Mark mark; /** * Construct an instance. @@ -150,18 +149,18 @@ protected BsonBinary doReadBinaryData() { @Override protected byte doPeekBinarySubType() { - mark(); + Mark mark = new Mark(); readSize(); byte type = bsonInput.readByte(); - reset(); + mark.reset(); return type; } @Override protected int doPeekBinarySize() { - mark(); + Mark mark = new Mark(); int size = readSize(); - reset(); + mark.reset(); return size; } @@ -390,47 +389,39 @@ protected Context getContext() { return (Context) super.getContext(); } - @Deprecated - @Override - public void mark() { - if (mark != null) { - throw new BSONException("A mark already exists; it needs to be reset before creating a new one"); - } - mark = new Mark(); - } - @Override public BsonReaderMark getMark() { return new Mark(); } - @Override - public void reset() { - if (mark == null) { - throw new BSONException("trying to reset a mark before creating it"); - } - mark.reset(); - mark = null; - } - + /** + * An implementation of {@code AbstractBsonReader.Mark}. + */ protected class Mark extends AbstractBsonReader.Mark { private final int startPosition; private final int size; private final BsonInputMark bsonInputMark; + /** + * Construct an instance. 
+ */ protected Mark() { - super(); startPosition = BsonBinaryReader.this.getContext().startPosition; size = BsonBinaryReader.this.getContext().size; bsonInputMark = BsonBinaryReader.this.bsonInput.getMark(Integer.MAX_VALUE); } + @Override public void reset() { super.reset(); bsonInputMark.reset(); BsonBinaryReader.this.setContext(new Context((Context) getParentContext(), getContextType(), startPosition, size)); } } + + /** + * An implementation of {@code AbstractBsonReader.Context}. + */ protected class Context extends AbstractBsonReader.Context { private final int startPosition; private final int size; diff --git a/bson/src/main/org/bson/BsonBinarySubType.java b/bson/src/main/org/bson/BsonBinarySubType.java index b43b9eb519a..08c29e2ef09 100644 --- a/bson/src/main/org/bson/BsonBinarySubType.java +++ b/bson/src/main/org/bson/BsonBinarySubType.java @@ -17,7 +17,7 @@ package org.bson; /** - * The Binary subtype + * The Binary subtype. * * @since 3.0 */ @@ -52,6 +52,36 @@ public enum BsonBinarySubType { */ MD5((byte) 0x05), + /** + * Encrypted data. + * + * @since 4.4 + */ + ENCRYPTED((byte) 0x06), + + /** + * Columnar data. + * + * @since 4.4 + */ + COLUMN((byte) 0x07), + + /** + * Sensitive data (e.g., HMAC keys) that should be excluded from server-side logging. + * + * @since 5.3 + */ + SENSITIVE((byte) 0x08), + + /** + * Vector data. + * + * @mongodb.server.release 6.0 + * @since 5.3 + * @see BinaryVector + */ + VECTOR((byte) 0x09), + /** * User defined binary data. */ @@ -60,10 +90,10 @@ public enum BsonBinarySubType { private final byte value; /** - * Returns true if the given value is a UUID subtype + * Returns true if the given value is a UUID subtype. * - * @param value the subtype value as a byte - * @return true if value is a UUID subtype + * @param value the subtype value as a byte. + * @return true if value is a UUID subtype. 
* @since 3.4 */ public static boolean isUuid(final byte value) { diff --git a/bson/src/main/org/bson/BsonBinaryWriter.java b/bson/src/main/org/bson/BsonBinaryWriter.java index 2cce2090694..20e73d97d44 100644 --- a/bson/src/main/org/bson/BsonBinaryWriter.java +++ b/bson/src/main/org/bson/BsonBinaryWriter.java @@ -21,9 +21,11 @@ import org.bson.types.Decimal128; import org.bson.types.ObjectId; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.List; -import java.util.Stack; +import static java.lang.Math.max; import static java.lang.String.format; import static org.bson.assertions.Assertions.notNull; @@ -36,9 +38,38 @@ public class BsonBinaryWriter extends AbstractBsonWriter { private final BsonBinaryWriterSettings binaryWriterSettings; private final BsonOutput bsonOutput; - private final Stack maxDocumentSizeStack = new Stack(); + private final Deque maxDocumentSizeStack = new ArrayDeque<>(); + private static final int ARRAY_INDEXES_CACHE_SIZE = 1000; + private static final byte[] ARRAY_INDEXES_BUFFER; + private static final int[] ARRAY_INDEXES_OFFSETS; + private static final int[] ARRAY_INDEXES_LENGTHS; private Mark mark; + static { + ARRAY_INDEXES_LENGTHS = new int[ARRAY_INDEXES_CACHE_SIZE]; + ARRAY_INDEXES_OFFSETS = new int[ARRAY_INDEXES_CACHE_SIZE]; + int totalSize = 0; + for (int i = 0; i < ARRAY_INDEXES_CACHE_SIZE; i++) { + totalSize += (int) (Math.log10(max(i, 1)) + + 1 // number of digits + + 1); // +1 for null terminator + } + ARRAY_INDEXES_BUFFER = new byte[totalSize]; + + // Fill buffer + int offset = 0; + for (int i = 0; i < ARRAY_INDEXES_CACHE_SIZE; i++) { + String string = Integer.toString(i); + int length = string.length(); + for (int j = 0; j < length; j++) { + ARRAY_INDEXES_BUFFER[offset++] = (byte) string.charAt(j); + } + ARRAY_INDEXES_BUFFER[offset++] = 0; + ARRAY_INDEXES_OFFSETS[i] = offset - (length + 1); + ARRAY_INDEXES_LENGTHS[i] = length + 1; // +1 for null terminator + } + } + /** * Construct an instance. 
* @@ -67,7 +98,7 @@ public BsonBinaryWriter(final BsonOutput bsonOutput) { */ public BsonBinaryWriter(final BsonWriterSettings settings, final BsonBinaryWriterSettings binaryWriterSettings, final BsonOutput bsonOutput) { - this(settings, binaryWriterSettings, bsonOutput, new NoOpFieldNameValidator()); + this(settings, binaryWriterSettings, bsonOutput, NoOpFieldNameValidator.INSTANCE); } /** @@ -108,10 +139,6 @@ public BsonBinaryWriterSettings getBinaryWriterSettings() { return binaryWriterSettings; } - @Override - public void flush() { - } - @Override protected Context getContext() { return (Context) super.getContext(); @@ -263,7 +290,7 @@ public void doWriteNull() { public void doWriteObjectId(final ObjectId value) { bsonOutput.writeByte(BsonType.OBJECT_ID.getValue()); writeCurrentName(); - bsonOutput.writeBytes(value.toByteArray()); + bsonOutput.writeObjectId(value); } @Override @@ -401,7 +428,14 @@ public void reset() { private void writeCurrentName() { if (getContext().getContextType() == BsonContextType.ARRAY) { - bsonOutput.writeCString(Integer.toString(getContext().index++)); + int index = getContext().index++; + if (index >= ARRAY_INDEXES_CACHE_SIZE) { + bsonOutput.writeCString(Integer.toString(index)); + } else { + bsonOutput.writeBytes(ARRAY_INDEXES_BUFFER, + ARRAY_INDEXES_OFFSETS[index], + ARRAY_INDEXES_LENGTHS[index]); + } } else { bsonOutput.writeCString(getName()); } @@ -420,6 +454,9 @@ private void validateSize(final int size) { } } + /** + * An implementation of {@code AbstractBsonWriter.Context}. + */ protected class Context extends AbstractBsonWriter.Context { private final int startPosition; private int index; // used when contextType is an array @@ -458,6 +495,9 @@ public Context copy() { } } + /** + * An implementation of {@code AbstractBsonWriter.Mark}. 
+ */ protected class Mark extends AbstractBsonWriter.Mark { private final int position; diff --git a/bson/src/main/org/bson/BsonBoolean.java b/bson/src/main/org/bson/BsonBoolean.java index fc95ad2baf7..f8af1cd6df7 100644 --- a/bson/src/main/org/bson/BsonBoolean.java +++ b/bson/src/main/org/bson/BsonBoolean.java @@ -25,8 +25,14 @@ public final class BsonBoolean extends BsonValue implements Comparable, Cloneable, Bson, Serializable { private static final long serialVersionUID = 1L; - private final Map map = new LinkedHashMap(); + /** + * The underlying map. + */ + private final Map map; /** * Parses a string in MongoDB Extended JSON format to a {@code BsonDocument} @@ -68,6 +72,7 @@ public static BsonDocument parse(final String json) { * @param bsonElements a list of {@code BsonElement} */ public BsonDocument(final List bsonElements) { + this(bsonElements.size()); for (BsonElement cur : bsonElements) { put(cur.getName(), cur.getValue()); } @@ -80,13 +85,26 @@ public BsonDocument(final List bsonElements) { * @param value the value */ public BsonDocument(final String key, final BsonValue value) { + this(); put(key, value); } + /** + * Construct an empty document with the specified initial capacity. + * + * @param initialCapacity the initial capacity + * @throws IllegalArgumentException if the initial capacity is negative + * @since 4.3 + */ + public BsonDocument(final int initialCapacity) { + map = new LinkedHashMap<>(initialCapacity); + } + /** * Construct an empty document. 
*/ public BsonDocument() { + map = new LinkedHashMap<>(); } @Override @@ -720,10 +738,6 @@ public BsonValue put(final String key, final BsonValue value) { if (value == null) { throw new IllegalArgumentException(format("The value for key %s can not be null", key)); } - if (key.contains("\0")) { - throw new BSONException(format("BSON cstring '%s' is not valid because it contains a null character at index %d", key, - key.indexOf('\0'))); - } return map.put(key, value); } @@ -813,16 +827,15 @@ public int hashCode() { } /** - * Gets a JSON representation of this document using the {@link org.bson.json.JsonMode#STRICT} output mode, and otherwise the default + * Gets a JSON representation of this document using the {@link org.bson.json.JsonMode#RELAXED} output mode, and otherwise the default * settings of {@link JsonWriterSettings.Builder}. * * @return a JSON representation of this document * @see #toJson(JsonWriterSettings) * @see JsonWriterSettings */ - @SuppressWarnings("deprecation") public String toJson() { - return toJson(new JsonWriterSettings()); + return toJson(JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); } /** @@ -843,7 +856,7 @@ public String toString() { @Override public BsonDocument clone() { - BsonDocument to = new BsonDocument(); + BsonDocument to = new BsonDocument(this.size()); for (Entry cur : entrySet()) { switch (cur.getValue().getBsonType()) { case DOCUMENT: @@ -871,12 +884,29 @@ private void throwIfKeyAbsent(final Object key) { } } - // see https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + /** + * Write the replacement object. + * + *

+ * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + *

+ * + * @return a proxy for the document + */ private Object writeReplace() { return new SerializationProxy(this); } - // see https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + /** + * Prevent normal deserialization. + * + *

+ * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + *

+ * + * @param stream the stream + * @throws InvalidObjectException in all cases + */ private void readObject(final ObjectInputStream stream) throws InvalidObjectException { throw new InvalidObjectException("Proxy required"); } diff --git a/bson/src/main/org/bson/BsonDocumentReader.java b/bson/src/main/org/bson/BsonDocumentReader.java index 4218f646c20..0aaca06eaef 100644 --- a/bson/src/main/org/bson/BsonDocumentReader.java +++ b/bson/src/main/org/bson/BsonDocumentReader.java @@ -35,7 +35,6 @@ */ public class BsonDocumentReader extends AbstractBsonReader { private BsonValue currentValue; - private Mark mark; /** * Construct a new instance. @@ -43,7 +42,6 @@ public class BsonDocumentReader extends AbstractBsonReader { * @param document the document to read from */ public BsonDocumentReader(final BsonDocument document) { - super(); setContext(new Context(null, BsonContextType.TOP_LEVEL, document)); currentValue = document; } @@ -235,44 +233,33 @@ public BsonType readBsonType() { return getCurrentBsonType(); } - @Deprecated - @Override - public void mark() { - if (mark != null) { - throw new BSONException("A mark already exists; it needs to be reset before creating a new one"); - } - mark = new Mark(); - } - @Override public BsonReaderMark getMark() { return new Mark(); } - @Override - public void reset() { - if (mark == null) { - throw new BSONException("trying to reset a mark before creating it"); - } - mark.reset(); - mark = null; - } - @Override protected Context getContext() { return (Context) super.getContext(); } + + /** + * An implementation of {@code AbstractBsonReader.Mark}. + */ protected class Mark extends AbstractBsonReader.Mark { private final BsonValue currentValue; private final Context context; + /** + * Construct an instance. 
+ */ protected Mark() { - super(); currentValue = BsonDocumentReader.this.currentValue; context = BsonDocumentReader.this.getContext(); context.mark(); } + @Override public void reset() { super.reset(); BsonDocumentReader.this.currentValue = currentValue; @@ -283,8 +270,8 @@ public void reset() { private static class BsonDocumentMarkableIterator implements Iterator { - private Iterator baseIterator; - private List markIterator = new ArrayList(); + private final Iterator baseIterator; + private final List markIterator = new ArrayList<>(); private int curIndex; // index of the cursor private boolean marking; @@ -344,21 +331,43 @@ public void remove() { } } + /** + * An implementation of {@code AbstractBsonReader.Context}. + */ protected class Context extends AbstractBsonReader.Context { private BsonDocumentMarkableIterator> documentIterator; private BsonDocumentMarkableIterator arrayIterator; + /** + * Construct an instance. + * + * @param parentContext the parent context + * @param contextType the context type + * @param array the array context + */ protected Context(final Context parentContext, final BsonContextType contextType, final BsonArray array) { super(parentContext, contextType); - arrayIterator = new BsonDocumentMarkableIterator(array.iterator()); + arrayIterator = new BsonDocumentMarkableIterator<>(array.iterator()); } + /** + * Construct an instance. + * + * @param parentContext the parent context + * @param contextType the context type + * @param document the document context + */ protected Context(final Context parentContext, final BsonContextType contextType, final BsonDocument document) { super(parentContext, contextType); - documentIterator = new BsonDocumentMarkableIterator>(document.entrySet().iterator()); + documentIterator = new BsonDocumentMarkableIterator<>(document.entrySet().iterator()); } + /** + * Gets the next element. 
+ * + * @return the next element, which may be null + */ public Map.Entry getNextElement() { if (documentIterator.hasNext()) { return documentIterator.next(); @@ -366,6 +375,10 @@ public Map.Entry getNextElement() { return null; } } + + /** + * Create a mark. + */ protected void mark() { if (documentIterator != null) { documentIterator.mark(); @@ -378,6 +391,9 @@ protected void mark() { } } + /** + * Reset the context. + */ protected void reset() { if (documentIterator != null) { documentIterator.reset(); @@ -390,6 +406,11 @@ protected void reset() { } } + /** + * Gets the next value. + * + * @return the next value, which may be null + */ public BsonValue getNextValue() { if (arrayIterator.hasNext()) { return arrayIterator.next(); diff --git a/bson/src/main/org/bson/BsonDocumentWrapper.java b/bson/src/main/org/bson/BsonDocumentWrapper.java index c90f3b5a931..f846d40e1c3 100644 --- a/bson/src/main/org/bson/BsonDocumentWrapper.java +++ b/bson/src/main/org/bson/BsonDocumentWrapper.java @@ -41,10 +41,14 @@ public final class BsonDocumentWrapper extends BsonDocument { private final transient T wrappedDocument; private final transient Encoder encoder; + + /** + * The unwrapped document, which may be null + */ private BsonDocument unwrapped; /** - * A helper to convert an document of type Object to a BsonDocument + * A helper to convert a document of type Object to a BsonDocument * *

If not already a BsonDocument it looks up the documents' class in the codecRegistry and wraps it into a BsonDocumentWrapper

* @@ -198,12 +202,29 @@ private BsonDocument getUnwrapped() { return unwrapped; } - // see https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + /** + * Write the replacement object. + * + *

+ * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + *

+ * + * @return a proxy for the document + */ private Object writeReplace() { return getUnwrapped(); } - // see https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + /** + * Prevent normal deserialization. + * + *

+ * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + *

+ * + * @param stream the stream + * @throws InvalidObjectException in all cases + */ private void readObject(final ObjectInputStream stream) throws InvalidObjectException { throw new InvalidObjectException("Proxy required"); } diff --git a/bson/src/main/org/bson/BsonDocumentWriter.java b/bson/src/main/org/bson/BsonDocumentWriter.java index 7c36a368336..a34188645cd 100644 --- a/bson/src/main/org/bson/BsonDocumentWriter.java +++ b/bson/src/main/org/bson/BsonDocumentWriter.java @@ -194,10 +194,6 @@ public void doWriteUndefined() { write(new BsonUndefined()); } - @Override - public void flush() { - } - @Override protected Context getContext() { return (Context) super.getContext(); diff --git a/bson/src/main/org/bson/BsonJavaScript.java b/bson/src/main/org/bson/BsonJavaScript.java index 506107e0f9d..2546bb3c9e4 100644 --- a/bson/src/main/org/bson/BsonJavaScript.java +++ b/bson/src/main/org/bson/BsonJavaScript.java @@ -28,7 +28,7 @@ public class BsonJavaScript extends BsonValue { /** * Construct a new instance with the given JavaScript code. * - * @param code the Javascript code + * @param code the JavaScript code */ public BsonJavaScript(final String code) { this.code = code; @@ -40,7 +40,7 @@ public BsonType getBsonType() { } /** - * Get the Javascript code. + * Get the JavaScript code. * * @return the code */ diff --git a/bson/src/main/org/bson/BsonNull.java b/bson/src/main/org/bson/BsonNull.java index 3ab61bc9cd2..5d6c701a465 100644 --- a/bson/src/main/org/bson/BsonNull.java +++ b/bson/src/main/org/bson/BsonNull.java @@ -23,6 +23,9 @@ */ public final class BsonNull extends BsonValue { + /** + * A singleton instance of the null value. 
+ */ public static final BsonNull VALUE = new BsonNull(); @Override diff --git a/bson/src/main/org/bson/BsonNumber.java b/bson/src/main/org/bson/BsonNumber.java index 68f375ec25d..c0449e754ed 100644 --- a/bson/src/main/org/bson/BsonNumber.java +++ b/bson/src/main/org/bson/BsonNumber.java @@ -19,7 +19,7 @@ import org.bson.types.Decimal128; /** - * Base class for the three numeric BSON types. This class mirrors the functionality provided by {@code java.lang.Number}. + * Base class for the numeric BSON types. This class mirrors the functionality provided by {@code java.lang.Number}. * * @since 3.0 */ diff --git a/bson/src/main/org/bson/BsonReader.java b/bson/src/main/org/bson/BsonReader.java index 79c0a09e1f5..89251df5e42 100644 --- a/bson/src/main/org/bson/BsonReader.java +++ b/bson/src/main/org/bson/BsonReader.java @@ -50,7 +50,7 @@ public interface BsonReader extends Closeable { * Peeks the subtype of the binary data that the reader is positioned at. This operation is not permitted if the mark is already set. * * @return the subtype - * @see #mark() + * @see #getMark() */ byte peekBinarySubType(); @@ -58,7 +58,7 @@ public interface BsonReader extends Closeable { * Peeks the size of the binary data that the reader is positioned at. This operation is not permitted if the mark is already set. * * @return the size of the binary data - * @see #mark() + * @see #getMark() * @since 3.4 */ int peekBinarySize(); @@ -382,15 +382,6 @@ public interface BsonReader extends Closeable { */ void skipValue(); - /** - * Creates a bookmark in the BsonReader's input - * - * The previous mark must be cleared before creating a new one - * @deprecated Use {@link #getMark()} instead - */ - @Deprecated - void mark(); - /** * Gets a mark representing the current state of the reader. 
* @@ -399,13 +390,6 @@ public interface BsonReader extends Closeable { */ BsonReaderMark getMark(); - /** - * Go back to the state at the last mark and removes the mark - * - * @throws org.bson.BSONException if no mark has been set - */ - void reset(); - @Override void close(); } diff --git a/bson/src/main/org/bson/BsonTimestamp.java b/bson/src/main/org/bson/BsonTimestamp.java index 3909fbcb330..96b9e7610c8 100644 --- a/bson/src/main/org/bson/BsonTimestamp.java +++ b/bson/src/main/org/bson/BsonTimestamp.java @@ -16,7 +16,7 @@ package org.bson; -import org.bson.internal.UnsignedLongs; +import static java.lang.Long.compareUnsigned; /** * A value representing the BSON timestamp type. @@ -37,7 +37,7 @@ public BsonTimestamp() { /** * Construct a new instance for the given value, which combines the time in seconds and the increment as a single long value. * - * @param value the timetamp as a single long value + * @param value the timestamp as a single long value * @since 3.5 */ public BsonTimestamp(final long value) { @@ -99,7 +99,7 @@ public String toString() { @Override public int compareTo(final BsonTimestamp ts) { - return UnsignedLongs.compare(value, ts.value); + return compareUnsigned(value, ts.value); } @Override diff --git a/bson/src/main/org/bson/BsonValue.java b/bson/src/main/org/bson/BsonValue.java index 66edb96af0a..2318407d6b7 100644 --- a/bson/src/main/org/bson/BsonValue.java +++ b/bson/src/main/org/bson/BsonValue.java @@ -77,7 +77,7 @@ public BsonString asString() { * @throws org.bson.BsonInvalidOperationException if this value is not of the expected type */ public BsonNumber asNumber() { - if (getBsonType() != BsonType.INT32 && getBsonType() != BsonType.INT64 && getBsonType() != BsonType.DOUBLE) { + if (!isNumber()) { throw new BsonInvalidOperationException(format("Value expected to be of a numerical BSON type is of unexpected type %s", getBsonType())); } @@ -282,7 +282,7 @@ public boolean isString() { * @return true if this is a BsonNumber, false 
otherwise */ public boolean isNumber() { - return isInt32() || isInt64() || isDouble(); + return this instanceof BsonNumber; } /** diff --git a/bson/src/main/org/bson/ByteBuf.java b/bson/src/main/org/bson/ByteBuf.java index 4fb1db8e9c1..cd14d2f93df 100644 --- a/bson/src/main/org/bson/ByteBuf.java +++ b/bson/src/main/org/bson/ByteBuf.java @@ -42,12 +42,12 @@ public interface ByteBuf { *

Writes the given byte into this buffer at the given index.

* * @param index The index at which the byte will be written - * @param b The byte value to be written + * @param value The byte value to be written * @return This buffer * @throws IndexOutOfBoundsException If {@code index} is negative or not smaller than the buffer's limit * @throws java.nio.ReadOnlyBufferException If this buffer is read-only */ - ByteBuf put(int index, byte b); + ByteBuf put(int index, byte value); /** * Returns the number of elements between the current position and the limit. @@ -99,12 +99,61 @@ public interface ByteBuf { * *

Writes the given byte into this buffer at the current position, and then increments the position.

* - * @param b The byte to be written + * @param value The byte to be written * @return This buffer * @throws java.nio.BufferOverflowException If this buffer's current position is not smaller than its limit * @throws java.nio.ReadOnlyBufferException If this buffer is read-only */ - ByteBuf put(byte b); + ByteBuf put(byte value); + + /** + * Writes the given int value into this buffer at the current position, + * using the current byte order, and increments the position by 4. + * + * @param value the int value to be written + * @return this buffer + * @throws java.nio.BufferOverflowException if there are fewer than 4 bytes remaining in this buffer + * @throws java.nio.ReadOnlyBufferException if this buffer is read-only + * @since 5.4 + */ + ByteBuf putInt(int value); + + /** + * Writes the given int value into this buffer at the current position, + * using the current byte order, and increments the position by 4. + * + * @param value the int value to be written + * @param index the index at which the int will be written + * @return this buffer + * @throws java.nio.BufferOverflowException if there are fewer than 4 bytes remaining in this buffer + * @throws java.nio.ReadOnlyBufferException if this buffer is read-only + * @since 5.4 + */ + ByteBuf putInt(int index, int value); + + /** + * Writes the given double value into this buffer at the current position, + * using the current byte order, and increments the position by 8. + * + * @param value the double value to be written + * @return this buffer + * @throws java.nio.BufferOverflowException if there are fewer than 8 bytes remaining in this buffer + * @throws java.nio.ReadOnlyBufferException if this buffer is read-only + * @since 5.4 + */ + ByteBuf putDouble(double value); + + /** + * Writes the given long value into this buffer at the current position, + * using the current byte order, and increments the position by 8. 
+ * + * @param value the long value to be written + * @return this buffer + * @throws java.nio.BufferOverflowException if there are fewer than 8 bytes remaining in this buffer + * @throws java.nio.ReadOnlyBufferException if this buffer is read-only + * @since 5.4 + */ + ByteBuf putLong(long value); /** *

Flips this buffer. The limit is set to the current position and then the position is set to zero. If the mark is defined then it @@ -136,6 +185,27 @@ public interface ByteBuf { */ byte[] array(); + /** + *

States whether this buffer is backed by an accessible byte array.

+ * + *

If this method returns {@code true} then the {@link #array()} and {@link #arrayOffset()} methods may safely be invoked.

+ * + * @return {@code true} if, and only if, this buffer is backed by an array and is not read-only + * @since 5.5 + */ + boolean isBackedByArray(); + + /** + * Returns the offset of the first byte within the backing byte array of + * this buffer. + * + * @return the offset within this buffer's array. + * @throws java.nio.ReadOnlyBufferException If this buffer is backed by an array but is read-only + * @throws UnsupportedOperationException if this buffer is not backed by an accessible array + * @since 5.5 + */ + int arrayOffset(); + /** * Returns this buffer's limit. * @@ -232,18 +302,18 @@ public interface ByteBuf { *

This method transfers bytes from this buffer into the given * destination array. If there are fewer bytes remaining in the * buffer than are required to satisfy the request, that is, if - * length > remaining(), then no + * length > remaining(), then no * bytes are transferred and a {@link java.nio.BufferUnderflowException} is * thrown. * - *

Otherwise, this method copies length bytes from this + *

Otherwise, this method copies {@code length} bytes from this * buffer into the given array, starting at the current position of this * buffer and at the given offset in the array. The position of this - * buffer is then incremented by length. + * buffer is then incremented by {@code length}. * *

In other words, an invocation of this method of the form - * src.get(dst, off, len) has exactly the same effect as - * the loop + * src.get(dst, off, len) + * has exactly the same effect as the loop * *

      * {@code
@@ -261,21 +331,21 @@ public interface ByteBuf  {
      * @param  offset
      *         The offset within the array of the first byte to be
      *         written; must be non-negative and no larger than
-     *         dst.length
+     *         {@code dst.length}
      *
      * @param  length
      *         The maximum number of bytes to be written to the given
      *         array; must be non-negative and no larger than
-     *         dst.length - offset
+     *         {@code dst.length - offset}
      *
      * @return  This buffer
      *
      * @throws java.nio.BufferUnderflowException
-     *          If there are fewer than length bytes
+     *          If there are fewer than {@code length} bytes
      *          remaining in this buffer
      *
      * @throws  IndexOutOfBoundsException
-     *          If the preconditions on the offset and length
+     *          If the preconditions on the {@code offset} and {@code length}
      *          parameters do not hold
      */
     ByteBuf get(byte[] bytes, int offset, int length);
@@ -434,7 +504,7 @@ public interface ByteBuf  {
     ByteBuffer asNIO();
 
     /**
-     * Gets the current reference count, which starts at 0.
+     * Gets the current reference count, which is 1 for a new {@link ByteBuf}.
      *
      * @return the current count, which must be greater than or equal to 0
      */
diff --git a/bson/src/main/org/bson/ByteBufNIO.java b/bson/src/main/org/bson/ByteBufNIO.java
index 83bfa7d893a..dfcc6379070 100644
--- a/bson/src/main/org/bson/ByteBufNIO.java
+++ b/bson/src/main/org/bson/ByteBufNIO.java
@@ -97,6 +97,30 @@ public ByteBuf put(final byte b) {
         return this;
     }
 
+    @Override
+    public ByteBuf putInt(final int value) {
+        buf.putInt(value);
+        return this;
+    }
+
+    @Override
+    public ByteBuf putInt(final int index, final int value) {
+        buf.putInt(index, value);
+        return this;
+    }
+
+    @Override
+    public ByteBuf putDouble(final double value) {
+        buf.putDouble(value);
+        return this;
+    }
+
+    @Override
+    public ByteBuf putLong(final long value) {
+        buf.putLong(value);
+        return this;
+    }
+
     @Override
     public ByteBuf flip() {
         ((Buffer) buf).flip();
@@ -108,6 +132,16 @@ public byte[] array() {
         return buf.array();
     }
 
+    @Override
+    public boolean isBackedByArray() {
+        return buf.hasArray();
+    }
+
+    @Override
+    public int arrayOffset() {
+        return buf.arrayOffset();
+    }
+
     @Override
     public int limit() {
         return buf.limit();
@@ -160,8 +194,13 @@ public ByteBuf get(final byte[] bytes, final int offset, final int length) {
 
     @Override
     public ByteBuf get(final int index, final byte[] bytes, final int offset, final int length) {
-        for (int i = 0; i < length; i++) {
-            bytes[offset + i] = buf.get(index + i);
+        if (buf.hasArray()) {
+            System.arraycopy(buf.array(), buf.arrayOffset() + index, bytes, offset, length);
+        } else {
+            // Fallback to per-byte copying if no backing array is available.
+            for (int i = 0; i < length; i++) {
+                bytes[offset + i] = buf.get(index + i);
+            }
         }
         return this;
     }
diff --git a/bson/src/main/org/bson/Document.java b/bson/src/main/org/bson/Document.java
index 208f187d466..423d234c6d7 100644
--- a/bson/src/main/org/bson/Document.java
+++ b/bson/src/main/org/bson/Document.java
@@ -16,13 +16,21 @@
 
 package org.bson;
 
+import org.bson.codecs.BsonValueCodecProvider;
+import org.bson.codecs.Codec;
+import org.bson.codecs.CollectionCodecProvider;
 import org.bson.codecs.Decoder;
 import org.bson.codecs.DecoderContext;
 import org.bson.codecs.DocumentCodec;
+import org.bson.codecs.DocumentCodecProvider;
 import org.bson.codecs.Encoder;
 import org.bson.codecs.EncoderContext;
+import org.bson.codecs.IterableCodecProvider;
+import org.bson.codecs.MapCodecProvider;
+import org.bson.codecs.ValueCodecProvider;
 import org.bson.codecs.configuration.CodecRegistry;
 import org.bson.conversions.Bson;
+import org.bson.json.JsonMode;
 import org.bson.json.JsonReader;
 import org.bson.json.JsonWriter;
 import org.bson.json.JsonWriterSettings;
@@ -32,11 +40,18 @@
 import java.io.StringWriter;
 import java.util.Collection;
 import java.util.Date;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import static java.lang.String.format;
+import static java.util.Arrays.asList;
+import static org.bson.assertions.Assertions.isTrue;
 import static org.bson.assertions.Assertions.notNull;
+import static org.bson.codecs.configuration.CodecRegistries.fromProviders;
+import static org.bson.codecs.configuration.CodecRegistries.withUuidRepresentation;
 
 /**
  * A representation of a document as a {@code Map}.  All iterators will traverse the elements in insertion order, as with {@code
@@ -46,15 +61,24 @@
  * @since 3.0.0
  */
 public class Document implements Map, Serializable, Bson {
+    private static final Codec DEFAULT_CODEC =
+            withUuidRepresentation(fromProviders(asList(new ValueCodecProvider(),
+                    new CollectionCodecProvider(), new IterableCodecProvider(),
+                    new BsonValueCodecProvider(), new DocumentCodecProvider(), new MapCodecProvider())), UuidRepresentation.STANDARD)
+                    .get(Document.class);
+
     private static final long serialVersionUID = 6297731997167536582L;
 
+    /**
+     * The map of keys to values.
+     */
     private final LinkedHashMap documentAsMap;
 
     /**
      * Creates an empty Document instance.
      */
     public Document() {
-        documentAsMap = new LinkedHashMap();
+        documentAsMap = new LinkedHashMap<>();
     }
 
     /**
@@ -64,7 +88,7 @@ public Document() {
      * @param value value
      */
     public Document(final String key, final Object value) {
-        documentAsMap = new LinkedHashMap();
+        documentAsMap = new LinkedHashMap<>();
         documentAsMap.put(key, value);
     }
 
@@ -73,8 +97,8 @@ public Document(final String key, final Object value) {
      *
      * @param map initial map
      */
-    public Document(final Map map) {
-        documentAsMap = new LinkedHashMap(map);
+    public Document(final Map map) {
+        documentAsMap = new LinkedHashMap<>(map);
     }
 
 
@@ -87,7 +111,7 @@ public Document(final Map map) {
      * @mongodb.driver.manual reference/mongodb-extended-json/ MongoDB Extended JSON
      */
     public static Document parse(final String json) {
-        return parse(json, new DocumentCodec());
+        return parse(json, DEFAULT_CODEC);
     }
 
     /**
@@ -107,7 +131,7 @@ public static Document parse(final String json, final Decoder decoder)
 
     @Override
     public  BsonDocument toBsonDocument(final Class documentClass, final CodecRegistry codecRegistry) {
-        return new BsonDocumentWrapper(this, codecRegistry.get(Document.class));
+        return new BsonDocumentWrapper<>(this, codecRegistry.get(Document.class));
     }
 
     /**
@@ -158,6 +182,76 @@ public  T get(final Object key, final T defaultValue) {
         return value == null ? defaultValue : (T) value;
     }
 
+    /**
+     * Gets the value in an embedded document, casting it to the given {@code Class}.  The list of keys represents a path to the
+     * embedded value, drilling down into an embedded document for each key. This is useful to avoid having casts in
+     * client code, though the effect is the same.
+     * 

+ * The generic type of the keys list is {@code ?} to be consistent with the corresponding {@code get} methods, but in practice + * the actual type of the argument should be {@code List}. So to get the embedded value of a key list that is of type String, + * you would write {@code String name = doc.getEmbedded(List.of("employee", "manager", "name"), String.class)} instead of + * {@code String name = (String) doc.get("employee", Document.class).get("manager", Document.class).get("name") }. + * + * @param keys the list of keys + * @param clazz the non-null class to cast the value to + * @param the type of the class + * @return the value of the given embedded key, or null if the instance does not contain this embedded key. + * @throws ClassCastException if the value of the given embedded key is not of type T + * @since 3.10 + */ + public T getEmbedded(final List keys, final Class clazz) { + notNull("keys", keys); + isTrue("keys", !keys.isEmpty()); + notNull("clazz", clazz); + return getEmbeddedValue(keys, clazz, null); + } + + /** + * Gets the value in an embedded document, casting it to the given {@code Class} or returning the default value if null. + * The list of keys represents a path to the embedded value, drilling down into an embedded document for each key. + * This is useful to avoid having casts in client code, though the effect is the same. + *

+ * The generic type of the keys list is {@code ?} to be consistent with the corresponding {@code get} methods, but in practice + * the actual type of the argument should be {@code List}. So to get the embedded value of a key list that is of type String, + * you would write {@code String name = doc.getEmbedded(List.of("employee", "manager", "name"), "John Smith")} instead of + * {@code String name = doc.get("employee", Document.class).get("manager", Document.class).get("name", "John Smith") }. + * + * @param keys the list of keys + * @param defaultValue what to return if the value is null + * @param the type of the class + * @return the value of the given key, or null if the instance does not contain this key. + * @throws ClassCastException if the value of the given key is not of type T + * @since 3.10 + */ + public T getEmbedded(final List keys, final T defaultValue) { + notNull("keys", keys); + isTrue("keys", !keys.isEmpty()); + notNull("defaultValue", defaultValue); + return getEmbeddedValue(keys, null, defaultValue); + } + + + // Gets the embedded value of the given list of keys, casting it to {@code Class} or returning the default value if null. + // Throws ClassCastException if any of the intermediate embedded values is not a Document. + @SuppressWarnings("unchecked") + private T getEmbeddedValue(final List keys, final Class clazz, final T defaultValue) { + Object value = this; + Iterator keyIterator = keys.iterator(); + while (keyIterator.hasNext()) { + Object key = keyIterator.next(); + value = ((Document) value).get(key); + if (!(value instanceof Document)) { + if (value == null) { + return defaultValue; + } else if (keyIterator.hasNext()) { + throw new ClassCastException(format("At key %s, the value is not a Document (%s)", + key, value.getClass().getName())); + } + } + } + return clazz != null ? clazz.cast(value) : (T) value; + } + /** * Gets the value of the given key as an Integer. 
* @@ -260,7 +354,59 @@ public Date getDate(final Object key) { } /** - * Gets a JSON representation of this document using the {@link org.bson.json.JsonMode#STRICT} output mode, and otherwise the default + * Gets the list value of the given key, casting the list elements to the given {@code Class}. This is useful to avoid having + * casts in client code, though the effect is the same. + * + * @param key the key + * @param clazz the non-null class to cast the list value to + * @param the type of the class + * @return the list value of the given key, or null if the instance does not contain this key. + * @throws ClassCastException if the elements in the list value of the given key is not of type T or the value is not a list + * @since 3.10 + */ + public List getList(final Object key, final Class clazz) { + notNull("clazz", clazz); + return constructValuesList(key, clazz, null); + } + + /** + * Gets the list value of the given key, casting the list elements to {@code Class} or returning the default list value if null. + * This is useful to avoid having casts in client code, though the effect is the same. + * + * @param key the key + * @param clazz the non-null class to cast the list value to + * @param defaultValue what to return if the value is null + * @param the type of the class + * @return the list value of the given key, or the default list value if the instance does not contain this key. + * @throws ClassCastException if the value of the given key is not of type T + * @since 3.10 + */ + public List getList(final Object key, final Class clazz, final List defaultValue) { + notNull("defaultValue", defaultValue); + notNull("clazz", clazz); + return constructValuesList(key, clazz, defaultValue); + } + + + // Construct the list of values for the specified key, or return the default value if the value is null. + // A ClassCastException will be thrown if an element in the list is not of type T. 
+ @SuppressWarnings("unchecked") + private List constructValuesList(final Object key, final Class clazz, final List defaultValue) { + List value = get(key, List.class); + if (value == null) { + return defaultValue; + } + + for (Object item : value) { + if (item != null && !clazz.isAssignableFrom(item.getClass())) { + throw new ClassCastException(format("List element cannot be cast to %s", clazz.getName())); + } + } + return value; + } + + /** + * Gets a JSON representation of this document using the {@link org.bson.json.JsonMode#RELAXED} output mode, and otherwise the default * settings of {@link JsonWriterSettings.Builder} and {@link DocumentCodec}. * * @return a JSON representation of this document @@ -268,9 +414,8 @@ public Date getDate(final Object key) { * @see #toJson(JsonWriterSettings) * @see JsonWriterSettings */ - @SuppressWarnings("deprecation") public String toJson() { - return toJson(new JsonWriterSettings()); + return toJson(JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); } /** @@ -283,7 +428,7 @@ public String toJson() { * @throws org.bson.codecs.configuration.CodecConfigurationException if the document contains types not in the default registry */ public String toJson(final JsonWriterSettings writerSettings) { - return toJson(writerSettings, new DocumentCodec()); + return toJson(writerSettings, DEFAULT_CODEC); } /** @@ -295,9 +440,8 @@ public String toJson(final JsonWriterSettings writerSettings) { * @return a JSON representation of this document * @throws org.bson.codecs.configuration.CodecConfigurationException if the registry does not contain a codec for the document values. 
*/ - @SuppressWarnings("deprecation") public String toJson(final Encoder encoder) { - return toJson(new JsonWriterSettings(), encoder); + return toJson(JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build(), encoder); } /** @@ -310,7 +454,7 @@ public String toJson(final Encoder encoder) { */ public String toJson(final JsonWriterSettings writerSettings, final Encoder encoder) { JsonWriter writer = new JsonWriter(new StringWriter(), writerSettings); - encoder.encode(writer, this, EncoderContext.builder().isEncodingCollectibleDocument(true).build()); + encoder.encode(writer, this, EncoderContext.builder().build()); return writer.getWriter().toString(); } diff --git a/bson/src/main/org/bson/EmptyBSONCallback.java b/bson/src/main/org/bson/EmptyBSONCallback.java index d7714f39407..db5782679d0 100644 --- a/bson/src/main/org/bson/EmptyBSONCallback.java +++ b/bson/src/main/org/bson/EmptyBSONCallback.java @@ -149,12 +149,6 @@ public void gotDBRef(final String name, final String namespace, final ObjectId i throw new UnsupportedOperationException("Operation is not supported"); } - @Override - @Deprecated - public void gotBinaryArray(final String name, final byte[] data) { - throw new UnsupportedOperationException("Operation is not supported"); - } - @Override public void gotBinary(final String name, final byte type, final byte[] data) { throw new UnsupportedOperationException("Operation is not supported"); diff --git a/bson/src/main/org/bson/FieldNameValidator.java b/bson/src/main/org/bson/FieldNameValidator.java index bb528da90d6..e7438cccb69 100644 --- a/bson/src/main/org/bson/FieldNameValidator.java +++ b/bson/src/main/org/bson/FieldNameValidator.java @@ -16,6 +16,9 @@ package org.bson; +import static java.lang.String.format; +import static org.bson.assertions.Assertions.isTrue; + /** * A field name validator, for use by BSON writers to validate field names as documents are encoded. 
* @@ -30,6 +33,18 @@ public interface FieldNameValidator { */ boolean validate(String fieldName); + /** + * Return the validation error message for an invalid field + * + * @param fieldName the field name + * @return the validation error message + * @throws IllegalArgumentException if fieldName is actually valid + */ + default String getValidationErrorMessage(final String fieldName) { + isTrue(fieldName + " is valid", !validate(fieldName)); + return format("Invalid BSON field name %s", fieldName); + } + /** * Gets a new validator to use for the value of the field with the given name. * @@ -37,4 +52,20 @@ public interface FieldNameValidator { * @return a non-null validator */ FieldNameValidator getValidatorForField(String fieldName); + + /** + * Start validation of a single document. + * + * @since 4.0 + */ + default void start() { + } + + /** + * End validation of a single document. + * + * @since 4.0 + */ + default void end() { + } } diff --git a/bson/src/main/org/bson/Float32BinaryVector.java b/bson/src/main/org/bson/Float32BinaryVector.java new file mode 100644 index 00000000000..37d1b8abb6e --- /dev/null +++ b/bson/src/main/org/bson/Float32BinaryVector.java @@ -0,0 +1,79 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson; + +import java.util.Arrays; + +import static org.bson.assertions.Assertions.assertNotNull; + +/** + * Represents a vector of 32-bit floating-point numbers, where each element in the vector is a float. + *

+ * The {@link Float32BinaryVector} is used to store and retrieve data efficiently using the BSON Binary Subtype 9 format. + * + * @mongodb.server.release 6.0 + * @see BinaryVector#floatVector(float[]) + * @see BsonBinary#BsonBinary(BinaryVector) + * @see BsonBinary#asVector() + * @since 5.3 + */ +public final class Float32BinaryVector extends BinaryVector { + + private final float[] data; + + Float32BinaryVector(final float[] vectorData) { + super(DataType.FLOAT32); + this.data = assertNotNull(vectorData); + } + + /** + * Retrieve the underlying float array representing this {@link Float32BinaryVector}, where each float + * represents an element of a vector. + *

+ * NOTE: The underlying float array is not copied; changes to the returned array will be reflected in this instance. + * + * @return the underlying float array representing this {@link Float32BinaryVector} vector. + */ + public float[] getData() { + return assertNotNull(data); + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Float32BinaryVector that = (Float32BinaryVector) o; + return Arrays.equals(data, that.data); + } + + @Override + public int hashCode() { + return Arrays.hashCode(data); + } + + @Override + public String toString() { + return "Float32Vector{" + + "data=" + Arrays.toString(data) + + ", dataType=" + getDataType() + + '}'; + } +} diff --git a/bson/src/main/org/bson/Int8BinaryVector.java b/bson/src/main/org/bson/Int8BinaryVector.java new file mode 100644 index 00000000000..a851aff94ff --- /dev/null +++ b/bson/src/main/org/bson/Int8BinaryVector.java @@ -0,0 +1,80 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson; + +import java.util.Arrays; +import java.util.Objects; + +import static org.bson.assertions.Assertions.assertNotNull; + +/** + * Represents a vector of 8-bit signed integers, where each element in the vector is a byte. + *

+ * The {@link Int8BinaryVector} is used to store and retrieve data efficiently using the BSON Binary Subtype 9 format. + * + * @mongodb.server.release 6.0 + * @see BinaryVector#int8Vector(byte[]) + * @see BsonBinary#BsonBinary(BinaryVector) + * @see BsonBinary#asVector() + * @since 5.3 + */ +public final class Int8BinaryVector extends BinaryVector { + + private byte[] data; + + Int8BinaryVector(final byte[] data) { + super(DataType.INT8); + this.data = assertNotNull(data); + } + + /** + * Retrieve the underlying byte array representing this {@link Int8BinaryVector} vector, where each byte represents + * an element of a vector. + *

+ * NOTE: The underlying byte array is not copied; changes to the returned array will be reflected in this instance. + * + * @return the underlying byte array representing this {@link Int8BinaryVector} vector. + */ + public byte[] getData() { + return assertNotNull(data); + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Int8BinaryVector that = (Int8BinaryVector) o; + return Objects.deepEquals(data, that.data); + } + + @Override + public int hashCode() { + return Arrays.hashCode(data); + } + + @Override + public String toString() { + return "Int8Vector{" + + "data=" + Arrays.toString(data) + + ", dataType=" + getDataType() + + '}'; + } +} diff --git a/bson/src/main/org/bson/LazyBSONDecoder.java b/bson/src/main/org/bson/LazyBSONDecoder.java index c133482d544..7ca3fd5099a 100644 --- a/bson/src/main/org/bson/LazyBSONDecoder.java +++ b/bson/src/main/org/bson/LazyBSONDecoder.java @@ -16,8 +16,6 @@ package org.bson; -import org.bson.io.Bits; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; diff --git a/bson/src/main/org/bson/LazyBSONList.java b/bson/src/main/org/bson/LazyBSONList.java index 2e6137171f5..43d91b37eab 100644 --- a/bson/src/main/org/bson/LazyBSONList.java +++ b/bson/src/main/org/bson/LazyBSONList.java @@ -68,7 +68,7 @@ public Iterator iterator() { @Override public boolean containsAll(final Collection collection) { - Set values = new HashSet(); + Set values = new HashSet<>(); for (final Object o : this) { values.add(o); } diff --git a/bson/src/main/org/bson/LazyBSONObject.java b/bson/src/main/org/bson/LazyBSONObject.java index 9125756fab8..35afc1b33ff 100644 --- a/bson/src/main/org/bson/LazyBSONObject.java +++ b/bson/src/main/org/bson/LazyBSONObject.java @@ -16,8 +16,6 @@ package org.bson; -import org.bson.codecs.DecoderContext; -import org.bson.codecs.UuidCodec; import 
org.bson.io.ByteBufferBsonInput; import org.bson.types.BSONTimestamp; import org.bson.types.Binary; @@ -122,16 +120,9 @@ public Object get(final String key) { return value; } - @Override - @Deprecated - public boolean containsKey(final String key) { - return containsField(key); - } - @Override public boolean containsField(final String s) { - BsonBinaryReader reader = getBsonReader(); - try { + try (BsonBinaryReader reader = getBsonReader()) { reader.readStartDocument(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { if (reader.readName().equals(s)) { @@ -140,25 +131,20 @@ public boolean containsField(final String s) { reader.skipValue(); } } - } finally { - reader.close(); } return false; } @Override public Set keySet() { - Set keys = new LinkedHashSet(); - BsonBinaryReader reader = getBsonReader(); - try { + Set keys = new LinkedHashSet<>(); + try (BsonBinaryReader reader = getBsonReader()) { reader.readStartDocument(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { keys.add(reader.readName()); reader.skipValue(); } reader.readEndDocument(); - } finally { - reader.close(); } return Collections.unmodifiableSet(keys); } @@ -175,9 +161,6 @@ Object readValue(final BsonBinaryReader reader) { return reader.readString(); case BINARY: byte binarySubType = reader.peekBinarySubType(); - if (BsonBinarySubType.isUuid(binarySubType) && reader.peekBinarySize() == 16) { - return new UuidCodec().decode(reader, DecoderContext.builder().build()); - } BsonBinary binary = reader.readBinaryData(); if (binarySubType == BINARY.getValue() || binarySubType == OLD_BINARY.getValue()) { return binary.getData(); @@ -303,16 +286,13 @@ public int pipe(final OutputStream os) throws IOException { * @return then entry set */ public Set> entrySet() { - final List> entries = new ArrayList>(); - BsonBinaryReader reader = getBsonReader(); - try { + List> entries = new ArrayList<>(); + try (BsonBinaryReader reader = getBsonReader()) { reader.readStartDocument(); while 
(reader.readBsonType() != BsonType.END_OF_DOCUMENT) { - entries.add(new AbstractMap.SimpleImmutableEntry(reader.readName(), readValue(reader))); + entries.add(new AbstractMap.SimpleImmutableEntry<>(reader.readName(), readValue(reader))); } reader.readEndDocument(); - } finally { - reader.close(); } return new Set>() { @Override @@ -482,7 +462,7 @@ public Object removeField(final String key) { @Override @SuppressWarnings("rawtypes") public Map toMap() { - Map map = new LinkedHashMap(); + Map map = new LinkedHashMap<>(); for (final Map.Entry entry : entrySet()) { map.put(entry.getKey(), entry.getValue()); } diff --git a/bson/src/main/org/bson/NoOpFieldNameValidator.java b/bson/src/main/org/bson/NoOpFieldNameValidator.java index 9d47705f574..33353498986 100644 --- a/bson/src/main/org/bson/NoOpFieldNameValidator.java +++ b/bson/src/main/org/bson/NoOpFieldNameValidator.java @@ -16,7 +16,12 @@ package org.bson; -class NoOpFieldNameValidator implements FieldNameValidator { +final class NoOpFieldNameValidator implements FieldNameValidator { + static final NoOpFieldNameValidator INSTANCE = new NoOpFieldNameValidator(); + + private NoOpFieldNameValidator() { + } + @Override public boolean validate(final String fieldName) { return true; diff --git a/bson/src/main/org/bson/PackedBitBinaryVector.java b/bson/src/main/org/bson/PackedBitBinaryVector.java new file mode 100644 index 00000000000..33200650204 --- /dev/null +++ b/bson/src/main/org/bson/PackedBitBinaryVector.java @@ -0,0 +1,105 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson; + +import org.bson.annotations.Beta; +import org.bson.annotations.Reason; + +import java.util.Arrays; +import java.util.Objects; + +import static org.bson.assertions.Assertions.assertNotNull; + +/** + * Represents a packed bit vector, where each element of the vector is represented by a single bit (0 or 1). + *

+ * The {@link PackedBitBinaryVector} is used to store data efficiently using the BSON Binary Subtype 9 format. + * + * @mongodb.server.release 6.0 + * @see BinaryVector#packedBitVector(byte[], byte) + * @see BsonBinary#BsonBinary(BinaryVector) + * @see BsonBinary#asVector() + * @since 5.3 + */ +@Beta(Reason.SERVER) +public final class PackedBitBinaryVector extends BinaryVector { + + private final byte padding; + private final byte[] data; + + PackedBitBinaryVector(final byte[] data, final byte padding) { + super(DataType.PACKED_BIT); + this.data = assertNotNull(data); + this.padding = padding; + } + + /** + * Retrieve the underlying byte array representing this {@link PackedBitBinaryVector} vector, where + * each bit represents an element of the vector (either 0 or 1). + *

+ * Note that the {@linkplain #getPadding() padding value} should be considered when interpreting the final byte of the array, + * as it indicates how many least-significant bits are to be ignored. + * + * @return the underlying byte array representing this {@link PackedBitBinaryVector} vector. + * @see #getPadding() + */ + public byte[] getData() { + return assertNotNull(data); + } + + /** + * Returns the padding value for this vector. + * + *

Padding refers to the number of least-significant bits in the final byte that are ignored when retrieving + * {@linkplain #getData() the vector array}. For instance, if the padding value is 3, this means that the last byte contains + * 3 least-significant unused bits, which should be disregarded during operations.

+ *

+ * + * NOTE: The underlying byte array is not copied; changes to the returned array will be reflected in this instance. + * + * @return the padding value (between 0 and 7). + */ + public byte getPadding() { + return this.padding; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PackedBitBinaryVector that = (PackedBitBinaryVector) o; + return padding == that.padding && Arrays.equals(data, that.data); + } + + @Override + public int hashCode() { + return Objects.hash(padding, Arrays.hashCode(data)); + } + + @Override + public String toString() { + return "PackedBitVector{" + + "padding=" + padding + + ", data=" + Arrays.toString(data) + + ", dataType=" + getDataType() + + '}'; + } +} diff --git a/bson/src/main/org/bson/RawBsonArray.java b/bson/src/main/org/bson/RawBsonArray.java index 6e5c239980a..fc56f312e01 100644 --- a/bson/src/main/org/bson/RawBsonArray.java +++ b/bson/src/main/org/bson/RawBsonArray.java @@ -141,12 +141,29 @@ public int hashCode() { return super.hashCode(); } - // see https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + /** + * Write the replacement object. + * + *

+ * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + *

+ * + * @return a proxy for the document + */ private Object writeReplace() { return new SerializationProxy(delegate.bytes, delegate.offset, delegate.length); } - // see https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + /** + * Prevent normal deserialization. + * + *

+ * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + *

+ * + * @param stream the stream + * @throws InvalidObjectException in all cases + */ private void readObject(final ObjectInputStream stream) throws InvalidObjectException { throw new InvalidObjectException("Proxy required"); } @@ -194,8 +211,7 @@ public BsonValue get(final int index) { throw new IndexOutOfBoundsException(); } int curIndex = 0; - BsonBinaryReader bsonReader = createReader(); - try { + try (BsonBinaryReader bsonReader = createReader()) { bsonReader.readStartDocument(); while (bsonReader.readBsonType() != BsonType.END_OF_DOCUMENT) { bsonReader.skipName(); @@ -206,8 +222,6 @@ public BsonValue get(final int index) { curIndex++; } bsonReader.readEndDocument(); - } finally { - bsonReader.close(); } throw new IndexOutOfBoundsException(); } @@ -218,8 +232,7 @@ public int size() { return cachedSize; } int size = 0; - BsonBinaryReader bsonReader = createReader(); - try { + try (BsonBinaryReader bsonReader = createReader()) { bsonReader.readStartDocument(); while (bsonReader.readBsonType() != BsonType.END_OF_DOCUMENT) { size++; @@ -227,8 +240,6 @@ public int size() { bsonReader.skipValue(); } bsonReader.readEndDocument(); - } finally { - bsonReader.close(); } cachedSize = size; return cachedSize; diff --git a/bson/src/main/org/bson/RawBsonDocument.java b/bson/src/main/org/bson/RawBsonDocument.java index b4c9112823e..eb672bcef8d 100644 --- a/bson/src/main/org/bson/RawBsonDocument.java +++ b/bson/src/main/org/bson/RawBsonDocument.java @@ -24,6 +24,7 @@ import org.bson.codecs.RawBsonDocumentCodec; import org.bson.io.BasicOutputBuffer; import org.bson.io.ByteBufferBsonInput; +import org.bson.json.JsonMode; import org.bson.json.JsonReader; import org.bson.json.JsonWriter; import org.bson.json.JsonWriterSettings; @@ -51,8 +52,19 @@ public final class RawBsonDocument extends BsonDocument { private static final long serialVersionUID = 1L; private static final int MIN_BSON_DOCUMENT_SIZE = 5; + /** + * The raw bytes. 
+ */ private final byte[] bytes; + + /** + * The offset into bytes, which must be less than {@code bytes.length}. + */ private final int offset; + + /** + * The length, which must be less than {@code offset + bytes.length}. + */ private final int length; /** @@ -112,14 +124,11 @@ public RawBsonDocument(final T document, final Codec codec) { notNull("document", document); notNull("codec", codec); BasicOutputBuffer buffer = new BasicOutputBuffer(); - BsonBinaryWriter writer = new BsonBinaryWriter(buffer); - try { + try (BsonBinaryWriter writer = new BsonBinaryWriter(buffer)) { codec.encode(writer, document, EncoderContext.builder().build()); this.bytes = buffer.getInternalBuffer(); this.offset = 0; this.length = buffer.getPosition(); - } finally { - writer.close(); } } @@ -155,11 +164,8 @@ public T decode(final Codec codec) { * @since 3.6 */ public T decode(final Decoder decoder) { - BsonBinaryReader reader = createReader(); - try { + try (BsonBinaryReader reader = createReader()) { return decoder.decode(reader, DecoderContext.builder().build()); - } finally { - reader.close(); } } @@ -190,15 +196,12 @@ public BsonValue remove(final Object key) { @Override public boolean isEmpty() { - BsonBinaryReader bsonReader = createReader(); - try { + try (BsonBinaryReader bsonReader = createReader()) { bsonReader.readStartDocument(); if (bsonReader.readBsonType() != BsonType.END_OF_DOCUMENT) { return false; } bsonReader.readEndDocument(); - } finally { - bsonReader.close(); } return true; @@ -207,8 +210,7 @@ public boolean isEmpty() { @Override public int size() { int size = 0; - BsonBinaryReader bsonReader = createReader(); - try { + try (BsonBinaryReader bsonReader = createReader()) { bsonReader.readStartDocument(); while (bsonReader.readBsonType() != BsonType.END_OF_DOCUMENT) { size++; @@ -216,8 +218,6 @@ public int size() { bsonReader.skipValue(); } bsonReader.readEndDocument(); - } finally { - bsonReader.close(); } return size; @@ -225,31 +225,28 @@ public int size() { 
@Override public Set> entrySet() { - return toBsonDocument().entrySet(); + return toBaseBsonDocument().entrySet(); } @Override public Collection values() { - return toBsonDocument().values(); + return toBaseBsonDocument().values(); } @Override public Set keySet() { - return toBsonDocument().keySet(); + return toBaseBsonDocument().keySet(); } @Override public String getFirstKey() { - BsonBinaryReader bsonReader = createReader(); - try { + try (BsonBinaryReader bsonReader = createReader()) { bsonReader.readStartDocument(); try { return bsonReader.readName(); } catch (BsonInvalidOperationException e) { throw new NoSuchElementException(); } - } finally { - bsonReader.close(); } } @@ -259,8 +256,7 @@ public boolean containsKey(final Object key) { throw new IllegalArgumentException("key can not be null"); } - BsonBinaryReader bsonReader = createReader(); - try { + try (BsonBinaryReader bsonReader = createReader()) { bsonReader.readStartDocument(); while (bsonReader.readBsonType() != BsonType.END_OF_DOCUMENT) { if (bsonReader.readName().equals(key)) { @@ -269,8 +265,6 @@ public boolean containsKey(final Object key) { bsonReader.skipValue(); } bsonReader.readEndDocument(); - } finally { - bsonReader.close(); } return false; @@ -278,8 +272,7 @@ public boolean containsKey(final Object key) { @Override public boolean containsValue(final Object value) { - BsonBinaryReader bsonReader = createReader(); - try { + try (BsonBinaryReader bsonReader = createReader()) { bsonReader.readStartDocument(); while (bsonReader.readBsonType() != BsonType.END_OF_DOCUMENT) { bsonReader.skipName(); @@ -288,8 +281,6 @@ public boolean containsValue(final Object value) { } } bsonReader.readEndDocument(); - } finally { - bsonReader.close(); } return false; @@ -299,8 +290,7 @@ public boolean containsValue(final Object value) { public BsonValue get(final Object key) { notNull("key", key); - BsonBinaryReader bsonReader = createReader(); - try { + try (BsonBinaryReader bsonReader = createReader()) { 
bsonReader.readStartDocument(); while (bsonReader.readBsonType() != BsonType.END_OF_DOCUMENT) { if (bsonReader.readName().equals(key)) { @@ -309,17 +299,14 @@ public BsonValue get(final Object key) { bsonReader.skipValue(); } bsonReader.readEndDocument(); - } finally { - bsonReader.close(); } return null; } @Override - @SuppressWarnings("deprecation") public String toJson() { - return toJson(new JsonWriterSettings()); + return toJson(JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); } @Override @@ -331,12 +318,12 @@ public String toJson(final JsonWriterSettings settings) { @Override public boolean equals(final Object o) { - return toBsonDocument().equals(o); + return toBaseBsonDocument().equals(o); } @Override public int hashCode() { - return toBsonDocument().hashCode(); + return toBaseBsonDocument().hashCode(); } @Override @@ -348,21 +335,36 @@ private BsonBinaryReader createReader() { return new BsonBinaryReader(new ByteBufferBsonInput(getByteBuffer())); } - private BsonDocument toBsonDocument() { - BsonBinaryReader bsonReader = createReader(); - try { + // Transform to an org.bson.BsonDocument instance + private BsonDocument toBaseBsonDocument() { + try (BsonBinaryReader bsonReader = createReader()) { return new BsonDocumentCodec().decode(bsonReader, DecoderContext.builder().build()); - } finally { - bsonReader.close(); } } - // see https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + /** + * Write the replacement object. + * + *

+ * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + *

+ * + * @return a proxy for the document + */ private Object writeReplace() { return new SerializationProxy(this.bytes, offset, length); } - // see https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + /** + * Prevent normal deserialization. + * + *

+ * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + *

+ * + * @param stream the stream + * @throws InvalidObjectException in all cases + */ private void readObject(final ObjectInputStream stream) throws InvalidObjectException { throw new InvalidObjectException("Proxy required"); } diff --git a/bson/src/main/org/bson/StringUtils.java b/bson/src/main/org/bson/StringUtils.java index a6017fc88a9..461b27d9113 100644 --- a/bson/src/main/org/bson/StringUtils.java +++ b/bson/src/main/org/bson/StringUtils.java @@ -16,21 +16,14 @@ package org.bson; -import java.util.Collection; -import java.util.Iterator; +import java.util.Arrays; +import java.util.stream.Collectors; final class StringUtils { - public static String join(final String delimiter, final Collection s) { - StringBuilder builder = new StringBuilder(); - Iterator iter = s.iterator(); - while (iter.hasNext()) { - builder.append(iter.next()); - if (!iter.hasNext()) { - break; - } - builder.append(delimiter); - } - return builder.toString(); + @SafeVarargs + @SuppressWarnings("varargs") + public static String join(final String delimiter, final T... values) { + return Arrays.stream(values).map(String::valueOf).collect(Collectors.joining(delimiter)); } private StringUtils() { } diff --git a/bson/src/main/org/bson/UuidRepresentation.java b/bson/src/main/org/bson/UuidRepresentation.java index fac0a5826ae..76695b65314 100644 --- a/bson/src/main/org/bson/UuidRepresentation.java +++ b/bson/src/main/org/bson/UuidRepresentation.java @@ -16,6 +16,10 @@ package org.bson; +import static java.lang.String.format; +import static org.bson.BsonBinarySubType.UUID_LEGACY; +import static org.bson.BsonBinarySubType.UUID_STANDARD; + /** * The representation to use when converting a UUID to a BSON binary value. * This class is necessary because the different drivers used to have different @@ -24,23 +28,31 @@ * @since 3.0 */ public enum UuidRepresentation { + /** - * The canonical representation of UUID + * An unspecified representation of UUID. 
Essentially, this is the null representation value. * + * @since 3.12 + */ + UNSPECIFIED, + + /** + * The canonical representation of UUID + *

* BSON binary subtype 4 */ STANDARD, /** * The legacy representation of UUID used by the C# driver - * + *

* BSON binary subtype 3 */ C_SHARP_LEGACY, /** * The legacy representation of UUID used by the Java driver - * + *

* BSON binary subtype 3 */ JAVA_LEGACY, @@ -48,8 +60,28 @@ public enum UuidRepresentation { /** * The legacy representation of UUID used by the Python driver, which is the same * format as STANDARD, but has the UUID old BSON subtype (\x03) - * + *

* BSON binary subtype 3 */ - PYTHON_LEGACY + PYTHON_LEGACY; + + /** + * Gets the BSON binary subtype for the representation. + * + * @return the BSON binary subtype for the representation + * @throws BSONException if this is {@link #UNSPECIFIED} + * @since 4.7 + */ + public BsonBinarySubType getSubtype() { + switch (this) { + case STANDARD: + return UUID_STANDARD; + case JAVA_LEGACY: + case PYTHON_LEGACY: + case C_SHARP_LEGACY: + return UUID_LEGACY; + default: + throw new BSONException(format("No BsonBinarySubType for %s", this)); + } + } } diff --git a/bson/src/main/org/bson/annotations/Beta.java b/bson/src/main/org/bson/annotations/Beta.java new file mode 100644 index 00000000000..0db9171952c --- /dev/null +++ b/bson/src/main/org/bson/annotations/Beta.java @@ -0,0 +1,56 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * Copyright 2010 The Guava Authors + * Copyright 2011 The Guava Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Signifies that a public API (public class, method or field) is subject to + * incompatible changes, or even removal, in a future release. An API bearing + * this annotation is exempt from any compatibility guarantees made by its + * containing library. 
Note that the presence of this annotation implies nothing + * about the quality or performance of the API in question, only the fact that + * it is not "API-frozen." + * + *

It is generally safe for applications to depend on beta APIs, at + * the cost of some extra work during upgrades. However it is generally + * inadvisable for libraries (which get included on users' CLASSPATHs, + * outside the library developers' control) to do so. + * + **/ +@Retention(RetentionPolicy.CLASS) +@Target({ + ElementType.ANNOTATION_TYPE, + ElementType.CONSTRUCTOR, + ElementType.FIELD, + ElementType.METHOD, + ElementType.PACKAGE, + ElementType.TYPE }) +@Documented +@Beta(Reason.CLIENT) +public @interface Beta { + /** + * @return The reason an API element is marked with {@link Beta}. + */ + Reason[] value(); +} diff --git a/bson/src/main/org/bson/annotations/Reason.java b/bson/src/main/org/bson/annotations/Reason.java new file mode 100644 index 00000000000..d0b11c79651 --- /dev/null +++ b/bson/src/main/org/bson/annotations/Reason.java @@ -0,0 +1,34 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.annotations; + +/** + * Enumerates the reasons an API element might be marked with annotations like {@link Beta}. + */ +@Beta(Reason.CLIENT) +public enum Reason { + /** + * Indicates that the status of the driver API is the reason for the annotation. + */ + CLIENT, + + /** + * The driver API relies on the server API. + * This dependency is the reason for the annotation and suggests that changes in the server API could impact the driver API. 
+ */ + SERVER +} diff --git a/bson/src/main/org/bson/annotations/package-info.java b/bson/src/main/org/bson/annotations/package-info.java new file mode 100644 index 00000000000..ac5cd9dabf9 --- /dev/null +++ b/bson/src/main/org/bson/annotations/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains annotations that can apply to any part of the BSON library code. + */ +package org.bson.annotations; diff --git a/bson/src/main/org/bson/assertions/Assertions.java b/bson/src/main/org/bson/assertions/Assertions.java index 323923ba1ab..16e4a3f1737 100644 --- a/bson/src/main/org/bson/assertions/Assertions.java +++ b/bson/src/main/org/bson/assertions/Assertions.java @@ -17,6 +17,8 @@ package org.bson.assertions; +import javax.annotation.Nullable; + /** *

Design by contract assertions.

This class is not part of the public API and may be removed or changed at any time.

*/ @@ -63,6 +65,82 @@ public static void isTrueArgument(final String name, final boolean condition) { } } + /** + * Throw IllegalArgumentException if the condition is false, otherwise return the value. This is useful when arguments must be checked + * within an expression, as when using {@code this} to call another constructor, which must be the first line of the calling + * constructor. + * + * @param <T> the value type + * @param name the name of the state that is being checked + * @param value the value of the argument + * @param condition the condition about the parameter to check + * @return the value + * @throws java.lang.IllegalArgumentException if the condition is false + */ + public static <T> T isTrueArgument(final String name, final T value, final boolean condition) { + if (!condition) { + throw new IllegalArgumentException("state should be: " + name); + } + return value; + } + + /** + * @return Never completes normally. The return type is {@link AssertionError} to allow writing {@code throw fail()}. + * This may be helpful in non-{@code void} methods. + * @throws AssertionError Always + */ + public static AssertionError fail() throws AssertionError { + throw new AssertionError(); + } + + /** + * @param msg The failure message. + * @return Never completes normally. The return type is {@link AssertionError} to allow writing {@code throw fail("failure message")}. + * This may be helpful in non-{@code void} methods. + * @throws AssertionError Always + */ + public static AssertionError fail(final String msg) throws AssertionError { + throw new AssertionError(assertNotNull(msg)); + } + + /** + * @param msg The failure message. + * @param cause The underlying cause + * @return Never completes normally. The return type is {@link AssertionError} to allow writing + * {@code throw fail("failure message", throwable)}. + * This may be helpful in non-{@code void} methods.
+ * @throws AssertionError Always + */ + public static AssertionError fail(final String msg, final Throwable cause) throws AssertionError { + throw new AssertionError(assertNotNull(msg), assertNotNull(cause)); + } + + /** + * @param value A value to check. + * @param <T> The type of {@code value}. + * @return {@code value} + * @throws AssertionError If {@code value} is {@code null}. + */ + public static <T> T assertNotNull(@Nullable final T value) throws AssertionError { + if (value == null) { + throw new AssertionError(); + } + return value; + } + + /** + * Throw AssertionError if the condition is false. + * + * @param name the name of the state that is being checked + * @param condition the condition about the parameter to check + * @throws AssertionError if the condition is false + */ + public static void assertTrue(final String name, final boolean condition) { + if (!condition) { + throw new AssertionError("state should be: " + assertNotNull(name)); + } + } + /** * Cast an object to the given class and return it, or throw IllegalArgumentException if it's not assignable to that class. * diff --git a/bson/src/main/org/bson/codecs/AbstractCollectionCodec.java b/bson/src/main/org/bson/codecs/AbstractCollectionCodec.java new file mode 100644 index 00000000000..9d2edfd6da3 --- /dev/null +++ b/bson/src/main/org/bson/codecs/AbstractCollectionCodec.java @@ -0,0 +1,121 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.bson.codecs; + +import org.bson.BsonReader; +import org.bson.BsonType; +import org.bson.BsonWriter; +import org.bson.codecs.configuration.CodecConfigurationException; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.AbstractCollection; +import java.util.AbstractList; +import java.util.AbstractSet; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.NavigableSet; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Supplier; + +import static java.lang.String.format; +import static org.bson.assertions.Assertions.notNull; + +abstract class AbstractCollectionCodec> implements Codec { + + private final Class clazz; + private final Supplier supplier; + + @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable", "rawtypes"}) + AbstractCollectionCodec(final Class clazz) { + this.clazz = notNull("clazz", clazz); + Class rawClass = clazz; + if (rawClass == Collection.class || rawClass == List.class || rawClass == AbstractCollection.class || rawClass == AbstractList.class + || rawClass == ArrayList.class) { + supplier = () -> (C) new ArrayList(); + } else if (rawClass == Set.class || rawClass == AbstractSet.class || rawClass == HashSet.class) { + supplier = () -> (C) new HashSet(); + } else if (rawClass == NavigableSet.class || rawClass == SortedSet.class || rawClass == TreeSet.class) { + //noinspection SortedCollectionWithNonComparableKeys + supplier = () -> (C) new TreeSet(); + } else { + Constructor> constructor; + Supplier supplier; + try { + constructor = clazz.getDeclaredConstructor(); + supplier = () -> { + try { + return (C) constructor.newInstance(); + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + throw new CodecConfigurationException(format("Can not invoke no-args constructor for Collection class %s", clazz), + 
e); + } + }; + } catch (NoSuchMethodException e) { + supplier = () -> { + throw new CodecConfigurationException(format("No no-args constructor for Collection class %s", clazz), e); + }; + } + this.supplier = supplier; + } + } + + abstract T readValue(BsonReader reader, DecoderContext decoderContext); + + abstract void writeValue(BsonWriter writer, T cur, EncoderContext encoderContext); + + @Override + public C decode(final BsonReader reader, final DecoderContext decoderContext) { + reader.readStartArray(); + + C collection = supplier.get(); + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + if (reader.getCurrentBsonType() == BsonType.NULL) { + reader.readNull(); + collection.add(null); + } else { + collection.add(readValue(reader, decoderContext)); + } + } + + reader.readEndArray(); + + return collection; + } + + @Override + public void encode(final BsonWriter writer, final C value, final EncoderContext encoderContext) { + writer.writeStartArray(); + for (final T cur : value) { + if (cur == null) { + writer.writeNull(); + } else { + writeValue(writer, cur, encoderContext); + } + } + writer.writeEndArray(); + } + + @Override + public Class getEncoderClass() { + return clazz; + } +} diff --git a/bson/src/main/org/bson/codecs/AbstractMapCodec.java b/bson/src/main/org/bson/codecs/AbstractMapCodec.java new file mode 100644 index 00000000000..3987c19da92 --- /dev/null +++ b/bson/src/main/org/bson/codecs/AbstractMapCodec.java @@ -0,0 +1,114 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonReader; +import org.bson.BsonType; +import org.bson.BsonWriter; +import org.bson.codecs.configuration.CodecConfigurationException; + +import javax.annotation.Nullable; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.AbstractMap; +import java.util.HashMap; +import java.util.Map; +import java.util.NavigableMap; +import java.util.TreeMap; +import java.util.function.Supplier; + +import static java.lang.String.format; +import static org.bson.assertions.Assertions.notNull; + +abstract class AbstractMapCodec> implements Codec { + + private final Supplier supplier; + private final Class clazz; + + @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable", "rawtypes"}) + AbstractMapCodec(@Nullable final Class clazz) { + this.clazz = notNull("clazz", clazz); + Class rawClass = clazz; + if (rawClass == Map.class || rawClass == AbstractMap.class || rawClass == HashMap.class) { + supplier = () -> (M) new HashMap(); + } else if (rawClass == NavigableMap.class || rawClass == TreeMap.class) { + supplier = () -> (M) new TreeMap(); + } else { + Constructor> constructor; + Supplier supplier; + try { + constructor = clazz.getDeclaredConstructor(); + supplier = () -> { + try { + return (M) constructor.newInstance(); + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + throw new CodecConfigurationException("Can not invoke no-args constructor for Map class %s", e); + } + }; + } catch (NoSuchMethodException e) { + supplier = () -> { + throw new CodecConfigurationException(format("Map class %s has no public no-args constructor", clazz), e); + }; + } + this.supplier = supplier; + } + } + + abstract T readValue(BsonReader reader, DecoderContext decoderContext); + + abstract void writeValue(BsonWriter writer, T value, 
EncoderContext encoderContext); + + @Override + public void encode(final BsonWriter writer, final M map, final EncoderContext encoderContext) { + writer.writeStartDocument(); + for (final Map.Entry entry : map.entrySet()) { + writer.writeName(entry.getKey()); + T value = entry.getValue(); + if (value == null) { + writer.writeNull(); + } else { + writeValue(writer, value, encoderContext); + } + } + writer.writeEndDocument(); + } + + + @Override + public M decode(final BsonReader reader, final DecoderContext decoderContext) { + M map = supplier.get(); + + reader.readStartDocument(); + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + String fieldName = reader.readName(); + if (reader.getCurrentBsonType() == BsonType.NULL) { + reader.readNull(); + map.put(fieldName, null); + } else { + map.put(fieldName, readValue(reader, decoderContext)); + } + } + + reader.readEndDocument(); + return map; + } + + @Override + public Class getEncoderClass() { + return clazz; + } +} diff --git a/bson/src/main/org/bson/codecs/AtomicIntegerCodec.java b/bson/src/main/org/bson/codecs/AtomicIntegerCodec.java index 8fd3e55876b..d8963ed40d7 100644 --- a/bson/src/main/org/bson/codecs/AtomicIntegerCodec.java +++ b/bson/src/main/org/bson/codecs/AtomicIntegerCodec.java @@ -21,7 +21,7 @@ import java.util.concurrent.atomic.AtomicInteger; -import static org.bson.codecs.NumberCodecHelper.decodeInt; +import static org.bson.internal.NumberCodecHelper.decodeInt; /** * Encodes and decodes {@code AtomicInteger} objects. 
diff --git a/bson/src/main/org/bson/codecs/AtomicLongCodec.java b/bson/src/main/org/bson/codecs/AtomicLongCodec.java index c6e053c6d9f..7f08af77961 100644 --- a/bson/src/main/org/bson/codecs/AtomicLongCodec.java +++ b/bson/src/main/org/bson/codecs/AtomicLongCodec.java @@ -21,7 +21,7 @@ import java.util.concurrent.atomic.AtomicLong; -import static org.bson.codecs.NumberCodecHelper.decodeLong; +import static org.bson.internal.NumberCodecHelper.decodeLong; /** * Encodes and decodes {@code AtomicLong} objects. diff --git a/bson/src/main/org/bson/codecs/BinaryVectorCodec.java b/bson/src/main/org/bson/codecs/BinaryVectorCodec.java new file mode 100644 index 00000000000..4d23557ad49 --- /dev/null +++ b/bson/src/main/org/bson/codecs/BinaryVectorCodec.java @@ -0,0 +1,56 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonInvalidOperationException; +import org.bson.BsonReader; +import org.bson.BsonWriter; +import org.bson.BinaryVector; + +/** + * Encodes and decodes {@link BinaryVector} objects. 
+ * + */ + final class BinaryVectorCodec implements Codec { + + @Override + public void encode(final BsonWriter writer, final BinaryVector vectorToEncode, final EncoderContext encoderContext) { + writer.writeBinaryData(new BsonBinary(vectorToEncode)); + } + + @Override + public BinaryVector decode(final BsonReader reader, final DecoderContext decoderContext) { + byte subType = reader.peekBinarySubType(); + + if (subType != BsonBinarySubType.VECTOR.getValue()) { + throw new BsonInvalidOperationException("Expected vector binary subtype " + BsonBinarySubType.VECTOR.getValue() + " but found " + subType); + } + + return reader.readBinaryData() + .asBinary() + .asVector(); + } + + @Override + public Class getEncoderClass() { + return BinaryVector.class; + } +} + + diff --git a/bson/src/main/org/bson/codecs/BsonArrayCodec.java b/bson/src/main/org/bson/codecs/BsonArrayCodec.java index 9f9c11a1859..6d16bb7d1b0 100644 --- a/bson/src/main/org/bson/codecs/BsonArrayCodec.java +++ b/bson/src/main/org/bson/codecs/BsonArrayCodec.java @@ -23,10 +23,8 @@ import org.bson.BsonWriter; import org.bson.codecs.configuration.CodecRegistry; -import java.util.ArrayList; -import java.util.List; - import static org.bson.assertions.Assertions.notNull; +import static org.bson.codecs.BsonValueCodecProvider.getBsonTypeClassMap; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; /** @@ -37,8 +35,8 @@ public class BsonArrayCodec implements Codec { private static final CodecRegistry DEFAULT_REGISTRY = fromProviders(new BsonValueCodecProvider()); - - private final CodecRegistry codecRegistry; + private static final BsonTypeCodecMap DEFAULT_BSON_TYPE_CODEC_MAP = new BsonTypeCodecMap(getBsonTypeClassMap(), DEFAULT_REGISTRY); + private final BsonTypeCodecMap bsonTypeCodecMap; /** * Creates a new instance with a default codec registry that uses the {@link BsonValueCodecProvider}. 
@@ -46,7 +44,7 @@ public class BsonArrayCodec implements Codec { * @since 3.4 */ public BsonArrayCodec() { - this(DEFAULT_REGISTRY); + this(DEFAULT_BSON_TYPE_CODEC_MAP); } /** @@ -55,21 +53,22 @@ public BsonArrayCodec() { * @param codecRegistry the codec registry */ public BsonArrayCodec(final CodecRegistry codecRegistry) { - this.codecRegistry = notNull("codecRegistry", codecRegistry); + this(new BsonTypeCodecMap(getBsonTypeClassMap(), codecRegistry)); + } + + private BsonArrayCodec(final BsonTypeCodecMap bsonTypeCodecMap) { + this.bsonTypeCodecMap = notNull("bsonTypeCodecMap", bsonTypeCodecMap); } @Override public BsonArray decode(final BsonReader reader, final DecoderContext decoderContext) { + BsonArray bsonArray = new BsonArray(); reader.readStartArray(); - - List list = new ArrayList(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { - list.add(readValue(reader, decoderContext)); + bsonArray.add(readValue(reader, decoderContext)); } - reader.readEndArray(); - - return new BsonArray(list); + return bsonArray; } @Override @@ -78,7 +77,7 @@ public void encode(final BsonWriter writer, final BsonArray array, final Encoder writer.writeStartArray(); for (BsonValue value : array) { - Codec codec = codecRegistry.get(value.getClass()); + Codec codec = bsonTypeCodecMap.get(value.getBsonType()); encoderContext.encodeWithChildContext(codec, writer, value); } @@ -99,7 +98,7 @@ public Class getEncoderClass() { * @return the non-null value read from the reader */ protected BsonValue readValue(final BsonReader reader, final DecoderContext decoderContext) { - return codecRegistry.get(BsonValueCodecProvider.getClassForBsonType(reader.getCurrentBsonType())).decode(reader, decoderContext); + BsonType currentBsonType = reader.getCurrentBsonType(); + return (BsonValue) bsonTypeCodecMap.get(currentBsonType).decode(reader, decoderContext); } - } diff --git a/bson/src/main/org/bson/codecs/BsonCodec.java b/bson/src/main/org/bson/codecs/BsonCodec.java new file mode 100644 
index 00000000000..f7e81a0bb3f --- /dev/null +++ b/bson/src/main/org/bson/codecs/BsonCodec.java @@ -0,0 +1,65 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonDocument; +import org.bson.BsonReader; +import org.bson.BsonWriter; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; + +import static java.lang.String.format; + +/** + * A codec for encoding Bson Implementations + * + * @since 3.11 + */ +public class BsonCodec implements Codec { + private static final Codec BSON_DOCUMENT_CODEC = new BsonDocumentCodec(); + private final CodecRegistry registry; + + /** + * Create a new instance + * + * @param registry the codec registry + */ + public BsonCodec(final CodecRegistry registry) { + this.registry = registry; + } + + @Override + public Bson decode(final BsonReader reader, final DecoderContext decoderContext) { + throw new UnsupportedOperationException("The BsonCodec can only encode to Bson"); + } + + @Override + public void encode(final BsonWriter writer, final Bson value, final EncoderContext encoderContext) { + try { + BsonDocument bsonDocument = value.toBsonDocument(BsonDocument.class, registry); + BSON_DOCUMENT_CODEC.encode(writer, bsonDocument, encoderContext); + } catch (Exception e) { + throw new CodecConfigurationException(format("Unable to encode 
a Bson implementation: %s", value), e); + } + } + + @Override + public Class getEncoderClass() { + return Bson.class; + } +} diff --git a/bson/src/main/org/bson/codecs/BsonCodecProvider.java b/bson/src/main/org/bson/codecs/BsonCodecProvider.java new file mode 100644 index 00000000000..950379d68ad --- /dev/null +++ b/bson/src/main/org/bson/codecs/BsonCodecProvider.java @@ -0,0 +1,43 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.codecs.configuration.CodecProvider; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; + +/** + * A codec for encoding simple Bson interface implementations + * + * @since 3.11 + */ +public class BsonCodecProvider implements CodecProvider { + + @Override + @SuppressWarnings("unchecked") + public Codec get(final Class clazz, final CodecRegistry registry) { + if (Bson.class.isAssignableFrom(clazz)) { + return (Codec) new BsonCodec(registry); + } + return null; + } + + @Override + public String toString() { + return "BsonCodecProvider{}"; + } +} diff --git a/bson/src/main/org/bson/codecs/BsonDocumentCodec.java b/bson/src/main/org/bson/codecs/BsonDocumentCodec.java index 7e45e30abcd..75bd3b7a2b0 100644 --- a/bson/src/main/org/bson/codecs/BsonDocumentCodec.java +++ b/bson/src/main/org/bson/codecs/BsonDocumentCodec.java @@ -17,7 +17,6 @@ package org.bson.codecs; import org.bson.BsonDocument; -import 
org.bson.BsonElement; import org.bson.BsonObjectId; import org.bson.BsonReader; import org.bson.BsonType; @@ -26,10 +25,9 @@ import org.bson.codecs.configuration.CodecRegistry; import org.bson.types.ObjectId; -import java.util.ArrayList; -import java.util.List; import java.util.Map; +import static org.bson.assertions.Assertions.notNull; import static org.bson.codecs.BsonValueCodecProvider.getBsonTypeClassMap; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; @@ -41,6 +39,7 @@ public class BsonDocumentCodec implements CollectibleCodec { private static final String ID_FIELD_NAME = "_id"; private static final CodecRegistry DEFAULT_REGISTRY = fromProviders(new BsonValueCodecProvider()); + private static final BsonTypeCodecMap DEFAULT_BSON_TYPE_CODEC_MAP = new BsonTypeCodecMap(getBsonTypeClassMap(), DEFAULT_REGISTRY); private final CodecRegistry codecRegistry; private final BsonTypeCodecMap bsonTypeCodecMap; @@ -49,7 +48,7 @@ public class BsonDocumentCodec implements CollectibleCodec { * Creates a new instance with a default codec registry that uses the {@link BsonValueCodecProvider}. 
*/ public BsonDocumentCodec() { - this(DEFAULT_REGISTRY); + this(DEFAULT_REGISTRY, DEFAULT_BSON_TYPE_CODEC_MAP); } /** @@ -58,11 +57,12 @@ public BsonDocumentCodec() { * @param codecRegistry the {@code CodecRegistry} to use to look up the codecs for encoding and decoding to/from BSON */ public BsonDocumentCodec(final CodecRegistry codecRegistry) { - if (codecRegistry == null) { - throw new IllegalArgumentException("Codec registry can not be null"); - } - this.codecRegistry = codecRegistry; - this.bsonTypeCodecMap = new BsonTypeCodecMap(getBsonTypeClassMap(), codecRegistry); + this(codecRegistry, new BsonTypeCodecMap(getBsonTypeClassMap(), codecRegistry)); + } + + private BsonDocumentCodec(final CodecRegistry codecRegistry, final BsonTypeCodecMap bsonTypeCodecMap) { + this.codecRegistry = notNull("Codec registry", codecRegistry); + this.bsonTypeCodecMap = notNull("bsonTypeCodecMap", bsonTypeCodecMap); } /** @@ -76,17 +76,15 @@ public CodecRegistry getCodecRegistry() { @Override public BsonDocument decode(final BsonReader reader, final DecoderContext decoderContext) { - List keyValuePairs = new ArrayList(); - + BsonDocument bsonDocument = new BsonDocument(); reader.readStartDocument(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { String fieldName = reader.readName(); - keyValuePairs.add(new BsonElement(fieldName, readValue(reader, decoderContext))); + bsonDocument.append(fieldName, readValue(reader, decoderContext)); } reader.readEndDocument(); - - return new BsonDocument(keyValuePairs); + return bsonDocument; } /** @@ -132,7 +130,7 @@ private boolean skipField(final EncoderContext encoderContext, final String key) @SuppressWarnings({"unchecked", "rawtypes"}) private void writeValue(final BsonWriter writer, final EncoderContext encoderContext, final BsonValue value) { - Codec codec = codecRegistry.get(value.getClass()); + Codec codec = bsonTypeCodecMap.get(value.getBsonType()); encoderContext.encodeWithChildContext(codec, writer, value); } diff --git 
a/bson/src/main/org/bson/codecs/BsonTypeClassMap.java b/bson/src/main/org/bson/codecs/BsonTypeClassMap.java index 3f441a557a7..32acaeb7f85 100644 --- a/bson/src/main/org/bson/codecs/BsonTypeClassMap.java +++ b/bson/src/main/org/bson/codecs/BsonTypeClassMap.java @@ -31,12 +31,11 @@ import org.bson.types.ObjectId; import org.bson.types.Symbol; +import java.util.Arrays; import java.util.Collections; import java.util.Date; -import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; /** *

A map from a BSON types to the Class to which it should be decoded. This class is useful if, for example, @@ -70,7 +69,8 @@ * @since 3.0 */ public class BsonTypeClassMap { - private final Map> map = new HashMap>(); + static final BsonTypeClassMap DEFAULT_BSON_TYPE_CLASS_MAP = new BsonTypeClassMap(); + private final Class[] bsonTypeOrdinalToClassMap = new Class[256]; /** * Construct an instance with the default mapping, but replacing the default mapping with any values contained in the given map. @@ -80,18 +80,14 @@ public class BsonTypeClassMap { */ public BsonTypeClassMap(final Map> replacementsForDefaults) { addDefaults(); - map.putAll(replacementsForDefaults); + replacementsForDefaults.forEach((key, value) -> bsonTypeOrdinalToClassMap[key.getValue()] = value); } /** * Construct an instance with the default mappings. */ public BsonTypeClassMap() { - this(Collections.>emptyMap()); - } - - Set keys() { - return map.keySet(); + this(Collections.emptyMap()); } /** @@ -101,30 +97,30 @@ Set keys() { * @return the Class that is mapped to the BSON type */ public Class get(final BsonType bsonType) { - return map.get(bsonType); + return bsonTypeOrdinalToClassMap[bsonType.getValue()]; } private void addDefaults() { - map.put(BsonType.ARRAY, List.class); - map.put(BsonType.BINARY, Binary.class); - map.put(BsonType.BOOLEAN, Boolean.class); - map.put(BsonType.DATE_TIME, Date.class); - map.put(BsonType.DB_POINTER, BsonDbPointer.class); - map.put(BsonType.DOCUMENT, Document.class); - map.put(BsonType.DOUBLE, Double.class); - map.put(BsonType.INT32, Integer.class); - map.put(BsonType.INT64, Long.class); - map.put(BsonType.DECIMAL128, Decimal128.class); - map.put(BsonType.MAX_KEY, MaxKey.class); - map.put(BsonType.MIN_KEY, MinKey.class); - map.put(BsonType.JAVASCRIPT, Code.class); - map.put(BsonType.JAVASCRIPT_WITH_SCOPE, CodeWithScope.class); - map.put(BsonType.OBJECT_ID, ObjectId.class); - map.put(BsonType.REGULAR_EXPRESSION, BsonRegularExpression.class); - 
map.put(BsonType.STRING, String.class); - map.put(BsonType.SYMBOL, Symbol.class); - map.put(BsonType.TIMESTAMP, BsonTimestamp.class); - map.put(BsonType.UNDEFINED, BsonUndefined.class); + bsonTypeOrdinalToClassMap[BsonType.ARRAY.getValue()] = List.class; + bsonTypeOrdinalToClassMap[BsonType.BINARY.getValue()] = Binary.class; + bsonTypeOrdinalToClassMap[BsonType.BOOLEAN.getValue()] = Boolean.class; + bsonTypeOrdinalToClassMap[BsonType.DATE_TIME.getValue()] = Date.class; + bsonTypeOrdinalToClassMap[BsonType.DB_POINTER.getValue()] = BsonDbPointer.class; + bsonTypeOrdinalToClassMap[BsonType.DOCUMENT.getValue()] = Document.class; + bsonTypeOrdinalToClassMap[BsonType.DOUBLE.getValue()] = Double.class; + bsonTypeOrdinalToClassMap[BsonType.INT32.getValue()] = Integer.class; + bsonTypeOrdinalToClassMap[BsonType.INT64.getValue()] = Long.class; + bsonTypeOrdinalToClassMap[BsonType.DECIMAL128.getValue()] = Decimal128.class; + bsonTypeOrdinalToClassMap[BsonType.MAX_KEY.getValue()] = MaxKey.class; + bsonTypeOrdinalToClassMap[BsonType.MIN_KEY.getValue()] = MinKey.class; + bsonTypeOrdinalToClassMap[BsonType.JAVASCRIPT.getValue()] = Code.class; + bsonTypeOrdinalToClassMap[BsonType.JAVASCRIPT_WITH_SCOPE.getValue()] = CodeWithScope.class; + bsonTypeOrdinalToClassMap[BsonType.OBJECT_ID.getValue()] = ObjectId.class; + bsonTypeOrdinalToClassMap[BsonType.REGULAR_EXPRESSION.getValue()] = BsonRegularExpression.class; + bsonTypeOrdinalToClassMap[BsonType.STRING.getValue()] = String.class; + bsonTypeOrdinalToClassMap[BsonType.SYMBOL.getValue()] = Symbol.class; + bsonTypeOrdinalToClassMap[BsonType.TIMESTAMP.getValue()] = BsonTimestamp.class; + bsonTypeOrdinalToClassMap[BsonType.UNDEFINED.getValue()] = BsonUndefined.class; } @Override @@ -136,17 +132,13 @@ public boolean equals(final Object o) { return false; } - final BsonTypeClassMap that = (BsonTypeClassMap) o; - - if (!map.equals(that.map)) { - return false; - } + BsonTypeClassMap that = (BsonTypeClassMap) o; - return true; + return 
Arrays.equals(bsonTypeOrdinalToClassMap, that.bsonTypeOrdinalToClassMap); } @Override public int hashCode() { - return map.hashCode(); + return Arrays.hashCode(bsonTypeOrdinalToClassMap); } } diff --git a/bson/src/main/org/bson/codecs/BsonTypeCodecMap.java b/bson/src/main/org/bson/codecs/BsonTypeCodecMap.java index 510a6041a0b..3a3def7ca7f 100644 --- a/bson/src/main/org/bson/codecs/BsonTypeCodecMap.java +++ b/bson/src/main/org/bson/codecs/BsonTypeCodecMap.java @@ -40,7 +40,7 @@ public class BsonTypeCodecMap { public BsonTypeCodecMap(final BsonTypeClassMap bsonTypeClassMap, final CodecRegistry codecRegistry) { this.bsonTypeClassMap = notNull("bsonTypeClassMap", bsonTypeClassMap); notNull("codecRegistry", codecRegistry); - for (BsonType cur : bsonTypeClassMap.keys()) { + for (BsonType cur : BsonType.values()) { Class clazz = bsonTypeClassMap.get(cur); if (clazz != null) { try { diff --git a/bson/src/main/org/bson/codecs/BsonValueCodecProvider.java b/bson/src/main/org/bson/codecs/BsonValueCodecProvider.java index 662ef455c0f..8a7a3f77375 100644 --- a/bson/src/main/org/bson/codecs/BsonValueCodecProvider.java +++ b/bson/src/main/org/bson/codecs/BsonValueCodecProvider.java @@ -55,7 +55,7 @@ public class BsonValueCodecProvider implements CodecProvider { private static final BsonTypeClassMap DEFAULT_BSON_TYPE_CLASS_MAP; - private final Map, Codec> codecs = new HashMap, Codec>(); + private final Map, Codec> codecs = new HashMap<>(); /** * Construct a new instance with the default codec for each BSON type. 
@@ -91,10 +91,6 @@ public Codec get(final Class clazz, final CodecRegistry registry) { return (Codec) codecs.get(clazz); } - if (clazz == BsonArray.class) { - return (Codec) new BsonArrayCodec(registry); - } - if (clazz == BsonJavaScriptWithScope.class) { return (Codec) new BsonJavaScriptWithScopeCodec(registry.get(BsonDocument.class)); } @@ -115,6 +111,10 @@ public Codec get(final Class clazz, final CodecRegistry registry) { return (Codec) new BsonDocumentCodec(registry); } + if (BsonArray.class.isAssignableFrom(clazz)) { + return (Codec) new BsonArrayCodec(registry); + } + return null; } @@ -144,7 +144,7 @@ private void addCodec(final Codec codec) { } static { - Map> map = new HashMap>(); + Map> map = new HashMap<>(); map.put(BsonType.NULL, BsonNull.class); map.put(BsonType.ARRAY, BsonArray.class); @@ -170,4 +170,9 @@ private void addCodec(final Codec codec) { DEFAULT_BSON_TYPE_CLASS_MAP = new BsonTypeClassMap(map); } + + @Override + public String toString() { + return "BsonValueCodecProvider{}"; + } } diff --git a/bson/src/main/org/bson/codecs/ByteCodec.java b/bson/src/main/org/bson/codecs/ByteCodec.java index 26b5005ea66..e7011f8b58d 100644 --- a/bson/src/main/org/bson/codecs/ByteCodec.java +++ b/bson/src/main/org/bson/codecs/ByteCodec.java @@ -16,12 +16,10 @@ package org.bson.codecs; -import org.bson.BsonInvalidOperationException; import org.bson.BsonReader; import org.bson.BsonWriter; -import static java.lang.String.format; -import static org.bson.codecs.NumberCodecHelper.decodeInt; +import static org.bson.internal.NumberCodecHelper.decodeByte; /** * Encodes and decodes {@code Byte} objects. 
@@ -37,11 +35,7 @@ public void encode(final BsonWriter writer, final Byte value, final EncoderConte @Override public Byte decode(final BsonReader reader, final DecoderContext decoderContext) { - int value = decodeInt(reader); - if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) { - throw new BsonInvalidOperationException(format("%s can not be converted into a Byte.", value)); - } - return (byte) value; + return decodeByte(reader); } @Override diff --git a/bson/src/main/org/bson/codecs/CharacterCodec.java b/bson/src/main/org/bson/codecs/CharacterCodec.java index 0a9e6252056..4ad6efa2663 100644 --- a/bson/src/main/org/bson/codecs/CharacterCodec.java +++ b/bson/src/main/org/bson/codecs/CharacterCodec.java @@ -16,11 +16,10 @@ package org.bson.codecs; -import org.bson.BsonInvalidOperationException; import org.bson.BsonReader; import org.bson.BsonWriter; +import org.bson.internal.StringCodecHelper; -import static java.lang.String.format; import static org.bson.assertions.Assertions.notNull; /** @@ -38,13 +37,7 @@ public void encode(final BsonWriter writer, final Character value, final Encoder @Override public Character decode(final BsonReader reader, final DecoderContext decoderContext) { - String string = reader.readString(); - if (string.length() != 1) { - throw new BsonInvalidOperationException(format("Attempting to decode the string '%s' to a character, but its length is not " - + "equal to one", string)); - } - - return string.charAt(0); + return StringCodecHelper.decodeChar(reader); } @Override diff --git a/bson/src/main/org/bson/codecs/CollectionCodec.java b/bson/src/main/org/bson/codecs/CollectionCodec.java new file mode 100644 index 00000000000..d53ab4a937e --- /dev/null +++ b/bson/src/main/org/bson/codecs/CollectionCodec.java @@ -0,0 +1,95 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonReader; +import org.bson.BsonWriter; +import org.bson.Transformer; +import org.bson.UuidRepresentation; +import org.bson.codecs.configuration.CodecRegistry; + +import java.util.Collection; +import java.util.List; + +import static org.bson.assertions.Assertions.notNull; + +/** + * A codec for {@code Collection}. + * + *

Supports {@link Collection}, {@link List}, {@link java.util.AbstractCollection}, {@link java.util.AbstractList}, + * {@link java.util.Set}, {@link java.util.NavigableSet}, {@link java.util.SortedSet}, {@link java.util.AbstractSet} or any + * concrete class that implements {@code Collection} and has a public no-args constructor. If the type argument is + * {@code Collection}, {@code List}, {@code AbstractCollection}, or {@code AbstractList}, + * it constructs {@code ArrayList} instances when decoding. If the type argument is {@code Set} or + * {@code AbstractSet}, it constructs {@code HashSet} instances when decoding. If the type argument is + * {@code NavigableSet} or {@code SortedSet}, it constructs {@code TreeSet} instances when decoding.

+ * + *

Replaces the now deprecated {@link IterableCodec}.

+ * + * @param the actual type of the Collection, e.g. {@code List} + */ +@SuppressWarnings("rawtypes") +final class CollectionCodec> extends AbstractCollectionCodec + implements OverridableUuidRepresentationCodec { + + private final CodecRegistry registry; + private final BsonTypeCodecMap bsonTypeCodecMap; + private final Transformer valueTransformer; + private final UuidRepresentation uuidRepresentation; + + /** + * Construct a new instance with the given {@code CodecRegistry} and {@code BsonTypeClassMap}. + * + * @param registry the non-null codec registry + * @param bsonTypeClassMap the non-null BsonTypeClassMap + * @param valueTransformer the value Transformer + * @param clazz the class + */ + CollectionCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap, final Transformer valueTransformer, + final Class clazz) { + this(registry, new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry), valueTransformer, clazz, + UuidRepresentation.UNSPECIFIED); + } + private CollectionCodec(final CodecRegistry registry, final BsonTypeCodecMap bsonTypeCodecMap, final Transformer valueTransformer, + final Class clazz, final UuidRepresentation uuidRepresentation) { + super(clazz); + this.registry = notNull("registry", registry); + this.bsonTypeCodecMap = bsonTypeCodecMap; + this.valueTransformer = valueTransformer != null ? 
valueTransformer : (value) -> value; + this.uuidRepresentation = uuidRepresentation; + } + + @Override + public Codec withUuidRepresentation(final UuidRepresentation uuidRepresentation) { + if (this.uuidRepresentation.equals(uuidRepresentation)) { + return this; + } + return new CollectionCodec<>(registry, bsonTypeCodecMap, valueTransformer, getEncoderClass(), uuidRepresentation); + } + + @Override + Object readValue(final BsonReader reader, final DecoderContext decoderContext) { + return ContainerCodecHelper.readValue(reader, decoderContext, bsonTypeCodecMap, uuidRepresentation, registry, valueTransformer); + } + + @SuppressWarnings("unchecked") + @Override + void writeValue(final BsonWriter writer, final Object value, final EncoderContext encoderContext) { + Codec codec = registry.get(value.getClass()); + encoderContext.encodeWithChildContext(codec, writer, value); + } +} diff --git a/bson/src/main/org/bson/codecs/CollectionCodecProvider.java b/bson/src/main/org/bson/codecs/CollectionCodecProvider.java new file mode 100644 index 00000000000..c4c447e87bd --- /dev/null +++ b/bson/src/main/org/bson/codecs/CollectionCodecProvider.java @@ -0,0 +1,140 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs; + +import org.bson.Transformer; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecProvider; +import org.bson.codecs.configuration.CodecRegistry; + +import java.lang.reflect.Type; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import static org.bson.assertions.Assertions.notNull; +import static org.bson.codecs.BsonTypeClassMap.DEFAULT_BSON_TYPE_CLASS_MAP; +import static org.bson.codecs.ContainerCodecHelper.getCodec; + +/** + * A {@code CodecProvider} for classes that implement the {@code Collection} interface. + * + * @since 3.3 + */ +public class CollectionCodecProvider implements CodecProvider { + private final BsonTypeClassMap bsonTypeClassMap; + private final Transformer valueTransformer; + + /** + * Construct a new instance with a default {@code BsonTypeClassMap} and no {@code Transformer}. + */ + public CollectionCodecProvider() { + this(DEFAULT_BSON_TYPE_CLASS_MAP); + } + + /** + * Construct a new instance with a default {@code BsonTypeClassMap} and the given {@code Transformer}. The transformer is used by the + * IterableCodec as a last step when decoding values. + * + * @param valueTransformer the value transformer for decoded values + */ + public CollectionCodecProvider(final Transformer valueTransformer) { + this(DEFAULT_BSON_TYPE_CLASS_MAP, valueTransformer); + } + + /** + * Construct a new instance with the given instance of {@code BsonTypeClassMap} and no {@code Transformer}. + * + * @param bsonTypeClassMap the non-null {@code BsonTypeClassMap} with which to construct instances of {@code DocumentCodec} and {@code + * ListCodec} + */ + public CollectionCodecProvider(final BsonTypeClassMap bsonTypeClassMap) { + this(bsonTypeClassMap, null); + } + + /** + * Construct a new instance with the given instance of {@code BsonTypeClassMap} and {@code Transformer}.
+ * + * @param bsonTypeClassMap the non-null {@code BsonTypeClassMap} with which to construct instances of {@code DocumentCodec} and {@code + * ListCodec}. + * @param valueTransformer the value transformer for decoded values + */ + public CollectionCodecProvider(final BsonTypeClassMap bsonTypeClassMap, final Transformer valueTransformer) { + this.bsonTypeClassMap = notNull("bsonTypeClassMap", bsonTypeClassMap); + this.valueTransformer = valueTransformer; + } + + @Override + public Codec get(final Class clazz, final CodecRegistry registry) { + return get(clazz, Collections.emptyList(), registry); + } + + @Override + public Codec get(final Class clazz, final List typeArguments, final CodecRegistry registry) { + if (Collection.class.isAssignableFrom(clazz)) { + int typeArgumentsSize = typeArguments.size(); + switch (typeArgumentsSize) { + case 0: { + @SuppressWarnings({"unchecked", "rawtypes"}) + Codec result = new CollectionCodec(registry, bsonTypeClassMap, valueTransformer, clazz); + return result; + } + case 1: { + @SuppressWarnings({"unchecked", "rawtypes"}) + Codec result = new ParameterizedCollectionCodec(getCodec(registry, typeArguments.get(0)), clazz); + return result; + } + default: { + throw new CodecConfigurationException("Expected only one type argument for a Collection, but found " + typeArgumentsSize); + } + } + } + return null; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + CollectionCodecProvider that = (CollectionCodecProvider) o; + + if (!bsonTypeClassMap.equals(that.bsonTypeClassMap)) { + return false; + } + if (!Objects.equals(valueTransformer, that.valueTransformer)) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return Objects.hash(bsonTypeClassMap, valueTransformer); + } + + @Override + public String toString() { + return "CollectionCodecProvider{}"; + } +} diff --git 
a/bson/src/main/org/bson/codecs/ContainerCodecHelper.java b/bson/src/main/org/bson/codecs/ContainerCodecHelper.java new file mode 100644 index 00000000000..2243f209528 --- /dev/null +++ b/bson/src/main/org/bson/codecs/ContainerCodecHelper.java @@ -0,0 +1,108 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonBinarySubType; +import org.bson.BsonReader; +import org.bson.BsonType; +import org.bson.Transformer; +import org.bson.UuidRepresentation; +import org.bson.BinaryVector; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecRegistry; + +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.Arrays; +import java.util.UUID; + +import static org.bson.internal.UuidHelper.isLegacyUUID; + +/** + * Helper methods for Codec implementations for containers, e.g. {@code Map} and {@code Iterable}. 
+ */ +final class ContainerCodecHelper { + + static Object readValue(final BsonReader reader, final DecoderContext decoderContext, + final BsonTypeCodecMap bsonTypeCodecMap, final UuidRepresentation uuidRepresentation, + final CodecRegistry registry, final Transformer valueTransformer) { + + BsonType bsonType = reader.getCurrentBsonType(); + if (bsonType == BsonType.NULL) { + reader.readNull(); + return null; + } else { + Codec currentCodec = bsonTypeCodecMap.get(bsonType); + + if (bsonType == BsonType.BINARY) { + byte binarySubType = reader.peekBinarySubType(); + currentCodec = getBinarySubTypeCodec( + reader, + uuidRepresentation, + registry, binarySubType, + currentCodec); + } + + return valueTransformer.transform(currentCodec.decode(reader, decoderContext)); + } + } + + private static Codec getBinarySubTypeCodec(final BsonReader reader, + final UuidRepresentation uuidRepresentation, + final CodecRegistry registry, + final byte binarySubType, + final Codec binaryTypeCodec) { + + if (binarySubType == BsonBinarySubType.VECTOR.getValue()) { + Codec vectorCodec = registry.get(BinaryVector.class, registry); + if (vectorCodec != null) { + return vectorCodec; + } + } else if (reader.peekBinarySize() == 16) { + switch (binarySubType) { + case 3: + if (isLegacyUUID(uuidRepresentation)) { + return registry.get(UUID.class); + } + break; + case 4: + if (uuidRepresentation == UuidRepresentation.STANDARD) { + return registry.get(UUID.class); + } + break; + default: + break; + } + } + + return binaryTypeCodec; + } + + static Codec getCodec(final CodecRegistry codecRegistry, final Type type) { + if (type instanceof Class) { + return codecRegistry.get((Class) type); + } else if (type instanceof ParameterizedType) { + ParameterizedType parameterizedType = (ParameterizedType) type; + return codecRegistry.get((Class) parameterizedType.getRawType(), Arrays.asList(parameterizedType.getActualTypeArguments())); + } else { + throw new CodecConfigurationException("Unsupported generic 
type of container: " + type); + } + } + + private ContainerCodecHelper() { + } +} diff --git a/bson/src/main/org/bson/codecs/DecoderContext.java b/bson/src/main/org/bson/codecs/DecoderContext.java index 30c0b700316..7ebabaad05b 100644 --- a/bson/src/main/org/bson/codecs/DecoderContext.java +++ b/bson/src/main/org/bson/codecs/DecoderContext.java @@ -18,6 +18,8 @@ import org.bson.BsonReader; +import static org.bson.assertions.Assertions.notNull; + /** * The context for decoding values to BSON. * @@ -90,6 +92,7 @@ public DecoderContext build() { * @since 3.5 */ public T decodeWithChildContext(final Decoder decoder, final BsonReader reader) { + notNull("decoder", decoder); return decoder.decode(reader, DEFAULT_CONTEXT); } diff --git a/bson/src/main/org/bson/codecs/DocumentCodec.java b/bson/src/main/org/bson/codecs/DocumentCodec.java index 5b8e4a9cdc6..0c4161f53fd 100644 --- a/bson/src/main/org/bson/codecs/DocumentCodec.java +++ b/bson/src/main/org/bson/codecs/DocumentCodec.java @@ -16,7 +16,6 @@ package org.bson.codecs; -import org.bson.BsonBinarySubType; import org.bson.BsonDocument; import org.bson.BsonDocumentWriter; import org.bson.BsonReader; @@ -25,15 +24,15 @@ import org.bson.BsonWriter; import org.bson.Document; import org.bson.Transformer; +import org.bson.UuidRepresentation; import org.bson.codecs.configuration.CodecRegistry; -import java.util.ArrayList; -import java.util.List; import java.util.Map; -import java.util.UUID; import static java.util.Arrays.asList; import static org.bson.assertions.Assertions.notNull; +import static org.bson.codecs.BsonTypeClassMap.DEFAULT_BSON_TYPE_CLASS_MAP; +import static org.bson.codecs.ContainerCodecHelper.readValue; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; /** @@ -42,24 +41,26 @@ * @see org.bson.Document * @since 3.0 */ -public class DocumentCodec implements CollectibleCodec { +public class DocumentCodec implements CollectibleCodec, OverridableUuidRepresentationCodec { private static final 
String ID_FIELD_NAME = "_id"; private static final CodecRegistry DEFAULT_REGISTRY = fromProviders(asList(new ValueCodecProvider(), - new BsonValueCodecProvider(), - new DocumentCodecProvider())); - private static final BsonTypeClassMap DEFAULT_BSON_TYPE_CLASS_MAP = new BsonTypeClassMap(); + new CollectionCodecProvider(), new IterableCodecProvider(), + new BsonValueCodecProvider(), new DocumentCodecProvider(), new MapCodecProvider())); + private static final BsonTypeCodecMap DEFAULT_BSON_TYPE_CODEC_MAP = new BsonTypeCodecMap(DEFAULT_BSON_TYPE_CLASS_MAP, DEFAULT_REGISTRY); + private static final IdGenerator DEFAULT_ID_GENERATOR = new ObjectIdGenerator(); private final BsonTypeCodecMap bsonTypeCodecMap; private final CodecRegistry registry; private final IdGenerator idGenerator; private final Transformer valueTransformer; + private final UuidRepresentation uuidRepresentation; /** * Construct a new instance with a default {@code CodecRegistry}. */ public DocumentCodec() { - this(DEFAULT_REGISTRY); + this(DEFAULT_REGISTRY, DEFAULT_BSON_TYPE_CODEC_MAP, null); } /** @@ -92,15 +93,28 @@ public DocumentCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTy * @param valueTransformer the value transformer to use as a final step when decoding the value of any field in the document */ public DocumentCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap, final Transformer valueTransformer) { + this(registry, new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry), valueTransformer); + } + + private DocumentCodec(final CodecRegistry registry, final BsonTypeCodecMap bsonTypeCodecMap, final Transformer valueTransformer) { + this(registry, bsonTypeCodecMap, DEFAULT_ID_GENERATOR, valueTransformer, UuidRepresentation.UNSPECIFIED); + } + + private DocumentCodec(final CodecRegistry registry, final BsonTypeCodecMap bsonTypeCodecMap, final IdGenerator idGenerator, + final Transformer valueTransformer, final UuidRepresentation 
uuidRepresentation) { this.registry = notNull("registry", registry); - this.bsonTypeCodecMap = new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry); - this.idGenerator = new ObjectIdGenerator(); - this.valueTransformer = valueTransformer != null ? valueTransformer : new Transformer() { - @Override - public Object transform(final Object value) { - return value; - } - }; + this.bsonTypeCodecMap = bsonTypeCodecMap; + this.idGenerator = idGenerator; + this.valueTransformer = valueTransformer != null ? valueTransformer : value -> value; + this.uuidRepresentation = uuidRepresentation; + } + + @Override + public Codec withUuidRepresentation(final UuidRepresentation uuidRepresentation) { + if (this.uuidRepresentation.equals(uuidRepresentation)) { + return this; + } + return new DocumentCodec(registry, bsonTypeCodecMap, idGenerator, valueTransformer, uuidRepresentation); } @Override @@ -138,7 +152,18 @@ public Document generateIdIfAbsentFromDocument(final Document document) { @Override public void encode(final BsonWriter writer, final Document document, final EncoderContext encoderContext) { - writeMap(writer, document, encoderContext); + writer.writeStartDocument(); + + beforeFields(writer, encoderContext, document); + + for (final Map.Entry entry : document.entrySet()) { + if (skipField(encoderContext, entry.getKey())) { + continue; + } + writer.writeName(entry.getKey()); + writeValue(writer, encoderContext, entry.getValue()); + } + writer.writeEndDocument(); } @Override @@ -148,7 +173,7 @@ public Document decode(final BsonReader reader, final DecoderContext decoderCont reader.readStartDocument(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { String fieldName = reader.readName(); - document.put(fieldName, readValue(reader, decoderContext)); + document.put(fieldName, readValue(reader, decoderContext, bsonTypeCodecMap, uuidRepresentation, registry, valueTransformer)); } reader.readEndDocument(); @@ -176,59 +201,9 @@ private boolean 
skipField(final EncoderContext encoderContext, final String key) private void writeValue(final BsonWriter writer, final EncoderContext encoderContext, final Object value) { if (value == null) { writer.writeNull(); - } else if (value instanceof Iterable) { - writeIterable(writer, (Iterable) value, encoderContext.getChildContext()); - } else if (value instanceof Map) { - writeMap(writer, (Map) value, encoderContext.getChildContext()); } else { Codec codec = registry.get(value.getClass()); encoderContext.encodeWithChildContext(codec, writer, value); } } - - private void writeMap(final BsonWriter writer, final Map map, final EncoderContext encoderContext) { - writer.writeStartDocument(); - - beforeFields(writer, encoderContext, map); - - for (final Map.Entry entry : map.entrySet()) { - if (skipField(encoderContext, entry.getKey())) { - continue; - } - writer.writeName(entry.getKey()); - writeValue(writer, encoderContext, entry.getValue()); - } - writer.writeEndDocument(); - } - - private void writeIterable(final BsonWriter writer, final Iterable list, final EncoderContext encoderContext) { - writer.writeStartArray(); - for (final Object value : list) { - writeValue(writer, encoderContext, value); - } - writer.writeEndArray(); - } - - private Object readValue(final BsonReader reader, final DecoderContext decoderContext) { - BsonType bsonType = reader.getCurrentBsonType(); - if (bsonType == BsonType.NULL) { - reader.readNull(); - return null; - } else if (bsonType == BsonType.ARRAY) { - return readList(reader, decoderContext); - } else if (bsonType == BsonType.BINARY && BsonBinarySubType.isUuid(reader.peekBinarySubType()) && reader.peekBinarySize() == 16) { - return registry.get(UUID.class).decode(reader, decoderContext); - } - return valueTransformer.transform(bsonTypeCodecMap.get(bsonType).decode(reader, decoderContext)); - } - - private List readList(final BsonReader reader, final DecoderContext decoderContext) { - reader.readStartArray(); - List list = new 
ArrayList(); - while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { - list.add(readValue(reader, decoderContext)); - } - reader.readEndArray(); - return list; - } } diff --git a/bson/src/main/org/bson/codecs/DocumentCodecProvider.java b/bson/src/main/org/bson/codecs/DocumentCodecProvider.java index a2e08c99ede..2d5c34e9f1f 100644 --- a/bson/src/main/org/bson/codecs/DocumentCodecProvider.java +++ b/bson/src/main/org/bson/codecs/DocumentCodecProvider.java @@ -22,7 +22,10 @@ import org.bson.codecs.configuration.CodecRegistry; import org.bson.types.CodeWithScope; +import java.util.Objects; + import static org.bson.assertions.Assertions.notNull; +import static org.bson.codecs.BsonTypeClassMap.DEFAULT_BSON_TYPE_CLASS_MAP; /** * A {@code CodecProvider} for the Document class and all the default Codec implementations on which it depends. @@ -37,7 +40,7 @@ public class DocumentCodecProvider implements CodecProvider { * Construct a new instance with a default {@code BsonTypeClassMap}. */ public DocumentCodecProvider() { - this(new BsonTypeClassMap()); + this(DEFAULT_BSON_TYPE_CLASS_MAP); } /** @@ -48,7 +51,7 @@ public DocumentCodecProvider() { * @see org.bson.codecs.DocumentCodec#DocumentCodec(org.bson.codecs.configuration.CodecRegistry, BsonTypeClassMap, org.bson.Transformer) */ public DocumentCodecProvider(final Transformer valueTransformer) { - this(new BsonTypeClassMap(), valueTransformer); + this(DEFAULT_BSON_TYPE_CLASS_MAP, valueTransformer); } /** @@ -101,7 +104,7 @@ public boolean equals(final Object o) { if (!bsonTypeClassMap.equals(that.bsonTypeClassMap)) { return false; } - if (valueTransformer != null ? !valueTransformer.equals(that.valueTransformer) : that.valueTransformer != null) { + if (!Objects.equals(valueTransformer, that.valueTransformer)) { return false; } @@ -114,4 +117,9 @@ public int hashCode() { result = 31 * result + (valueTransformer != null ? 
valueTransformer.hashCode() : 0); return result; } + + @Override + public String toString() { + return "DocumentCodecProvider{}"; + } } diff --git a/bson/src/main/org/bson/codecs/DoubleCodec.java b/bson/src/main/org/bson/codecs/DoubleCodec.java index 523042bb163..33e3f6782bd 100644 --- a/bson/src/main/org/bson/codecs/DoubleCodec.java +++ b/bson/src/main/org/bson/codecs/DoubleCodec.java @@ -19,7 +19,7 @@ import org.bson.BsonReader; import org.bson.BsonWriter; -import static org.bson.codecs.NumberCodecHelper.decodeDouble; +import static org.bson.internal.NumberCodecHelper.decodeDouble; /** * Encodes and decodes {@code Double} objects. diff --git a/bson/src/main/org/bson/codecs/EncoderContext.java b/bson/src/main/org/bson/codecs/EncoderContext.java index cad35fce3e0..af074bb2664 100644 --- a/bson/src/main/org/bson/codecs/EncoderContext.java +++ b/bson/src/main/org/bson/codecs/EncoderContext.java @@ -49,7 +49,7 @@ private Builder() { } /** - * Set to true if the the value to be encoded is a document that will be put in a MongoDB collection. + * Set to true if the value to be encoded is a document that will be put in a MongoDB collection. * * @param encodingCollectibleDocument true if the value to be encoded is a document that will be put in a MongoDB collection * @return this @@ -69,7 +69,7 @@ public EncoderContext build() { } /** - * Returns true if the the value to be encoded is a document that will be put in a MongoDB collection. Encoders for such documents + * Returns true if the value to be encoded is a document that will be put in a MongoDB collection. Encoders for such documents * might choose to act differently when encoding such as documents, e.g. by re-ordering the fields in some way (like encoding the _id * field first). 
* diff --git a/bson/src/main/org/bson/codecs/EnumCodec.java b/bson/src/main/org/bson/codecs/EnumCodec.java new file mode 100644 index 00000000000..0ef6e28077d --- /dev/null +++ b/bson/src/main/org/bson/codecs/EnumCodec.java @@ -0,0 +1,54 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonReader; +import org.bson.BsonWriter; + +/** + * A codec for classes that extend {@link Enum} + * + * @param The enum type + * @since 4.5 + */ +public final class EnumCodec> implements Codec { + private final Class clazz; + + /** + * Construct an instance for the given enum class. + * + * @param clazz the enum class + */ + public EnumCodec(final Class clazz) { + this.clazz = clazz; + } + + @Override + public T decode(final BsonReader reader, final DecoderContext decoderContext) { + return Enum.valueOf(clazz, reader.readString()); + } + + @Override + public void encode(final BsonWriter writer, final T value, final EncoderContext encoderContext) { + writer.writeString(value.name()); + } + + @Override + public Class getEncoderClass() { + return clazz; + } +} diff --git a/bson/src/main/org/bson/codecs/EnumCodecProvider.java b/bson/src/main/org/bson/codecs/EnumCodecProvider.java new file mode 100644 index 00000000000..2ccd6ab9287 --- /dev/null +++ b/bson/src/main/org/bson/codecs/EnumCodecProvider.java @@ -0,0 +1,41 @@ +/* + * Copyright 2008-present MongoDB, Inc.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.codecs.configuration.CodecProvider; +import org.bson.codecs.configuration.CodecRegistry; + +/** + * A codec provider for classes that extend {@link Enum}. + * + * @since 4.5 + */ +public final class EnumCodecProvider implements CodecProvider { + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public Codec get(final Class clazz, final CodecRegistry registry) { + if (Enum.class.isAssignableFrom(clazz)) { + return (Codec) new EnumCodec(clazz); + } + return null; + } + + @Override + public String toString() { + return "EnumCodecProvider{}"; + } +} diff --git a/bson/src/main/org/bson/codecs/Float32BinaryVectorCodec.java b/bson/src/main/org/bson/codecs/Float32BinaryVectorCodec.java new file mode 100644 index 00000000000..99f740a6873 --- /dev/null +++ b/bson/src/main/org/bson/codecs/Float32BinaryVectorCodec.java @@ -0,0 +1,56 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonInvalidOperationException; +import org.bson.BsonReader; +import org.bson.BsonWriter; +import org.bson.Float32BinaryVector; + +/** + * Encodes and decodes {@link Float32BinaryVector} objects. + * + */ +final class Float32BinaryVectorCodec implements Codec { + + @Override + public void encode(final BsonWriter writer, final Float32BinaryVector vectorToEncode, final EncoderContext encoderContext) { + writer.writeBinaryData(new BsonBinary(vectorToEncode)); + } + + @Override + public Float32BinaryVector decode(final BsonReader reader, final DecoderContext decoderContext) { + byte subType = reader.peekBinarySubType(); + + if (subType != BsonBinarySubType.VECTOR.getValue()) { + throw new BsonInvalidOperationException("Expected vector binary subtype " + BsonBinarySubType.VECTOR.getValue() + " but found: " + subType); + } + + return reader.readBinaryData() + .asBinary() + .asVector() + .asFloat32Vector(); + } + + @Override + public Class getEncoderClass() { + return Float32BinaryVector.class; + } +} + diff --git a/bson/src/main/org/bson/codecs/FloatCodec.java b/bson/src/main/org/bson/codecs/FloatCodec.java index 84b85c5aa1b..49dc7e22aff 100644 --- a/bson/src/main/org/bson/codecs/FloatCodec.java +++ b/bson/src/main/org/bson/codecs/FloatCodec.java @@ -16,12 +16,10 @@ package org.bson.codecs; -import org.bson.BsonInvalidOperationException; import org.bson.BsonReader; import org.bson.BsonWriter; -import static java.lang.String.format; -import static org.bson.codecs.NumberCodecHelper.decodeDouble; +import static org.bson.internal.NumberCodecHelper.decodeFloat; /** * Encodes and decodes {@code Float} objects. 
@@ -37,11 +35,7 @@ public void encode(final BsonWriter writer, final Float value, final EncoderCont @Override public Float decode(final BsonReader reader, final DecoderContext decoderContext) { - double value = decodeDouble(reader); - if (value < -Float.MAX_VALUE || value > Float.MAX_VALUE) { - throw new BsonInvalidOperationException(format("%s can not be converted into a Float.", value)); - } - return (float) value; + return decodeFloat(reader); } @Override diff --git a/bson/src/main/org/bson/codecs/Int8VectorCodec.java b/bson/src/main/org/bson/codecs/Int8VectorCodec.java new file mode 100644 index 00000000000..963da625d7f --- /dev/null +++ b/bson/src/main/org/bson/codecs/Int8VectorCodec.java @@ -0,0 +1,58 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonInvalidOperationException; +import org.bson.BsonReader; +import org.bson.BsonWriter; +import org.bson.Int8BinaryVector; + +/** + * Encodes and decodes {@link Int8BinaryVector} objects. 
+ * + * @since 5.3 + */ +final class Int8VectorCodec implements Codec<Int8BinaryVector> { + + @Override + public void encode(final BsonWriter writer, final Int8BinaryVector vectorToEncode, final EncoderContext encoderContext) { + writer.writeBinaryData(new BsonBinary(vectorToEncode)); + } + + @Override + public Int8BinaryVector decode(final BsonReader reader, final DecoderContext decoderContext) { + byte subType = reader.peekBinarySubType(); + + if (subType != BsonBinarySubType.VECTOR.getValue()) { + throw new BsonInvalidOperationException("Expected vector binary subtype " + BsonBinarySubType.VECTOR.getValue() + " but found: " + subType); + } + + return reader.readBinaryData() + .asBinary() + .asVector() + .asInt8Vector(); + } + + + @Override + public Class<Int8BinaryVector> getEncoderClass() { + return Int8BinaryVector.class; + } +} + diff --git a/bson/src/main/org/bson/codecs/IntegerCodec.java b/bson/src/main/org/bson/codecs/IntegerCodec.java index dee6e2512fb..bb0c5c082d5 100644 --- a/bson/src/main/org/bson/codecs/IntegerCodec.java +++ b/bson/src/main/org/bson/codecs/IntegerCodec.java @@ -19,7 +19,7 @@ import org.bson.BsonReader; import org.bson.BsonWriter; -import static org.bson.codecs.NumberCodecHelper.decodeInt; +import static org.bson.internal.NumberCodecHelper.decodeInt; /** * Encodes and decodes {@code Integer} objects. 
diff --git a/bson/src/main/org/bson/codecs/IterableCodec.java b/bson/src/main/org/bson/codecs/IterableCodec.java index 1c03817894b..028c571aaef 100644 --- a/bson/src/main/org/bson/codecs/IterableCodec.java +++ b/bson/src/main/org/bson/codecs/IterableCodec.java @@ -16,66 +16,56 @@ package org.bson.codecs; -import org.bson.BsonBinarySubType; import org.bson.BsonReader; import org.bson.BsonType; import org.bson.BsonWriter; import org.bson.Transformer; +import org.bson.UuidRepresentation; import org.bson.codecs.configuration.CodecRegistry; import java.util.ArrayList; import java.util.List; -import java.util.UUID; import static org.bson.assertions.Assertions.notNull; +import static org.bson.codecs.ContainerCodecHelper.readValue; /** * Encodes and decodes {@code Iterable} objects. - * - * @since 3.3 */ @SuppressWarnings("rawtypes") -public class IterableCodec implements Codec { +class IterableCodec implements Codec, OverridableUuidRepresentationCodec { private final CodecRegistry registry; private final BsonTypeCodecMap bsonTypeCodecMap; private final Transformer valueTransformer; + private final UuidRepresentation uuidRepresentation; - /** - * Construct a new instance with the given {@code CodecRegistry} and {@code BsonTypeClassMap}. - * - * @param registry the non-null codec registry - * @param bsonTypeClassMap the non-null BsonTypeClassMap - */ - public IterableCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap) { - this(registry, bsonTypeClassMap, null); + IterableCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap, final Transformer valueTransformer) { + this(registry, new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry), valueTransformer, + UuidRepresentation.UNSPECIFIED); } - /** - * Construct a new instance with the given {@code CodecRegistry} and {@code BsonTypeClassMap}. 
- * - * @param registry the non-null codec registry - * @param bsonTypeClassMap the non-null BsonTypeClassMap - * @param valueTransformer the value Transformer - */ - public IterableCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap, final Transformer valueTransformer) { + private IterableCodec(final CodecRegistry registry, final BsonTypeCodecMap bsonTypeCodecMap, final Transformer valueTransformer, + final UuidRepresentation uuidRepresentation) { this.registry = notNull("registry", registry); - this.bsonTypeCodecMap = new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry); - this.valueTransformer = valueTransformer != null ? valueTransformer : new Transformer() { - @Override - public Object transform(final Object objectToTransform) { - return objectToTransform; - } - }; + this.bsonTypeCodecMap = bsonTypeCodecMap; + this.valueTransformer = valueTransformer != null ? valueTransformer : objectToTransform -> objectToTransform; + this.uuidRepresentation = uuidRepresentation; + } + + + @Override + public Codec withUuidRepresentation(final UuidRepresentation uuidRepresentation) { + return new IterableCodec(registry, bsonTypeCodecMap, valueTransformer, uuidRepresentation); } @Override public Iterable decode(final BsonReader reader, final DecoderContext decoderContext) { reader.readStartArray(); - List list = new ArrayList(); + List list = new ArrayList<>(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { - list.add(readValue(reader, decoderContext)); + list.add(readValue(reader, decoderContext, bsonTypeCodecMap, uuidRepresentation, registry, valueTransformer)); } reader.readEndArray(); @@ -106,15 +96,4 @@ private void writeValue(final BsonWriter writer, final EncoderContext encoderCon encoderContext.encodeWithChildContext(codec, writer, value); } } - - private Object readValue(final BsonReader reader, final DecoderContext decoderContext) { - BsonType bsonType = reader.getCurrentBsonType(); - if (bsonType == 
BsonType.NULL) { - reader.readNull(); - return null; - } else if (bsonType == BsonType.BINARY && BsonBinarySubType.isUuid(reader.peekBinarySubType()) && reader.peekBinarySize() == 16) { - return registry.get(UUID.class).decode(reader, decoderContext); - } - return valueTransformer.transform(bsonTypeCodecMap.get(bsonType).decode(reader, decoderContext)); - } } diff --git a/bson/src/main/org/bson/codecs/IterableCodecProvider.java b/bson/src/main/org/bson/codecs/IterableCodecProvider.java index 8a0cd4cc757..c59788aa007 100644 --- a/bson/src/main/org/bson/codecs/IterableCodecProvider.java +++ b/bson/src/main/org/bson/codecs/IterableCodecProvider.java @@ -20,7 +20,10 @@ import org.bson.codecs.configuration.CodecProvider; import org.bson.codecs.configuration.CodecRegistry; +import java.util.Objects; + import static org.bson.assertions.Assertions.notNull; +import static org.bson.codecs.BsonTypeClassMap.DEFAULT_BSON_TYPE_CLASS_MAP; /** * A {@code CodecProvider} for classes that implement the {@code Iterable} interface. @@ -35,7 +38,7 @@ public class IterableCodecProvider implements CodecProvider { * Construct a new instance with a default {@code BsonTypeClassMap} and no {@code Transformer}. 
*/ public IterableCodecProvider() { - this(new BsonTypeClassMap()); + this(DEFAULT_BSON_TYPE_CLASS_MAP); } /** @@ -45,7 +48,7 @@ public IterableCodecProvider() { * @param valueTransformer the value transformer for decoded values */ public IterableCodecProvider(final Transformer valueTransformer) { - this(new BsonTypeClassMap(), valueTransformer); + this(DEFAULT_BSON_TYPE_CLASS_MAP, valueTransformer); } /** @@ -71,7 +74,7 @@ public IterableCodecProvider(final BsonTypeClassMap bsonTypeClassMap, final Tran } @Override - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "deprecation"}) public Codec get(final Class clazz, final CodecRegistry registry) { if (Iterable.class.isAssignableFrom(clazz)) { return (Codec) new IterableCodec(registry, bsonTypeClassMap, valueTransformer); @@ -94,7 +97,7 @@ public boolean equals(final Object o) { if (!bsonTypeClassMap.equals(that.bsonTypeClassMap)) { return false; } - if (valueTransformer != null ? !valueTransformer.equals(that.valueTransformer) : that.valueTransformer != null) { + if (!Objects.equals(valueTransformer, that.valueTransformer)) { return false; } @@ -107,4 +110,9 @@ public int hashCode() { result = 31 * result + (valueTransformer != null ? valueTransformer.hashCode() : 0); return result; } + + @Override + public String toString() { + return "IterableCodecProvider{}"; + } } diff --git a/bson/src/main/org/bson/codecs/JsonObjectCodec.java b/bson/src/main/org/bson/codecs/JsonObjectCodec.java new file mode 100644 index 00000000000..7fa5a6262d6 --- /dev/null +++ b/bson/src/main/org/bson/codecs/JsonObjectCodec.java @@ -0,0 +1,69 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonReader; +import org.bson.BsonWriter; +import org.bson.json.JsonObject; +import org.bson.json.JsonReader; +import org.bson.json.JsonWriter; +import org.bson.json.JsonWriterSettings; + +import java.io.StringWriter; + +/** + * Encodes and Decodes JSON object strings. + * + * @since 4.2 + */ +public class JsonObjectCodec implements Codec { + private final JsonWriterSettings writerSettings; + + /** + * Construct a JsonObjectCodec with default JsonWriterSettings + */ + public JsonObjectCodec() { + this(JsonWriterSettings.builder().build()); + } + + /** + * Construct a JsonObjectCodec with provided JsonWriterSettings + * + * @param writerSettings the settings + */ + public JsonObjectCodec(final JsonWriterSettings writerSettings) { + this.writerSettings = writerSettings; + } + + @Override + public void encode(final BsonWriter writer, final JsonObject value, final EncoderContext encoderContext) { + writer.pipe(new JsonReader(value.getJson())); + } + + @Override + public JsonObject decode(final BsonReader reader, final DecoderContext decoderContext) { + StringWriter stringWriter = new StringWriter(); + new JsonWriter(stringWriter, writerSettings).pipe(reader); + return new JsonObject(stringWriter.toString()); + } + + @Override + public Class getEncoderClass() { + return JsonObject.class; + } + +} diff --git a/bson/src/main/org/bson/codecs/JsonObjectCodecProvider.java b/bson/src/main/org/bson/codecs/JsonObjectCodecProvider.java new file mode 100644 index 00000000000..f8f1ed79d1e --- /dev/null +++ 
b/bson/src/main/org/bson/codecs/JsonObjectCodecProvider.java @@ -0,0 +1,43 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.codecs.configuration.CodecProvider; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.json.JsonObject; + +/** + * A {@code CodecProvider} for JSON object strings + * + * @since 4.2 + */ +public final class JsonObjectCodecProvider implements CodecProvider { + + @Override + @SuppressWarnings("unchecked") + public Codec get(final Class clazz, final CodecRegistry registry) { + if (clazz.equals(JsonObject.class)) { + return (Codec) new JsonObjectCodec(); + } + return null; + } + + @Override + public String toString() { + return "JsonObjectCodecProvider{}"; + } +} diff --git a/bson/src/main/org/bson/codecs/LongCodec.java b/bson/src/main/org/bson/codecs/LongCodec.java index 29adc373488..0e16e4430bc 100644 --- a/bson/src/main/org/bson/codecs/LongCodec.java +++ b/bson/src/main/org/bson/codecs/LongCodec.java @@ -19,7 +19,7 @@ import org.bson.BsonReader; import org.bson.BsonWriter; -import static org.bson.codecs.NumberCodecHelper.decodeLong; +import static org.bson.internal.NumberCodecHelper.decodeLong; /** * Encodes and decodes {@code Long} objects. 
diff --git a/bson/src/main/org/bson/codecs/MapCodec.java b/bson/src/main/org/bson/codecs/MapCodec.java index 14fecdc6f07..e98a2bde399 100644 --- a/bson/src/main/org/bson/codecs/MapCodec.java +++ b/bson/src/main/org/bson/codecs/MapCodec.java @@ -16,61 +16,34 @@ package org.bson.codecs; -import org.bson.BsonBinarySubType; import org.bson.BsonReader; -import org.bson.BsonType; import org.bson.BsonWriter; import org.bson.Transformer; +import org.bson.UuidRepresentation; import org.bson.codecs.configuration.CodecRegistry; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.UUID; -import static java.util.Arrays.asList; import static org.bson.assertions.Assertions.notNull; -import static org.bson.codecs.configuration.CodecRegistries.fromProviders; /** - * A Codec for Map instances. + * A codec for {@code Map}. * - * @since 3.5 + *

Supports {@link Map}, {@link java.util.NavigableMap}, {@link java.util.AbstractMap} or any concrete class that implements {@code + * Map} and has a public no-args constructor. If the type argument is {@code Map}, it constructs + * {@code HashMap} instances when decoding. If the type argument is {@code NavigableMap}, it constructs + * {@code TreeMap} instances when decoding.

+ * + * @param the actual type of the Map, e.g. {@code NavigableMap} */ -public class MapCodec implements Codec> { +@SuppressWarnings("rawtypes") +final class MapCodec> extends AbstractMapCodec + implements OverridableUuidRepresentationCodec { - private static final CodecRegistry DEFAULT_REGISTRY = fromProviders(asList(new ValueCodecProvider(), new BsonValueCodecProvider(), - new DocumentCodecProvider(), new IterableCodecProvider(), new MapCodecProvider())); - private static final BsonTypeClassMap DEFAULT_BSON_TYPE_CLASS_MAP = new BsonTypeClassMap(); private final BsonTypeCodecMap bsonTypeCodecMap; private final CodecRegistry registry; private final Transformer valueTransformer; - - /** - * Construct a new instance with a default {@code CodecRegistry} - */ - public MapCodec() { - this(DEFAULT_REGISTRY); - } - - /** - Construct a new instance with the given registry - * - * @param registry the registry - */ - public MapCodec(final CodecRegistry registry) { - this(registry, DEFAULT_BSON_TYPE_CLASS_MAP); - } - - /** - * Construct a new instance with the given registry and BSON type class map. - * - * @param registry the registry - * @param bsonTypeClassMap the BSON type class map - */ - public MapCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap) { - this(registry, bsonTypeClassMap, null); - } + private final UuidRepresentation uuidRepresentation; /** * Construct a new instance with the given registry and BSON type class map. 
The transformer is applied as a last step when decoding @@ -80,68 +53,41 @@ public MapCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeCla * @param registry the registry * @param bsonTypeClassMap the BSON type class map * @param valueTransformer the value transformer to use as a final step when decoding the value of any field in the map + * @param clazz the Map subclass + * @since 4.8 */ - public MapCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap, final Transformer valueTransformer) { - this.registry = notNull("registry", registry); - this.bsonTypeCodecMap = new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry); - this.valueTransformer = valueTransformer != null ? valueTransformer : new Transformer() { - @Override - public Object transform(final Object value) { - return value; - } - }; + MapCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap, final Transformer valueTransformer, + final Class clazz) { + this(registry, new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry), valueTransformer, + UuidRepresentation.UNSPECIFIED, clazz); } - @Override - public void encode(final BsonWriter writer, final Map map, final EncoderContext encoderContext) { - writer.writeStartDocument(); - for (final Map.Entry entry : map.entrySet()) { - writer.writeName(entry.getKey()); - writeValue(writer, encoderContext, entry.getValue()); - } - writer.writeEndDocument(); + private MapCodec(final CodecRegistry registry, final BsonTypeCodecMap bsonTypeCodecMap, final Transformer valueTransformer, + final UuidRepresentation uuidRepresentation, final Class clazz) { + super(clazz); + this.registry = notNull("registry", registry); + this.bsonTypeCodecMap = bsonTypeCodecMap; + this.valueTransformer = valueTransformer != null ? 
valueTransformer : (value) -> value; + this.uuidRepresentation = uuidRepresentation; } @Override - public Map decode(final BsonReader reader, final DecoderContext decoderContext) { - Map map = new HashMap(); - - reader.readStartDocument(); - while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { - String fieldName = reader.readName(); - map.put(fieldName, readValue(reader, decoderContext)); + public Codec withUuidRepresentation(final UuidRepresentation uuidRepresentation) { + if (this.uuidRepresentation.equals(uuidRepresentation)) { + return this; } - - reader.readEndDocument(); - return map; + return new MapCodec<>(registry, bsonTypeCodecMap, valueTransformer, uuidRepresentation, getEncoderClass()); } - @SuppressWarnings("unchecked") @Override - public Class> getEncoderClass() { - return (Class>) ((Class) Map.class); + Object readValue(final BsonReader reader, final DecoderContext decoderContext) { + return ContainerCodecHelper.readValue(reader, decoderContext, bsonTypeCodecMap, uuidRepresentation, registry, valueTransformer); } - private Object readValue(final BsonReader reader, final DecoderContext decoderContext) { - BsonType bsonType = reader.getCurrentBsonType(); - if (bsonType == BsonType.NULL) { - reader.readNull(); - return null; - } else if (bsonType == BsonType.ARRAY) { - return decoderContext.decodeWithChildContext(registry.get(List.class), reader); - } else if (bsonType == BsonType.BINARY && BsonBinarySubType.isUuid(reader.peekBinarySubType()) && reader.peekBinarySize() == 16) { - return decoderContext.decodeWithChildContext(registry.get(UUID.class), reader); - } - return valueTransformer.transform(bsonTypeCodecMap.get(bsonType).decode(reader, decoderContext)); - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - private void writeValue(final BsonWriter writer, final EncoderContext encoderContext, final Object value) { - if (value == null) { - writer.writeNull(); - } else { - Codec codec = registry.get(value.getClass()); - 
encoderContext.encodeWithChildContext(codec, writer, value); - } + @SuppressWarnings({"rawtypes", "unchecked"}) + @Override + void writeValue(final BsonWriter writer, final Object value, final EncoderContext encoderContext) { + Codec codec = registry.get(value.getClass()); + encoderContext.encodeWithChildContext(codec, writer, value); } } diff --git a/bson/src/main/org/bson/codecs/MapCodecProvider.java b/bson/src/main/org/bson/codecs/MapCodecProvider.java index 348908eb6b5..d87de577211 100644 --- a/bson/src/main/org/bson/codecs/MapCodecProvider.java +++ b/bson/src/main/org/bson/codecs/MapCodecProvider.java @@ -17,12 +17,19 @@ package org.bson.codecs; import org.bson.Transformer; +import org.bson.codecs.configuration.CodecConfigurationException; import org.bson.codecs.configuration.CodecProvider; import org.bson.codecs.configuration.CodecRegistry; +import java.lang.reflect.Type; +import java.util.Collections; +import java.util.List; import java.util.Map; +import java.util.Objects; import static org.bson.assertions.Assertions.notNull; +import static org.bson.codecs.BsonTypeClassMap.DEFAULT_BSON_TYPE_CLASS_MAP; +import static org.bson.codecs.ContainerCodecHelper.getCodec; /** * A {@code CodecProvider} for the Map class and all the default Codec implementations on which it depends. @@ -37,7 +44,7 @@ public class MapCodecProvider implements CodecProvider { * Construct a new instance with a default {@code BsonTypeClassMap}. 
*/ public MapCodecProvider() { - this(new BsonTypeClassMap()); + this(DEFAULT_BSON_TYPE_CLASS_MAP); } /** @@ -57,7 +64,7 @@ public MapCodecProvider(final BsonTypeClassMap bsonTypeClassMap) { * @param valueTransformer the value transformer for decoded values */ public MapCodecProvider(final Transformer valueTransformer) { - this(new BsonTypeClassMap(), valueTransformer); + this(DEFAULT_BSON_TYPE_CLASS_MAP, valueTransformer); } /** @@ -72,12 +79,34 @@ public MapCodecProvider(final BsonTypeClassMap bsonTypeClassMap, final Transform } @Override - @SuppressWarnings("unchecked") public <T> Codec<T> get(final Class<T> clazz, final CodecRegistry registry) { + return get(clazz, Collections.emptyList(), registry); + } + + @Override + public <T> Codec<T> get(final Class<T> clazz, final List<Type> typeArguments, final CodecRegistry registry) { if (Map.class.isAssignableFrom(clazz)) { - return (Codec<T>) new MapCodec(registry, bsonTypeClassMap, valueTransformer); + int typeArgumentsSize = typeArguments.size(); + switch (typeArgumentsSize) { + case 0: { + @SuppressWarnings({"unchecked", "rawtypes"}) + Codec<T> result = new MapCodec(registry, bsonTypeClassMap, valueTransformer, clazz); + return result; + } + case 2: { + Type genericTypeOfMapKey = typeArguments.get(0); + if (!genericTypeOfMapKey.getTypeName().equals("java.lang.String")) { + throw new CodecConfigurationException("Unsupported key type for Map: " + genericTypeOfMapKey.getTypeName()); + } + @SuppressWarnings({"unchecked", "rawtypes"}) + Codec<T> result = new ParameterizedMapCodec(getCodec(registry, typeArguments.get(1)), clazz); + return result; + } + default: { + throw new CodecConfigurationException("Expected two parameterized types for a Map, but found " + typeArgumentsSize); + } + } } - return null; } @@ -94,7 +123,7 @@ public boolean equals(final Object o) { if (!bsonTypeClassMap.equals(that.bsonTypeClassMap)) { return false; } - if (valueTransformer != null ? 
!valueTransformer.equals(that.valueTransformer) : that.valueTransformer != null) { + if (!Objects.equals(valueTransformer, that.valueTransformer)) { return false; } @@ -107,4 +136,9 @@ public int hashCode() { result = 31 * result + (valueTransformer != null ? valueTransformer.hashCode() : 0); return result; } + + @Override + public String toString() { + return "MapCodecProvider{}"; + } } diff --git a/bson/src/main/org/bson/codecs/NumberCodecHelper.java b/bson/src/main/org/bson/codecs/NumberCodecHelper.java deleted file mode 100644 index d8e05300894..00000000000 --- a/bson/src/main/org/bson/codecs/NumberCodecHelper.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson.codecs; - -import org.bson.BsonInvalidOperationException; -import org.bson.BsonReader; -import org.bson.BsonType; - -import static java.lang.String.format; - -final class NumberCodecHelper { - - static int decodeInt(final BsonReader reader) { - int intValue; - BsonType bsonType = reader.getCurrentBsonType(); - switch (bsonType) { - case INT32: - intValue = reader.readInt32(); - break; - case INT64: - long longValue = reader.readInt64(); - intValue = (int) longValue; - if (longValue != (long) intValue) { - throw invalidConversion(Integer.class, longValue); - } - break; - case DOUBLE: - double doubleValue = reader.readDouble(); - intValue = (int) doubleValue; - if (doubleValue != (double) intValue) { - throw invalidConversion(Integer.class, doubleValue); - } - break; - default: - throw new BsonInvalidOperationException(format("Invalid numeric type, found: %s", bsonType)); - } - return intValue; - } - - static long decodeLong(final BsonReader reader) { - long longValue; - BsonType bsonType = reader.getCurrentBsonType(); - switch (bsonType) { - case INT32: - longValue = reader.readInt32(); - break; - case INT64: - longValue = reader.readInt64(); - break; - case DOUBLE: - double doubleValue = reader.readDouble(); - longValue = (long) doubleValue; - if (doubleValue != (double) longValue) { - throw invalidConversion(Long.class, doubleValue); - } - break; - default: - throw new BsonInvalidOperationException(format("Invalid numeric type, found: %s", bsonType)); - } - return longValue; - } - - static double decodeDouble(final BsonReader reader) { - double doubleValue; - BsonType bsonType = reader.getCurrentBsonType(); - switch (bsonType) { - case INT32: - doubleValue = reader.readInt32(); - break; - case INT64: - long longValue = reader.readInt64(); - doubleValue = longValue; - if (longValue != (long) doubleValue) { - throw invalidConversion(Double.class, longValue); - } - break; - case DOUBLE: - doubleValue = reader.readDouble(); - break; - default: 
- throw new BsonInvalidOperationException(format("Invalid numeric type, found: %s", bsonType)); - } - return doubleValue; - } - - private static BsonInvalidOperationException invalidConversion(final Class clazz, final Number value) { - return new BsonInvalidOperationException(format("Could not convert `%s` to a %s without losing precision", value, clazz)); - } - - private NumberCodecHelper() { - } -} diff --git a/bson/src/main/org/bson/codecs/OverridableUuidRepresentationCodec.java b/bson/src/main/org/bson/codecs/OverridableUuidRepresentationCodec.java new file mode 100644 index 00000000000..f0f392dd140 --- /dev/null +++ b/bson/src/main/org/bson/codecs/OverridableUuidRepresentationCodec.java @@ -0,0 +1,34 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.UuidRepresentation; + +/** + * A marker interface for {@code Codec} implementations that can derive a new instance that overrides the {@code UuidRepresentation}. + * @param the value type + * @since 3.12 + */ +public interface OverridableUuidRepresentationCodec { + /** + * Implementations must return a new instance with the {@code UuidRepresentation} overridden with the given value. 
+ * + * @param uuidRepresentation the UuidRepresentation + * @return a new instance equivalent to this but with the given UuidRepresentation + */ + Codec withUuidRepresentation(UuidRepresentation uuidRepresentation); +} diff --git a/bson/src/main/org/bson/codecs/OverridableUuidRepresentationUuidCodec.java b/bson/src/main/org/bson/codecs/OverridableUuidRepresentationUuidCodec.java new file mode 100644 index 00000000000..1076282e89f --- /dev/null +++ b/bson/src/main/org/bson/codecs/OverridableUuidRepresentationUuidCodec.java @@ -0,0 +1,53 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.UuidRepresentation; + +import java.util.UUID; + +/** + * An extension of {@code UuidCodec} that allows its configured {@code UuidRepresentation} to be overridden by an externally configured + * {@code UuidRepresentation}, most likely configured on {@code MongoClientSettings} or {@code MongoClientOptions}. + * + * @since 3.12 + */ +public class OverridableUuidRepresentationUuidCodec extends UuidCodec implements OverridableUuidRepresentationCodec { + + /** + * Construct an instance with the default UUID representation. + */ + public OverridableUuidRepresentationUuidCodec() { + } + + /** + * Construct an instance with the given UUID representation. 
+ * + * @param uuidRepresentation the UUID representation + */ + public OverridableUuidRepresentationUuidCodec(final UuidRepresentation uuidRepresentation) { + super(uuidRepresentation); + } + + @Override + public Codec withUuidRepresentation(final UuidRepresentation uuidRepresentation) { + if (getUuidRepresentation().equals(uuidRepresentation)) { + return this; + } + return new OverridableUuidRepresentationUuidCodec(uuidRepresentation); + } +} diff --git a/bson/src/main/org/bson/codecs/PackedBitBinaryVectorCodec.java b/bson/src/main/org/bson/codecs/PackedBitBinaryVectorCodec.java new file mode 100644 index 00000000000..c8d0410a4c6 --- /dev/null +++ b/bson/src/main/org/bson/codecs/PackedBitBinaryVectorCodec.java @@ -0,0 +1,59 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonInvalidOperationException; +import org.bson.BsonReader; +import org.bson.BsonWriter; +import org.bson.PackedBitBinaryVector; + +/** + * Encodes and decodes {@link PackedBitBinaryVector} objects. 
+ * + */ +final class PackedBitBinaryVectorCodec implements Codec { + + @Override + public void encode(final BsonWriter writer, final PackedBitBinaryVector vectorToEncode, final EncoderContext encoderContext) { + writer.writeBinaryData(new BsonBinary(vectorToEncode)); + } + + @Override + public PackedBitBinaryVector decode(final BsonReader reader, final DecoderContext decoderContext) { + byte subType = reader.peekBinarySubType(); + + if (subType != BsonBinarySubType.VECTOR.getValue()) { + throw new BsonInvalidOperationException( + "Expected vector binary subtype " + BsonBinarySubType.VECTOR.getValue() + " but found: " + subType); + } + + return reader.readBinaryData() + .asBinary() + .asVector() + .asPackedBitVector(); + } + + + @Override + public Class getEncoderClass() { + return PackedBitBinaryVector.class; + } +} + + diff --git a/bson/src/main/org/bson/codecs/ParameterizedCollectionCodec.java b/bson/src/main/org/bson/codecs/ParameterizedCollectionCodec.java new file mode 100644 index 00000000000..8d12a847a57 --- /dev/null +++ b/bson/src/main/org/bson/codecs/ParameterizedCollectionCodec.java @@ -0,0 +1,41 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs; + +import org.bson.BsonReader; +import org.bson.BsonWriter; + +import java.util.Collection; + +class ParameterizedCollectionCodec> extends AbstractCollectionCodec { + private final Codec codec; + + ParameterizedCollectionCodec(final Codec codec, final Class clazz) { + super(clazz); + this.codec = codec; + } + + @Override + T readValue(final BsonReader reader, final DecoderContext decoderContext) { + return decoderContext.decodeWithChildContext(codec, reader); + } + + @Override + void writeValue(final BsonWriter writer, final T cur, final EncoderContext encoderContext) { + encoderContext.encodeWithChildContext(codec, writer, cur); + } +} diff --git a/bson/src/main/org/bson/codecs/ParameterizedMapCodec.java b/bson/src/main/org/bson/codecs/ParameterizedMapCodec.java new file mode 100644 index 00000000000..b4871f6c0c3 --- /dev/null +++ b/bson/src/main/org/bson/codecs/ParameterizedMapCodec.java @@ -0,0 +1,46 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonReader; +import org.bson.BsonWriter; + +import java.util.Map; + +/** + * A Codec for Map instances. 
+ * + * @since 3.5 + */ +class ParameterizedMapCodec> extends AbstractMapCodec { + private final Codec codec; + + ParameterizedMapCodec(final Codec codec, final Class clazz) { + super(clazz); + this.codec = codec; + } + + @Override + T readValue(final BsonReader reader, final DecoderContext decoderContext) { + return decoderContext.decodeWithChildContext(codec, reader); + } + + @Override + void writeValue(final BsonWriter writer, final T value, final EncoderContext encoderContext) { + encoderContext.encodeWithChildContext(codec, writer, value); + } +} diff --git a/bson/src/main/org/bson/codecs/PatternCodec.java b/bson/src/main/org/bson/codecs/PatternCodec.java index 8df0f603c5a..1287575c7dd 100644 --- a/bson/src/main/org/bson/codecs/PatternCodec.java +++ b/bson/src/main/org/bson/codecs/PatternCodec.java @@ -105,7 +105,7 @@ private enum RegexFlag { UNICODE_CASE(Pattern.UNICODE_CASE, 'u', "Pattern.UNICODE_CASE"), COMMENTS(Pattern.COMMENTS, 'x', null); - private static final Map BY_CHARACTER = new HashMap(); + private static final Map BY_CHARACTER = new HashMap<>(); private final int javaFlag; private final char flagChar; diff --git a/bson/src/main/org/bson/codecs/RawBsonDocumentCodec.java b/bson/src/main/org/bson/codecs/RawBsonDocumentCodec.java index 4d7b46fc291..4d81b7f97aa 100644 --- a/bson/src/main/org/bson/codecs/RawBsonDocumentCodec.java +++ b/bson/src/main/org/bson/codecs/RawBsonDocumentCodec.java @@ -40,11 +40,8 @@ public RawBsonDocumentCodec() { @Override public void encode(final BsonWriter writer, final RawBsonDocument value, final EncoderContext encoderContext) { - BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(value.getByteBuffer())); - try { + try (BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(value.getByteBuffer()))) { writer.pipe(reader); - } finally { - reader.close(); } } diff --git a/bson/src/main/org/bson/codecs/RepresentationConfigurable.java 
b/bson/src/main/org/bson/codecs/RepresentationConfigurable.java new file mode 100644 index 00000000000..2d33f991052 --- /dev/null +++ b/bson/src/main/org/bson/codecs/RepresentationConfigurable.java @@ -0,0 +1,47 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonType; +import org.bson.codecs.configuration.CodecConfigurationException; + +/** + * Implementations of this interface can decode additional types + * and translate them to the desired value type depending on the BsonRepresentation. + * + * @param the value type + * @since 4.2 + */ +public interface RepresentationConfigurable { + + /** + * Gets the BsonRepresentation. + * + * @return the BsonRepresentation + */ + BsonType getRepresentation(); + + /** + * Returns an immutable codec with the given representation. If the provided representation + * is not supported an exception will be thrown. + * + * @param representation the BsonRepresentation. + * @return a new Codec with the correct representation. 
+ * @throws CodecConfigurationException if the codec does not support the provided representation + */ + Codec withRepresentation(BsonType representation); +} diff --git a/bson/src/main/org/bson/codecs/ShortCodec.java b/bson/src/main/org/bson/codecs/ShortCodec.java index e5aaf8f9acb..8c439e36b8d 100644 --- a/bson/src/main/org/bson/codecs/ShortCodec.java +++ b/bson/src/main/org/bson/codecs/ShortCodec.java @@ -16,12 +16,10 @@ package org.bson.codecs; -import org.bson.BsonInvalidOperationException; import org.bson.BsonReader; import org.bson.BsonWriter; -import static java.lang.String.format; -import static org.bson.codecs.NumberCodecHelper.decodeInt; +import static org.bson.internal.NumberCodecHelper.decodeShort; /** * Encodes and decodes {@code Short} objects. @@ -37,11 +35,7 @@ public void encode(final BsonWriter writer, final Short value, final EncoderCont @Override public Short decode(final BsonReader reader, final DecoderContext decoderContext) { - int value = decodeInt(reader); - if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) { - throw new BsonInvalidOperationException(format("%s can not be converted into a Short.", value)); - } - return (short) value; + return decodeShort(reader); } @Override diff --git a/bson/src/main/org/bson/codecs/StringCodec.java b/bson/src/main/org/bson/codecs/StringCodec.java index f08508341a2..d31cc6eb24f 100644 --- a/bson/src/main/org/bson/codecs/StringCodec.java +++ b/bson/src/main/org/bson/codecs/StringCodec.java @@ -16,27 +16,73 @@ package org.bson.codecs; +import org.bson.BsonInvalidOperationException; import org.bson.BsonReader; import org.bson.BsonType; import org.bson.BsonWriter; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.types.ObjectId; /** * Encodes and decodes {@code String} objects. 
* * @since 3.0 */ -public class StringCodec implements Codec { +public class StringCodec implements Codec, RepresentationConfigurable { + private final BsonType representation; + + /** + * Constructs a StringCodec with a String representation. + */ + public StringCodec() { + representation = BsonType.STRING; + } + + private StringCodec(final BsonType representation) { + this.representation = representation; + } + + @Override + public BsonType getRepresentation() { + return representation; + } + + @Override + public Codec withRepresentation(final BsonType representation) { + if (representation != BsonType.OBJECT_ID && representation != BsonType.STRING) { + throw new CodecConfigurationException(representation + " is not a supported representation for StringCodec"); + } + return new StringCodec(representation); + } + + @Override public void encode(final BsonWriter writer, final String value, final EncoderContext encoderContext) { - writer.writeString(value); + switch (representation) { + case STRING: + writer.writeString(value); + break; + case OBJECT_ID: + writer.writeObjectId(new ObjectId(value)); + break; + default: + throw new BsonInvalidOperationException("Cannot encode a String to a " + representation); + } } @Override public String decode(final BsonReader reader, final DecoderContext decoderContext) { - if (reader.getCurrentBsonType() == BsonType.SYMBOL) { - return reader.readSymbol(); - } else { - return reader.readString(); + switch (representation) { + case STRING: + if (reader.getCurrentBsonType() == BsonType.SYMBOL) { + return reader.readSymbol(); + } else { + return reader.readString(); + } + case OBJECT_ID: + return reader.readObjectId().toHexString(); + default: + throw new CodecConfigurationException("Cannot decode " + representation + " to a String"); } } diff --git a/bson/src/main/org/bson/codecs/UuidCodec.java b/bson/src/main/org/bson/codecs/UuidCodec.java index eb3a9cfb938..a54b62ad46a 100644 --- a/bson/src/main/org/bson/codecs/UuidCodec.java +++ 
b/bson/src/main/org/bson/codecs/UuidCodec.java @@ -20,13 +20,14 @@ import org.bson.BsonBinary; import org.bson.BsonBinarySubType; import org.bson.BsonReader; -import org.bson.BsonSerializationException; import org.bson.BsonWriter; import org.bson.UuidRepresentation; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.internal.UuidHelper; import java.util.UUID; -import static org.bson.codecs.UuidCodecHelper.reverseByteArray; +import static org.bson.assertions.Assertions.notNull; /** * Encodes and decodes {@code UUID} objects. @@ -35,8 +36,7 @@ */ public class UuidCodec implements Codec { - private final UuidRepresentation encoderUuidRepresentation; - private final UuidRepresentation decoderUuidRepresentation; + private final UuidRepresentation uuidRepresentation; /** * The default UUIDRepresentation is JAVA_LEGACY to be compatible with existing documents @@ -45,41 +45,35 @@ public class UuidCodec implements Codec { * @see org.bson.UuidRepresentation */ public UuidCodec(final UuidRepresentation uuidRepresentation) { - this.encoderUuidRepresentation = uuidRepresentation; - this.decoderUuidRepresentation = uuidRepresentation; + notNull("uuidRepresentation", uuidRepresentation); + this.uuidRepresentation = uuidRepresentation; } /** * The constructor for UUIDCodec, default is JAVA_LEGACY */ public UuidCodec() { - this.encoderUuidRepresentation = UuidRepresentation.JAVA_LEGACY; - this.decoderUuidRepresentation = UuidRepresentation.JAVA_LEGACY; + this.uuidRepresentation = UuidRepresentation.UNSPECIFIED; + } + + /** + * The {@code UuidRepresentation} with which this instance is configured + * + * @return the uuid representation + * @since 3.12 + */ + public UuidRepresentation getUuidRepresentation() { + return uuidRepresentation; } @Override public void encode(final BsonWriter writer, final UUID value, final EncoderContext encoderContext) { - byte[] binaryData = new byte[16]; - writeLongToArrayBigEndian(binaryData, 0, 
value.getMostSignificantBits()); - writeLongToArrayBigEndian(binaryData, 8, value.getLeastSignificantBits()); - switch (encoderUuidRepresentation) { - case C_SHARP_LEGACY: - UuidCodecHelper.reverseByteArray(binaryData, 0, 4); - UuidCodecHelper.reverseByteArray(binaryData, 4, 2); - UuidCodecHelper.reverseByteArray(binaryData, 6, 2); - break; - case JAVA_LEGACY: - UuidCodecHelper.reverseByteArray(binaryData, 0, 8); - UuidCodecHelper.reverseByteArray(binaryData, 8, 8); - break; - case PYTHON_LEGACY: - case STANDARD: - break; - default: - throw new BSONException("Unexpected UUID representation"); + if (uuidRepresentation == UuidRepresentation.UNSPECIFIED) { + throw new CodecConfigurationException("The uuidRepresentation has not been specified, so the UUID cannot be encoded."); } + byte[] binaryData = UuidHelper.encodeUuidToBinary(value, uuidRepresentation); // changed the default subtype to STANDARD since 3.0 - if (encoderUuidRepresentation == UuidRepresentation.STANDARD) { + if (uuidRepresentation == UuidRepresentation.STANDARD) { writer.writeBinaryData(new BsonBinary(BsonBinarySubType.UUID_STANDARD, binaryData)); } else { writer.writeBinaryData(new BsonBinary(BsonBinarySubType.UUID_LEGACY, binaryData)); @@ -96,30 +90,7 @@ public UUID decode(final BsonReader reader, final DecoderContext decoderContext) byte[] bytes = reader.readBinaryData().getData(); - if (bytes.length != 16) { - throw new BsonSerializationException(String.format("Expected length to be 16, not %d.", bytes.length)); - } - - if (subType == BsonBinarySubType.UUID_LEGACY.getValue()) { - switch (decoderUuidRepresentation) { - case C_SHARP_LEGACY: - reverseByteArray(bytes, 0, 4); - reverseByteArray(bytes, 4, 2); - reverseByteArray(bytes, 6, 2); - break; - case JAVA_LEGACY: - reverseByteArray(bytes, 0, 8); - reverseByteArray(bytes, 8, 8); - break; - case PYTHON_LEGACY: - case STANDARD: - break; - default: - throw new BSONException("Unexpected UUID representation"); - } - } - - return new 
UUID(readLongFromArrayBigEndian(bytes, 0), readLongFromArrayBigEndian(bytes, 8)); + return UuidHelper.decodeBinaryToUuid(bytes, subType, uuidRepresentation); } @Override @@ -127,28 +98,10 @@ public Class getEncoderClass() { return UUID.class; } - private static void writeLongToArrayBigEndian(final byte[] bytes, final int offset, final long x) { - bytes[offset + 7] = (byte) (0xFFL & (x)); - bytes[offset + 6] = (byte) (0xFFL & (x >> 8)); - bytes[offset + 5] = (byte) (0xFFL & (x >> 16)); - bytes[offset + 4] = (byte) (0xFFL & (x >> 24)); - bytes[offset + 3] = (byte) (0xFFL & (x >> 32)); - bytes[offset + 2] = (byte) (0xFFL & (x >> 40)); - bytes[offset + 1] = (byte) (0xFFL & (x >> 48)); - bytes[offset] = (byte) (0xFFL & (x >> 56)); - } - - private static long readLongFromArrayBigEndian(final byte[] bytes, final int offset) { - long x = 0; - x |= (0xFFL & bytes[offset + 7]); - x |= (0xFFL & bytes[offset + 6]) << 8; - x |= (0xFFL & bytes[offset + 5]) << 16; - x |= (0xFFL & bytes[offset + 4]) << 24; - x |= (0xFFL & bytes[offset + 3]) << 32; - x |= (0xFFL & bytes[offset + 2]) << 40; - x |= (0xFFL & bytes[offset + 1]) << 48; - x |= (0xFFL & bytes[offset]) << 56; - return x; + @Override + public String toString() { + return "UuidCodec{" + + "uuidRepresentation=" + uuidRepresentation + + '}'; } - } diff --git a/bson/src/main/org/bson/codecs/UuidCodecProvider.java b/bson/src/main/org/bson/codecs/UuidCodecProvider.java index 25de388c3d2..de0fdf146e9 100644 --- a/bson/src/main/org/bson/codecs/UuidCodecProvider.java +++ b/bson/src/main/org/bson/codecs/UuidCodecProvider.java @@ -16,11 +16,11 @@ package org.bson.codecs; - import org.bson.UuidRepresentation; - import org.bson.codecs.configuration.CodecProvider; - import org.bson.codecs.configuration.CodecRegistry; +import org.bson.UuidRepresentation; +import org.bson.codecs.configuration.CodecProvider; +import org.bson.codecs.configuration.CodecRegistry; - import java.util.UUID; +import java.util.UUID; /** * A {@code CodecProvider} 
for UUID Codecs with custom UUID representations @@ -29,7 +29,7 @@ */ public class UuidCodecProvider implements CodecProvider { - private UuidRepresentation uuidRepresentation; + private final UuidRepresentation uuidRepresentation; /** * Set the UUIDRepresentation to be used in the codec diff --git a/bson/src/main/org/bson/codecs/ValueCodecProvider.java b/bson/src/main/org/bson/codecs/ValueCodecProvider.java index 96a41b816f0..5c21e048529 100644 --- a/bson/src/main/org/bson/codecs/ValueCodecProvider.java +++ b/bson/src/main/org/bson/codecs/ValueCodecProvider.java @@ -42,6 +42,10 @@ *
  • {@link org.bson.codecs.StringCodec}
  • *
  • {@link org.bson.codecs.SymbolCodec}
  • *
  • {@link org.bson.codecs.UuidCodec}
  • + *
  • {@link BinaryVectorCodec}
  • + *
  • {@link Float32BinaryVectorCodec}
  • + *
  • {@link Int8VectorCodec}
  • + *
  • {@link PackedBitBinaryVectorCodec}
  • *
  • {@link org.bson.codecs.ByteCodec}
  • *
  • {@link org.bson.codecs.ShortCodec}
  • *
  • {@link org.bson.codecs.ByteArrayCodec}
  • @@ -54,7 +58,7 @@ * @since 3.0 */ public class ValueCodecProvider implements CodecProvider { - private final Map, Codec> codecs = new HashMap, Codec>(); + private final Map, Codec> codecs = new HashMap<>(); /** * A provider of Codecs for simple value types. @@ -85,7 +89,11 @@ private void addCodecs() { addCodec(new CharacterCodec()); addCodec(new StringCodec()); addCodec(new SymbolCodec()); - addCodec(new UuidCodec()); + addCodec(new OverridableUuidRepresentationUuidCodec()); + addCodec(new BinaryVectorCodec()); + addCodec(new Float32BinaryVectorCodec()); + addCodec(new Int8VectorCodec()); + addCodec(new PackedBitBinaryVectorCodec()); addCodec(new ByteCodec()); addCodec(new PatternCodec()); @@ -117,4 +125,9 @@ public boolean equals(final Object o) { public int hashCode() { return 0; } + + @Override + public String toString() { + return "ValueCodecProvider{}"; + } } diff --git a/bson/src/main/org/bson/codecs/configuration/ChildCodecRegistry.java b/bson/src/main/org/bson/codecs/configuration/ChildCodecRegistry.java deleted file mode 100644 index e52544dac9e..00000000000 --- a/bson/src/main/org/bson/codecs/configuration/ChildCodecRegistry.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson.codecs.configuration; - - -import org.bson.codecs.Codec; - -// An implementation of CodecRegistry that is used to detect cyclic dependencies between Codecs -class ChildCodecRegistry implements CodecRegistry { - - private final ChildCodecRegistry parent; - private final ProvidersCodecRegistry registry; - private final Class codecClass; - - ChildCodecRegistry(final ProvidersCodecRegistry registry, final Class codecClass) { - this.codecClass = codecClass; - this.parent = null; - this.registry = registry; - } - - - private ChildCodecRegistry(final ChildCodecRegistry parent, final Class codecClass) { - this.parent = parent; - this.codecClass = codecClass; - this.registry = parent.registry; - } - - public Class getCodecClass() { - return codecClass; - } - - // Gets a Codec, but if it detects a cyclic dependency, return a LazyCodec which breaks the chain. - public Codec get(final Class clazz) { - if (hasCycles(clazz)) { - return new LazyCodec(registry, clazz); - } else { - return registry.get(new ChildCodecRegistry(this, clazz)); - } - } - - @SuppressWarnings("rawtypes") - private Boolean hasCycles(final Class theClass) { - ChildCodecRegistry current = this; - while (current != null) { - if (current.codecClass.equals(theClass)) { - return true; - } - - current = current.parent; - } - - return false; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - ChildCodecRegistry that = (ChildCodecRegistry) o; - - if (!codecClass.equals(that.codecClass)) { - return false; - } - if (parent != null ? !parent.equals(that.parent) : that.parent != null) { - return false; - } - if (!registry.equals(that.registry)) { - return false; - } - - return true; - } - - @Override - public int hashCode() { - int result = parent != null ? 
parent.hashCode() : 0; - result = 31 * result + registry.hashCode(); - result = 31 * result + codecClass.hashCode(); - return result; - } -} diff --git a/bson/src/main/org/bson/codecs/configuration/CodecCache.java b/bson/src/main/org/bson/codecs/configuration/CodecCache.java deleted file mode 100644 index 49d6e448432..00000000000 --- a/bson/src/main/org/bson/codecs/configuration/CodecCache.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson.codecs.configuration; - -import org.bson.codecs.Codec; - -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -import static java.lang.String.format; - -final class CodecCache { - private final ConcurrentMap, Optional>> codecCache = - new ConcurrentHashMap, Optional>>(); - - public boolean containsKey(final Class clazz) { - return codecCache.containsKey(clazz); - } - - public void put(final Class clazz, final Codec codec){ - codecCache.put(clazz, Optional.of(codec)); - } - - @SuppressWarnings("unchecked") - public Codec getOrThrow(final Class clazz) { - if (codecCache.containsKey(clazz)) { - Optional> optionalCodec = codecCache.get(clazz); - if (!optionalCodec.isEmpty()) { - return (Codec) optionalCodec.get(); - } - } - throw new CodecConfigurationException(format("Can't find a codec for %s.", clazz)); - } -} diff --git a/bson/src/main/org/bson/codecs/configuration/CodecProvider.java b/bson/src/main/org/bson/codecs/configuration/CodecProvider.java index b667067d4ff..8f01c60b551 100644 --- a/bson/src/main/org/bson/codecs/configuration/CodecProvider.java +++ b/bson/src/main/org/bson/codecs/configuration/CodecProvider.java @@ -18,6 +18,10 @@ import org.bson.codecs.Codec; +import java.lang.reflect.Type; +import java.util.Collection; +import java.util.List; + /** * A provider of {@code Codec} instances. Typically, an instance of a class implementing this interface would be used to construct a * {@code CodecRegistry}. @@ -34,10 +38,35 @@ public interface CodecProvider { /** * Get a {@code Codec} using the given context, which includes, most importantly, the Class for which a {@code Codec} is required. * + *

    This method is called by the driver only if {@link #get(Class, List, CodecRegistry)} is not overridden, + * or is overridden such that it calls this method.

    + * * @param clazz the Class for which to get a Codec * @param registry the registry to use for resolving dependent Codec instances * @param the type of the class for which a Codec is required * @return the Codec instance, which may be null, if this source is unable to provide one for the requested Class */ Codec get(Class clazz, CodecRegistry registry); + + /** + * Get a {@code Codec} using the given context, which includes, most importantly, the Class for which a {@code Codec} is required. + * + *

    The default implementation delegates to {@link #get(Class, CodecRegistry)}, thus not propagating {@code typeArguments} + * when it uses the {@code registry}.

    + * + * @param clazz the Class for which to get a Codec + * @param typeArguments The type arguments for the {@code clazz}. The size of the list is either equal to the + * number of type parameters of the {@code clazz}, or is zero. + * For example, if {@code clazz} is {@link Collection}{@code .class}, then the size of {@code typeArguments} is one, + * since {@link Collection} has a single type parameter. + * The list may be {@linkplain List#isEmpty() empty} either because the {@code clazz} is not generic, + * or because another {@link CodecProvider} did not propagate {@code clazz}'s type arguments to the {@code registry} when using it. + * @param registry the registry to use for resolving dependent Codec instances + * @return the Codec instance, which may be null, if this source is unable to provide one for the requested Class + * @param the type of the class for which a Codec is required + * @since 4.10 + */ + default Codec get(Class clazz, List typeArguments, CodecRegistry registry) { + return get(clazz, registry); + } } diff --git a/bson/src/main/org/bson/codecs/configuration/CodecRegistries.java b/bson/src/main/org/bson/codecs/configuration/CodecRegistries.java index 611c5768372..87996dbb632 100644 --- a/bson/src/main/org/bson/codecs/configuration/CodecRegistries.java +++ b/bson/src/main/org/bson/codecs/configuration/CodecRegistries.java @@ -16,9 +16,10 @@ package org.bson.codecs.configuration; +import org.bson.UuidRepresentation; import org.bson.codecs.Codec; +import org.bson.internal.ProvidersCodecRegistry; -import java.util.ArrayList; import java.util.List; import static java.util.Arrays.asList; @@ -30,6 +31,18 @@ */ public final class CodecRegistries { + /** + * Apply given {@link UuidRepresentation} to the given {@link CodecRegistry}. 
+ * + * @param codecRegistry the code registry + * @param uuidRepresentation the uuid representation + * @return a {@code CodecRegistry} with the given {@code UuidRepresentation} applied to the given {@code CodecRegistry} + * @since 4.5 + */ + public static CodecRegistry withUuidRepresentation(final CodecRegistry codecRegistry, final UuidRepresentation uuidRepresentation) { + return fromProviders(new OverridableUuidRepresentationCodecProvider(codecRegistry, uuidRepresentation)); + } + /** * Creates a {@code CodecRegistry} from the provided list of {@code Codec} instances. * @@ -123,28 +136,7 @@ public static CodecRegistry fromRegistries(final CodecRegistry... registries) { * @return a {@code CodecRegistry} that combines the list of {@code CodecRegistry} instances into a single one */ public static CodecRegistry fromRegistries(final List registries) { - List providers = new ArrayList(); - for (CodecRegistry registry : registries) { - providers.add(providerFromRegistry(registry)); - } - return new ProvidersCodecRegistry(providers); - } - - private static CodecProvider providerFromRegistry(final CodecRegistry innerRegistry) { - if (innerRegistry instanceof CodecProvider) { - return (CodecProvider) innerRegistry; - } else { - return new CodecProvider() { - @Override - public Codec get(final Class clazz, final CodecRegistry outerRregistry) { - try { - return innerRegistry.get(clazz); - } catch (CodecConfigurationException e) { - return null; - } - } - }; - } + return new ProvidersCodecRegistry(registries); } private CodecRegistries() { diff --git a/bson/src/main/org/bson/codecs/configuration/CodecRegistry.java b/bson/src/main/org/bson/codecs/configuration/CodecRegistry.java index 0ccad168756..f77ad80068c 100644 --- a/bson/src/main/org/bson/codecs/configuration/CodecRegistry.java +++ b/bson/src/main/org/bson/codecs/configuration/CodecRegistry.java @@ -16,8 +16,12 @@ package org.bson.codecs.configuration; +import org.bson.assertions.Assertions; import 
org.bson.codecs.Codec; +import java.lang.reflect.Type; +import java.util.List; + /** * A registry of Codec instances searchable by the class that the Codec can encode and decode. * @@ -26,9 +30,15 @@ * {@code Object.equals}. It is not necessary to do so, and the simplest course of action is to rely on Object's implementation, but the * implementer may wish to implement a "value comparison" in place of the default "reference comparison."

    * + *

    As of the 4.0 release, this class extends the {@code CodecProvider} interface. This capability was introduced to enable nesting + * registries inside another registry.

    + * + *

    Applications are encouraged to NOT implement this interface, but rather use the factory methods in {@link CodecRegistries}.

    + * * @since 3.0 + * @see CodecRegistries */ -public interface CodecRegistry { +public interface CodecRegistry extends CodecProvider { /** * Gets a {@code Codec} for the given Class. * @@ -38,4 +48,25 @@ public interface CodecRegistry { * @throws CodecConfigurationException if the registry does not contain a codec for the given class. */ Codec get(Class clazz); + + /** + * Gets a Codec for the given parameterized class, after resolving any type variables with the given type arguments. + * + *

    + * The default behavior is to throw a {@link AssertionError}, as it's expected that {@code CodecRegistry} implementations are always + * provided by this library and will override the method appropriately. + *

    + * + * @param clazz the parameterized class + * @param typeArguments the type arguments to apply to the parameterized class. This list may be empty but not null. + * @param the class type + * @return a codec for the given class, with the given type parameters resolved + * @throws CodecConfigurationException if no codec can be found for the given class and type arguments. + * @throws AssertionError by default, if the implementation does not override this method, or if no codec can be found + * for the given class and type arguments. + * @since 4.8 + */ + default Codec get(Class clazz, List typeArguments) { + throw Assertions.fail("This method should have been overridden but was not."); + } } diff --git a/bson/src/main/org/bson/codecs/configuration/MapOfCodecsProvider.java b/bson/src/main/org/bson/codecs/configuration/MapOfCodecsProvider.java index e98ff3b7729..c8277a23942 100644 --- a/bson/src/main/org/bson/codecs/configuration/MapOfCodecsProvider.java +++ b/bson/src/main/org/bson/codecs/configuration/MapOfCodecsProvider.java @@ -23,7 +23,7 @@ import java.util.Map; final class MapOfCodecsProvider implements CodecProvider { - private final Map, Codec> codecsMap = new HashMap, Codec>(); + private final Map, Codec> codecsMap = new HashMap<>(); MapOfCodecsProvider(final List> codecsList) { for (Codec codec : codecsList) { @@ -37,4 +37,10 @@ public Codec get(final Class clazz, final CodecRegistry registry) { return (Codec) codecsMap.get(clazz); } + @Override + public String toString() { + return "MapOfCodecsProvider{" + + "codecsMap=" + codecsMap + + '}'; + } } diff --git a/bson/src/main/org/bson/codecs/configuration/Optional.java b/bson/src/main/org/bson/codecs/configuration/Optional.java deleted file mode 100644 index de6c7c60fec..00000000000 --- a/bson/src/main/org/bson/codecs/configuration/Optional.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.codecs.configuration; - -import java.util.NoSuchElementException; - - -abstract class Optional { - - private static final Optional NONE = new Optional() { - @Override - public Object get() { - throw new NoSuchElementException(".get call on None!"); - } - - @Override - public boolean isEmpty() { - return true; - } - }; - - @SuppressWarnings("unchecked") - public static Optional empty() { - return (Optional) NONE; - } - - @SuppressWarnings("unchecked") - public static Optional of(final T it) { - if (it == null) { - return (Optional) Optional.NONE; - } else { - return new Some(it); - } - } - - public abstract T get(); - - public abstract boolean isEmpty(); - - @Override - public String toString() { - return "None"; - } - - public boolean isDefined() { - return !isEmpty(); - } - - public static class Some extends Optional { - private final T value; - - Some(final T value) { - this.value = value; - } - - @Override - public T get() { - return value; - } - - @Override - public boolean isEmpty() { - return false; - } - - @Override - public String toString() { - return String.format("Some(%s)", value); - } - } -} diff --git a/bson/src/main/org/bson/codecs/configuration/OverridableUuidRepresentationCodecProvider.java b/bson/src/main/org/bson/codecs/configuration/OverridableUuidRepresentationCodecProvider.java new file mode 100644 index 00000000000..f46964fedd3 --- /dev/null +++ 
b/bson/src/main/org/bson/codecs/configuration/OverridableUuidRepresentationCodecProvider.java @@ -0,0 +1,86 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.configuration; + +import org.bson.UuidRepresentation; +import org.bson.codecs.Codec; +import org.bson.codecs.OverridableUuidRepresentationCodec; + +import java.lang.reflect.Type; +import java.util.Collections; +import java.util.List; + +import static org.bson.assertions.Assertions.notNull; + +final class OverridableUuidRepresentationCodecProvider implements CodecProvider { + + private final CodecProvider wrapped; + private final UuidRepresentation uuidRepresentation; + + OverridableUuidRepresentationCodecProvider(final CodecProvider wrapped, final UuidRepresentation uuidRepresentation) { + this.uuidRepresentation = notNull("uuidRepresentation", uuidRepresentation); + this.wrapped = notNull("wrapped", wrapped); + } + + @Override + public Codec get(final Class clazz, final CodecRegistry registry) { + return get(clazz, Collections.emptyList(), registry); + } + + @Override + public Codec get(final Class clazz, final List typeArguments, final CodecRegistry registry) { + Codec codec = wrapped.get(clazz, typeArguments, registry); + if (codec instanceof OverridableUuidRepresentationCodec) { + @SuppressWarnings("unchecked") + Codec codecWithUuidRepresentation = ((OverridableUuidRepresentationCodec) 
codec).withUuidRepresentation(uuidRepresentation); + codec = codecWithUuidRepresentation; + } + return codec; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + OverridableUuidRepresentationCodecProvider that = (OverridableUuidRepresentationCodecProvider) o; + + if (!wrapped.equals(that.wrapped)) { + return false; + } + return uuidRepresentation == that.uuidRepresentation; + } + + @Override + public int hashCode() { + int result = wrapped.hashCode(); + result = 31 * result + uuidRepresentation.hashCode(); + return result; + } + + @Override + public String toString() { + return "OverridableUuidRepresentationCodecRegistry{" + + "wrapped=" + wrapped + + ", uuidRepresentation=" + uuidRepresentation + + '}'; + } +} diff --git a/bson/src/main/org/bson/codecs/configuration/ProvidersCodecRegistry.java b/bson/src/main/org/bson/codecs/configuration/ProvidersCodecRegistry.java deleted file mode 100644 index 4a480fae821..00000000000 --- a/bson/src/main/org/bson/codecs/configuration/ProvidersCodecRegistry.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson.codecs.configuration; - -import org.bson.codecs.Codec; - -import java.util.ArrayList; -import java.util.List; - -import static org.bson.assertions.Assertions.isTrueArgument; - -final class ProvidersCodecRegistry implements CodecRegistry, CodecProvider { - private final List codecProviders; - private final CodecCache codecCache = new CodecCache(); - - ProvidersCodecRegistry(final List codecProviders) { - isTrueArgument("codecProviders must not be null or empty", codecProviders != null && codecProviders.size() > 0); - this.codecProviders = new ArrayList(codecProviders); - } - - @Override - public Codec get(final Class clazz) { - return get(new ChildCodecRegistry(this, clazz)); - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - public Codec get(final Class clazz, final CodecRegistry registry) { - for (CodecProvider provider : codecProviders) { - Codec codec = provider.get(clazz, registry); - if (codec != null) { - return codec; - } - } - return null; - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - Codec get(final ChildCodecRegistry context) { - if (!codecCache.containsKey(context.getCodecClass())) { - for (CodecProvider provider : codecProviders) { - Codec codec = provider.get(context.getCodecClass(), context); - if (codec != null) { - codecCache.put(context.getCodecClass(), codec); - return codec; - } - } - codecCache.put(context.getCodecClass(), null); - } - return codecCache.getOrThrow(context.getCodecClass()); - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - ProvidersCodecRegistry that = (ProvidersCodecRegistry) o; - if (codecProviders.size() != that.codecProviders.size()) { - return false; - } - for (int i = 0; i < codecProviders.size(); i++) { - if (codecProviders.get(i).getClass() != that.codecProviders.get(i).getClass()) { - return false; - } - } - return true; - } - - @Override - public int hashCode() 
{ - return codecProviders.hashCode(); - } -} diff --git a/bson/src/main/org/bson/codecs/jsr310/InstantCodec.java b/bson/src/main/org/bson/codecs/jsr310/InstantCodec.java index 79134024b7a..29eb1f8469d 100644 --- a/bson/src/main/org/bson/codecs/jsr310/InstantCodec.java +++ b/bson/src/main/org/bson/codecs/jsr310/InstantCodec.java @@ -31,9 +31,9 @@ * Instant Codec. * *

    - * Encodes and decodes {@code Instant} objects to and from {@code DateTime}. Data is stored to millisecond accuracy. + * Encodes and decodes {@code Instant} objects to and from {@code DateTime}. + * Data is extracted via {@link Instant#toEpochMilli()} and stored to millisecond accuracy. *

    - *

    Note: Requires Java 8 or greater.

    * * @mongodb.driver.manual reference/bson-types * @since 3.7 diff --git a/bson/src/main/org/bson/codecs/jsr310/Jsr310CodecProvider.java b/bson/src/main/org/bson/codecs/jsr310/Jsr310CodecProvider.java index 941fc15cfff..feea82df72a 100644 --- a/bson/src/main/org/bson/codecs/jsr310/Jsr310CodecProvider.java +++ b/bson/src/main/org/bson/codecs/jsr310/Jsr310CodecProvider.java @@ -35,22 +35,16 @@ *
  • {@link LocalDateTimeCodec} *
  • {@link LocalTimeCodec} * - *

    Requires Java 8 or greater.

    * * @since 3.7 */ public class Jsr310CodecProvider implements CodecProvider { - private static final Map, Codec> JSR310_CODEC_MAP = new HashMap, Codec>(); + private static final Map, Codec> JSR310_CODEC_MAP = new HashMap<>(); static { - try { - Class.forName("java.time.Instant"); // JSR-310 support canary test. - putCodec(new InstantCodec()); - putCodec(new LocalDateCodec()); - putCodec(new LocalDateTimeCodec()); - putCodec(new LocalTimeCodec()); - } catch (ClassNotFoundException e) { - // No JSR-310 support - } + putCodec(new InstantCodec()); + putCodec(new LocalDateCodec()); + putCodec(new LocalDateTimeCodec()); + putCodec(new LocalTimeCodec()); } private static void putCodec(final Codec codec) { @@ -62,4 +56,9 @@ private static void putCodec(final Codec codec) { public Codec get(final Class clazz, final CodecRegistry registry) { return (Codec) JSR310_CODEC_MAP.get(clazz); } + + @Override + public String toString() { + return "Jsr310CodecProvider{}"; + } } diff --git a/bson/src/main/org/bson/codecs/jsr310/LocalDateCodec.java b/bson/src/main/org/bson/codecs/jsr310/LocalDateCodec.java index 3b273df3455..0074945038a 100644 --- a/bson/src/main/org/bson/codecs/jsr310/LocalDateCodec.java +++ b/bson/src/main/org/bson/codecs/jsr310/LocalDateCodec.java @@ -35,7 +35,6 @@ * *

    Encodes and decodes {@code LocalDate} objects to and from {@code DateTime}.

    *

    Converts the {@code LocalDate} values to and from {@link ZoneOffset#UTC}.

    - *

    Note: Requires Java 8 or greater.

    * * @mongodb.driver.manual reference/bson-types * @since 3.7 diff --git a/bson/src/main/org/bson/codecs/jsr310/LocalDateTimeCodec.java b/bson/src/main/org/bson/codecs/jsr310/LocalDateTimeCodec.java index a1cf068aecf..0444fec4f38 100644 --- a/bson/src/main/org/bson/codecs/jsr310/LocalDateTimeCodec.java +++ b/bson/src/main/org/bson/codecs/jsr310/LocalDateTimeCodec.java @@ -34,7 +34,6 @@ * *

    Encodes and decodes {@code LocalDateTime} objects to and from {@code DateTime}. Data is stored to millisecond accuracy.

    *

    Converts the {@code LocalDateTime} values to and from {@link ZoneOffset#UTC}.

    - *

    Note: Requires Java 8 or greater.

    * * @mongodb.driver.manual reference/bson-types * @since 3.7 diff --git a/bson/src/main/org/bson/codecs/jsr310/LocalTimeCodec.java b/bson/src/main/org/bson/codecs/jsr310/LocalTimeCodec.java index 8aa7081b765..710e6ef6fcf 100644 --- a/bson/src/main/org/bson/codecs/jsr310/LocalTimeCodec.java +++ b/bson/src/main/org/bson/codecs/jsr310/LocalTimeCodec.java @@ -32,7 +32,6 @@ * *

    Encodes and decodes {@code LocalTime} objects to and from {@code DateTime}. Data is stored to millisecond accuracy.

    *

    Converts the {@code LocalTime} values to and from EpochDay at {@link ZoneOffset#UTC}.

    - *

    Note: Requires Java 8 or greater.

    * * @mongodb.driver.manual reference/bson-types * @since 3.7 diff --git a/bson/src/main/org/bson/codecs/pojo/AutomaticPojoCodec.java b/bson/src/main/org/bson/codecs/pojo/AutomaticPojoCodec.java index 3b001ee047c..921976a78e3 100644 --- a/bson/src/main/org/bson/codecs/pojo/AutomaticPojoCodec.java +++ b/bson/src/main/org/bson/codecs/pojo/AutomaticPojoCodec.java @@ -66,4 +66,9 @@ public Class getEncoderClass() { ClassModel getClassModel() { return pojoCodec.getClassModel(); } + + @Override + DiscriminatorLookup getDiscriminatorLookup() { + return pojoCodec.getDiscriminatorLookup(); + } } diff --git a/bson/src/main/org/bson/codecs/pojo/ClassModel.java b/bson/src/main/org/bson/codecs/pojo/ClassModel.java index a57aa0f5e0d..d47452ab578 100644 --- a/bson/src/main/org/bson/codecs/pojo/ClassModel.java +++ b/bson/src/main/org/bson/codecs/pojo/ClassModel.java @@ -16,8 +16,12 @@ package org.bson.codecs.pojo; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; /** * This model represents the metadata for a class and all its properties. 
@@ -33,23 +37,25 @@ public final class ClassModel { private final boolean discriminatorEnabled; private final String discriminatorKey; private final String discriminator; - private final PropertyModel idProperty; + private final IdPropertyModelHolder idPropertyModelHolder; private final List> propertyModels; private final Map propertyNameToTypeParameterMap; ClassModel(final Class clazz, final Map propertyNameToTypeParameterMap, final InstanceCreatorFactory instanceCreatorFactory, final Boolean discriminatorEnabled, final String discriminatorKey, - final String discriminator, final PropertyModel idProperty, final List> propertyModels) { + final String discriminator, final IdPropertyModelHolder idPropertyModelHolder, + final List> propertyModels) { this.name = clazz.getSimpleName(); this.type = clazz; this.hasTypeParameters = clazz.getTypeParameters().length > 0; - this.propertyNameToTypeParameterMap = propertyNameToTypeParameterMap; + this.propertyNameToTypeParameterMap = Collections.unmodifiableMap( + new HashMap<>(propertyNameToTypeParameterMap)); this.instanceCreatorFactory = instanceCreatorFactory; this.discriminatorEnabled = discriminatorEnabled; this.discriminatorKey = discriminatorKey; this.discriminator = discriminator; - this.idProperty = idProperty; - this.propertyModels = propertyModels; + this.idPropertyModelHolder = idPropertyModelHolder; + this.propertyModels = Collections.unmodifiableList(new ArrayList<>(propertyModels)); } /** @@ -60,7 +66,7 @@ public final class ClassModel { * @return a new Class Model builder instance using reflection on the {@code clazz}. */ public static ClassModelBuilder builder(final Class type) { - return new ClassModelBuilder(type); + return new ClassModelBuilder<>(type); } /** @@ -139,7 +145,11 @@ public List> getPropertyModels() { * @return the PropertyModel for the id */ public PropertyModel getIdPropertyModel() { - return idProperty; + return idPropertyModelHolder != null ? 
idPropertyModelHolder.getPropertyModel() : null; + } + + IdPropertyModelHolder getIdPropertyModelHolder() { + return idPropertyModelHolder; } /** @@ -185,7 +195,7 @@ public boolean equals(final Object o) { if (getDiscriminator() != null ? !getDiscriminator().equals(that.getDiscriminator()) : that.getDiscriminator() != null) { return false; } - if (idProperty != null ? !idProperty.equals(that.idProperty) : that.idProperty != null) { + if (!Objects.equals(idPropertyModelHolder, that.idPropertyModelHolder)) { return false; } if (!getPropertyModels().equals(that.getPropertyModels())) { @@ -205,7 +215,7 @@ public int hashCode() { result = 31 * result + (discriminatorEnabled ? 1 : 0); result = 31 * result + (getDiscriminatorKey() != null ? getDiscriminatorKey().hashCode() : 0); result = 31 * result + (getDiscriminator() != null ? getDiscriminator().hashCode() : 0); - result = 31 * result + (idProperty != null ? idProperty.hashCode() : 0); + result = 31 * result + (getIdPropertyModelHolder() != null ? 
getIdPropertyModelHolder().hashCode() : 0); result = 31 * result + getPropertyModels().hashCode(); result = 31 * result + getPropertyNameToTypeParameterMap().hashCode(); return result; diff --git a/bson/src/main/org/bson/codecs/pojo/ClassModelBuilder.java b/bson/src/main/org/bson/codecs/pojo/ClassModelBuilder.java index f3a0eb339ae..98e7c25c6c0 100644 --- a/bson/src/main/org/bson/codecs/pojo/ClassModelBuilder.java +++ b/bson/src/main/org/bson/codecs/pojo/ClassModelBuilder.java @@ -20,7 +20,6 @@ import java.lang.annotation.Annotation; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -44,7 +43,8 @@ */ public class ClassModelBuilder { static final String ID_PROPERTY_NAME = "_id"; - private final List> propertyModelBuilders = new ArrayList>(); + private final List> propertyModelBuilders = new ArrayList<>(); + private IdGenerator idGenerator; private InstanceCreatorFactory instanceCreatorFactory; private Class type; private Map propertyNameToTypeParameterMap = emptyMap(); @@ -59,6 +59,26 @@ public class ClassModelBuilder { configureClassModelBuilder(this, notNull("type", type)); } + /** + * Sets the IdGenerator for the ClassModel + * + * @param idGenerator the IdGenerator + * @return this + * @since 3.10 + */ + public ClassModelBuilder idGenerator(final IdGenerator idGenerator) { + this.idGenerator = idGenerator; + return this; + } + + /** + * @return the IdGenerator for the ClassModel, or null if not set + * @since 3.10 + */ + public IdGenerator getIdGenerator() { + return idGenerator; + } + /** * Sets the InstanceCreatorFactory for the ClassModel * @@ -235,7 +255,7 @@ public PropertyModelBuilder getProperty(final String propertyName) { * @return the properties on the modeled type */ public List> getPropertyModelBuilders() { - return Collections.unmodifiableList(propertyModelBuilders); + return unmodifiableList(propertyModelBuilders); } /** @@ -244,7 +264,7 @@ public List> 
getPropertyModelBuilders() { * @return the new instance */ public ClassModel build() { - List> propertyModels = new ArrayList>(); + List> propertyModels = new ArrayList<>(); PropertyModel idPropertyModel = null; stateNotNull("type", type); @@ -271,10 +291,8 @@ public ClassModel build() { } } validatePropertyModels(type.getSimpleName(), propertyModels); - - - return new ClassModel(type, propertyNameToTypeParameterMap, instanceCreatorFactory, discriminatorEnabled, discriminatorKey, - discriminator, idPropertyModel, unmodifiableList(propertyModels)); + return new ClassModel<>(type, propertyNameToTypeParameterMap, instanceCreatorFactory, discriminatorEnabled, discriminatorKey, + discriminator, IdPropertyModelHolder.create(type, idPropertyModel, idGenerator), unmodifiableList(propertyModels)); } @Override @@ -287,7 +305,7 @@ Map getPropertyNameToTypeParameterMap() { } ClassModelBuilder propertyNameToTypeParameterMap(final Map propertyNameToTypeParameterMap) { - this.propertyNameToTypeParameterMap = unmodifiableMap(new HashMap(propertyNameToTypeParameterMap)); + this.propertyNameToTypeParameterMap = unmodifiableMap(new HashMap<>(propertyNameToTypeParameterMap)); return this; } @@ -297,11 +315,14 @@ ClassModelBuilder addProperty(final PropertyModelBuilder propertyModelBuil } private void validatePropertyModels(final String declaringClass, final List> propertyModels) { - Map propertyNameMap = new HashMap(); - Map propertyReadNameMap = new HashMap(); - Map propertyWriteNameMap = new HashMap(); + Map propertyNameMap = new HashMap<>(); + Map propertyReadNameMap = new HashMap<>(); + Map propertyWriteNameMap = new HashMap<>(); for (PropertyModel propertyModel : propertyModels) { + if (propertyModel.hasError()) { + throw new CodecConfigurationException(propertyModel.getError()); + } checkForDuplicates("property", propertyModel.getName(), propertyNameMap, declaringClass); if (propertyModel.isReadable()) { checkForDuplicates("read property", propertyModel.getReadName(), 
propertyReadNameMap, declaringClass); diff --git a/bson/src/main/org/bson/codecs/pojo/CollectionPropertyCodecProvider.java b/bson/src/main/org/bson/codecs/pojo/CollectionPropertyCodecProvider.java index e7440cfeb68..abf5add374c 100644 --- a/bson/src/main/org/bson/codecs/pojo/CollectionPropertyCodecProvider.java +++ b/bson/src/main/org/bson/codecs/pojo/CollectionPropertyCodecProvider.java @@ -15,6 +15,8 @@ */ package org.bson.codecs.pojo; +import java.util.TreeSet; + import org.bson.BsonReader; import org.bson.BsonType; import org.bson.BsonWriter; @@ -86,9 +88,11 @@ public Class> getEncoderClass() { private Collection getInstance() { if (encoderClass.isInterface()) { if (encoderClass.isAssignableFrom(ArrayList.class)) { - return new ArrayList(); + return new ArrayList<>(); } else if (encoderClass.isAssignableFrom(HashSet.class)) { - return new HashSet(); + return new HashSet<>(); + } else if (encoderClass.isAssignableFrom(TreeSet.class)) { + return new TreeSet<>(); } else { throw new CodecConfigurationException(format("Unsupported Collection interface of %s!", encoderClass.getName())); } @@ -96,7 +100,7 @@ private Collection getInstance() { try { return encoderClass.getDeclaredConstructor().newInstance(); - } catch (final Exception e) { + } catch (Exception e) { throw new CodecConfigurationException(e.getMessage(), e); } } diff --git a/bson/src/main/org/bson/codecs/pojo/ConventionAnnotationImpl.java b/bson/src/main/org/bson/codecs/pojo/ConventionAnnotationImpl.java index 776b0767301..e9adcaa9024 100644 --- a/bson/src/main/org/bson/codecs/pojo/ConventionAnnotationImpl.java +++ b/bson/src/main/org/bson/codecs/pojo/ConventionAnnotationImpl.java @@ -16,13 +16,17 @@ package org.bson.codecs.pojo; - +import org.bson.BsonType; import org.bson.codecs.configuration.CodecConfigurationException; import org.bson.codecs.pojo.annotations.BsonCreator; import org.bson.codecs.pojo.annotations.BsonDiscriminator; +import org.bson.codecs.pojo.annotations.BsonExtraElements; import 
org.bson.codecs.pojo.annotations.BsonId; import org.bson.codecs.pojo.annotations.BsonIgnore; import org.bson.codecs.pojo.annotations.BsonProperty; +import org.bson.codecs.pojo.annotations.BsonRepresentation; +import org.bson.diagnostics.Logger; +import org.bson.diagnostics.Loggers; import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; @@ -30,6 +34,8 @@ import java.lang.reflect.Type; import java.util.ArrayList; import java.util.List; +import java.util.Objects; +import java.util.Map; import static java.lang.String.format; import static java.lang.reflect.Modifier.isPublic; @@ -38,6 +44,8 @@ final class ConventionAnnotationImpl implements Convention { + private static final Logger LOGGER = Loggers.getLogger("ConventionAnnotation"); + @Override public void apply(final ClassModelBuilder classModelBuilder) { for (final Annotation annotation : classModelBuilder.getAnnotations()) { @@ -85,6 +93,12 @@ private void processPropertyAnnotations(final ClassModelBuilder classModelBui classModelBuilder.idPropertyName(propertyModelBuilder.getName()); } else if (annotation instanceof BsonIgnore) { propertyModelBuilder.readName(null); + } else if (annotation instanceof BsonRepresentation) { + BsonRepresentation bsonRepresentation = (BsonRepresentation) annotation; + BsonType bsonRep = bsonRepresentation.value(); + propertyModelBuilder.bsonRepresentation(bsonRep); + } else if (annotation instanceof BsonExtraElements) { + processBsonExtraElementsAnnotation(propertyModelBuilder); } } @@ -111,7 +125,7 @@ private void processCreatorAnnotation(final ClassModelBuilder classModelB if (creatorExecutable != null) { throw new CodecConfigurationException("Found multiple constructors annotated with @BsonCreator"); } - creatorExecutable = new CreatorExecutable(clazz, (Constructor) constructor); + creatorExecutable = new CreatorExecutable<>(clazz, (Constructor) constructor); } } } @@ -131,7 +145,7 @@ private void processCreatorAnnotation(final ClassModelBuilder classModelB 
format("Invalid method annotated with @BsonCreator. Returns '%s', expected %s", method.getReturnType(), bsonCreatorClass)); } - creatorExecutable = new CreatorExecutable(clazz, method); + creatorExecutable = new CreatorExecutable<>(clazz, method); foundStaticBsonCreatorMethod = true; } } @@ -157,6 +171,12 @@ private void processCreatorAnnotation(final ClassModelBuilder classModelB PropertyModelBuilder propertyModelBuilder = null; if (isIdProperty) { + if (classModelBuilder.getIdPropertyName() == null) { + throw new CodecConfigurationException("A @BsonId annotation has been used with @BsonCreator " + + "but there is no known Id property.\n" + + "Please either use the @BsonProperty annotation in the creator or " + + "annotate the corresponding property in the class with the @BsonId."); + } propertyModelBuilder = classModelBuilder.getProperty(classModelBuilder.getIdPropertyName()); } else { BsonProperty bsonProperty = properties.get(i); @@ -195,7 +215,7 @@ private void processCreatorAnnotation(final ClassModelBuilder classModelB propertyModelBuilder.getWriteName(), propertyModelBuilder.getTypeData().getType(), parameterType)); } } - classModelBuilder.instanceCreatorFactory(new InstanceCreatorFactoryImpl(creatorExecutable)); + classModelBuilder.instanceCreatorFactory(new InstanceCreatorFactoryImpl<>(creatorExecutable)); } } @@ -213,21 +233,48 @@ private static void tryToExpandToGenericType(final Class parameterType, f private PropertyModelBuilder addCreatorPropertyToClassModelBuilder(final ClassModelBuilder classModelBuilder, final String name, final Class clazz) { - PropertyModelBuilder propertyModelBuilder = createPropertyModelBuilder(new PropertyMetadata(name, - classModelBuilder.getType().getSimpleName(), TypeData.builder(clazz).build())).readName(null).writeName(name); + PropertyModelBuilder propertyModelBuilder = createPropertyModelBuilder(new PropertyMetadata<>(name, + classModelBuilder.getType().getSimpleName(), 
TypeData.builder(clazz).build())).readName(null).writeName(name); classModelBuilder.addProperty(propertyModelBuilder); return propertyModelBuilder; } private void cleanPropertyBuilders(final ClassModelBuilder classModelBuilder) { - List propertiesToRemove = new ArrayList(); + List propertiesToRemove = new ArrayList<>(); for (PropertyModelBuilder propertyModelBuilder : classModelBuilder.getPropertyModelBuilders()) { if (!propertyModelBuilder.isReadable() && !propertyModelBuilder.isWritable()) { propertiesToRemove.add(propertyModelBuilder.getName()); } + if (classModelBuilder.useDiscriminator() && Objects.equals(classModelBuilder.getDiscriminatorKey(), propertyModelBuilder.getReadName())) { + propertiesToRemove.add(propertyModelBuilder.getName()); + LOGGER.warn( + format( + "Removed the property '%s' from the model because the discriminator has the same key", + classModelBuilder.getDiscriminatorKey() + ) + ); + } } for (String propertyName : propertiesToRemove) { classModelBuilder.removeProperty(propertyName); } } + + private void processBsonExtraElementsAnnotation(final PropertyModelBuilder propertyModelBuilder) { + PropertyAccessor propertyAccessor = propertyModelBuilder.getPropertyAccessor(); + if (!(propertyAccessor instanceof PropertyAccessorImpl)) { + throw new CodecConfigurationException(format("The @BsonExtraElements annotation is not compatible with " + + "propertyModelBuilder instances that have custom implementations of org.bson.codecs.pojo.PropertyAccessor: %s", + propertyModelBuilder.getPropertyAccessor().getClass().getName())); + } + + if (!Map.class.isAssignableFrom(propertyModelBuilder.getTypeData().getType())) { + throw new CodecConfigurationException(format("The @BsonExtraElements annotation is not compatible with " + + "propertyModelBuilder with the following type: %s. 
" + + "Please use a Document, BsonDocument or Map type.", + propertyModelBuilder.getTypeData())); + } + propertyModelBuilder.propertySerialization(new PropertyModelSerializationInlineImpl<>(propertyModelBuilder.getPropertySerialization())); + propertyModelBuilder.propertyAccessor(new FieldPropertyAccessor<>((PropertyAccessorImpl) propertyAccessor)); + } } diff --git a/bson/src/main/org/bson/codecs/pojo/ConventionObjectIdGeneratorsImpl.java b/bson/src/main/org/bson/codecs/pojo/ConventionObjectIdGeneratorsImpl.java new file mode 100644 index 00000000000..fe5be23ab5d --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/ConventionObjectIdGeneratorsImpl.java @@ -0,0 +1,41 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo; + +import org.bson.BsonObjectId; +import org.bson.BsonType; +import org.bson.types.ObjectId; + +final class ConventionObjectIdGeneratorsImpl implements Convention { + @Override + public void apply(final ClassModelBuilder classModelBuilder) { + if (classModelBuilder.getIdGenerator() == null && classModelBuilder.getIdPropertyName() != null) { + PropertyModelBuilder idProperty = classModelBuilder.getProperty(classModelBuilder.getIdPropertyName()); + if (idProperty != null) { + Class idType = idProperty.getTypeData().getType(); + if (classModelBuilder.getIdGenerator() == null && idType.equals(ObjectId.class)) { + classModelBuilder.idGenerator(IdGenerators.OBJECT_ID_GENERATOR); + } else if (classModelBuilder.getIdGenerator() == null && idType.equals(BsonObjectId.class)) { + classModelBuilder.idGenerator(IdGenerators.BSON_OBJECT_ID_GENERATOR); + } else if (classModelBuilder.getIdGenerator() == null && idType.equals(String.class) + && idProperty.getBsonRepresentation() == BsonType.OBJECT_ID) { + classModelBuilder.idGenerator(IdGenerators.STRING_ID_GENERATOR); + } + } + } + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/ConventionSetPrivateFieldImpl.java b/bson/src/main/org/bson/codecs/pojo/ConventionSetPrivateFieldImpl.java index 9da6a2c786c..74a28eb6c06 100644 --- a/bson/src/main/org/bson/codecs/pojo/ConventionSetPrivateFieldImpl.java +++ b/bson/src/main/org/bson/codecs/pojo/ConventionSetPrivateFieldImpl.java @@ -33,44 +33,15 @@ public void apply(final ClassModelBuilder classModelBuilder) { } PropertyAccessorImpl defaultAccessor = (PropertyAccessorImpl) propertyModelBuilder.getPropertyAccessor(); PropertyMetadata propertyMetaData = defaultAccessor.getPropertyMetadata(); - if (!propertyMetaData.isDeserializable() && isPrivate(propertyMetaData.getField().getModifiers())) { + if (!propertyMetaData.isDeserializable() && propertyMetaData.getField() != null + && isPrivate(propertyMetaData.getField().getModifiers())) { 
setPropertyAccessor(propertyModelBuilder); } } } - @SuppressWarnings("unchecked") private void setPropertyAccessor(final PropertyModelBuilder propertyModelBuilder) { - propertyModelBuilder.propertyAccessor(new PrivateProperyAccessor( - (PropertyAccessorImpl) propertyModelBuilder.getPropertyAccessor())); + propertyModelBuilder.propertyAccessor(new FieldPropertyAccessor<>((PropertyAccessorImpl) propertyModelBuilder.getPropertyAccessor())); } - private static final class PrivateProperyAccessor implements PropertyAccessor { - private final PropertyAccessorImpl wrapped; - - private PrivateProperyAccessor(final PropertyAccessorImpl wrapped) { - this.wrapped = wrapped; - try { - wrapped.getPropertyMetadata().getField().setAccessible(true); - } catch (Exception e) { - throw new CodecConfigurationException(format("Unable to make private field accessible '%s' in %s", - wrapped.getPropertyMetadata().getName(), wrapped.getPropertyMetadata().getDeclaringClassName()), e); - } - } - - @Override - public T get(final S instance) { - return wrapped.get(instance); - } - - @Override - public void set(final S instance, final T value) { - try { - wrapped.getPropertyMetadata().getField().set(instance, value); - } catch (Exception e) { - throw new CodecConfigurationException(format("Unable to set value for property '%s' in %s", - wrapped.getPropertyMetadata().getName(), wrapped.getPropertyMetadata().getDeclaringClassName()), e); - } - } - } } diff --git a/bson/src/main/org/bson/codecs/pojo/ConventionUseGettersAsSettersImpl.java b/bson/src/main/org/bson/codecs/pojo/ConventionUseGettersAsSettersImpl.java index 8dc1c9ceca4..7cc677cf96b 100644 --- a/bson/src/main/org/bson/codecs/pojo/ConventionUseGettersAsSettersImpl.java +++ b/bson/src/main/org/bson/codecs/pojo/ConventionUseGettersAsSettersImpl.java @@ -46,17 +46,16 @@ private boolean isMapOrCollection(final Class clazz) { return Collection.class.isAssignableFrom(clazz) || Map.class.isAssignableFrom(clazz); } - @SuppressWarnings("unchecked") 
private void setPropertyAccessor(final PropertyModelBuilder propertyModelBuilder) { - propertyModelBuilder.propertyAccessor(new PrivateProperyAccessor( + propertyModelBuilder.propertyAccessor(new PrivatePropertyAccessor<>( (PropertyAccessorImpl) propertyModelBuilder.getPropertyAccessor())); } @SuppressWarnings({"rawtypes", "unchecked"}) - private static final class PrivateProperyAccessor implements PropertyAccessor { + private static final class PrivatePropertyAccessor implements PropertyAccessor { private final PropertyAccessorImpl wrapped; - private PrivateProperyAccessor(final PropertyAccessorImpl wrapped) { + private PrivatePropertyAccessor(final PropertyAccessorImpl wrapped) { this.wrapped = wrapped; } diff --git a/bson/src/main/org/bson/codecs/pojo/Conventions.java b/bson/src/main/org/bson/codecs/pojo/Conventions.java index 64fdf15f454..0f54c13815d 100644 --- a/bson/src/main/org/bson/codecs/pojo/Conventions.java +++ b/bson/src/main/org/bson/codecs/pojo/Conventions.java @@ -69,18 +69,26 @@ public final class Conventions { */ public static final Convention USE_GETTERS_FOR_SETTERS = new ConventionUseGettersAsSettersImpl(); + + /** + * A convention that sets the IdGenerator if the id property is either a {@link org.bson.types.ObjectId} or + * {@link org.bson.BsonObjectId}. 
+ * + * @since 3.10 + */ + public static final Convention OBJECT_ID_GENERATORS = new ConventionObjectIdGeneratorsImpl(); + /** * The default conventions list */ public static final List DEFAULT_CONVENTIONS = - unmodifiableList(asList(CLASS_AND_PROPERTY_CONVENTION, ANNOTATION_CONVENTION)); + unmodifiableList(asList(CLASS_AND_PROPERTY_CONVENTION, ANNOTATION_CONVENTION, OBJECT_ID_GENERATORS)); /** * An empty conventions list */ public static final List NO_CONVENTIONS = Collections.emptyList(); - private Conventions() { } } diff --git a/bson/src/main/org/bson/codecs/pojo/CreatorExecutable.java b/bson/src/main/org/bson/codecs/pojo/CreatorExecutable.java index c212dd2e000..d8c13f125a9 100644 --- a/bson/src/main/org/bson/codecs/pojo/CreatorExecutable.java +++ b/bson/src/main/org/bson/codecs/pojo/CreatorExecutable.java @@ -34,10 +34,10 @@ final class CreatorExecutable { private final Class clazz; private final Constructor constructor; private final Method method; - private final List properties = new ArrayList(); + private final List properties = new ArrayList<>(); private final Integer idPropertyIndex; - private final List> parameterTypes = new ArrayList>(); - private final List parameterGenericTypes = new ArrayList(); + private final List> parameterTypes = new ArrayList<>(); + private final List parameterGenericTypes = new ArrayList<>(); CreatorExecutable(final Class clazz, final Constructor constructor) { this(clazz, constructor, null); @@ -137,7 +137,9 @@ CodecConfigurationException getError(final Class clazz, final String msg) { private void checkHasAnExecutable() { if (constructor == null && method == null) { - throw new CodecConfigurationException(format("Cannot find a public constructor for '%s'.", clazz.getSimpleName())); + throw new CodecConfigurationException(format("Cannot find a public constructor for '%s'. 
Please ensure " + + "the class has a public, empty constructor with no arguments, or else a constructor with a " + + "BsonCreator annotation", clazz.getSimpleName())); } } diff --git a/bson/src/main/org/bson/codecs/pojo/DiscriminatorLookup.java b/bson/src/main/org/bson/codecs/pojo/DiscriminatorLookup.java index 18fb59d8fef..084eb75b6bc 100644 --- a/bson/src/main/org/bson/codecs/pojo/DiscriminatorLookup.java +++ b/bson/src/main/org/bson/codecs/pojo/DiscriminatorLookup.java @@ -25,7 +25,7 @@ import static java.lang.String.format; final class DiscriminatorLookup { - private final Map> discriminatorClassMap = new ConcurrentHashMap>(); + private final Map> discriminatorClassMap = new ConcurrentHashMap<>(); private final Set packages; DiscriminatorLookup(final Map, ClassModel> classModels, final Set packages) { @@ -65,7 +65,7 @@ private Class getClassForName(final String discriminator) { Class clazz = null; try { clazz = Class.forName(discriminator); - } catch (final ClassNotFoundException e) { + } catch (ClassNotFoundException e) { // Ignore } return clazz; diff --git a/bson/src/main/org/bson/codecs/pojo/Either.java b/bson/src/main/org/bson/codecs/pojo/Either.java new file mode 100644 index 00000000000..1ad37e4ceef --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/Either.java @@ -0,0 +1,81 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo; + +import java.util.Objects; +import java.util.function.Consumer; +import java.util.function.Function; + +import static org.bson.assertions.Assertions.notNull; + +final class Either { + + public static Either left(final L value) { + return new Either<>(notNull("value", value), null); + } + + public static Either right(final R value) { + return new Either<>(null, notNull("value", value)); + } + + private final L left; + private final R right; + + private Either(final L l, final R r) { + left = l; + right = r; + } + + public T map(final Function lFunc, final Function rFunc) { + return left != null ? lFunc.apply(left) : rFunc.apply(right); + } + + public void apply(final Consumer lFunc, final Consumer rFunc) { + if (left != null){ + lFunc.accept(left); + } + if (right != null){ + rFunc.accept(right); + } + } + + @Override + public String toString() { + return "Either{" + + "left=" + left + + ", right=" + right + + '}'; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Either either = (Either) o; + return Objects.equals(left, either.left) && Objects.equals(right, either.right); + } + + @Override + public int hashCode() { + return Objects.hash(left, right); + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/EnumPropertyCodecProvider.java b/bson/src/main/org/bson/codecs/pojo/EnumPropertyCodecProvider.java index fba5f2c99ff..474efeb9fe6 100644 --- a/bson/src/main/org/bson/codecs/pojo/EnumPropertyCodecProvider.java +++ b/bson/src/main/org/bson/codecs/pojo/EnumPropertyCodecProvider.java @@ -16,11 +16,8 @@ package org.bson.codecs.pojo; -import org.bson.BsonReader; -import org.bson.BsonWriter; import org.bson.codecs.Codec; -import org.bson.codecs.DecoderContext; -import org.bson.codecs.EncoderContext; +import org.bson.codecs.EnumCodec; import org.bson.codecs.configuration.CodecConfigurationException; import 
org.bson.codecs.configuration.CodecRegistry; @@ -45,28 +42,4 @@ public Codec get(final TypeWithTypeParameters type, final PropertyCode } return null; } - - private static class EnumCodec> implements Codec { - private final Class clazz; - - EnumCodec(final Class clazz) { - this.clazz = clazz; - } - - @Override - public void encode(final BsonWriter writer, final T value, final EncoderContext encoderContext) { - writer.writeString(value.name()); - } - - @Override - public Class getEncoderClass() { - return clazz; - } - - @Override - public T decode(final BsonReader reader, final DecoderContext decoderContext) { - return Enum.valueOf(clazz, reader.readString()); - } - } - } diff --git a/bson/src/main/org/bson/codecs/pojo/FieldPropertyAccessor.java b/bson/src/main/org/bson/codecs/pojo/FieldPropertyAccessor.java new file mode 100644 index 00000000000..61c82fd641c --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/FieldPropertyAccessor.java @@ -0,0 +1,49 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.pojo; + +import org.bson.codecs.configuration.CodecConfigurationException; + +import static java.lang.String.format; + +final class FieldPropertyAccessor implements PropertyAccessor { + private final PropertyAccessorImpl wrapped; + + FieldPropertyAccessor(final PropertyAccessorImpl wrapped) { + this.wrapped = wrapped; + try { + wrapped.getPropertyMetadata().getField().setAccessible(true); + } catch (Exception e) { + throw new CodecConfigurationException(format("Unable to make field accessible '%s' in %s", + wrapped.getPropertyMetadata().getName(), wrapped.getPropertyMetadata().getDeclaringClassName()), e); + } + } + + @Override + public T get(final S instance) { + return wrapped.get(instance); + } + + @Override + public void set(final S instance, final T value) { + try { + wrapped.getPropertyMetadata().getField().set(instance, value); + } catch (Exception e) { + throw new CodecConfigurationException(format("Unable to set value for property '%s' in %s", + wrapped.getPropertyMetadata().getName(), wrapped.getPropertyMetadata().getDeclaringClassName()), e); + } + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/IdGenerator.java b/bson/src/main/org/bson/codecs/pojo/IdGenerator.java new file mode 100644 index 00000000000..e794c8841ef --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/IdGenerator.java @@ -0,0 +1,37 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo; + +/** + * Classes that implement this interface define a way to create Ids for Pojo's. + * + * @param the type of the id value. + * @since 3.10 + */ +public interface IdGenerator { + /** + * Generates an id for a Pojo. + * + * @return the generated id value + */ + T generate(); + + /** + * @return the type of the generated id. + */ + Class getType(); +} diff --git a/bson/src/main/org/bson/codecs/pojo/IdGenerators.java b/bson/src/main/org/bson/codecs/pojo/IdGenerators.java new file mode 100644 index 00000000000..fdb023995db --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/IdGenerators.java @@ -0,0 +1,79 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo; + +import org.bson.BsonObjectId; +import org.bson.types.ObjectId; + +/** + * The default IdGenerators + * + * @see IdGenerator + * @since 3.10 + */ +public final class IdGenerators { + + /** + * A IdGenerator for {@code ObjectId} + */ + public static final IdGenerator OBJECT_ID_GENERATOR = new IdGenerator() { + + @Override + public ObjectId generate() { + return new ObjectId(); + } + + @Override + public Class getType() { + return ObjectId.class; + } + }; + + /** + * A IdGenerator for {@code BsonObjectId} + */ + public static final IdGenerator BSON_OBJECT_ID_GENERATOR = new IdGenerator() { + + @Override + public BsonObjectId generate() { + return new BsonObjectId(); + } + + @Override + public Class getType() { + return BsonObjectId.class; + } + }; + + /** + * A IdGenerator for {@code String} + */ + public static final IdGenerator STRING_ID_GENERATOR = new IdGenerator() { + @Override + public String generate() { + return OBJECT_ID_GENERATOR.generate().toHexString(); + } + + @Override + public Class getType() { + return String.class; + } + }; + + private IdGenerators(){ + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/IdPropertyModelHolder.java b/bson/src/main/org/bson/codecs/pojo/IdPropertyModelHolder.java new file mode 100644 index 00000000000..54a6a1a3341 --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/IdPropertyModelHolder.java @@ -0,0 +1,81 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo; + +import org.bson.codecs.configuration.CodecConfigurationException; + +import java.util.Objects; + +import static java.lang.String.format; + +final class IdPropertyModelHolder { + private final PropertyModel propertyModel; + private final IdGenerator idGenerator; + + static IdPropertyModelHolder create(final ClassModel classModel, final PropertyModel idPropertyModel) { + return create(classModel.getType(), idPropertyModel, classModel.getIdPropertyModelHolder().getIdGenerator()); + } + + @SuppressWarnings("unchecked") + static IdPropertyModelHolder create(final Class type, final PropertyModel idProperty, + final IdGenerator idGenerator) { + if (idProperty == null && idGenerator != null) { + throw new CodecConfigurationException(format("Invalid IdGenerator. There is no IdProperty set for: %s", type)); + } else if (idGenerator != null && !idProperty.getTypeData().getType().isAssignableFrom(idGenerator.getType())) { + throw new CodecConfigurationException(format("Invalid IdGenerator. 
Mismatching types, the IdProperty type is: %s but" + + " the IdGenerator type is: %s", idProperty.getTypeData().getType(), idGenerator.getType())); + } + return new IdPropertyModelHolder<>(idProperty, (IdGenerator) idGenerator); + } + + private IdPropertyModelHolder(final PropertyModel propertyModel, final IdGenerator idGenerator) { + this.propertyModel = propertyModel; + this.idGenerator = idGenerator; + } + + PropertyModel getPropertyModel() { + return propertyModel; + } + + IdGenerator getIdGenerator() { + return idGenerator; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + IdPropertyModelHolder that = (IdPropertyModelHolder) o; + + if (!Objects.equals(propertyModel, that.propertyModel)) { + return false; + } + return Objects.equals(idGenerator, that.idGenerator); + } + + @Override + public int hashCode() { + int result = propertyModel != null ? propertyModel.hashCode() : 0; + result = 31 * result + (idGenerator != null ? 
idGenerator.hashCode() : 0); + return result; + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/InstanceCreatorFactoryImpl.java b/bson/src/main/org/bson/codecs/pojo/InstanceCreatorFactoryImpl.java index 294e7f85142..49aa2f82766 100644 --- a/bson/src/main/org/bson/codecs/pojo/InstanceCreatorFactoryImpl.java +++ b/bson/src/main/org/bson/codecs/pojo/InstanceCreatorFactoryImpl.java @@ -25,6 +25,6 @@ final class InstanceCreatorFactoryImpl implements InstanceCreatorFactory { @Override public InstanceCreator create() { - return new InstanceCreatorImpl(creatorExecutable); + return new InstanceCreatorImpl<>(creatorExecutable); } } diff --git a/bson/src/main/org/bson/codecs/pojo/InstanceCreatorImpl.java b/bson/src/main/org/bson/codecs/pojo/InstanceCreatorImpl.java index 04fc86bd1f6..7f3ad9e818b 100644 --- a/bson/src/main/org/bson/codecs/pojo/InstanceCreatorImpl.java +++ b/bson/src/main/org/bson/codecs/pojo/InstanceCreatorImpl.java @@ -39,8 +39,8 @@ final class InstanceCreatorImpl implements InstanceCreator { this.params = null; this.newInstance = creatorExecutable.getInstance(); } else { - this.cachedValues = new HashMap, Object>(); - this.properties = new HashMap(); + this.cachedValues = new HashMap<>(); + this.properties = new HashMap<>(); for (int i = 0; i < creatorExecutable.getProperties().size(); i++) { if (creatorExecutable.getIdPropertyIndex() != null && creatorExecutable.getIdPropertyIndex() == i) { diff --git a/bson/src/main/org/bson/codecs/pojo/LazyMissingCodec.java b/bson/src/main/org/bson/codecs/pojo/LazyMissingCodec.java new file mode 100644 index 00000000000..b5e24292be1 --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/LazyMissingCodec.java @@ -0,0 +1,50 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo; + +import org.bson.BsonReader; +import org.bson.BsonWriter; +import org.bson.codecs.Codec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.EncoderContext; +import org.bson.codecs.configuration.CodecConfigurationException; + + +class LazyMissingCodec implements Codec { + private final Class clazz; + private final CodecConfigurationException exception; + + LazyMissingCodec(final Class clazz, final CodecConfigurationException exception) { + this.clazz = clazz; + this.exception = exception; + } + + @Override + public S decode(final BsonReader reader, final DecoderContext decoderContext) { + throw exception; + } + + @Override + public void encode(final BsonWriter writer, final S value, final EncoderContext encoderContext) { + throw exception; + } + + @Override + public Class getEncoderClass() { + return clazz; + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/LazyPojoCodec.java b/bson/src/main/org/bson/codecs/pojo/LazyPojoCodec.java deleted file mode 100644 index ec3d9469b39..00000000000 --- a/bson/src/main/org/bson/codecs/pojo/LazyPojoCodec.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.codecs.pojo; - -import org.bson.BsonReader; -import org.bson.BsonWriter; -import org.bson.codecs.Codec; -import org.bson.codecs.DecoderContext; -import org.bson.codecs.EncoderContext; -import org.bson.codecs.configuration.CodecRegistry; - -import java.util.concurrent.ConcurrentMap; - -class LazyPojoCodec extends PojoCodec { - private final ClassModel classModel; - private final CodecRegistry registry; - private final PropertyCodecRegistry propertyCodecRegistry; - private final DiscriminatorLookup discriminatorLookup; - private final ConcurrentMap, Codec> codecCache; - private volatile PojoCodecImpl pojoCodec; - - LazyPojoCodec(final ClassModel classModel, final CodecRegistry registry, final PropertyCodecRegistry propertyCodecRegistry, - final DiscriminatorLookup discriminatorLookup, final ConcurrentMap, Codec> codecCache) { - this.classModel = classModel; - this.registry = registry; - this.propertyCodecRegistry = propertyCodecRegistry; - this.discriminatorLookup = discriminatorLookup; - this.codecCache = codecCache; - } - - @Override - public void encode(final BsonWriter writer, final T value, final EncoderContext encoderContext) { - getPojoCodec().encode(writer, value, encoderContext); - } - - @Override - public Class getEncoderClass() { - return classModel.getType(); - } - - @Override - public T decode(final BsonReader reader, final DecoderContext decoderContext) { - return getPojoCodec().decode(reader, decoderContext); - } - - private Codec getPojoCodec() { - if (pojoCodec == null) { - pojoCodec = new 
PojoCodecImpl(classModel, registry, propertyCodecRegistry, discriminatorLookup, codecCache, true); - } - return pojoCodec; - } - - @Override - ClassModel getClassModel() { - return classModel; - } -} diff --git a/bson/src/main/org/bson/codecs/pojo/LazyPropertyModelCodec.java b/bson/src/main/org/bson/codecs/pojo/LazyPropertyModelCodec.java new file mode 100644 index 00000000000..24537ce1d8e --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/LazyPropertyModelCodec.java @@ -0,0 +1,227 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.codecs.pojo; + +import org.bson.BsonReader; +import org.bson.BsonType; +import org.bson.BsonWriter; +import org.bson.codecs.Codec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.EncoderContext; +import org.bson.codecs.RepresentationConfigurable; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecRegistry; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import static java.lang.String.format; +import static org.bson.codecs.pojo.PojoSpecializationHelper.specializeTypeData; + +class LazyPropertyModelCodec implements Codec { + private final PropertyModel propertyModel; + private final CodecRegistry registry; + private final PropertyCodecRegistry propertyCodecRegistry; + private final Lock codecLock = new ReentrantLock(); + private volatile Codec codec; + + LazyPropertyModelCodec(final PropertyModel propertyModel, final CodecRegistry registry, + final PropertyCodecRegistry propertyCodecRegistry) { + this.propertyModel = propertyModel; + this.registry = registry; + this.propertyCodecRegistry = propertyCodecRegistry; + } + + @Override + public T decode(final BsonReader reader, final DecoderContext decoderContext) { + return getPropertyModelCodec().decode(reader, decoderContext); + } + + @Override + public void encode(final BsonWriter writer, final T value, final EncoderContext encoderContext) { + getPropertyModelCodec().encode(writer, value, encoderContext); + } + + @Override + public Class getEncoderClass() { + return propertyModel.getTypeData().getType(); + } + + private Codec getPropertyModelCodec() { + Codec codec = this.codec; + if (codec == null) { + codecLock.lock(); + try { + codec = this.codec; + if (codec == null) { + codec = createCodec(); + this.codec = codec; + } + } finally { + codecLock.unlock(); + } + } + return codec; + } + + private Codec createCodec() { + 
Codec localCodec = getCodecFromPropertyRegistry(propertyModel); + if (localCodec instanceof PojoCodec) { + PojoCodec pojoCodec = (PojoCodec) localCodec; + ClassModel specialized = getSpecializedClassModel(pojoCodec.getClassModel(), propertyModel); + localCodec = new PojoCodecImpl<>(specialized, registry, propertyCodecRegistry, pojoCodec.getDiscriminatorLookup()); + } + return localCodec; + } + + @SuppressWarnings("unchecked") + private Codec getCodecFromPropertyRegistry(final PropertyModel propertyModel) { + Codec localCodec; + try { + localCodec = propertyCodecRegistry.get(propertyModel.getTypeData()); + } catch (CodecConfigurationException e) { + return new LazyMissingCodec<>(propertyModel.getTypeData().getType(), e); + } + if (localCodec == null) { + localCodec = new LazyMissingCodec<>(propertyModel.getTypeData().getType(), + new CodecConfigurationException("Unexpected missing codec for: " + propertyModel.getName())); + } + BsonType representation = propertyModel.getBsonRepresentation(); + if (representation != null) { + if (localCodec instanceof RepresentationConfigurable) { + return ((RepresentationConfigurable) localCodec).withRepresentation(representation); + } + throw new CodecConfigurationException("Codec must implement RepresentationConfigurable to support BsonRepresentation"); + } + return localCodec; + } + + private ClassModel getSpecializedClassModel(final ClassModel clazzModel, final PropertyModel propertyModel) { + boolean useDiscriminator = propertyModel.useDiscriminator() == null ? 
clazzModel.useDiscriminator() + : propertyModel.useDiscriminator(); + boolean validDiscriminator = clazzModel.getDiscriminatorKey() != null && clazzModel.getDiscriminator() != null; + boolean changeTheDiscriminator = (useDiscriminator != clazzModel.useDiscriminator()) && validDiscriminator; + + if (propertyModel.getTypeData().getTypeParameters().isEmpty() && !changeTheDiscriminator){ + return clazzModel; + } + + ArrayList> concretePropertyModels = new ArrayList<>(clazzModel.getPropertyModels()); + PropertyModel concreteIdProperty = clazzModel.getIdPropertyModel(); + + List> propertyTypeParameters = propertyModel.getTypeData().getTypeParameters(); + for (int i = 0; i < concretePropertyModels.size(); i++) { + PropertyModel model = concretePropertyModels.get(i); + String propertyName = model.getName(); + TypeParameterMap typeParameterMap = clazzModel.getPropertyNameToTypeParameterMap().get(propertyName); + if (typeParameterMap.hasTypeParameters()) { + PropertyModel concretePropertyModel = getSpecializedPropertyModel(model, propertyTypeParameters, typeParameterMap); + concretePropertyModels.set(i, concretePropertyModel); + if (concreteIdProperty != null && concreteIdProperty.getName().equals(propertyName)) { + concreteIdProperty = concretePropertyModel; + } + } + } + + boolean discriminatorEnabled = changeTheDiscriminator ? 
propertyModel.useDiscriminator() : clazzModel.useDiscriminator(); + return new ClassModel<>(clazzModel.getType(), clazzModel.getPropertyNameToTypeParameterMap(), + clazzModel.getInstanceCreatorFactory(), discriminatorEnabled, clazzModel.getDiscriminatorKey(), + clazzModel.getDiscriminator(), IdPropertyModelHolder.create(clazzModel, concreteIdProperty), concretePropertyModels); + } + + private PropertyModel getSpecializedPropertyModel(final PropertyModel propertyModel, + final List> propertyTypeParameters, + final TypeParameterMap typeParameterMap) { + TypeData specializedPropertyType = specializeTypeData(propertyModel.getTypeData(), propertyTypeParameters, typeParameterMap); + if (propertyModel.getTypeData().equals(specializedPropertyType)) { + return propertyModel; + } + + return new PropertyModel<>(propertyModel.getName(), propertyModel.getReadName(), propertyModel.getWriteName(), + specializedPropertyType, null, propertyModel.getPropertySerialization(), propertyModel.useDiscriminator(), + propertyModel.getPropertyAccessor(), propertyModel.getError(), propertyModel.getBsonRepresentation()); + } + + /** + * Instances of this codec are supposed to be replaced with usable implementations by {@link LazyPropertyModelCodec#createCodec()}. 
+ */ + static final class NeedSpecializationCodec extends PojoCodec { + private final ClassModel classModel; + private final DiscriminatorLookup discriminatorLookup; + private final CodecRegistry codecRegistry; + + NeedSpecializationCodec(final ClassModel classModel, final DiscriminatorLookup discriminatorLookup, final CodecRegistry codecRegistry) { + this.classModel = classModel; + this.discriminatorLookup = discriminatorLookup; + this.codecRegistry = codecRegistry; + } + + @Override + public void encode(final BsonWriter writer, final T value, final EncoderContext encoderContext) { + if (value.getClass().equals(classModel.getType())) { + throw exception(); + } + tryEncode(codecRegistry.get(value.getClass()), writer, value, encoderContext); + } + + @Override + public T decode(final BsonReader reader, final DecoderContext decoderContext) { + return tryDecode(reader, decoderContext); + } + + @SuppressWarnings("unchecked") + private void tryEncode(final Codec codec, final BsonWriter writer, final T value, final EncoderContext encoderContext) { + try { + codec.encode(writer, (A) value, encoderContext); + } catch (Exception e) { + throw exception(); + } + } + + @SuppressWarnings("unchecked") + public T tryDecode(final BsonReader reader, final DecoderContext decoderContext) { + Codec codec = PojoCodecImpl.getCodecFromDocument(reader, classModel.useDiscriminator(), classModel.getDiscriminatorKey(), + codecRegistry, discriminatorLookup, null, classModel.getName()); + if (codec != null) { + return codec.decode(reader, decoderContext); + } + + throw exception(); + } + + @Override + public Class getEncoderClass() { + return classModel.getType(); + } + + private CodecConfigurationException exception() { + return new CodecConfigurationException(format("%s contains generic types that have not been specialised.%n" + + "Top level classes with generic types are not supported by the PojoCodec.", classModel.getName())); + } + + @Override + ClassModel getClassModel() { + return 
classModel; + } + + @Override + DiscriminatorLookup getDiscriminatorLookup() { + return discriminatorLookup; + } + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/MapPropertyCodecProvider.java b/bson/src/main/org/bson/codecs/pojo/MapPropertyCodecProvider.java index 50e665b3974..3bbfc871390 100644 --- a/bson/src/main/org/bson/codecs/pojo/MapPropertyCodecProvider.java +++ b/bson/src/main/org/bson/codecs/pojo/MapPropertyCodecProvider.java @@ -103,11 +103,11 @@ public Class> getEncoderClass() { private Map getInstance() { if (encoderClass.isInterface()) { - return new HashMap(); + return new HashMap<>(); } try { return encoderClass.getDeclaredConstructor().newInstance(); - } catch (final Exception e) { + } catch (Exception e) { throw new CodecConfigurationException(e.getMessage(), e); } } diff --git a/bson/src/main/org/bson/codecs/pojo/PojoBuilderHelper.java b/bson/src/main/org/bson/codecs/pojo/PojoBuilderHelper.java index f2ac780e78e..9a0f5e69d0e 100644 --- a/bson/src/main/org/bson/codecs/pojo/PojoBuilderHelper.java +++ b/bson/src/main/org/bson/codecs/pojo/PojoBuilderHelper.java @@ -16,8 +16,6 @@ package org.bson.codecs.pojo; -import org.bson.codecs.configuration.CodecConfigurationException; - import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; import java.lang.reflect.Field; @@ -27,6 +25,7 @@ import java.lang.reflect.TypeVariable; import java.util.ArrayList; import java.util.HashMap; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -38,26 +37,28 @@ import static java.util.Arrays.asList; import static java.util.Collections.reverse; import static org.bson.assertions.Assertions.notNull; -import static org.bson.codecs.pojo.PropertyReflectionUtils.isGetter; +import static org.bson.codecs.pojo.PojoSpecializationHelper.specializeTypeData; import static org.bson.codecs.pojo.PropertyReflectionUtils.getPropertyMethods; +import static org.bson.codecs.pojo.PropertyReflectionUtils.isGetter; 
import static org.bson.codecs.pojo.PropertyReflectionUtils.toPropertyName; final class PojoBuilderHelper { + @SuppressWarnings("unchecked") static void configureClassModelBuilder(final ClassModelBuilder classModelBuilder, final Class clazz) { classModelBuilder.type(notNull("clazz", clazz)); - ArrayList annotations = new ArrayList(); - Set propertyNames = new TreeSet(); - Map propertyTypeParameterMap = new HashMap(); - Class currentClass = clazz; + ArrayList annotations = new ArrayList<>(); + Set propertyNames = new TreeSet<>(); + Map propertyTypeParameterMap = new HashMap<>(); String declaringClassName = clazz.getSimpleName(); - TypeData parentClassTypeData = null; - Map> propertyNameMap = new HashMap>(); - while (!currentClass.isEnum() && currentClass.getSuperclass() != null) { + Map> propertyNameMap = new HashMap<>(); + for (ClassWithParentTypeData currentClassWithParentTypeData : getClassHierarchy(clazz, null)) { + Class currentClass = currentClassWithParentTypeData.clazz; + TypeData parentClassTypeData = currentClassWithParentTypeData.parentClassTypeData; annotations.addAll(asList(currentClass.getDeclaredAnnotations())); - List genericTypeNames = new ArrayList(); + List genericTypeNames = new ArrayList<>(); for (TypeVariable> classTypeVariable : currentClass.getTypeParameters()) { genericTypeNames.add(classTypeVariable.getName()); } @@ -116,13 +117,6 @@ static void configureClassModelBuilder(final ClassModelBuilder classModel } } } - - parentClassTypeData = TypeData.newInstance(currentClass.getGenericSuperclass(), currentClass); - currentClass = currentClass.getSuperclass(); - } - - if (currentClass.isInterface()) { - annotations.addAll(asList(currentClass.getDeclaredAnnotations())); } for (String propertyName : propertyNames) { @@ -138,14 +132,14 @@ static void configureClassModelBuilder(final ClassModelBuilder classModel Constructor noArgsConstructor = null; for (Constructor constructor : clazz.getDeclaredConstructors()) { - if 
(constructor.getParameterTypes().length == 0 + if (constructor.getParameterCount() == 0 && (isPublic(constructor.getModifiers()) || isProtected(constructor.getModifiers()))) { noArgsConstructor = (Constructor) constructor; noArgsConstructor.setAccessible(true); } } - classModelBuilder.instanceCreatorFactory(new InstanceCreatorFactoryImpl(new CreatorExecutable(clazz, noArgsConstructor))); + classModelBuilder.instanceCreatorFactory(new InstanceCreatorFactoryImpl<>(new CreatorExecutable<>(clazz, noArgsConstructor))); } private static PropertyMetadata getOrCreateMethodPropertyMetadata(final String propertyName, @@ -157,15 +151,20 @@ private static PropertyMetadata getOrCreateMethodPropertyMetadata(fina final List genericTypeNames, final Type genericType) { PropertyMetadata propertyMetadata = getOrCreatePropertyMetadata(propertyName, declaringClassName, propertyNameMap, typeData); - if (!propertyMetadata.getTypeData().getType().isAssignableFrom(typeData.getType())) { - throw new CodecConfigurationException(format("Property '%s' in %s, has differing data types: %s and %s", propertyName, + if (!isAssignableClass(propertyMetadata.getTypeData().getType(), typeData.getType())) { + propertyMetadata.setError(format("Property '%s' in %s, has differing data types: %s and %s.", propertyName, declaringClassName, propertyMetadata.getTypeData(), typeData)); } - cachePropertyTypeData(propertyMetadata, propertyTypeParameterMap, parentClassTypeData, genericTypeNames, - genericType); + cachePropertyTypeData(propertyMetadata, propertyTypeParameterMap, parentClassTypeData, genericTypeNames, genericType); return propertyMetadata; } + private static boolean isAssignableClass(final Class propertyTypeClass, final Class typeDataClass) { + notNull("propertyTypeClass", propertyTypeClass); + notNull("typeDataClass", typeDataClass); + return propertyTypeClass.isAssignableFrom(typeDataClass) || typeDataClass.isAssignableFrom(propertyTypeClass); + } + private static PropertyMetadata 
getOrCreateFieldPropertyMetadata(final String propertyName, final String declaringClassName, final Map> propertyNameMap, @@ -189,7 +188,7 @@ private static PropertyMetadata getOrCreatePropertyMetadata(final String final TypeData typeData) { PropertyMetadata propertyMetadata = (PropertyMetadata) propertyNameMap.get(propertyName); if (propertyMetadata == null) { - propertyMetadata = new PropertyMetadata(propertyName, declaringClassName, typeData); + propertyMetadata = new PropertyMetadata<>(propertyName, declaringClassName, typeData); propertyNameMap.put(propertyName, propertyMetadata); } return propertyMetadata; @@ -209,7 +208,6 @@ private static Type getGenericType(final Method method) { return isGetter(method) ? method.getGenericReturnType() : method.getGenericParameterTypes()[0]; } - @SuppressWarnings("unchecked") static PropertyModelBuilder createPropertyModelBuilder(final PropertyMetadata propertyMetadata) { PropertyModelBuilder propertyModelBuilder = PropertyModel.builder() .propertyName(propertyMetadata.getName()) @@ -218,11 +216,13 @@ static PropertyModelBuilder createPropertyModelBuilder(final PropertyMeta .typeData(propertyMetadata.getTypeData()) .readAnnotations(propertyMetadata.getReadAnnotations()) .writeAnnotations(propertyMetadata.getWriteAnnotations()) - .propertySerialization(new PropertyModelSerializationImpl()) - .propertyAccessor(new PropertyAccessorImpl(propertyMetadata)); + .propertySerialization(new PropertyModelSerializationImpl<>()) + .propertyAccessor(new PropertyAccessorImpl<>(propertyMetadata)) + .setError(propertyMetadata.getError()); if (propertyMetadata.getTypeParameters() != null) { - specializePropertyModelBuilder(propertyModelBuilder, propertyMetadata); + propertyModelBuilder.typeData(specializeTypeData(propertyModelBuilder.getTypeData(), propertyMetadata.getTypeParameters(), + propertyMetadata.getTypeParameterMap())); } return propertyModelBuilder; @@ -240,37 +240,14 @@ private static TypeParameterMap getTypeParameterMap(final List 
genericTy classParamIndex = genericTypeNames.indexOf(pt.getActualTypeArguments()[i].toString()); if (classParamIndex != -1) { builder.addIndex(i, classParamIndex); + } else { + builder.addIndex(i, getTypeParameterMap(genericTypeNames, pt.getActualTypeArguments()[i])); } } } } return builder.build(); } - @SuppressWarnings("unchecked") - private static void specializePropertyModelBuilder(final PropertyModelBuilder propertyModelBuilder, - final PropertyMetadata propertyMetadata) { - if (propertyMetadata.getTypeParameterMap().hasTypeParameters() && !propertyMetadata.getTypeParameters().isEmpty()) { - TypeData specializedFieldType; - Map fieldToClassParamIndexMap = propertyMetadata.getTypeParameterMap().getPropertyToClassParamIndexMap(); - Integer classTypeParamRepresentsWholeField = fieldToClassParamIndexMap.get(-1); - if (classTypeParamRepresentsWholeField != null) { - specializedFieldType = (TypeData) propertyMetadata.getTypeParameters().get(classTypeParamRepresentsWholeField); - } else { - TypeData.Builder builder = TypeData.builder(propertyModelBuilder.getTypeData().getType()); - List> typeParameters = new ArrayList>(propertyModelBuilder.getTypeData().getTypeParameters()); - for (int i = 0; i < typeParameters.size(); i++) { - for (Map.Entry mapping : fieldToClassParamIndexMap.entrySet()) { - if (mapping.getKey().equals(i)) { - typeParameters.set(i, propertyMetadata.getTypeParameters().get(mapping.getValue())); - } - } - } - builder.addTypeParameters(typeParameters); - specializedFieldType = builder.build(); - } - propertyModelBuilder.typeData(specializedFieldType); - } - } static V stateNotNull(final String property, final V value) { if (value == null) { @@ -279,6 +256,33 @@ static V stateNotNull(final String property, final V value) { return value; } + @SuppressWarnings("unchecked") + private static Set> getClassHierarchy(final Class clazz, + final TypeData classTypeData) { + Set> classesToScan = new LinkedHashSet<>(); + Class currentClass = clazz; + TypeData 
parentClassTypeData = classTypeData; + while (currentClass != null && !currentClass.isEnum() && !currentClass.equals(Object.class)) { + classesToScan.add(new ClassWithParentTypeData<>(currentClass, parentClassTypeData)); + parentClassTypeData = TypeData.newInstance(currentClass.getGenericSuperclass(), currentClass); + for (Class interfaceClass : currentClass.getInterfaces()) { + classesToScan.addAll(getClassHierarchy((Class) interfaceClass, parentClassTypeData)); + } + currentClass = currentClass.getSuperclass(); + } + return classesToScan; + } + + private static final class ClassWithParentTypeData { + private final Class clazz; + private final TypeData parentClassTypeData; + + private ClassWithParentTypeData(final Class clazz, final TypeData parentClassTypeData) { + this.clazz = clazz; + this.parentClassTypeData = parentClassTypeData; + } + } + private PojoBuilderHelper() { } } diff --git a/bson/src/main/org/bson/codecs/pojo/PojoCodec.java b/bson/src/main/org/bson/codecs/pojo/PojoCodec.java index be6ac86248d..698e77f4b76 100644 --- a/bson/src/main/org/bson/codecs/pojo/PojoCodec.java +++ b/bson/src/main/org/bson/codecs/pojo/PojoCodec.java @@ -21,4 +21,6 @@ abstract class PojoCodec implements Codec { abstract ClassModel getClassModel(); + + abstract DiscriminatorLookup getDiscriminatorLookup(); } diff --git a/bson/src/main/org/bson/codecs/pojo/PojoCodecImpl.java b/bson/src/main/org/bson/codecs/pojo/PojoCodecImpl.java index 361e84b0104..cbcfc99b20d 100644 --- a/bson/src/main/org/bson/codecs/pojo/PojoCodecImpl.java +++ b/bson/src/main/org/bson/codecs/pojo/PojoCodecImpl.java @@ -15,97 +15,71 @@ */ package org.bson.codecs.pojo; +import org.bson.BsonDocument; +import org.bson.BsonDocumentReader; +import org.bson.BsonDocumentWrapper; import org.bson.BsonInvalidOperationException; import org.bson.BsonReader; import org.bson.BsonReaderMark; import org.bson.BsonType; +import org.bson.BsonValue; import org.bson.BsonWriter; +import org.bson.codecs.BsonValueCodec; import 
org.bson.codecs.Codec; import org.bson.codecs.DecoderContext; +import org.bson.codecs.Encoder; import org.bson.codecs.EncoderContext; import org.bson.codecs.configuration.CodecConfigurationException; import org.bson.codecs.configuration.CodecRegistry; import org.bson.diagnostics.Logger; import org.bson.diagnostics.Loggers; -import java.util.ArrayList; +import javax.annotation.Nullable; import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; +import java.util.function.Supplier; import static java.lang.String.format; -import static org.bson.codecs.configuration.CodecRegistries.fromCodecs; -import static org.bson.codecs.configuration.CodecRegistries.fromRegistries; final class PojoCodecImpl extends PojoCodec { private static final Logger LOGGER = Loggers.getLogger("PojoCodec"); + private static final Codec BSON_VALUE_CODEC = new BsonValueCodec(); private final ClassModel classModel; private final CodecRegistry registry; private final PropertyCodecRegistry propertyCodecRegistry; private final DiscriminatorLookup discriminatorLookup; - private final ConcurrentMap, Codec> codecCache; - private final boolean specialized; PojoCodecImpl(final ClassModel classModel, final CodecRegistry codecRegistry, - final List propertyCodecProviders, final DiscriminatorLookup discriminatorLookup) { + final List propertyCodecProviders, final DiscriminatorLookup discriminatorLookup) { this.classModel = classModel; - this.registry = fromRegistries(fromCodecs(this), codecRegistry); + this.registry = codecRegistry; this.discriminatorLookup = discriminatorLookup; - this.codecCache = new ConcurrentHashMap, Codec>(); this.propertyCodecRegistry = new PropertyCodecRegistryImpl(this, registry, propertyCodecProviders); - this.specialized = shouldSpecialize(classModel); specialize(); } - PojoCodecImpl(final ClassModel classModel, final CodecRegistry registry, final PropertyCodecRegistry 
propertyCodecRegistry, - final DiscriminatorLookup discriminatorLookup, final ConcurrentMap, Codec> codecCache, - final boolean specialized) { + PojoCodecImpl(final ClassModel classModel, final CodecRegistry codecRegistry, + final PropertyCodecRegistry propertyCodecRegistry, final DiscriminatorLookup discriminatorLookup) { this.classModel = classModel; - this.registry = fromRegistries(fromCodecs(this), registry); + this.registry = codecRegistry; this.discriminatorLookup = discriminatorLookup; - this.codecCache = codecCache; this.propertyCodecRegistry = propertyCodecRegistry; - this.specialized = specialized; specialize(); } - private void specialize() { - if (specialized) { - codecCache.put(classModel, this); - for (PropertyModel propertyModel : classModel.getPropertyModels()) { - try { - addToCache(propertyModel); - } catch (Exception e) { - throw new CodecConfigurationException(format("Could not create a PojoCodec for '%s'." - + " Property '%s' errored with: %s", classModel.getName(), propertyModel.getName(), e.getMessage()), e); - } - } - } - } - @SuppressWarnings("unchecked") @Override public void encode(final BsonWriter writer, final T value, final EncoderContext encoderContext) { - if (!specialized) { - throw new CodecConfigurationException(format("%s contains generic types that have not been specialised.%n" - + "Top level classes with generic types are not supported by the PojoCodec.", classModel.getName())); - } if (areEquivalentTypes(value.getClass(), classModel.getType())) { writer.writeStartDocument(); - PropertyModel idPropertyModel = classModel.getIdPropertyModel(); - if (idPropertyModel != null) { - encodeProperty(writer, value, encoderContext, idPropertyModel); - } - if (classModel.useDiscriminator()) { - writer.writeString(classModel.getDiscriminatorKey(), classModel.getDiscriminator()); - } + encodeIdProperty(writer, value, encoderContext, classModel.getIdPropertyModelHolder()); + encodeDiscriminatorProperty(writer); for (PropertyModel 
propertyModel : classModel.getPropertyModels()) { - if (propertyModel.equals(classModel.getIdPropertyModel())) { + if (idProperty(propertyModel)) { continue; } encodeProperty(writer, value, encoderContext, propertyModel); @@ -119,16 +93,13 @@ public void encode(final BsonWriter writer, final T value, final EncoderContext @Override public T decode(final BsonReader reader, final DecoderContext decoderContext) { if (decoderContext.hasCheckedDiscriminator()) { - if (!specialized) { - throw new CodecConfigurationException(format("%s contains generic types that have not been specialised.%n" - + "Top level classes with generic types are not supported by the PojoCodec.", classModel.getName())); - } InstanceCreator instanceCreator = classModel.getInstanceCreator(); decodeProperties(reader, decoderContext, instanceCreator); return instanceCreator.getInstance(); } else { return getCodecFromDocument(reader, classModel.useDiscriminator(), classModel.getDiscriminatorKey(), registry, - discriminatorLookup, this).decode(reader, DecoderContext.builder().checkedDiscriminator(true).build()); + discriminatorLookup, this, classModel.getName()) + .decode(reader, DecoderContext.builder().checkedDiscriminator(true).build()); } } @@ -146,167 +117,177 @@ ClassModel getClassModel() { return classModel; } - @SuppressWarnings("unchecked") + private void encodeIdProperty(final BsonWriter writer, final T instance, final EncoderContext encoderContext, + final IdPropertyModelHolder propertyModelHolder) { + if (propertyModelHolder.getPropertyModel() != null) { + if (propertyModelHolder.getIdGenerator() == null) { + encodeProperty(writer, instance, encoderContext, propertyModelHolder.getPropertyModel()); + } else { + S id = propertyModelHolder.getPropertyModel().getPropertyAccessor().get(instance); + if (id == null && encoderContext.isEncodingCollectibleDocument()) { + id = propertyModelHolder.getIdGenerator().generate(); + try { + 
propertyModelHolder.getPropertyModel().getPropertyAccessor().set(instance, id); + } catch (Exception e) { + // ignore + } + } + encodeValue(writer, encoderContext, propertyModelHolder.getPropertyModel(), id); + } + } + } + + private boolean idProperty(final PropertyModel propertyModel) { + return propertyModel.equals(classModel.getIdPropertyModel()); + } + + private void encodeDiscriminatorProperty(final BsonWriter writer) { + if (classModel.useDiscriminator()) { + writer.writeString(classModel.getDiscriminatorKey(), classModel.getDiscriminator()); + } + } + private void encodeProperty(final BsonWriter writer, final T instance, final EncoderContext encoderContext, final PropertyModel propertyModel) { - if (propertyModel.isReadable()) { + if (propertyModel != null && propertyModel.isReadable()) { S propertyValue = propertyModel.getPropertyAccessor().get(instance); - if (propertyModel.shouldSerialize(propertyValue)) { - writer.writeName(propertyModel.getReadName()); - if (propertyValue == null) { - writer.writeNull(); + encodeValue(writer, encoderContext, propertyModel, propertyValue); + } + } + + @SuppressWarnings("unchecked") + private void encodeValue(final BsonWriter writer, final EncoderContext encoderContext, final PropertyModel propertyModel, + final S propertyValue) { + if (propertyModel.shouldSerialize(propertyValue)) { + try { + if (propertyModel.getPropertySerialization().inline()) { + if (propertyValue != null) { + new BsonDocumentWrapper<>(propertyValue, propertyModel.getCachedCodec()).forEach((k, v) -> { + writer.writeName(k); + encoderContext.encodeWithChildContext((Encoder) registry.get(v.getClass()), writer, v); + }); + } } else { - propertyModel.getCachedCodec().encode(writer, propertyValue, encoderContext); + writer.writeName(propertyModel.getReadName()); + if (propertyValue == null) { + writer.writeNull(); + } else { + encoderContext.encodeWithChildContext(propertyModel.getCachedCodec(), writer, propertyValue); + } } + } catch 
(CodecConfigurationException e) { + throw new CodecConfigurationException(format("Failed to encode '%s'. Encoding '%s' errored with: %s", + classModel.getName(), propertyModel.getReadName(), e.getMessage()), e); } } } - @SuppressWarnings("unchecked") private void decodeProperties(final BsonReader reader, final DecoderContext decoderContext, final InstanceCreator instanceCreator) { + PropertyModel inlineElementsPropertyModel = classModel.getPropertyModels() + .stream() + .filter(p -> p.getPropertySerialization().inline()) + .findFirst() + .orElse(null); + + BsonDocument extraElements = inlineElementsPropertyModel == null ? null : new BsonDocument(); reader.readStartDocument(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { String name = reader.readName(); if (classModel.useDiscriminator() && classModel.getDiscriminatorKey().equals(name)) { reader.readString(); } else { - decodePropertyModel(reader, decoderContext, instanceCreator, name, getPropertyModelByWriteName(classModel, name)); + decodePropertyModel(reader, decoderContext, instanceCreator, name, getPropertyModelByWriteName(classModel, name), extraElements); } } reader.readEndDocument(); + setPropertyValueBsonExtraElements(instanceCreator, extraElements, inlineElementsPropertyModel); } - @SuppressWarnings("unchecked") private void decodePropertyModel(final BsonReader reader, final DecoderContext decoderContext, final InstanceCreator instanceCreator, final String name, - final PropertyModel propertyModel) { + final PropertyModel propertyModel, @Nullable final BsonDocument extraElements) { if (propertyModel != null) { - try { + setPropertyValue(instanceCreator, () -> { S value = null; if (reader.getCurrentBsonType() == BsonType.NULL) { reader.readNull(); } else { - value = decoderContext.decodeWithChildContext(propertyModel.getCachedCodec(), reader); - } - if (propertyModel.isWritable()) { - instanceCreator.set(value, propertyModel); + Codec codec = propertyModel.getCachedCodec(); + if (codec == 
null) { + throw new CodecConfigurationException(format("Missing codec in '%s' for '%s'", + classModel.getName(), propertyModel.getName())); + } + value = decoderContext.decodeWithChildContext(codec, reader); } - } catch (BsonInvalidOperationException e) { - throw new CodecConfigurationException(format("Failed to decode '%s'. Decoding '%s' errored with: %s", - classModel.getName(), name, e.getMessage()), e); - } catch (CodecConfigurationException e) { - throw new CodecConfigurationException(format("Failed to decode '%s'. Decoding '%s' errored with: %s", - classModel.getName(), name, e.getMessage()), e); - } - } else { + return value; + }, propertyModel); + } else if (extraElements == null) { if (LOGGER.isTraceEnabled()) { LOGGER.trace(format("Found property not present in the ClassModel: %s", name)); } reader.skipValue(); + } else { + try { + extraElements.append(name, decoderContext.decodeWithChildContext(BSON_VALUE_CODEC, reader)); + } catch (CodecConfigurationException e) { + throw new CodecConfigurationException(format("Failed to decode '%s'. Decoding '%s' errored with: %s", + classModel.getName(), name, e.getMessage()), e); + } } } - private void addToCache(final PropertyModel propertyModel) { - Codec codec = propertyModel.getCodec() != null ? 
propertyModel.getCodec() - : specializePojoCodec(propertyModel, propertyCodecRegistry.get(propertyModel.getTypeData())); - propertyModel.cachedCodec(codec); - } - - private boolean areEquivalentTypes(final Class t1, final Class t2) { - if (t1.equals(t2)) { - return true; - } else if (Collection.class.isAssignableFrom(t1) && Collection.class.isAssignableFrom(t2)) { - return true; - } else if (Map.class.isAssignableFrom(t1) && Map.class.isAssignableFrom(t2)) { - return true; + private void setPropertyValue(final InstanceCreator instanceCreator, final Supplier valueSupplier, + final PropertyModel propertyModel) { + try { + instanceCreator.set(valueSupplier.get(), propertyModel); + } catch (BsonInvalidOperationException | CodecConfigurationException e) { + throw new CodecConfigurationException(format("Failed to decode '%s'. Decoding '%s' errored with: %s", + classModel.getName(), propertyModel.getName(), e.getMessage()), e); } - return false; } - @SuppressWarnings("unchecked") - private Codec specializePojoCodec(final PropertyModel propertyModel, final Codec defaultCodec) { - Codec codec = defaultCodec; - if (codec != null && codec instanceof PojoCodec) { - PojoCodec pojoCodec = (PojoCodec) codec; - ClassModel specialized = getSpecializedClassModel(pojoCodec.getClassModel(), propertyModel); - if (codecCache.containsKey(specialized)) { - codec = (Codec) codecCache.get(specialized); - } else { - codec = new LazyPojoCodec(specialized, registry, propertyCodecRegistry, discriminatorLookup, codecCache); - } + private void setPropertyValueBsonExtraElements(final InstanceCreator instanceCreator, @Nullable final BsonDocument extraElements, + final PropertyModel inlineElementsPropertyModel) { + if (extraElements != null + && !extraElements.isEmpty() + && inlineElementsPropertyModel != null + && inlineElementsPropertyModel.isWritable()) { + setPropertyValue(instanceCreator, () -> + inlineElementsPropertyModel.getCachedCodec() + .decode(new BsonDocumentReader(extraElements), 
DecoderContext.builder().build()), + inlineElementsPropertyModel); } - return codec; } - @SuppressWarnings({"rawtypes", "unchecked"}) - private ClassModel getSpecializedClassModel(final ClassModel clazzModel, final PropertyModel propertyModel) { - boolean useDiscriminator = propertyModel.useDiscriminator() == null ? clazzModel.useDiscriminator() - : propertyModel.useDiscriminator(); - boolean validDiscriminator = clazzModel.getDiscriminatorKey() != null && clazzModel.getDiscriminator() != null; - boolean changeTheDiscriminator = (useDiscriminator != clazzModel.useDiscriminator()) && validDiscriminator; + private void specialize() { + classModel.getPropertyModels().forEach(this::cachePropertyModelCodec); + } - if (propertyModel.getTypeData().getTypeParameters().isEmpty() && !changeTheDiscriminator){ - return clazzModel; + private void cachePropertyModelCodec(final PropertyModel propertyModel) { + if (propertyModel.getCachedCodec() == null) { + Codec codec = propertyModel.getCodec() != null ? 
propertyModel.getCodec() + : new LazyPropertyModelCodec<>(propertyModel, registry, propertyCodecRegistry); + propertyModel.cachedCodec(codec); } - - ArrayList> concretePropertyModels = new ArrayList>(clazzModel.getPropertyModels()); - PropertyModel concreteIdProperty = clazzModel.getIdPropertyModel(); - - List> propertyTypeParameters = propertyModel.getTypeData().getTypeParameters(); - for (int i = 0; i < concretePropertyModels.size(); i++) { - PropertyModel model = concretePropertyModels.get(i); - String propertyName = model.getName(); - TypeParameterMap typeParameterMap = clazzModel.getPropertyNameToTypeParameterMap().get(propertyName); - if (typeParameterMap.hasTypeParameters()) { - PropertyModel concretePropertyModel = getSpecializedPropertyModel(model, typeParameterMap, propertyTypeParameters); - concretePropertyModels.set(i, concretePropertyModel); - if (concreteIdProperty != null && concreteIdProperty.getName().equals(propertyName)) { - concreteIdProperty = concretePropertyModel; - } - } - } - - boolean discriminatorEnabled = changeTheDiscriminator ? 
propertyModel.useDiscriminator() : clazzModel.useDiscriminator(); - return new ClassModel(clazzModel.getType(), clazzModel.getPropertyNameToTypeParameterMap(), - clazzModel.getInstanceCreatorFactory(), discriminatorEnabled, clazzModel.getDiscriminatorKey(), - clazzModel.getDiscriminator(), concreteIdProperty, concretePropertyModels); } - @SuppressWarnings("unchecked") - private PropertyModel getSpecializedPropertyModel(final PropertyModel propertyModel, final TypeParameterMap typeParameterMap, - final List> propertyTypeParameters) { - TypeData specializedPropertyType; - Map propertyToClassParamIndexMap = typeParameterMap.getPropertyToClassParamIndexMap(); - Integer classTypeParamRepresentsWholeProperty = propertyToClassParamIndexMap.get(-1); - if (classTypeParamRepresentsWholeProperty != null) { - specializedPropertyType = (TypeData) propertyTypeParameters.get(classTypeParamRepresentsWholeProperty); - } else { - TypeData.Builder builder = TypeData.builder(propertyModel.getTypeData().getType()); - List> typeParameters = new ArrayList>(propertyModel.getTypeData().getTypeParameters()); - for (int i = 0; i < typeParameters.size(); i++) { - for (Map.Entry mapping : propertyToClassParamIndexMap.entrySet()) { - if (mapping.getKey().equals(i)) { - typeParameters.set(i, propertyTypeParameters.get(mapping.getValue())); - } - } - } - builder.addTypeParameters(typeParameters); - specializedPropertyType = builder.build(); - } - if (propertyModel.getTypeData().equals(specializedPropertyType)) { - return propertyModel; + private boolean areEquivalentTypes(final Class t1, final Class t2) { + if (t1.equals(t2)) { + return true; + } else if (Collection.class.isAssignableFrom(t1) && Collection.class.isAssignableFrom(t2)) { + return true; + } else if (Map.class.isAssignableFrom(t1) && Map.class.isAssignableFrom(t2)) { + return true; } - - return new PropertyModel(propertyModel.getName(), propertyModel.getReadName(), propertyModel.getWriteName(), - specializedPropertyType, null, 
propertyModel.getPropertySerialization(), propertyModel.useDiscriminator(), - propertyModel.getPropertyAccessor()); + return false; } @SuppressWarnings("unchecked") - private Codec getCodecFromDocument(final BsonReader reader, final boolean useDiscriminator, final String discriminatorKey, - final CodecRegistry registry, final DiscriminatorLookup discriminatorLookup, - final Codec defaultCodec) { - Codec codec = defaultCodec; + @Nullable + static Codec getCodecFromDocument(final BsonReader reader, final boolean useDiscriminator, final String discriminatorKey, + final CodecRegistry registry, final DiscriminatorLookup discriminatorLookup, @Nullable final Codec defaultCodec, + final String simpleClassName) { + Codec codec = defaultCodec; if (useDiscriminator) { BsonReaderMark mark = reader.getMark(); reader.readStartDocument(); @@ -316,10 +297,13 @@ private Codec getCodecFromDocument(final BsonReader reader, final boolean use if (discriminatorKey.equals(name)) { discriminatorKeyFound = true; try { - codec = (Codec) registry.get(discriminatorLookup.lookup(reader.readString())); + Class discriminatorClass = discriminatorLookup.lookup(reader.readString()); + if (codec == null || !codec.getEncoderClass().equals(discriminatorClass)) { + codec = (Codec) registry.get(discriminatorClass); + } } catch (Exception e) { throw new CodecConfigurationException(format("Failed to decode '%s'. 
Decoding errored with: %s", - classModel.getName(), e.getMessage()), e); + simpleClassName, e.getMessage()), e); } } else { reader.skipValue(); @@ -339,18 +323,8 @@ private PropertyModel getPropertyModelByWriteName(final ClassModel classMo return null; } - private static boolean shouldSpecialize(final ClassModel classModel) { - if (!classModel.hasTypeParameters()) { - return true; - } - - for (Map.Entry entry : classModel.getPropertyNameToTypeParameterMap().entrySet()) { - TypeParameterMap typeParameterMap = entry.getValue(); - PropertyModel propertyModel = classModel.getPropertyModel(entry.getKey()); - if (typeParameterMap.hasTypeParameters() && (propertyModel == null || propertyModel.getCodec() == null)) { - return false; - } - } - return true; + @Override + DiscriminatorLookup getDiscriminatorLookup() { + return discriminatorLookup; } } diff --git a/bson/src/main/org/bson/codecs/pojo/PojoCodecProvider.java b/bson/src/main/org/bson/codecs/pojo/PojoCodecProvider.java index d63425dd5a2..255b520aabb 100644 --- a/bson/src/main/org/bson/codecs/pojo/PojoCodecProvider.java +++ b/bson/src/main/org/bson/codecs/pojo/PojoCodecProvider.java @@ -69,22 +69,22 @@ public static Builder builder() { @Override public Codec get(final Class clazz, final CodecRegistry registry) { - return getPojoCodec(clazz, registry); + return createCodec(clazz, registry); } @SuppressWarnings("unchecked") - private PojoCodec getPojoCodec(final Class clazz, final CodecRegistry registry) { + private PojoCodec createCodec(final Class clazz, final CodecRegistry registry) { ClassModel classModel = (ClassModel) classModels.get(clazz); if (classModel != null) { - return new PojoCodecImpl(classModel, registry, propertyCodecProviders, discriminatorLookup); + return createCodec(classModel, registry, propertyCodecProviders, discriminatorLookup); } else if (automatic || (clazz.getPackage() != null && packages.contains(clazz.getPackage().getName()))) { try { classModel = createClassModel(clazz, conventions); - if 
(!clazz.isInterface() && classModel.getPropertyModels().isEmpty()) { - return null; + if (clazz.isInterface() || !classModel.getPropertyModels().isEmpty() || classModel.useDiscriminator()) { + discriminatorLookup.addClassModel(classModel); + return new AutomaticPojoCodec<>(createCodec(classModel, registry, propertyCodecProviders, + discriminatorLookup)); } - discriminatorLookup.addClassModel(classModel); - return new AutomaticPojoCodec(new PojoCodecImpl(classModel, registry, propertyCodecProviders, discriminatorLookup)); } catch (Exception e) { LOGGER.warn(format("Cannot use '%s' with the PojoCodec.", clazz.getSimpleName()), e); return null; @@ -93,15 +93,22 @@ private PojoCodec getPojoCodec(final Class clazz, final CodecRegistry return null; } + private static PojoCodec createCodec(final ClassModel classModel, final CodecRegistry codecRegistry, + final List propertyCodecProviders, final DiscriminatorLookup discriminatorLookup) { + return shouldSpecialize(classModel) + ? new PojoCodecImpl<>(classModel, codecRegistry, propertyCodecProviders, discriminatorLookup) + : new LazyPropertyModelCodec.NeedSpecializationCodec<>(classModel, discriminatorLookup, codecRegistry); + } + /** * A Builder for the PojoCodecProvider */ public static final class Builder { - private final Set packages = new HashSet(); - private final Map, ClassModel> classModels = new HashMap, ClassModel>(); - private final List> clazzes = new ArrayList>(); + private final Set packages = new HashSet<>(); + private final Map, ClassModel> classModels = new HashMap<>(); + private final List> clazzes = new ArrayList<>(); private List conventions = null; - private final List propertyCodecProviders = new ArrayList(); + private final List propertyCodecProviders = new ArrayList<>(); private boolean automatic; /** @@ -112,7 +119,7 @@ public static final class Builder { */ public PojoCodecProvider build() { List immutableConventions = conventions != null - ? 
Collections.unmodifiableList(new ArrayList(conventions)) + ? Collections.unmodifiableList(new ArrayList<>(conventions)) : null; for (Class clazz : clazzes) { if (!classModels.containsKey(clazz)) { @@ -123,12 +130,12 @@ public PojoCodecProvider build() { } /** - * Sets whether the provider should automatically try to create a {@link ClassModel} for any class that is requested. + * Sets whether the provider should automatically try to wrap a {@link ClassModel} for any class that is requested. * *

    Note: As Java Beans are convention based, when using automatic settings the provider should be the last provider in the * registry.

    * - * @param automatic whether to automatically create {@code ClassModels} or not. + * @param automatic whether to automatically wrap {@code ClassModels} or not. * @return this */ public Builder automatic(final boolean automatic) { @@ -218,4 +225,19 @@ private static ClassModel createClassModel(final Class clazz, final Li } return builder.build(); } + + private static boolean shouldSpecialize(final ClassModel classModel) { + if (!classModel.hasTypeParameters()) { + return true; + } + + for (Map.Entry entry : classModel.getPropertyNameToTypeParameterMap().entrySet()) { + TypeParameterMap typeParameterMap = entry.getValue(); + PropertyModel propertyModel = classModel.getPropertyModel(entry.getKey()); + if (typeParameterMap.hasTypeParameters() && (propertyModel == null || propertyModel.getCodec() == null)) { + return false; + } + } + return true; + } } diff --git a/bson/src/main/org/bson/codecs/pojo/PojoSpecializationHelper.java b/bson/src/main/org/bson/codecs/pojo/PojoSpecializationHelper.java new file mode 100644 index 00000000000..8986c794af8 --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/PojoSpecializationHelper.java @@ -0,0 +1,80 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +final class PojoSpecializationHelper { + + @SuppressWarnings("unchecked") + static TypeData specializeTypeData(final TypeData typeData, final List> typeParameters, + final TypeParameterMap typeParameterMap) { + if (!typeParameterMap.hasTypeParameters() || typeParameters.isEmpty()) { + return typeData; + } + + Map> propertyToClassParamIndexMap = typeParameterMap.getPropertyToClassParamIndexMap(); + Either classTypeParamRepresentsWholeField = propertyToClassParamIndexMap.get(-1); + if (classTypeParamRepresentsWholeField != null) { + Integer index = classTypeParamRepresentsWholeField.map(i -> i, e -> { + throw new IllegalStateException("Invalid state, the whole class cannot be represented by a subtype."); + }); + return (TypeData) typeParameters.get(index); + } else { + return getTypeData(typeData, typeParameters, propertyToClassParamIndexMap); + } + } + + private static TypeData getTypeData(final TypeData typeData, final List> specializedTypeParameters, + final Map> propertyToClassParamIndexMap) { + List> subTypeParameters = new ArrayList<>(typeData.getTypeParameters()); + for (int i = 0; i < typeData.getTypeParameters().size(); i++) { + subTypeParameters.set(i, getTypeData(subTypeParameters.get(i), specializedTypeParameters, propertyToClassParamIndexMap, i)); + } + return TypeData.builder(typeData.getType()).addTypeParameters(subTypeParameters).build(); + } + + private static TypeData getTypeData(final TypeData typeData, final List> specializedTypeParameters, + final Map> propertyToClassParamIndexMap, + final int index) { + if (!propertyToClassParamIndexMap.containsKey(index)) { + return typeData; + } + return propertyToClassParamIndexMap.get(index).map(l -> { + if (typeData.getTypeParameters().isEmpty()) { + // Represents the whole typeData + return specializedTypeParameters.get(l); + } else { + // Represents a single nested type parameter within 
this typeData + TypeData.Builder builder = TypeData.builder(typeData.getType()); + List> typeParameters = new ArrayList<>(typeData.getTypeParameters()); + typeParameters.set(index, specializedTypeParameters.get(l)); + builder.addTypeParameters(typeParameters); + return builder.build(); + } + }, + r -> { + // Represents a child type parameter of this typeData + return getTypeData(typeData, specializedTypeParameters, r.getPropertyToClassParamIndexMap()); + }); + } + + private PojoSpecializationHelper() { + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/PropertyAccessorImpl.java b/bson/src/main/org/bson/codecs/pojo/PropertyAccessorImpl.java index d0d963b093e..cab25fa78ea 100644 --- a/bson/src/main/org/bson/codecs/pojo/PropertyAccessorImpl.java +++ b/bson/src/main/org/bson/codecs/pojo/PropertyAccessorImpl.java @@ -41,7 +41,7 @@ public T get(final S instance) { } else { throw getError(null); } - } catch (final Exception e) { + } catch (Exception e) { throw getError(e); } } @@ -56,7 +56,7 @@ public void set(final S instance, final T value) { propertyMetadata.getField().set(instance, value); } } - } catch (final Exception e) { + } catch (Exception e) { throw setError(e); } } diff --git a/bson/src/main/org/bson/codecs/pojo/PropertyCodecRegistryImpl.java b/bson/src/main/org/bson/codecs/pojo/PropertyCodecRegistryImpl.java index 5adeebeb018..b6b27626ac2 100644 --- a/bson/src/main/org/bson/codecs/pojo/PropertyCodecRegistryImpl.java +++ b/bson/src/main/org/bson/codecs/pojo/PropertyCodecRegistryImpl.java @@ -21,13 +21,15 @@ import java.util.ArrayList; import java.util.List; +import java.util.concurrent.ConcurrentHashMap; class PropertyCodecRegistryImpl implements PropertyCodecRegistry { private final List propertyCodecProviders; + private final ConcurrentHashMap, Codec> propertyCodecCache; PropertyCodecRegistryImpl(final PojoCodec pojoCodec, final CodecRegistry codecRegistry, final List propertyCodecProviders) { - List augmentedProviders = new ArrayList(); + List 
augmentedProviders = new ArrayList<>(); if (propertyCodecProviders != null) { augmentedProviders.addAll(propertyCodecProviders); } @@ -36,15 +38,22 @@ class PropertyCodecRegistryImpl implements PropertyCodecRegistry { augmentedProviders.add(new EnumPropertyCodecProvider(codecRegistry)); augmentedProviders.add(new FallbackPropertyCodecProvider(pojoCodec, codecRegistry)); this.propertyCodecProviders = augmentedProviders; + this.propertyCodecCache = new ConcurrentHashMap<>(); } + @SuppressWarnings("unchecked") @Override - public Codec get(final TypeWithTypeParameters type) { + public Codec get(final TypeWithTypeParameters typeWithTypeParameters) { + if (propertyCodecCache.containsKey(typeWithTypeParameters)) { + return (Codec) propertyCodecCache.get(typeWithTypeParameters); + } + for (PropertyCodecProvider propertyCodecProvider : propertyCodecProviders) { - Codec codec = propertyCodecProvider.get(type, this); - if (codec != null) { - return codec; - } + Codec codec = propertyCodecProvider.get(typeWithTypeParameters, this); + if (codec != null) { + propertyCodecCache.put(typeWithTypeParameters, codec); + return codec; + } } return null; } diff --git a/bson/src/main/org/bson/codecs/pojo/PropertyMetadata.java b/bson/src/main/org/bson/codecs/pojo/PropertyMetadata.java index 73e607554b3..69530ad3b4e 100644 --- a/bson/src/main/org/bson/codecs/pojo/PropertyMetadata.java +++ b/bson/src/main/org/bson/codecs/pojo/PropertyMetadata.java @@ -33,14 +33,16 @@ import static java.lang.reflect.Modifier.isTransient; final class PropertyMetadata { + private static final TypeData VOID_TYPE_DATA = TypeData.builder(Void.class).build(); private final String name; private final String declaringClassName; private final TypeData typeData; - private final Map, Annotation> readAnnotations = new HashMap, Annotation>(); - private final Map, Annotation> writeAnnotations = new HashMap, Annotation>(); + private final Map, Annotation> readAnnotations = new HashMap<>(); + private final Map, Annotation> 
writeAnnotations = new HashMap<>(); private TypeParameterMap typeParameterMap; private List> typeParameters; + private String error; private Field field; private Method getter; private Method setter; @@ -56,11 +58,14 @@ public String getName() { } public List getReadAnnotations() { - return new ArrayList(readAnnotations.values()); + return new ArrayList<>(readAnnotations.values()); } public PropertyMetadata addReadAnnotation(final Annotation annotation) { if (readAnnotations.containsKey(annotation.annotationType())) { + if (annotation.equals(readAnnotations.get(annotation.annotationType()))) { + return this; + } throw new CodecConfigurationException(format("Read annotation %s for '%s' already exists in %s", annotation.annotationType(), name, declaringClassName)); } @@ -69,11 +74,14 @@ public PropertyMetadata addReadAnnotation(final Annotation annotation) { } public List getWriteAnnotations() { - return new ArrayList(writeAnnotations.values()); + return new ArrayList<>(writeAnnotations.values()); } public PropertyMetadata addWriteAnnotation(final Annotation annotation) { if (writeAnnotations.containsKey(annotation.annotationType())) { + if (annotation.equals(writeAnnotations.get(annotation.annotationType()))) { + return this; + } throw new CodecConfigurationException(format("Write annotation %s for '%s' already exists in %s", annotation.annotationType(), name, declaringClassName)); } @@ -130,7 +138,18 @@ public PropertyMetadata typeParameterInfo(final TypeParameterMap typePara return this; } + String getError() { + return error; + } + + void setError(final String error) { + this.error = error; + } + public boolean isSerializable() { + if (isVoidType()) { + return false; + } if (getter != null) { return field == null || notStaticOrTransient(field.getModifiers()); } else { @@ -139,6 +158,9 @@ public boolean isSerializable() { } public boolean isDeserializable() { + if (isVoidType()) { + return false; + } if (setter != null) { return field == null || 
!isFinal(field.getModifiers()) && notStaticOrTransient(field.getModifiers()); } else { @@ -146,6 +168,10 @@ public boolean isDeserializable() { } } + private boolean isVoidType() { + return VOID_TYPE_DATA.equals(typeData); + } + private boolean notStaticOrTransient(final int modifiers) { return !(isTransient(modifiers) || isStatic(modifiers)); } @@ -153,4 +179,21 @@ private boolean notStaticOrTransient(final int modifiers) { private boolean isPublicAndNotStaticOrTransient(final int modifiers) { return isPublic(modifiers) && notStaticOrTransient(modifiers); } + + @Override + public String toString() { + return "PropertyMetadata{" + + "name='" + name + '\'' + + ", declaringClassName='" + declaringClassName + '\'' + + ", typeData=" + typeData + + ", readAnnotations=" + readAnnotations + + ", writeAnnotations=" + writeAnnotations + + ", typeParameterMap=" + typeParameterMap + + ", typeParameters=" + typeParameters + + ", error='" + error + '\'' + + ", field=" + field + + ", getter=" + getter + + ", setter=" + setter + + '}'; + } } diff --git a/bson/src/main/org/bson/codecs/pojo/PropertyModel.java b/bson/src/main/org/bson/codecs/pojo/PropertyModel.java index cad91075426..5e6079795ff 100644 --- a/bson/src/main/org/bson/codecs/pojo/PropertyModel.java +++ b/bson/src/main/org/bson/codecs/pojo/PropertyModel.java @@ -16,8 +16,11 @@ package org.bson.codecs.pojo; +import org.bson.BsonType; import org.bson.codecs.Codec; +import java.util.Objects; + /** * Represents a property on a class and stores various metadata such as generic parameters * @@ -33,11 +36,13 @@ public final class PropertyModel { private final PropertySerialization propertySerialization; private final Boolean useDiscriminator; private final PropertyAccessor propertyAccessor; + private final String error; private volatile Codec cachedCodec; + private final BsonType bsonRepresentation; PropertyModel(final String name, final String readName, final String writeName, final TypeData typeData, final Codec codec, final 
PropertySerialization propertySerialization, final Boolean useDiscriminator, - final PropertyAccessor propertyAccessor) { + final PropertyAccessor propertyAccessor, final String error, final BsonType bsonRepresentation) { this.name = name; this.readName = readName; this.writeName = writeName; @@ -47,6 +52,8 @@ public final class PropertyModel { this.propertySerialization = propertySerialization; this.useDiscriminator = useDiscriminator; this.propertyAccessor = propertyAccessor; + this.error = error; + this.bsonRepresentation = bsonRepresentation; } /** @@ -55,7 +62,7 @@ public final class PropertyModel { * @return the builder */ public static PropertyModelBuilder builder() { - return new PropertyModelBuilder(); + return new PropertyModelBuilder<>(); } /** @@ -111,6 +118,15 @@ public Codec getCodec() { return codec; } + /** + * @return the BsonRepresentation of the field + * + * @since 4.2 + */ + public BsonType getBsonRepresentation() { + return bsonRepresentation; + } + /** * Returns true if the value should be serialized. * @@ -175,13 +191,18 @@ public boolean equals(final Object o) { .getPropertySerialization() != null) { return false; } - if (useDiscriminator != null ? !useDiscriminator.equals(that.useDiscriminator) : that.useDiscriminator != null) { + if (!Objects.equals(useDiscriminator, that.useDiscriminator)) { return false; } if (getPropertyAccessor() != null ? !getPropertyAccessor().equals(that.getPropertyAccessor()) : that.getPropertyAccessor() != null) { return false; } + + if (getError() != null ? !getError().equals(that.getError()) : that.getError() != null) { + return false; + } + if (getCachedCodec() != null ? !getCachedCodec().equals(that.getCachedCodec()) : that.getCachedCodec() != null) { return false; } @@ -199,10 +220,19 @@ public int hashCode() { result = 31 * result + (getPropertySerialization() != null ? getPropertySerialization().hashCode() : 0); result = 31 * result + (useDiscriminator != null ? 
useDiscriminator.hashCode() : 0); result = 31 * result + (getPropertyAccessor() != null ? getPropertyAccessor().hashCode() : 0); + result = 31 * result + (getError() != null ? getError().hashCode() : 0); result = 31 * result + (getCachedCodec() != null ? getCachedCodec().hashCode() : 0); return result; } + boolean hasError() { + return error != null; + } + + String getError() { + return error; + } + PropertySerialization getPropertySerialization() { return propertySerialization; } diff --git a/bson/src/main/org/bson/codecs/pojo/PropertyModelBuilder.java b/bson/src/main/org/bson/codecs/pojo/PropertyModelBuilder.java index 2a605cf6f11..084e3908798 100644 --- a/bson/src/main/org/bson/codecs/pojo/PropertyModelBuilder.java +++ b/bson/src/main/org/bson/codecs/pojo/PropertyModelBuilder.java @@ -16,6 +16,7 @@ package org.bson.codecs.pojo; +import org.bson.BsonType; import org.bson.codecs.Codec; import java.lang.annotation.Annotation; @@ -45,6 +46,8 @@ public final class PropertyModelBuilder { private List readAnnotations = emptyList(); private List writeAnnotations = emptyList(); private Boolean discriminatorEnabled; + private String error; + private BsonType bsonRepresentation; PropertyModelBuilder() { } @@ -228,6 +231,28 @@ public PropertyModelBuilder propertyAccessor(final PropertyAccessor proper return this; } + /** + * Returns the BsonRepresentation + * + * @return the BsonRepresentation + * @since 4.2 + */ + public BsonType getBsonRepresentation() { + return bsonRepresentation; + } + + /** + * Sets the BsonRepresentation + * + * @param bsonRepresentation the BsonRepresentation + * @return this + * @since 4.2 + */ + public PropertyModelBuilder bsonRepresentation(final BsonType bsonRepresentation) { + this.bsonRepresentation = bsonRepresentation; + return this; + } + /** * Creates the {@link PropertyModel}. 
* @@ -246,7 +271,9 @@ public PropertyModel build() { codec, stateNotNull("propertySerialization", propertySerialization), discriminatorEnabled, - stateNotNull("propertyAccessor", propertyAccessor)); + stateNotNull("propertyAccessor", propertyAccessor), + error, + bsonRepresentation); } @Override @@ -267,4 +294,9 @@ PropertyModelBuilder typeData(final TypeData typeData) { this.typeData = notNull("typeData", typeData); return this; } + + PropertyModelBuilder setError(final String error) { + this.error = error; + return this; + } } diff --git a/bson/src/main/org/bson/codecs/pojo/PropertyModelSerializationInlineImpl.java b/bson/src/main/org/bson/codecs/pojo/PropertyModelSerializationInlineImpl.java new file mode 100644 index 00000000000..77d064af16f --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/PropertyModelSerializationInlineImpl.java @@ -0,0 +1,36 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo; + +class PropertyModelSerializationInlineImpl implements PropertySerialization { + + private final PropertySerialization wrapped; + + PropertyModelSerializationInlineImpl(final PropertySerialization wrapped) { + this.wrapped = wrapped; + } + + @Override + public boolean shouldSerialize(final T value) { + return wrapped.shouldSerialize(value); + } + + @Override + public boolean inline() { + return true; + } +} diff --git a/bson/src/main/org/bson/codecs/pojo/PropertyReflectionUtils.java b/bson/src/main/org/bson/codecs/pojo/PropertyReflectionUtils.java index 7f6017a3dc9..6889a6579f8 100644 --- a/bson/src/main/org/bson/codecs/pojo/PropertyReflectionUtils.java +++ b/bson/src/main/org/bson/codecs/pojo/PropertyReflectionUtils.java @@ -22,6 +22,7 @@ import java.util.List; import static java.lang.reflect.Modifier.isPublic; +import static java.lang.reflect.Modifier.isStatic; final class PropertyReflectionUtils { private PropertyReflectionUtils() {} @@ -31,7 +32,7 @@ private PropertyReflectionUtils() {} private static final String SET_PREFIX = "set"; static boolean isGetter(final Method method) { - if (method.getParameterTypes().length > 0) { + if (method.getParameterCount() > 0) { return false; } else if (method.getName().startsWith(GET_PREFIX) && method.getName().length() > GET_PREFIX.length()) { return Character.isUpperCase(method.getName().charAt(GET_PREFIX.length())); @@ -43,7 +44,7 @@ static boolean isGetter(final Method method) { static boolean isSetter(final Method method) { if (method.getName().startsWith(SET_PREFIX) && method.getName().length() > SET_PREFIX.length() - && method.getParameterTypes().length == 1) { + && method.getParameterCount() == 1) { return Character.isUpperCase(method.getName().charAt(SET_PREFIX.length())); } return false; @@ -51,33 +52,47 @@ static boolean isSetter(final Method method) { static String toPropertyName(final Method method) { String name = method.getName(); - String propertyName = 
name.substring(name.startsWith(IS_PREFIX) ? 2 : 3, name.length()); + String propertyName = name.substring(name.startsWith(IS_PREFIX) ? 2 : 3); char[] chars = propertyName.toCharArray(); chars[0] = Character.toLowerCase(chars[0]); return new String(chars); } static PropertyMethods getPropertyMethods(final Class clazz) { - List setters = new ArrayList(); - List getters = new ArrayList(); - for (Method method : clazz.getDeclaredMethods()) { - // Note that if you override a getter to provide a more specific return type, getting the declared methods - // on the subclass will return the overridden method as well as the method that was overridden from - // the super class. This original method is copied over into the subclass as a bridge method, so we're - // excluding them here to avoid multiple getters of the same property with different return types - if (isPublic(method.getModifiers()) && !method.isBridge()) { - if (isGetter(method)) { - getters.add(method); - } else if (isSetter(method)) { - // Setters are a bit more tricky - don't do anything fancy here - setters.add(method); + List setters = new ArrayList<>(); + List getters = new ArrayList<>(); + + // get all the default method from interface + for (Class i : clazz.getInterfaces()) { + for (Method method : i.getDeclaredMethods()) { + if (method.isDefault()) { + verifyAddMethodToList(method, getters, setters); } } } + for (Method method : clazz.getDeclaredMethods()) { + verifyAddMethodToList(method, getters, setters); + } + return new PropertyMethods(getters, setters); } + private static void verifyAddMethodToList(final Method method, final List getters, final List setters) { + // Note that if you override a getter to provide a more specific return type, getting the declared methods + // on the subclass will return the overridden method as well as the method that was overridden from + // the super class. 
This original method is copied over into the subclass as a bridge method, so we're + // excluding them here to avoid multiple getters of the same property with different return types + if (isPublic(method.getModifiers()) && !isStatic(method.getModifiers()) && !method.isBridge()) { + if (isGetter(method)) { + getters.add(method); + } else if (isSetter(method)) { + // Setters are a bit more tricky - don't do anything fancy here + setters.add(method); + } + } + } + static class PropertyMethods { private final Collection getterMethods; private final Collection setterMethods; diff --git a/bson/src/main/org/bson/codecs/pojo/PropertySerialization.java b/bson/src/main/org/bson/codecs/pojo/PropertySerialization.java index 46e6324131f..471be733c59 100644 --- a/bson/src/main/org/bson/codecs/pojo/PropertySerialization.java +++ b/bson/src/main/org/bson/codecs/pojo/PropertySerialization.java @@ -31,4 +31,12 @@ public interface PropertySerialization { * @return true if the value should be serialized */ boolean shouldSerialize(T value); + + /** + * @return true if serialized inline + * @since 4.6 + */ + default boolean inline() { + return false; + } } diff --git a/bson/src/main/org/bson/codecs/pojo/TypeData.java b/bson/src/main/org/bson/codecs/pojo/TypeData.java index a082b610e7b..aebdba4c08f 100644 --- a/bson/src/main/org/bson/codecs/pojo/TypeData.java +++ b/bson/src/main/org/bson/codecs/pojo/TypeData.java @@ -21,6 +21,7 @@ import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; +import java.lang.reflect.WildcardType; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -44,7 +45,7 @@ final class TypeData implements TypeWithTypeParameters { * @return the builder */ public static Builder builder(final Class type) { - return new Builder(notNull("type", type)); + return new Builder<>(notNull("type", type)); } public static TypeData newInstance(final Method method) { @@ -79,6 +80,8 @@ private 
static void getNestedTypeData(final TypeData.Builder builder, fin getNestedTypeData(paramBuilder, argType); } builder.addTypeParameter(paramBuilder.build()); + } else if (type instanceof WildcardType) { + builder.addTypeParameter(TypeData.builder((Class) ((WildcardType) type).getUpperBounds()[0]).build()); } else if (type instanceof TypeVariable) { builder.addTypeParameter(TypeData.builder(Object.class).build()); } else if (type instanceof Class) { @@ -109,7 +112,7 @@ public List> getTypeParameters() { */ public static final class Builder { private final Class type; - private final List> typeParameters = new ArrayList>(); + private final List> typeParameters = new ArrayList<>(); private Builder(final Class type) { this.type = type; @@ -145,7 +148,7 @@ public Builder addTypeParameters(final List> typeParameters) { * @return the class type data */ public TypeData build() { - return new TypeData(type, Collections.unmodifiableList(typeParameters)); + return new TypeData<>(type, Collections.unmodifiableList(typeParameters)); } } @@ -224,7 +227,7 @@ private Class boxType(final Class clazz) { private static final Map, Class> PRIMITIVE_CLASS_MAP; static { - Map, Class> map = new HashMap, Class>(); + Map, Class> map = new HashMap<>(); map.put(boolean.class, Boolean.class); map.put(byte.class, Byte.class); map.put(char.class, Character.class); @@ -233,6 +236,7 @@ private Class boxType(final Class clazz) { map.put(int.class, Integer.class); map.put(long.class, Long.class); map.put(short.class, Short.class); + map.put(void.class, Void.class); PRIMITIVE_CLASS_MAP = map; } } diff --git a/bson/src/main/org/bson/codecs/pojo/TypeParameterMap.java b/bson/src/main/org/bson/codecs/pojo/TypeParameterMap.java index 60cdd8a6eec..7da10b83c81 100644 --- a/bson/src/main/org/bson/codecs/pojo/TypeParameterMap.java +++ b/bson/src/main/org/bson/codecs/pojo/TypeParameterMap.java @@ -26,7 +26,7 @@ * Maps the index of a class's generic parameter type index to a property's. 
*/ final class TypeParameterMap { - private final Map propertyToClassParamIndexMap; + private final Map> propertyToClassParamIndexMap; /** * Creates a new builder for the TypeParameterMap @@ -44,7 +44,7 @@ static Builder builder() { * * @return a mapping of property type parameter index to the class type parameter index. */ - Map getPropertyToClassParamIndexMap() { + Map> getPropertyToClassParamIndexMap() { return propertyToClassParamIndexMap; } @@ -56,7 +56,7 @@ boolean hasTypeParameters() { * A builder for mapping field type parameter indices to the class type parameter indices */ static final class Builder { - private final Map propertyToClassParamIndexMap = new HashMap(); + private final Map> propertyToClassParamIndexMap = new HashMap<>(); private Builder() { } @@ -68,7 +68,7 @@ private Builder() { * @return this */ Builder addIndex(final int classTypeParameterIndex) { - propertyToClassParamIndexMap.put(-1, classTypeParameterIndex); + propertyToClassParamIndexMap.put(-1, Either.left(classTypeParameterIndex)); return this; } @@ -80,7 +80,20 @@ Builder addIndex(final int classTypeParameterIndex) { * @return this */ Builder addIndex(final int propertyTypeParameterIndex, final int classTypeParameterIndex) { - propertyToClassParamIndexMap.put(propertyTypeParameterIndex, classTypeParameterIndex); + propertyToClassParamIndexMap.put(propertyTypeParameterIndex, Either.left(classTypeParameterIndex)); + return this; + } + + + /** + * Adds a mapping that represents the property + * + * @param propertyTypeParameterIndex the property's type parameter index + * @param typeParameterMap the sub class's type parameter map + * @return this + */ + Builder addIndex(final int propertyTypeParameterIndex, final TypeParameterMap typeParameterMap) { + propertyToClassParamIndexMap.put(propertyTypeParameterIndex, Either.right(typeParameterMap)); return this; } @@ -125,7 +138,7 @@ public int hashCode() { return getPropertyToClassParamIndexMap().hashCode(); } - private 
TypeParameterMap(final Map propertyToClassParamIndexMap) { + private TypeParameterMap(final Map> propertyToClassParamIndexMap) { this.propertyToClassParamIndexMap = unmodifiableMap(propertyToClassParamIndexMap); } } diff --git a/bson/src/main/org/bson/codecs/pojo/annotations/BsonCreator.java b/bson/src/main/org/bson/codecs/pojo/annotations/BsonCreator.java index 82441d1aeb1..6f1627a17d5 100644 --- a/bson/src/main/org/bson/codecs/pojo/annotations/BsonCreator.java +++ b/bson/src/main/org/bson/codecs/pojo/annotations/BsonCreator.java @@ -22,9 +22,10 @@ import java.lang.annotation.Target; /** - * An annotation that configures a constructor or method as the Creator for the Pojo. + * An annotation that configures a constructor or method as the creator for the POJO. * - *

    Note: Requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}

    + *

    For POJOs, requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}

    + *

    For Java records, the annotation is not supported.

    * * @since 3.5 * @see org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION diff --git a/bson/src/main/org/bson/codecs/pojo/annotations/BsonDiscriminator.java b/bson/src/main/org/bson/codecs/pojo/annotations/BsonDiscriminator.java index 85da49bf0ba..81e3c972771 100644 --- a/bson/src/main/org/bson/codecs/pojo/annotations/BsonDiscriminator.java +++ b/bson/src/main/org/bson/codecs/pojo/annotations/BsonDiscriminator.java @@ -26,7 +26,8 @@ /** * An annotation that configures the discriminator key and value for a class. * - *

    Note: Requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}

    + *

    For POJOs, requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}

    + *

    For Java records, the annotation is not supported.

    * * @since 3.5 * @see org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION diff --git a/bson/src/main/org/bson/codecs/pojo/annotations/BsonExtraElements.java b/bson/src/main/org/bson/codecs/pojo/annotations/BsonExtraElements.java new file mode 100644 index 00000000000..1ae25e5da3d --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/annotations/BsonExtraElements.java @@ -0,0 +1,42 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * An annotation that configures a property to be used as storage for any extra BSON elements that are not already mapped to other + * properties. All extra elements will be encoded from the BSON document into the annotated property, and encoded from the annotated + * property into the BSON document. + * + *

    Can only be used on a single field in a POJO. Field must be a {@code Map} instance eg. {@code Document} or + * {@code BsonDocument}. + *

    For POJOs, requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}

    + *

    For Java records, the annotation is not yet supported.

    + * + * @since 4.7 + * @see org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER}) +public @interface BsonExtraElements { +} diff --git a/bson/src/main/org/bson/codecs/pojo/annotations/BsonId.java b/bson/src/main/org/bson/codecs/pojo/annotations/BsonId.java index af6d4fb4fd9..25049e69e5a 100644 --- a/bson/src/main/org/bson/codecs/pojo/annotations/BsonId.java +++ b/bson/src/main/org/bson/codecs/pojo/annotations/BsonId.java @@ -23,9 +23,10 @@ import java.lang.annotation.Target; /** - * An annotation that configures the property as the id property for a {@link org.bson.codecs.pojo.ClassModel}. + * An annotation that configures the property as the id property for a {@link org.bson.codecs.pojo.ClassModel} or a Java record. * - *

    Note: Requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}

    + *

<p>For POJOs, requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}</p>

    + *

<p>For Java records, the annotation is only supported on the record component.</p>

    * * @since 3.5 * @see org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION diff --git a/bson/src/main/org/bson/codecs/pojo/annotations/BsonIgnore.java b/bson/src/main/org/bson/codecs/pojo/annotations/BsonIgnore.java index 019526b135e..96b91051995 100644 --- a/bson/src/main/org/bson/codecs/pojo/annotations/BsonIgnore.java +++ b/bson/src/main/org/bson/codecs/pojo/annotations/BsonIgnore.java @@ -25,13 +25,14 @@ /** * An annotation that configures a property to be ignored when reading and writing to BSON * - *

    Note: Requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}

    + *

<p>For POJOs, requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}</p>

    + *

<p>For Java records, the annotation is not supported.</p>

    * * @since 3.5 * @see org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION */ @Documented -@Target({ElementType.METHOD, ElementType.FIELD}) +@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER}) @Retention(RetentionPolicy.RUNTIME) public @interface BsonIgnore { } diff --git a/bson/src/main/org/bson/codecs/pojo/annotations/BsonProperty.java b/bson/src/main/org/bson/codecs/pojo/annotations/BsonProperty.java index 8ce352a16ff..7c9c7b9c22c 100644 --- a/bson/src/main/org/bson/codecs/pojo/annotations/BsonProperty.java +++ b/bson/src/main/org/bson/codecs/pojo/annotations/BsonProperty.java @@ -27,7 +27,8 @@ /** * An annotation that configures a property. * - *

    Note: Requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}

    + *

<p>For POJOs, requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}</p>

    + *

<p>For Java records, the annotation is only supported on the record component.</p>

    * * @since 3.5 * @see org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION @@ -40,6 +41,7 @@ * The name of the property. * *

    + * Note: Regarding POJOs:
    * For asymmetrical property names, the context of the {@code BsonProperty} can be important. * For example, when used with {@code @BsonCreator} the value will relate to the read name. * When used directly on a field it will set both the read name if unset and the write name if unset. diff --git a/bson/src/main/org/bson/codecs/pojo/annotations/BsonRepresentation.java b/bson/src/main/org/bson/codecs/pojo/annotations/BsonRepresentation.java new file mode 100644 index 00000000000..465e64d016f --- /dev/null +++ b/bson/src/main/org/bson/codecs/pojo/annotations/BsonRepresentation.java @@ -0,0 +1,47 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.annotations; + +import org.bson.BsonType; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * An annotation that specifies what type the property is stored as in the database. + * + *

<p>For POJOs, requires the {@link org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION}</p>

    + *

<p>For Java records, the annotation is only supported on the record component.</p>

    + *

<p>For Kotlin data classes, the annotation is only supported on the constructor parameter.</p>

    + * + * @since 4.2 + * @see org.bson.codecs.pojo.Conventions#ANNOTATION_CONVENTION + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER}) +public @interface BsonRepresentation { + /** + * The type that the property is stored as in the database. + * + * @return the type that the property should be stored as. + */ + BsonType value(); +} diff --git a/bson/src/main/org/bson/conversions/Bson.java b/bson/src/main/org/bson/conversions/Bson.java index 4b5c22a691a..6ef749b7b3c 100644 --- a/bson/src/main/org/bson/conversions/Bson.java +++ b/bson/src/main/org/bson/conversions/Bson.java @@ -17,7 +17,20 @@ package org.bson.conversions; import org.bson.BsonDocument; +import org.bson.codecs.BsonCodecProvider; +import org.bson.codecs.BsonValueCodecProvider; +import org.bson.codecs.CollectionCodecProvider; +import org.bson.codecs.DocumentCodecProvider; +import org.bson.codecs.EnumCodecProvider; +import org.bson.codecs.IterableCodecProvider; +import org.bson.codecs.JsonObjectCodecProvider; +import org.bson.codecs.MapCodecProvider; +import org.bson.codecs.ValueCodecProvider; import org.bson.codecs.configuration.CodecRegistry; +import org.bson.codecs.jsr310.Jsr310CodecProvider; + +import static java.util.Arrays.asList; +import static org.bson.codecs.configuration.CodecRegistries.fromProviders; /** * An interface for types that are able to render themselves into a {@code BsonDocument}. @@ -26,7 +39,40 @@ */ public interface Bson { /** - * Render the filter into a BsonDocument. + * This registry includes the following providers: + *
      + *
<li>{@link ValueCodecProvider}</li>
+ * <li>{@link BsonValueCodecProvider}</li>
+ * <li>{@link DocumentCodecProvider}</li>
+ * <li>{@link CollectionCodecProvider}</li>
+ * <li>{@link IterableCodecProvider}</li>
+ * <li>{@link MapCodecProvider}</li>
+ * <li>{@link Jsr310CodecProvider}</li>
+ * <li>{@link JsonObjectCodecProvider}</li>
+ * <li>{@link BsonCodecProvider}</li>
+ * <li>{@link EnumCodecProvider}</li>
+ *
    + *

    + * Additional providers may be added in a future release. + *

    + * + * @since 4.2 + */ + CodecRegistry DEFAULT_CODEC_REGISTRY = + fromProviders(asList( + new ValueCodecProvider(), + new BsonValueCodecProvider(), + new DocumentCodecProvider(), + new CollectionCodecProvider(), + new IterableCodecProvider(), + new MapCodecProvider(), + new Jsr310CodecProvider(), + new JsonObjectCodecProvider(), + new BsonCodecProvider(), + new EnumCodecProvider())); + + /** + * Render into a BsonDocument. * * @param documentClass the document class in scope for the collection. This parameter may be ignored, but it may be used to alter * the structure of the returned {@code BsonDocument} based on some knowledge of the document class. @@ -36,4 +82,18 @@ public interface Bson { * @return the BsonDocument */ BsonDocument toBsonDocument(Class documentClass, CodecRegistry codecRegistry); + + /** + * Render into a BsonDocument using a document class and codec registry appropriate for the implementation. + *

    + * The default implementation of this method calls {@link #toBsonDocument(Class, CodecRegistry)} with the + * {@link BsonDocument} class as the first argument and {@link #DEFAULT_CODEC_REGISTRY} as the second argument. + *

    + * + * @return the BsonDocument + * @since 4.2 + */ + default BsonDocument toBsonDocument() { + return toBsonDocument(BsonDocument.class, DEFAULT_CODEC_REGISTRY); + } } diff --git a/bson/src/main/org/bson/diagnostics/JULLogger.java b/bson/src/main/org/bson/diagnostics/JULLogger.java deleted file mode 100644 index 65595112140..00000000000 --- a/bson/src/main/org/bson/diagnostics/JULLogger.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson.diagnostics; - -import java.util.logging.Level; - -import static java.util.logging.Level.FINE; -import static java.util.logging.Level.FINER; -import static java.util.logging.Level.INFO; -import static java.util.logging.Level.SEVERE; -import static java.util.logging.Level.WARNING; - -class JULLogger implements Logger { - - private final java.util.logging.Logger delegate; - - JULLogger(final String name) { - this.delegate = java.util.logging.Logger.getLogger(name); - } - - @Override - public String getName() { - return delegate.getName(); - } - - @Override - public boolean isTraceEnabled() { - return isEnabled(FINER); - } - - @Override - public void trace(final String msg) { - log(FINER, msg); - } - - @Override - public void trace(final String msg, final Throwable t) { - log(FINER, msg, t); - } - - @Override - public boolean isDebugEnabled() { - return isEnabled(FINE); - } - - @Override - public void debug(final String msg) { - log(FINE, msg); - } - - @Override - public void debug(final String msg, final Throwable t) { - log(FINE, msg, t); - } - - @Override - public boolean isInfoEnabled() { - return delegate.isLoggable(INFO); - } - - @Override - public void info(final String msg) { - log(INFO, msg); - } - - @Override - public void info(final String msg, final Throwable t) { - log(INFO, msg, t); - } - - @Override - public boolean isWarnEnabled() { - return delegate.isLoggable(WARNING); - } - - @Override - public void warn(final String msg) { - log(WARNING, msg); - } - - @Override - public void warn(final String msg, final Throwable t) { - log(WARNING, msg, t); - } - - - @Override - public boolean isErrorEnabled() { - return delegate.isLoggable(SEVERE); - } - - @Override - public void error(final String msg) { - log(SEVERE, msg); - } - - @Override - public void error(final String msg, final Throwable t) { - log(SEVERE, msg, t); - } - - - private boolean isEnabled(final Level level) { - return delegate.isLoggable(level); - } - - private void 
log(final Level level, final String msg) { - delegate.log(level, msg); - } - - public void log(final Level level, final String msg, final Throwable t) { - delegate.log(level, msg, t); - } -} diff --git a/bson/src/main/org/bson/diagnostics/Logger.java b/bson/src/main/org/bson/diagnostics/Logger.java index 2ff055ddb4e..8916dd2f258 100644 --- a/bson/src/main/org/bson/diagnostics/Logger.java +++ b/bson/src/main/org/bson/diagnostics/Logger.java @@ -34,7 +34,9 @@ public interface Logger { * @return True if this Logger is enabled for the TRACE level, false otherwise. * @since 1.4 */ - boolean isTraceEnabled(); + default boolean isTraceEnabled() { + return false; + } /** * Log a message at the TRACE level. @@ -42,7 +44,8 @@ public interface Logger { * @param msg the message string to be logged * @since 1.4 */ - void trace(String msg); + default void trace(String msg) { + } /** * Log an exception (throwable) at the TRACE level with an accompanying message. @@ -51,22 +54,25 @@ public interface Logger { * @param t the exception (throwable) to log * @since 1.4 */ - void trace(String msg, Throwable t); + default void trace(String msg, Throwable t) { + } /** * Is the logger instance enabled for the DEBUG level? * * @return True if this Logger is enabled for the DEBUG level, false otherwise. */ - boolean isDebugEnabled(); - + default boolean isDebugEnabled() { + return false; + } /** * Log a message at the DEBUG level. * * @param msg the message string to be logged */ - void debug(String msg); + default void debug(String msg) { + } /** @@ -75,22 +81,25 @@ public interface Logger { * @param msg the message accompanying the exception * @param t the exception (throwable) to log */ - void debug(String msg, Throwable t); + default void debug(String msg, Throwable t) { + } /** * Is the logger instance enabled for the INFO level? * * @return True if this Logger is enabled for the INFO level, false otherwise. 
*/ - boolean isInfoEnabled(); - + default boolean isInfoEnabled() { + return false; + } /** * Log a message at the INFO level. * * @param msg the message string to be logged */ - void info(String msg); + default void info(String msg) { + } /** * Log an exception (throwable) at the INFO level with an accompanying message. @@ -98,21 +107,25 @@ public interface Logger { * @param msg the message accompanying the exception * @param t the exception (throwable) to log */ - void info(String msg, Throwable t); + default void info(String msg, Throwable t) { + } /** * Is the logger instance enabled for the WARN level? * * @return True if this Logger is enabled for the WARN level, false otherwise. */ - boolean isWarnEnabled(); + default boolean isWarnEnabled() { + return false; + } /** * Log a message at the WARN level. * * @param msg the message string to be logged */ - void warn(String msg); + default void warn(String msg) { + } /** * Log an exception (throwable) at the WARN level with an accompanying message. @@ -120,21 +133,25 @@ public interface Logger { * @param msg the message accompanying the exception * @param t the exception (throwable) to log */ - void warn(String msg, Throwable t); + default void warn(String msg, Throwable t) { + } /** * Is the logger instance enabled for the ERROR level? * * @return True if this Logger is enabled for the ERROR level, false otherwise. */ - boolean isErrorEnabled(); + default boolean isErrorEnabled() { + return false; + } /** * Log a message at the ERROR level. * * @param msg the message string to be logged */ - void error(String msg); + default void error(String msg) { + } /** * Log an exception (throwable) at the ERROR level with an accompanying message. 
@@ -142,5 +159,6 @@ public interface Logger { * @param msg the message accompanying the exception * @param t the exception (throwable) to log */ - void error(String msg, Throwable t); + default void error(String msg, Throwable t) { + } } diff --git a/bson/src/main/org/bson/diagnostics/Loggers.java b/bson/src/main/org/bson/diagnostics/Loggers.java index ad3bec649ed..972a45c3773 100644 --- a/bson/src/main/org/bson/diagnostics/Loggers.java +++ b/bson/src/main/org/bson/diagnostics/Loggers.java @@ -27,7 +27,7 @@ public final class Loggers { /** * The prefix for all logger names. */ - public static final String PREFIX = "org.bson"; + private static final String PREFIX = "org.bson"; private static final boolean USE_SLF4J = shouldUseSLF4J(); @@ -49,7 +49,7 @@ public static Logger getLogger(final String suffix) { if (USE_SLF4J) { return new SLF4JLogger(name); } else { - return new JULLogger(name); + return new NoOpLogger(name); } } @@ -58,6 +58,8 @@ private static boolean shouldUseSLF4J() { Class.forName("org.slf4j.Logger"); return true; } catch (ClassNotFoundException e) { + java.util.logging.Logger.getLogger("org.bson") + .warning(String.format("SLF4J not found on the classpath. Logging is disabled for the '%s' component", PREFIX)); return false; } } diff --git a/bson/src/main/org/bson/diagnostics/NoOpLogger.java b/bson/src/main/org/bson/diagnostics/NoOpLogger.java new file mode 100644 index 00000000000..ec31831be7e --- /dev/null +++ b/bson/src/main/org/bson/diagnostics/NoOpLogger.java @@ -0,0 +1,33 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.diagnostics; + +/** + * A logger that disables all levels and logs nothing + */ +class NoOpLogger implements Logger { + private final String name; + + NoOpLogger(final String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } +} diff --git a/bson/src/main/org/bson/internal/Base64.java b/bson/src/main/org/bson/internal/Base64.java deleted file mode 100644 index d6371f73f44..00000000000 --- a/bson/src/main/org/bson/internal/Base64.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.internal; - -/** - *

    Provides Base64 encoding and decoding.

    - *

    This class implements Base64 encoding

    - *

    Thanks to Apache Commons project. This class refactored from org.apache.commons.codec.binary

    - *
    - * - * @since 3.5 - */ -public final class Base64 { - private static final int BYTES_PER_UNENCODED_BLOCK = 3; - private static final int BYTES_PER_ENCODED_BLOCK = 4; - - /** - * Mask used to extract 6 bits, used when encoding - */ - private static final int SIX_BIT_MASK = 0x3f; - - /** - * padding char - */ - private static final byte PAD = '='; - - /** - * This array is a lookup table that translates 6-bit positive integer index values into their "Base64 Alphabet" - * equivalents as specified in Table 1 of RFC 2045. - */ - private static final byte[] ENCODE_TABLE = {'A', 'B', 'C', 'D', 'E', 'F', - 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', - 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', - 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', - 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', - '6', '7', '8', '9', '+', '/'}; - - private static final int[] DECODE_TABLE = new int[128]; - - static { - for (int i = 0; i < ENCODE_TABLE.length; i++) { - DECODE_TABLE[ENCODE_TABLE[i]] = i; - } - } - - /** - * Decodes the given Base64-encoded string. - * - * @param s the Base64-encoded string - * @return the decoded byte array - */ - public static byte[] decode(final String s) { - int delta = s.endsWith("==") ? 2 : s.endsWith("=") ? 
1 : 0; - byte[] buffer = new byte[s.length() * BYTES_PER_UNENCODED_BLOCK / BYTES_PER_ENCODED_BLOCK - delta]; - int mask = 0xFF; - int pos = 0; - for (int i = 0; i < s.length(); i += BYTES_PER_ENCODED_BLOCK) { - int c0 = DECODE_TABLE[s.charAt(i)]; - int c1 = DECODE_TABLE[s.charAt(i + 1)]; - buffer[pos++] = (byte) (((c0 << 2) | (c1 >> 4)) & mask); - if (pos >= buffer.length) { - return buffer; - } - int c2 = DECODE_TABLE[s.charAt(i + 2)]; - buffer[pos++] = (byte) (((c1 << 4) | (c2 >> 2)) & mask); - if (pos >= buffer.length) { - return buffer; - } - int c3 = DECODE_TABLE[s.charAt(i + 3)]; - buffer[pos++] = (byte) (((c2 << 6) | c3) & mask); - } - return buffer; - } - - /** - * Encodes the given byte array into a Base64-encoded string. - * - * - * @param in the byte array - * @return the Base64-encoded string - */ - public static String encode(final byte[] in) { - - int modulus = 0; - int bitWorkArea = 0; - int numEncodedBytes = (in.length / BYTES_PER_UNENCODED_BLOCK) * BYTES_PER_ENCODED_BLOCK - + ((in.length % BYTES_PER_UNENCODED_BLOCK == 0) ? 
0 : 4); - - byte[] buffer = new byte[numEncodedBytes]; - int pos = 0; - - for (int b : in) { - modulus = (modulus + 1) % BYTES_PER_UNENCODED_BLOCK; - - if (b < 0) { - b += 256; - } - - bitWorkArea = (bitWorkArea << 8) + b; // BITS_PER_BYTE - if (0 == modulus) { // 3 bytes = 24 bits = 4 * 6 bits to extract - buffer[pos++] = ENCODE_TABLE[(bitWorkArea >> 18) & SIX_BIT_MASK]; - buffer[pos++] = ENCODE_TABLE[(bitWorkArea >> 12) & SIX_BIT_MASK]; - buffer[pos++] = ENCODE_TABLE[(bitWorkArea >> 6) & SIX_BIT_MASK]; - buffer[pos++] = ENCODE_TABLE[bitWorkArea & SIX_BIT_MASK]; - } - } - - switch (modulus) { // 0-2 - case 1: // 8 bits = 6 + 2 - buffer[pos++] = ENCODE_TABLE[(bitWorkArea >> 2) & SIX_BIT_MASK]; // top 6 bits - buffer[pos++] = ENCODE_TABLE[(bitWorkArea << 4) & SIX_BIT_MASK]; // remaining 2 - buffer[pos++] = PAD; - buffer[pos] = PAD; // Last entry no need to ++ - break; - - case 2: // 16 bits = 6 + 6 + 4 - buffer[pos++] = ENCODE_TABLE[(bitWorkArea >> 10) & SIX_BIT_MASK]; - buffer[pos++] = ENCODE_TABLE[(bitWorkArea >> 4) & SIX_BIT_MASK]; - buffer[pos++] = ENCODE_TABLE[(bitWorkArea << 2) & SIX_BIT_MASK]; - buffer[pos] = PAD; // Last entry no need to ++ - break; - default: - break; - } - - return byteArrayToString(buffer); - } - - @SuppressWarnings("deprecation") - private static String byteArrayToString(final byte[] buffer) { - return new String(buffer, 0, 0, buffer.length); - } - - private Base64() { - } -} diff --git a/bson/src/main/org/bson/internal/BsonUtil.java b/bson/src/main/org/bson/internal/BsonUtil.java new file mode 100644 index 00000000000..6879c4c0e12 --- /dev/null +++ b/bson/src/main/org/bson/internal/BsonUtil.java @@ -0,0 +1,65 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.internal; + +import org.bson.BsonArray; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonJavaScriptWithScope; +import org.bson.BsonValue; + +/** + *

<p>This class is not part of the public API and may be removed or changed at any time</p>

    + */ +public final class BsonUtil { + public static BsonDocument mutableDeepCopy(final BsonDocument original) { + BsonDocument copy = new BsonDocument(original.size()); + original.forEach((key, value) -> copy.put(key, mutableDeepCopy(value))); + return copy; + } + + private static BsonArray mutableDeepCopy(final BsonArray original) { + BsonArray copy = new BsonArray(original.size()); + original.forEach(element -> copy.add(mutableDeepCopy(element))); + return copy; + } + + private static BsonBinary mutableDeepCopy(final BsonBinary original) { + return new BsonBinary(original.getType(), original.getData().clone()); + } + + private static BsonJavaScriptWithScope mutableDeepCopy(final BsonJavaScriptWithScope original) { + return new BsonJavaScriptWithScope(original.getCode(), mutableDeepCopy(original.getScope())); + } + + private static BsonValue mutableDeepCopy(final BsonValue original) { + switch (original.getBsonType()) { + case DOCUMENT: + return mutableDeepCopy(original.asDocument()); + case ARRAY: + return mutableDeepCopy(original.asArray()); + case BINARY: + return mutableDeepCopy(original.asBinary()); + case JAVASCRIPT_WITH_SCOPE: + return mutableDeepCopy(original.asJavaScriptWithScope()); + default: + return original; + } + } + + private BsonUtil() { + } +} diff --git a/bson/src/main/org/bson/internal/ChildCodecRegistry.java b/bson/src/main/org/bson/internal/ChildCodecRegistry.java new file mode 100644 index 00000000000..73bb46630de --- /dev/null +++ b/bson/src/main/org/bson/internal/ChildCodecRegistry.java @@ -0,0 +1,139 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.internal; + + +import org.bson.codecs.Codec; +import org.bson.codecs.configuration.CodecRegistry; + +import java.lang.reflect.Type; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +import static java.lang.String.format; +import static org.bson.assertions.Assertions.isTrueArgument; +import static org.bson.assertions.Assertions.notNull; + +// An implementation of CodecRegistry that is used to detect cyclic dependencies between Codecs +class ChildCodecRegistry implements CodecRegistry { + + private final ChildCodecRegistry parent; + private final CycleDetectingCodecRegistry registry; + private final Class codecClass; + private final List types; + + ChildCodecRegistry(final CycleDetectingCodecRegistry registry, final Class codecClass, final List types) { + this.codecClass = codecClass; + this.parent = null; + this.registry = registry; + this.types = types; + } + + private ChildCodecRegistry(final ChildCodecRegistry parent, final Class codecClass, final List types) { + this.parent = parent; + this.codecClass = codecClass; + this.registry = parent.registry; + this.types = types; + } + + public Class getCodecClass() { + return codecClass; + } + + public Optional> getTypes() { + return Optional.ofNullable(types); + } + + // Gets a Codec, but if it detects a cyclic dependency, return a LazyCodec which breaks the chain. 
+ public Codec get(final Class clazz) { + if (hasCycles(clazz)) { + return new LazyCodec<>(registry, clazz, null); + } else { + return registry.get(new ChildCodecRegistry<>(this, clazz, null)); + } + } + + @Override + public Codec get(final Class clazz, final List typeArguments) { + notNull("typeArguments", typeArguments); + isTrueArgument(format("typeArguments size should equal the number of type parameters in class %s, but is %d", + clazz, typeArguments.size()), + clazz.getTypeParameters().length == typeArguments.size()); + if (hasCycles(clazz)) { + return new LazyCodec<>(registry, clazz, typeArguments); + } else { + return registry.get(new ChildCodecRegistry<>(this, clazz, typeArguments)); + } + } + + @Override + public Codec get(final Class clazz, final CodecRegistry registry) { + return get(clazz, Collections.emptyList(), registry); + } + + @Override + public Codec get(final Class clazz, final List typeArguments, final CodecRegistry registry) { + return this.registry.get(clazz, typeArguments, registry); + } + + private Boolean hasCycles(final Class theClass) { + ChildCodecRegistry current = this; + while (current != null) { + if (current.codecClass.equals(theClass)) { + return true; + } + + current = current.parent; + } + + return false; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ChildCodecRegistry that = (ChildCodecRegistry) o; + + if (!codecClass.equals(that.codecClass)) { + return false; + } + if (!Objects.equals(parent, that.parent)) { + return false; + } + if (!registry.equals(that.registry)) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = parent != null ? 
parent.hashCode() : 0; + result = 31 * result + registry.hashCode(); + result = 31 * result + codecClass.hashCode(); + return result; + } +} diff --git a/bson/src/main/org/bson/internal/CodecCache.java b/bson/src/main/org/bson/internal/CodecCache.java new file mode 100644 index 00000000000..bec178559e3 --- /dev/null +++ b/bson/src/main/org/bson/internal/CodecCache.java @@ -0,0 +1,81 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.internal; + +import org.bson.codecs.Codec; + +import java.lang.reflect.Type; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import static org.bson.assertions.Assertions.assertNotNull; + +final class CodecCache { + + static final class CodecCacheKey { + private final Class clazz; + private final List types; + + CodecCacheKey(final Class clazz, final List types) { + this.clazz = clazz; + this.types = types; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CodecCacheKey that = (CodecCacheKey) o; + return clazz.equals(that.clazz) && Objects.equals(types, that.types); + } + + @Override + public int hashCode() { + return Objects.hash(clazz, types); + } + + @Override + public String toString() { + return "CodecCacheKey{" + + 
"clazz=" + clazz + + ", types=" + types + + '}'; + } + } + + private final ConcurrentMap> codecCache = new ConcurrentHashMap<>(); + + public Codec putIfAbsent(final CodecCacheKey codecCacheKey, final Codec codec) { + assertNotNull(codec); + @SuppressWarnings("unchecked") + Codec prevCodec = (Codec) codecCache.putIfAbsent(codecCacheKey, codec); + return prevCodec == null ? codec : prevCodec; + } + + public Optional> get(final CodecCacheKey codecCacheKey) { + @SuppressWarnings("unchecked") + Codec codec = (Codec) codecCache.get(codecCacheKey); + return Optional.ofNullable(codec); + } +} diff --git a/bson/src/main/org/bson/internal/CycleDetectingCodecRegistry.java b/bson/src/main/org/bson/internal/CycleDetectingCodecRegistry.java new file mode 100644 index 00000000000..2aecba9f188 --- /dev/null +++ b/bson/src/main/org/bson/internal/CycleDetectingCodecRegistry.java @@ -0,0 +1,36 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.internal; + +import org.bson.codecs.Codec; +import org.bson.codecs.configuration.CodecRegistry; + +/** + * A marker interface for {@code CodecRegistry} implementations that are able to detect cycles. + * + * @since 3.12 + */ +interface CycleDetectingCodecRegistry extends CodecRegistry { + /** + * Get the Codec using the given context. 
+ * + * @param context the child context + * @param the value type + * @return the Codec + */ + Codec get(ChildCodecRegistry context); +} diff --git a/bson/src/main/org/bson/codecs/configuration/LazyCodec.java b/bson/src/main/org/bson/internal/LazyCodec.java similarity index 80% rename from bson/src/main/org/bson/codecs/configuration/LazyCodec.java rename to bson/src/main/org/bson/internal/LazyCodec.java index e0e3e1e72c8..0e7f94e9441 100644 --- a/bson/src/main/org/bson/codecs/configuration/LazyCodec.java +++ b/bson/src/main/org/bson/internal/LazyCodec.java @@ -14,22 +14,28 @@ * limitations under the License. */ -package org.bson.codecs.configuration; +package org.bson.internal; import org.bson.BsonReader; import org.bson.BsonWriter; import org.bson.codecs.Codec; import org.bson.codecs.DecoderContext; import org.bson.codecs.EncoderContext; +import org.bson.codecs.configuration.CodecRegistry; + +import java.lang.reflect.Type; +import java.util.List; class LazyCodec implements Codec { private final CodecRegistry registry; private final Class clazz; + private final List types; private volatile Codec wrapped; - LazyCodec(final CodecRegistry registry, final Class clazz) { + LazyCodec(final CodecRegistry registry, final Class clazz, final List types) { this.registry = registry; this.clazz = clazz; + this.types = types; } @Override @@ -49,7 +55,11 @@ public T decode(final BsonReader reader, final DecoderContext decoderContext) { private Codec getWrapped() { if (wrapped == null) { - wrapped = registry.get(clazz); + if (types == null) { + wrapped = registry.get(clazz); + } else { + wrapped = registry.get(clazz, types); + } } return wrapped; diff --git a/bson/src/main/org/bson/internal/NumberCodecHelper.java b/bson/src/main/org/bson/internal/NumberCodecHelper.java new file mode 100644 index 00000000000..faf63e56eb5 --- /dev/null +++ b/bson/src/main/org/bson/internal/NumberCodecHelper.java @@ -0,0 +1,161 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.bson.internal;
+
+import org.bson.BsonInvalidOperationException;
+import org.bson.BsonReader;
+import org.bson.BsonType;
+import org.bson.types.Decimal128;
+
+import java.math.BigDecimal;
+
+import static java.lang.String.format;
+
+/**
+ * This class is not part of the public API. It may be removed or changed at any time.
+ */
+public final class NumberCodecHelper {
+
+    public static byte decodeByte(final BsonReader reader) {
+        int value = decodeInt(reader);
+        if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
+            throw new BsonInvalidOperationException(format("%s can not be converted into a Byte.", value));
+        }
+        return (byte) value;
+    }
+
+    public static short decodeShort(final BsonReader reader) {
+        int value = decodeInt(reader);
+        if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
+            throw new BsonInvalidOperationException(format("%s can not be converted into a Short.", value));
+        }
+        return (short) value;
+    }
+
+    public static int decodeInt(final BsonReader reader) {
+        int intValue;
+        BsonType bsonType = reader.getCurrentBsonType();
+        switch (bsonType) {
+            case INT32:
+                intValue = reader.readInt32();
+                break;
+            case INT64:
+                long longValue = reader.readInt64();
+                intValue = (int) longValue;
+                if (longValue != (long) intValue) {
+                    throw invalidConversion(Integer.class, longValue);
+                }
+                break;
+            case DOUBLE:
+                double doubleValue = reader.readDouble();
+                intValue = (int) doubleValue;
+                if (doubleValue != (double) intValue) {
+                    throw invalidConversion(Integer.class, doubleValue);
+                }
+                break;
+            case DECIMAL128:
+                Decimal128 decimal128 = reader.readDecimal128();
+                intValue = decimal128.intValue();
+                if (!decimal128.equals(new Decimal128(intValue))) {
+                    throw invalidConversion(Integer.class, decimal128);
+                }
+                break;
+            default:
+                throw new BsonInvalidOperationException(format("Invalid numeric type, found: %s", bsonType));
+        }
+        return intValue;
+    }
+
+    public static long decodeLong(final BsonReader reader) {
+        long longValue;
+        BsonType bsonType = reader.getCurrentBsonType();
+        switch (bsonType) {
+            case INT32:
+                longValue = reader.readInt32();
+                break;
+            case INT64:
+                longValue = reader.readInt64();
+                break;
+            case DOUBLE:
+                double doubleValue = reader.readDouble();
+                longValue = (long) doubleValue;
+                if (doubleValue != (double) longValue) {
+                    throw invalidConversion(Long.class, doubleValue);
+                }
+                break;
+            case DECIMAL128:
+                Decimal128 decimal128 = reader.readDecimal128();
+                longValue = decimal128.longValue();
+                if (!decimal128.equals(new Decimal128(longValue))) {
+                    throw invalidConversion(Long.class, decimal128);
+                }
+                break;
+            default:
+                throw new BsonInvalidOperationException(format("Invalid numeric type, found: %s", bsonType));
+        }
+        return longValue;
+    }
+
+    public static float decodeFloat(final BsonReader reader) {
+        double value = decodeDouble(reader);
+        if (value < -Float.MAX_VALUE || value > Float.MAX_VALUE) {
+            throw new BsonInvalidOperationException(format("%s can not be converted into a Float.", value));
+        }
+        return (float) value;
+    }
+
+    public static double decodeDouble(final BsonReader reader) {
+        double doubleValue;
+        BsonType bsonType = reader.getCurrentBsonType();
+        switch (bsonType) {
+            case INT32:
+                doubleValue = reader.readInt32();
+                break;
+            case INT64:
+                long longValue = reader.readInt64();
+                doubleValue = longValue;
+                if (longValue != (long) doubleValue) {
+                    throw invalidConversion(Double.class, longValue);
+                }
+                break;
+            case DOUBLE:
+                doubleValue = reader.readDouble();
+                break;
+            case DECIMAL128:
+                Decimal128 decimal128 = reader.readDecimal128();
+                try {
+                    doubleValue = decimal128.doubleValue();
+                    if (!decimal128.equals(new Decimal128(new BigDecimal(doubleValue)))) {
+                        throw invalidConversion(Double.class, decimal128);
+                    }
+                } catch (NumberFormatException e) {
+                    throw invalidConversion(Double.class, decimal128);
+                }
+                break;
+            default:
+                throw new BsonInvalidOperationException(format("Invalid numeric type, found: %s", bsonType));
+        }
+        return doubleValue;
+    }
+
+    private static BsonInvalidOperationException invalidConversion(final Class<?> clazz, final Number value) {
+        return new BsonInvalidOperationException(format("Could not convert `%s` to a %s without losing precision", value, clazz));
+    }
+
+    private NumberCodecHelper() {
+    }
+}
diff --git a/bson/src/main/org/bson/internal/ProvidersCodecRegistry.java b/bson/src/main/org/bson/internal/ProvidersCodecRegistry.java
new file mode 100644
index 00000000000..ddb3c44355d
--- /dev/null
+++ b/bson/src/main/org/bson/internal/ProvidersCodecRegistry.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2008-present MongoDB, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.bson.internal;
+
+import org.bson.codecs.Codec;
+import org.bson.codecs.configuration.CodecConfigurationException;
+import org.bson.codecs.configuration.CodecProvider;
+import org.bson.codecs.configuration.CodecRegistry;
+import org.bson.internal.CodecCache.CodecCacheKey;
+
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static java.lang.String.format;
+import static java.util.Collections.emptyList;
+import static org.bson.assertions.Assertions.isTrueArgument;
+import static org.bson.assertions.Assertions.notNull;
+
+/**
+ * <p>This class is not part of the public API and may be removed or changed at any time</p>
+ */
+public final class ProvidersCodecRegistry implements CycleDetectingCodecRegistry {
+    private final List<CodecProvider> codecProviders;
+    private final CodecCache codecCache = new CodecCache();
+
+    public ProvidersCodecRegistry(final List<? extends CodecProvider> codecProviders) {
+        isTrueArgument("codecProviders must not be null or empty", codecProviders != null && codecProviders.size() > 0);
+        this.codecProviders = new ArrayList<>(codecProviders);
+    }
+
+    @Override
+    public <T> Codec<T> get(final Class<T> clazz) {
+        return get(new ChildCodecRegistry<>(this, clazz, null));
+    }
+
+    @Override
+    public <T> Codec<T> get(final Class<T> clazz, final List<Type> typeArguments) {
+        notNull("typeArguments", typeArguments);
+        isTrueArgument(format("typeArguments size should equal the number of type parameters in class %s, but is %d",
+                        clazz, typeArguments.size()),
+                clazz.getTypeParameters().length == typeArguments.size());
+        return get(new ChildCodecRegistry<>(this, clazz, typeArguments));
+    }
+
+    @Override
+    public <T> Codec<T> get(final Class<T> clazz, final CodecRegistry registry) {
+        return get(clazz, Collections.emptyList(), registry);
+    }
+
+    @Override
+    public <T> Codec<T> get(final Class<T> clazz, final List<Type> typeArguments, final CodecRegistry registry) {
+        for (CodecProvider provider : codecProviders) {
+            Codec<T> codec = provider.get(clazz, typeArguments, registry);
+            if (codec != null) {
+                return codec;
+            }
+        }
+        return null;
+    }
+
+    public <T> Codec<T> get(final ChildCodecRegistry<T> context) {
+        CodecCacheKey codecCacheKey = new CodecCacheKey(context.getCodecClass(), context.getTypes().orElse(null));
+        return codecCache.<T>get(codecCacheKey).orElseGet(() -> {
+            for (CodecProvider provider : codecProviders) {
+                Codec<T> codec = provider.get(context.getCodecClass(), context.getTypes().orElse(emptyList()), context);
+                if (codec != null) {
+                    return codecCache.putIfAbsent(codecCacheKey, codec);
+                }
+            }
+            throw new CodecConfigurationException(format("Can't find a codec for %s.", codecCacheKey));
+        });
+    }
+
+    @Override
+    public boolean equals(final Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+
+        ProvidersCodecRegistry that = (ProvidersCodecRegistry) o;
+        if (codecProviders.size() != that.codecProviders.size()) {
+            return false;
+        }
+        for (int i = 0; i < codecProviders.size(); i++) {
+            if (codecProviders.get(i).getClass() != that.codecProviders.get(i).getClass()) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        return codecProviders.hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return "ProvidersCodecRegistry{"
+                + "codecProviders=" + codecProviders
+                + '}';
+    }
+}
diff --git a/bson/src/main/org/bson/internal/StringCodecHelper.java b/bson/src/main/org/bson/internal/StringCodecHelper.java
new file mode 100644
index 00000000000..04225aad939
--- /dev/null
+++ b/bson/src/main/org/bson/internal/StringCodecHelper.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2008-present MongoDB, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.bson.internal;
+
+import org.bson.BsonInvalidOperationException;
+import org.bson.BsonReader;
+import org.bson.BsonType;
+
+import static java.lang.String.format;
+
+/**
+ * This class is not part of the public API. It may be removed or changed at any time.
+ */
+public final class StringCodecHelper {
+
+    private StringCodecHelper(){
+        //NOP
+    }
+
+    public static char decodeChar(final BsonReader reader) {
+        BsonType currentBsonType = reader.getCurrentBsonType();
+        if (currentBsonType != BsonType.STRING) {
+            throw new BsonInvalidOperationException(format("Invalid string type, found: %s", currentBsonType));
+        }
+        String string = reader.readString();
+        if (string.length() != 1) {
+            throw new BsonInvalidOperationException(format("Attempting to decode the string '%s' to a character, but its length is not "
+                    + "equal to one", string));
+        }
+        return string.charAt(0);
+    }
+}
diff --git a/bson/src/main/org/bson/internal/UnsignedLongs.java b/bson/src/main/org/bson/internal/UnsignedLongs.java
deleted file mode 100644
index 3980df5b80a..00000000000
--- a/bson/src/main/org/bson/internal/UnsignedLongs.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Copyright 2008-present MongoDB, Inc.
- * Copyright 2010 The Guava Authors
- * Copyright 2011 The Guava Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.bson.internal;
-
-import java.math.BigInteger;
-
-/**
- * Utilities for treating long values as unsigned.
- *
- *
- * <p>
- * Similar methods are now available in Java 8, but are required here for Java 6/7 compatibility.
- * </p>
- * <p>
- * This class is not part of the public API and may be removed or changed at any time.
- * </p>
    - */ -public final class UnsignedLongs { - /** - * Equivalent of Long.compareUnsigned in Java 8. - * - * @param first the first value - * @param second the second value - * @return 0 if the values are equal, a value greater than zero if first is greater than second, - * a value less than zero if first is less than second - */ - public static int compare(final long first, final long second) { - return compareLongs(first + Long.MIN_VALUE, second + Long.MIN_VALUE); - } - - /** - * Equivalent to Long.toUnsignedString in Java 8. - * - * @param value the long value to treat as unsigned - * @return the string representation of unsignedLong treated as an unsigned value - */ - public static String toString(final long value) { - if (value >= 0) { - return Long.toString(value); - } else { - // emulate unsigned division and then append the remainder - long quotient = (value >>> 1) / 5; // Unsigned divide by 10 and floor - long remainder = value - quotient * 10; - return Long.toString(quotient) + remainder; - } - } - - // - - /** - * Equivalent to Long.parseUnsignedLong in Java 8. - * - * @param string the string representation of an unsigned long - * @return the unsigned long - */ - public static long parse(final String string) { - if (string.length() == 0) { - throw new NumberFormatException("empty string"); - } - int radix = 10; - int maxSafePos = MAX_SAFE_DIGITS[radix] - 1; - long value = 0; - for (int pos = 0; pos < string.length(); pos++) { - int digit = Character.digit(string.charAt(pos), radix); - if (digit == -1) { - throw new NumberFormatException(string); - } - if (pos > maxSafePos && overflowInParse(value, digit, radix)) { - throw new NumberFormatException("Too large for unsigned long: " + string); - } - value = (value * radix) + digit; - } - - return value; - } - - // Returns true if (current * radix) + digit is a number too large to be represented by an - // unsigned long. 
This is useful for detecting overflow while parsing a string representation of a - // number. - private static boolean overflowInParse(final long current, final int digit, final int radix) { - if (current >= 0) { - if (current < MAX_VALUE_DIVS[radix]) { - return false; - } - if (current > MAX_VALUE_DIVS[radix]) { - return true; - } - // current == maxValueDivs[radix] - return (digit > MAX_VALUE_MODS[radix]); - } - - // current < 0: high bit is set - return true; - } - - // this is the equivalent of Long.compare in Java 7 - private static int compareLongs(final long x, final long y) { - return (x < y) ? -1 : ((x == y) ? 0 : 1); - } - - // Returns dividend / divisor, where the dividend and divisor are treated as unsigned 64-bit quantities. - private static long divide(final long dividend, final long divisor) { - if (divisor < 0) { // i.e., divisor >= 2^63: - if (compare(dividend, divisor) < 0) { - return 0; // dividend < divisor - } else { - return 1; // dividend >= divisor - } - } - - // Optimization - use signed division if dividend < 2^63 - if (dividend >= 0) { - return dividend / divisor; - } - - - // Otherwise, approximate the quotient, check, and correct if necessary. Our approximation is - // guaranteed to be either exact or one less than the correct value. This follows from fact that - // floor(floor(x)/i) == floor(x/i) for any real x and integer i != 0. The proof is not quite - // trivial. - long quotient = ((dividend >>> 1) / divisor) << 1; - long rem = dividend - quotient * divisor; - return quotient + (compare(rem, divisor) >= 0 ? 1 : 0); - } - - // Returns dividend % divisor, where the dividend and divisor are treated as unsigned 64-bit* quantities. 
- private static long remainder(final long dividend, final long divisor) { - if (divisor < 0) { // i.e., divisor >= 2^63: - if (compare(dividend, divisor) < 0) { - return dividend; // dividend < divisor - } else { - return dividend - divisor; // dividend >= divisor - } - } - - // Optimization - use signed modulus if dividend < 2^63 - if (dividend >= 0) { - return dividend % divisor; - } - - - // Otherwise, approximate the quotient, check, and correct if necessary. Our approximation is - // guaranteed to be either exact or one less than the correct value. This follows from the fact - // that floor(floor(x)/i) == floor(x/i) for any real x and integer i != 0. The proof is not - // quite trivial. - long quotient = ((dividend >>> 1) / divisor) << 1; - long rem = dividend - quotient * divisor; - return rem - (compare(rem, divisor) >= 0 ? divisor : 0); - } - - private static final long MAX_VALUE = -1L; // Equivalent to 2^64 - 1 - private static final long[] MAX_VALUE_DIVS = new long[Character.MAX_RADIX + 1]; - private static final int[] MAX_VALUE_MODS = new int[Character.MAX_RADIX + 1]; - private static final int[] MAX_SAFE_DIGITS = new int[Character.MAX_RADIX + 1]; - - static { - BigInteger overflow = new BigInteger("10000000000000000", 16); - for (int i = Character.MIN_RADIX; i <= Character.MAX_RADIX; i++) { - MAX_VALUE_DIVS[i] = divide(MAX_VALUE, i); - MAX_VALUE_MODS[i] = (int) remainder(MAX_VALUE, i); - MAX_SAFE_DIGITS[i] = overflow.toString(i).length() - 1; - } - } - - private UnsignedLongs() { - } - -} diff --git a/bson/src/main/org/bson/internal/UuidHelper.java b/bson/src/main/org/bson/internal/UuidHelper.java new file mode 100644 index 00000000000..9c46614b56e --- /dev/null +++ b/bson/src/main/org/bson/internal/UuidHelper.java @@ -0,0 +1,135 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.bson.internal;
+
+import org.bson.BSONException;
+import org.bson.BsonBinarySubType;
+import org.bson.BsonSerializationException;
+import org.bson.UuidRepresentation;
+
+import java.util.Arrays;
+import java.util.UUID;
+
+/**
+ * Utilities for encoding and decoding UUID into binary.
+ *
+ * <p>This class is not part of the public API and may be removed or changed at any time</p>
+ */
+public final class UuidHelper {
+    private static void writeLongToArrayBigEndian(final byte[] bytes, final int offset, final long x) {
+        bytes[offset + 7] = (byte) (0xFFL & (x));
+        bytes[offset + 6] = (byte) (0xFFL & (x >> 8));
+        bytes[offset + 5] = (byte) (0xFFL & (x >> 16));
+        bytes[offset + 4] = (byte) (0xFFL & (x >> 24));
+        bytes[offset + 3] = (byte) (0xFFL & (x >> 32));
+        bytes[offset + 2] = (byte) (0xFFL & (x >> 40));
+        bytes[offset + 1] = (byte) (0xFFL & (x >> 48));
+        bytes[offset] = (byte) (0xFFL & (x >> 56));
+    }
+
+    private static long readLongFromArrayBigEndian(final byte[] bytes, final int offset) {
+        long x = 0;
+        x |= (0xFFL & bytes[offset + 7]);
+        x |= (0xFFL & bytes[offset + 6]) << 8;
+        x |= (0xFFL & bytes[offset + 5]) << 16;
+        x |= (0xFFL & bytes[offset + 4]) << 24;
+        x |= (0xFFL & bytes[offset + 3]) << 32;
+        x |= (0xFFL & bytes[offset + 2]) << 40;
+        x |= (0xFFL & bytes[offset + 1]) << 48;
+        x |= (0xFFL & bytes[offset]) << 56;
+        return x;
+    }
+
+    // reverse elements in the subarray data[start:start+length]
+    private static void reverseByteArray(final byte[] data, final int start, final int length) {
+        for (int left = start, right = start + length - 1; left < right; left++, right--) {
+            // swap the values at the left and right indices
+            byte temp = data[left];
+            data[left] = data[right];
+            data[right] = temp;
+        }
+    }
+
+    public static byte[] encodeUuidToBinary(final UUID uuid, final UuidRepresentation uuidRepresentation) {
+        byte[] binaryData = new byte[16];
+        writeLongToArrayBigEndian(binaryData, 0, uuid.getMostSignificantBits());
+        writeLongToArrayBigEndian(binaryData, 8, uuid.getLeastSignificantBits());
+        switch(uuidRepresentation) {
+            case C_SHARP_LEGACY:
+                reverseByteArray(binaryData, 0, 4);
+                reverseByteArray(binaryData, 4, 2);
+                reverseByteArray(binaryData, 6, 2);
+                break;
+            case JAVA_LEGACY:
+                reverseByteArray(binaryData, 0, 8);
+                reverseByteArray(binaryData, 8, 8);
+                break;
+            case PYTHON_LEGACY:
+            case STANDARD:
+                break;
+            default:
+                throw new BSONException("Unexpected UUID representation: " + uuidRepresentation);
+        }
+
+        return binaryData;
+    }
+
+    // This method will NOT modify the contents of the byte array
+    public static UUID decodeBinaryToUuid(final byte[] data, final byte type, final UuidRepresentation uuidRepresentation) {
+        if (data.length != 16) {
+            throw new BsonSerializationException(String.format("Expected length to be 16, not %d.", data.length));
+        }
+
+        byte[] localData = data;
+
+        if (type == BsonBinarySubType.UUID_LEGACY.getValue()) {
+            switch(uuidRepresentation) {
+                case C_SHARP_LEGACY:
+                    localData = Arrays.copyOf(data, 16);
+
+                    reverseByteArray(localData, 0, 4);
+                    reverseByteArray(localData, 4, 2);
+                    reverseByteArray(localData, 6, 2);
+                    break;
+                case JAVA_LEGACY:
+                    localData = Arrays.copyOf(data, 16);
+
+                    reverseByteArray(localData, 0, 8);
+                    reverseByteArray(localData, 8, 8);
+                    break;
+                case PYTHON_LEGACY:
+                    break;
+                case STANDARD:
+                    throw new BSONException("Can not decode a subtype 3 (UUID legacy) BSON binary when the decoder is configured to use "
+                            + "the standard UUID representation");
+                default:
+                    throw new BSONException("Unexpected UUID representation: " + uuidRepresentation);
+            }
+        }
+
+        return new UUID(readLongFromArrayBigEndian(localData, 0), readLongFromArrayBigEndian(localData, 8));
+    }
+
+    public static boolean isLegacyUUID(final UuidRepresentation uuidRepresentation) {
+        return uuidRepresentation == UuidRepresentation.JAVA_LEGACY
+                || uuidRepresentation == UuidRepresentation.C_SHARP_LEGACY
+                || uuidRepresentation == UuidRepresentation.PYTHON_LEGACY;
+    }
+
+    private UuidHelper() {
+    }
+}
diff --git a/bson/src/main/org/bson/internal/vector/BinaryVectorHelper.java b/bson/src/main/org/bson/internal/vector/BinaryVectorHelper.java
new file mode 100644
index 00000000000..74d50d334fc
--- /dev/null
+++ b/bson/src/main/org/bson/internal/vector/BinaryVectorHelper.java
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2008-present MongoDB, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.bson.internal.vector;
+
+import org.bson.BsonBinary;
+import org.bson.BsonInvalidOperationException;
+import org.bson.Float32BinaryVector;
+import org.bson.Int8BinaryVector;
+import org.bson.PackedBitBinaryVector;
+import org.bson.BinaryVector;
+import org.bson.assertions.Assertions;
+import org.bson.types.Binary;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Helper class for encoding and decoding vectors to and from {@link BsonBinary}/{@link Binary}.
+ *
+ * <p>
+ * This class is not part of the public API and may be removed or changed at any time.
+ *
+ * @see BinaryVector
+ * @see BsonBinary#asVector()
+ * @see BsonBinary#BsonBinary(BinaryVector)
+ */
+public final class BinaryVectorHelper {
+
+    private static final ByteOrder STORED_BYTE_ORDER = ByteOrder.LITTLE_ENDIAN;
+    private static final String ERROR_MESSAGE_UNKNOWN_VECTOR_DATA_TYPE = "Unknown vector data type: ";
+    private static final byte ZERO_PADDING = 0;
+
+    private BinaryVectorHelper() {
+        //NOP
+    }
+
+    private static final int METADATA_SIZE = 2;
+
+    public static byte[] encodeVectorToBinary(final BinaryVector vector) {
+        BinaryVector.DataType dataType = vector.getDataType();
+        switch (dataType) {
+            case INT8:
+                return encodeVector(dataType.getValue(), ZERO_PADDING, vector.asInt8Vector().getData());
+            case PACKED_BIT:
+                PackedBitBinaryVector packedBitVector = vector.asPackedBitVector();
+                return encodeVector(dataType.getValue(), packedBitVector.getPadding(), packedBitVector.getData());
+            case FLOAT32:
+                return encodeVector(dataType.getValue(), vector.asFloat32Vector().getData());
+            default:
+                throw Assertions.fail(ERROR_MESSAGE_UNKNOWN_VECTOR_DATA_TYPE + dataType);
+        }
+    }
+
+    /**
+     * Decodes a vector from a binary representation.
+     * <p>
+     * encodedVector is not mutated nor stored in the returned {@link BinaryVector}.
+     */
+    public static BinaryVector decodeBinaryToVector(final byte[] encodedVector) {
+        isTrue("Vector encoded array length must be at least 2, but found: " + encodedVector.length, encodedVector.length >= METADATA_SIZE);
+        BinaryVector.DataType dataType = determineVectorDType(encodedVector[0]);
+        byte padding = encodedVector[1];
+        switch (dataType) {
+            case INT8:
+                return decodeInt8Vector(encodedVector, padding);
+            case PACKED_BIT:
+                return decodePackedBitVector(encodedVector, padding);
+            case FLOAT32:
+                return decodeFloat32Vector(encodedVector, padding);
+            default:
+                throw Assertions.fail(ERROR_MESSAGE_UNKNOWN_VECTOR_DATA_TYPE + dataType);
+        }
+    }
+
+    private static Float32BinaryVector decodeFloat32Vector(final byte[] encodedVector, final byte padding) {
+        isTrue("Padding must be 0 for FLOAT32 data type, but found: " + padding, padding == 0);
+        return BinaryVector.floatVector(decodeLittleEndianFloats(encodedVector));
+    }
+
+    private static PackedBitBinaryVector decodePackedBitVector(final byte[] encodedVector, final byte padding) {
+        byte[] packedBitVector = extractVectorData(encodedVector);
+        isTrue("Padding must be 0 if vector is empty, but found: " + padding, padding == 0 || packedBitVector.length > 0);
+        isTrue("Padding must be between 0 and 7 bits, but found: " + padding, padding >= 0 && padding <= 7);
+        return BinaryVector.packedBitVector(packedBitVector, padding);
+    }
+
+    private static Int8BinaryVector decodeInt8Vector(final byte[] encodedVector, final byte padding) {
+        isTrue("Padding must be 0 for INT8 data type, but found: " + padding, padding == 0);
+        byte[] int8Vector = extractVectorData(encodedVector);
+        return BinaryVector.int8Vector(int8Vector);
+    }
+
+    private static byte[] extractVectorData(final byte[] encodedVector) {
+        int vectorDataLength = encodedVector.length - METADATA_SIZE;
+        byte[] vectorData = new byte[vectorDataLength];
+        System.arraycopy(encodedVector, METADATA_SIZE, vectorData, 0, vectorDataLength);
+        return vectorData;
+    }
+
+    private static byte[] encodeVector(final byte dType, final byte padding, final byte[] vectorData) {
+        final byte[] bytes = new byte[vectorData.length + METADATA_SIZE];
+        bytes[0] = dType;
+        bytes[1] = padding;
+        System.arraycopy(vectorData, 0, bytes, METADATA_SIZE, vectorData.length);
+        return bytes;
+    }
+
+    private static byte[] encodeVector(final byte dType, final float[] vectorData) {
+        final byte[] bytes = new byte[vectorData.length * Float.BYTES + METADATA_SIZE];
+
+        bytes[0] = dType;
+        bytes[1] = ZERO_PADDING;
+
+        ByteBuffer buffer = ByteBuffer.wrap(bytes);
+        buffer.order(STORED_BYTE_ORDER);
+        buffer.position(METADATA_SIZE);
+
+        FloatBuffer floatBuffer = buffer.asFloatBuffer();
+
+        // The JVM may optimize this operation internally, potentially using intrinsics
+        // or platform-specific optimizations (such as SIMD). If the byte order matches the underlying system's
+        // native order, the operation may involve a direct memory copy.
+        floatBuffer.put(vectorData);
+
+        return bytes;
+    }
+
+    private static float[] decodeLittleEndianFloats(final byte[] encodedVector) {
+        isTrue("Byte array length must be a multiple of 4 for FLOAT32 data type, but found: " + encodedVector.length,
+                (encodedVector.length - METADATA_SIZE) % Float.BYTES == 0);
+
+        int vectorSize = encodedVector.length - METADATA_SIZE;
+
+        int numFloats = vectorSize / Float.BYTES;
+        float[] floatArray = new float[numFloats];
+
+        ByteBuffer buffer = ByteBuffer.wrap(encodedVector, METADATA_SIZE, vectorSize);
+        buffer.order(STORED_BYTE_ORDER);
+
+        // The JVM may optimize this operation internally, potentially using intrinsics
+        // or platform-specific optimizations (such as SIMD). If the byte order matches the underlying system's
+        // native order, the operation may involve a direct memory copy.
+        buffer.asFloatBuffer().get(floatArray);
+        return floatArray;
+    }
+
+    public static BinaryVector.DataType determineVectorDType(final byte dType) {
+        BinaryVector.DataType[] values = BinaryVector.DataType.values();
+        for (BinaryVector.DataType value : values) {
+            if (value.getValue() == dType) {
+                return value;
+            }
+        }
+        throw new BsonInvalidOperationException(ERROR_MESSAGE_UNKNOWN_VECTOR_DATA_TYPE + dType);
+    }
+
+    private static void isTrue(final String message, final boolean condition) {
+        if (!condition) {
+            throw new BsonInvalidOperationException(message);
+        }
+    }
+}
diff --git a/bson/src/main/org/bson/io/BasicOutputBuffer.java b/bson/src/main/org/bson/io/BasicOutputBuffer.java
index 80acefcb7ec..aaff34d6476 100644
--- a/bson/src/main/org/bson/io/BasicOutputBuffer.java
+++ b/bson/src/main/org/bson/io/BasicOutputBuffer.java
@@ -18,11 +18,14 @@
 
 import org.bson.ByteBuf;
 import org.bson.ByteBufNIO;
+import org.bson.types.ObjectId;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.Buffer;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 import static java.lang.String.format;
@@ -32,8 +35,12 @@
  * A BSON output stream that stores the output in a single, un-pooled byte array.
  */
 public class BasicOutputBuffer extends OutputBuffer {
-    private byte[] buffer = new byte[1024];
-    private int position;
+
+    /**
+     * This ByteBuffer allows us to write ObjectIDs without allocating a temporary array per object, and enables us
+     * to leverage JVM intrinsics for writing little-endian numeric values.
+     */
+    private ByteBuffer buffer;
 
     /**
      * Construct an instance with a default initial byte array size.
@@ -48,7 +55,8 @@ public BasicOutputBuffer() { * @param initialSize the initial size of the byte array */ public BasicOutputBuffer(final int initialSize) { - buffer = new byte[initialSize]; + // Allocate heap buffer to ensure we can access underlying array + buffer = ByteBuffer.allocate(initialSize).order(LITTLE_ENDIAN); } /** @@ -58,13 +66,46 @@ public BasicOutputBuffer(final int initialSize) { * @since 3.3 */ public byte[] getInternalBuffer() { - return buffer; + return buffer.array(); } @Override public void write(final byte[] b) { + writeBytes(b, 0, b.length); + } + + @Override + public byte[] toByteArray() { + ensureOpen(); + return Arrays.copyOf(buffer.array(), buffer.position()); + } + + @Override + public void writeInt32(final int value) { + ensureOpen(); + ensure(4); + buffer.putInt(value); + } + + @Override + public void writeInt32(final int position, final int value) { + ensureOpen(); + checkPosition(position, 4); + buffer.putInt(position, value); + } + + @Override + public void writeInt64(final long value) { + ensureOpen(); + ensure(8); + buffer.putLong(value); + } + + @Override + public void writeObjectId(final ObjectId value) { ensureOpen(); - write(b, 0, b.length); + ensure(12); + value.putToByteBuffer(buffer); } @Override @@ -72,8 +113,7 @@ public void writeBytes(final byte[] bytes, final int offset, final int length) { ensureOpen(); ensure(length); - System.arraycopy(bytes, offset, buffer, position, length); - position += length; + buffer.put(bytes, offset, length); } @Override @@ -81,27 +121,21 @@ public void writeByte(final int value) { ensureOpen(); ensure(1); - buffer[position++] = (byte) (0xFF & value); + buffer.put((byte) (0xFF & value)); } @Override protected void write(final int absolutePosition, final int value) { ensureOpen(); + checkPosition(absolutePosition, 1); - if (absolutePosition < 0) { - throw new IllegalArgumentException(format("position must be >= 0 but was %d", absolutePosition)); - } - if (absolutePosition > position - 1) { - 
throw new IllegalArgumentException(format("position must be <= %d but was %d", position - 1, absolutePosition)); - } - - buffer[absolutePosition] = (byte) (0xFF & value); + buffer.put(absolutePosition, (byte) (0xFF & value)); } @Override public int getPosition() { ensureOpen(); - return position; + return buffer.position(); } /** @@ -110,29 +144,32 @@ public int getPosition() { @Override public int getSize() { ensureOpen(); - return position; + return buffer.position(); } @Override public int pipe(final OutputStream out) throws IOException { ensureOpen(); - out.write(buffer, 0, position); - return position; + out.write(buffer.array(), 0, buffer.position()); + return buffer.position(); } @Override public void truncateToPosition(final int newPosition) { ensureOpen(); - if (newPosition > position || newPosition < 0) { + if (newPosition > buffer.position() || newPosition < 0) { throw new IllegalArgumentException(); } - position = newPosition; + // The cast is required for compatibility with JDK 9+ where ByteBuffer's position method is inherited from Buffer. + ((Buffer) buffer).position(newPosition); } @Override public List getByteBuffers() { ensureOpen(); - return Arrays.asList(new ByteBufNIO(ByteBuffer.wrap(buffer, 0, position).duplicate().order(LITTLE_ENDIAN))); + // Create a flipped copy of the buffer for reading. Note that ByteBufNIO overwrites the endian-ness. 
+ ByteBuffer flipped = ByteBuffer.wrap(buffer.array(), 0, buffer.position()); + return Collections.singletonList(new ByteBufNIO(flipped)); } @Override @@ -147,19 +184,32 @@ private void ensureOpen() { } private void ensure(final int more) { - int need = position + more; - if (need <= buffer.length) { + int length = buffer.position(); + int need = length + more; + if (need <= buffer.capacity()) { return; } - int newSize = buffer.length * 2; + int newSize = length * 2; if (newSize < need) { newSize = need + 128; } - byte[] n = new byte[newSize]; - System.arraycopy(buffer, 0, n, 0, position); - buffer = n; + ByteBuffer tmp = ByteBuffer.allocate(newSize).order(LITTLE_ENDIAN); + tmp.put(buffer.array(), 0, length); // Avoids covariant call to flip on jdk8 + this.buffer = tmp; } + /** + * Ensures that `absolutePosition` is a valid index in `this.buffer` and there is room to write at + * least `bytesToWrite` bytes. + */ + private void checkPosition(final int absolutePosition, final int bytesToWrite) { + if (absolutePosition < 0) { + throw new IllegalArgumentException(format("position must be >= 0 but was %d", absolutePosition)); + } + if (absolutePosition > buffer.position() - bytesToWrite) { + throw new IllegalArgumentException(format("position must be <= %d but was %d", buffer.position() - bytesToWrite, absolutePosition)); + } + } } diff --git a/bson/src/main/org/bson/io/Bits.java b/bson/src/main/org/bson/io/Bits.java deleted file mode 100644 index 83953e6431a..00000000000 --- a/bson/src/main/org/bson/io/Bits.java +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.io; - -import java.io.EOFException; -import java.io.IOException; -import java.io.InputStream; - -/** - * Utility class for reading values from an input stream. - */ -public class Bits { - - /** - * Reads bytes from the input stream and puts them into the given byte buffer. The equivalent of calling - * {@link #readFully(java.io.InputStream, byte[], int, int)} with an offset of zero and a length equal to the length of the buffer. - * - * @param inputStream the input stream to read from - * @param buffer the buffer into which the data is read. - * @throws IOException if there's an error reading from the {@code inputStream} - */ - public static void readFully(final InputStream inputStream, final byte[] buffer) - throws IOException { - readFully(inputStream, buffer, buffer.length); - } - - /** - * Reads bytes from the input stream and puts them into the given byte buffer. The equivalent of calling - * {@link #readFully(java.io.InputStream, byte[], int, int)} with an offset of zero. - * - * @param inputStream the input stream to read from - * @param buffer the buffer into which the data is read. - * @param length the maximum number of bytes to read. - * @throws IOException if there's an error reading from the {@code inputStream} - */ - public static void readFully(final InputStream inputStream, final byte[] buffer, final int length) - throws IOException { - readFully(inputStream, buffer, 0, length); - } - - /** - * Reads bytes from the input stream and puts them into the given byte buffer. 
- * - * @param inputStream the input stream to read from - * @param buffer the buffer into which the data is read. - * @param offset the start offset in array {@code buffer} at which the data is written. - * @param length the maximum number of bytes to read. - * @throws IOException if there's an error reading from the {@code inputStream} - * @see java.io.InputStream#read(byte[], int, int) - */ - public static void readFully(final InputStream inputStream, final byte[] buffer, final int offset, final int length) - throws IOException { - if (buffer.length < length + offset) { - throw new IllegalArgumentException("Buffer is too small"); - } - - int arrayOffset = offset; - int bytesToRead = length; - while (bytesToRead > 0) { - int bytesRead = inputStream.read(buffer, arrayOffset, bytesToRead); - if (bytesRead < 0) { - throw new EOFException(); - } - bytesToRead -= bytesRead; - arrayOffset += bytesRead; - } - } - - /** - * Reads and returns a single integer value from the input stream. - * - * @param inputStream the input stream to read from - * @return the integer value - * @throws IOException if there's an error reading from the {@code inputStream} - */ - public static int readInt(final InputStream inputStream) throws IOException { - return readInt(inputStream, new byte[4]); - } - - /** - * Reads and returns a single integer value from the input stream. - * - * @param inputStream the input stream to read from - * @param buffer the buffer to write the input stream bytes into - * @return the integer value - * @throws IOException if there's an error reading from the {@code inputStream} - */ - public static int readInt(final InputStream inputStream, final byte[] buffer) throws IOException { - readFully(inputStream, buffer, 4); - return readInt(buffer); - } - - /** - * Reads and returns a single integer value from the buffer. The equivalent of calling {@link #readInt(byte[], int)} - * with an offset of zero. 
- * - * @param buffer the buffer to read from - * @return the integer value - */ - public static int readInt(final byte[] buffer) { - return readInt(buffer, 0); - } - - /** - * Reads and returns a single integer value from the buffer. - * - * @param buffer the buffer to read from - * @param offset the position to start reading from the buffer - * @return the integer value - */ - public static int readInt(final byte[] buffer, final int offset) { - int x = 0; - x |= (0xFF & buffer[offset + 0]) << 0; - x |= (0xFF & buffer[offset + 1]) << 8; - x |= (0xFF & buffer[offset + 2]) << 16; - x |= (0xFF & buffer[offset + 3]) << 24; - return x; - } - - /** - * Reads and returns a single big-endian integer value - * - * @param buffer the buffer to read from - * @param offset the position to start reading from the buffer - * @return the integer value - */ - public static int readIntBE(final byte[] buffer, final int offset) { - int x = 0; - x |= (0xFF & buffer[offset + 0]) << 24; - x |= (0xFF & buffer[offset + 1]) << 16; - x |= (0xFF & buffer[offset + 2]) << 8; - x |= (0xFF & buffer[offset + 3]) << 0; - return x; - } - - /** - * Reads and returns a single long value from the input stream. - * - * @param inputStream the input stream to read from - * @return the long value - * @throws IOException if there's an error reading from the {@code inputStream} - */ - public static long readLong(final InputStream inputStream) throws IOException { - return readLong(inputStream, new byte[8]); - } - - /** - * Reads and returns a single long value from the input stream. 
- * - * @param inputStream the input stream to read from - * @param buffer the buffer to write the input stream bytes into - * @return the long value - * @throws IOException if there's an error reading from the {@code inputStream} - */ - public static long readLong(final InputStream inputStream, final byte[] buffer) throws IOException { - readFully(inputStream, buffer, 8); - return readLong(buffer); - } - - /** - * Reads and returns a single long value from the buffer. The equivalent of called {@link #readLong(byte[], int)} with an offset of - * zero. - * - * @param buffer the buffer to read from - * @return the long value - */ - public static long readLong(final byte[] buffer) { - return readLong(buffer, 0); - } - - /** - * Reads and returns a single long value from the buffer. - * - * @param buffer the buffer to read from - * @param offset the position to start reading from the buffer - * @return the long value - */ - public static long readLong(final byte[] buffer, final int offset) { - long x = 0; - x |= (0xFFL & buffer[offset + 0]) << 0; - x |= (0xFFL & buffer[offset + 1]) << 8; - x |= (0xFFL & buffer[offset + 2]) << 16; - x |= (0xFFL & buffer[offset + 3]) << 24; - x |= (0xFFL & buffer[offset + 4]) << 32; - x |= (0xFFL & buffer[offset + 5]) << 40; - x |= (0xFFL & buffer[offset + 6]) << 48; - x |= (0xFFL & buffer[offset + 7]) << 56; - return x; - } -} diff --git a/bson/src/main/org/bson/io/BsonInput.java b/bson/src/main/org/bson/io/BsonInput.java index 58894472339..823355fe3ee 100644 --- a/bson/src/main/org/bson/io/BsonInput.java +++ b/bson/src/main/org/bson/io/BsonInput.java @@ -41,7 +41,7 @@ public interface BsonInput extends Closeable { byte readByte(); /** - * Reads the specified number of bytes into the given byte array. This is equivalent to to {@code readBytes(bytes, 0, bytes.length)}. + * Reads the specified number of bytes into the given byte array. This is equivalent to {@code readBytes(bytes, 0, bytes.length)}. 
* * @param bytes the byte array to write into */ @@ -111,15 +111,6 @@ public interface BsonInput extends Closeable { */ void skip(int numBytes); - /** - * Marks the current position in the stream. This method obeys the contract as specified in the same method in {@code InputStream}. - * - * @param readLimit the maximum limit of bytes that can be read before the mark position becomes invalid - * @deprecated Use {@link #getMark(int)} instead - */ - @Deprecated - void mark(int readLimit); - /** * Gets a mark for the current position in the stream. * @@ -129,11 +120,6 @@ public interface BsonInput extends Closeable { */ BsonInputMark getMark(int readLimit); - /** - * Resets the stream to the current mark. This method obeys the contract as specified in the same method in {@code InputStream}. - */ - void reset(); - /** * Returns true if there are more bytes left in the stream. * diff --git a/bson/src/main/org/bson/io/ByteBufferBsonInput.java b/bson/src/main/org/bson/io/ByteBufferBsonInput.java index f29dbd395c6..2819bdcb091 100644 --- a/bson/src/main/org/bson/io/ByteBufferBsonInput.java +++ b/bson/src/main/org/bson/io/ByteBufferBsonInput.java @@ -21,7 +21,7 @@ import org.bson.types.ObjectId; import java.nio.ByteOrder; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import static java.lang.String.format; @@ -31,9 +31,15 @@ * @since 3.0 */ public class ByteBufferBsonInput implements BsonInput { - private static final Charset UTF8_CHARSET = Charset.forName("UTF-8"); private static final String[] ONE_BYTE_ASCII_STRINGS = new String[Byte.MAX_VALUE + 1]; + /* A dynamically sized scratch buffer, that is reused across BSON String reads: + * 1. Reduces garbage collection by avoiding new byte array creation. + * 2. Improves cache utilization through temporal locality. + * 3. Avoids JVM allocation and zeroing cost for new memory allocations. 
+ */ + private byte[] scratchBuffer; + static { for (int b = 0; b < ONE_BYTE_ASCII_STRINGS.length; b++) { @@ -42,7 +48,6 @@ public class ByteBufferBsonInput implements BsonInput { } private ByteBuf buffer; - private int mark = -1; /** * Construct an instance with the given byte buffer. The stream takes over ownership of the buffer and closes it when this instance is @@ -121,77 +126,141 @@ public String readString() { int size = readInt32(); if (size <= 0) { throw new BsonSerializationException(format("While decoding a BSON string found a size that is not a positive number: %d", - size)); + size)); } + ensureAvailable(size); return readString(size); } @Override public String readCString() { ensureOpen(); - - // TODO: potentially optimize this - int mark = buffer.position(); - readUntilNullByte(); - int size = buffer.position() - mark; - buffer.position(mark); - + int size = computeCStringLength(buffer.position()); return readString(size); } - private String readString(final int size) { - if (size == 2) { - byte asciiByte = readByte(); // if only one byte in the string, it must be ascii. - byte nullByte = readByte(); // read null terminator + private String readString(final int bsonStringSize) { + if (bsonStringSize == 2) { + byte asciiByte = buffer.get(); // if only one byte in the string, it must be ascii. 
+ byte nullByte = buffer.get(); // read null terminator if (nullByte != 0) { throw new BsonSerializationException("Found a BSON string that is not null-terminated"); } if (asciiByte < 0) { - return UTF8_CHARSET.newDecoder().replacement(); + return StandardCharsets.UTF_8.newDecoder().replacement(); } return ONE_BYTE_ASCII_STRINGS[asciiByte]; // this will throw if asciiByte is negative } else { - byte[] bytes = new byte[size - 1]; - readBytes(bytes); - byte nullByte = readByte(); - if (nullByte != 0) { - throw new BsonSerializationException("Found a BSON string that is not null-terminated"); + if (buffer.isBackedByArray()) { + int position = buffer.position(); + int arrayOffset = buffer.arrayOffset(); + int newPosition = position + bsonStringSize; + buffer.position(newPosition); + + byte[] array = buffer.array(); + if (array[arrayOffset + newPosition - 1] != 0) { + throw new BsonSerializationException("Found a BSON string that is not null-terminated"); + } + return new String(array, arrayOffset + position, bsonStringSize - 1, StandardCharsets.UTF_8); + } else if (scratchBuffer == null || bsonStringSize > scratchBuffer.length) { + int scratchBufferSize = bsonStringSize + (bsonStringSize >>> 1); //1.5 times the size + scratchBuffer = new byte[scratchBufferSize]; } - return new String(bytes, UTF8_CHARSET); - } - } - private void readUntilNullByte() { - //CHECKSTYLE:OFF - while (readByte() != 0) { //NOPMD - //do nothing - checkstyle & PMD hate this, not surprisingly + buffer.get(scratchBuffer, 0, bsonStringSize); + if (scratchBuffer[bsonStringSize - 1] != 0) { + throw new BsonSerializationException("BSON string not null-terminated"); + } + return new String(scratchBuffer, 0, bsonStringSize - 1, StandardCharsets.UTF_8); } - //CHECKSTYLE:ON } @Override public void skipCString() { ensureOpen(); - readUntilNullByte(); + int pos = buffer.position(); + int length = computeCStringLength(pos); + buffer.position(pos + length); } - @Override - public void skip(final int numBytes) 
{ - ensureOpen(); - buffer.position(buffer.position() + numBytes); + /** + * Detects the position of the first NULL (0x00) byte in a 64-bit word using SWAR technique. + * + */ + private int computeCStringLength(final int prevPos) { + int pos = prevPos; + int limit = buffer.limit(); + + // `>>> 3` means dividing without remainder by `Long.BYTES` because `Long.BYTES` is 2^3 + int chunks = (limit - pos) >>> 3; + // `<< 3` means multiplying by `Long.BYTES` because `Long.BYTES` is 2^3 + int toPos = pos + (chunks << 3); + for (; pos < toPos; pos += Long.BYTES) { + long chunk = buffer.getLong(pos); + /* + Subtract 0x0101010101010101L to cause a borrow on 0x00 bytes. + if original byte is 00000000, then 00000000 - 00000001 = 11111111 (borrow causes the most significant bit set to 1). + */ + long mask = chunk - 0x0101010101010101L; + /* + mask will only have the most significant bit in each byte set iff it was a 0x00 byte (0x00 becomes 0xFF because of the borrow). + ~chunk will have bits that were originally 0 set to 1. + mask & ~chunk will have the most significant bit in each byte set iff original byte was 0x00. + */ + mask &= ~chunk; + /* + 0x8080808080808080: + 10000000 10000000 10000000 10000000 10000000 10000000 10000000 10000000 + + mask: + 00000000 00000000 11111111 00000000 00000001 00000001 00000000 00000111 + + ANDing mask with 0x8080808080808080 isolates the most significant bit in each byte where + the original byte was 0x00, thereby setting the most significant bit to 1 in each 0x00 original byte. + + result: + 00000000 00000000 10000000 00000000 00000000 00000000 00000000 00000000 + ^^^^^^^^ + The most significant bit is set in each 0x00 byte, and only there. + */ + mask &= 0x8080808080808080L; + if (mask != 0) { + /* + The UTF-8 data is endian-independent and stored in left-to-right order in the buffer, with the first byte at the lowest index. 
+ After calling getLong() in little-endian mode, the first UTF-8 byte ends up in the least significant byte of the long (bits 0–7), + and the last one in the most significant byte (bits 56–63). + + numberOfTrailingZeros scans from the least significant bit, which aligns with the position of the first UTF-8 byte. + We then use >>> 3, which means dividing without remainder by Long.BYTES because Long.BYTES is 2^3, computing the byte offset + of the NULL terminator in the original UTF-8 data. + */ + int offset = Long.numberOfTrailingZeros(mask) >>> 3; + // Find the NULL terminator at pos + offset + return (pos - prevPos) + offset + 1; + } + } + + // Process remaining bytes one by one. + while (pos < limit) { + if (buffer.get(pos++) == 0) { + return (pos - prevPos); + } + } + + buffer.position(pos); + throw new BsonSerializationException("Found a BSON string that is not null-terminated"); } - @Deprecated @Override - public void mark(final int readLimit) { + public void skip(final int numBytes) { ensureOpen(); - mark = buffer.position(); + buffer.position(buffer.position() + numBytes); } @Override public BsonInputMark getMark(final int readLimit) { return new BsonInputMark() { - private int mark = buffer.position(); + private final int mark = buffer.position(); @Override public void reset() { ensureOpen(); @@ -200,15 +269,6 @@ public void reset() { }; } - @Override - public void reset() { - ensureOpen(); - if (mark == -1) { - throw new IllegalStateException("Mark not set"); - } - buffer.position(mark); - } - @Override public boolean hasRemaining() { ensureOpen(); diff --git a/bson/src/main/org/bson/io/OutputBuffer.java b/bson/src/main/org/bson/io/OutputBuffer.java index c733032bad1..d4ae12d4245 100644 --- a/bson/src/main/org/bson/io/OutputBuffer.java +++ b/bson/src/main/org/bson/io/OutputBuffer.java @@ -41,6 +41,16 @@ public void write(final byte[] b) { public void close() { } + /** + * {@inheritDoc} + *

    + * The {@link #flush()} method of {@link OutputBuffer} does nothing.

    + */ + @Override + public void flush() throws IOException { + super.flush(); + } + @Override public void write(final byte[] bytes, final int offset, final int length) { writeBytes(bytes, offset, length); @@ -60,6 +70,7 @@ public void writeInt32(final int value) { } @Override + @Deprecated public void writeInt32(final int position, final int value) { write(position, value >> 0); write(position + 1, value >> 8); @@ -122,7 +133,10 @@ public int size() { * Get a list of byte buffers that are prepared to be read from; in other words, whose position is 0 and whose limit is the number of * bytes that should read.

    Note that the byte buffers may be read-only.

    * - * @return the non-null list of byte buffers, in LITTLE_ENDIAN order + * @return the non-null list of byte buffers, in LITTLE_ENDIAN order. The returned {@link ByteBuf}s must eventually be + * {@linkplain ByteBuf#release() released} explicitly, calling {@link OutputBuffer#close()} may be not enough to release them. + * The caller must not use the {@link ByteBuf}s after closing this {@link OutputBuffer}, + * though releasing them is allowed to be done after closing this {@link OutputBuffer}. */ public abstract List getByteBuffers(); @@ -183,7 +197,15 @@ public void writeLong(final long value) { writeInt64(value); } - private int writeCharacters(final String str, final boolean checkForNullCharacters) { + /** + * Writes the characters of a string to the buffer as UTF-8 bytes. + * + * @param str the string to write. + * @param checkForNullCharacters if true, check for and disallow null characters in the string. + * @return the total number of bytes written. + * @throws BsonSerializationException if checkForNullCharacters is true and the string contains a null character. 
+ */ + protected int writeCharacters(final String str, final boolean checkForNullCharacters) { int len = str.length(); int total = 0; diff --git a/bson/src/main/org/bson/json/DateTimeFormatter.java b/bson/src/main/org/bson/json/DateTimeFormatter.java index 9188e05bfd2..9f060cf6c4e 100644 --- a/bson/src/main/org/bson/json/DateTimeFormatter.java +++ b/bson/src/main/org/bson/json/DateTimeFormatter.java @@ -16,143 +16,32 @@ package org.bson.json; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; import java.time.Instant; +import java.time.LocalDate; import java.time.ZoneId; +import java.time.ZoneOffset; import java.time.ZonedDateTime; -import java.time.format.DateTimeParseException; -import java.time.temporal.TemporalAccessor; -import java.time.temporal.TemporalQuery; -import java.util.Calendar; -import java.util.TimeZone; +import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE; import static java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME; final class DateTimeFormatter { - private static final FormatterImpl FORMATTER_IMPL; - - static { - FormatterImpl dateTimeHelper; - try { - dateTimeHelper = loadDateTimeFormatter("org.bson.json.DateTimeFormatter$Java8DateTimeFormatter"); - } catch (LinkageError e) { - // this is expected if running on a release prior to Java 8: fallback to JAXB. 
- dateTimeHelper = loadDateTimeFormatter("org.bson.json.DateTimeFormatter$JaxbDateTimeFormatter"); - } - - FORMATTER_IMPL = dateTimeHelper; - } - - private static FormatterImpl loadDateTimeFormatter(final String className) { - try { - return (FormatterImpl) Class.forName(className).getDeclaredConstructor().newInstance(); - } catch (ClassNotFoundException e) { - // this is unexpected as it means the class itself is not found - throw new ExceptionInInitializerError(e); - } catch (InstantiationException e) { - // this is unexpected as it means the class can't be instantiated - throw new ExceptionInInitializerError(e); - } catch (IllegalAccessException e) { - // this is unexpected as it means the no-args constructor isn't accessible - throw new ExceptionInInitializerError(e); - } catch (NoSuchMethodException e) { - throw new ExceptionInInitializerError(e); - } catch (InvocationTargetException e) { - throw new ExceptionInInitializerError(e); - } - } + private static final int DATE_STRING_LENGTH = "1970-01-01".length(); static long parse(final String dateTimeString) { - return FORMATTER_IMPL.parse(dateTimeString); - } - - static String format(final long dateTime) { - return FORMATTER_IMPL.format(dateTime); - } - - private interface FormatterImpl { - long parse(String dateTimeString); - String format(long dateTime); - } - - // Reflective use of DatatypeConverter avoids a compile-time dependency on the java.xml.bind module in Java 9 - static class JaxbDateTimeFormatter implements FormatterImpl { - - private static final Method DATATYPE_CONVERTER_PARSE_DATE_TIME_METHOD; - private static final Method DATATYPE_CONVERTER_PRINT_DATE_TIME_METHOD; - - static { - try { - DATATYPE_CONVERTER_PARSE_DATE_TIME_METHOD = Class.forName("javax.xml.bind.DatatypeConverter") - .getDeclaredMethod("parseDateTime", String.class); - DATATYPE_CONVERTER_PRINT_DATE_TIME_METHOD = Class.forName("javax.xml.bind.DatatypeConverter") - .getDeclaredMethod("printDateTime", Calendar.class); - } catch 
(NoSuchMethodException e) { - throw new ExceptionInInitializerError(e); - } catch (ClassNotFoundException e) { - throw new ExceptionInInitializerError(e); - } - } - - @Override - public long parse(final String dateTimeString) { - try { - return ((Calendar) DATATYPE_CONVERTER_PARSE_DATE_TIME_METHOD.invoke(null, dateTimeString)).getTimeInMillis(); - } catch (IllegalAccessException e) { - throw new IllegalStateException(e); - } catch (InvocationTargetException e) { - throw (RuntimeException) e.getCause(); - } - } - - @Override - public String format(final long dateTime) { - Calendar calendar = Calendar.getInstance(); - calendar.setTimeInMillis(dateTime); - calendar.setTimeZone(TimeZone.getTimeZone("Z")); - try { - return (String) DATATYPE_CONVERTER_PRINT_DATE_TIME_METHOD.invoke(null, calendar); - } catch (IllegalAccessException e) { - throw new IllegalStateException(); - } catch (InvocationTargetException e) { - throw (RuntimeException) e.getCause(); - } + // ISO_OFFSET_DATE_TIME will not parse date strings consisting of just year-month-day, so use ISO_LOCAL_DATE for those + if (dateTimeString.length() == DATE_STRING_LENGTH) { + return LocalDate.parse(dateTimeString, ISO_LOCAL_DATE).atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli(); + } else { + return ISO_OFFSET_DATE_TIME.parse(dateTimeString, temporal -> Instant.from(temporal)).toEpochMilli(); } } - static class Java8DateTimeFormatter implements FormatterImpl { - - // if running on Java 8 or above then java.time.format.DateTimeFormatter will be available and initialization will succeed. - // Otherwise it will fail. 
- static { - try { - Class.forName("java.time.format.DateTimeFormatter"); - } catch (ClassNotFoundException e) { - throw new ExceptionInInitializerError(e); - } - } - - @Override - public long parse(final String dateTimeString) { - try { - return ISO_OFFSET_DATE_TIME.parse(dateTimeString, new TemporalQuery() { - @Override - public Instant queryFrom(final TemporalAccessor temporal) { - return Instant.from(temporal); - } - }).toEpochMilli(); - } catch (DateTimeParseException e) { - throw new IllegalArgumentException(e.getMessage()); - } - } - - @Override - public String format(final long dateTime) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(dateTime), ZoneId.of("Z")).format(ISO_OFFSET_DATE_TIME); - } + static String format(final long dateTime) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(dateTime), ZoneId.of("Z")).format(ISO_OFFSET_DATE_TIME); } private DateTimeFormatter() { } + } diff --git a/bson/src/main/org/bson/json/ExtendedJsonBinaryConverter.java b/bson/src/main/org/bson/json/ExtendedJsonBinaryConverter.java index 4deaf55d333..a779a4ef694 100644 --- a/bson/src/main/org/bson/json/ExtendedJsonBinaryConverter.java +++ b/bson/src/main/org/bson/json/ExtendedJsonBinaryConverter.java @@ -17,7 +17,8 @@ package org.bson.json; import org.bson.BsonBinary; -import org.bson.internal.Base64; + +import java.util.Base64; class ExtendedJsonBinaryConverter implements Converter { @@ -25,7 +26,7 @@ class ExtendedJsonBinaryConverter implements Converter { public void convert(final BsonBinary value, final StrictJsonWriter writer) { writer.writeStartObject(); writer.writeStartObject("$binary"); - writer.writeString("base64", Base64.encode(value.getData())); + writer.writeString("base64", Base64.getEncoder().encodeToString(value.getData())); writer.writeString("subType", String.format("%02X", value.getType())); writer.writeEndObject(); writer.writeEndObject(); diff --git a/bson/src/main/org/bson/json/ExtendedJsonTimestampConverter.java 
b/bson/src/main/org/bson/json/ExtendedJsonTimestampConverter.java index e82549abc5d..d664a60dc37 100644 --- a/bson/src/main/org/bson/json/ExtendedJsonTimestampConverter.java +++ b/bson/src/main/org/bson/json/ExtendedJsonTimestampConverter.java @@ -17,21 +17,18 @@ package org.bson.json; import org.bson.BsonTimestamp; -import org.bson.internal.UnsignedLongs; + +import static java.lang.Integer.toUnsignedLong; +import static java.lang.Long.toUnsignedString; class ExtendedJsonTimestampConverter implements Converter { @Override public void convert(final BsonTimestamp value, final StrictJsonWriter writer) { writer.writeStartObject(); writer.writeStartObject("$timestamp"); - writer.writeNumber("t", UnsignedLongs.toString(toUnsignedLong(value.getTime()))); - writer.writeNumber("i", UnsignedLongs.toString(toUnsignedLong(value.getInc()))); + writer.writeNumber("t", toUnsignedString(toUnsignedLong(value.getTime()))); + writer.writeNumber("i", toUnsignedString(toUnsignedLong(value.getInc()))); writer.writeEndObject(); writer.writeEndObject(); } - - // Equivalent to Integer.toUnsignedLong() in Java 8 - private long toUnsignedLong(final int value) { - return ((long) value) & 0xffffffffL; - } } diff --git a/bson/src/main/org/bson/json/JsonBuffer.java b/bson/src/main/org/bson/json/JsonBuffer.java index 957440c70a5..2db6c116238 100644 --- a/bson/src/main/org/bson/json/JsonBuffer.java +++ b/bson/src/main/org/bson/json/JsonBuffer.java @@ -16,47 +16,17 @@ package org.bson.json; -class JsonBuffer { - - private final String buffer; - private int position; - private boolean eof; - - JsonBuffer(final String buffer) { - this.buffer = buffer; - } - - public int getPosition() { - return position; - } - - public void setPosition(final int position) { - this.position = position; - } - - public int read() { - if (eof) { - throw new JsonParseException("Trying to read past EOF."); - } else if (position >= buffer.length()) { - eof = true; - return -1; - } else { - return buffer.charAt(position++); 
- } - } - - public void unread(final int c) { - eof = false; - if (c != -1 && buffer.charAt(position - 1) == c) { - position--; - } - } - - public String substring(final int beginIndex) { - return buffer.substring(beginIndex); - } - - public String substring(final int beginIndex, final int endIndex) { - return buffer.substring(beginIndex, endIndex); - } +interface JsonBuffer { + + int getPosition(); + + int read(); + + void unread(int c); + + int mark(); + + void reset(int markPos); + + void discard(int markPos); } diff --git a/bson/src/main/org/bson/json/JsonMode.java b/bson/src/main/org/bson/json/JsonMode.java index e2693b68e6d..7b5a99d6551 100644 --- a/bson/src/main/org/bson/json/JsonMode.java +++ b/bson/src/main/org/bson/json/JsonMode.java @@ -27,9 +27,10 @@ public enum JsonMode { /** * Strict mode representations of BSON types conform to the
    JSON RFC spec. * - * @deprecated The format generated with this mode is no longer considered standard for MongoDB tools. + * @deprecated The format generated with this mode is no longer considered standard for MongoDB tools. This value is not currently + * scheduled for removal. */ - @Deprecated + @Deprecated // NOT CURRENTLY INTENDED FOR REMOVAL STRICT, /** @@ -42,7 +43,7 @@ public enum JsonMode { * Standard extended JSON representation. * * @since 3.5 - * @see Extended JSON Specification + * @see Extended JSON Specification */ EXTENDED, @@ -50,7 +51,7 @@ public enum JsonMode { * Standard relaxed extended JSON representation. * * @since 3.5 - * @see Extended JSON Specification + * @see Extended JSON Specification */ RELAXED } diff --git a/bson/src/main/org/bson/json/JsonObject.java b/bson/src/main/org/bson/json/JsonObject.java new file mode 100644 index 00000000000..5bb8b746f9f --- /dev/null +++ b/bson/src/main/org/bson/json/JsonObject.java @@ -0,0 +1,103 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.json; + +import org.bson.BsonDocument; +import org.bson.BsonDocumentWrapper; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; + +import static org.bson.assertions.Assertions.isTrueArgument; +import static org.bson.assertions.Assertions.notNull; + +/** + * A wrapper class that holds a JSON object string. This class makes decoding JSON efficient. 
+ * Note that this class only holds valid JSON objects, not arrays or other values. + * + * @since 4.2 + */ +public class JsonObject implements Bson { + private final String json; + + /** + * Constructs a new instance with the given JSON object string. Clients must ensure + * they only pass in valid JSON objects to this constructor. The constructor does not + * perform full validation on construction, but an invalid JsonObject can cause errors + * when it is used later on. + * + * @param json the JSON object string + */ + public JsonObject(final String json) { + notNull("Json", json); + + boolean foundBrace = false; + for (int i = 0; i < json.length(); i++) { + char c = json.charAt(i); + if (c == '{') { + foundBrace = true; + break; + } + isTrueArgument("json is a valid JSON object", Character.isWhitespace(c)); + } + isTrueArgument("json is a valid JSON object", foundBrace); + + this.json = json; + } + + /** + * Gets the JSON object string + * + * @return the JSON object string + */ + public String getJson() { + return json; + } + + @Override + public BsonDocument toBsonDocument(final Class documentClass, final CodecRegistry registry) { + return new BsonDocumentWrapper<>(this, registry.get(JsonObject.class)); + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + + if (o == null || getClass() != o.getClass()) { + return false; + } + + JsonObject that = (JsonObject) o; + + if (!json.equals(that.getJson())) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return json.hashCode(); + } + + @Override + public String toString() { + return json; + } +} diff --git a/bson/src/main/org/bson/json/JsonParseException.java b/bson/src/main/org/bson/json/JsonParseException.java index 00dcc172f98..93c2efd85fa 100644 --- a/bson/src/main/org/bson/json/JsonParseException.java +++ b/bson/src/main/org/bson/json/JsonParseException.java @@ -33,7 +33,6 @@ public class JsonParseException extends RuntimeException 
{ * Constructs a new runtime exception with null as its detail message. */ public JsonParseException() { - super(); } /** @@ -45,6 +44,7 @@ public JsonParseException(final String s) { super(s); } + /** * Constructs a new runtime exception with string formatted using specified pattern and arguments. * @@ -55,10 +55,21 @@ public JsonParseException(final String pattern, final Object... args) { super(format(pattern, args)); } + /** + * Constructs a new runtime exception with the specified detail message and root cause. + * + * @param s The detail message + * @param t the throwable root cause + * @since 4.2 + */ + public JsonParseException(final String s, final Throwable t) { + super(s, t); + } + /** * Create a JSONParseException with the given {@link Throwable} cause. * - * @param t the throwable root case + * @param t the throwable root cause */ public JsonParseException(final Throwable t) { super(t); diff --git a/bson/src/main/org/bson/json/JsonReader.java b/bson/src/main/org/bson/json/JsonReader.java index ebf436588cd..0884ebb7879 100644 --- a/bson/src/main/org/bson/json/JsonReader.java +++ b/bson/src/main/org/bson/json/JsonReader.java @@ -18,7 +18,6 @@ import org.bson.AbstractBsonReader; -import org.bson.BSONException; import org.bson.BsonBinary; import org.bson.BsonBinarySubType; import org.bson.BsonContextType; @@ -29,19 +28,22 @@ import org.bson.BsonTimestamp; import org.bson.BsonType; import org.bson.BsonUndefined; -import org.bson.internal.Base64; import org.bson.types.Decimal128; import org.bson.types.MaxKey; import org.bson.types.MinKey; import org.bson.types.ObjectId; +import java.io.Reader; import java.text.DateFormat; import java.text.ParsePosition; import java.text.SimpleDateFormat; +import java.time.format.DateTimeParseException; +import java.util.Base64; import java.util.Calendar; import java.util.Date; import java.util.Locale; import java.util.TimeZone; +import java.util.UUID; import static java.lang.String.format; @@ -51,12 +53,12 @@ *
      *
    • Strict mode that conforms to the JSON RFC specifications.
    • *
    • JavaScript mode that that most JavaScript interpreters can process
    • - *
    • Shell mode that the mongo shell can process. + *
    • Shell mode that the mongo shell can process. * This is also called "extended" JavaScript format.
    • *
    * For more information about this modes please see - * - * http://docs.mongodb.org/manual/reference/mongodb-extended-json/ + * + * https://www.mongodb.com/docs/manual/reference/mongodb-extended-json/ * * * @since 3.0 @@ -66,16 +68,32 @@ public class JsonReader extends AbstractBsonReader { private final JsonScanner scanner; private JsonToken pushedToken; private Object currentValue; - private Mark mark; /** - * Constructs a new instance with the given JSON string. + * Constructs a new instance with the given string positioned at a JSON object. * - * @param json A string representation of a JSON. + * @param json A string representation of a JSON object. */ public JsonReader(final String json) { - super(); - scanner = new JsonScanner(json); + this(new JsonScanner(json)); + } + + /** + * Constructs a new instance with the given {@code Reader} positioned at a JSON object. + * + *

    + * The application is responsible for closing the {@code Reader}. + *

    + * + * @param reader A reader representation of a JSON object. + * @since 3.11 + */ + public JsonReader(final Reader reader) { + this(new JsonScanner(reader)); + } + + private JsonReader(final JsonScanner scanner) { + this.scanner = scanner; setContext(new Context(null, BsonContextType.TOP_LEVEL)); } @@ -227,16 +245,9 @@ public BsonType readBsonType() { } else if ("DBPointer".equals(value)) { setCurrentBsonType(BsonType.DB_POINTER); currentValue = visitDBPointerConstructor(); - } else if ("UUID".equals(value) - || "GUID".equals(value) - || "CSUUID".equals(value) - || "CSGUID".equals(value) - || "JUUID".equals(value) - || "JGUID".equals(value) - || "PYUUID".equals(value) - || "PYGUID".equals(value)) { + } else if ("UUID".equals(value)) { setCurrentBsonType(BsonType.BINARY); - currentValue = visitUUIDConstructor(value); + currentValue = visitUUIDConstructor(); } else if ("new".equals(value)) { visitNew(); } else { @@ -577,15 +588,8 @@ private void visitNew() { } else if ("DBPointer".equals(value)) { currentValue = visitDBPointerConstructor(); setCurrentBsonType(BsonType.DB_POINTER); - } else if ("UUID".equals(value) - || "GUID".equals(value) - || "CSUUID".equals(value) - || "CSGUID".equals(value) - || "JUUID".equals(value) - || "JGUID".equals(value) - || "PYUUID".equals(value) - || "PYGUID".equals(value)) { - currentValue = visitUUIDConstructor(value); + } else if ("UUID".equals(value)) { + currentValue = visitUUIDConstructor(); setCurrentBsonType(BsonType.BINARY); } else { throw new JsonParseException("JSON reader expected a type name but found '%s'.", value); @@ -605,6 +609,10 @@ private void visitExtendedJSON() { setCurrentBsonType(BsonType.BINARY); return; } + } if ("$uuid".equals(value)) { + currentValue = visitUuidExtendedJson(); + setCurrentBsonType(BsonType.BINARY); + return; } else if ("$regex".equals(value) || "$options".equals(value)) { currentValue = visitRegularExpressionExtendedJson(value); if (currentValue != null) { @@ -695,20 +703,15 @@ private 
BsonBinary visitBinDataConstructor() { } verifyToken(JsonTokenType.RIGHT_PAREN); - byte[] bytes = Base64.decode(bytesToken.getValue(String.class)); + byte[] bytes = Base64.getDecoder().decode(bytesToken.getValue(String.class)); return new BsonBinary(subTypeToken.getValue(Integer.class).byteValue(), bytes); } - private BsonBinary visitUUIDConstructor(final String uuidConstructorName) { + private BsonBinary visitUUIDConstructor() { verifyToken(JsonTokenType.LEFT_PAREN); - String hexString = readStringFromExtendedJson().replaceAll("\\{", "").replaceAll("}", "").replaceAll("-", ""); + String hexString = readStringFromExtendedJson().replace("-", ""); verifyToken(JsonTokenType.RIGHT_PAREN); - byte[] bytes = decodeHex(hexString); - BsonBinarySubType subType = BsonBinarySubType.UUID_STANDARD; - if (!"UUID".equals(uuidConstructorName) || !"GUID".equals(uuidConstructorName)) { - subType = BsonBinarySubType.UUID_LEGACY; - } - return new BsonBinary(subType, bytes); + return new BsonBinary(BsonBinarySubType.UUID_STANDARD, decodeHex(hexString)); } private BsonRegularExpression visitRegularExpressionConstructor() { @@ -820,28 +823,14 @@ private long visitISODateTimeConstructor() { } verifyToken(JsonTokenType.RIGHT_PAREN); - String[] patterns = {"yyyy-MM-dd", "yyyy-MM-dd'T'HH:mm:ssz", "yyyy-MM-dd'T'HH:mm:ss.SSSz"}; - SimpleDateFormat format = new SimpleDateFormat(patterns[0], Locale.ENGLISH); - ParsePosition pos = new ParsePosition(0); - String s = token.getValue(String.class); - - if (s.endsWith("Z")) { - s = s.substring(0, s.length() - 1) + "GMT-00:00"; - } + String dateTimeString = token.getValue(String.class); - for (final String pattern : patterns) { - format.applyPattern(pattern); - format.setLenient(true); - pos.setIndex(0); - - Date date = format.parse(s, pos); - - if (date != null && pos.getIndex() == s.length()) { - return date.getTime(); - } + try { + return DateTimeFormatter.parse(dateTimeString); + } catch (DateTimeParseException e) { + throw new 
JsonParseException("Failed to parse string as a date: " + dateTimeString, e); } - throw new JsonParseException("Invalid date format."); } private BsonBinary visitHexDataConstructor() { @@ -947,42 +936,46 @@ private BsonBinary visitBinDataExtendedJson(final String firstKey) { Mark mark = new Mark(); - verifyToken(JsonTokenType.COLON); + try { + verifyToken(JsonTokenType.COLON); - if (firstKey.equals("$binary")) { - JsonToken nextToken = popToken(); - if (nextToken.getType() == JsonTokenType.BEGIN_OBJECT) { - JsonToken nameToken = popToken(); - String firstNestedKey = nameToken.getValue(String.class); - byte[] data; - byte type; - if (firstNestedKey.equals("base64")) { - verifyToken(JsonTokenType.COLON); - data = Base64.decode(readStringFromExtendedJson()); - verifyToken(JsonTokenType.COMMA); - verifyString("subType"); - verifyToken(JsonTokenType.COLON); - type = readBinarySubtypeFromExtendedJson(); - } else if (firstNestedKey.equals("subType")) { - verifyToken(JsonTokenType.COLON); - type = readBinarySubtypeFromExtendedJson(); - verifyToken(JsonTokenType.COMMA); - verifyString("base64"); - verifyToken(JsonTokenType.COLON); - data = Base64.decode(readStringFromExtendedJson()); + if (firstKey.equals("$binary")) { + JsonToken nextToken = popToken(); + if (nextToken.getType() == JsonTokenType.BEGIN_OBJECT) { + JsonToken nameToken = popToken(); + String firstNestedKey = nameToken.getValue(String.class); + byte[] data; + byte type; + if (firstNestedKey.equals("base64")) { + verifyToken(JsonTokenType.COLON); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + verifyToken(JsonTokenType.COMMA); + verifyString("subType"); + verifyToken(JsonTokenType.COLON); + type = readBinarySubtypeFromExtendedJson(); + } else if (firstNestedKey.equals("subType")) { + verifyToken(JsonTokenType.COLON); + type = readBinarySubtypeFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("base64"); + verifyToken(JsonTokenType.COLON); + data = 
Base64.getDecoder().decode(readStringFromExtendedJson()); + } else { + throw new JsonParseException("Unexpected key for $binary: " + firstNestedKey); + } + verifyToken(JsonTokenType.END_OBJECT); + verifyToken(JsonTokenType.END_OBJECT); + return new BsonBinary(type, data); } else { - throw new JsonParseException("Unexpected key for $binary: " + firstNestedKey); + mark.reset(); + return visitLegacyBinaryExtendedJson(firstKey); } - verifyToken(JsonTokenType.END_OBJECT); - verifyToken(JsonTokenType.END_OBJECT); - return new BsonBinary(type, data); } else { mark.reset(); return visitLegacyBinaryExtendedJson(firstKey); } - } else { - mark.reset(); - return visitLegacyBinaryExtendedJson(firstKey); + } finally { + mark.discard(); } } @@ -997,7 +990,7 @@ private BsonBinary visitLegacyBinaryExtendedJson(final String firstKey) { byte type; if (firstKey.equals("$binary")) { - data = Base64.decode(readStringFromExtendedJson()); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); verifyToken(JsonTokenType.COMMA); verifyString("$type"); verifyToken(JsonTokenType.COLON); @@ -1007,17 +1000,16 @@ private BsonBinary visitLegacyBinaryExtendedJson(final String firstKey) { verifyToken(JsonTokenType.COMMA); verifyString("$binary"); verifyToken(JsonTokenType.COLON); - data = Base64.decode(readStringFromExtendedJson()); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); } verifyToken(JsonTokenType.END_OBJECT); return new BsonBinary(type, data); - } catch (JsonParseException e) { - mark.reset(); - return null; - } catch (NumberFormatException e) { + } catch (JsonParseException | NumberFormatException e) { mark.reset(); return null; + } finally { + mark.discard(); } } @@ -1042,7 +1034,7 @@ private long visitDateTimeExtendedJson() { JsonToken nameToken = popToken(); String name = nameToken.getValue(String.class); if (!name.equals("$numberLong")) { - throw new JsonParseException(String.format("JSON reader expected $numberLong within $date, but found %s", name)); 
+ throw new JsonParseException(format("JSON reader expected $numberLong within $date, but found %s", name)); } value = visitNumberLongExtendedJson(); verifyToken(JsonTokenType.END_OBJECT); @@ -1053,7 +1045,7 @@ private long visitDateTimeExtendedJson() { String dateTimeString = valueToken.getValue(String.class); try { value = DateTimeFormatter.parse(dateTimeString); - } catch (IllegalArgumentException e) { + } catch (DateTimeParseException e) { throw new JsonParseException("Failed to parse string as a date", e); } } else { @@ -1091,8 +1083,7 @@ private BsonRegularExpression visitNewRegularExpressionExtendedJson() { String pattern; String options = ""; - - String firstKey = readStringFromExtendedJson(); + String firstKey = readStringKeyFromExtendedJson(); if (firstKey.equals("pattern")) { verifyToken(JsonTokenType.COLON); pattern = readStringFromExtendedJson(); @@ -1108,7 +1099,7 @@ private BsonRegularExpression visitNewRegularExpressionExtendedJson() { verifyToken(JsonTokenType.COLON); pattern = readStringFromExtendedJson(); } else { - throw new JsonParseException("Expected 't' and 'i' fields in $timestamp document but found " + firstKey); + throw new JsonParseException("Expected 'pattern' and 'options' fields in $regularExpression document but found " + firstKey); } verifyToken(JsonTokenType.END_OBJECT); @@ -1142,6 +1133,8 @@ private BsonRegularExpression visitRegularExpressionExtendedJson(final String fi } catch (JsonParseException e) { extendedJsonMark.reset(); return null; + } finally { + extendedJsonMark.discard(); } } @@ -1168,7 +1161,7 @@ private BsonTimestamp visitTimestampExtendedJson() { int time; int increment; - String firstKey = readStringFromExtendedJson(); + String firstKey = readStringKeyFromExtendedJson(); if (firstKey.equals("t")) { verifyToken(JsonTokenType.COLON); time = readIntFromExtendedJson(); @@ -1205,6 +1198,18 @@ private int readIntFromExtendedJson() { return value; } + private BsonBinary visitUuidExtendedJson() { + 
verifyToken(JsonTokenType.COLON); + String uuidString = readStringFromExtendedJson(); + verifyToken(JsonTokenType.END_OBJECT); + try { + UuidStringValidator.validate(uuidString); + return new BsonBinary(UUID.fromString(uuidString)); + } catch (IllegalArgumentException e) { + throw new JsonParseException(e); + } + } + private void visitJavaScriptExtendedJson() { verifyToken(JsonTokenType.COLON); String code = readStringFromExtendedJson(); @@ -1329,65 +1334,71 @@ private ObjectId readDbPointerIdFromExtendedJson() { return oid; } - @Deprecated - @Override - public void mark() { - if (mark != null) { - throw new BSONException("A mark already exists; it needs to be reset before creating a new one"); - } - mark = new Mark(); - } - @Override public BsonReaderMark getMark() { return new Mark(); } - @Override - public void reset() { - if (mark == null) { - throw new BSONException("trying to reset a mark before creating it"); - } - mark.reset(); - mark = null; - } - @Override protected Context getContext() { return (Context) super.getContext(); } + /** + * An implementation of {@code AbstractBsonReader.Mark}. + */ protected class Mark extends AbstractBsonReader.Mark { private final JsonToken pushedToken; private final Object currentValue; - private final int position; + private final int markPos; + /** + * Construct an instance. + */ protected Mark() { - super(); pushedToken = JsonReader.this.pushedToken; currentValue = JsonReader.this.currentValue; - position = JsonReader.this.scanner.getBufferPosition(); + markPos = JsonReader.this.scanner.mark(); } + @Override public void reset() { super.reset(); JsonReader.this.pushedToken = pushedToken; JsonReader.this.currentValue = currentValue; - JsonReader.this.scanner.setBufferPosition(position); + JsonReader.this.scanner.reset(markPos); JsonReader.this.setContext(new Context(getParentContext(), getContextType())); } + + /** + * Discard the mark. 
+ */ + public void discard() { + JsonReader.this.scanner.discard(markPos); + } } + /** + * An implementation of {@code AbstractBsonReader.Context}/ + */ protected class Context extends AbstractBsonReader.Context { + /** + * Construct an instance. + * + * @param parentContext the parent context + * @param contextType the context type + */ protected Context(final AbstractBsonReader.Context parentContext, final BsonContextType contextType) { super(parentContext, contextType); } + @Override protected Context getParentContext() { return (Context) super.getParentContext(); } + @Override protected BsonContextType getContextType() { return super.getContextType(); } @@ -1412,5 +1423,18 @@ private static byte[] decodeHex(final String hex) { return out; } + + /** + * Read an extended json key and verify its type. + * Throws a org.bson.json.JsonParseException if the key is not an unquoted string or a simple string. + * @return the key string value + */ + private String readStringKeyFromExtendedJson() { + JsonToken patternToken = popToken(); + if (patternToken.getType() != JsonTokenType.STRING && patternToken.getType() != JsonTokenType.UNQUOTED_STRING) { + throw new JsonParseException("JSON reader expected a string but found '%s'.", patternToken.getValue()); + } + return patternToken.getValue(String.class); + } } diff --git a/bson/src/main/org/bson/json/JsonScanner.java b/bson/src/main/org/bson/json/JsonScanner.java index abfb2906963..77ae680342d 100644 --- a/bson/src/main/org/bson/json/JsonScanner.java +++ b/bson/src/main/org/bson/json/JsonScanner.java @@ -18,6 +18,8 @@ import org.bson.BsonRegularExpression; +import java.io.Reader; + /** * Parses the string representation of a JSON object into a set of {@link JsonToken}-derived objects. 
* @@ -32,21 +34,23 @@ class JsonScanner { } JsonScanner(final String json) { - this(new JsonBuffer(json)); + this(new JsonStringBuffer(json)); } - /** - * @param newPosition the new position of the cursor position in the buffer - */ - public void setBufferPosition(final int newPosition) { - buffer.setPosition(newPosition); + JsonScanner(final Reader reader) { + this(new JsonStreamBuffer(reader)); } - /** - * @return the current location of the cursor in the buffer - */ - public int getBufferPosition() { - return buffer.getPosition(); + public void reset(final int markPos) { + buffer.reset(markPos); + } + + public int mark() { + return buffer.mark(); + } + + public void discard(final int markPos) { + buffer.discard(markPos); } /** @@ -92,7 +96,7 @@ public JsonToken nextToken() { if (c == '-' || Character.isDigit(c)) { return scanNumber((char) c); } else if (c == '$' || c == '_' || Character.isLetter(c)) { - return scanUnquotedString(); + return scanUnquotedString((char) c); } else { int position = buffer.getPosition(); buffer.unread(c); @@ -115,9 +119,8 @@ public JsonToken nextToken() { */ private JsonToken scanRegularExpression() { - int start = buffer.getPosition() - 1; - int options = -1; - + StringBuilder patternBuilder = new StringBuilder(); + StringBuilder optionsBuilder = new StringBuilder(); RegularExpressionState state = RegularExpressionState.IN_PATTERN; while (true) { int c = buffer.read(); @@ -129,7 +132,6 @@ private JsonToken scanRegularExpression() { break; case '/': state = RegularExpressionState.IN_OPTIONS; - options = buffer.getPosition(); break; case '\\': state = RegularExpressionState.IN_ESCAPE_SEQUENCE; @@ -173,13 +175,19 @@ private JsonToken scanRegularExpression() { switch (state) { case DONE: buffer.unread(c); - int end = buffer.getPosition(); BsonRegularExpression regex - = new BsonRegularExpression(buffer.substring(start + 1, options - 1), buffer.substring(options, end)); + = new BsonRegularExpression(patternBuilder.toString(), 
optionsBuilder.toString()); return new JsonToken(JsonTokenType.REGULAR_EXPRESSION, regex); case INVALID: throw new JsonParseException("Invalid JSON regular expression. Position: %d.", buffer.getPosition()); default: + if (state == RegularExpressionState.IN_OPTIONS) { + if (c != '/') { + optionsBuilder.append((char) c); + } + } else { + patternBuilder.append((char) c); + } } } } @@ -189,14 +197,16 @@ private JsonToken scanRegularExpression() { * * @return The string token. */ - private JsonToken scanUnquotedString() { - int start = buffer.getPosition() - 1; + private JsonToken scanUnquotedString(final char firstChar) { + StringBuilder sb = new StringBuilder(); + sb.append(firstChar); int c = buffer.read(); while (c == '$' || c == '_' || Character.isLetterOrDigit(c)) { + sb.append((char) c); c = buffer.read(); } buffer.unread(c); - String lexeme = buffer.substring(start, buffer.getPosition()); + String lexeme = sb.toString(); return new JsonToken(JsonTokenType.UNQUOTED_STRING, lexeme); } @@ -222,8 +232,8 @@ private JsonToken scanUnquotedString() { private JsonToken scanNumber(final char firstChar) { int c = firstChar; - - int start = buffer.getPosition() - 1; + StringBuilder sb = new StringBuilder(); + sb.append(firstChar); NumberState state; @@ -392,12 +402,13 @@ private JsonToken scanNumber(final char firstChar) { break; case SAW_MINUS_I: boolean sawMinusInfinity = true; - char[] nfinity = new char[]{'n', 'f', 'i', 'n', 'i', 't', 'y'}; + char[] nfinity = {'n', 'f', 'i', 'n', 'i', 't', 'y'}; for (int i = 0; i < nfinity.length; i++) { if (c != nfinity[i]) { sawMinusInfinity = false; break; } + sb.append((char) c); c = buffer.read(); } if (sawMinusInfinity) { @@ -430,7 +441,7 @@ private JsonToken scanNumber(final char firstChar) { throw new JsonParseException("Invalid JSON number"); case DONE: buffer.unread(c); - String lexeme = buffer.substring(start, buffer.getPosition()); + String lexeme = sb.toString(); if (type == JsonTokenType.DOUBLE) { return new 
JsonToken(JsonTokenType.DOUBLE, Double.parseDouble(lexeme)); } else { @@ -442,6 +453,7 @@ private JsonToken scanNumber(final char firstChar) { } } default: + sb.append((char) c); } } @@ -460,59 +472,56 @@ private JsonToken scanString(final char quoteCharacter) { while (true) { int c = buffer.read(); - switch (c) { - case '\\': - c = buffer.read(); - switch (c) { - case '\'': - sb.append('\''); - break; - case '"': - sb.append('"'); - break; - case '\\': - sb.append('\\'); - break; - case '/': - sb.append('/'); - break; - case 'b': - sb.append('\b'); - break; - case 'f': - sb.append('\f'); - break; - case 'n': - sb.append('\n'); - break; - case 'r': - sb.append('\r'); - break; - case 't': - sb.append('\t'); - break; - case 'u': - int u1 = buffer.read(); - int u2 = buffer.read(); - int u3 = buffer.read(); - int u4 = buffer.read(); - if (u4 != -1) { - String hex = new String(new char[]{(char) u1, (char) u2, (char) u3, (char) u4}); - sb.append((char) Integer.parseInt(hex, 16)); - } - break; - default: - throw new JsonParseException("Invalid escape sequence in JSON string '\\%c'.", c); - } - break; - - default: - if (c == quoteCharacter) { - return new JsonToken(JsonTokenType.STRING, sb.toString()); - } - if (c != -1) { - sb.append((char) c); - } + if (c == '\\') { + c = buffer.read(); + switch (c) { + case '\'': + sb.append('\''); + break; + case '"': + sb.append('"'); + break; + case '\\': + sb.append('\\'); + break; + case '/': + sb.append('/'); + break; + case 'b': + sb.append('\b'); + break; + case 'f': + sb.append('\f'); + break; + case 'n': + sb.append('\n'); + break; + case 'r': + sb.append('\r'); + break; + case 't': + sb.append('\t'); + break; + case 'u': + int u1 = buffer.read(); + int u2 = buffer.read(); + int u3 = buffer.read(); + int u4 = buffer.read(); + if (u4 != -1) { + String hex = new String(new char[]{(char) u1, (char) u2, (char) u3, (char) u4}); + sb.append((char) Integer.parseInt(hex, 16)); + } + break; + default: + throw new 
JsonParseException("Invalid escape sequence in JSON string '\\%c'.", c); + } + } else { + if (c == quoteCharacter) { + return new JsonToken(JsonTokenType.STRING, sb.toString()); + } + if (c != -1) { + sb.append((char) c); + } } if (c == -1) { throw new JsonParseException("End of file in JSON string."); diff --git a/bson/src/main/org/bson/json/JsonStreamBuffer.java b/bson/src/main/org/bson/json/JsonStreamBuffer.java new file mode 100644 index 00000000000..077f141fd81 --- /dev/null +++ b/bson/src/main/org/bson/json/JsonStreamBuffer.java @@ -0,0 +1,155 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.json; + +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayList; +import java.util.List; + +class JsonStreamBuffer implements JsonBuffer { + + private final Reader reader; + private final List markedPositions = new ArrayList<>(); + private final int initialBufferSize; + private int position; + private int lastChar; + private boolean reuseLastChar; + private boolean eof; + private char[] buffer; + private int bufferStartPos; + private int bufferCount; + + JsonStreamBuffer(final Reader reader) { + this(reader, 16); + } + + JsonStreamBuffer(final Reader reader, final int initialBufferSize) { + this.initialBufferSize = initialBufferSize; + this.reader = reader; + resetBuffer(); + } + + public int getPosition() { + return position; + } + + public int read() { + if (eof) { + throw new JsonParseException("Trying to read past EOF."); + } + + // if we just unread, we need to use the last character read since it may not be in the + // buffer + if (reuseLastChar) { + reuseLastChar = false; + int reusedChar = lastChar; + lastChar = -1; + position++; + return reusedChar; + } + + // use the buffer until we catch up to the stream position + if (position - bufferStartPos < bufferCount) { + int currChar = buffer[position - bufferStartPos]; + lastChar = currChar; + position++; + return currChar; + } + + if (markedPositions.isEmpty()) { + resetBuffer(); + } + + // otherwise, try and read from the stream + try { + int nextChar = reader.read(); + if (nextChar != -1) { + lastChar = nextChar; + addToBuffer((char) nextChar); + } + position++; + if (nextChar == -1) { + eof = true; + } + return nextChar; + + } catch (IOException e) { + throw new JsonParseException(e); + } + } + + private void resetBuffer() { + bufferStartPos = -1; + bufferCount = 0; + buffer = new char[initialBufferSize]; + } + + public void unread(final int c) { + eof = false; + if (c != -1 && lastChar == c) { + reuseLastChar = true; + position--; + } + } + + public int 
mark() { + if (bufferCount == 0) { // Why not markedPositions.isEmpty()? + bufferStartPos = position; + } + if (!markedPositions.contains(position)) { + markedPositions.add(position); + } + return position; + } + + public void reset(final int markPos) { + if (markPos > position) { + throw new IllegalStateException("mark cannot reset ahead of position, only back"); + } + int idx = markedPositions.indexOf(markPos); + if (idx == -1) { + throw new IllegalArgumentException("mark invalidated"); + } + if (markPos != position) { + reuseLastChar = false; + } + markedPositions.subList(idx, markedPositions.size()).clear(); + position = markPos; + } + + public void discard(final int markPos) { + int idx = markedPositions.indexOf(markPos); + if (idx == -1) { + return; + } + markedPositions.subList(idx, markedPositions.size()).clear(); + } + + private void addToBuffer(final char curChar) { + // if the lowest mark is ahead of our position, we can safely add it to our buffer + if (!markedPositions.isEmpty()) { + if (bufferCount == buffer.length) { + char[] newBuffer = new char[buffer.length * 2]; + System.arraycopy(buffer, 0, newBuffer, 0, bufferCount); + buffer = newBuffer; + } + buffer[bufferCount] = curChar; + bufferCount++; + } + } +} diff --git a/bson/src/main/org/bson/json/JsonStringBuffer.java b/bson/src/main/org/bson/json/JsonStringBuffer.java new file mode 100644 index 00000000000..79f8783f9b7 --- /dev/null +++ b/bson/src/main/org/bson/json/JsonStringBuffer.java @@ -0,0 +1,65 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.json; + +class JsonStringBuffer implements JsonBuffer { + + private final String buffer; + private int position; + private boolean eof; + + JsonStringBuffer(final String buffer) { + this.buffer = buffer; + } + + public int getPosition() { + return position; + } + + public int read() { + if (eof) { + throw new JsonParseException("Trying to read past EOF."); + } else if (position >= buffer.length()) { + eof = true; + return -1; + } else { + return buffer.charAt(position++); + } + } + + public void unread(final int c) { + eof = false; + if (c != -1 && buffer.charAt(position - 1) == c) { + position--; + } + } + + public int mark() { + return position; + } + + public void reset(final int markPos) { + if (markPos > position) { + throw new IllegalStateException("mark cannot reset ahead of position, only back"); + } + position = markPos; + } + + public void discard(final int markPos) { + } + +} diff --git a/bson/src/main/org/bson/json/JsonWriter.java b/bson/src/main/org/bson/json/JsonWriter.java index f0feabb3e5f..a1baf0ef0a2 100644 --- a/bson/src/main/org/bson/json/JsonWriter.java +++ b/bson/src/main/org/bson/json/JsonWriter.java @@ -41,9 +41,8 @@ public class JsonWriter extends AbstractBsonWriter { * * @param writer the writer to write JSON to. 
*/ - @SuppressWarnings("deprecation") public JsonWriter(final Writer writer) { - this(writer, new JsonWriterSettings()); + this(writer, JsonWriterSettings.builder().build()); } /** @@ -133,29 +132,19 @@ protected void doWriteDateTime(final long value) { @Override protected void doWriteDBPointer(final BsonDbPointer value) { if (settings.getOutputMode() == JsonMode.EXTENDED) { - new Converter() { - @Override - public void convert(final BsonDbPointer value1, final StrictJsonWriter writer) { - writer.writeStartObject(); - writer.writeStartObject("$dbPointer"); - writer.writeString("$ref", value1.getNamespace()); - writer.writeName("$id"); - doWriteObjectId(value1.getId()); - writer.writeEndObject(); - writer.writeEndObject(); - } - }.convert(value, strictJsonWriter); + strictJsonWriter.writeStartObject(); + strictJsonWriter.writeStartObject("$dbPointer"); + strictJsonWriter.writeString("$ref", value.getNamespace()); + strictJsonWriter.writeName("$id"); + doWriteObjectId(value.getId()); + strictJsonWriter.writeEndObject(); + strictJsonWriter.writeEndObject(); } else { - new Converter() { - @Override - public void convert(final BsonDbPointer value1, final StrictJsonWriter writer) { - writer.writeStartObject(); - writer.writeString("$ref", value1.getNamespace()); - writer.writeName("$id"); - doWriteObjectId(value1.getId()); - writer.writeEndObject(); - } - }.convert(value, strictJsonWriter); + strictJsonWriter.writeStartObject(); + strictJsonWriter.writeString("$ref", value.getNamespace()); + strictJsonWriter.writeName("$id"); + doWriteObjectId(value.getId()); + strictJsonWriter.writeEndObject(); } } @@ -242,11 +231,11 @@ public void flush() { } /** - * Return true if the output has been truncated due to exceeding the length specified in {@link JsonWriterSettings#maxLength}. + * Return true if the output has been truncated due to exceeding the length specified in {@link JsonWriterSettings#getMaxLength()}. 
* * @return true if the output has been truncated * @since 3.7 - * @see JsonWriterSettings#maxLength + * @see JsonWriterSettings#getMaxLength() */ public boolean isTruncated() { return strictJsonWriter.isTruncated(); @@ -263,18 +252,6 @@ protected boolean abortPipe() { */ public class Context extends AbstractBsonWriter.Context { - /** - * Creates a new context. - * - * @param parentContext the parent context that can be used for going back up to the parent level - * @param contextType the type of this context - * @param indentChars the String to use for indentation at this level. - */ - @Deprecated - public Context(final Context parentContext, final BsonContextType contextType, final String indentChars) { - this(parentContext, contextType); - } - /** * Creates a new context. * diff --git a/bson/src/main/org/bson/json/JsonWriterSettings.java b/bson/src/main/org/bson/json/JsonWriterSettings.java index 1b3aea26977..53b340e295f 100644 --- a/bson/src/main/org/bson/json/JsonWriterSettings.java +++ b/bson/src/main/org/bson/json/JsonWriterSettings.java @@ -36,7 +36,7 @@ * @see JsonWriter * @since 3.0 */ -public class JsonWriterSettings extends BsonWriterSettings { +public final class JsonWriterSettings extends BsonWriterSettings { private static final JsonNullConverter JSON_NULL_CONVERTER = new JsonNullConverter(); private static final JsonStringConverter JSON_STRING_CONVERTER = new JsonStringConverter(); @@ -114,79 +114,6 @@ public static Builder builder() { return new Builder(); } - /** - * Creates a new instance with default values for all properties. - *

    - * Defaults to {@link JsonMode#STRICT} - *

    - * - * @deprecated Prefer {@link #builder()}, but note that the default output mode is different for that method - */ - @Deprecated - public JsonWriterSettings() { - this(builder().outputMode(JsonMode.STRICT)); - } - - /** - * Creates a new instance with the given output mode and default values for all other properties. - * - * @param outputMode the output mode - * @deprecated Use the {@link Builder} instead - */ - @Deprecated - public JsonWriterSettings(final JsonMode outputMode) { - this(builder().outputMode(outputMode)); - } - - /** - * Creates a new instance with indent mode enabled, and the default value for all other properties. - * - * @param indent whether indent mode is enabled - * @deprecated Use the {@link Builder} instead - */ - @Deprecated - public JsonWriterSettings(final boolean indent) { - this(builder().indent(indent)); - } - - /** - * Creates a new instance with the given output mode, indent mode enabled, and the default value for all other properties. - * - * @param outputMode the output mode - * @param indent whether indent mode is enabled - * @deprecated Use the {@link Builder} instead - */ - @Deprecated - public JsonWriterSettings(final JsonMode outputMode, final boolean indent) { - this(builder().outputMode(outputMode).indent(indent)); - } - - /** - * Creates a new instance with the given values for all properties, indent mode enabled and the default value of {@code - * newLineCharacters}. - * - * @param outputMode the output mode - * @param indentCharacters the indent characters - * @deprecated Use the {@link Builder} instead - */ - @Deprecated - public JsonWriterSettings(final JsonMode outputMode, final String indentCharacters) { - this(builder().outputMode(outputMode).indent(true).indentCharacters(indentCharacters)); - } - - /** - * Creates a new instance with the given values for all properties and indent mode enabled. 
- * - * @param outputMode the output mode - * @param indentCharacters the indent characters - * @param newLineCharacters the new line character(s) to use - * @deprecated Use the {@link Builder} instead - */ - @Deprecated - public JsonWriterSettings(final JsonMode outputMode, final String indentCharacters, final String newLineCharacters) { - this(builder().outputMode(outputMode).indent(true).indentCharacters(indentCharacters).newLineCharacters(newLineCharacters)); - } - @SuppressWarnings("deprecation") private JsonWriterSettings(final Builder builder) { indent = builder.indent; @@ -364,7 +291,7 @@ public String getIndentCharacters() { } /** - * The output mode to use. The default value is {@code }JSONMode.STRICT}. + * The output mode to use. The default value is {@code }JSONMode.RELAXED}. * * @return the output mode. */ @@ -557,7 +484,6 @@ public Converter getJavaScriptConverter() { * * @since 3.5 */ - @SuppressWarnings("deprecation") public static final class Builder { private boolean indent; private String newLineCharacters = System.getProperty("line.separator"); diff --git a/bson/src/main/org/bson/json/LegacyExtendedJsonBinaryConverter.java b/bson/src/main/org/bson/json/LegacyExtendedJsonBinaryConverter.java index 7a02e62be18..22bf03939bb 100644 --- a/bson/src/main/org/bson/json/LegacyExtendedJsonBinaryConverter.java +++ b/bson/src/main/org/bson/json/LegacyExtendedJsonBinaryConverter.java @@ -17,14 +17,15 @@ package org.bson.json; import org.bson.BsonBinary; -import org.bson.internal.Base64; + +import java.util.Base64; class LegacyExtendedJsonBinaryConverter implements Converter { @Override public void convert(final BsonBinary value, final StrictJsonWriter writer) { writer.writeStartObject(); - writer.writeString("$binary", Base64.encode(value.getData())); + writer.writeString("$binary", Base64.getEncoder().encodeToString(value.getData())); writer.writeString("$type", String.format("%02X", value.getType())); writer.writeEndObject(); } diff --git 
a/bson/src/main/org/bson/json/ShellBinaryConverter.java b/bson/src/main/org/bson/json/ShellBinaryConverter.java index cea8fce9878..aec8204583e 100644 --- a/bson/src/main/org/bson/json/ShellBinaryConverter.java +++ b/bson/src/main/org/bson/json/ShellBinaryConverter.java @@ -17,14 +17,15 @@ package org.bson.json; import org.bson.BsonBinary; -import org.bson.internal.Base64; + +import java.util.Base64; import static java.lang.String.format; class ShellBinaryConverter implements Converter { @Override public void convert(final BsonBinary value, final StrictJsonWriter writer) { - writer.writeRaw(format("new BinData(%s, \"%s\")", Integer.toString(value.getType() & 0xFF), - Base64.encode(value.getData()))); + writer.writeRaw(format("new BinData(%s, \"%s\")", value.getType() & 0xFF, + Base64.getEncoder().encodeToString(value.getData()))); } } diff --git a/bson/src/main/org/bson/json/ShellDateTimeConverter.java b/bson/src/main/org/bson/json/ShellDateTimeConverter.java index ae024899853..95c6441df6a 100644 --- a/bson/src/main/org/bson/json/ShellDateTimeConverter.java +++ b/bson/src/main/org/bson/json/ShellDateTimeConverter.java @@ -26,7 +26,7 @@ class ShellDateTimeConverter implements Converter { @Override public void convert(final Long value, final StrictJsonWriter writer) { - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd\'T\'HH:mm:ss.SSS\'Z\'"); + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); if (value >= -59014396800000L && value <= 253399536000000L) { writer.writeRaw(format("ISODate(\"%s\")", dateFormat.format(new Date(value)))); diff --git a/bson/src/main/org/bson/json/StrictCharacterStreamJsonWriter.java b/bson/src/main/org/bson/json/StrictCharacterStreamJsonWriter.java index ce3114a5b28..cce8af2fa17 100644 --- a/bson/src/main/org/bson/json/StrictCharacterStreamJsonWriter.java +++ b/bson/src/main/org/bson/json/StrictCharacterStreamJsonWriter.java @@ -98,7 +98,6 @@ 
public void writeStartArray(final String name) { @Override public void writeBoolean(final String name, final boolean value) { notNull("name", name); - notNull("value", value); writeName(name); writeBoolean(value); } @@ -136,7 +135,7 @@ public void writeNull(final String name) { @Override public void writeName(final String name) { notNull("name", name); - checkPreconditions(State.NAME); + checkState(State.NAME); if (context.hasElements) { write(","); @@ -144,18 +143,18 @@ public void writeName(final String name) { if (settings.isIndent()) { write(settings.getNewLineCharacters()); write(context.indentation); - } else { + } else if (context.hasElements){ write(" "); } writeStringHelper(name); - write(" : "); + write(": "); state = State.VALUE; } @Override public void writeBoolean(final boolean value) { - checkPreconditions(State.VALUE); + checkState(State.VALUE); preWriteValue(); write(value ? "true" : "false"); setNextState(); @@ -164,7 +163,7 @@ public void writeBoolean(final boolean value) { @Override public void writeNumber(final String value) { notNull("value", value); - checkPreconditions(State.VALUE); + checkState(State.VALUE); preWriteValue(); write(value); setNextState(); @@ -173,7 +172,7 @@ public void writeNumber(final String value) { @Override public void writeString(final String value) { notNull("value", value); - checkPreconditions(State.VALUE); + checkState(State.VALUE); preWriteValue(); writeStringHelper(value); setNextState(); @@ -182,7 +181,7 @@ public void writeString(final String value) { @Override public void writeRaw(final String value) { notNull("value", value); - checkPreconditions(State.VALUE); + checkState(State.VALUE); preWriteValue(); write(value); setNextState(); @@ -190,7 +189,7 @@ public void writeRaw(final String value) { @Override public void writeNull() { - checkPreconditions(State.VALUE); + checkState(State.VALUE); preWriteValue(); write("null"); setNextState(); @@ -198,7 +197,9 @@ public void writeNull() { @Override public void 
writeStartObject() { - checkPreconditions(State.INITIAL, State.VALUE); + if (state != State.INITIAL && state != State.VALUE) { + throw new BsonInvalidOperationException("Invalid state " + state); + } preWriteValue(); write("{"); context = new StrictJsonContext(context, JsonContextType.DOCUMENT, settings.getIndentCharacters()); @@ -215,13 +216,11 @@ public void writeStartArray() { @Override public void writeEndObject() { - checkPreconditions(State.NAME); + checkState(State.NAME); if (settings.isIndent() && context.hasElements) { write(settings.getNewLineCharacters()); write(context.parentContext.indentation); - } else { - write(" "); } write("}"); context = context.parentContext; @@ -234,12 +233,16 @@ public void writeEndObject() { @Override public void writeEndArray() { - checkPreconditions(State.VALUE); + checkState(State.VALUE); if (context.contextType != JsonContextType.ARRAY) { throw new BsonInvalidOperationException("Can't end an array if not in an array"); } + if (settings.isIndent() && context.hasElements) { + write(settings.getNewLineCharacters()); + write(context.parentContext.indentation); + } write("]"); context = context.parentContext; if (context.contextType == JsonContextType.TOP_LEVEL) { @@ -251,11 +254,11 @@ public void writeEndArray() { /** * Return true if the output has been truncated due to exceeding the length specified in - * {@link StrictCharacterStreamJsonWriterSettings#maxLength}. + * {@link StrictCharacterStreamJsonWriterSettings#getMaxLength()}. 
* * @return true if the output has been truncated * @since 3.7 - * @see StrictCharacterStreamJsonWriterSettings#maxLength + * @see StrictCharacterStreamJsonWriterSettings#getMaxLength() */ public boolean isTruncated() { return isTruncated; @@ -276,7 +279,13 @@ Writer getWriter() { private void preWriteValue() { if (context.contextType == JsonContextType.ARRAY) { if (context.hasElements) { - write(", "); + write(","); + } + if (settings.isIndent()) { + write(settings.getNewLineCharacters()); + write(context.indentation); + } else if (context.hasElements) { + write(" "); } } context.hasElements = true; @@ -381,22 +390,12 @@ private void write(final char c) { } } - private void checkPreconditions(final State... validStates) { - if (!checkState(validStates)) { + private void checkState(final State requiredState) { + if (state != requiredState) { throw new BsonInvalidOperationException("Invalid state " + state); } } - private boolean checkState(final State... validStates) { - for (State cur : validStates) { - if (cur == state) { - return true; - } - } - return false; - - } - private void throwBSONException(final IOException e) { throw new BSONException("Wrapping IOException", e); } diff --git a/bson/src/main/org/bson/json/UuidStringValidator.java b/bson/src/main/org/bson/json/UuidStringValidator.java new file mode 100644 index 00000000000..5f0f18d96ba --- /dev/null +++ b/bson/src/main/org/bson/json/UuidStringValidator.java @@ -0,0 +1,73 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.json; + +import java.util.BitSet; + +final class UuidStringValidator { + private static final BitSet HEX_CHARS; + + static { + HEX_CHARS = new BitSet('f' + 1); + HEX_CHARS.set('0', '9' + 1); + HEX_CHARS.set('A', 'F' + 1); + HEX_CHARS.set('a', 'f' + 1); + } + + private static void validateFourHexChars(final String str, final int startPos) { + if (!(HEX_CHARS.get(str.charAt(startPos)) + && HEX_CHARS.get(str.charAt(startPos + 1)) + && HEX_CHARS.get(str.charAt(startPos + 2)) + && HEX_CHARS.get(str.charAt(startPos + 3)))) { + throw new IllegalArgumentException(String.format("Expected four hexadecimal characters in UUID string \"%s\" starting at " + + "position %d", str, startPos)); + } + } + + private static void validateDash(final String str, final int pos) { + if (str.charAt(pos) != '-') { + throw new IllegalArgumentException(String.format("Expected dash in UUID string \"%s\" at position %d", str, pos)); + } + } + + // UUID strings must be in the form 73ffd264-44b3-4c69-90e8-e7d1dfc035d4, but UUID.fromString fails to fully validate against that + // form, even though the Javadoc claims that it does For example, it will parse 73ff-d26444b-34c6-990e8e-7d1dfc035d4 (same as previous + // value but with hyphens in the wrong positions), but return a UUID that is not equal to the one it returns for the string with the + // hyphens in the correct positions. Given that, in order to comply with the Extended JSON specification, we add our own validation + // before calling UUID.fromString. 
+ static void validate(final String uuidString) { + if (uuidString.length() != 36) { + throw new IllegalArgumentException(String.format("UUID string \"%s\" must be 36 characters", uuidString)); + } + + validateFourHexChars(uuidString, 0); + validateFourHexChars(uuidString, 4); + validateDash(uuidString, 8); + validateFourHexChars(uuidString, 9); + validateDash(uuidString, 13); + validateFourHexChars(uuidString, 14); + validateDash(uuidString, 18); + validateFourHexChars(uuidString, 19); + validateDash(uuidString, 23); + validateFourHexChars(uuidString, 24); + validateFourHexChars(uuidString, 28); + validateFourHexChars(uuidString, 32); + } + + private UuidStringValidator() { + } +} diff --git a/bson/src/main/org/bson/types/BSONTimestamp.java b/bson/src/main/org/bson/types/BSONTimestamp.java index 8979f777867..55178d6b3fd 100644 --- a/bson/src/main/org/bson/types/BSONTimestamp.java +++ b/bson/src/main/org/bson/types/BSONTimestamp.java @@ -29,7 +29,13 @@ public final class BSONTimestamp implements Comparable, Serializa private static final long serialVersionUID = -3268482672267936464L; + /** + * The millisecond increment within the second. 
+ */ private final int inc; + /** + * The time, in seconds + */ private final Date time; /** diff --git a/bson/src/main/org/bson/types/BasicBSONList.java b/bson/src/main/org/bson/types/BasicBSONList.java index 9a76f0cc975..00fe637ade1 100644 --- a/bson/src/main/org/bson/types/BasicBSONList.java +++ b/bson/src/main/org/bson/types/BasicBSONList.java @@ -124,12 +124,6 @@ public Object removeField(final String key) { return remove(i); } - @Override - @Deprecated - public boolean containsKey(final String key) { - return containsField(key); - } - @Override public boolean containsField(final String key) { int i = _getInt(key, false); diff --git a/bson/src/main/org/bson/types/Binary.java b/bson/src/main/org/bson/types/Binary.java index ac67bb878f4..5ba482ccc41 100644 --- a/bson/src/main/org/bson/types/Binary.java +++ b/bson/src/main/org/bson/types/Binary.java @@ -27,7 +27,14 @@ public class Binary implements Serializable { private static final long serialVersionUID = 7902997490338209467L; + /** + * The binary sub-type. + */ private final byte type; + + /** + * The binary data. + */ private final byte[] data; /** @@ -110,7 +117,7 @@ public boolean equals(final Object o) { @Override public int hashCode() { - int result = (int) type; + int result = type; result = 31 * result + Arrays.hashCode(data); return result; } diff --git a/bson/src/main/org/bson/types/Code.java b/bson/src/main/org/bson/types/Code.java index 1f8cbb01cbf..0bf9161315e 100644 --- a/bson/src/main/org/bson/types/Code.java +++ b/bson/src/main/org/bson/types/Code.java @@ -27,19 +27,22 @@ public class Code implements Serializable { private static final long serialVersionUID = 475535263314046697L; + /** + * The JavaScript code string. + */ private final String code; /** * Construct a new instance with the given code. * - * @param code the Javascript code + * @param code the JavaScript code */ public Code(final String code) { this.code = code; } /** - * Get the Javascript code. + * Get the JavaScript code. 
* * @return the code */ diff --git a/bson/src/main/org/bson/types/CodeWScope.java b/bson/src/main/org/bson/types/CodeWScope.java index b46591b2efa..57c95e29c24 100644 --- a/bson/src/main/org/bson/types/CodeWScope.java +++ b/bson/src/main/org/bson/types/CodeWScope.java @@ -25,6 +25,9 @@ */ public class CodeWScope extends Code { + /** + * The scope document. + */ private final BSONObject scope; private static final long serialVersionUID = -6284832275113680002L; diff --git a/bson/src/main/org/bson/types/CodeWithScope.java b/bson/src/main/org/bson/types/CodeWithScope.java index 7c5fe75f8bd..f2cec479b84 100644 --- a/bson/src/main/org/bson/types/CodeWithScope.java +++ b/bson/src/main/org/bson/types/CodeWithScope.java @@ -18,6 +18,8 @@ import org.bson.Document; +import java.util.Objects; + /** * A representation of the JavaScript Code with Scope BSON type. * @@ -25,6 +27,9 @@ */ public class CodeWithScope extends Code { + /** + * The scope document. + */ private final Document scope; private static final long serialVersionUID = -6284832275113680002L; @@ -41,7 +46,7 @@ public CodeWithScope(final String code, final Document scope) { } /** - * Gets the scope, which is is a mapping from identifiers to values, representing the scope in which the code should be evaluated. + * Gets the scope, which is a mapping from identifiers to values, representing the scope in which the code should be evaluated. * * @return the scope */ @@ -63,7 +68,7 @@ public boolean equals(final Object o) { CodeWithScope that = (CodeWithScope) o; - if (scope != null ? 
!scope.equals(that.scope) : that.scope != null) { + if (!Objects.equals(scope, that.scope)) { return false; } diff --git a/bson/src/main/org/bson/types/Decimal128.java b/bson/src/main/org/bson/types/Decimal128.java index 09d183317d2..1cd3d9745fa 100644 --- a/bson/src/main/org/bson/types/Decimal128.java +++ b/bson/src/main/org/bson/types/Decimal128.java @@ -16,27 +16,26 @@ package org.bson.types; -import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; -import java.util.HashSet; -import java.util.Set; - import static java.math.MathContext.DECIMAL128; import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import java.util.HashSet; +import java.util.Set; /** * A binary integer decimal representation of a 128-bit decimal value, supporting 34 decimal digits of significand and an exponent range * of -6143 to +6144. * * @since 3.4 - * @see BSON Decimal128 + * @see BSON Decimal128 * specification * @see binary integer decimal * @see decimal128 floating-point format * @see 754-2008 - IEEE Standard for Floating-Point Arithmetic */ -public final class Decimal128 implements Serializable { +public final class Decimal128 extends Number implements Comparable { private static final long serialVersionUID = 4570973266503637887L; @@ -53,10 +52,10 @@ public final class Decimal128 implements Serializable { private static final BigInteger BIG_INT_ONE = new BigInteger("1"); private static final BigInteger BIG_INT_ZERO = new BigInteger("0"); - private static final Set NaN_STRINGS = new HashSet(asList("nan")); - private static final Set NEGATIVE_NaN_STRINGS = new HashSet(asList("-nan")); - private static final Set POSITIVE_INFINITY_STRINGS = new HashSet(asList("inf", "+inf", "infinity", "+infinity")); - private static final Set NEGATIVE_INFINITY_STRINGS = new HashSet(asList("-inf", "-infinity")); + private static final Set NaN_STRINGS = new HashSet<>(singletonList("nan")); + private static final Set NEGATIVE_NaN_STRINGS = 
new HashSet<>(singletonList("-nan")); + private static final Set POSITIVE_INFINITY_STRINGS = new HashSet<>(asList("inf", "+inf", "infinity", "+infinity")); + private static final Set NEGATIVE_INFINITY_STRINGS = new HashSet<>(asList("-inf", "-infinity")); /** * A constant holding the positive infinity of type {@code Decimal128}. It is equal to the value return by @@ -83,7 +82,7 @@ public final class Decimal128 implements Serializable { public static final Decimal128 NaN = fromIEEE754BIDEncoding(NaN_MASK, 0); /** - * A constant holding a postive zero value of type {@code Decimal128}. It is equal to the value return by + * A constant holding a positive zero value of type {@code Decimal128}. It is equal to the value return by * {@code Decimal128.valueOf("0")}. */ public static final Decimal128 POSITIVE_ZERO = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000000L); @@ -94,7 +93,13 @@ public final class Decimal128 implements Serializable { */ public static final Decimal128 NEGATIVE_ZERO = fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000000L); + /** + * The high bits. + */ private final long high; + /** + * The low bits. 
+ */ private final long low; /** @@ -104,7 +109,7 @@ public final class Decimal128 implements Serializable { * @return the Decimal128 value representing the given String * @throws NumberFormatException if the value is out of the Decimal128 range * @see - * + * * From-String Specification */ public static Decimal128 parse(final String value) { @@ -294,6 +299,15 @@ public BigDecimal bigDecimalValue() { return bigDecimal; } + // Make sure that the argument comes from a call to bigDecimalValueNoNegativeZeroCheck on this instance + private boolean hasDifferentSign(final BigDecimal bigDecimal) { + return isNegative() && bigDecimal.signum() == 0; + } + + private boolean isZero(final BigDecimal bigDecimal) { + return !isNaN() && !isInfinite() && bigDecimal.compareTo(BigDecimal.ZERO) == 0; + } + private BigDecimal bigDecimalValueNoNegativeZeroCheck() { int scale = -getExponent(); @@ -325,8 +339,7 @@ private byte[] getBytes() { return bytes; } - // Consider making this method public - int getExponent() { + private int getExponent() { if (twoHighestCombinationBitsAreSet()) { return (int) ((high & 0x1fffe00000000000L) >>> 47) - EXPONENT_OFFSET; } else { @@ -374,6 +387,137 @@ public boolean isNaN() { return (high & NaN_MASK) == NaN_MASK; } + + @Override + public int compareTo(final Decimal128 o) { + if (isNaN()) { + return o.isNaN() ? 
0 : 1; + } + if (isInfinite()) { + if (isNegative()) { + if (o.isInfinite() && o.isNegative()) { + return 0; + } else { + return -1; + } + } else { + if (o.isNaN()) { + return -1; + } else if (o.isInfinite() && !o.isNegative()) { + return 0; + } else { + return 1; + } + } + } + BigDecimal bigDecimal = bigDecimalValueNoNegativeZeroCheck(); + BigDecimal otherBigDecimal = o.bigDecimalValueNoNegativeZeroCheck(); + + if (isZero(bigDecimal) && o.isZero(otherBigDecimal)) { + if (hasDifferentSign(bigDecimal)) { + if (o.hasDifferentSign(otherBigDecimal)) { + return 0; + } + else { + return -1; + } + } else if (o.hasDifferentSign(otherBigDecimal)) { + return 1; + } + } + + if (o.isNaN()) { + return -1; + } else if (o.isInfinite()) { + if (o.isNegative()) { + return 1; + } else { + return -1; + } + } else { + return bigDecimal.compareTo(otherBigDecimal); + } + } + + /** + * Converts this {@code Decimal128} to a {@code int}. This conversion is analogous to the narrowing primitive conversion from + * {@code double} to {@code int} as defined in The Java™ Language Specification: any fractional part of this + * {@code Decimal128} will be discarded, and if the resulting integral value is too big to fit in a {@code int}, only the + * low-order 32 bits are returned. Note that this conversion can lose information about the overall magnitude and precision of this + * {@code Decimal128} value as well as return a result with the opposite sign. Note that {@code #NEGATIVE_ZERO} is converted to + * {@code 0}. + * + * @return this {@code Decimal128} converted to a {@code int}. + * @since 3.10 + */ + @Override + public int intValue() { + return (int) doubleValue(); + } + + /** + * Converts this {@code Decimal128} to a {@code long}. 
This conversion is analogous to the narrowing primitive conversion from + * {@code double} to {@code long} as defined in The Java™ Language Specification: any fractional part of this + * {@code Decimal128} will be discarded, and if the resulting integral value is too big to fit in a {@code long}, only the + * low-order 64 bits are returned. Note that this conversion can lose information about the overall magnitude and precision of this + * {@code Decimal128} value as well as return a result with the opposite sign. Note that {@code #NEGATIVE_ZERO} is converted to + * {@code 0L}. + * + * @return this {@code Decimal128} converted to a {@code long}. + * @since 3.10 + */ + @Override + public long longValue() { + return (long) doubleValue(); + } + + /** + * Converts this {@code Decimal128} to a {@code float}. This conversion is similar to the narrowing primitive conversion from + * {@code double} to {@code float} as defined in The Java™ Language Specification: if this {@code Decimal128} has + * too great a magnitude to represent as a {@code float}, it will be converted to {@link Float#NEGATIVE_INFINITY} or + * {@link Float#POSITIVE_INFINITY} as appropriate. Note that even when the return value is finite, this conversion can lose + * information about the precision of the {@code Decimal128} value. Note that {@code #NEGATIVE_ZERO} is converted to {@code 0.0f}. + * + * @return this {@code Decimal128} converted to a {@code float}. + * @since 3.10 + */ + @Override + public float floatValue() { + return (float) doubleValue(); + } + + /** + * Converts this {@code Decimal128} to a {@code double}. This conversion is similar to the narrowing primitive conversion from + * {@code double} to {@code float} as defined in The Java™ Language Specification: if this {@code Decimal128} has + * too great a magnitude to represent as a {@code double}, it will be converted to {@link Double#NEGATIVE_INFINITY} or + * {@link Double#POSITIVE_INFINITY} as appropriate. 
Note that even when the return value is finite, this conversion can lose + * information about the precision of the {@code Decimal128} value. Note that {@code #NEGATIVE_ZERO} is converted to {@code 0.0d}. + * + * @return this {@code Decimal128} converted to a {@code double}. + * @since 3.10 + */ + @Override + public double doubleValue() { + if (isNaN()) { + return Double.NaN; + } + if (isInfinite()) { + if (isNegative()) { + return Double.NEGATIVE_INFINITY; + } else { + return Double.POSITIVE_INFINITY; + } + } + + BigDecimal bigDecimal = bigDecimalValueNoNegativeZeroCheck(); + + if (hasDifferentSign(bigDecimal)) { + return -0.0d; + } + + return bigDecimal.doubleValue(); + } + /** * Returns true if the encoded representation of this instance is the same as the encoded representation of {@code o}. *

    @@ -420,7 +564,7 @@ public int hashCode() { * Returns the String representation of the Decimal128 value. * * @return the String representation - * @see + * @see * To-String Sprecification */ @Override diff --git a/bson/src/main/org/bson/types/ObjectId.java b/bson/src/main/org/bson/types/ObjectId.java index 14f76603353..927d3ab0c31 100644 --- a/bson/src/main/org/bson/types/ObjectId.java +++ b/bson/src/main/org/bson/types/ObjectId.java @@ -16,33 +16,30 @@ package org.bson.types; -import org.bson.diagnostics.Logger; -import org.bson.diagnostics.Loggers; +import static org.bson.assertions.Assertions.isTrueArgument; +import static org.bson.assertions.Assertions.notNull; +import java.io.InvalidObjectException; +import java.io.ObjectInputStream; import java.io.Serializable; -import java.net.NetworkInterface; -import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.security.SecureRandom; import java.util.Date; -import java.util.Enumeration; import java.util.concurrent.atomic.AtomicInteger; -import static org.bson.assertions.Assertions.isTrueArgument; -import static org.bson.assertions.Assertions.notNull; - /** *

    A globally unique identifier for objects.

    * *

    Consists of 12 bytes, divided as follows:

    * - * - * - * - * - * - * - * + * + * + * + * + * + * + * *
    ObjectID layout
    01234567891011
    timemachine pidinc
    ObjectID layout
    01234567891011
    timerandom valueinc
    * *

    Instances of this class are immutable.

    @@ -51,24 +48,31 @@ */ public final class ObjectId implements Comparable, Serializable { - private static final long serialVersionUID = 3670079982654483072L; - - static final Logger LOGGER = Loggers.getLogger("ObjectId"); + // unused, as this class uses a proxy for serialization + private static final long serialVersionUID = 1L; + private static final int OBJECT_ID_LENGTH = 12; private static final int LOW_ORDER_THREE_BYTES = 0x00ffffff; - private static final int MACHINE_IDENTIFIER; - private static final short PROCESS_IDENTIFIER; - private static final AtomicInteger NEXT_COUNTER = new AtomicInteger(new SecureRandom().nextInt()); + // Use upper bytes of a long to represent the 5-byte random value. + private static final long RANDOM_VALUE; + + private static final AtomicInteger NEXT_COUNTER; - private static final char[] HEX_CHARS = new char[] { - '0', '1', '2', '3', '4', '5', '6', '7', - '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; + private static final char[] HEX_CHARS = { + '0', '1', '2', '3', '4', '5', '6', '7', + '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}; + /** + * The timestamp + */ private final int timestamp; - private final int machineIdentifier; - private final short processIdentifier; - private final int counter; + + /** + * The final 8 bytes of the ObjectID are 5 bytes probabilistically unique to the machine and + * process, followed by a 3 byte incrementing counter initialized to a random value. + */ + private final long nonce; /** * Gets a new object id. @@ -79,6 +83,21 @@ public static ObjectId get() { return new ObjectId(); } + /** + * Gets a new object id with the given date value and all other bits zeroed. + *

    + * The returned object id will compare as less than or equal to any other object id within the same second as the given date, and + * less than any later date. + *

    + * + * @param date the date + * @return the ObjectId + * @since 4.1 + */ + public static ObjectId getSmallestWithDate(final Date date) { + return new ObjectId(dateToTimestampSeconds(date), 0L); + } + /** * Checks if a string could be an {@code ObjectId}. * @@ -114,54 +133,6 @@ public static boolean isValid(final String hexString) { return true; } - /** - * Gets the generated machine identifier. - * - * @return an int representing the machine identifier - */ - public static int getGeneratedMachineIdentifier() { - return MACHINE_IDENTIFIER; - } - - /** - * Gets the generated process identifier. - * - * @return the process id - */ - public static int getGeneratedProcessIdentifier() { - return PROCESS_IDENTIFIER; - } - - /** - * Gets the current value of the auto-incrementing counter. - * - * @return the current counter value. - */ - public static int getCurrentCounter() { - return NEXT_COUNTER.get(); - } - - /** - *

    Creates an ObjectId using time, machine and inc values. The Java driver used to create all ObjectIds this way, but it does not - * match the ObjectId specification, which requires four values, not - * three. This major release of the Java driver conforms to the specification, but still supports clients that are relying on the - * behavior of the previous major release by providing this explicit factory method that takes three parameters instead of four.

    - * - *

    Ordinary users of the driver will not need this method. It's only for those that have written there own BSON decoders.

    - * - *

    NOTE: This will not break any application that use ObjectIds. The 12-byte representation will be round-trippable from old to new - * driver releases.

    - * - * @param time time in seconds - * @param machine machine ID - * @param inc incremental value - * @return a new {@code ObjectId} created from the given values - * @since 2.12.0 - */ - public static ObjectId createFromLegacyFormat(final int time, final int machine, final int inc) { - return new ObjectId(time, machine, inc); - } - /** * Create a new object id. */ @@ -175,7 +146,7 @@ public ObjectId() { * @param date the date */ public ObjectId(final Date date) { - this(dateToTimestampSeconds(date), MACHINE_IDENTIFIER, PROCESS_IDENTIFIER, NEXT_COUNTER.getAndIncrement(), false); + this(dateToTimestampSeconds(date), RANDOM_VALUE | (NEXT_COUNTER.getAndIncrement() & LOW_ORDER_THREE_BYTES)); } /** @@ -186,47 +157,30 @@ public ObjectId(final Date date) { * @throws IllegalArgumentException if the high order byte of counter is not zero */ public ObjectId(final Date date, final int counter) { - this(date, MACHINE_IDENTIFIER, PROCESS_IDENTIFIER, counter); + this(dateToTimestampSeconds(date), getNonceFromUntrustedCounter(counter)); } /** - * Constructs a new instances using the given date, machine identifier, process identifier, and counter. + * Creates an ObjectId using the given time and counter. 
* - * @param date the date - * @param machineIdentifier the machine identifier - * @param processIdentifier the process identifier - * @param counter the counter - * @throws IllegalArgumentException if the high order byte of machineIdentifier or counter is not zero + * @param timestamp the time in seconds + * @param counter the counter + * @throws IllegalArgumentException if the high order byte of counter is not zero */ - public ObjectId(final Date date, final int machineIdentifier, final short processIdentifier, final int counter) { - this(dateToTimestampSeconds(date), machineIdentifier, processIdentifier, counter); + public ObjectId(final int timestamp, final int counter) { + this(timestamp, getNonceFromUntrustedCounter(counter)); } - /** - * Creates an ObjectId using the given time, machine identifier, process identifier, and counter. - * - * @param timestamp the time in seconds - * @param machineIdentifier the machine identifier - * @param processIdentifier the process identifier - * @param counter the counter - * @throws IllegalArgumentException if the high order byte of machineIdentifier or counter is not zero - */ - public ObjectId(final int timestamp, final int machineIdentifier, final short processIdentifier, final int counter) { - this(timestamp, machineIdentifier, processIdentifier, counter, true); + private ObjectId(final int timestamp, final long nonce) { + this.timestamp = timestamp; + this.nonce = nonce; } - private ObjectId(final int timestamp, final int machineIdentifier, final short processIdentifier, final int counter, - final boolean checkCounter) { - if ((machineIdentifier & 0xff000000) != 0) { - throw new IllegalArgumentException("The machine identifier must be between 0 and 16777215 (it must fit in three bytes)."); - } - if (checkCounter && ((counter & 0xff000000) != 0)) { + private static long getNonceFromUntrustedCounter(final int counter) { + if ((counter & 0xff000000) != 0) { throw new IllegalArgumentException("The counter must be between 
0 and 16777215 (it must fit in three bytes)."); } - this.timestamp = timestamp; - this.machineIdentifier = machineIdentifier; - this.processIdentifier = processIdentifier; - this.counter = counter & LOW_ORDER_THREE_BYTES; + return RANDOM_VALUE | counter; } /** @@ -246,18 +200,7 @@ public ObjectId(final String hexString) { * @throws IllegalArgumentException if array is null or not of length 12 */ public ObjectId(final byte[] bytes) { - this(ByteBuffer.wrap(notNull("bytes", bytes))); - } - - /** - * Creates an ObjectId - * - * @param timestamp time in seconds - * @param machineAndProcessIdentifier machine and process identifier - * @param counter incremental value - */ - ObjectId(final int timestamp, final int machineAndProcessIdentifier, final int counter) { - this(legacyToBytes(timestamp, machineAndProcessIdentifier, counter)); + this(ByteBuffer.wrap(isTrueArgument("bytes has length of 12", bytes, notNull("bytes", bytes).length == 12))); } /** @@ -269,31 +212,16 @@ public ObjectId(final byte[] bytes) { */ public ObjectId(final ByteBuffer buffer) { notNull("buffer", buffer); - isTrueArgument("buffer.remaining() >=12", buffer.remaining() >= 12); - - // Note: Cannot use ByteBuffer.getInt because it depends on tbe buffer's byte order - // and ObjectId's are always in big-endian order. 
- timestamp = makeInt(buffer.get(), buffer.get(), buffer.get(), buffer.get()); - machineIdentifier = makeInt((byte) 0, buffer.get(), buffer.get(), buffer.get()); - processIdentifier = (short) makeInt((byte) 0, (byte) 0, buffer.get(), buffer.get()); - counter = makeInt((byte) 0, buffer.get(), buffer.get(), buffer.get()); - } + isTrueArgument("buffer.remaining() >=12", buffer.remaining() >= OBJECT_ID_LENGTH); - private static byte[] legacyToBytes(final int timestamp, final int machineAndProcessIdentifier, final int counter) { - byte[] bytes = new byte[12]; - bytes[0] = int3(timestamp); - bytes[1] = int2(timestamp); - bytes[2] = int1(timestamp); - bytes[3] = int0(timestamp); - bytes[4] = int3(machineAndProcessIdentifier); - bytes[5] = int2(machineAndProcessIdentifier); - bytes[6] = int1(machineAndProcessIdentifier); - bytes[7] = int0(machineAndProcessIdentifier); - bytes[8] = int3(counter); - bytes[9] = int2(counter); - bytes[10] = int1(counter); - bytes[11] = int0(counter); - return bytes; + ByteOrder originalOrder = buffer.order(); + try { + buffer.order(ByteOrder.BIG_ENDIAN); + this.timestamp = buffer.getInt(); + this.nonce = buffer.getLong(); + } finally { + buffer.order(originalOrder); + } } /** @@ -302,36 +230,34 @@ private static byte[] legacyToBytes(final int timestamp, final int machineAndPro * @return the byte array */ public byte[] toByteArray() { - ByteBuffer buffer = ByteBuffer.allocate(12); - putToByteBuffer(buffer); - return buffer.array(); // using .allocate ensures there is a backing array that can be returned + // using .allocate ensures there is a backing array that can be returned + return ByteBuffer.allocate(OBJECT_ID_LENGTH) + .putInt(this.timestamp) + .putLong(this.nonce) + .array(); } /** - * Convert to bytes and put those bytes to the provided ByteBuffer. - * Note that the numbers are stored in big-endian order. 
- * - * @param buffer the ByteBuffer - * @throws IllegalArgumentException if the buffer is null or does not have at least 12 bytes remaining - * @since 3.4 - */ + * Convert to bytes and put those bytes to the provided ByteBuffer. + * Note that the numbers are stored in big-endian order. + * + * @param buffer the ByteBuffer + * @throws IllegalArgumentException if the buffer is null or does not have at least 12 bytes remaining + * @since 3.4 + */ public void putToByteBuffer(final ByteBuffer buffer) { notNull("buffer", buffer); - isTrueArgument("buffer.remaining() >=12", buffer.remaining() >= 12); - - buffer.put(int3(timestamp)); - buffer.put(int2(timestamp)); - buffer.put(int1(timestamp)); - buffer.put(int0(timestamp)); - buffer.put(int2(machineIdentifier)); - buffer.put(int1(machineIdentifier)); - buffer.put(int0(machineIdentifier)); - buffer.put(short1(processIdentifier)); - buffer.put(short0(processIdentifier)); - buffer.put(int2(counter)); - buffer.put(int1(counter)); - buffer.put(int0(counter)); - } + isTrueArgument("buffer.remaining() >=12", buffer.remaining() >= OBJECT_ID_LENGTH); + + ByteOrder originalOrder = buffer.order(); + try { + buffer.order(ByteOrder.BIG_ENDIAN); + buffer.putInt(this.timestamp); + buffer.putLong(this.nonce); + } finally { + buffer.order(originalOrder); + } + } /** * Gets the timestamp (number of seconds since the Unix epoch). @@ -342,40 +268,13 @@ public int getTimestamp() { return timestamp; } - /** - * Gets the machine identifier. - * - * @return the machine identifier - */ - public int getMachineIdentifier() { - return machineIdentifier; - } - - /** - * Gets the process identifier. - * - * @return the process identifier - */ - public short getProcessIdentifier() { - return processIdentifier; - } - - /** - * Gets the counter. - * - * @return the counter - */ - public int getCounter() { - return counter; - } - /** * Gets the timestamp as a {@code Date} instance. 
* * @return the Date */ public Date getDate() { - return new Date(timestamp * 1000L); + return new Date((timestamp & 0xFFFFFFFFL) * 1000L); } /** @@ -384,13 +283,13 @@ public Date getDate() { * @return a string representation of the ObjectId in hexadecimal format */ public String toHexString() { - char[] chars = new char[24]; - int i = 0; - for (byte b : toByteArray()) { - chars[i++] = HEX_CHARS[b >> 4 & 0xF]; - chars[i++] = HEX_CHARS[b & 0xF]; - } - return new String(chars); + char[] chars = new char[OBJECT_ID_LENGTH * 2]; + int i = 0; + for (byte b : toByteArray()) { + chars[i++] = HEX_CHARS[b >> 4 & 0xF]; + chars[i++] = HEX_CHARS[b & 0xF]; + } + return new String(chars); } @Override @@ -402,47 +301,26 @@ public boolean equals(final Object o) { return false; } - ObjectId objectId = (ObjectId) o; - - if (counter != objectId.counter) { - return false; - } - if (machineIdentifier != objectId.machineIdentifier) { - return false; - } - if (processIdentifier != objectId.processIdentifier) { - return false; - } - if (timestamp != objectId.timestamp) { + ObjectId other = (ObjectId) o; + if (timestamp != other.timestamp) { return false; } - - return true; + return nonce == other.nonce; } @Override public int hashCode() { - int result = timestamp; - result = 31 * result + machineIdentifier; - result = 31 * result + (int) processIdentifier; - result = 31 * result + counter; - return result; + return 31 * timestamp + Long.hashCode(nonce); } @Override public int compareTo(final ObjectId other) { - if (other == null) { - throw new NullPointerException(); + int cmp = Integer.compareUnsigned(this.timestamp, other.timestamp); + if (cmp != 0) { + return cmp; } - byte[] byteArray = toByteArray(); - byte[] otherByteArray = other.toByteArray(); - for (int i = 0; i < 12; i++) { - if (byteArray[i] != otherByteArray[i]) { - return ((byteArray[i] & 0xff) < (otherByteArray[i] & 0xff)) ? 
-1 : 1; - } - } - return 0; + return Long.compareUnsigned(nonce, other.nonce); } @Override @@ -450,150 +328,83 @@ public String toString() { return toHexString(); } - // Deprecated methods - /** - * Gets the time of this ID, in seconds. + * Write the replacement object. + * + *

    + * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html + *

    * - * @deprecated Use #getTimestamp instead - * @return the time component of this ID in seconds + * @return a proxy for the document */ - @Deprecated - public int getTimeSecond() { - return timestamp; + private Object writeReplace() { + return new SerializationProxy(this); } /** - * Gets the time of this instance, in milliseconds. + * Prevent normal deserialization. + * + *

    + * See https://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html + *

    * - * @deprecated Use #getDate instead - * @return the time component of this ID in milliseconds + * @param stream the stream + * @throws InvalidObjectException in all cases */ - @Deprecated - public long getTime() { - return timestamp * 1000L; + private void readObject(final ObjectInputStream stream) throws InvalidObjectException { + throw new InvalidObjectException("Proxy required"); } - /** - * @return a string representation of the ObjectId in hexadecimal format - * @see ObjectId#toHexString() - * @deprecated use {@link #toHexString()} - */ - @Deprecated - public String toStringMongod() { - return toHexString(); - } + private static class SerializationProxy implements Serializable { + private static final long serialVersionUID = 1L; - static { - try { - MACHINE_IDENTIFIER = createMachineIdentifier(); - PROCESS_IDENTIFIER = createProcessIdentifier(); - } catch (Exception e) { - throw new RuntimeException(e); + private final byte[] bytes; + + SerializationProxy(final ObjectId objectId) { + bytes = objectId.toByteArray(); } - } - private static int createMachineIdentifier() { - // build a 2-byte machine piece based on NICs info - int machinePiece; - try { - StringBuilder sb = new StringBuilder(); - Enumeration e = NetworkInterface.getNetworkInterfaces(); - while (e.hasMoreElements()) { - NetworkInterface ni = e.nextElement(); - sb.append(ni.toString()); - byte[] mac = ni.getHardwareAddress(); - if (mac != null) { - ByteBuffer bb = ByteBuffer.wrap(mac); - try { - sb.append(bb.getChar()); - sb.append(bb.getChar()); - sb.append(bb.getChar()); - } catch (BufferUnderflowException shortHardwareAddressException) { //NOPMD - // mac with less than 6 bytes. 
continue - } - } - } - machinePiece = sb.toString().hashCode(); - } catch (Throwable t) { - // exception sometimes happens with IBM JVM, use SecureRandom instead - machinePiece = (new SecureRandom().nextInt()); - LOGGER.debug("Failed to get machine identifier from network interface, using SecureRandom instead"); + private Object readResolve() { + return new ObjectId(bytes); } - machinePiece = machinePiece & LOW_ORDER_THREE_BYTES; - return machinePiece; } - // Creates the process identifier. This does not have to be unique per class loader because - // NEXT_COUNTER will provide the uniqueness. - private static short createProcessIdentifier() { - short processId; + static { try { - String processName = java.lang.management.ManagementFactory.getRuntimeMXBean().getName(); - if (processName.contains("@")) { - processId = (short) Integer.parseInt(processName.substring(0, processName.indexOf('@'))); - } else { - processId = (short) java.lang.management.ManagementFactory.getRuntimeMXBean().getName().hashCode(); - } - - } catch (Throwable t) { - // JMX not available on Android, use SecureRandom instead - processId = (short) new SecureRandom().nextInt(); - LOGGER.debug("Failed to get process identifier from JMX, using SecureRandom instead"); + SecureRandom secureRandom = new SecureRandom(); + RANDOM_VALUE = secureRandom.nextLong() & ~LOW_ORDER_THREE_BYTES; + NEXT_COUNTER = new AtomicInteger(secureRandom.nextInt()); + } catch (Exception e) { + throw new RuntimeException(e); } - - return processId; } private static byte[] parseHexString(final String s) { - if (!isValid(s)) { - throw new IllegalArgumentException("invalid hexadecimal representation of an ObjectId: [" + s + "]"); - } + notNull("hexString", s); + isTrueArgument("hexString has 24 characters", s.length() == 24); - byte[] b = new byte[12]; + byte[] b = new byte[OBJECT_ID_LENGTH]; for (int i = 0; i < b.length; i++) { - b[i] = (byte) Integer.parseInt(s.substring(i * 2, i * 2 + 2), 16); + int pos = i << 1; + char c1 = 
s.charAt(pos); + char c2 = s.charAt(pos + 1); + b[i] = (byte) ((hexCharToInt(c1) << 4) + hexCharToInt(c2)); } return b; } - private static int dateToTimestampSeconds(final Date time) { - return (int) (time.getTime() / 1000); - } - - // Big-Endian helpers, in this class because all other BSON numbers are little-endian - - private static int makeInt(final byte b3, final byte b2, final byte b1, final byte b0) { - // CHECKSTYLE:OFF - return (((b3) << 24) | - ((b2 & 0xff) << 16) | - ((b1 & 0xff) << 8) | - ((b0 & 0xff))); - // CHECKSTYLE:ON - } - - private static byte int3(final int x) { - return (byte) (x >> 24); - } - - private static byte int2(final int x) { - return (byte) (x >> 16); - } - - private static byte int1(final int x) { - return (byte) (x >> 8); - } - - private static byte int0(final int x) { - return (byte) (x); - } - - private static byte short1(final short x) { - return (byte) (x >> 8); + private static int hexCharToInt(final char c) { + if (c >= '0' && c <= '9') { + return c - 48; + } else if (c >= 'a' && c <= 'f') { + return c - 87; + } else if (c >= 'A' && c <= 'F') { + return c - 55; + } + throw new IllegalArgumentException("invalid hexadecimal character: [" + c + "]"); } - private static byte short0(final short x) { - return (byte) (x); + private static int dateToTimestampSeconds(final Date time) { + return (int) (time.getTime() / 1000); } } - diff --git a/bson/src/main/org/bson/types/Symbol.java b/bson/src/main/org/bson/types/Symbol.java index 67960e443bc..2cf0dc4d859 100644 --- a/bson/src/main/org/bson/types/Symbol.java +++ b/bson/src/main/org/bson/types/Symbol.java @@ -27,6 +27,9 @@ public class Symbol implements Serializable { private static final long serialVersionUID = 1326269319883146072L; + /** + * The symbol string. 
+ */ private final String symbol; /** diff --git a/bson/src/main/org/bson/util/AbstractCopyOnWriteMap.java b/bson/src/main/org/bson/util/AbstractCopyOnWriteMap.java deleted file mode 100644 index e023f0538c0..00000000000 --- a/bson/src/main/org/bson/util/AbstractCopyOnWriteMap.java +++ /dev/null @@ -1,622 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * Copyright (c) 2008-2014 Atlassian Pty Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.util; - - -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; - -import static java.util.Collections.unmodifiableCollection; -import static java.util.Collections.unmodifiableSet; -import static org.bson.assertions.Assertions.notNull; - -/** - * Abstract base class for COW {@link java.util.Map} implementations that delegate to an internal map. - * - * @param The key type - * @param The value type - * @param the internal {@link java.util.Map} or extension for things like sorted and navigable maps. 
- */ -abstract class AbstractCopyOnWriteMap> implements ConcurrentMap { - - // @GuardedBy("lock") - private volatile M delegate; - - // import edu.umd.cs.findbugs.annotations.@SuppressWarnings - private final transient Lock lock = new ReentrantLock(); - - // private final transient EntrySet entrySet = new EntrySet(); - // private final transient KeySet keySet = new KeySet(); - // private final transient Values values = new Values(); - // private final View.Type viewType; - private final View view; - - /** - * Create a new {@link CopyOnWriteMap} with the supplied {@link Map} to initialize the values. - * - * @param map the initial map to initialize with - * @param viewType for writable or read-only key, value and entrySet views - */ - protected > AbstractCopyOnWriteMap(final N map, final View.Type viewType) { - this.delegate = notNull("delegate", copy(notNull("map", map))); - this.view = notNull("viewType", viewType).get(this); - } - - /** - * Copy function, implemented by sub-classes. - * - * @param the map to copy and return. - * @param map the initial values of the newly created map. - * @return a new map. Will never be modified after construction. 
- */ - // @GuardedBy("lock") - abstract > M copy(N map); - - // - // mutable operations - // - - public final void clear() { - lock.lock(); - try { - set(copy(Collections.emptyMap())); - } finally { - lock.unlock(); - } - } - - public final V remove(final Object key) { - lock.lock(); - try { - // short circuit if key doesn't exist - if (!delegate.containsKey(key)) { - return null; - } - M map = copy(); - try { - return map.remove(key); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - - public boolean remove(final Object key, final Object value) { - lock.lock(); - try { - if (delegate.containsKey(key) && equals(value, delegate.get(key))) { - M map = copy(); - map.remove(key); - set(map); - return true; - } else { - return false; - } - } finally { - lock.unlock(); - } - } - - public boolean replace(final K key, final V oldValue, final V newValue) { - lock.lock(); - try { - if (!delegate.containsKey(key) || !equals(oldValue, delegate.get(key))) { - return false; - } - M map = copy(); - map.put(key, newValue); - set(map); - return true; - } finally { - lock.unlock(); - } - } - - public V replace(final K key, final V value) { - lock.lock(); - try { - if (!delegate.containsKey(key)) { - return null; - } - M map = copy(); - try { - return map.put(key, value); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - - public final V put(final K key, final V value) { - lock.lock(); - try { - M map = copy(); - try { - return map.put(key, value); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - - public V putIfAbsent(final K key, final V value) { - lock.lock(); - try { - if (!delegate.containsKey(key)) { - M map = copy(); - try { - return map.put(key, value); - } finally { - set(map); - } - } - return delegate.get(key); - } finally { - lock.unlock(); - } - } - - public final void putAll(final Map t) { - lock.lock(); - try { - M map = copy(); - map.putAll(t); - set(map); - } finally { - lock.unlock(); - } - } - - 
protected M copy() { - lock.lock(); - try { - return copy(delegate); - } finally { - lock.unlock(); - } - } - - // @GuardedBy("lock") - protected void set(final M map) { - delegate = map; - } - - // - // Collection views - // - - public final Set> entrySet() { - return view.entrySet(); - } - - public final Set keySet() { - return view.keySet(); - } - - public final Collection values() { - return view.values(); - } - - // - // delegate operations - // - - public final boolean containsKey(final Object key) { - return delegate.containsKey(key); - } - - public final boolean containsValue(final Object value) { - return delegate.containsValue(value); - } - - public final V get(final Object key) { - return delegate.get(key); - } - - public final boolean isEmpty() { - return delegate.isEmpty(); - } - - public final int size() { - return delegate.size(); - } - - @Override - public final boolean equals(final Object o) { - return delegate.equals(o); - } - - @Override - public final int hashCode() { - return delegate.hashCode(); - } - - protected final M getDelegate() { - return delegate; - } - - @Override - public String toString() { - return delegate.toString(); - } - - // - // inner classes - // - - private class KeySet extends CollectionView implements Set { - - @Override - Collection getDelegate() { - return delegate.keySet(); - } - - // - // mutable operations - // - - public void clear() { - lock.lock(); - try { - M map = copy(); - map.keySet().clear(); - set(map); - } finally { - lock.unlock(); - } - } - - public boolean remove(final Object o) { - return AbstractCopyOnWriteMap.this.remove(o) != null; - } - - public boolean removeAll(final Collection c) { - lock.lock(); - try { - M map = copy(); - try { - return map.keySet().removeAll(c); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - - public boolean retainAll(final Collection c) { - lock.lock(); - try { - M map = copy(); - try { - return map.keySet().retainAll(c); - } finally { - set(map); - } 
- } finally { - lock.unlock(); - } - } - } - - private final class Values extends CollectionView { - - @Override - Collection getDelegate() { - return delegate.values(); - } - - public void clear() { - lock.lock(); - try { - M map = copy(); - map.values().clear(); - set(map); - } finally { - lock.unlock(); - } - } - - public boolean remove(final Object o) { - lock.lock(); - try { - if (!contains(o)) { - return false; - } - M map = copy(); - try { - return map.values().remove(o); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - - public boolean removeAll(final Collection c) { - lock.lock(); - try { - M map = copy(); - try { - return map.values().removeAll(c); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - - public boolean retainAll(final Collection c) { - lock.lock(); - try { - M map = copy(); - try { - return map.values().retainAll(c); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - } - - private class EntrySet extends CollectionView> implements Set> { - - @Override - Collection> getDelegate() { - return delegate.entrySet(); - } - - public void clear() { - lock.lock(); - try { - M map = copy(); - map.entrySet().clear(); - set(map); - } finally { - lock.unlock(); - } - } - - public boolean remove(final Object o) { - lock.lock(); - try { - if (!contains(o)) { - return false; - } - M map = copy(); - try { - return map.entrySet().remove(o); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - - public boolean removeAll(final Collection c) { - lock.lock(); - try { - M map = copy(); - try { - return map.entrySet().removeAll(c); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - - public boolean retainAll(final Collection c) { - lock.lock(); - try { - M map = copy(); - try { - return map.entrySet().retainAll(c); - } finally { - set(map); - } - } finally { - lock.unlock(); - } - } - } - - private static class UnmodifiableIterator implements Iterator { - private final 
Iterator delegate; - - UnmodifiableIterator(final Iterator delegate) { - this.delegate = delegate; - } - - public boolean hasNext() { - return delegate.hasNext(); - } - - public T next() { - return delegate.next(); - } - - public void remove() { - throw new UnsupportedOperationException(); - } - } - - protected abstract static class CollectionView implements Collection { - - abstract Collection getDelegate(); - - // - // delegate operations - // - - public final boolean contains(final Object o) { - return getDelegate().contains(o); - } - - public final boolean containsAll(final Collection c) { - return getDelegate().containsAll(c); - } - - public final Iterator iterator() { - return new UnmodifiableIterator(getDelegate().iterator()); - } - - public final boolean isEmpty() { - return getDelegate().isEmpty(); - } - - public final int size() { - return getDelegate().size(); - } - - public final Object[] toArray() { - return getDelegate().toArray(); - } - - public final T[] toArray(final T[] a) { - return getDelegate().toArray(a); - } - - @Override - public int hashCode() { - return getDelegate().hashCode(); - } - - @Override - public boolean equals(final Object obj) { - return getDelegate().equals(obj); - } - - @Override - public String toString() { - return getDelegate().toString(); - } - - // - // unsupported operations - // - - public final boolean add(final E o) { - throw new UnsupportedOperationException(); - } - - public final boolean addAll(final Collection c) { - throw new UnsupportedOperationException(); - } - } - - private boolean equals(final Object o1, final Object o2) { - if (o1 == null) { - return o2 == null; - } - return o1.equals(o2); - } - - /** - * Provides access to the views of the underlying key, value and entry collections. 
- */ - public abstract static class View { - View() { - } - - abstract Set keySet(); - - abstract Set> entrySet(); - - abstract Collection values(); - - /** - * The different types of {@link View} available - */ - public enum Type { - STABLE { - @Override - > View get(final AbstractCopyOnWriteMap host) { - return host.new Immutable(); - } - }, - LIVE { - @Override - > View get(final AbstractCopyOnWriteMap host) { - return host.new Mutable(); - } - }; - - abstract > View get(AbstractCopyOnWriteMap host); - } - } - - final class Immutable extends View { - - @Override - public Set keySet() { - return unmodifiableSet(delegate.keySet()); - } - - @Override - public Set> entrySet() { - return unmodifiableSet(delegate.entrySet()); - } - - @Override - public Collection values() { - return unmodifiableCollection(delegate.values()); - } - } - - final class Mutable extends View { - - private final transient KeySet keySet = new KeySet(); - private final transient EntrySet entrySet = new EntrySet(); - private final transient Values values = new Values(); - - @Override - public Set keySet() { - return keySet; - } - - @Override - public Set> entrySet() { - return entrySet; - } - - @Override - public Collection values() { - return values; - } - } -} - diff --git a/bson/src/main/org/bson/util/ClassAncestry.java b/bson/src/main/org/bson/util/ClassAncestry.java deleted file mode 100644 index 6d932fdf5a6..00000000000 --- a/bson/src/main/org/bson/util/ClassAncestry.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.util; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.ConcurrentMap; - -import static java.util.Collections.unmodifiableList; -import static org.bson.util.CopyOnWriteMap.newHashMap; - -class ClassAncestry { - - /** - *

    Walks superclass and interface graph, superclasses first, then interfaces, to compute an ancestry list. Supertypes are visited - * left - * to right. Duplicates are removed such that no Class will appear in the list before one of its subtypes.

    - * - *

    Does not need to be synchronized, races are harmless as the Class graph does not change at runtime.

    - */ - public static List> getAncestry(final Class c) { - ConcurrentMap, List>> cache = getClassAncestryCache(); - while (true) { - List> cachedResult = cache.get(c); - if (cachedResult != null) { - return cachedResult; - } - cache.putIfAbsent(c, computeAncestry(c)); - } - } - - /** - * Starting with children and going back to parents - */ - private static List> computeAncestry(final Class c) { - List> result = new ArrayList>(); - result.add(Object.class); - computeAncestry(c, result); - Collections.reverse(result); - return unmodifiableList(new ArrayList>(result)); - } - - private static void computeAncestry(final Class c, final List> result) { - if ((c == null) || (c == Object.class)) { - return; - } - - // first interfaces (looks backwards but is not) - Class[] interfaces = c.getInterfaces(); - for (int i = interfaces.length - 1; i >= 0; i--) { - computeAncestry(interfaces[i], result); - } - - // next superclass - computeAncestry(c.getSuperclass(), result); - - if (!result.contains(c)) { - result.add(c); - } - } - - /** - * classAncestryCache - */ - private static ConcurrentMap, List>> getClassAncestryCache() { - return (_ancestryCache); - } - - private static final ConcurrentMap, List>> _ancestryCache = newHashMap(); -} - diff --git a/bson/src/main/org/bson/util/ClassMap.java b/bson/src/main/org/bson/util/ClassMap.java deleted file mode 100644 index 8e4e90cbbc4..00000000000 --- a/bson/src/main/org/bson/util/ClassMap.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.util; - -import java.util.List; -import java.util.Map; - -/** - *

    Maps Class objects to values. A ClassMap is different from a regular Map in that {@code get(clazz)} does not only look to see if - * {@code clazz} is a key in the Map, but also walks the up superclass and interface graph of {@code clazz} to find matches. Derived matches - * of this sort are then cached in the registry so that matches are faster on future gets.

    - * - *

    This is a very useful class for Class based registries.

    - * - *

    Example:

    - *
    {@code
    - * ClassMap m = new ClassMap();
    - * m.put(Animal.class, "Animal");
    - * m.put(Fox.class, "Fox");
    - * m.get(Fox.class) --> "Fox"
    - * m.get(Dog.class) --> "Animal"
    - * } 
    - * - * (assuming Dog.class < Animal.class) - * - * @param the type of the value in this map - */ -public class ClassMap { - /** - * Helper method that walks superclass and interface graph, superclasses first, then interfaces, to compute an ancestry list. Super - * types are visited left to right. Duplicates are removed such that no Class will appear in the list before one of its subtypes. - * - * @param clazz the class to get the ancestors for - * @param the type of the class modeled by this {@code Class} object. - * @return a list of all the super classes of {@code clazz}, starting with the class, and ending with {@code java.lang.Object}. - */ - public static List> getAncestry(final Class clazz) { - return ClassAncestry.getAncestry(clazz); - } - - private final class ComputeFunction implements Function, T> { - @Override - public T apply(final Class a) { - for (final Class cls : getAncestry(a)) { - T result = map.get(cls); - if (result != null) { - return result; - } - } - return null; - } - } - - private final Map, T> map = CopyOnWriteMap.newHashMap(); - private final Map, T> cache = ComputingMap.create(new ComputeFunction()); - - /** - * Gets the value associated with either this Class or a superclass of this class. If fetching for a super class, it fetches the value - * for the closest superclass. Returns null if the given class and none of its superclasses are in the map. - * - * @param key a {@code Class} to get the value for - * @return the value for either this class or its nearest superclass - */ - public T get(final Object key) { - return cache.get(key); - } - - /** - * As per {@code java.util.Map}, associates the specified value with the specified key in this map. If the map previously contained a - * mapping for the key, the old value is replaced by the specified value. - * - * @param key a {@code Class} key - * @param value the value for this class - * @return the previous value associated with {@code key}, or null if there was no mapping for key. 
- * @see java.util.Map#put(Object, Object) - */ - public T put(final Class key, final T value) { - try { - return map.put(key, value); - } finally { - cache.clear(); - } - } - - /** - * As per {@code java.util.Map}, removes the mapping for a key from this map if it is present - * - * @param key a {@code Class} key - * @return the previous value associated with {@code key}, or null if there was no mapping for key. - * @see java.util.Map#remove(Object) - */ - public T remove(final Object key) { - try { - return map.remove(key); - } finally { - cache.clear(); - } - } - - /** - * As per {@code java.util.Map}, removes all of the mappings from this map (optional operation). - * - * @see java.util.Map#clear() - */ - public void clear() { - map.clear(); - cache.clear(); - } - - /** - * As per {@code java.util.Map}, returns the number of key-value mappings in this map. This will only return the number of keys - * explicitly added to the map, not any cached hierarchy keys. - * - * @return the size of this map - * @see java.util.Map#size() - */ - public int size() { - return map.size(); - } - - /** - * As per {@code java.util.Map}, returns {@code true} if this map contains no key-value mappings. - * - * @return true if there are no values in the map - * @see java.util.Map#isEmpty() - */ - public boolean isEmpty() { - return map.isEmpty(); - } -} diff --git a/bson/src/main/org/bson/util/ComputingMap.java b/bson/src/main/org/bson/util/ComputingMap.java deleted file mode 100644 index b93330383c2..00000000000 --- a/bson/src/main/org/bson/util/ComputingMap.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.util; - -import java.util.Collection; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentMap; - -import static org.bson.assertions.Assertions.notNull; - -final class ComputingMap implements Map, Function { - - public static Map create(final Function function) { - return new ComputingMap(CopyOnWriteMap.newHashMap(), function); - } - - private final ConcurrentMap map; - private final Function function; - - ComputingMap(final ConcurrentMap map, final Function function) { - this.map = notNull("map", map); - this.function = notNull("function", function); - } - - public V get(final Object key) { - while (true) { - V v = map.get(key); - if (v != null) { - return v; - } - @SuppressWarnings("unchecked") - K k = (K) key; - V value = function.apply(k); - if (value == null) { - return null; - } - map.putIfAbsent(k, value); - } - } - - public V apply(final K k) { - return get(k); - } - - public V putIfAbsent(final K key, final V value) { - return map.putIfAbsent(key, value); - } - - public boolean remove(final Object key, final Object value) { - return map.remove(key, value); - } - - public boolean replace(final K key, final V oldValue, final V newValue) { - return map.replace(key, oldValue, newValue); - } - - public V replace(final K key, final V value) { - return map.replace(key, value); - } - - public int size() { - return map.size(); - } - - public boolean isEmpty() { - return map.isEmpty(); - } - - public boolean containsKey(final Object key) { - return map.containsKey(key); - } - - public boolean 
containsValue(final Object value) { - return map.containsValue(value); - } - - public V put(final K key, final V value) { - return map.put(key, value); - } - - public V remove(final Object key) { - return map.remove(key); - } - - public void putAll(final Map m) { - map.putAll(m); - } - - public void clear() { - map.clear(); - } - - public Set keySet() { - return map.keySet(); - } - - public Collection values() { - return map.values(); - } - - public Set> entrySet() { - return map.entrySet(); - } - - public boolean equals(final Object o) { - return map.equals(o); - } - - public int hashCode() { - return map.hashCode(); - } -} diff --git a/bson/src/main/org/bson/util/CopyOnWriteMap.java b/bson/src/main/org/bson/util/CopyOnWriteMap.java deleted file mode 100644 index 40f505d9d97..00000000000 --- a/bson/src/main/org/bson/util/CopyOnWriteMap.java +++ /dev/null @@ -1,243 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * Copyright (c) 2008-2014 Atlassian Pty Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.util; - -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -/** - *

    A thread-safe variant of {@link java.util.Map} in which all mutative operations (the "destructive" operations described by {@link - * java.util.Map} put, remove and so on) are implemented by making a fresh copy of the underlying map.

    - * - *

    This is ordinarily too costly, but may be more efficient than alternatives when traversal operations vastly out-number - * mutations, and is useful when you cannot or don't want to synchronize traversals, yet need to preclude interference among concurrent - * threads. The "snapshot" style iterators on the collections returned by {@link #entrySet()}, {@link #keySet()} and {@link #values()} use a - * reference to the internal map at the point that the iterator was created. This map never changes during the lifetime of the iterator, so - * interference is impossible and the iterator is guaranteed not to throw ConcurrentModificationException. The iterators will not - * reflect additions, removals, or changes to the list since the iterator was created. Removing elements via these iterators is not - * supported. The mutable operations on these collections (remove, retain etc.) are supported but as with the {@link java.util.Map} - * interface, add and addAll are not and throw {@link UnsupportedOperationException}.

    The actual copy is performed by an abstract - * {@link #copy(java.util.Map)} method. The method is responsible for the underlying Map implementation (for instance a {@link - * java.util.HashMap}, {@link java.util.TreeMap}, {@link java.util.LinkedHashMap} etc.) and therefore the semantics of what this map will - * cope with as far as null keys and values, iteration ordering etc. See the note below about suitable candidates for underlying Map - * implementations

    - * - *

    There are supplied implementations for the common j.u.c {@link java.util.Map} implementations via the {@link CopyOnWriteMap} static - * {@link Builder}.

    - * - *

    Collection views of the keys, values and entries are optionally {@link View.Type#LIVE live} or {@link View.Type#STABLE stable}. Live - * views are modifiable will cause a copy if a modifying method is called on them. Methods on these will reflect the current state of the - * collection, although iterators will be snapshot style. If the collection views are stable they are unmodifiable, and will be a snapshot - * of the state of the map at the time the collection was asked for.

    - * - *

    Please note that the thread-safety guarantees are limited to the thread-safety of the non-mutative (non-destructive) - * operations of the underlying map implementation. For instance some implementations such as {@link java.util.WeakHashMap} and {@link - * java.util.LinkedHashMap} with access ordering are actually structurally modified by the {@link #get(Object)} method and are therefore not - * suitable candidates as delegates for this class. - * - * @param the key type - * @param the value type - * @author Jed Wesley-Smith - */ -abstract class CopyOnWriteMap extends AbstractCopyOnWriteMap> { - private static final long serialVersionUID = 7935514534647505917L; - - /** - * Get a {@link Builder} for a CopyOnWriteMap instance. - * - * @param key type - * @param value type - * @return a fresh builder - */ - public static Builder builder() { - return new Builder(); - } - - /** - * Build a {@link CopyOnWriteMap} and specify all the options. - * - * @param key type - * @param value type - */ - public static class Builder { - private View.Type viewType = View.Type.STABLE; - private final Map initialValues = new HashMap(); - - Builder() { - } - - /** - * Views are stable (fixed in time) and unmodifiable. - */ - public Builder stableViews() { - viewType = View.Type.STABLE; - return this; - } - - /** - * Views are live (reflecting concurrent updates) and mutator methods are supported. - */ - public Builder addAll(final Map values) { - initialValues.putAll(values); - return this; - } - - /** - * Views are live (reflecting concurrent updates) and mutator methods are supported. - */ - public Builder liveViews() { - viewType = View.Type.LIVE; - return this; - } - - public CopyOnWriteMap newHashMap() { - return new Hash(initialValues, viewType); - } - - public CopyOnWriteMap newLinkedMap() { - return new Linked(initialValues, viewType); - } - } - - /** - *

    Creates a new {@link CopyOnWriteMap} with an underlying {@link HashMap}.

    - * - *

    This map has {@link View.Type#STABLE stable} views.

    - */ - public static CopyOnWriteMap newHashMap() { - Builder builder = builder(); - return builder.newHashMap(); - } - - /** - *

    Creates a new {@link CopyOnWriteMap} with an underlying {@link HashMap} using the supplied map as the initial values.

    - * - *

    This map has {@link View.Type#STABLE stable} views.

    - */ - public static CopyOnWriteMap newHashMap(final Map map) { - Builder builder = builder(); - return builder.addAll(map).newHashMap(); - } - - /** - *

    Creates a new {@link CopyOnWriteMap} with an underlying {@link java.util.LinkedHashMap}. Iterators for this map will be return - * elements in insertion order.

    - * - *

    This map has {@link View.Type#STABLE stable} views.

    - */ - public static CopyOnWriteMap newLinkedMap() { - Builder builder = builder(); - return builder.newLinkedMap(); - } - - /** - *

    Creates a new {@link CopyOnWriteMap} with an underlying {@link java.util.LinkedHashMap} using the supplied map as the initial - * values. Iterators for this map will be return elements in insertion order.

    - * - *

    This map has {@link View.Type#STABLE stable} views.

    - */ - public static CopyOnWriteMap newLinkedMap(final Map map) { - Builder builder = builder(); - return builder.addAll(map).newLinkedMap(); - } - - // - // constructors - // - - /** - * Create a new {@link CopyOnWriteMap} with the supplied {@link Map} to initialize the values. - * - * @param map the initial map to initialize with - * @deprecated since 0.0.12 use the versions that explicitly specify View.Type - */ - @Deprecated - protected CopyOnWriteMap(final Map map) { - this(map, View.Type.LIVE); - } - - /** - * Create a new empty {@link CopyOnWriteMap}. - * - * @deprecated since 0.0.12 use the versions that explicitly specify View.Type - */ - @Deprecated - protected CopyOnWriteMap() { - this(Collections.emptyMap(), View.Type.LIVE); - } - - /** - * Create a new {@link CopyOnWriteMap} with the supplied {@link Map} to initialize the values. This map may be optionally modified using - * any of the key, entry or value views - * - * @param map the initial map to initialize with - */ - protected CopyOnWriteMap(final Map map, final View.Type viewType) { - super(map, viewType); - } - - /** - * Create a new empty {@link CopyOnWriteMap}. This map may be optionally modified using any of the key, entry or value views - */ - protected CopyOnWriteMap(final View.Type viewType) { - super(Collections.emptyMap(), viewType); - } - - @Override - // @GuardedBy("internal-lock") - protected abstract > Map copy(N map); - - // - // inner classes - // - - /** - * Uses {@link HashMap} instances as its internal storage. - */ - static class Hash extends CopyOnWriteMap { - private static final long serialVersionUID = 5221824943734164497L; - - Hash(final Map map, final View.Type viewType) { - super(map, viewType); - } - - @Override - public > Map copy(final N map) { - return new HashMap(map); - } - } - - /** - * Uses {@link java.util.LinkedHashMap} instances as its internal storage. 
- */ - static class Linked extends CopyOnWriteMap { - private static final long serialVersionUID = -8659999465009072124L; - - Linked(final Map map, final View.Type viewType) { - super(map, viewType); - } - - @Override - public > Map copy(final N map) { - return new LinkedHashMap(map); - } - } -} - diff --git a/bson/src/main/org/bson/util/Function.java b/bson/src/main/org/bson/util/Function.java deleted file mode 100644 index b4cc3d66b09..00000000000 --- a/bson/src/main/org/bson/util/Function.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.util; - -interface Function { - B apply(A a); -} diff --git a/bson/src/main/org/bson/util/package-info.java b/bson/src/main/org/bson/util/package-info.java deleted file mode 100644 index 9226065ec35..00000000000 --- a/bson/src/main/org/bson/util/package-info.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Contains helper classes for working with the BSON protocol. - */ -package org.bson.util; diff --git a/bson/src/main/resources/META-INF/native-image/native-image.properties b/bson/src/main/resources/META-INF/native-image/native-image.properties new file mode 100644 index 00000000000..65c60367503 --- /dev/null +++ b/bson/src/main/resources/META-INF/native-image/native-image.properties @@ -0,0 +1,16 @@ +# +# Copyright 2008-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +Args = --initialize-at-run-time=org.bson.types.ObjectId diff --git a/bson/src/main/resources/META-INF/native-image/reflect-config.json b/bson/src/main/resources/META-INF/native-image/reflect-config.json new file mode 100644 index 00000000000..dd27feda44d --- /dev/null +++ b/bson/src/main/resources/META-INF/native-image/reflect-config.json @@ -0,0 +1,17 @@ +[ +{ + "name":"java.lang.Object", + "queryAllDeclaredMethods":true +}, +{ + "name":"sun.security.provider.NativePRNG", + "methods":[{"name":"","parameterTypes":[] }, {"name":"","parameterTypes":["java.security.SecureRandomParameters"] }] +}, +{ + "name":"sun.security.provider.SHA", + "methods":[{"name":"","parameterTypes":[] }] +}, +{ + "name":"org.slf4j.Logger" +} +] diff --git a/bson/src/test/resources/bson-binary-vector/float32.json b/bson/src/test/resources/bson-binary-vector/float32.json new file mode 100644 index 00000000000..e1d142c184b --- /dev/null +++ b/bson/src/test/resources/bson-binary-vector/float32.json @@ -0,0 +1,50 @@ +{ + "description": "Tests of Binary subtype 9, Vectors, with dtype FLOAT32", + "test_key": "vector", + "tests": [ + { + "description": "Simple Vector FLOAT32", + "valid": true, + "vector": [127.0, 7.0], + "dtype_hex": "0x27", + "dtype_alias": "FLOAT32", + "padding": 0, + "canonical_bson": "1C00000005766563746F72000A0000000927000000FE420000E04000" + }, + { + "description": "Vector with decimals and negative value FLOAT32", + "valid": true, + "vector": [127.7, -7.7], + "dtype_hex": "0x27", + "dtype_alias": "FLOAT32", + "padding": 0, + "canonical_bson": "1C00000005766563746F72000A0000000927006666FF426666F6C000" + }, + { + "description": "Empty Vector FLOAT32", + "valid": true, + "vector": [], + "dtype_hex": "0x27", + "dtype_alias": "FLOAT32", + "padding": 0, + "canonical_bson": "1400000005766563746F72000200000009270000" + }, + { + "description": "Infinity Vector FLOAT32", + "valid": true, + "vector": ["-inf", 0.0, "inf"], + "dtype_hex": "0x27", + "dtype_alias": "FLOAT32", + 
"padding": 0, + "canonical_bson": "2000000005766563746F72000E000000092700000080FF000000000000807F00" + }, + { + "description": "FLOAT32 with padding", + "valid": false, + "vector": [127.0, 7.0], + "dtype_hex": "0x27", + "dtype_alias": "FLOAT32", + "padding": 3 + } + ] +} \ No newline at end of file diff --git a/bson/src/test/resources/bson-binary-vector/int8.json b/bson/src/test/resources/bson-binary-vector/int8.json new file mode 100644 index 00000000000..c10c1b7d4e2 --- /dev/null +++ b/bson/src/test/resources/bson-binary-vector/int8.json @@ -0,0 +1,56 @@ +{ + "description": "Tests of Binary subtype 9, Vectors, with dtype INT8", + "test_key": "vector", + "tests": [ + { + "description": "Simple Vector INT8", + "valid": true, + "vector": [127, 7], + "dtype_hex": "0x03", + "dtype_alias": "INT8", + "padding": 0, + "canonical_bson": "1600000005766563746F7200040000000903007F0700" + }, + { + "description": "Empty Vector INT8", + "valid": true, + "vector": [], + "dtype_hex": "0x03", + "dtype_alias": "INT8", + "padding": 0, + "canonical_bson": "1400000005766563746F72000200000009030000" + }, + { + "description": "Overflow Vector INT8", + "valid": false, + "vector": [128], + "dtype_hex": "0x03", + "dtype_alias": "INT8", + "padding": 0 + }, + { + "description": "Underflow Vector INT8", + "valid": false, + "vector": [-129], + "dtype_hex": "0x03", + "dtype_alias": "INT8", + "padding": 0 + }, + { + "description": "INT8 with padding", + "valid": false, + "vector": [127, 7], + "dtype_hex": "0x03", + "dtype_alias": "INT8", + "padding": 3 + }, + { + "description": "INT8 with float inputs", + "valid": false, + "vector": [127.77, 7.77], + "dtype_hex": "0x03", + "dtype_alias": "INT8", + "padding": 0 + } + ] +} \ No newline at end of file diff --git a/bson/src/test/resources/bson-binary-vector/packed_bit.json b/bson/src/test/resources/bson-binary-vector/packed_bit.json new file mode 100644 index 00000000000..69fb3948335 --- /dev/null +++ 
b/bson/src/test/resources/bson-binary-vector/packed_bit.json @@ -0,0 +1,97 @@ +{ + "description": "Tests of Binary subtype 9, Vectors, with dtype PACKED_BIT", + "test_key": "vector", + "tests": [ + { + "description": "Padding specified with no vector data PACKED_BIT", + "valid": false, + "vector": [], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 1 + }, + { + "description": "Simple Vector PACKED_BIT", + "valid": true, + "vector": [127, 7], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 0, + "canonical_bson": "1600000005766563746F7200040000000910007F0700" + }, + { + "description": "Empty Vector PACKED_BIT", + "valid": true, + "vector": [], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 0, + "canonical_bson": "1400000005766563746F72000200000009100000" + }, + { + "description": "PACKED_BIT with padding", + "valid": true, + "vector": [127, 7], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 3, + "canonical_bson": "1600000005766563746F7200040000000910037F0700" + }, + { + "description": "Overflow Vector PACKED_BIT", + "valid": false, + "vector": [256], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 0 + }, + { + "description": "Underflow Vector PACKED_BIT", + "valid": false, + "vector": [-1], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 0 + }, + { + "description": "Vector with float values PACKED_BIT", + "valid": false, + "vector": [127.5], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 0 + }, + { + "description": "Padding specified with no vector data PACKED_BIT", + "valid": false, + "vector": [], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 1 + }, + { + "description": "Exceeding maximum padding PACKED_BIT", + "valid": false, + "vector": [1], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 8 + }, + { + "description": "Negative padding PACKED_BIT", + "valid": false, + "vector": [1], + 
"dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": -1 + }, + { + "description": "Vector with float values PACKED_BIT", + "valid": false, + "vector": [127.5], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 0 + } + ] +} \ No newline at end of file diff --git a/bson/src/test/resources/bson/array.json b/bson/src/test/resources/bson/array.json index 1c654cf36b7..9ff953e5ae7 100644 --- a/bson/src/test/resources/bson/array.json +++ b/bson/src/test/resources/bson/array.json @@ -14,16 +14,22 @@ "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" }, { - "description": "Single Element Array with index set incorrectly", + "description": "Single Element Array with index set incorrectly to empty string", "degenerate_bson": "130000000461000B00000010000A0000000000", "canonical_bson": "140000000461000C0000001030000A0000000000", "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" }, { - "description": "Single Element Array with index set incorrectly", + "description": "Single Element Array with index set incorrectly to ab", "degenerate_bson": "150000000461000D000000106162000A0000000000", "canonical_bson": "140000000461000C0000001030000A0000000000", "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" + }, + { + "description": "Multi Element Array with duplicate indexes", + "degenerate_bson": "1b000000046100130000001030000a000000103000140000000000", + "canonical_bson": "1b000000046100130000001030000a000000103100140000000000", + "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}, {\"$numberInt\": \"20\"}]}" } ], "decodeErrors": [ diff --git a/bson/src/test/resources/bson/binary.json b/bson/src/test/resources/bson/binary.json index 90a15c1a1c4..0e0056f3a2c 100644 --- a/bson/src/test/resources/bson/binary.json +++ b/bson/src/test/resources/bson/binary.json @@ -39,11 +39,27 @@ "canonical_bson": "1D000000057800100000000473FFD26444B34C6990E8E7D1DFC035D400", "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : 
\"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"04\"}}}" }, + { + "description": "subtype 0x04 UUID", + "canonical_bson": "1D000000057800100000000473FFD26444B34C6990E8E7D1DFC035D400", + "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"04\"}}}", + "degenerate_extjson": "{\"x\" : { \"$uuid\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\"}}" + }, { "description": "subtype 0x05", "canonical_bson": "1D000000057800100000000573FFD26444B34C6990E8E7D1DFC035D400", "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"05\"}}}" }, + { + "description": "subtype 0x07", + "canonical_bson": "1D000000057800100000000773FFD26444B34C6990E8E7D1DFC035D400", + "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"07\"}}}" + }, + { + "description": "subtype 0x08", + "canonical_bson": "1D000000057800100000000873FFD26444B34C6990E8E7D1DFC035D400", + "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"08\"}}}" + }, { "description": "subtype 0x80", "canonical_bson": "0F0000000578000200000080FFFF00", @@ -58,6 +74,36 @@ "description": "$type query operator (conflicts with legacy $binary form with $type field)", "canonical_bson": "180000000378001000000010247479706500020000000000", "canonical_extjson": "{\"x\" : { \"$type\" : {\"$numberInt\": \"2\"}}}" + }, + { + "description": "subtype 0x09 Vector FLOAT32", + "canonical_bson": "170000000578000A0000000927000000FE420000E04000", + "canonical_extjson": "{\"x\": {\"$binary\": {\"base64\": \"JwAAAP5CAADgQA==\", \"subType\": \"09\"}}}" + }, + { + "description": "subtype 0x09 Vector INT8", + "canonical_bson": "11000000057800040000000903007F0700", + "canonical_extjson": "{\"x\": {\"$binary\": {\"base64\": \"AwB/Bw==\", \"subType\": \"09\"}}}" + }, + { + "description": "subtype 0x09 Vector PACKED_BIT", + "canonical_bson": 
"11000000057800040000000910007F0700", + "canonical_extjson": "{\"x\": {\"$binary\": {\"base64\": \"EAB/Bw==\", \"subType\": \"09\"}}}" + }, + { + "description": "subtype 0x09 Vector (Zero-length) FLOAT32", + "canonical_bson": "0F0000000578000200000009270000", + "canonical_extjson": "{\"x\": {\"$binary\": {\"base64\": \"JwA=\", \"subType\": \"09\"}}}" + }, + { + "description": "subtype 0x09 Vector (Zero-length) INT8", + "canonical_bson": "0F0000000578000200000009030000", + "canonical_extjson": "{\"x\": {\"$binary\": {\"base64\": \"AwA=\", \"subType\": \"09\"}}}" + }, + { + "description": "subtype 0x09 Vector (Zero-length) PACKED_BIT", + "canonical_bson": "0F0000000578000200000009100000", + "canonical_extjson": "{\"x\": {\"$binary\": {\"base64\": \"EAA=\", \"subType\": \"09\"}}}" } ], "decodeErrors": [ @@ -81,5 +127,27 @@ "description": "subtype 0x02 length negative one", "bson": "130000000578000600000002FFFFFFFFFFFF00" } + ], + "parseErrors": [ + { + "description": "$uuid wrong type", + "string": "{\"x\" : { \"$uuid\" : { \"data\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\"}}}" + }, + { + "description": "$uuid invalid value--too short", + "string": "{\"x\" : { \"$uuid\" : \"73ffd264-44b3-90e8-e7d1dfc035d4\"}}" + }, + { + "description": "$uuid invalid value--too long", + "string": "{\"x\" : { \"$uuid\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4-789e4\"}}" + }, + { + "description": "$uuid invalid value--misplaced hyphens", + "string": "{\"x\" : { \"$uuid\" : \"73ff-d26444b-34c6-990e8e-7d1dfc035d4\"}}" + }, + { + "description": "$uuid invalid value--too many hyphens", + "string": "{\"x\" : { \"$uuid\" : \"----d264-44b3-4--9-90e8-e7d1dfc0----\"}}" + } ] } diff --git a/bson/src/test/resources/bson/code.json b/bson/src/test/resources/bson/code.json index 6f37349ad0b..b8482b2541b 100644 --- a/bson/src/test/resources/bson/code.json +++ b/bson/src/test/resources/bson/code.json @@ -20,48 +20,48 @@ }, { "description": "two-byte UTF-8 (\u00e9)", - "canonical_bson": 
"190000000261000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", - "canonical_extjson": "{\"a\" : \"\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\"}" + "canonical_bson": "190000000D61000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", + "canonical_extjson": "{\"a\" : {\"$code\" : \"\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\"}}" }, { "description": "three-byte UTF-8 (\u2606)", - "canonical_bson": "190000000261000D000000E29886E29886E29886E298860000", - "canonical_extjson": "{\"a\" : \"\\u2606\\u2606\\u2606\\u2606\"}" + "canonical_bson": "190000000D61000D000000E29886E29886E29886E298860000", + "canonical_extjson": "{\"a\" : {\"$code\" : \"\\u2606\\u2606\\u2606\\u2606\"}}" }, { "description": "Embedded nulls", - "canonical_bson": "190000000261000D0000006162006261620062616261620000", - "canonical_extjson": "{\"a\" : \"ab\\u0000bab\\u0000babab\"}" + "canonical_bson": "190000000D61000D0000006162006261620062616261620000", + "canonical_extjson": "{\"a\" : {\"$code\" : \"ab\\u0000bab\\u0000babab\"}}" } ], "decodeErrors": [ { "description": "bad code string length: 0 (but no 0x00 either)", - "bson": "0C0000000261000000000000" + "bson": "0C0000000D61000000000000" }, { "description": "bad code string length: -1", - "bson": "0C000000026100FFFFFFFF00" + "bson": "0C0000000D6100FFFFFFFF00" }, { "description": "bad code string length: eats terminator", - "bson": "10000000026100050000006200620000" + "bson": "100000000D6100050000006200620000" }, { "description": "bad code string length: longer than rest of document", - "bson": "120000000200FFFFFF00666F6F6261720000" + "bson": "120000000D00FFFFFF00666F6F6261720000" }, { "description": "code string is not null-terminated", - "bson": "1000000002610004000000616263FF00" + "bson": "100000000D610004000000616263FF00" }, { "description": "empty code string, but extra null", - "bson": "0E00000002610001000000000000" + "bson": "0E0000000D610001000000000000" }, { "description": "invalid UTF-8", - "bson": "0E00000002610002000000E90000" + "bson": 
"0E0000000D610002000000E90000" } ] } diff --git a/bson/src/test/resources/bson/datetime.json b/bson/src/test/resources/bson/datetime.json index 60506ce1749..f857afdc367 100644 --- a/bson/src/test/resources/bson/datetime.json +++ b/bson/src/test/resources/bson/datetime.json @@ -25,6 +25,12 @@ "description" : "Y10K", "canonical_bson" : "1000000009610000DC1FD277E6000000", "canonical_extjson" : "{\"a\":{\"$date\":{\"$numberLong\":\"253402300800000\"}}}" + }, + { + "description": "leading zero ms", + "canonical_bson": "10000000096100D1D6D6CC3B01000000", + "relaxed_extjson": "{\"a\" : {\"$date\" : \"2012-12-24T12:15:30.001Z\"}}", + "canonical_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"1356351330001\"}}}" } ], "decodeErrors": [ diff --git a/bson/src/test/resources/bson/dbref.json b/bson/src/test/resources/bson/dbref.json index 1fe12c6f68d..41c0b09d0ea 100644 --- a/bson/src/test/resources/bson/dbref.json +++ b/bson/src/test/resources/bson/dbref.json @@ -1,5 +1,5 @@ { - "description": "DBRef", + "description": "Document type (DBRef sub-documents)", "bson_type": "0x03", "valid": [ { @@ -26,6 +26,26 @@ "description": "Document with key names similar to those of a DBRef", "canonical_bson": "3e0000000224726566000c0000006e6f742d612d646272656600072469640058921b3e6e32ab156a22b59e022462616e616e6100050000007065656c0000", "canonical_extjson": "{\"$ref\": \"not-a-dbref\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"$banana\": \"peel\"}" + }, + { + "description": "DBRef with additional dollar-prefixed and dotted fields", + "canonical_bson": "48000000036462726566003c0000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e10612e62000100000010246300010000000000", + "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"a.b\": {\"$numberInt\": \"1\"}, \"$c\": {\"$numberInt\": \"1\"}}}" + }, + { + "description": "Sub-document resembles DBRef but $id is missing", + "canonical_bson": 
"26000000036462726566001a0000000224726566000b000000636f6c6c656374696f6e000000", + "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\"}}" + }, + { + "description": "Sub-document resembles DBRef but $ref is not a string", + "canonical_bson": "2c000000036462726566002000000010247265660001000000072469640058921b3e6e32ab156a22b59e0000", + "canonical_extjson": "{\"dbref\": {\"$ref\": {\"$numberInt\": \"1\"}, \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}}}" + }, + { + "description": "Sub-document resembles DBRef but $db is not a string", + "canonical_bson": "4000000003646272656600340000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e1024646200010000000000", + "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"$db\": {\"$numberInt\": \"1\"}}}" } ] } diff --git a/bson/src/test/resources/bson/decimal128-1.json b/bson/src/test/resources/bson/decimal128-1.json index 7eefec6bf79..8e7fbc93c6f 100644 --- a/bson/src/test/resources/bson/decimal128-1.json +++ b/bson/src/test/resources/bson/decimal128-1.json @@ -312,6 +312,30 @@ "canonical_bson": "18000000136400000000000a5bc138938d44c64d31cc3700", "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : 
\"1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\"}}", "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+999\"}}" + }, + { + "description": "Clamped zeros with a large positive exponent", + "canonical_bson": "180000001364000000000000000000000000000000FE5F00", + "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+2147483647\"}}", + "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}" + }, + { + "description": "Clamped zeros with a large negative exponent", + "canonical_bson": "180000001364000000000000000000000000000000000000", + "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-2147483647\"}}", + "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}" + }, + { + "description": "Clamped negative zeros with a large positive exponent", + "canonical_bson": "180000001364000000000000000000000000000000FEDF00", + "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+2147483647\"}}", + "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : 
\"-0E+6111\"}}" + }, + { + "description": "Clamped negative zeros with a large negative exponent", + "canonical_bson": "180000001364000000000000000000000000000000008000", + "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-2147483647\"}}", + "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}" } ] } diff --git a/bson/src/test/resources/bson/document.json b/bson/src/test/resources/bson/document.json index 3ec9187044f..698e7ae90af 100644 --- a/bson/src/test/resources/bson/document.json +++ b/bson/src/test/resources/bson/document.json @@ -17,6 +17,26 @@ "description": "Single-character key subdoc", "canonical_bson": "160000000378000E0000000261000200000062000000", "canonical_extjson": "{\"x\" : {\"a\" : \"b\"}}" + }, + { + "description": "Dollar-prefixed key in sub-document", + "canonical_bson": "170000000378000F000000022461000200000062000000", + "canonical_extjson": "{\"x\" : {\"$a\" : \"b\"}}" + }, + { + "description": "Dollar as key in sub-document", + "canonical_bson": "160000000378000E0000000224000200000061000000", + "canonical_extjson": "{\"x\" : {\"$\" : \"a\"}}" + }, + { + "description": "Dotted key in sub-document", + "canonical_bson": "180000000378001000000002612E62000200000063000000", + "canonical_extjson": "{\"x\" : {\"a.b\" : \"c\"}}" + }, + { + "description": "Dot as key in sub-document", + "canonical_bson": "160000000378000E000000022E000200000061000000", + "canonical_extjson": "{\"x\" : {\".\" : \"a\"}}" } ], "decodeErrors": [ @@ -31,6 +51,10 @@ { "description": "Invalid subdocument: bad string length in field", "bson": "1C00000003666F6F001200000002626172000500000062617A000000" + }, + { + "description": "Null byte in sub-document key", + "bson": "150000000378000D00000010610000010000000000" } ] } diff --git a/bson/src/test/resources/bson/double.json b/bson/src/test/resources/bson/double.json index a483f696761..7a3bad158b3 100644 --- a/bson/src/test/resources/bson/double.json +++ b/bson/src/test/resources/bson/double.json @@ 
-28,16 +28,16 @@ "relaxed_extjson": "{\"d\" : -1.0001220703125}" }, { - "description": "1.23456789012345677E18", - "canonical_bson": "1000000001640081E97DF41022B14300", - "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"1.23456789012345677E18\"}}", - "relaxed_extjson": "{\"d\" : 1.23456789012345677E18}" + "description": "1.2345678921232E18", + "canonical_bson": "100000000164002a1bf5f41022b14300", + "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"1.2345678921232E18\"}}", + "relaxed_extjson": "{\"d\" : 1.2345678921232E18}" }, { - "description": "-1.23456789012345677E18", - "canonical_bson": "1000000001640081E97DF41022B1C300", - "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-1.23456789012345677E18\"}}", - "relaxed_extjson": "{\"d\" : -1.23456789012345677E18}" + "description": "-1.2345678921232E18", + "canonical_bson": "100000000164002a1bf5f41022b1c300", + "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-1.2345678921232E18\"}}", + "relaxed_extjson": "{\"d\" : -1.2345678921232E18}" }, { "description": "0.0", diff --git a/bson/src/test/resources/bson/multi-type-deprecated.json b/bson/src/test/resources/bson/multi-type-deprecated.json index 5aac1bd2e7d..665f388cd41 100644 --- a/bson/src/test/resources/bson/multi-type-deprecated.json +++ b/bson/src/test/resources/bson/multi-type-deprecated.json @@ -5,10 +5,10 @@ "valid": [ { "description": "All BSON types", - "canonical_bson": 
"3B020000075F69640057E193D7A9CC81B4027498B50E53796D626F6C000700000073796D626F6C0002537472696E670007000000737472696E670010496E743332002A00000012496E743634002A0000000000000001446F75626C6500000000000000F0BF0542696E617279001000000003A34C38F7C3ABEDC8A37814A992AB8DB60542696E61727955736572446566696E656400050000008001020304050D436F6465000E00000066756E6374696F6E2829207B7D000F436F64655769746853636F7065001B0000000E00000066756E6374696F6E2829207B7D00050000000003537562646F63756D656E74001200000002666F6F0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696D657374616D7000010000002A0000000B5265676578007061747465726E0000094461746574696D6545706F6368000000000000000000094461746574696D65506F73697469766500FFFFFF7F00000000094461746574696D654E656761746976650000000080FFFFFFFF085472756500010846616C736500000C4442506F696E746572000E00000064622E636F6C6C656374696F6E0057E193D7A9CC81B4027498B1034442526566003D0000000224726566000B000000636F6C6C656374696F6E00072469640057FD71E96E32AB4225B723FB02246462000900000064617461626173650000FF4D696E6B6579007F4D61786B6579000A4E756C6C0006556E646566696E65640000", - "converted_bson": 
"4b020000075f69640057e193d7a9cc81b4027498b50253796d626f6c000700000073796d626f6c0002537472696e670007000000737472696e670010496e743332002a00000012496e743634002a0000000000000001446f75626c6500000000000000f0bf0542696e617279001000000003a34c38f7c3abedc8a37814a992ab8db60542696e61727955736572446566696e656400050000008001020304050d436f6465000e00000066756e6374696f6e2829207b7d000f436f64655769746853636f7065001b0000000e00000066756e6374696f6e2829207b7d00050000000003537562646f63756d656e74001200000002666f6f0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696d657374616d7000010000002a0000000b5265676578007061747465726e0000094461746574696d6545706f6368000000000000000000094461746574696d65506f73697469766500ffffff7f00000000094461746574696d654e656761746976650000000080ffffffff085472756500010846616c73650000034442506f696e746572002e0000000224726566000e00000064622e636f6c6c656374696f6e00072469640057e193d7a9cc81b4027498b100034442526566003d0000000224726566000b000000636f6c6c656374696f6e00072469640057fd71e96e32ab4225b723fb02246462000900000064617461626173650000ff4d696e6b6579007f4d61786b6579000a4e756c6c000a556e646566696e65640000", - "canonical_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"Symbol\": {\"$symbol\": \"symbol\"}, \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": 
{\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBPointer\": {\"$dbPointer\": {\"$ref\": \"db.collection\", \"$id\": {\"$oid\": \"57e193d7a9cc81b4027498b1\"}}}, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null, \"Undefined\": {\"$undefined\": true}}", - "converted_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"Symbol\": \"symbol\", \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": {\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBPointer\": {\"$ref\": \"db.collection\", \"$id\": {\"$oid\": \"57e193d7a9cc81b4027498b1\"}}, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null, 
\"Undefined\": null}" + "canonical_bson": "38020000075F69640057E193D7A9CC81B4027498B50E53796D626F6C000700000073796D626F6C0002537472696E670007000000737472696E670010496E743332002A00000012496E743634002A0000000000000001446F75626C6500000000000000F0BF0542696E617279001000000003A34C38F7C3ABEDC8A37814A992AB8DB60542696E61727955736572446566696E656400050000008001020304050D436F6465000E00000066756E6374696F6E2829207B7D000F436F64655769746853636F7065001B0000000E00000066756E6374696F6E2829207B7D00050000000003537562646F63756D656E74001200000002666F6F0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696D657374616D7000010000002A0000000B5265676578007061747465726E0000094461746574696D6545706F6368000000000000000000094461746574696D65506F73697469766500FFFFFF7F00000000094461746574696D654E656761746976650000000080FFFFFFFF085472756500010846616C736500000C4442506F696E746572000B000000636F6C6C656374696F6E0057E193D7A9CC81B4027498B1034442526566003D0000000224726566000B000000636F6C6C656374696F6E00072469640057FD71E96E32AB4225B723FB02246462000900000064617461626173650000FF4D696E6B6579007F4D61786B6579000A4E756C6C0006556E646566696E65640000", + "converted_bson": 
"48020000075f69640057e193d7a9cc81b4027498b50253796d626f6c000700000073796d626f6c0002537472696e670007000000737472696e670010496e743332002a00000012496e743634002a0000000000000001446f75626c6500000000000000f0bf0542696e617279001000000003a34c38f7c3abedc8a37814a992ab8db60542696e61727955736572446566696e656400050000008001020304050d436f6465000e00000066756e6374696f6e2829207b7d000f436f64655769746853636f7065001b0000000e00000066756e6374696f6e2829207b7d00050000000003537562646f63756d656e74001200000002666f6f0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696d657374616d7000010000002a0000000b5265676578007061747465726e0000094461746574696d6545706f6368000000000000000000094461746574696d65506f73697469766500ffffff7f00000000094461746574696d654e656761746976650000000080ffffffff085472756500010846616c73650000034442506f696e746572002b0000000224726566000b000000636f6c6c656374696f6e00072469640057e193d7a9cc81b4027498b100034442526566003d0000000224726566000b000000636f6c6c656374696f6e00072469640057fd71e96e32ab4225b723fb02246462000900000064617461626173650000ff4d696e6b6579007f4d61786b6579000a4e756c6c000a556e646566696e65640000", + "canonical_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"Symbol\": {\"$symbol\": \"symbol\"}, \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": 
{\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBPointer\": {\"$dbPointer\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57e193d7a9cc81b4027498b1\"}}}, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null, \"Undefined\": {\"$undefined\": true}}", + "converted_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"Symbol\": \"symbol\", \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": {\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBPointer\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57e193d7a9cc81b4027498b1\"}}, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null, 
\"Undefined\": null}" } ] } diff --git a/bson/src/test/resources/bson/regex.json b/bson/src/test/resources/bson/regex.json index c62b019cdf4..223802169df 100644 --- a/bson/src/test/resources/bson/regex.json +++ b/bson/src/test/resources/bson/regex.json @@ -54,11 +54,11 @@ ], "decodeErrors": [ { - "description": "embedded null in pattern", + "description": "Null byte in pattern string", "bson": "0F0000000B610061006300696D0000" }, { - "description": "embedded null in flags", + "description": "Null byte in flags string", "bson": "100000000B61006162630069006D0000" } ] diff --git a/bson/src/test/resources/bson/symbol.json b/bson/src/test/resources/bson/symbol.json index 4e46cb95117..3dd3577ebd1 100644 --- a/bson/src/test/resources/bson/symbol.json +++ b/bson/src/test/resources/bson/symbol.json @@ -50,31 +50,31 @@ "decodeErrors": [ { "description": "bad symbol length: 0 (but no 0x00 either)", - "bson": "0C0000000261000000000000" + "bson": "0C0000000E61000000000000" }, { "description": "bad symbol length: -1", - "bson": "0C000000026100FFFFFFFF00" + "bson": "0C0000000E6100FFFFFFFF00" }, { "description": "bad symbol length: eats terminator", - "bson": "10000000026100050000006200620000" + "bson": "100000000E6100050000006200620000" }, { "description": "bad symbol length: longer than rest of document", - "bson": "120000000200FFFFFF00666F6F6261720000" + "bson": "120000000E00FFFFFF00666F6F6261720000" }, { "description": "symbol is not null-terminated", - "bson": "1000000002610004000000616263FF00" + "bson": "100000000E610004000000616263FF00" }, { "description": "empty symbol, but extra null", - "bson": "0E00000002610001000000000000" + "bson": "0E0000000E610001000000000000" }, { "description": "invalid UTF-8", - "bson": "0E00000002610002000000E90000" + "bson": "0E0000000E610002000000E90000" } ] } diff --git a/bson/src/test/resources/bson/timestamp.json b/bson/src/test/resources/bson/timestamp.json index c76bc2998eb..6f46564a327 100644 --- 
a/bson/src/test/resources/bson/timestamp.json +++ b/bson/src/test/resources/bson/timestamp.json @@ -18,6 +18,11 @@ "description": "Timestamp with high-order bit set on both seconds and increment", "canonical_bson": "10000000116100FFFFFFFFFFFFFFFF00", "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 4294967295, \"i\" : 4294967295} } }" + }, + { + "description": "Timestamp with high-order bit set on both seconds and increment (not UINT32_MAX)", + "canonical_bson": "1000000011610000286BEE00286BEE00", + "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 4000000000, \"i\" : 4000000000} } }" } ], "decodeErrors": [ diff --git a/bson/src/test/resources/bson/top.json b/bson/src/test/resources/bson/top.json index 68b51195ab1..9c649b5e3f0 100644 --- a/bson/src/test/resources/bson/top.json +++ b/bson/src/test/resources/bson/top.json @@ -3,9 +3,24 @@ "bson_type": "0x00", "valid": [ { - "description": "Document with keys that start with $", + "description": "Dollar-prefixed key in top-level document", "canonical_bson": "0F00000010246B6579002A00000000", "canonical_extjson": "{\"$key\": {\"$numberInt\": \"42\"}}" + }, + { + "description": "Dollar as key in top-level document", + "canonical_bson": "0E00000002240002000000610000", + "canonical_extjson": "{\"$\": \"a\"}" + }, + { + "description": "Dotted key in top-level document", + "canonical_bson": "1000000002612E620002000000630000", + "canonical_extjson": "{\"a.b\": \"c\"}" + }, + { + "description": "Dot as key in top-level document", + "canonical_bson": "0E000000022E0002000000610000", + "canonical_extjson": "{\".\": \"a\"}" } ], "decodeErrors": [ @@ -64,28 +79,32 @@ { "description": "Document truncated mid-key", "bson": "1200000002666F" + }, + { + "description": "Null byte in document key", + "bson": "0D000000107800000100000000" } ], "parseErrors": [ { "description" : "Bad $regularExpression (extra field)", - "string" : "{\"a\" : \"$regularExpression\": {\"pattern\": \"abc\", \"options\": \"\", \"unrelated\": 
true}}}" + "string" : "{\"a\" : {\"$regularExpression\": {\"pattern\": \"abc\", \"options\": \"\", \"unrelated\": true}}}" }, { "description" : "Bad $regularExpression (missing options field)", - "string" : "{\"a\" : \"$regularExpression\": {\"pattern\": \"abc\"}}}" + "string" : "{\"a\" : {\"$regularExpression\": {\"pattern\": \"abc\"}}}" }, { "description": "Bad $regularExpression (pattern is number, not string)", - "string": "{\"x\" : {\"$regularExpression\" : { \"pattern\": 42, \"$options\" : \"\"}}}" + "string": "{\"x\" : {\"$regularExpression\" : { \"pattern\": 42, \"options\" : \"\"}}}" }, { "description": "Bad $regularExpression (options are number, not string)", - "string": "{\"x\" : {\"$regularExpression\" : { \"pattern\": \"a\", \"$options\" : 0}}}" + "string": "{\"x\" : {\"$regularExpression\" : { \"pattern\": \"a\", \"options\" : 0}}}" }, { "description" : "Bad $regularExpression (missing pattern field)", - "string" : "{\"a\" : \"$regularExpression\": {\"options\":\"ix\"}}}" + "string" : "{\"a\" : {\"$regularExpression\": {\"options\":\"ix\"}}}" }, { "description": "Bad $oid (number, not string)", @@ -151,6 +170,10 @@ "description": "Bad $code (type is number, not string)", "string": "{\"a\" : {\"$code\" : 42}}" }, + { + "description": "Bad $code (type is number, not string) when $scope is also present", + "string": "{\"a\" : {\"$code\" : 42, \"$scope\" : {}}}" + }, { "description": "Bad $code (extra field)", "string": "{\"a\" : {\"$code\" : \"\", \"unrelated\": true}}" @@ -195,14 +218,6 @@ "description": "Bad $date (extra field)", "string": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"1356351330501\"}, \"unrelated\": true}}" }, - { - "description": "Bad DBRef (ref is number, not string)", - "string": "{\"x\" : {\"$ref\" : 42, \"$id\" : \"abc\"}}" - }, - { - "description": "Bad DBRef (db is number, not string)", - "string": "{\"x\" : {\"$ref\" : \"a\", \"$id\" : \"abc\", \"$db\" : 42}}" - }, { "description": "Bad $minKey (boolean, not integer)", 
"string": "{\"a\" : {\"$minKey\" : true}}" @@ -230,7 +245,22 @@ { "description": "Bad DBpointer (extra field)", "string": "{\"a\": {\"$dbPointer\": {\"a\": {\"$numberInt\": \"1\"}, \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}, \"c\": {\"$numberInt\": \"2\"}, \"$ref\": \"b\"}}}" + }, + { + "description" : "Null byte in document key", + "string" : "{\"a\\u0000\": 1 }" + }, + { + "description" : "Null byte in sub-document key", + "string" : "{\"a\" : {\"b\\u0000\": 1 }}" + }, + { + "description": "Null byte in $regularExpression pattern", + "string": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"b\\u0000\", \"options\" : \"i\"}}}" + }, + { + "description": "Null byte in $regularExpression options", + "string": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"b\", \"options\" : \"i\\u0000\"}}}" } - ] } diff --git a/bson/src/test/unit/org/bson/BSONTest.java b/bson/src/test/unit/org/bson/BSONTest.java deleted file mode 100644 index bb749898e6b..00000000000 --- a/bson/src/test/unit/org/bson/BSONTest.java +++ /dev/null @@ -1,425 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson; - -import org.bson.io.BasicOutputBuffer; -import org.bson.io.OutputBuffer; -import org.bson.types.CodeWScope; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -import static java.util.Arrays.asList; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.contains; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; - -public class BSONTest { - - @Before - public void setUp() { - BSON.clearAllHooks(); - } - - @After - public void tearDown() { - BSON.clearAllHooks(); - } - - @Test - public void testSimpleDocuments() throws IOException { - checkEncodingAndDecoding(new BasicBSONObject("x", true), 9, "090000000878000100"); - checkEncodingAndDecoding(new BasicBSONObject("x", null), 8, "080000000a780000"); - checkEncodingAndDecoding(new BasicBSONObject("x", 5.2), 16, "10000000017800cdcccccccccc144000"); - checkEncodingAndDecoding(new BasicBSONObject("x", "eliot"), 18, - "1200000002780006000000656c696f740000"); - checkEncodingAndDecoding(new BasicBSONObject("x", 5.2).append("y", "truth") - .append("z", 1.1), - 40, - "28000000017800cdcccccccccc144002790006000000747275746800017a009a9999999999f13f00"); - - checkEncodingAndDecoding(new BasicBSONObject("a", new BasicBSONObject("b", 1.1)), 24, - "18000000036100100000000162009a9999999999f13f0000"); - checkEncodingAndDecoding(new BasicBSONObject("x", 5.2).append("y", new BasicBSONObject("a", "eliot").append("b", true)) - .append("z", null), - 44, - "2c000000017800cdcccccccccc14400379001600000002610006000000656c696f740008620001000a7a0000"); - checkEncodingAndDecoding(new 
BasicBSONObject("x", 5.2).append("y", new Object[]{"a", "eliot", "b", true}) - .append("z", null), - 62, - "3e000000017800cdcccccccccc14400479002800000002300002000000610002310006000000656c696f740002" - + "320002000000620008330001000a7a0000"); - checkEncodingAndDecoding(new BasicBSONObject("x", 4), 12, "0c0000001078000400000000"); - } - - @Test - public void testArray() throws IOException { - checkEncodingAndDecoding(new BasicBSONObject("x", new int[]{1, 2, 3, 4}), 41, - "2900000004780021000000103000010000001031000200000010320003000000103300040000000000"); - } - - @Test - public void testCode() throws IOException { - BSONObject scope = new BasicBSONObject("x", 1); - CodeWScope c = new CodeWScope("function() { x += 1; }", scope); - BSONObject document = new BasicBSONObject("map", c); - checkEncodingAndDecoding(document, 53, - "350000000f6d6170002b0000001700000066756e6374696f6e2829207b2078202b3d20313b207d000c000000107800010000000000"); - } - - @Test - public void testBinary() throws IOException { - byte[] data = new byte[100]; - for (int i = 0; i < 100; i++) { - data[i] = 1; - } - BSONObject document = new BasicBSONObject("bin", data); - checkEncodingAndDecoding(document, 115, - "730000000562696e006400000000010101010101010101010101010101010101010101010101010101010101010101010101010101" - + "01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101" - + "01010101010100"); - } - - private void checkEncodingAndDecoding(final BSONObject toEncodeAndDecode, - final int expectedEncodedSize, - final String expectedHex) throws IOException { - // check encoding - BSONEncoder bsonEncoder = new BasicBSONEncoder(); - OutputBuffer buf = new BasicOutputBuffer(); - bsonEncoder.set(buf); - bsonEncoder.putObject(toEncodeAndDecode); - assertEquals(expectedEncodedSize, buf.size()); - assertEquals(expectedHex, toHex(buf.toByteArray())); - bsonEncoder.done(); - - // check decoding - BSONDecoder bsonDecoder = new BasicBSONDecoder(); - 
BSONCallback callback = new BasicBSONCallback(); - int numberOfBytesDecoded = bsonDecoder.decode(new ByteArrayInputStream(buf.toByteArray()), callback); - assertEquals(expectedEncodedSize, numberOfBytesDecoded); - assertEquals(callback.get(), toEncodeAndDecode); - - // I believe this is an obscure way of checking the decoded object is the the one we expect - OutputBuffer buf2 = new BasicOutputBuffer(); - bsonEncoder.set(buf2); - bsonEncoder.putObject((BSONObject) callback.get()); - assertEquals(expectedEncodedSize, buf2.size()); - assertEquals(expectedHex, toHex(buf2.toByteArray())); - } - - @Test - public void testOBBig1() { - BasicOutputBuffer a = new BasicOutputBuffer(); - StringBuilder b = new StringBuilder(); - for (final String x : prepareData()) { - a.write(x.getBytes()); - b.append(x); - } - assertEquals(new String(a.toByteArray(), Charset.forName("UTF-8")), b.toString()); - } - - private List prepareData() { - List data = new ArrayList(); - - for (int x = 8; x < 2048; x *= 2) { - StringBuilder buf = new StringBuilder(); - while (buf.length() < x) { - buf.append(x); - } - data.add(buf.toString()); - } - return data; - } - - @Test(expected = IllegalArgumentException.class) - public void shouldClearCustomEncoders() throws IOException { - // given - BSON.addEncodingHook(TestDate.class, new TestDateTransformer()); - BSONEncoder encoder = new BasicBSONEncoder(); - encoder.set(new BasicOutputBuffer()); - - // when - BSON.clearEncodingHooks(); - encoder.putObject(new BasicBSONObject("date", new TestDate(2009, 1, 23, 10, 53, 42))); - } - - @Test - public void shouldTransformTestDateToUtilDateWithTestDateTransformer() throws IOException { - // given - Transformer transformer = new TestDateTransformer(); - - // when - Object transformedDate = transformer.transform(new TestDate(2009, 1, 23, 10, 53, 42)); - - // then - assertThat(transformedDate, is(instanceOf(java.util.Date.class))); - } - - @Test - public void 
shouldUseCustomEncodersWhenDecodingObjectOfRegisteredClass() throws IOException { - // given - StubTransformer stubTransformer = new StubTransformer(); - BSON.addEncodingHook(TestDate.class, stubTransformer); - BSONEncoder encoder = new BasicBSONEncoder(); - encoder.set(new BasicOutputBuffer()); - - BSONObject document = new BasicBSONObject("date", new TestDate(2009, 1, 23, 10, 53, 42)); - - // when - encoder.putObject(document); - encoder.done(); - - // then - assertThat(stubTransformer.transformCalled, is(true)); - } - - @Test - public void shouldReturnRegisteredCustomEncoders() throws IOException { - // when - Transformer transformer = new TestDateTransformer(); - BSON.addEncodingHook(TestDate.class, transformer); - - // then - assertThat(BSON.hasEncodeHooks(), is(true)); - - List encodingHooks = BSON.getEncodingHooks(TestDate.class); - assertThat(encodingHooks, is(notNullValue())); - assertThat(encodingHooks, is(asList(transformer))); - } - - @Test - public void shouldRemoveSpecificRegisteredCustomEncoders() throws IOException { - Transformer transformer = new TestDateTransformer(); - BSON.addEncodingHook(TestDate.class, transformer); - - // when - BSON.removeEncodingHook(TestDate.class, transformer); - - // then - assertThat(BSON.getEncodingHooks(TestDate.class), not(contains(transformer))); - } - - @Test - public void shouldClearCustomDecoders() throws IOException { - // given - BSON.addDecodingHook(Date.class, new TestDateTransformer()); - byte[] encodedDocument = encodeDocumentToByteArray(new BasicBSONObject("date", new Date())); - BSONCallback bsonCallback = new BasicBSONCallback(); - - // when - BSON.clearDecodingHooks(); - new BasicBSONDecoder().decode(new ByteArrayInputStream(encodedDocument), bsonCallback); - - // then - BSONObject decodedDocument = (BSONObject) bsonCallback.get(); - assertThat(decodedDocument.get("date"), is(instanceOf(java.util.Date.class))); - } - - @Test - public void shouldUseCustomDecodersWhenDecodingObjectOfRegisteredClass() 
throws IOException { - // given - @SuppressWarnings("deprecation") - byte[] encodedDocument = encodeDocumentToByteArray(new BasicBSONObject("date", new Date(2009, 01, 23, 10, 53, 42))); - BSONCallback bsonCallback = new BasicBSONCallback(); - - // when - BSON.addDecodingHook(Date.class, new TestDateTransformer()); - new BasicBSONDecoder().decode(new ByteArrayInputStream(encodedDocument), bsonCallback); - - // then - BSONObject decodedDocument = (BSONObject) bsonCallback.get(); - assertThat(decodedDocument.get("date"), is(instanceOf(TestDate.class))); - assertThat((TestDate) decodedDocument.get("date"), is(new TestDate(2009, 01, 23, 10, 53, 42))); - } - - @Test - public void shouldReturnRegisteredCustomDecoders() throws IOException { - // when - Transformer transformer = new TestDateTransformer(); - BSON.addDecodingHook(Date.class, transformer); - - // then - assertThat(BSON.hasDecodeHooks(), is(true)); - - List decodingHooks = BSON.getDecodingHooks(Date.class); - assertThat(decodingHooks, is(notNullValue())); - assertThat(decodingHooks, is(asList(transformer))); - } - - @Test - public void shouldRemoveSpecificRegisteredCustomDecoders() throws IOException { - // given - Transformer transformer = new TestDateTransformer(); - BSON.addDecodingHook(Date.class, transformer); - - // when - BSON.removeDecodingHook(Date.class, transformer); - - // expect - assertThat(BSON.getDecodingHooks(Date.class), not(contains(transformer))); - } - - @Test - public void testEquals() { - assertThat(new BasicBSONObject("a", 1111111111111111111L), is(not(new BasicBSONObject("a", 1111111111111111112L)))); - assertThat(new BasicBSONObject("a", 100.1D), is(not(new BasicBSONObject("a", 100.2D)))); - assertThat(new BasicBSONObject("a", 100.1F), is(not(new BasicBSONObject("a", 100.2F)))); - assertEquals(new BasicBSONObject("a", 100.1D), new BasicBSONObject("a", 100.1D)); - assertEquals(new BasicBSONObject("a", 100.1F), new BasicBSONObject("a", 100.1F)); - assertEquals(new BasicBSONObject("a", 
100L), new BasicBSONObject("a", 100L)); - } - - @Test - public void testRandomRoundTrips() { - roundTrip(new BasicBSONObject("a", "")); - roundTrip(new BasicBSONObject("a", "a")); - roundTrip(new BasicBSONObject("a", "b")); - } - - private byte[] encodeDocumentToByteArray(final BSONObject document) { - OutputBuffer outputBuffer = new BasicOutputBuffer(); - BSONEncoder encoder = new BasicBSONEncoder(); - encoder.set(outputBuffer); - encoder.putObject(document); - encoder.done(); - return outputBuffer.toByteArray(); - } - - private void roundTrip(final BSONObject o) { - assertEquals(o, BSON.decode(BSON.encode(o))); - } - - @Test - public void testEncodingDecode() { - BasicBSONObject inputDoc = new BasicBSONObject("_id", 1); - byte[] encoded = BSON.encode(inputDoc); - assertEquals(inputDoc, BSON.decode(encoded)); - } - - @Test - public void testToInt() { - assertEquals(1, BSON.toInt(Boolean.TRUE)); - assertEquals(0, BSON.toInt(Boolean.FALSE)); - assertEquals(12, BSON.toInt(12.23f)); - assertEquals(21, BSON.toInt(21.32d)); - assertEquals(13, BSON.toInt(13)); - } - - public static String toHex(final byte[] bytes) { - StringBuilder sb = new StringBuilder(); - for (final byte b : bytes) { - String s = Integer.toHexString(0xff & b); - - if (s.length() < 2) { - sb.append("0"); - } - sb.append(s); - } - return sb.toString(); - } - - - private static class StubTransformer implements Transformer { - - private boolean transformCalled = false; - @Override - public Object transform(final Object objectToTransform) { - transformCalled = true; - return true; - } - - } - - private class TestDateTransformer implements Transformer { - @SuppressWarnings("deprecation") - public Object transform(final Object objectToTransform) { - if (objectToTransform instanceof TestDate) { - TestDate td = (TestDate) objectToTransform; - return new java.util.Date(td.year, td.month, td.date, td.hour, td.minute, td.second); - } else if (objectToTransform instanceof java.util.Date) { - Date d = (Date) 
objectToTransform; - return new TestDate(d.getYear(), d.getMonth(), d.getDate(), d.getHours(), d.getMinutes(), d.getSeconds()); - } else { - return objectToTransform; - } - } - } - - private class TestDate { - private final int year; - private final int month; - private final int date; - private final int hour; - private final int minute; - private final int second; - - TestDate(final int year, final int month, final int date, final int hour, final int minute, final int second) { - this.year = year; - this.month = month; - this.date = date; - this.hour = hour; - this.minute = minute; - this.second = second; - } - - @Override - public boolean equals(final Object other) { - if (this == other) { - return true; - } - if (!(other instanceof TestDate)) { - return false; - } - - TestDate otherTestDate = (TestDate) other; - return (otherTestDate.year == this.year - && otherTestDate.month == this.month - && otherTestDate.date == this.date - && otherTestDate.hour == this.hour - && otherTestDate.minute == this.minute - && otherTestDate.second == this.second - ); - } - - @Override - public int hashCode() { - int result = year; - result = 31 * result + month; - result = 31 * result + date; - result = 31 * result + hour; - result = 31 * result + minute; - result = 31 * result + second; - return result; - } - - @Override - public String toString() { - return year + "-" + month + "-" + date + " " + hour + ":" + minute + ":" + second; - } - } -} diff --git a/bson/src/test/unit/org/bson/BasicBSONDecoderSpecification.groovy b/bson/src/test/unit/org/bson/BasicBSONDecoderSpecification.groovy index 17f91ac2e8a..9f13447e001 100644 --- a/bson/src/test/unit/org/bson/BasicBSONDecoderSpecification.groovy +++ b/bson/src/test/unit/org/bson/BasicBSONDecoderSpecification.groovy @@ -16,6 +16,9 @@ package org.bson +import org.bson.codecs.BsonDocumentCodec +import org.bson.codecs.EncoderContext +import org.bson.io.BasicOutputBuffer import org.bson.types.BSONTimestamp import org.bson.types.Binary 
import org.bson.types.Code @@ -30,14 +33,22 @@ import spock.lang.Unroll import java.util.regex.Pattern +import static org.bson.BasicBSONDecoder.getDefaultUuidRepresentation +import static org.bson.BasicBSONDecoder.setDefaultUuidRepresentation +import static org.bson.BsonBinarySubType.UUID_LEGACY +import static org.bson.BsonBinarySubType.UUID_STANDARD +import static org.bson.UuidRepresentation.JAVA_LEGACY +import static org.bson.UuidRepresentation.STANDARD +import static org.bson.internal.UuidHelper.encodeUuidToBinary + @SuppressWarnings(['LineLength', 'DuplicateMapLiteral', 'UnnecessaryBooleanExpression']) class BasicBSONDecoderSpecification extends Specification { @Subject - private final BSONDecoder bsonDecoder = new BasicBSONDecoder(); + private final BasicBSONDecoder bsonDecoder = new BasicBSONDecoder() def setupSpec() { - Map.metaClass.bitwiseNegate = { new BasicBSONObject(delegate) } + Map.metaClass.bitwiseNegate = { new BasicBSONObject(delegate as Map) } Pattern.metaClass.equals = { Pattern other -> delegate.pattern() == other.pattern() && delegate.flags() == other.flags() } @@ -45,7 +56,7 @@ class BasicBSONDecoderSpecification extends Specification { def 'should decode from input stream'() { setup: - InputStream is = new ByteArrayInputStream((byte[]) [12, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0]); + InputStream is = new ByteArrayInputStream((byte[]) [12, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0]) when: BSONObject document = bsonDecoder.readObject(is) @@ -57,10 +68,10 @@ class BasicBSONDecoderSpecification extends Specification { @Unroll def 'should decode #type'() { expect: - document == bsonDecoder.readObject((byte[]) bytes) + documentWithType as BasicBSONObject == bsonDecoder.readObject((byte[]) bytes) where: - document | bytes + documentWithType | bytes ['d1': -1.01] | [17, 0, 0, 0, 1, 100, 49, 0, 41, 92, -113, -62, -11, 40, -16, -65, 0] ['d2': Float.MIN_VALUE] | [17, 0, 0, 0, 1, 100, 50, 0, 0, 0, 0, 0, 0, 0, -96, 54, 0] ['d3': Double.MAX_VALUE] | [17, 0, 0, 0, 1, 
100, 51, 0, -1, -1, -1, -1, -1, -1, -17, 127, 0] @@ -91,16 +102,17 @@ class BasicBSONDecoderSpecification extends Specification { ['k1': new MinKey()] | [9, 0, 0, 0, -1, 107, 49, 0, 0] ['k2': new MaxKey()] | [9, 0, 0, 0, 127, 107, 50, 0, 0] ['f': Decimal128.parse('0E-6176')] | [24, 0, 0, 0, 19, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + ['u': new UUID(1, 2)] | [29, 0, 0, 0, 5, 117, 0, 16, 0, 0, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0] type = BsonType.findByValue(bytes[4]) } def 'should decode complex structures'() { expect: - document == bsonDecoder.readObject((byte[]) bytes) + complexDocument as BasicBSONObject == bsonDecoder.readObject((byte[]) bytes) where: - document | bytes + complexDocument | bytes ['a': ~['d1': ~['b': true], 'd2': ~['b': false]]] | [39, 0, 0, 0, 3, 97, 0, 31, 0, 0, 0, 3, 100, 49, 0, 9, 0, 0, 0, 8, 98, 0, 1, 0, 3, 100, 50, 0, 9, 0, 0, 0, 8, 98, 0, 0, 0, 0, 0] ['a': [~['b1': true], ~['b2': false]]] | [39, 0, 0, 0, 4, 97, 0, 31, 0, 0, 0, 3, 48, 0, 10, 0, 0, 0, 8, 98, 49, 0, 1, 0, 3, 49, 0, 10, 0, 0, 0, 8, 98, 50, 0, 0, 0, 0, 0] ['a': [[1, 2]]] | [35, 0, 0, 0, 4, 97, 0, 27, 0, 0, 0, 4, 48, 0, 19, 0, 0, 0, 16, 48, 0, 1, 0, 0, 0, 16, 49, 0, 2, 0, 0, 0, 0, 0, 0] @@ -165,4 +177,47 @@ class BasicBSONDecoderSpecification extends Specification { BsonSerializationException | [5, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0] BsonSerializationException | [5, 0, 0, 0, 16, 97, 45, 1, 0, 0, 0, 0] } + + + def 'default value of defaultUuidRepresentation is JAVA_LEGACY'() { + expect: + getDefaultUuidRepresentation() == JAVA_LEGACY + } + + @Unroll + def 'should decode UUID according to default uuid representation'() { + given: + def uuid = new UUID(1, 2) + def output = new BasicOutputBuffer() + new BsonDocumentCodec().encode(new BsonBinaryWriter(output), + new BsonDocument('u', new BsonBinary(uuid, encodedUuidRepresentation)), EncoderContext.builder().build()) + + when: + setDefaultUuidRepresentation(decodedUuidRepresentation) + + then: + 
getDefaultUuidRepresentation() == decodedUuidRepresentation + + when: + def decodedUuid = bsonDecoder.readObject(output.getInternalBuffer()).get('u') + + then: + decodedUuid == expectedUuid + + cleanup: + setDefaultUuidRepresentation(JAVA_LEGACY) + + where: + [encodedUuidRepresentation, decodedUuidRepresentation, expectedUuid] << [ + [JAVA_LEGACY, JAVA_LEGACY, + new UUID(1, 2)], + [JAVA_LEGACY, STANDARD, + new Binary(UUID_LEGACY, encodeUuidToBinary(new UUID(1, 2), JAVA_LEGACY))], + [STANDARD, JAVA_LEGACY, + new Binary(UUID_STANDARD, encodeUuidToBinary(new UUID(1, 2), STANDARD))], + [STANDARD, STANDARD, + new UUID(1, 2)] + + ] + } } diff --git a/bson/src/test/unit/org/bson/BasicBSONEncoderSpecification.groovy b/bson/src/test/unit/org/bson/BasicBSONEncoderSpecification.groovy index 6a453b90dd9..886c784f6d3 100644 --- a/bson/src/test/unit/org/bson/BasicBSONEncoderSpecification.groovy +++ b/bson/src/test/unit/org/bson/BasicBSONEncoderSpecification.groovy @@ -16,6 +16,8 @@ package org.bson +import org.bson.codecs.BsonDocumentCodec +import org.bson.codecs.DecoderContext import org.bson.io.BasicOutputBuffer import org.bson.io.OutputBuffer import org.bson.types.BSONTimestamp @@ -32,8 +34,14 @@ import spock.lang.Specification import spock.lang.Subject import spock.lang.Unroll +import java.nio.ByteBuffer import java.util.regex.Pattern +import static org.bson.BasicBSONEncoder.getDefaultUuidRepresentation +import static org.bson.BasicBSONEncoder.setDefaultUuidRepresentation +import static org.bson.UuidRepresentation.JAVA_LEGACY +import static org.bson.UuidRepresentation.STANDARD + @SuppressWarnings(['LineLength', 'DuplicateMapLiteral']) class BasicBSONEncoderSpecification extends Specification { @@ -45,7 +53,7 @@ class BasicBSONEncoderSpecification extends Specification { } @Subject - private final BSONEncoder bsonEncoder = new BasicBSONEncoder(); + private final BSONEncoder bsonEncoder = new BasicBSONEncoder() @Unroll def 'should encode #aClass'() { @@ -89,6 +97,7 @@ class 
BasicBSONEncoderSpecification extends Specification { ['k': new MinKey()] | [8, 0, 0, 0, -1, 107, 0, 0] ['k': new MaxKey()] | [8, 0, 0, 0, 127, 107, 0, 0] ['f': Decimal128.parse('0E-6176')] | [24, 0, 0, 0, 19, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + ['u': new UUID(1, 2)] | [29, 0, 0, 0, 5, 117, 0, 16, 0, 0, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0] aClass = document.find { true }.value.getClass() } @@ -152,11 +161,11 @@ class BasicBSONEncoderSpecification extends Specification { def 'should throw IllegalStateException on setting buffer while encoder in use'() { given: - bsonEncoder.set(new BasicOutputBuffer()); - bsonEncoder.putObject(new BasicBSONObject()); + bsonEncoder.set(new BasicOutputBuffer()) + bsonEncoder.putObject(new BasicBSONObject()) when: - bsonEncoder.set(new BasicOutputBuffer()); + bsonEncoder.set(new BasicOutputBuffer()) then: thrown(IllegalStateException) @@ -179,4 +188,28 @@ class BasicBSONEncoderSpecification extends Specification { 1 * buffer.writeCString('a') 1 * buffer.writeInt32(2) } + + def 'should encode UUID according to default uuid representation'() { + given: + def defaultUuidRepresentation = getDefaultUuidRepresentation() + def uuid = new UUID(1, 2) + def document = new BasicBSONObject() + document.append('u', uuid) + + when: + setDefaultUuidRepresentation(uuidRepresentation) + def bytes = bsonEncoder.encode(new BasicBSONObject(document)) + def decodedDocument = new BsonDocumentCodec().decode(new BsonBinaryReader(ByteBuffer.wrap(bytes)), + DecoderContext.builder().build()) + + then: + defaultUuidRepresentation == JAVA_LEGACY + decodedDocument.getBinary('u').asUuid(uuidRepresentation) == uuid + + cleanup: + setDefaultUuidRepresentation(defaultUuidRepresentation) + + where: + uuidRepresentation << [JAVA_LEGACY, STANDARD] + } } diff --git a/bson/src/test/unit/org/bson/BinaryVectorTest.java b/bson/src/test/unit/org/bson/BinaryVectorTest.java new file mode 100644 index 00000000000..57e8b294019 --- 
/dev/null +++ b/bson/src/test/unit/org/bson/BinaryVectorTest.java @@ -0,0 +1,179 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class BinaryVectorTest { + + @Test + void shouldCreateInt8Vector() { + // given + byte[] data = {1, 2, 3, 4, 5}; + + // when + Int8BinaryVector vector = BinaryVector.int8Vector(data); + + // then + assertNotNull(vector); + assertEquals(BinaryVector.DataType.INT8, vector.getDataType()); + assertArrayEquals(data, vector.getData()); + } + + @Test + void shouldThrowExceptionWhenCreatingInt8VectorWithNullData() { + // given + byte[] data = null; + + // when & Then + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> BinaryVector.int8Vector(data)); + assertEquals("data can not be null", exception.getMessage()); + } + + @Test + void shouldCreateFloat32Vector() { + // given + float[] data = {1.0f, 2.0f, 3.0f}; + + // when + Float32BinaryVector vector = BinaryVector.floatVector(data); + + // then + assertNotNull(vector); + 
assertEquals(BinaryVector.DataType.FLOAT32, vector.getDataType()); + assertArrayEquals(data, vector.getData()); + } + + @Test + void shouldThrowExceptionWhenCreatingFloat32VectorWithNullData() { + // given + float[] data = null; + + // when & Then + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> BinaryVector.floatVector(data)); + assertEquals("data can not be null", exception.getMessage()); + } + + + @ParameterizedTest(name = "{index}: validPadding={0}") + @ValueSource(bytes = {0, 1, 2, 3, 4, 5, 6, 7}) + void shouldCreatePackedBitVector(final byte validPadding) { + // given + byte[] data = {(byte) 0b10101010, (byte) 0b01010101}; + + // when + PackedBitBinaryVector vector = BinaryVector.packedBitVector(data, validPadding); + + // then + assertNotNull(vector); + assertEquals(BinaryVector.DataType.PACKED_BIT, vector.getDataType()); + assertArrayEquals(data, vector.getData()); + assertEquals(validPadding, vector.getPadding()); + } + + @ParameterizedTest(name = "{index}: invalidPadding={0}") + @ValueSource(bytes = {-1, 8}) + void shouldThrowExceptionWhenPackedBitVectorHasInvalidPadding(final byte invalidPadding) { + // given + byte[] data = {(byte) 0b10101010}; + + // when & Then + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> + BinaryVector.packedBitVector(data, invalidPadding)); + assertEquals("state should be: Padding must be between 0 and 7 bits. 
Provided padding: " + invalidPadding, exception.getMessage()); + } + + @Test + void shouldThrowExceptionWhenPackedBitVectorIsCreatedWithNullData() { + // given + byte[] data = null; + byte padding = 0; + + // when & Then + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> + BinaryVector.packedBitVector(data, padding)); + assertEquals("data can not be null", exception.getMessage()); + } + + @Test + void shouldCreatePackedBitVectorWithZeroPaddingAndEmptyData() { + // given + byte[] data = new byte[0]; + byte padding = 0; + + // when + PackedBitBinaryVector vector = BinaryVector.packedBitVector(data, padding); + + // then + assertNotNull(vector); + assertEquals(BinaryVector.DataType.PACKED_BIT, vector.getDataType()); + assertArrayEquals(data, vector.getData()); + assertEquals(padding, vector.getPadding()); + } + + @Test + void shouldThrowExceptionWhenPackedBitVectorWithNonZeroPaddingAndEmptyData() { + // given + byte[] data = new byte[0]; + byte padding = 1; + + // when & Then + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> + BinaryVector.packedBitVector(data, padding)); + assertEquals("state should be: Padding must be 0 if vector is empty. 
Provided padding: " + padding, exception.getMessage()); + } + + @Test + void shouldThrowExceptionWhenRetrievingInt8DataFromNonInt8Vector() { + // given + float[] data = {1.0f, 2.0f}; + BinaryVector vector = BinaryVector.floatVector(data); + + // when & Then + IllegalStateException exception = assertThrows(IllegalStateException.class, vector::asInt8Vector); + assertEquals("Expected vector data type INT8, but found FLOAT32", exception.getMessage()); + } + + @Test + void shouldThrowExceptionWhenRetrievingFloat32DataFromNonFloat32Vector() { + // given + byte[] data = {1, 2, 3}; + BinaryVector vector = BinaryVector.int8Vector(data); + + // when & Then + IllegalStateException exception = assertThrows(IllegalStateException.class, vector::asFloat32Vector); + assertEquals("Expected vector data type FLOAT32, but found INT8", exception.getMessage()); + } + + @Test + void shouldThrowExceptionWhenRetrievingPackedBitDataFromNonPackedBitVector() { + // given + float[] data = {1.0f, 2.0f}; + BinaryVector vector = BinaryVector.floatVector(data); + + // when & Then + IllegalStateException exception = assertThrows(IllegalStateException.class, vector::asPackedBitVector); + assertEquals("Expected vector data type PACKED_BIT, but found FLOAT32", exception.getMessage()); + } +} diff --git a/bson/src/test/unit/org/bson/io/BitsTest.java b/bson/src/test/unit/org/bson/BitsTest.java similarity index 76% rename from bson/src/test/unit/org/bson/io/BitsTest.java rename to bson/src/test/unit/org/bson/BitsTest.java index 1ff8d6130e4..28d35ffeaa0 100644 --- a/bson/src/test/unit/org/bson/io/BitsTest.java +++ b/bson/src/test/unit/org/bson/BitsTest.java @@ -12,18 +12,23 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
+ * */ -package org.bson.io; +package org.bson; + + -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.Arrays; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + public class BitsTest { @@ -36,7 +41,7 @@ public class BitsTest { @Test public void testReadFullyWithBufferLargerThanExpected() throws IOException { byte[] buffer = new byte[8192]; - Bits.readFully(new ByteArrayInputStream(BYTES), buffer, BYTES.length); + Bits.readFully(new ByteArrayInputStream(BYTES), buffer, 0, BYTES.length); assertArrayEquals(BYTES, Arrays.copyOfRange(buffer, 0, BYTES.length)); } @@ -56,16 +61,20 @@ public void testReadFullyWithBufferEqualsToExpected() throws IOException { assertArrayEquals(BYTES, Arrays.copyOfRange(buffer, offset, BYTES.length + offset)); } - @Test(expected = IllegalArgumentException.class) + @Test public void testReadFullyUsingNotEnoughBigBuffer() throws IOException { - Bits.readFully(new ByteArrayInputStream(BYTES), new byte[2], BYTES.length); + assertThrows(IllegalArgumentException.class, () -> + Bits.readFully(new ByteArrayInputStream(BYTES), new byte[2], 0, BYTES.length) + ); } - @Test(expected = IllegalArgumentException.class) + @Test public void testReadFullyUsingNotEnoughBigBufferWithOffset() throws IOException { - int offset = 10; - byte[] buffer = new byte[BYTES.length]; - Bits.readFully(new ByteArrayInputStream(BYTES), buffer, offset, BYTES.length); + assertThrows(IllegalArgumentException.class, () -> { + int offset = 10; + byte[] buffer = new byte[BYTES.length]; + Bits.readFully(new ByteArrayInputStream(BYTES), buffer, offset, BYTES.length); + }); } @Test @@ -75,7 +84,7 @@ public void testReadInt() { @Test public void 
testReadIntFromInputStream() throws IOException { - assertEquals(41, Bits.readInt(new ByteArrayInputStream(BYTES))); + assertEquals(41, Bits.readInt(new ByteArrayInputStream(BYTES), new byte[4])); } @Test @@ -83,19 +92,16 @@ public void testReadIntWithOffset() { assertEquals(-12, Bits.readInt(BYTES, 8)); } - @Test - public void testReadIntInBigEndianNotation() { - assertEquals(-12, Bits.readIntBE(new byte[]{-1, -1, -1, -12}, 0)); - } - @Test public void testReadLong() { assertEquals(Long.MAX_VALUE, Bits.readLong(BYTES, 24)); } - @Test(expected = ArrayIndexOutOfBoundsException.class) + @Test public void testReadLongWithNotEnoughData() { - Bits.readLong(Arrays.copyOfRange(BYTES, 24, 30), 0); + assertThrows(ArrayIndexOutOfBoundsException.class, () -> + Bits.readLong(Arrays.copyOfRange(BYTES, 24, 30), 0) + ); } } diff --git a/bson/src/test/unit/org/bson/BsonBinaryReaderTest.java b/bson/src/test/unit/org/bson/BsonBinaryReaderTest.java index 54c3d0034e6..bffda74ecaa 100644 --- a/bson/src/test/unit/org/bson/BsonBinaryReaderTest.java +++ b/bson/src/test/unit/org/bson/BsonBinaryReaderTest.java @@ -18,14 +18,14 @@ import org.bson.io.ByteBufferBsonInput; import org.bson.types.ObjectId; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.nio.ByteBuffer; import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; public class BsonBinaryReaderTest { @@ -53,8 +53,7 @@ public void testInvalidBsonType() { reader.readBsonType(); fail("Should have thrown BsonSerializationException"); } catch (BsonSerializationException e) { - assertEquals("Detected unknown BSON type \"\\x16\" for fieldname \"a\". 
Are you using the latest driver version?", - e.getMessage()); + assertEquals("Detected unknown BSON type \"\\x16\" for fieldname \"a\". Are you using the latest driver version?", e.getMessage()); } } @@ -67,7 +66,7 @@ public void testInvalidBsonTypeFollowedByInvalidCString() { reader.readBsonType(); fail("Should have thrown BsonSerializationException"); } catch (BsonSerializationException e) { - assertEquals("While decoding a BSON document 1 bytes were required, but only 0 remain", e.getMessage()); + assertEquals("Found a BSON string that is not null-terminated", e.getMessage()); } } diff --git a/bson/src/test/unit/org/bson/BsonBinarySpecification.groovy b/bson/src/test/unit/org/bson/BsonBinarySpecification.groovy new file mode 100644 index 00000000000..503440daa04 --- /dev/null +++ b/bson/src/test/unit/org/bson/BsonBinarySpecification.groovy @@ -0,0 +1,88 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson + +import spock.lang.Specification +import spock.lang.Unroll + +class BsonBinarySpecification extends Specification { + + @Unroll + def 'should initialize with data'() { + given: + def bsonBinary = new BsonBinary((byte) 80, data as byte[]) + + expect: + data == bsonBinary.getData() + + where: + data << [ + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], + [2, 5, 4, 67, 3, 4, 5, 2, 4, 2, 5, 6, 7, 4, 5, 12], + [34, 24, 56, 76, 3, 4, 1, 12, 1, 9, 8, 7, 56, 46, 3, 9] + ] + } + + @Unroll + def 'should initialize with data and BsonBinarySubType'() { + given: + byte[] data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + def bsonBinary = new BsonBinary(subType, data) + + expect: + subType.getValue() == bsonBinary.getType() + data == bsonBinary.getData() + + where: + subType << [BsonBinarySubType.BINARY, + BsonBinarySubType.FUNCTION, + BsonBinarySubType.MD5, + BsonBinarySubType.OLD_BINARY, + BsonBinarySubType.USER_DEFINED, + BsonBinarySubType.UUID_LEGACY, + BsonBinarySubType.UUID_STANDARD, + BsonBinarySubType.VECTOR] + } + + @Unroll + def 'should initialize with UUID'() { + given: + def bsonBinary = new BsonBinary(uuid) + + expect: + uuid == bsonBinary.asUuid() + + where: + uuid << [UUID.fromString('ffadee18-b533-11e8-96f8-529269fb1459'), + UUID.fromString('a5dc280e-b534-11e8-96f8-529269fb1459'), + UUID.fromString('4ef2a357-cb16-45a6-a6f6-a11ae1972917')] + } + + @Unroll + def 'should initialize with UUID and UUID representation'() { + given: + def uuid = UUID.fromString('ffadee18-b533-11e8-96f8-529269fb1459') + def bsonBinary = new BsonBinary(uuid, uuidRepresentation) + + expect: + uuid == bsonBinary.asUuid(uuidRepresentation) + + where: + uuidRepresentation << [UuidRepresentation.STANDARD, UuidRepresentation.C_SHARP_LEGACY, + UuidRepresentation.JAVA_LEGACY, UuidRepresentation.PYTHON_LEGACY] + } +} diff --git a/bson/src/test/unit/org/bson/BsonBinarySubTypeSpecification.groovy 
b/bson/src/test/unit/org/bson/BsonBinarySubTypeSpecification.groovy index 5376f8203eb..448d63f23fd 100644 --- a/bson/src/test/unit/org/bson/BsonBinarySubTypeSpecification.groovy +++ b/bson/src/test/unit/org/bson/BsonBinarySubTypeSpecification.groovy @@ -31,5 +31,9 @@ class BsonBinarySubTypeSpecification extends Specification { 3 | true 4 | true 5 | false + 6 | false + 7 | false + 8 | false + 9 | false } } diff --git a/bson/src/test/unit/org/bson/BsonBinaryTest.java b/bson/src/test/unit/org/bson/BsonBinaryTest.java new file mode 100644 index 00000000000..b47bcbf8a79 --- /dev/null +++ b/bson/src/test/unit/org/bson/BsonBinaryTest.java @@ -0,0 +1,266 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; + +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +class BsonBinaryTest { + + private static final byte FLOAT32_DTYPE = BinaryVector.DataType.FLOAT32.getValue(); + private static final byte INT8_DTYPE = BinaryVector.DataType.INT8.getValue(); + private static final byte PACKED_BIT_DTYPE = BinaryVector.DataType.PACKED_BIT.getValue(); + public static final int ZERO_PADDING = 0; + + @Test + void shouldThrowExceptionWhenCreatingBsonBinaryWithNullVector() { + // given + BinaryVector vector = null; + + // when & then + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> new BsonBinary(vector)); + assertEquals("Vector must not be null", exception.getMessage()); + } + + @ParameterizedTest + @EnumSource(value = BsonBinarySubType.class, mode = EnumSource.Mode.EXCLUDE, names = {"VECTOR"}) + void shouldThrowExceptionWhenBsonBinarySubTypeIsNotVector(final BsonBinarySubType bsonBinarySubType) { + // given + byte[] data = new byte[]{1, 2, 3, 4}; + BsonBinary bsonBinary = new BsonBinary(bsonBinarySubType.getValue(), data); + + // when & then + BsonInvalidOperationException exception = assertThrows(BsonInvalidOperationException.class, bsonBinary::asVector); + assertEquals("type must be a Vector subtype.", exception.getMessage()); + } + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource("provideFloatVectors") + void shouldEncodeFloatVector(final BinaryVector 
actualFloat32Vector, final byte[] expectedBsonEncodedVector) { + // when + BsonBinary actualBsonBinary = new BsonBinary(actualFloat32Vector); + byte[] actualBsonEncodedVector = actualBsonBinary.getData(); + + // then + assertEquals(BsonBinarySubType.VECTOR.getValue(), actualBsonBinary.getType(), "The subtype must be VECTOR"); + assertArrayEquals(expectedBsonEncodedVector, actualBsonEncodedVector); + } + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource("provideFloatVectors") + void shouldDecodeFloatVector(final Float32BinaryVector expectedFloatVector, final byte[] bsonEncodedVector) { + // when + Float32BinaryVector decodedVector = (Float32BinaryVector) new BsonBinary(BsonBinarySubType.VECTOR, bsonEncodedVector).asVector(); + + // then + assertEquals(expectedFloatVector, decodedVector); + } + + private static Stream provideFloatVectors() { + return Stream.of( + arguments( + BinaryVector.floatVector(new float[]{1.1f, 2.2f, 3.3f, -1.0f, Float.MAX_VALUE, Float.MIN_VALUE, Float.POSITIVE_INFINITY, + Float.NEGATIVE_INFINITY}), + new byte[]{FLOAT32_DTYPE, ZERO_PADDING, + (byte) 205, (byte) 204, (byte) 140, (byte) 63, // 1.1f in little-endian + (byte) 205, (byte) 204, (byte) 12, (byte) 64, // 2.2f in little-endian + (byte) 51, (byte) 51, (byte) 83, (byte) 64, // 3.3f in little-endian + (byte) 0, (byte) 0, (byte) 128, (byte) 191, // -1.0f in little-endian + (byte) 255, (byte) 255, (byte) 127, (byte) 127, // Float.MAX_VALUE in little-endian + (byte) 1, (byte) 0, (byte) 0, (byte) 0, // Float.MIN_VALUE in little-endian + (byte) 0, (byte) 0, (byte) 128, (byte) 127, // Float.POSITIVE_INFINITY in little-endian + (byte) 0, (byte) 0, (byte) 128, (byte) 255 // Float.NEGATIVE_INFINITY in little-endian + } + ), + arguments( + BinaryVector.floatVector(new float[]{0.0f}), + new byte[]{FLOAT32_DTYPE, ZERO_PADDING, + (byte) 0, (byte) 0, (byte) 0, (byte) 0 // 0.0f in little-endian + } + ), + arguments( + BinaryVector.floatVector(new float[]{}), + new byte[]{FLOAT32_DTYPE, 
ZERO_PADDING} + ) + ); + } + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource("provideInt8Vectors") + void shouldEncodeInt8Vector(final BinaryVector actualInt8Vector, final byte[] expectedBsonEncodedVector) { + // when + BsonBinary actualBsonBinary = new BsonBinary(actualInt8Vector); + byte[] actualBsonEncodedVector = actualBsonBinary.getData(); + + // then + assertEquals(BsonBinarySubType.VECTOR.getValue(), actualBsonBinary.getType(), "The subtype must be VECTOR"); + assertArrayEquals(expectedBsonEncodedVector, actualBsonEncodedVector); + } + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource("provideInt8Vectors") + void shouldDecodeInt8Vector(final Int8BinaryVector expectedInt8Vector, final byte[] bsonEncodedVector) { + // when + Int8BinaryVector decodedVector = (Int8BinaryVector) new BsonBinary(BsonBinarySubType.VECTOR, bsonEncodedVector).asVector(); + + // then + assertEquals(expectedInt8Vector, decodedVector); + } + + private static Stream provideInt8Vectors() { + return Stream.of( + arguments( + BinaryVector.int8Vector(new byte[]{Byte.MAX_VALUE, 1, 2, 3, 4, Byte.MIN_VALUE}), + new byte[]{INT8_DTYPE, ZERO_PADDING, Byte.MAX_VALUE, 1, 2, 3, 4, Byte.MIN_VALUE + }), + arguments(BinaryVector.int8Vector(new byte[]{}), + new byte[]{INT8_DTYPE, ZERO_PADDING} + ) + ); + } + + @ParameterizedTest + @MethodSource("providePackedBitVectors") + void shouldEncodePackedBitVector(final BinaryVector actualPackedBitVector, final byte[] expectedBsonEncodedVector) { + // when + BsonBinary actualBsonBinary = new BsonBinary(actualPackedBitVector); + byte[] actualBsonEncodedVector = actualBsonBinary.getData(); + + // then + assertEquals(BsonBinarySubType.VECTOR.getValue(), actualBsonBinary.getType(), "The subtype must be VECTOR"); + assertArrayEquals(expectedBsonEncodedVector, actualBsonEncodedVector); + } + + @ParameterizedTest + @MethodSource("providePackedBitVectors") + void shouldDecodePackedBitVector(final PackedBitBinaryVector expectedPackedBitVector, final 
byte[] bsonEncodedVector) { + // when + PackedBitBinaryVector decodedVector = (PackedBitBinaryVector) new BsonBinary(BsonBinarySubType.VECTOR, bsonEncodedVector).asVector(); + + // then + assertEquals(expectedPackedBitVector, decodedVector); + } + + private static Stream providePackedBitVectors() { + return Stream.of( + arguments( + BinaryVector.packedBitVector(new byte[]{(byte) 0, (byte) 255, (byte) 10}, (byte) 2), + new byte[]{PACKED_BIT_DTYPE, 2, (byte) 0, (byte) 255, (byte) 10} + ), + arguments( + BinaryVector.packedBitVector(new byte[0], (byte) 0), + new byte[]{PACKED_BIT_DTYPE, 0} + )); + } + + @Test + void shouldThrowExceptionForInvalidFloatArrayLengthWhenDecode() { + // given + byte[] invalidData = {FLOAT32_DTYPE, 0, 10, 20, 30}; + + // when & Then + BsonInvalidOperationException thrown = assertThrows(BsonInvalidOperationException.class, () -> { + new BsonBinary(BsonBinarySubType.VECTOR, invalidData).asVector(); + }); + assertEquals("Byte array length must be a multiple of 4 for FLOAT32 data type, but found: " + invalidData.length, + thrown.getMessage()); + } + + @ParameterizedTest + @ValueSource(ints = {0, 1}) + void shouldThrowExceptionWhenEncodedVectorLengthIsLessThenMetadataLength(final int encodedVectorLength) { + // given + byte[] invalidData = new byte[encodedVectorLength]; + + // when & Then + BsonInvalidOperationException thrown = assertThrows(BsonInvalidOperationException.class, () -> { + new BsonBinary(BsonBinarySubType.VECTOR, invalidData).asVector(); + }); + assertEquals("Vector encoded array length must be at least 2, but found: " + encodedVectorLength, + thrown.getMessage()); + } + + @ParameterizedTest + @ValueSource(bytes = {-1, 1}) + void shouldThrowExceptionForInvalidFloatArrayPaddingWhenDecode(final byte invalidPadding) { + // given + byte[] invalidData = {FLOAT32_DTYPE, invalidPadding, 10, 20, 30, 20}; + + // when & Then + BsonInvalidOperationException thrown = assertThrows(BsonInvalidOperationException.class, () -> { + new 
BsonBinary(BsonBinarySubType.VECTOR, invalidData).asVector(); + }); + assertEquals("Padding must be 0 for FLOAT32 data type, but found: " + invalidPadding, thrown.getMessage()); + } + + @ParameterizedTest + @ValueSource(bytes = {-1, 1}) + void shouldThrowExceptionForInvalidInt8ArrayPaddingWhenDecode(final byte invalidPadding) { + // given + byte[] invalidData = {INT8_DTYPE, invalidPadding, 10, 20, 30, 20}; + + // when & Then + BsonInvalidOperationException thrown = assertThrows(BsonInvalidOperationException.class, () -> { + new BsonBinary(BsonBinarySubType.VECTOR, invalidData).asVector(); + }); + assertEquals("Padding must be 0 for INT8 data type, but found: " + invalidPadding, thrown.getMessage()); + } + + @ParameterizedTest + @ValueSource(bytes = {-1, 8}) + void shouldThrowExceptionForInvalidPackedBitArrayPaddingWhenDecode(final byte invalidPadding) { + // given + byte[] invalidData = {PACKED_BIT_DTYPE, invalidPadding, 10, 20, 30, 20}; + + // when & then + BsonInvalidOperationException thrown = assertThrows(BsonInvalidOperationException.class, () -> { + new BsonBinary(BsonBinarySubType.VECTOR, invalidData).asVector(); + }); + assertEquals("Padding must be between 0 and 7 bits, but found: " + invalidPadding, thrown.getMessage()); + } + + @ParameterizedTest + @ValueSource(bytes = {-1, 1, 2, 3, 4, 5, 6, 7, 8}) + void shouldThrowExceptionForInvalidPackedBitArrayPaddingWhenDecodeEmptyVector(final byte invalidPadding) { + // given + byte[] invalidData = {PACKED_BIT_DTYPE, invalidPadding}; + + // when & Then + BsonInvalidOperationException thrown = assertThrows(BsonInvalidOperationException.class, () -> { + new BsonBinary(BsonBinarySubType.VECTOR, invalidData).asVector(); + }); + assertEquals("Padding must be 0 if vector is empty, but found: " + invalidPadding, thrown.getMessage()); + } + + @Test + void shouldThrowWhenUnknownVectorDType() { + // when + BsonBinary bsonBinary = new BsonBinary(BsonBinarySubType.VECTOR, new byte[]{(byte) 0}); + 
assertThrows(BsonInvalidOperationException.class, bsonBinary::asVector); + } +} diff --git a/bson/src/test/unit/org/bson/BsonBinaryWriterTest.java b/bson/src/test/unit/org/bson/BsonBinaryWriterTest.java index 450647eda13..0b067fc816f 100644 --- a/bson/src/test/unit/org/bson/BsonBinaryWriterTest.java +++ b/bson/src/test/unit/org/bson/BsonBinaryWriterTest.java @@ -19,34 +19,41 @@ import org.bson.io.BasicOutputBuffer; import org.bson.io.ByteBufferBsonInput; import org.bson.types.ObjectId; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.List; import static java.util.Arrays.asList; import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class BsonBinaryWriterTest { + private static final byte FLOAT32_DTYPE = BinaryVector.DataType.FLOAT32.getValue(); + private static final int ZERO_PADDING = 0; + private BsonBinaryWriter writer; private BasicOutputBuffer buffer; - @Before + @BeforeEach public void setup() { buffer = new BasicOutputBuffer(); - writer = new BsonBinaryWriter(new BsonWriterSettings(100), new BsonBinaryWriterSettings(1024), buffer); + writer = new BsonBinaryWriter(new 
BsonWriterSettings(100), new BsonBinaryWriterSettings(12904), buffer); } - @After + @AfterEach public void tearDown() { writer.close(); } @@ -55,11 +62,11 @@ public void tearDown() { public void shouldThrowWhenMaxDocumentSizeIsExceeded() { try { writer.writeStartDocument(); - writer.writeBinaryData("b", new BsonBinary(new byte[1024])); + writer.writeBinaryData("b", new BsonBinary(new byte[12904])); writer.writeEndDocument(); fail(); } catch (BsonMaximumSizeExceededException e) { - assertEquals("Document size of 1037 is larger than maximum of 1024.", e.getMessage()); + assertEquals("Document size of 12917 is larger than maximum of 12904.", e.getMessage()); } } @@ -101,10 +108,10 @@ public void testWriteAndReadBoolean() { reader.readStartDocument(); assertThat(reader.readBsonType(), is(BsonType.BOOLEAN)); assertEquals("b1", reader.readName()); - assertEquals(true, reader.readBoolean()); + assertTrue(reader.readBoolean()); assertThat(reader.readBsonType(), is(BsonType.BOOLEAN)); assertEquals("b2", reader.readName()); - assertEquals(false, reader.readBoolean()); + assertFalse(reader.readBoolean()); reader.readEndDocument(); } @@ -191,16 +198,39 @@ public void testWriteArray() { } @Test - public void testWriteArrayElements() { + public void testWriteArrayElements() throws IOException { + ByteArrayOutputStream expectedOutput = new ByteArrayOutputStream(); + expectedOutput.write(new byte[]{ + -52, 25, 0, 0, //document length + 4, // array type + 97, 49, 0, // "a1" name + null terminator + -61, 25, 0, 0}); // array length + writer.writeStartDocument(); writer.writeStartArray("a1"); - writer.writeBoolean(true); - writer.writeBoolean(false); + int arrayIndex = 0; + while (arrayIndex < 1100) { + writer.writeBoolean(true); + + expectedOutput.write(BsonType.BOOLEAN.getValue()); + expectedOutput.write(Integer.toString(arrayIndex++).getBytes(StandardCharsets.UTF_8)); + expectedOutput.write(0); // null terminator + expectedOutput.write(1); // boolean value + + 
writer.writeBoolean(false); + + expectedOutput.write(BsonType.BOOLEAN.getValue()); + expectedOutput.write(Integer.toString(arrayIndex++).getBytes(StandardCharsets.UTF_8)); + expectedOutput.write(0); // null terminator + expectedOutput.write(0); // boolean value + } writer.writeEndArray(); + expectedOutput.write(0); // end of array writer.writeEndDocument(); - byte[] expectedValues = {22, 0, 0, 0, 4, 97, 49, 0, 13, 0, 0, 0, 8, 48, 0, 1, 8, 49, 0, 0, 0, 0}; - assertArrayEquals(expectedValues, buffer.toByteArray()); + expectedOutput.write(0); // end of a document + + assertArrayEquals(expectedOutput.toByteArray(), buffer.toByteArray()); } @Test @@ -296,12 +326,38 @@ public void testWriteBinary() { writer.writeBinaryData("b1", new BsonBinary(new byte[]{0, 0, 0, 0, 0, 0, 0, 0})); writer.writeBinaryData("b2", new BsonBinary(BsonBinarySubType.OLD_BINARY, new byte[]{1, 1, 1, 1, 1})); writer.writeBinaryData("b3", new BsonBinary(BsonBinarySubType.FUNCTION, new byte[]{})); + writer.writeBinaryData("b4", new BsonBinary(BsonBinarySubType.VECTOR, new byte[]{FLOAT32_DTYPE, ZERO_PADDING, + (byte) 205, (byte) 204, (byte) 140, (byte) 63})); writer.writeEndDocument(); + byte[] expectedValues = new byte[]{ + 64, // total document length + 0, 0, 0, + + //Binary + (byte) BsonType.BINARY.getValue(), + 98, 49, 0, // name "b1" + 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + + // Old binary + (byte) BsonType.BINARY.getValue(), + 98, 50, 0, // name "b2" + 9, 0, 0, 0, 2, 5, 0, 0, 0, 1, 1, 1, 1, 1, + + // Function binary + (byte) BsonType.BINARY.getValue(), + 98, 51, 0, // name "b3" + 0, 0, 0, 0, 1, + + //Vector binary + (byte) BsonType.BINARY.getValue(), + 98, 52, 0, // name "b4" + 6, 0, 0, 0, // total length, int32 (little endian) + BsonBinarySubType.VECTOR.getValue(), FLOAT32_DTYPE, ZERO_PADDING, (byte) 205, (byte) 204, (byte) 140, 63, + + 0 //end of document + }; - byte[] expectedValues = {49, 0, 0, 0, 5, 98, 49, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 98, 50, 0, - 9, 0, - 0, 0, 2, 5, 0, 
0, 0, 1, 1, 1, 1, 1, 5, 98, 51, 0, 0, 0, 0, 0, 1, 0}; assertArrayEquals(expectedValues, buffer.toByteArray()); } @@ -354,6 +410,36 @@ public void testWriteDBPointer() { reader.readEndDocument(); } + @Test + public void testNullByteInTopLevelName() { + writer.writeStartDocument(); + writer.writeName("a\u0000b"); + assertThrows(BsonSerializationException.class, () -> writer.writeBoolean(true)); + } + + @Test + public void testNullByteInNestedName() { + writer.writeStartDocument(); + writer.writeName("nested"); + writer.writeStartDocument(); + writer.writeName("a\u0000b"); + assertThrows(BsonSerializationException.class, () -> writer.writeBoolean(true)); + } + + @Test + public void testNullByteInRegularExpressionPattern() { + writer.writeStartDocument(); + writer.writeName("regex"); + assertThrows(BsonSerializationException.class, () -> writer.writeRegularExpression(new BsonRegularExpression("a\u0000b"))); + } + + @Test + public void testNullByteInRegularExpressionOptions() { + writer.writeStartDocument(); + writer.writeName("regex"); + assertThrows(BsonSerializationException.class, () -> writer.writeRegularExpression(new BsonRegularExpression("a*", "i\u0000"))); + } + @Test //CHECKSTYLE:OFF public void testWriteRead() throws IOException { @@ -393,19 +479,17 @@ public void testWriteRead() throws IOException { ByteBufferBsonInput basicInputBuffer = new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(baos.toByteArray()))); - BsonBinaryReader reader = new BsonBinaryReader(basicInputBuffer); - - try { + try (BsonBinaryReader reader = new BsonBinaryReader(basicInputBuffer)) { assertEquals(BsonType.DOCUMENT, reader.readBsonType()); reader.readStartDocument(); { assertEquals(BsonType.BOOLEAN, reader.readBsonType()); assertEquals("b1", reader.readName()); - assertEquals(true, reader.readBoolean()); + assertTrue(reader.readBoolean()); assertEquals(BsonType.BOOLEAN, reader.readBsonType()); assertEquals("b2", reader.readName()); - assertEquals(false, reader.readBoolean()); + 
assertFalse(reader.readBoolean()); assertEquals(BsonType.ARRAY, reader.readBsonType()); assertEquals("a1", reader.readName()); @@ -458,8 +542,6 @@ public void testWriteRead() throws IOException { reader.readEndDocument(); } - } finally { - reader.close(); } } //CHECKSTYLE:ON @@ -473,16 +555,10 @@ public void testPipe() { byte[] bytes = buffer.toByteArray(); BasicOutputBuffer newBuffer = new BasicOutputBuffer(); - BsonBinaryWriter newWriter = new BsonBinaryWriter(newBuffer); - try { - BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(bytes)))); - try { + try (BsonBinaryWriter newWriter = new BsonBinaryWriter(newBuffer)) { + try (BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(bytes))))) { newWriter.pipe(reader); - } finally { - reader.close(); } - } finally { - newWriter.close(); } assertArrayEquals(bytes, newBuffer.toByteArray()); } @@ -520,7 +596,7 @@ public void testPipeNestedDocument() { reader2.readStartDocument(); //checking what writer piped assertEquals(BsonType.BOOLEAN, reader2.readBsonType()); assertEquals("a", reader2.readName()); - assertEquals(true, reader2.readBoolean()); + assertTrue(reader2.readBoolean()); reader2.readEndDocument(); } @@ -661,17 +737,10 @@ public void testPipeWithExtraElements() { ); BasicOutputBuffer newBuffer = new BasicOutputBuffer(); - BsonBinaryWriter newWriter = new BsonBinaryWriter(newBuffer); - try { - BsonBinaryReader reader = - new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(pipedBuffer.toByteArray())))); - try { + try (BsonBinaryWriter newWriter = new BsonBinaryWriter(newBuffer)) { + try (BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(pipedBuffer.toByteArray()))))) { newWriter.pipe(reader, extraElements); - } finally { - reader.close(); } - } finally { - newWriter.close(); } assertArrayEquals(bytes, newBuffer.toByteArray()); } @@ -707,21 
+776,14 @@ public void testPipeOfNestedDocumentWithExtraElements() { ); BasicOutputBuffer newBuffer = new BasicOutputBuffer(); - BsonBinaryWriter newWriter = new BsonBinaryWriter(newBuffer); - try { - BsonBinaryReader reader = - new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(pipedBuffer.toByteArray())))); - try { + try (BsonBinaryWriter newWriter = new BsonBinaryWriter(newBuffer)) { + try (BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(pipedBuffer.toByteArray()))))) { newWriter.writeStartDocument(); newWriter.writeName("nested"); newWriter.pipe(reader, extraElements); newWriter.writeBoolean("b", true); newWriter.writeEndDocument(); - } finally { - reader.close(); } - } finally { - newWriter.close(); } byte[] actualBytes = newBuffer.toByteArray(); assertArrayEquals(bytes, actualBytes); @@ -732,20 +794,13 @@ public void testPipeOfDocumentWithInvalidSize() { byte[] bytes = {4, 0, 0, 0}; // minimum document size is 5; BasicOutputBuffer newBuffer = new BasicOutputBuffer(); - BsonBinaryWriter newWriter = new BsonBinaryWriter(newBuffer); - try { - BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(bytes)))); - try { + try (BsonBinaryWriter newWriter = new BsonBinaryWriter(newBuffer)) { + try (BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(bytes))))) { newWriter.pipe(reader); fail("Pipe is expected to fail with document size is < 5"); } catch (BsonSerializationException e) { // expected } - finally { - reader.close(); - } - } finally { - newWriter.close(); } } @@ -780,9 +835,7 @@ public void testMarkAndReset() throws IOException { ByteBufferBsonInput basicInputBuffer = new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(baos.toByteArray()))); - BsonBinaryReader reader = new BsonBinaryReader(basicInputBuffer); - - try { + try (BsonBinaryReader reader = new 
BsonBinaryReader(basicInputBuffer)) { reader.readStartDocument(); reader.readName("a"); reader.readStartArray(); @@ -798,10 +851,6 @@ public void testMarkAndReset() throws IOException { } reader.readEndArray(); reader.readEndDocument(); - } finally - - { - reader.close(); } } // CHECKSTYLE:ON diff --git a/bson/src/test/unit/org/bson/BsonDocumentTest.java b/bson/src/test/unit/org/bson/BsonDocumentTest.java index e9029022713..32d56166f12 100644 --- a/bson/src/test/unit/org/bson/BsonDocumentTest.java +++ b/bson/src/test/unit/org/bson/BsonDocumentTest.java @@ -23,51 +23,50 @@ import org.bson.json.JsonReader; import org.bson.json.JsonWriter; import org.bson.json.JsonWriterSettings; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.StringWriter; import java.util.Arrays; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; // Don't convert to Spock, as Groovy intercepts equals/hashCode methods that we are trying to test public class BsonDocumentTest { - private BsonDocument emptyDocument = new BsonDocument(); - private BsonDocument emptyRawDocument = new RawBsonDocument(emptyDocument, new BsonDocumentCodec()); - private BsonDocument document = new BsonDocument() + private final BsonDocument emptyDocument = new BsonDocument(); + private final BsonDocument emptyRawDocument = new RawBsonDocument(emptyDocument, new BsonDocumentCodec()); + private final BsonDocument document = new BsonDocument() .append("a", new BsonInt32(1)) .append("b", new BsonInt32(2)) .append("c", new BsonDocument("x", BsonBoolean.TRUE)) - .append("d", new BsonArray(Arrays.asList(new BsonDocument("y", + .append("d", new BsonArray(Arrays.asList(new BsonDocument("y", BsonBoolean.FALSE), new BsonInt32(1)))); - private BsonDocument rawDocument = new RawBsonDocument(document, new 
BsonDocumentCodec()); + private final BsonDocument rawDocument = new RawBsonDocument(document, new BsonDocumentCodec()); @Test public void shouldBeEqualToItself() { - assertTrue(emptyDocument.equals(emptyDocument)); - assertTrue(document.equals(document)); + assertEquals(emptyDocument, emptyDocument); + assertEquals(document, document); } @Test public void shouldBeEqualToEquivalentBsonDocument() { - assertTrue(emptyDocument.equals(emptyRawDocument)); - assertTrue(document.equals(rawDocument)); - assertTrue(emptyRawDocument.equals(emptyDocument)); - assertTrue(rawDocument.equals(document)); + assertEquals(emptyDocument, emptyRawDocument); + assertEquals(document, rawDocument); + assertEquals(emptyRawDocument, emptyDocument); + assertEquals(rawDocument, document); } @Test public void shouldNotBeEqualToDifferentBsonDocument() { // expect - assertFalse(emptyDocument.equals(document)); - assertFalse(document.equals(emptyRawDocument)); - assertFalse(document.equals(emptyRawDocument)); - assertFalse(emptyRawDocument.equals(document)); - assertFalse(rawDocument.equals(emptyDocument)); + assertNotEquals(emptyDocument, document); + assertNotEquals(document, emptyRawDocument); + assertNotEquals(document, emptyRawDocument); + assertNotEquals(emptyRawDocument, document); + assertNotEquals(rawDocument, emptyDocument); } @Test @@ -79,8 +78,7 @@ public void shouldHaveSameHashCodeAsEquivalentBsonDocument() { @Test public void toJsonShouldReturnEquivalent() { - assertEquals(new BsonDocumentCodec().decode(new JsonReader(document.toJson()), DecoderContext.builder().build()), - document); + assertEquals(new BsonDocumentCodec().decode(new JsonReader(document.toJson()), DecoderContext.builder().build()), document); } @Test diff --git a/bson/src/test/unit/org/bson/BsonDocumentWrapperSpecification.groovy b/bson/src/test/unit/org/bson/BsonDocumentWrapperSpecification.groovy index b3e3ba2ea53..6c11a99f4bd 100644 --- a/bson/src/test/unit/org/bson/BsonDocumentWrapperSpecification.groovy +++ 
b/bson/src/test/unit/org/bson/BsonDocumentWrapperSpecification.groovy @@ -26,7 +26,7 @@ class BsonDocumentWrapperSpecification extends Specification { .append('a', 1) .append('b', 2) .append('c', asList('x', true)) - .append('d', asList(new Document('y', false), 1)); + .append('d', asList(new Document('y', false), 1)) def wrapper = new BsonDocumentWrapper(document, new DocumentCodec()) diff --git a/bson/src/test/unit/org/bson/BsonDocumentWriterSpecification.groovy b/bson/src/test/unit/org/bson/BsonDocumentWriterSpecification.groovy index 125628272ed..924f1f66368 100644 --- a/bson/src/test/unit/org/bson/BsonDocumentWriterSpecification.groovy +++ b/bson/src/test/unit/org/bson/BsonDocumentWriterSpecification.groovy @@ -26,7 +26,7 @@ class BsonDocumentWriterSpecification extends Specification { def 'should write all types'() { when: - def encodedDoc = new BsonDocument(); + def encodedDoc = new BsonDocument() new BsonDocumentCodec().encode(new BsonDocumentWriter(encodedDoc), documentWithValuesOfEveryType(), EncoderContext.builder().build()) diff --git a/bson/src/test/unit/org/bson/BsonHelper.java b/bson/src/test/unit/org/bson/BsonHelper.java index 29920f6651e..59fdba474a2 100644 --- a/bson/src/test/unit/org/bson/BsonHelper.java +++ b/bson/src/test/unit/org/bson/BsonHelper.java @@ -17,10 +17,12 @@ package org.bson; import org.bson.codecs.BsonDocumentCodec; +import org.bson.codecs.DecoderContext; import org.bson.codecs.EncoderContext; import org.bson.io.BasicOutputBuffer; import org.bson.types.Decimal128; import org.bson.types.ObjectId; +import util.Hex; import java.nio.ByteBuffer; import java.util.Date; @@ -34,7 +36,7 @@ public final class BsonHelper { private static final Date DATE = new Date(); private static final ObjectId OBJECT_ID = new ObjectId(); - private static List getBsonValues() { + public static List getBsonValues() { return asList( new BsonNull(), new BsonInt32(42), @@ -109,4 +111,23 @@ public static ByteBuffer toBson(final BsonDocument document) { private 
BsonHelper() { } + + public static BsonDocument decodeToDocument(final String subjectHex, final String description) { + ByteBuffer byteBuffer = ByteBuffer.wrap(Hex.decode(subjectHex)); + BsonDocument actualDecodedDocument = new BsonDocumentCodec().decode(new BsonBinaryReader(byteBuffer), + DecoderContext.builder().build()); + + if (byteBuffer.hasRemaining()) { + throw new BsonSerializationException(format("Should have consumed all bytes, but " + byteBuffer.remaining() + + " still remain in the buffer for document with description ", + description)); + } + return actualDecodedDocument; + } + + public static String encodeToHex(final BsonDocument decodedDocument) { + BasicOutputBuffer outputBuffer = new BasicOutputBuffer(); + new BsonDocumentCodec().encode(new BsonBinaryWriter(outputBuffer), decodedDocument, EncoderContext.builder().build()); + return Hex.encode(outputBuffer.toByteArray()); + } } diff --git a/bson/src/test/unit/org/bson/BsonValueSpecification.groovy b/bson/src/test/unit/org/bson/BsonValueSpecification.groovy index 1313bca7edf..e23b1c43305 100644 --- a/bson/src/test/unit/org/bson/BsonValueSpecification.groovy +++ b/bson/src/test/unit/org/bson/BsonValueSpecification.groovy @@ -29,6 +29,7 @@ class BsonValueSpecification extends Specification { new BsonInt64(52L).isInt64() new BsonInt64(52L).isNumber() new BsonDecimal128(Decimal128.parse('1')).isDecimal128() + new BsonDecimal128(Decimal128.parse('1')).isNumber() new BsonDouble(62.0).isDouble() new BsonDouble(62.0).isNumber() new BsonBoolean(true).isBoolean() @@ -71,7 +72,26 @@ class BsonValueSpecification extends Specification { !new BsonNull().isDocument() } - def 'as methods should return false for the incorrect type'() { + def 'support BsonNumber interface for all number types'() { + expect: + bsonValue.asNumber() == bsonValue + bsonValue.asNumber().intValue()== intValue + bsonValue.asNumber().longValue() == longValue + bsonValue.asNumber().doubleValue() == doubleValue + 
bsonValue.asNumber().decimal128Value() == decimal128Value + + where: + bsonValue | intValue | longValue | doubleValue | decimal128Value + new BsonInt32(42) | 42 | 42L | 42.0 | Decimal128.parse('42') + new BsonInt64(42) | 42 | 42L | 42.0 | Decimal128.parse('42') + new BsonDouble(42) | 42 | 42L | 42.0 | Decimal128.parse('42') + new BsonDecimal128(Decimal128.parse('42')) | 42 | 42L | 42.0 | Decimal128.parse('42') + new BsonDecimal128(Decimal128.POSITIVE_INFINITY) | Integer.MAX_VALUE | Long.MAX_VALUE | Double.POSITIVE_INFINITY | Decimal128.POSITIVE_INFINITY + new BsonDecimal128(Decimal128.NEGATIVE_INFINITY) | Integer.MIN_VALUE | Long.MIN_VALUE | Double.NEGATIVE_INFINITY | Decimal128.NEGATIVE_INFINITY + new BsonDecimal128(Decimal128.NaN) | 0 | 0L | Double.NaN | Decimal128.NaN + } + + def 'as methods should return throw for the incorrect type'() { when: new BsonNull().asInt32() diff --git a/bson/src/test/unit/org/bson/BsonWriterSpecification.groovy b/bson/src/test/unit/org/bson/BsonWriterSpecification.groovy index d85cb47bcaf..05fa945a87f 100644 --- a/bson/src/test/unit/org/bson/BsonWriterSpecification.groovy +++ b/bson/src/test/unit/org/bson/BsonWriterSpecification.groovy @@ -1,7 +1,7 @@ /* * Copyright 2008-present MongoDB, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); + * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * @@ -17,14 +17,13 @@ package org.bson import org.bson.io.BasicOutputBuffer -import org.junit.Test import spock.lang.Specification class BsonWriterSpecification extends Specification { - def shouldThrowExceptionForBooleanWhenWritingBeforeStartingDocument() { + def 'shouldThrowExceptionForBooleanWhenWritingBeforeStartingDocument'() { when: - writer.writeBoolean('b1', true); + writer.writeBoolean('b1', true) then: thrown(BsonInvalidOperationException) @@ -33,9 +32,9 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowExceptionForArrayWhenWritingBeforeStartingDocument() { + def 'shouldThrowExceptionForArrayWhenWritingBeforeStartingDocument'() { when: - writer.writeStartArray(); + writer.writeStartArray() then: thrown(BsonInvalidOperationException) @@ -44,9 +43,9 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowExceptionForNullWhenWritingBeforeStartingDocument() { + def 'shouldThrowExceptionForNullWhenWritingBeforeStartingDocument'() { when: - writer.writeNull(); + writer.writeNull() then: thrown(BsonInvalidOperationException) @@ -55,10 +54,10 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowExceptionForStringWhenStateIsValue() { + def 'shouldThrowExceptionForStringWhenStateIsValue'() { when: - writer.writeStartDocument(); - writer.writeString('SomeString'); + writer.writeStartDocument() + writer.writeString('SomeString') then: thrown(BsonInvalidOperationException) @@ -67,10 +66,10 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def 
shouldThrowExceptionWhenEndingAnArrayWhenStateIsValue() { + def 'shouldThrowExceptionWhenEndingAnArrayWhenStateIsValue'() { when: - writer.writeStartDocument(); - writer.writeEndArray(); + writer.writeStartDocument() + writer.writeEndArray() then: thrown(BsonInvalidOperationException) @@ -79,11 +78,11 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowExceptionWhenWritingASecondName() { + def 'shouldThrowExceptionWhenWritingASecondName'() { when: - writer.writeStartDocument(); - writer.writeName('f1'); - writer.writeName('i2'); + writer.writeStartDocument() + writer.writeName('f1') + writer.writeName('i2') then: thrown(BsonInvalidOperationException) @@ -92,11 +91,11 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowExceptionWhenEndingADocumentBeforeValueIsWritten() { + def 'shouldThrowExceptionWhenEndingADocumentBeforeValueIsWritten'() { when: - writer.writeStartDocument(); - writer.writeName('f1'); - writer.writeEndDocument(); + writer.writeStartDocument() + writer.writeName('f1') + writer.writeEndDocument() then: thrown(BsonInvalidOperationException) @@ -105,12 +104,12 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnExceptionWhenTryingToWriteASecondValue() { + def 'shouldThrowAnExceptionWhenTryingToWriteASecondValue'() { when: - writer.writeStartDocument(); - writer.writeName('f1'); - writer.writeDouble(100); - writer.writeString('i2'); + writer.writeStartDocument() + writer.writeName('f1') + writer.writeDouble(100) + writer.writeString('i2') then: thrown(BsonInvalidOperationException) @@ -119,12 +118,12 @@ class BsonWriterSpecification extends Specification { writer << [new 
BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnExceptionWhenTryingToWriteJavaScript() { + def 'shouldThrowAnExceptionWhenTryingToWriteJavaScript'() { when: - writer.writeStartDocument(); - writer.writeName('f1'); - writer.writeDouble(100); - writer.writeJavaScript('var i'); + writer.writeStartDocument() + writer.writeName('f1') + writer.writeDouble(100) + writer.writeJavaScript('var i') then: thrown(BsonInvalidOperationException) @@ -133,13 +132,13 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnExceptionWhenWritingANameInAnArray() { + def 'shouldThrowAnExceptionWhenWritingANameInAnArray'() { when: - writer.writeStartDocument(); - writer.writeName('f1'); - writer.writeDouble(100); - writer.writeStartArray('f2'); - writer.writeName('i3'); + writer.writeStartDocument() + writer.writeName('f1') + writer.writeDouble(100) + writer.writeStartArray('f2') + writer.writeName('i3') then: thrown(BsonInvalidOperationException) @@ -148,13 +147,13 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnExceptionWhenEndingDocumentInTheMiddleOfWritingAnArray() { + def 'shouldThrowAnExceptionWhenEndingDocumentInTheMiddleOfWritingAnArray'() { when: - writer.writeStartDocument(); - writer.writeName('f1'); - writer.writeDouble(100); - writer.writeStartArray('f2'); - writer.writeEndDocument(); + writer.writeStartDocument() + writer.writeName('f1') + writer.writeDouble(100) + writer.writeStartArray('f2') + writer.writeEndDocument() then: thrown(BsonInvalidOperationException) @@ -163,15 +162,15 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def 
shouldThrowAnExceptionWhenEndingAnArrayInASubDocument() { + def 'shouldThrowAnExceptionWhenEndingAnArrayInASubDocument'() { when: writer.with { - writeStartDocument(); - writeName('f1'); - writeDouble(100); - writeStartArray('f2'); - writeStartDocument(); - writeEndArray(); + writeStartDocument() + writeName('f1') + writeDouble(100) + writeStartArray('f2') + writeStartDocument() + writeEndArray() } then: @@ -181,17 +180,17 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnExceptionWhenWritingANameInAnArrayEvenWhenSubDocumentExistsInArray() { + def 'shouldThrowAnExceptionWhenWritingANameInAnArrayEvenWhenSubDocumentExistsInArray'() { when: //Does this test even make sense? writer.with { - writeStartDocument(); - writeName('f1'); - writeDouble(100); - writeStartArray('f2'); - writeStartDocument(); - writeEndDocument(); - writeName('i3'); + writeStartDocument() + writeName('f1') + writeDouble(100) + writeStartArray('f2') + writeStartDocument() + writeEndDocument() + writeName('i3') } then: @@ -201,18 +200,18 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowExceptionWhenWritingObjectsIntoNestedArrays() { + def 'shouldThrowExceptionWhenWritingObjectsIntoNestedArrays'() { when: //This test seem redundant? 
writer.with { - writeStartDocument(); - writeName('f1'); - writeDouble(100); - writeStartArray('f2'); - writeStartArray(); - writeStartArray(); - writeStartArray(); - writeInt64('i4', 10); + writeStartDocument() + writeName('f1') + writeDouble(100) + writeStartArray('f2') + writeStartArray() + writeStartArray() + writeStartArray() + writeInt64('i4', 10) } then: thrown(BsonInvalidOperationException) @@ -221,13 +220,13 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnExceptionWhenAttemptingToEndAnArrayThatWasNotStarted() { + def 'shouldThrowAnExceptionWhenAttemptingToEndAnArrayThatWasNotStarted'() { when: writer.with { - writeStartDocument(); - writeStartArray('f2'); - writeEndArray(); - writeEndArray(); + writeStartDocument() + writeStartArray('f2') + writeEndArray() + writeEndArray() } then: thrown(BsonInvalidOperationException) @@ -236,12 +235,12 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnErrorIfTryingToWriteNamesIntoAJavascriptScope1() { + def 'shouldThrowAnErrorIfTryingToWriteNamesIntoAJavascriptScope1'() { when: - writer.writeStartDocument(); - writer.writeJavaScriptWithScope('js1', 'var i = 1'); + writer.writeStartDocument() + writer.writeJavaScriptWithScope('js1', 'var i = 1') - writer.writeBoolean('b4', true); + writer.writeBoolean('b4', true) then: thrown(BsonInvalidOperationException) @@ -250,14 +249,13 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - @Test(expected = BsonInvalidOperationException) - def shouldThrowAnErrorIfTryingToWriteNamesIntoAJavascriptScope2() { + def 'shouldThrowAnErrorIfTryingToWriteNamesIntoAJavascriptScope2'() { when: //do we really need to test every type 
written after writeJavaScriptWithScope? - writer.writeStartDocument(); - writer.writeJavaScriptWithScope('js1', 'var i = 1'); + writer.writeStartDocument() + writer.writeJavaScriptWithScope('js1', 'var i = 1') - writer.writeBinaryData(new BsonBinary([0, 0, 1, 0] as byte[])); + writer.writeBinaryData(new BsonBinary([0, 0, 1, 0] as byte[])) then: thrown(BsonInvalidOperationException) @@ -266,13 +264,13 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnErrorIfTryingToWriteNamesIntoAJavascriptScope3() { + def 'shouldThrowAnErrorIfTryingToWriteNamesIntoAJavascriptScope3'() { when: //do we really need to test every type written after writeJavaScriptWithScope? - writer.writeStartDocument(); - writer.writeJavaScriptWithScope('js1', 'var i = 1'); + writer.writeStartDocument() + writer.writeJavaScriptWithScope('js1', 'var i = 1') - writer.writeStartArray(); + writer.writeStartArray() then: thrown(BsonInvalidOperationException) @@ -281,13 +279,13 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnErrorIfTryingToWriteNamesIntoAJavascriptScope4() { + def 'shouldThrowAnErrorIfTryingToWriteNamesIntoAJavascriptScope4'() { when: //do we really need to test every type written after writeJavaScriptWithScope? 
- writer.writeStartDocument(); - writer.writeJavaScriptWithScope('js1', 'var i = 1'); + writer.writeStartDocument() + writer.writeJavaScriptWithScope('js1', 'var i = 1') - writer.writeEndDocument(); + writer.writeEndDocument() then: thrown(BsonInvalidOperationException) @@ -296,23 +294,23 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowAnErrorIfKeyContainsNullCharacter() { + def 'shouldThrowAnErrorIfKeyContainsNullCharacter'() { when: - writer.writeStartDocument(); - writer.writeBoolean('h\u0000i', true); + writer.writeStartDocument() + writer.writeBoolean('h\u0000i', true) then: thrown(BSONException) where: - writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] + writer << [new BsonBinaryWriter(new BasicOutputBuffer())] } - def shouldNotThrowAnErrorIfValueContainsNullCharacter() { + def 'shouldNotThrowAnErrorIfValueContainsNullCharacter'() { when: - writer.writeStartDocument(); - writer.writeString('x', 'h\u0000i'); + writer.writeStartDocument() + writer.writeString('x', 'h\u0000i') then: true @@ -321,15 +319,15 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldNotThrowAnExceptionIfCorrectlyStartingAndEndingDocumentsAndSubDocuments() { + def 'shouldNotThrowAnExceptionIfCorrectlyStartingAndEndingDocumentsAndSubDocuments'() { when: - writer.writeStartDocument(); - writer.writeJavaScriptWithScope('js1', 'var i = 1'); + writer.writeStartDocument() + writer.writeJavaScriptWithScope('js1', 'var i = 1') - writer.writeStartDocument(); - writer.writeEndDocument(); + writer.writeStartDocument() + writer.writeEndDocument() - writer.writeEndDocument(); + writer.writeEndDocument() then: true @@ -338,9 +336,9 @@ class BsonWriterSpecification extends Specification { writer << [new 
BsonBinaryWriter(new BasicOutputBuffer()), new BsonDocumentWriter(new BsonDocument())] } - def shouldThrowOnInvalidFieldName() { + def 'shouldThrowOnInvalidFieldName'() { given: - writer.writeStartDocument(); + writer.writeStartDocument() writer.writeString('good', 'string') when: @@ -353,7 +351,7 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer(), new TestFieldNameValidator('bad'))] } - def shouldThrowOnInvalidFieldNameNestedInDocument() { + def 'shouldThrowOnInvalidFieldNameNestedInDocument'() { given: writer.with { writeStartDocument() @@ -372,7 +370,7 @@ class BsonWriterSpecification extends Specification { writer << [new BsonBinaryWriter(new BasicOutputBuffer(), new TestFieldNameValidator('bad'))] } - def shouldThrowOnInvalidFieldNameNestedInDocumentInArray() { + def 'shouldThrowOnInvalidFieldNameNestedInDocumentInArray'() { given: writer.with { writeStartDocument() @@ -386,16 +384,15 @@ class BsonWriterSpecification extends Specification { writer.writeString('bad-child', 'string') then: - thrown(IllegalArgumentException) + def e = thrown(IllegalArgumentException) + e.getMessage() == 'testFieldNameValidator error' where: writer << [new BsonBinaryWriter(new BasicOutputBuffer(), new TestFieldNameValidator('bad'))] } - class TestFieldNameValidator implements FieldNameValidator { - private final String badFieldName; - + private final String badFieldName TestFieldNameValidator(final String badFieldName) { this.badFieldName = badFieldName @@ -406,6 +403,11 @@ class BsonWriterSpecification extends Specification { fieldName != badFieldName } + @Override + String getValidationErrorMessage(final String fieldName) { + 'testFieldNameValidator error' + } + @Override FieldNameValidator getValidatorForField(final String fieldName) { new TestFieldNameValidator(badFieldName + '-child') diff --git a/bson/src/test/unit/org/bson/DocumentTest.java b/bson/src/test/unit/org/bson/DocumentTest.java index 
034d29190f1..bd9551e9407 100644 --- a/bson/src/test/unit/org/bson/DocumentTest.java +++ b/bson/src/test/unit/org/bson/DocumentTest.java @@ -26,21 +26,21 @@ import org.bson.codecs.ValueCodecProvider; import org.bson.codecs.configuration.CodecConfigurationException; import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; import org.bson.json.JsonReader; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.UUID; import static java.util.Arrays.asList; import static org.bson.codecs.configuration.CodecRegistries.fromCodecs; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; import static org.bson.codecs.configuration.CodecRegistries.fromRegistries; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.fail; // Don't convert to Spock, as Groovy intercepts equals/hashCode methods that we are trying to test public class DocumentTest { @@ -58,14 +58,14 @@ public class DocumentTest { @Test public void shouldBeEqualToItself() { - assertTrue(emptyDocument.equals(emptyDocument)); - assertTrue(document.equals(document)); + assertEquals(emptyDocument, emptyDocument); + assertEquals(document, document); } @Test public void shouldNotBeEqualToDifferentBsonDocument() { // expect - assertFalse(emptyDocument.equals(document)); + assertNotEquals(emptyDocument, document); } @Test @@ -75,8 +75,17 @@ public void shouldHaveSameHashCodeAsEquivalentBsonDocument() { @Test public void toJsonShouldReturnEquivalent() { - assertEquals(new DocumentCodec().decode(new JsonReader(document.toJson()), DecoderContext.builder().build()), - document); + 
assertEquals(new DocumentCodec().decode(new JsonReader(document.toJson()), DecoderContext.builder().build()), document); + } + + // Test to ensure that toJson does not reorder _id field + @Test + public void toJsonShouldNotReorderIdField() { + // given + Document d = new Document().append("x", 1) + .append("y", Collections.singletonList("one")) + .append("_id", "1"); + assertEquals("{\"x\": 1, \"y\": [\"one\"], \"_id\": \"1\"}", d.toJson()); } // Test in Java to make sure none of the casts result in compiler warnings or class cast exceptions @@ -89,22 +98,22 @@ public void shouldGetWithDefaultValue() { // when the key is found int x = d.get("x", 2); - List y = d.get("y", Arrays.asList("three", "four")); + List y = d.get("y", asList("three", "four")); String z = d.get("z", "bar"); // then it returns the value assertEquals(1, x); - assertEquals(Arrays.asList("one"), y); + assertEquals(asList("one"), y); assertEquals("foo", z); // when the key is not found int x2 = d.get("x2", 2); - List y2 = d.get("y2", Arrays.asList("three", "four")); + List y2 = d.get("y2", asList("three", "four")); String z2 = d.get("z2", "bar"); // then it returns the default value assertEquals(2, x2); - assertEquals(Arrays.asList("three", "four"), y2); + assertEquals(asList("three", "four"), y2); assertEquals("bar", z2); } @@ -118,7 +127,28 @@ public void toJsonShouldTakeACustomDocumentCodec() { // noop } - assertEquals("{ \"database\" : { \"name\" : \"MongoDB\" } }", customDocument.toJson(customDocumentCodec)); + assertEquals("{\"database\": {\"name\": \"MongoDB\"}}", customDocument.toJson(customDocumentCodec)); + } + + @Test + public void toBsonDocumentShouldCreateBsonDocument() { + BsonDocument expected = new BsonDocument() + .append("a", new BsonInt32(1)) + .append("b", new BsonInt32(2)) + .append("c", new BsonDocument("x", BsonBoolean.TRUE)) + .append("d", new BsonArray(asList(new BsonDocument("y", BsonBoolean.FALSE), new BsonInt32(1)))); + + assertEquals(expected, 
document.toBsonDocument(BsonDocument.class, Bson.DEFAULT_CODEC_REGISTRY)); + assertEquals(expected, document.toBsonDocument()); + } + + @Test + public void toJsonShouldRenderUuidAsStandard() { + UUID uuid = UUID.randomUUID(); + Document doc = new Document("_id", uuid); + + String json = doc.toJson(); + assertEquals(new BsonDocument("_id", new BsonBinary(uuid)), BsonDocument.parse(json)); } public class Name { diff --git a/bson/src/test/unit/org/bson/GenericBsonTest.java b/bson/src/test/unit/org/bson/GenericBsonTest.java index 0a611267f05..582ec5d83dc 100644 --- a/bson/src/test/unit/org/bson/GenericBsonTest.java +++ b/bson/src/test/unit/org/bson/GenericBsonTest.java @@ -16,40 +16,34 @@ package org.bson; -import org.bson.codecs.BsonDocumentCodec; -import org.bson.codecs.DecoderContext; -import org.bson.codecs.EncoderContext; -import org.bson.io.BasicOutputBuffer; import org.bson.json.JsonMode; import org.bson.json.JsonParseException; import org.bson.json.JsonWriterSettings; import org.bson.types.Decimal128; -import org.junit.Assume; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import util.Hex; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import util.JsonPoweredTestHelper; -import java.io.File; import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; -import java.net.URISyntaxException; -import java.nio.ByteBuffer; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.List; +import java.util.stream.Stream; import static java.lang.String.format; import static org.bson.BsonDocument.parse; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.bson.BsonHelper.decodeToDocument; +import static 
org.bson.BsonHelper.encodeToHex; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeFalse; // BSON tests powered by language-agnostic JSON-based tests included in test resources -@RunWith(Parameterized.class) public class GenericBsonTest { private static final List IGNORED_PARSE_ERRORS = Arrays.asList( @@ -64,35 +58,26 @@ enum TestCaseType { PARSE_ERROR } - private final BsonDocument testDefinition; - private final BsonDocument testCase; - private final TestCaseType testCaseType; - - public GenericBsonTest(final String description, final BsonDocument testDefinition, final BsonDocument testCase, - final TestCaseType testCaseType) { - this.testDefinition = testDefinition; - this.testCase = testCase; - this.testCaseType = testCaseType; - } - - @Test - public void shouldPassAllOutcomes() { + @ParameterizedTest(name = "{0}") + @MethodSource("data") + public void shouldPassAllOutcomes(@SuppressWarnings("unused") final String description, + final BsonDocument testDefinition, final BsonDocument testCase, final TestCaseType testCaseType) { switch (testCaseType) { case VALID: - runValid(); + runValid(testCase); break; case DECODE_ERROR: - runDecodeError(); + runDecodeError(testCase); break; case PARSE_ERROR: - runParseError(); + runParseError(testDefinition, testCase); break; default: throw new IllegalArgumentException(format("Unsupported test case type %s", testCaseType)); } } - private void runValid() { + private void runValid(final BsonDocument testCase) { String description = testCase.getString("description").getValue(); String canonicalBsonHex = testCase.getString("canonical_bson").getValue().toUpperCase(); String degenerateBsonHex = testCase.getString("degenerate_bson", new BsonString("")).getValue().toUpperCase(); @@ -104,50 +89,51 @@ private void runValid() { BsonDocument decodedDocument = 
decodeToDocument(canonicalBsonHex, description); // native_to_bson( bson_to_native(cB) ) = cB - assertEquals(format("Failed to create expected BSON for document with description '%s'", description), - canonicalBsonHex, encodeToHex(decodedDocument)); + assertEquals(canonicalBsonHex, encodeToHex(decodedDocument), + format("Failed to create expected BSON for document with description '%s'", description)); JsonWriterSettings canonicalJsonWriterSettings = JsonWriterSettings.builder().outputMode(JsonMode.EXTENDED).build(); JsonWriterSettings relaxedJsonWriterSettings = JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build(); if (!canonicalJson.isEmpty()) { // native_to_canonical_extended_json( bson_to_native(cB) ) = cEJ - assertEquals(format("Failed to create expected canonical JSON for document with description '%s'", description), - stripWhiteSpace(canonicalJson), stripWhiteSpace(decodedDocument.toJson(canonicalJsonWriterSettings))); + assertEquals(stripWhiteSpace(canonicalJson), stripWhiteSpace(decodedDocument.toJson(canonicalJsonWriterSettings)), + format("Failed to create expected canonical JSON for document with description '%s'", description)); // native_to_canonical_extended_json( json_to_native(cEJ) ) = cEJ BsonDocument parsedCanonicalJsonDocument = parse(canonicalJson); - assertEquals("Failed to create expected canonical JSON from parsing canonical JSON", - stripWhiteSpace(canonicalJson), stripWhiteSpace(parsedCanonicalJsonDocument.toJson(canonicalJsonWriterSettings))); + assertEquals(stripWhiteSpace(canonicalJson), stripWhiteSpace(parsedCanonicalJsonDocument.toJson(canonicalJsonWriterSettings)), + "Failed to create expected canonical JSON from parsing canonical JSON"); if (!lossy) { // native_to_bson( json_to_native(cEJ) ) = cB - assertEquals("Failed to create expected canonical BSON from parsing canonical JSON", - canonicalBsonHex, encodeToHex(parsedCanonicalJsonDocument)); + assertEquals(canonicalBsonHex, encodeToHex(parsedCanonicalJsonDocument), + 
"Failed to create expected canonical BSON from parsing canonical JSON"); } } if (!relaxedJson.isEmpty()) { // native_to_relaxed_extended_json( bson_to_native(cB) ) = rEJ - assertEquals(format("Failed to create expected relaxed JSON for document with description '%s'", description), - stripWhiteSpace(relaxedJson), stripWhiteSpace(decodedDocument.toJson(relaxedJsonWriterSettings))); + assertEquals(stripWhiteSpace(relaxedJson), stripWhiteSpace(decodedDocument.toJson(relaxedJsonWriterSettings)), + format("Failed to create expected relaxed JSON for document with description '%s'", description)); // native_to_relaxed_extended_json( json_to_native(rEJ) ) = rEJ - assertEquals("Failed to create expected relaxed JSON from parsing relaxed JSON", stripWhiteSpace(relaxedJson), - stripWhiteSpace(parse(relaxedJson).toJson(relaxedJsonWriterSettings))); + assertEquals(stripWhiteSpace(relaxedJson), stripWhiteSpace(parse(relaxedJson).toJson(relaxedJsonWriterSettings)), + "Failed to create expected relaxed JSON from parsing relaxed JSON"); } if (!degenerateJson.isEmpty()) { // native_to_bson( json_to_native(dEJ) ) = cB - assertEquals("Failed to create expected canonical BSON from parsing canonical JSON", - canonicalBsonHex, encodeToHex(parse(degenerateJson))); + assertEquals(canonicalBsonHex, encodeToHex(parse(degenerateJson)), + "Failed to create expected canonical BSON from parsing canonical JSON"); } if (!degenerateBsonHex.isEmpty()) { BsonDocument decodedDegenerateDocument = decodeToDocument(degenerateBsonHex, description); // native_to_bson( bson_to_native(dB) ) = cB - assertEquals(format("Failed to create expected canonical BSON from degenerate BSON for document with description " - + "'%s'", description), canonicalBsonHex, encodeToHex(decodedDegenerateDocument)); + assertEquals(canonicalBsonHex, encodeToHex(decodedDegenerateDocument), + format("Failed to create expected canonical BSON from degenerate BSON for document with description '%s'", + description)); } } @@ -215,39 
+201,20 @@ private boolean shouldEscapeCharacter(final char escapedChar) { } } - private BsonDocument decodeToDocument(final String subjectHex, final String description) { - ByteBuffer byteBuffer = ByteBuffer.wrap(Hex.decode(subjectHex)); - BsonDocument actualDecodedDocument = new BsonDocumentCodec().decode(new BsonBinaryReader(byteBuffer), - DecoderContext.builder().build()); - - if (byteBuffer.hasRemaining()) { - throw new BsonSerializationException(format("Should have consumed all bytes, but " + byteBuffer.remaining() - + " still remain in the buffer for document with description ", - description)); - } - return actualDecodedDocument; - } - - private String encodeToHex(final BsonDocument decodedDocument) { - BasicOutputBuffer outputBuffer = new BasicOutputBuffer(); - new BsonDocumentCodec().encode(new BsonBinaryWriter(outputBuffer), decodedDocument, EncoderContext.builder().build()); - return Hex.encode(outputBuffer.toByteArray()); - } - - private void runDecodeError() { + private void runDecodeError(final BsonDocument testCase) { try { String description = testCase.getString("description").getValue(); - throwIfValueIsStringContainingReplacementCharacter(description); + throwIfValueIsStringContainingReplacementCharacter(testCase, description); fail(format("Should have failed parsing for subject with description '%s'", description)); } catch (BsonSerializationException e) { // all good } } - private void runParseError() { + private void runParseError(final BsonDocument testDefinition, final BsonDocument testCase) { String description = testCase.getString("description").getValue(); - Assume.assumeFalse(IGNORED_PARSE_ERRORS.contains(description)); + assumeFalse(IGNORED_PARSE_ERRORS.contains(description)); String str = testCase.getString("string").getValue(); @@ -259,9 +226,10 @@ private void runParseError() { } catch (NumberFormatException e) { // all good } - } else if (testDefinitionDescription.startsWith("Top-level")) { + } else if 
(testDefinitionDescription.startsWith("Top-level") || testDefinitionDescription.startsWith("Binary type")) { try { - parse(str); + BsonDocument document = parse(str); + encodeToHex(document); fail("Should fail to parse JSON '" + str + "' with description '" + description + "'"); } catch (JsonParseException e) { // all good @@ -270,66 +238,79 @@ private void runParseError() { fail("Should throw JsonParseException for '" + str + "' with description '" + description + "'"); } // all good + } catch (BsonSerializationException e) { + if (isTestOfNullByteInCString(description)) { + assertTrue(e.getMessage().contains("is not valid because it contains a null character")); + } else { + fail("Unexpected BsonSerializationException"); + } } } else { fail("Unrecognized test definition description: " + testDefinitionDescription); } } + private boolean isTestOfNullByteInCString(final String description) { + return description.startsWith("Null byte"); + } - - // TODO: Working around the fact that the Java driver doesn't report an error for invalid UTF-8, but rather replaces the invalid + // Working around the fact that the Java driver doesn't report an error for invalid UTF-8, but rather replaces the invalid // sequence with the replacement character - private void throwIfValueIsStringContainingReplacementCharacter(final String description) { + private void throwIfValueIsStringContainingReplacementCharacter(final BsonDocument testCase, final String description) { BsonDocument decodedDocument = decodeToDocument(testCase.getString("bson").getValue(), description); - String testKey = decodedDocument.keySet().iterator().next(); - - if (decodedDocument.containsKey(testKey)) { - String decodedString = null; - if (decodedDocument.get(testKey).isString()) { - decodedString = decodedDocument.getString(testKey).getValue(); - } - if (decodedDocument.get(testKey).isDBPointer()) { - decodedString = decodedDocument.get(testKey).asDBPointer().getNamespace(); - } - if (decodedString != null && 
decodedString.contains(Charset.forName("UTF-8").newDecoder().replacement())) { - throw new BsonSerializationException("String contains replacement character"); - } + BsonValue value = decodedDocument.get(decodedDocument.getFirstKey()); + + String decodedString; + if (value.isString()) { + decodedString = value.asString().getValue(); + } else if (value.isDBPointer()) { + decodedString = value.asDBPointer().getNamespace(); + } else if (value.isJavaScript()) { + decodedString = value.asJavaScript().getCode(); + } else if (value.isJavaScriptWithScope()) { + decodedString = value.asJavaScriptWithScope().getCode(); + } else if (value.isSymbol()) { + decodedString = value.asSymbol().getSymbol(); + } else { + throw new UnsupportedOperationException("Unsupported test for BSON type " + value.getBsonType()); + } + if (decodedString.contains(StandardCharsets.UTF_8.newDecoder().replacement())) { + throw new BsonSerializationException("String contains replacement character"); } } - @Parameterized.Parameters(name = "{0}") - public static Collection data() throws URISyntaxException, IOException { - List data = new ArrayList(); - for (File file : JsonPoweredTestHelper.getTestFiles("/bson")) { - BsonDocument testDocument = JsonPoweredTestHelper.getTestDocument(file); + private static Stream data() { + List data = new ArrayList<>(); + for (BsonDocument testDocument : JsonPoweredTestHelper.getTestDocuments("/bson")) { for (BsonValue curValue : testDocument.getArray("valid", new BsonArray())) { BsonDocument testCaseDocument = curValue.asDocument(); - data.add(new Object[]{createTestCaseDescription(testDocument, testCaseDocument, "valid"), testDocument, testCaseDocument, - TestCaseType.VALID}); + data.add(Arguments.of( + createTestCaseDescription(testDocument, testCaseDocument, "valid"), testDocument, testCaseDocument, + TestCaseType.VALID)); } for (BsonValue curValue : testDocument.getArray("decodeErrors", new BsonArray())) { BsonDocument testCaseDocument = curValue.asDocument(); - 
data.add(new Object[]{createTestCaseDescription(testDocument, testCaseDocument, "decodeError"), testDocument, - testCaseDocument, TestCaseType.DECODE_ERROR}); + data.add(Arguments.of( + createTestCaseDescription(testDocument, testCaseDocument, "decodeError"), testDocument, testCaseDocument, + TestCaseType.DECODE_ERROR)); } for (BsonValue curValue : testDocument.getArray("parseErrors", new BsonArray())) { BsonDocument testCaseDocument = curValue.asDocument(); - data.add(new Object[]{createTestCaseDescription(testDocument, testCaseDocument, "parseError"), testDocument, - testCaseDocument, TestCaseType.PARSE_ERROR}); + data.add(Arguments.of(createTestCaseDescription(testDocument, testCaseDocument, "parseError"), testDocument, + testCaseDocument, TestCaseType.PARSE_ERROR)); } } - return data; + return data.stream(); } private static String createTestCaseDescription(final BsonDocument testDocument, final BsonDocument testCaseDocument, - final String testCaseType) { + final String testCaseType) { return testDocument.getString("description").getValue() - + "[" + testCaseType + "]" - + ": " + testCaseDocument.getString("description").getValue(); + + "[" + testCaseType + "]" + + ": " + testCaseDocument.getString("description").getValue(); } private String stripWhiteSpace(final String json) { diff --git a/bson/src/test/unit/org/bson/LazyBSONDecoderTest.java b/bson/src/test/unit/org/bson/LazyBSONDecoderTest.java index 8e7679a8fb6..32b2f047f43 100644 --- a/bson/src/test/unit/org/bson/LazyBSONDecoderTest.java +++ b/bson/src/test/unit/org/bson/LazyBSONDecoderTest.java @@ -16,8 +16,8 @@ package org.bson; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -25,14 +25,15 @@ import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.junit.Assert.assertEquals; -import static 
org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; public class LazyBSONDecoderTest { private BSONDecoder bsonDecoder; - @Before + @BeforeEach public void setUp() { bsonDecoder = new LazyBSONDecoder(); } @@ -50,7 +51,7 @@ public void testDecodingFromInputStream() throws IOException { @Test public void testDecodingFromByteArray() throws IOException { - byte[] bytes = new byte[]{12, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0}; + byte[] bytes = {12, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0}; BSONObject document = bsonDecoder.readObject(bytes); assertNotNull(document); assertThat(document, instanceOf(LazyBSONObject.class)); @@ -59,10 +60,10 @@ public void testDecodingFromByteArray() throws IOException { assertEquals(1, document.get("a")); } - @Test(expected = BSONException.class) + @Test public void testDecodingFromInvalidInput() { - byte[] bytes = new byte[]{16, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0}; - bsonDecoder.readObject(bytes); + byte[] bytes = {16, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0}; + assertThrows(BSONException.class, () -> bsonDecoder.readObject(bytes)); } } diff --git a/bson/src/test/unit/org/bson/LazyBSONListTest.java b/bson/src/test/unit/org/bson/LazyBSONListTest.java index dff7420a22d..cd2672b6575 100644 --- a/bson/src/test/unit/org/bson/LazyBSONListTest.java +++ b/bson/src/test/unit/org/bson/LazyBSONListTest.java @@ -16,22 +16,23 @@ package org.bson; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import static java.util.Arrays.asList; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; -@SuppressWarnings("rawtypes") +@SuppressWarnings({"rawtypes"}) public class LazyBSONListTest { private LazyBSONList encodeAndExtractList(final List list) { BSONObject document = new BasicBSONObject("l", list); - return (LazyBSONList) new LazyBSONObject(BSON.encode(document), new LazyBSONCallback()).get("l"); + return (LazyBSONList) new LazyBSONObject(new BasicBSONEncoder().encode(document), new LazyBSONCallback()).get("l"); } @@ -86,12 +87,14 @@ public void testIterator() { assertFalse(it.hasNext()); } - @Test(expected = NoSuchElementException.class) + @Test public void testIteratorNextWhileNothingLeft() { - LazyBSONList list = encodeAndExtractList(asList()); - Iterator it = list.iterator(); - assertFalse(it.hasNext()); - it.next(); + assertThrows(NoSuchElementException.class, () -> { + LazyBSONList list = encodeAndExtractList(asList()); + Iterator it = list.iterator(); + assertFalse(it.hasNext()); + it.next(); + }); } } diff --git a/bson/src/test/unit/org/bson/LazyBSONObjectSpecification.groovy b/bson/src/test/unit/org/bson/LazyBSONObjectSpecification.groovy index 0ec23b66c21..43d910bd5fa 100644 --- a/bson/src/test/unit/org/bson/LazyBSONObjectSpecification.groovy +++ b/bson/src/test/unit/org/bson/LazyBSONObjectSpecification.groovy @@ -71,8 +71,6 @@ class LazyBSONObjectSpecification extends Specification { new Binary((byte) 0x01, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 1, 115, 116, 11, 0] new Binary((byte) 0x03, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 3, 115, 116, 11, 0] new Binary((byte) 0x04, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 4, 115, 116, 11, 0] - UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] - UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] [13, 12] as byte[] | [15, 0, 0, 0, 5, 102, 0, 2, 0, 0, 0, 0, 13, 12, 0] [102, 111, 111] as byte[] | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 0, 102, 111, 111, 0] new ObjectId('50d3332018c6a1d8d1662b61') | [20, 0, 0, 0, 7, 102, 0, 80, -45, 51, 32, 24, -58, -95, -40, -47, 102, 43, 97, 0] @@ -131,7 +129,7 @@ class LazyBSONObjectSpecification extends Specification { byte[] bytes = [ 53, 0, 0, 0, 4, 97, 0, 26, 0, 0, 0, 16, 48, 0, 1, 0, 0, 0, 16, 49, 0, 2, 0, 0, 0, 16, 50, 0, 3, 0, 0, 0, 0, 3, 111, 0, 16, 0, 0, 0, 1, 122, 0, -102, -103, -103, -103, -103, -103, -71, 63, 0, 0 - ]; + ] when: LazyBSONObject document = new LazyBSONObject(bytes, new LazyBSONCallback()) @@ -348,7 +346,7 @@ class LazyBSONObjectSpecification extends Specification { def 'should pipe to stream'() { given: - byte[] bytes = [16, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 8, 98, 0, 1, 0]; + byte[] bytes = [16, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 8, 98, 0, 1, 0] LazyBSONObject document = new LazyBSONObject(bytes, new LazyBSONCallback()) ByteArrayOutputStream baos = new ByteArrayOutputStream() diff --git a/bson/src/test/unit/org/bson/LimitedLookaheadMarkSpecification.groovy b/bson/src/test/unit/org/bson/LimitedLookaheadMarkSpecification.groovy index e2d2c12dabe..5a859c396eb 100644 --- a/bson/src/test/unit/org/bson/LimitedLookaheadMarkSpecification.groovy +++ b/bson/src/test/unit/org/bson/LimitedLookaheadMarkSpecification.groovy @@ -18,230 +18,17 @@ package org.bson import org.bson.io.BasicOutputBuffer import org.bson.io.ByteBufferBsonInput +import org.bson.json.JsonMode import org.bson.json.JsonReader import org.bson.json.JsonWriter +import org.bson.json.JsonWriterSettings import spock.lang.Specification @SuppressWarnings('UnnecessaryObjectReferences') -@SuppressWarnings('deprecation') class 
LimitedLookaheadMarkSpecification extends Specification { - def 'should throw if mark without resetting previous mark'(BsonWriter writer) { - given: - writer.with { - writeStartDocument() - writeInt64('int64', 52L) - writeEndDocument() - } - - BsonReader reader - if (writer instanceof BsonDocumentWriter) { - reader = new BsonDocumentReader(writer.document) - } else if (writer instanceof BsonBinaryWriter) { - BasicOutputBuffer buffer = (BasicOutputBuffer) writer.getBsonOutput(); - reader = new BsonBinaryReader(new ByteBufferBsonInput(buffer.getByteBuffers().get(0))) - } else if (writer instanceof JsonWriter) { - reader = new JsonReader(writer.writer.toString()) - } - - reader.readStartDocument() - reader.mark() - - when: - reader.mark() - - then: - thrown(BSONException) - - where: - writer << [ - new BsonDocumentWriter(new BsonDocument()), - new BsonBinaryWriter(new BasicOutputBuffer()), - new JsonWriter(new StringWriter()) - ] - } - - def 'should throw if reset without mark'(BsonWriter writer) { - given: - writer.with { - writeStartDocument() - writeInt64('int64', 52L) - writeEndDocument() - } - - BsonReader reader - if (writer instanceof BsonDocumentWriter) { - reader = new BsonDocumentReader(writer.document) - } else if (writer instanceof BsonBinaryWriter) { - BasicOutputBuffer buffer = (BasicOutputBuffer) writer.getBsonOutput(); - reader = new BsonBinaryReader(new ByteBufferBsonInput(buffer.getByteBuffers().get(0))) - } else if (writer instanceof JsonWriter) { - reader = new JsonReader(writer.writer.toString()) - } - - reader.readStartDocument() - - when: - reader.reset() - - then: - thrown(BSONException) - - where: - writer << [ - new BsonDocumentWriter(new BsonDocument()), - new BsonBinaryWriter(new BasicOutputBuffer()), - new JsonWriter(new StringWriter()) - ] - } - - def 'Lookahead should work at various states'(BsonWriter writer) { - given: - writer.with { - writeStartDocument() - writeInt64('int64', 52L) - writeStartArray('array') - writeInt32(1) - 
writeInt64(2L) - writeStartArray() - writeInt32(3) - writeInt32(4) - writeEndArray() - writeStartDocument() - writeInt32('a', 5) - writeEndDocument() - writeNull() - writeEndArray() - writeStartDocument('document') - writeInt32('a', 6) - writeEndDocument() - writeEndDocument() - } - - - when: - BsonReader reader - if (writer instanceof BsonDocumentWriter) { - reader = new BsonDocumentReader(writer.document) - } else if (writer instanceof BsonBinaryWriter) { - BasicOutputBuffer buffer = (BasicOutputBuffer) writer.getBsonOutput(); - reader = new BsonBinaryReader(new ByteBufferBsonInput(buffer.getByteBuffers().get(0))) - } else if (writer instanceof JsonWriter) { - reader = new JsonReader(writer.writer.toString()) - } - - then: - reader.readStartDocument() - // mark beginning of document * 1 - reader.mark() - reader.readName() == 'int64' - reader.readInt64() == 52L - reader.readStartArray() - // reset to beginning of document * 2 - reader.reset() - // mark beginning of document * 2 - reader.mark() - reader.readName() == 'int64' - reader.readInt64() == 52L - // reset to beginning of document * 3 - reader.reset() - // mark beginning of document * 3 - reader.mark() - reader.readName() == 'int64' - reader.readInt64() == 52L - reader.readName() == 'array' - reader.readStartArray() - reader.readInt32() == 1 - reader.readInt64() == 2 - reader.readStartArray() - reader.readInt32() == 3 - reader.readInt32() == 4 - reader.readEndArray() - reader.readStartDocument() - reader.readName() == 'a' - reader.readInt32() == 5 - reader.readEndDocument() - reader.readNull() - reader.readEndArray() - reader.readName() == 'document' - reader.readStartDocument() - reader.readName() == 'a' - reader.readInt32() == 6 - reader.readEndDocument() - reader.readEndDocument() - // read entire document, reset to beginning - reader.reset() - reader.readName() == 'int64' - reader.readInt64() == 52L - reader.readName() == 'array' - // mar in outer-document * 1 - reader.mark() - reader.readStartArray() - 
reader.readInt32() == 1 - reader.readInt64() == 2 - reader.readStartArray() - // reset in sub-document * 1 - reader.reset() - // mark in outer-document * 2 - reader.mark() - reader.readStartArray() - reader.readInt32() == 1 - reader.readInt64() == 2 - reader.readStartArray() - reader.readInt32() == 3 - // reset in sub-document * 2 - reader.reset() - reader.readStartArray() - reader.readInt32() == 1 - reader.readInt64() == 2 - reader.readStartArray() - reader.readInt32() == 3 - reader.readInt32() == 4 - // mark in sub-document * 1 - reader.mark() - reader.readEndArray() - reader.readStartDocument() - reader.readName() == 'a' - reader.readInt32() == 5 - reader.readEndDocument() - reader.readNull() - reader.readEndArray() - // reset in outer-document * 1 - reader.reset() - // mark in sub-document * 2 - reader.mark() - reader.readEndArray() - reader.readStartDocument() - reader.readName() == 'a' - reader.readInt32() == 5 - reader.readEndDocument() - reader.readNull() - reader.readEndArray() - // reset in out-document * 2 - reader.reset() - reader.readEndArray() - reader.readStartDocument() - reader.readName() == 'a' - reader.readInt32() == 5 - reader.readEndDocument() - reader.readNull() - reader.readEndArray() - reader.readName() == 'document' - reader.readStartDocument() - reader.readName() == 'a' - reader.readInt32() == 6 - reader.readEndDocument() - reader.readEndDocument() - where: - writer << [ - new BsonDocumentWriter(new BsonDocument()), - new BsonBinaryWriter(new BasicOutputBuffer()), - new JsonWriter(new StringWriter()) - ] - } - - def 'Lookahead should work at various states with Mark'(BsonWriter writer) { + def 'Lookahead should work at various states with Mark'(BsonWriter writer, boolean useAlternateReader) { given: writer.with { writeStartDocument() @@ -271,10 +58,14 @@ class LimitedLookaheadMarkSpecification extends Specification { if (writer instanceof BsonDocumentWriter) { reader = new BsonDocumentReader(writer.document) } else if (writer instanceof 
BsonBinaryWriter) { - BasicOutputBuffer buffer = (BasicOutputBuffer) writer.getBsonOutput(); + BasicOutputBuffer buffer = (BasicOutputBuffer) writer.getBsonOutput() reader = new BsonBinaryReader(new ByteBufferBsonInput(buffer.getByteBuffers().get(0))) } else if (writer instanceof JsonWriter) { - reader = new JsonReader(writer.writer.toString()) + if (useAlternateReader) { + reader = new JsonReader(new InputStreamReader(new ByteArrayInputStream(writer.writer.toString().getBytes()))) + } else { + reader = new JsonReader(writer.writer.toString()) + } } reader.readStartDocument() @@ -420,11 +211,11 @@ class LimitedLookaheadMarkSpecification extends Specification { reader.readEndDocument() where: - writer << [ - new BsonDocumentWriter(new BsonDocument()), - new BsonBinaryWriter(new BasicOutputBuffer()), - new JsonWriter(new StringWriter()) - ] + writer | useAlternateReader + new BsonDocumentWriter(new BsonDocument()) | false + new BsonBinaryWriter(new BasicOutputBuffer()) | false + new JsonWriter(new StringWriter(), JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()) | false + new JsonWriter(new StringWriter(), JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()) | true } def 'should peek binary subtype and size'(BsonWriter writer) { @@ -441,7 +232,7 @@ class LimitedLookaheadMarkSpecification extends Specification { if (writer instanceof BsonDocumentWriter) { reader = new BsonDocumentReader(writer.document) } else if (writer instanceof BsonBinaryWriter) { - BasicOutputBuffer buffer = (BasicOutputBuffer) writer.getBsonOutput(); + BasicOutputBuffer buffer = (BasicOutputBuffer) writer.getBsonOutput() reader = new BsonBinaryReader(new ByteBufferBsonInput(buffer.getByteBuffers().get(0))) } else if (writer instanceof JsonWriter) { reader = new JsonReader(writer.writer.toString()) @@ -465,7 +256,7 @@ class LimitedLookaheadMarkSpecification extends Specification { writer << [ new BsonDocumentWriter(new BsonDocument()), new BsonBinaryWriter(new 
BasicOutputBuffer()), - new JsonWriter(new StringWriter()) + new JsonWriter(new StringWriter(), JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()) ] } } diff --git a/bson/src/test/unit/org/bson/RawBsonDocumentSpecification.groovy b/bson/src/test/unit/org/bson/RawBsonDocumentSpecification.groovy index e4688592e6b..a23ec06dedb 100644 --- a/bson/src/test/unit/org/bson/RawBsonDocumentSpecification.groovy +++ b/bson/src/test/unit/org/bson/RawBsonDocumentSpecification.groovy @@ -319,7 +319,7 @@ class RawBsonDocumentSpecification extends Specification { def 'toJson should respect JsonWriterSettings'() { given: - def jsonWriterSettings = new JsonWriterSettings(JsonMode.SHELL) + def jsonWriterSettings = JsonWriterSettings.builder().outputMode(JsonMode.SHELL).build() def writer = new StringWriter() when: diff --git a/bson/src/test/unit/org/bson/codecs/AtomicIntegerCodecTest.java b/bson/src/test/unit/org/bson/codecs/AtomicIntegerCodecTest.java index e8922aaaf26..e4fcfd001ed 100644 --- a/bson/src/test/unit/org/bson/codecs/AtomicIntegerCodecTest.java +++ b/bson/src/test/unit/org/bson/codecs/AtomicIntegerCodecTest.java @@ -18,11 +18,12 @@ import org.bson.BsonInvalidOperationException; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.atomic.AtomicInteger; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; public final class AtomicIntegerCodecTest extends CodecTestCase { @@ -44,20 +45,20 @@ public void shouldHandleAlternativeNumberValues() { roundTrip(new Document("a", 9.9999999999999992), new AtomicIntegerComparator(expected)); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowWhenHandlingLossyDoubleValues() { Document original = new Document("a", 9.9999999999999991); - roundTrip(original, new AtomicIntegerComparator(original)); + 
assertThrows(BsonInvalidOperationException.class, () ->roundTrip(original, new AtomicIntegerComparator(original))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMinRange() { - roundTrip(new Document("a", Long.MIN_VALUE)); + assertThrows(BsonInvalidOperationException.class, () ->roundTrip(new Document("a", Long.MIN_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMaxRange() { - roundTrip(new Document("a", Long.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () ->roundTrip(new Document("a", Long.MAX_VALUE))); } @Override @@ -74,7 +75,7 @@ private class AtomicIntegerComparator implements Comparator { @Override public void apply(final Document result) { - assertEquals("Codec Round Trip", + assertEquals( expected.get("a", AtomicInteger.class).get(), result.get("a", AtomicInteger.class).get()); } diff --git a/bson/src/test/unit/org/bson/codecs/AtomicLongCodecTest.java b/bson/src/test/unit/org/bson/codecs/AtomicLongCodecTest.java index 2321becc1ed..1efb30e6348 100644 --- a/bson/src/test/unit/org/bson/codecs/AtomicLongCodecTest.java +++ b/bson/src/test/unit/org/bson/codecs/AtomicLongCodecTest.java @@ -18,11 +18,12 @@ import org.bson.BsonInvalidOperationException; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.atomic.AtomicLong; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; public final class AtomicLongCodecTest extends CodecTestCase { @@ -44,20 +45,20 @@ public void shouldHandleAlternativeNumberValues() { roundTrip(new Document("a", 9.9999999999999992), new AtomicLongComparator(expected)); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowWhenHandlingLossyDoubleValues() { Document original = new Document("a", 
9.9999999999999991); - roundTrip(original, new AtomicLongComparator(original)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(original, new AtomicLongComparator(original))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMinRange() { - roundTrip(new Document("a", -Double.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", -Double.MAX_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMaxRange() { - roundTrip(new Document("a", Double.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Double.MAX_VALUE))); } @Override @@ -74,9 +75,7 @@ private class AtomicLongComparator implements Comparator { @Override public void apply(final Document result) { - assertEquals("Codec Round Trip", - expected.get("a", AtomicLong.class).get(), - result.get("a", AtomicLong.class).get()); + assertEquals(expected.get("a", AtomicLong.class).get(), result.get("a", AtomicLong.class).get()); } } diff --git a/bson/src/test/unit/org/bson/codecs/BinaryBinaryVectorCodecTest.java b/bson/src/test/unit/org/bson/codecs/BinaryBinaryVectorCodecTest.java new file mode 100644 index 00000000000..fadddb7a635 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/BinaryBinaryVectorCodecTest.java @@ -0,0 +1,152 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonBinary; +import org.bson.BsonBinaryReader; +import org.bson.BsonBinarySubType; +import org.bson.BsonBinaryWriter; +import org.bson.BsonDocument; +import org.bson.BsonInvalidOperationException; +import org.bson.BsonType; +import org.bson.BsonWriter; +import org.bson.ByteBufNIO; +import org.bson.Float32BinaryVector; +import org.bson.Int8BinaryVector; +import org.bson.PackedBitBinaryVector; +import org.bson.BinaryVector; +import org.bson.io.BasicOutputBuffer; +import org.bson.io.ByteBufferBsonInput; +import org.bson.io.OutputBuffer; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.stream.Stream; + +import static org.bson.BsonHelper.toBson; +import static org.bson.assertions.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +class BinaryBinaryVectorCodecTest extends CodecTestCase { + + private static Stream provideVectorsAndCodecs() { + return Stream.of( + arguments(BinaryVector.floatVector(new float[]{1.1f, 2.2f, 3.3f}), new Float32BinaryVectorCodec(), Float32BinaryVector.class), + arguments(BinaryVector.int8Vector(new byte[]{10, 20, 30, 40}), new Int8VectorCodec(), Int8BinaryVector.class), + arguments(BinaryVector.packedBitVector(new byte[]{(byte) 0b10101010, (byte) 0b01010101}, (byte) 3), new PackedBitBinaryVectorCodec(), 
PackedBitBinaryVector.class), + arguments(BinaryVector.packedBitVector(new byte[]{(byte) 0b10101010, (byte) 0b01010101}, (byte) 3), new BinaryVectorCodec(), BinaryVector.class), + arguments(BinaryVector.int8Vector(new byte[]{10, 20, 30, 40}), new BinaryVectorCodec(), BinaryVector.class), + arguments(BinaryVector.packedBitVector(new byte[]{(byte) 0b10101010, (byte) 0b01010101}, (byte) 3), new BinaryVectorCodec(), BinaryVector.class) + ); + } + + @ParameterizedTest + @MethodSource("provideVectorsAndCodecs") + void shouldEncodeVector(final BinaryVector vectorToEncode, final Codec vectorCodec) throws IOException { + // given + BsonBinary bsonBinary = new BsonBinary(vectorToEncode); + byte[] encodedVector = bsonBinary.getData(); + ByteArrayOutputStream expectedStream = new ByteArrayOutputStream(); + // Total length of a Document (int 32). It is 0, because we do not expect + // codec to write the end of the document (that is when we back-patch the length of the document). + expectedStream.write(new byte[]{0, 0, 0, 0}); + // Bson type + expectedStream.write((byte) BsonType.BINARY.getValue()); + // Field name "b4" + expectedStream.write(new byte[]{98, 52, 0}); + // Total length of binary data (little-endian format) + expectedStream.write(new byte[]{(byte) encodedVector.length, 0, 0, 0}); + // Vector binary subtype + expectedStream.write(BsonBinarySubType.VECTOR.getValue()); + // Actual BSON binary data + expectedStream.write(encodedVector); + + OutputBuffer buffer = new BasicOutputBuffer(); + BsonWriter writer = new BsonBinaryWriter(buffer); + writer.writeStartDocument(); + writer.writeName("b4"); + + // when + vectorCodec.encode(writer, vectorToEncode, EncoderContext.builder().build()); + + // then + assertArrayEquals(expectedStream.toByteArray(), buffer.toByteArray()); + } + + @ParameterizedTest + @MethodSource("provideVectorsAndCodecs") + void shouldDecodeVector(final BinaryVector vectorToDecode, final Codec vectorCodec) { + // given + OutputBuffer buffer = new 
BasicOutputBuffer(); + BsonWriter writer = new BsonBinaryWriter(buffer); + writer.writeStartDocument(); + writer.writeName("vector"); + writer.writeBinaryData(new BsonBinary(vectorToDecode)); + writer.writeEndDocument(); + + BsonBinaryReader reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap(buffer.toByteArray())))); + reader.readStartDocument(); + + // when + BinaryVector decodedVector = vectorCodec.decode(reader, DecoderContext.builder().build()); + + // then + assertDoesNotThrow(reader::readEndDocument); + assertNotNull(decodedVector); + assertEquals(vectorToDecode, decodedVector); + } + + + @ParameterizedTest + @EnumSource(value = BsonBinarySubType.class, mode = EnumSource.Mode.EXCLUDE, names = {"VECTOR"}) + void shouldThrowExceptionForInvalidSubType(final BsonBinarySubType subType) { + // given + BsonDocument document = new BsonDocument("name", new BsonBinary(subType.getValue(), new byte[]{})); + BsonBinaryReader reader = new BsonBinaryReader(toBson(document)); + reader.readStartDocument(); + + // when & then + Stream.of(new Float32BinaryVectorCodec(), new Int8VectorCodec(), new PackedBitBinaryVectorCodec()) + .forEach(codec -> { + BsonInvalidOperationException exception = assertThrows(BsonInvalidOperationException.class, () -> + codec.decode(reader, DecoderContext.builder().build())); + assertEquals("Expected vector binary subtype 9 but found: " + subType.getValue(), exception.getMessage()); + }); + } + + + @ParameterizedTest + @MethodSource("provideVectorsAndCodecs") + void shouldReturnCorrectEncoderClass(final BinaryVector vector, + final Codec codec, + final Class expectedEncoderClass) { + // when + Class encoderClass = codec.getEncoderClass(); + + // then + assertEquals(expectedEncoderClass, encoderClass); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/BsonCodecProviderSpecification.groovy b/bson/src/test/unit/org/bson/codecs/BsonCodecProviderSpecification.groovy new file mode 100644 index 00000000000..13739fe539f 
--- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/BsonCodecProviderSpecification.groovy @@ -0,0 +1,41 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs + + +import org.bson.BsonDocument +import org.bson.BsonDocumentWrapper +import org.bson.RawBsonDocument +import spock.lang.Specification + +import static org.bson.codecs.configuration.CodecRegistries.fromProviders + +class BsonCodecProviderSpecification extends Specification { + + def provider = new BsonCodecProvider() + def codecRegistry = fromProviders(provider) + + def 'should get correct codec'() { + expect: + provider.get(String, codecRegistry) == null + + provider.get(BsonDocument, codecRegistry).class == BsonCodec + provider.get(BsonDocumentWrapper, codecRegistry).class == BsonCodec + provider.get(RawBsonDocument, codecRegistry).class == BsonCodec + provider.get(BsonDocumentSubclass, codecRegistry).class == BsonCodec + } +} diff --git a/bson/src/test/unit/org/bson/codecs/BsonCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/BsonCodecSpecification.groovy new file mode 100644 index 00000000000..22add20813b --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/BsonCodecSpecification.groovy @@ -0,0 +1,87 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs + + +import org.bson.BsonDocument +import org.bson.BsonDocumentWriter +import org.bson.BsonReader +import org.bson.codecs.configuration.CodecConfigurationException +import org.bson.codecs.configuration.CodecRegistry +import org.bson.conversions.Bson +import spock.lang.Specification + +import static org.bson.codecs.configuration.CodecRegistries.fromProviders + +class BsonCodecSpecification extends Specification { + + def provider = new BsonCodecProvider() + def registry = fromProviders(provider) + + def 'should encode Bson'() { + given: + def codec = new BsonCodec() + def customBson = new CustomBson() + + when: + def writer = new BsonDocumentWriter(new BsonDocument()) + writer.writeStartDocument() + writer.writeName('customBson') + codec.encode(writer, customBson, EncoderContext.builder().build()) + writer.writeEndDocument() + + then: + BsonDocument.parse('{a: 1, b:2}') == writer.getDocument().get('customBson') + } + + def 'should throw CodecConfiguration exception if cannot encode Bson'() { + given: + def codec = new BsonCodec() + def customBson = new ExceptionRaisingBson() + + when: + def writer = new BsonDocumentWriter(new BsonDocument()) + writer.writeStartDocument() + writer.writeName('customBson') + codec.encode(writer, customBson, EncoderContext.builder().build()) + + then: + thrown(CodecConfigurationException) + } + + def 'should throw UnsupportedOperation exception if decode is called'() { + when: + new BsonCodec().decode(Stub(BsonReader), DecoderContext.builder().build()) + + then: + 
thrown(UnsupportedOperationException) + } + + class CustomBson implements Bson { + @Override + BsonDocument toBsonDocument(final Class clazz, final CodecRegistry codecRegistry) { + BsonDocument.parse('{a: 1, b: 2}') + } + } + + class ExceptionRaisingBson implements Bson { + @Override + BsonDocument toBsonDocument(final Class clazz, final CodecRegistry codecRegistry) { + throw new Exception('Cannot encode') + } + } +} diff --git a/bson/src/test/unit/org/bson/codecs/BsonDocumentCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/BsonDocumentCodecSpecification.groovy index 889ae17dccf..8ac2ebcec51 100644 --- a/bson/src/test/unit/org/bson/codecs/BsonDocumentCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/BsonDocumentCodecSpecification.groovy @@ -159,7 +159,7 @@ class BsonDocumentCodecSpecification extends Specification { def 'should encode nested raw documents'() { given: def doc = new BsonDocument('a', BsonBoolean.TRUE) - def rawDoc = new RawBsonDocument(doc, new BsonDocumentCodec()); + def rawDoc = new RawBsonDocument(doc, new BsonDocumentCodec()) def docWithNestedRawDoc = new BsonDocument('a', rawDoc).append('b', new BsonArray(asList(rawDoc))) when: @@ -173,13 +173,13 @@ class BsonDocumentCodecSpecification extends Specification { def 'should determine if document has an id'() { expect: - !new BsonDocumentCodec().documentHasId(new BsonDocument()); - new BsonDocumentCodec().documentHasId(new BsonDocument('_id', new BsonInt32(1))); + !new BsonDocumentCodec().documentHasId(new BsonDocument()) + new BsonDocumentCodec().documentHasId(new BsonDocument('_id', new BsonInt32(1))) } def 'should get document id'() { expect: - !new BsonDocumentCodec().getDocumentId(new BsonDocument()); + !new BsonDocumentCodec().getDocumentId(new BsonDocument()) new BsonDocumentCodec().getDocumentId(new BsonDocument('_id', new BsonInt32(1))) == new BsonInt32(1) } @@ -188,7 +188,7 @@ class BsonDocumentCodecSpecification extends Specification { def document = new 
BsonDocument() when: - document = new BsonDocumentCodec().generateIdIfAbsentFromDocument(document); + document = new BsonDocumentCodec().generateIdIfAbsentFromDocument(document) then: document.get('_id') instanceof BsonObjectId @@ -199,7 +199,7 @@ class BsonDocumentCodecSpecification extends Specification { def document = new BsonDocument('_id', new BsonInt32(1)) when: - document = new BsonDocumentCodec().generateIdIfAbsentFromDocument(document); + document = new BsonDocumentCodec().generateIdIfAbsentFromDocument(document) then: document.get('_id') == new BsonInt32(1) diff --git a/bson/src/test/unit/org/bson/codecs/BsonValueCodecProviderSpecification.groovy b/bson/src/test/unit/org/bson/codecs/BsonValueCodecProviderSpecification.groovy index ad217ef1362..de01d107551 100644 --- a/bson/src/test/unit/org/bson/codecs/BsonValueCodecProviderSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/BsonValueCodecProviderSpecification.groovy @@ -37,6 +37,7 @@ import org.bson.BsonString import org.bson.BsonSymbol import org.bson.BsonTimestamp import org.bson.BsonUndefined +import org.bson.RawBsonArray import org.bson.RawBsonDocument import spock.lang.Specification @@ -73,6 +74,7 @@ class BsonValueCodecProviderSpecification extends Specification { provider.get(BsonJavaScriptWithScope, codecRegistry).class == BsonJavaScriptWithScopeCodec provider.get(BsonArray, codecRegistry).class == BsonArrayCodec + provider.get(RawBsonArray, codecRegistry).class == BsonArrayCodec provider.get(BsonDocument, codecRegistry).class == BsonDocumentCodec provider.get(BsonDocumentWrapper, codecRegistry).class == BsonDocumentWrapperCodec diff --git a/bson/src/test/unit/org/bson/codecs/ByteCodecTest.java b/bson/src/test/unit/org/bson/codecs/ByteCodecTest.java index 667c1308527..20629fb027d 100644 --- a/bson/src/test/unit/org/bson/codecs/ByteCodecTest.java +++ b/bson/src/test/unit/org/bson/codecs/ByteCodecTest.java @@ -18,7 +18,9 @@ import org.bson.BsonInvalidOperationException; import 
org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public final class ByteCodecTest extends CodecTestCase { @@ -36,19 +38,22 @@ public void shouldHandleAlternativeNumberValues() { roundTrip(new Document("a", 9.9999999999999992), expected); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMinRange() { - roundTrip(new Document("a", Integer.MIN_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> + roundTrip(new Document("a", Integer.MIN_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMaxRange() { - roundTrip(new Document("a", Integer.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> + roundTrip(new Document("a", Integer.MAX_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowWhenHandlingLossyDoubleValues() { - roundTrip(new Document("a", 9.9999999999999991)); + assertThrows(BsonInvalidOperationException.class, () -> + roundTrip(new Document("a", 9.9999999999999991))); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/CodeWithScopeSpecification.groovy b/bson/src/test/unit/org/bson/codecs/CodeWithScopeSpecification.groovy index 5315843f040..d1df8412a0f 100644 --- a/bson/src/test/unit/org/bson/codecs/CodeWithScopeSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/CodeWithScopeSpecification.groovy @@ -26,40 +26,40 @@ import spock.lang.Subject import static CodecTestUtil.prepareReaderWithObjectToBeDecoded class CodeWithScopeSpecification extends Specification { - private final BsonWriter bsonWriter = Mock(); + private final BsonWriter bsonWriter = Mock() @Subject - private final CodeWithScopeCodec codeWithScopeCodec = new CodeWithScopeCodec(new DocumentCodec()); + private final CodeWithScopeCodec codeWithScopeCodec = new CodeWithScopeCodec(new DocumentCodec()) 
def 'should encode code with scope as java script followed by document of scope'() { given: - String javascriptCode = ''; - CodeWithScope codeWithScope = new CodeWithScope(javascriptCode, new Document('the', 'scope')); + String javascriptCode = '' + CodeWithScope codeWithScope = new CodeWithScope(javascriptCode, new Document('the', 'scope')) when: - codeWithScopeCodec.encode(bsonWriter, codeWithScope, EncoderContext.builder().build()); + codeWithScopeCodec.encode(bsonWriter, codeWithScope, EncoderContext.builder().build()) then: - 1 * bsonWriter.writeJavaScriptWithScope(javascriptCode); + 1 * bsonWriter.writeJavaScriptWithScope(javascriptCode) then: - 1 * bsonWriter.writeStartDocument(); + 1 * bsonWriter.writeStartDocument() then: - 1 * bsonWriter.writeName('the'); + 1 * bsonWriter.writeName('the') then: - 1 * bsonWriter.writeString('scope'); + 1 * bsonWriter.writeString('scope') then: - 1 * bsonWriter.writeEndDocument(); + 1 * bsonWriter.writeEndDocument() } def 'should decode code with scope'() { given: - CodeWithScope codeWithScope = new CodeWithScope('{javascript code}', new Document('the', 'scope')); - BsonBinaryReader reader = prepareReaderWithObjectToBeDecoded(codeWithScope); + CodeWithScope codeWithScope = new CodeWithScope('{javascript code}', new Document('the', 'scope')) + BsonBinaryReader reader = prepareReaderWithObjectToBeDecoded(codeWithScope) when: - CodeWithScope actualCodeWithScope = codeWithScopeCodec.decode(reader, DecoderContext.builder().build()); + CodeWithScope actualCodeWithScope = codeWithScopeCodec.decode(reader, DecoderContext.builder().build()) then: - actualCodeWithScope == codeWithScope; + actualCodeWithScope == codeWithScope } } diff --git a/bson/src/test/unit/org/bson/codecs/CodecTestCase.java b/bson/src/test/unit/org/bson/codecs/CodecTestCase.java index 5074d470001..17768d0d133 100644 --- a/bson/src/test/unit/org/bson/codecs/CodecTestCase.java +++ b/bson/src/test/unit/org/bson/codecs/CodecTestCase.java @@ -18,7 +18,11 @@ import 
org.bson.BsonBinaryReader; import org.bson.BsonBinaryWriter; +import org.bson.BsonDocument; +import org.bson.BsonDocumentReader; +import org.bson.BsonDocumentWriter; import org.bson.BsonType; +import org.bson.BsonValue; import org.bson.BsonWriter; import org.bson.ByteBufNIO; import org.bson.Document; @@ -32,7 +36,7 @@ import static java.util.Arrays.asList; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; abstract class CodecTestCase { @@ -44,8 +48,25 @@ CodecRegistry getRegistry() { return fromProviders(asList(new ValueCodecProvider(), getDocumentCodecProvider())); } + T getDecodedValue(final BsonValue bsonValue, final Decoder decoder) { + BsonDocument document = new BsonDocument("val", bsonValue); + BsonDocumentReader reader = new BsonDocumentReader(document); + reader.readStartDocument(); + reader.readName("val"); + return decoder.decode(reader, DecoderContext.builder().build()); + } + + BsonValue getEncodedValue(final T value, final Encoder encoder) { + BsonDocumentWriter writer = new BsonDocumentWriter(new BsonDocument()); + writer.writeStartDocument(); + writer.writeName("val"); + encoder.encode(writer, value, EncoderContext.builder().build()); + writer.writeEndDocument(); + return writer.getDocument().get("val"); + } + void roundTrip(final T value) { - roundTrip(value, new DefaultComparator(value)); + roundTrip(value, new DefaultComparator<>(value)); } void roundTrip(final T value, final Comparator comparator) { @@ -61,12 +82,7 @@ void roundTripWithRegistry(final T value, final Comparator comparator, fi } public void roundTrip(final Document input, final Document expected) { - roundTrip(input, new Comparator() { - @Override - public void apply(final Document result) { - assertEquals("Codec Round Trip", expected, result); - } - }); + roundTrip(input, result -> assertEquals(expected, result)); } OutputBuffer encode(final Codec 
codec, final T value) { @@ -82,10 +98,11 @@ T decode(final Codec codec, final OutputBuffer buffer) { } DocumentCodecProvider getSpecificNumberDocumentCodecProvider(final Class clazz) { - HashMap> replacements = new HashMap>(); + HashMap> replacements = new HashMap<>(); replacements.put(BsonType.DOUBLE, clazz); replacements.put(BsonType.INT32, clazz); replacements.put(BsonType.INT64, clazz); + replacements.put(BsonType.DECIMAL128, clazz); return new DocumentCodecProvider(new BsonTypeClassMap(replacements)); } @@ -102,7 +119,7 @@ class DefaultComparator implements Comparator { @Override public void apply(final T result) { - assertEquals("Codec Round Trip", original, result); + assertEquals(original, result); } } diff --git a/bson/src/test/unit/org/bson/codecs/CollectionCodecProviderTest.java b/bson/src/test/unit/org/bson/codecs/CollectionCodecProviderTest.java new file mode 100644 index 00000000000..d15a992f251 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/CollectionCodecProviderTest.java @@ -0,0 +1,52 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs; + +import org.bson.conversions.Bson; +import org.junit.jupiter.api.Test; + +import java.util.Set; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +final class CollectionCodecProviderTest { + @Test + void shouldReturnNullForNonCollection() { + CollectionCodecProvider provider = new CollectionCodecProvider(); + assertNull(provider.get(String.class, Bson.DEFAULT_CODEC_REGISTRY)); + } + + @Test + void shouldReturnCollectionCodecForCollection() { + CollectionCodecProvider provider = new CollectionCodecProvider(); + @SuppressWarnings({"rawtypes", "unchecked"}) + Codec> codec = (Codec>) (Codec) provider.get(Set.class, Bson.DEFAULT_CODEC_REGISTRY); + assertTrue(codec instanceof CollectionCodec); + CollectionCodec> recordCodec = (CollectionCodec>) codec; + assertEquals(Set.class, recordCodec.getEncoderClass()); + } + + @Test + public void shouldReturnCollectionCodecForCollectionUsingDefaultRegistry() { + @SuppressWarnings({"rawtypes", "unchecked"}) + Codec> codec = (Codec>) (Codec) Bson.DEFAULT_CODEC_REGISTRY.get(Set.class); + assertTrue(codec instanceof CollectionCodec); + CollectionCodec> recordCodec = (CollectionCodec>) codec; + assertEquals(Set.class, recordCodec.getEncoderClass()); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/CollectionCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/CollectionCodecSpecification.groovy new file mode 100644 index 00000000000..269032b8014 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/CollectionCodecSpecification.groovy @@ -0,0 +1,238 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs + +import org.bson.BsonArray +import org.bson.BsonDateTime +import org.bson.BsonDocument +import org.bson.BsonDocumentReader +import org.bson.BsonDocumentWriter +import org.bson.codecs.jsr310.Jsr310CodecProvider +import org.bson.types.Binary +import spock.lang.Specification +import spock.lang.Unroll + +import java.lang.reflect.ParameterizedType +import java.time.Instant +import java.util.concurrent.CopyOnWriteArrayList + +import static java.util.Arrays.asList +import static org.bson.BsonDocument.parse +import static org.bson.UuidRepresentation.C_SHARP_LEGACY +import static org.bson.UuidRepresentation.JAVA_LEGACY +import static org.bson.UuidRepresentation.PYTHON_LEGACY +import static org.bson.UuidRepresentation.STANDARD +import static org.bson.UuidRepresentation.UNSPECIFIED +import static org.bson.codecs.configuration.CodecRegistries.fromCodecs +import static org.bson.codecs.configuration.CodecRegistries.fromProviders +import static org.bson.codecs.configuration.CodecRegistries.fromRegistries + +class CollectionCodecSpecification extends Specification { + + static final REGISTRY = fromRegistries(fromCodecs(new UuidCodec(JAVA_LEGACY)), + fromProviders(new ValueCodecProvider(), new DocumentCodecProvider(), new BsonValueCodecProvider(), + new CollectionCodecProvider(), new MapCodecProvider())) + + def 'should decode to specified generic class'() { + given: + def doc = new BsonDocument('a', new BsonArray()) + + when: + def codec = new CollectionCodec(fromProviders([new ValueCodecProvider()]), new BsonTypeClassMap(), null, 
collectionType) + def reader = new BsonDocumentReader(doc) + reader.readStartDocument() + reader.readName('a') + def collection = codec.decode(reader, DecoderContext.builder().build()) + + then: + codec.getEncoderClass() == collectionType + collection.getClass() == decodedType + + where: + collectionType | decodedType + Collection | ArrayList + List | ArrayList + AbstractList | ArrayList + AbstractCollection | ArrayList + ArrayList | ArrayList + Set | HashSet + AbstractSet | HashSet + HashSet | HashSet + NavigableSet | TreeSet + SortedSet | TreeSet + TreeSet | TreeSet + CopyOnWriteArrayList | CopyOnWriteArrayList + } + + def 'should encode a Collection to a BSON array'() { + given: + def codec = new CollectionCodec(REGISTRY, new BsonTypeClassMap(), null, Collection) + def writer = new BsonDocumentWriter(new BsonDocument()) + + when: + writer.writeStartDocument() + writer.writeName('array') + codec.encode(writer, [1, 2, 3, null], EncoderContext.builder().build()) + writer.writeEndDocument() + + then: + writer.document == parse('{array : [1, 2, 3, null]}') + } + + def 'should decode a BSON array to a Collection'() { + given: + def codec = new CollectionCodec(REGISTRY, new BsonTypeClassMap(), null, Collection) + def reader = new BsonDocumentReader(parse('{array : [1, 2, 3, null]}')) + + when: + reader.readStartDocument() + reader.readName('array') + def collection = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + then: + collection == [1, 2, 3, null] + } + + def 'should decode a BSON array of arrays to a Collection of Collection'() { + given: + def codec = new CollectionCodec(REGISTRY, new BsonTypeClassMap(), null, Collection) + def reader = new BsonDocumentReader(parse('{array : [[1, 2], [3, 4, 5]]}')) + + when: + reader.readStartDocument() + reader.readName('array') + def collection = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + then: + collection == [[1, 2], [3, 4, 5]] + } + + def 
'should use provided transformer'() { + given: + def codec = new CollectionCodec(REGISTRY, new BsonTypeClassMap(), { Object from -> + from.toString() + }, Collection) + def reader = new BsonDocumentReader(parse('{array : [1, 2, 3]}')) + + when: + reader.readStartDocument() + reader.readName('array') + def collection = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + then: + collection == ['1', '2', '3'] + } + + @SuppressWarnings(['LineLength']) + @Unroll + def 'should decode binary subtype 3 for UUID'() { + given: + def reader = new BsonDocumentReader(parse(document)) + def codec = new CollectionCodec(fromCodecs(new UuidCodec(representation), new BinaryCodec()), new BsonTypeClassMap(), + null, Collection) + .withUuidRepresentation(representation) + + when: + reader.readStartDocument() + reader.readName('array') + def collection = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + then: + value == collection + + where: + representation | value | document + JAVA_LEGACY | [UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09')] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + C_SHARP_LEGACY | [UUID.fromString('04030201-0605-0807-090a-0b0c0d0e0f10')] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + PYTHON_LEGACY | [UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10')] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + STANDARD | [new Binary((byte) 3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + UNSPECIFIED | [new Binary((byte) 3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + } + + @SuppressWarnings(['LineLength']) + @Unroll + def 'should decode binary subtype 4 for UUID'() { + given: + def reader = new 
BsonDocumentReader(parse(document)) + def codec = new CollectionCodec(fromCodecs(new UuidCodec(representation), new BinaryCodec()), new BsonTypeClassMap(), + null, Collection) + .withUuidRepresentation(representation) + + when: + reader.readStartDocument() + reader.readName('array') + def collection = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + then: + value == collection + + where: + representation | value | document + STANDARD | [UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10')] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "4" }]}' + JAVA_LEGACY | [UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10')] | '{"array": [{ "$binary" : "CAcGBQQDAgEQDw4NDAsKCQ==", "$type" : "3" }]}' + C_SHARP_LEGACY | [new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "4" }]}' + PYTHON_LEGACY | [new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "4" }]}' + UNSPECIFIED | [new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "4" }]}' + } + + def 'should parameterize'() { + given: + def codec = fromProviders(new Jsr310CodecProvider(), REGISTRY).get( + Collection, + asList(((ParameterizedType) Container.getMethod('getInstants').genericReturnType).actualTypeArguments)) + def writer = new BsonDocumentWriter(new BsonDocument()) + def reader = new BsonDocumentReader(writer.getDocument()) + def instants = [ + ['firstMap': [Instant.ofEpochMilli(1), Instant.ofEpochMilli(2)]], + ['secondMap': [Instant.ofEpochMilli(3), Instant.ofEpochMilli(4)]]] + when: + writer.writeStartDocument() + writer.writeName('instants') + codec.encode(writer, instants, EncoderContext.builder().build()) + writer.writeEndDocument() + + then: + 
writer.getDocument() == new BsonDocument() + .append('instants', new BsonArray( + [ + new BsonDocument('firstMap', new BsonArray([new BsonDateTime(1), new BsonDateTime(2)])), + new BsonDocument('secondMap', new BsonArray([new BsonDateTime(3), new BsonDateTime(4)])) + ])) + + when: + reader.readStartDocument() + reader.readName('instants') + def decodedInstants = codec.decode(reader, DecoderContext.builder().build()) + + then: + decodedInstants == instants + } + + @SuppressWarnings('unused') + static class Container { + private final List>> instants = [] + + List>> getInstants() { + instants + } + } +} diff --git a/bson/src/test/unit/org/bson/codecs/DocumentCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/DocumentCodecSpecification.groovy index c9391431b45..c2dac8a6027 100644 --- a/bson/src/test/unit/org/bson/codecs/DocumentCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/DocumentCodecSpecification.groovy @@ -30,6 +30,7 @@ import org.bson.BsonUndefined import org.bson.BsonWriter import org.bson.ByteBufNIO import org.bson.Document +import org.bson.codecs.configuration.CodecRegistry import org.bson.io.BasicOutputBuffer import org.bson.io.ByteBufferBsonInput import org.bson.json.JsonReader @@ -42,6 +43,7 @@ import org.bson.types.ObjectId import org.bson.types.Symbol import spock.lang.Shared import spock.lang.Specification +import spock.lang.Unroll import java.nio.ByteBuffer import java.util.concurrent.atomic.AtomicBoolean @@ -49,9 +51,20 @@ import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.atomic.AtomicLong import static java.util.Arrays.asList +import static org.bson.UuidRepresentation.C_SHARP_LEGACY +import static org.bson.UuidRepresentation.JAVA_LEGACY +import static org.bson.UuidRepresentation.PYTHON_LEGACY +import static org.bson.UuidRepresentation.STANDARD +import static org.bson.UuidRepresentation.UNSPECIFIED +import static org.bson.codecs.configuration.CodecRegistries.fromCodecs import static 
org.bson.codecs.configuration.CodecRegistries.fromProviders +import static org.bson.codecs.configuration.CodecRegistries.fromRegistries class DocumentCodecSpecification extends Specification { + static final CodecRegistry REGISTRY = fromRegistries(fromCodecs(new UuidCodec(STANDARD)), + fromProviders(asList(new ValueCodecProvider(), new CollectionCodecProvider(), + new BsonValueCodecProvider(), new DocumentCodecProvider(), new MapCodecProvider()))) + @Shared BsonDocument bsonDoc = new BsonDocument() @Shared @@ -90,18 +103,19 @@ class DocumentCodecSpecification extends Specification { } when: - new DocumentCodec().encode(writer, originalDocument, EncoderContext.builder().build()) + new DocumentCodec(REGISTRY).withUuidRepresentation(STANDARD) + .encode(writer, originalDocument, EncoderContext.builder().build()) BsonReader reader if (writer instanceof BsonDocumentWriter) { reader = new BsonDocumentReader(bsonDoc) } else if (writer instanceof BsonBinaryWriter) { - BasicOutputBuffer buffer = (BasicOutputBuffer)writer.getBsonOutput(); + BasicOutputBuffer buffer = (BasicOutputBuffer)writer.getBsonOutput() reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO( ByteBuffer.wrap(buffer.toByteArray())))) } else { reader = new JsonReader(stringWriter.toString()) } - def decodedDoc = new DocumentCodec().decode(reader, DecoderContext.builder().build()) + def decodedDoc = new DocumentCodec(REGISTRY).withUuidRepresentation(STANDARD).decode(reader, DecoderContext.builder().build()) then: decodedDoc.get('null') == originalDocument.get('null') @@ -139,8 +153,7 @@ class DocumentCodecSpecification extends Specification { ] } - @SuppressWarnings(['LineLength']) - def 'should decode binary subtypes for UUID'() { + def 'should decode binary subtypes for UUID that are not 16 bytes into Binary'() { given: def reader = new BsonBinaryReader(ByteBuffer.wrap(bytes as byte[])) @@ -151,11 +164,55 @@ class DocumentCodecSpecification extends Specification { value == document.get('f') 
where: - value | bytes - new Binary((byte) 0x03, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 3, 115, 116, 11, 0] - new Binary((byte) 0x04, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 4, 115, 116, 11, 0] - UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] - UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + value | bytes + new Binary((byte) 0x03, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 3, 115, 116, 11, 0] + new Binary((byte) 0x04, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 4, 115, 116, 11, 0] + } + + @SuppressWarnings(['LineLength']) + @Unroll + def 'should decode binary subtype 3 for UUID'() { + given: + def reader = new BsonBinaryReader(ByteBuffer.wrap(bytes as byte[])) + + when: + def document = new DocumentCodec(fromCodecs(new UuidCodec(representation), new BinaryCodec())) + .withUuidRepresentation(representation) + .decode(reader, DecoderContext.builder().build()) + + then: + value == document.get('f') + + where: + representation | value | bytes + JAVA_LEGACY | UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + C_SHARP_LEGACY | UUID.fromString('04030201-0605-0807-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + PYTHON_LEGACY | UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + STANDARD | new Binary((byte) 3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + 
UNSPECIFIED | new Binary((byte) 3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + } + + @SuppressWarnings(['LineLength']) + @Unroll + def 'should decode binary subtype 4 for UUID'() { + given: + def reader = new BsonBinaryReader(ByteBuffer.wrap(bytes as byte[])) + + when: + def document = new DocumentCodec(fromCodecs(new UuidCodec(representation), new BinaryCodec())) + .withUuidRepresentation(representation) + .decode(reader, DecoderContext.builder().build()) + + then: + value == document.get('f') + + where: + representation | value | bytes + STANDARD | UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + JAVA_LEGACY | new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + C_SHARP_LEGACY | new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + PYTHON_LEGACY | new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + UNSPECIFIED | new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] } def 'should respect encodeIdFirst property in encoder context'() { diff --git a/bson/src/test/unit/org/bson/codecs/DocumentCodecTest.java b/bson/src/test/unit/org/bson/codecs/DocumentCodecTest.java index 61a4f71618e..7343707d5a7 100644 --- a/bson/src/test/unit/org/bson/codecs/DocumentCodecTest.java +++ 
b/bson/src/test/unit/org/bson/codecs/DocumentCodecTest.java @@ -23,6 +23,7 @@ import org.bson.BsonObjectId; import org.bson.ByteBufNIO; import org.bson.Document; +import org.bson.BinaryVector; import org.bson.io.BasicOutputBuffer; import org.bson.io.BsonInput; import org.bson.io.ByteBufferBsonInput; @@ -33,9 +34,9 @@ import org.bson.types.MaxKey; import org.bson.types.MinKey; import org.bson.types.ObjectId; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -45,21 +46,21 @@ import java.util.List; import static java.util.Arrays.asList; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class DocumentCodecTest { private BasicOutputBuffer buffer; private BsonBinaryWriter writer; - @Before + @BeforeEach public void setUp() throws Exception { buffer = new BasicOutputBuffer(); writer = new BsonBinaryWriter(buffer); } - @After + @AfterEach public void tearDown() { writer.close(); } @@ -80,6 +81,9 @@ public void testPrimitiveBSONTypeCodecs() throws IOException { doc.put("code", new Code("var i = 0")); doc.put("minkey", new MinKey()); doc.put("maxkey", new MaxKey()); + doc.put("vectorFloat", BinaryVector.floatVector(new float[]{1.1f, 2.2f, 3.3f})); + doc.put("vectorInt8", BinaryVector.int8Vector(new byte[]{10, 20, 30, 40})); + doc.put("vectorPackedBit", BinaryVector.packedBitVector(new byte[]{(byte) 0b10101010, (byte) 0b01010101}, (byte) 3)); // doc.put("pattern", Pattern.compile("^hello")); // TODO: Pattern doesn't override equals method! 
doc.put("null", null); @@ -95,7 +99,7 @@ public void testIterableEncoding() throws IOException { DocumentCodec documentCodec = new DocumentCodec(); Document doc = new Document() .append("list", asList(1, 2, 3, 4, 5)) - .append("set", new HashSet(asList(1, 2, 3, 4))); + .append("set", new HashSet<>(asList(1, 2, 3, 4))); documentCodec.encode(writer, doc, EncoderContext.builder().build()); @@ -103,8 +107,7 @@ public void testIterableEncoding() throws IOException { Document decodedDocument = documentCodec.decode(new BsonBinaryReader(bsonInput), DecoderContext.builder().build()); assertEquals(new Document() .append("list", asList(1, 2, 3, 4, 5)) - .append("set", asList(1, 2, 3, 4)), - decodedDocument); + .append("set", asList(1, 2, 3, 4)), decodedDocument); } @Test @@ -123,7 +126,6 @@ public void testCodeWithScopeEncoding() throws IOException { public void testIterableContainingOtherIterableEncoding() throws IOException { DocumentCodec documentCodec = new DocumentCodec(); Document doc = new Document(); - @SuppressWarnings("unchecked") List> listOfLists = asList(asList(1), asList(2)); doc.put("array", listOfLists); diff --git a/bson/src/test/unit/org/bson/codecs/DoubleCodecTest.java b/bson/src/test/unit/org/bson/codecs/DoubleCodecTest.java index 8cb5d3adbb0..cbf6031fb88 100644 --- a/bson/src/test/unit/org/bson/codecs/DoubleCodecTest.java +++ b/bson/src/test/unit/org/bson/codecs/DoubleCodecTest.java @@ -18,7 +18,10 @@ import org.bson.BsonInvalidOperationException; import org.bson.Document; -import org.junit.Test; +import org.bson.types.Decimal128; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public final class DoubleCodecTest extends CodecTestCase { @@ -33,16 +36,27 @@ public void shouldHandleAlternativeNumberValues() { Document expected = new Document("a", 10.00); roundTrip(new Document("a", 10), expected); roundTrip(new Document("a", 10L), expected); + roundTrip(new Document("a", Decimal128.parse("10")), expected); 
} - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowWhenHandlingLossyLongValues() { - roundTrip(new Document("a", Long.MAX_VALUE - 1)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Long.MAX_VALUE - 1))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowWhenHandlingLossyLongValues2() { - roundTrip(new Document("a", Long.MIN_VALUE + 1)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Long.MIN_VALUE + 1))); + } + + @Test + public void shouldThrowWhenHandlingLossyDecimal128Values() { + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Decimal128.parse("10.0")))); + } + + @Test + public void shouldThrowWhenHandlingNonExpressibleDecimal128Values() { + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Decimal128.parse("NaN")))); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/EnumCodecProviderTest.java b/bson/src/test/unit/org/bson/codecs/EnumCodecProviderTest.java new file mode 100644 index 00000000000..2e682fafdb5 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/EnumCodecProviderTest.java @@ -0,0 +1,41 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs; + +import org.bson.codecs.configuration.CodecRegistries; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +public class EnumCodecProviderTest { + @Test + public void shouldProvideCodecForEnum() { + EnumCodecProvider provider = new EnumCodecProvider(); + Codec codec = provider.get(SimpleEnum.class, CodecRegistries.fromProviders(provider)); + assertNotNull(codec); + assertEquals(EnumCodec.class, codec.getClass()); + } + + @Test + public void shouldNotProvideCodecForNonEnum() { + EnumCodecProvider provider = new EnumCodecProvider(); + Codec codec = provider.get(String.class, CodecRegistries.fromProviders(provider)); + assertNull(codec); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/EnumCodecTest.java b/bson/src/test/unit/org/bson/codecs/EnumCodecTest.java new file mode 100644 index 00000000000..5e714b5afd7 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/EnumCodecTest.java @@ -0,0 +1,39 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs; + +import org.bson.BsonString; +import org.bson.BsonValue; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class EnumCodecTest extends CodecTestCase { + @Test + public void shouldEncodeEnum() { + Codec codec = new EnumCodec<>(SimpleEnum.class); + BsonValue encodedValue = getEncodedValue(SimpleEnum.BRAVO, codec); + assertEquals(SimpleEnum.BRAVO.name(), encodedValue.asString().getValue()); + } + + @Test + public void shouldDecodeEnum() { + Codec codec = new EnumCodec<>(SimpleEnum.class); + SimpleEnum decodedValue = getDecodedValue(new BsonString(SimpleEnum.BRAVO.name()), codec); + assertEquals(SimpleEnum.BRAVO, decodedValue); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/FloatCodecTest.java b/bson/src/test/unit/org/bson/codecs/FloatCodecTest.java index b46f9cc5718..90cf41a20de 100644 --- a/bson/src/test/unit/org/bson/codecs/FloatCodecTest.java +++ b/bson/src/test/unit/org/bson/codecs/FloatCodecTest.java @@ -18,7 +18,10 @@ import org.bson.BsonInvalidOperationException; import org.bson.Document; -import org.junit.Test; +import org.bson.types.Decimal128; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public final class FloatCodecTest extends CodecTestCase { @@ -39,16 +42,22 @@ public void shouldHandleAlternativeNumberValues() { roundTrip(new Document("a", 10), expected); roundTrip(new Document("a", 10L), expected); roundTrip(new Document("a", 9.9999999999999992), expected); + roundTrip(new Document("a", Decimal128.parse("10")), expected); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMinRange() { - roundTrip(new Document("a", -Double.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", -Double.MAX_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void 
shouldErrorDecodingOutsideMaxRange() { - roundTrip(new Document("a", Double.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Double.MAX_VALUE))); + } + + @Test + public void shouldThrowWhenHandlingLossyDecimal128Values() { + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Decimal128.parse("10.0")))); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/IntegerCodecTest.java b/bson/src/test/unit/org/bson/codecs/IntegerCodecTest.java index 1b874cc5408..11a8ac3647c 100644 --- a/bson/src/test/unit/org/bson/codecs/IntegerCodecTest.java +++ b/bson/src/test/unit/org/bson/codecs/IntegerCodecTest.java @@ -18,7 +18,10 @@ import org.bson.BsonInvalidOperationException; import org.bson.Document; -import org.junit.Test; +import org.bson.types.Decimal128; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public final class IntegerCodecTest extends CodecTestCase { @@ -34,21 +37,27 @@ public void shouldHandleAlternativeNumberValues() { roundTrip(new Document("a", 10L), expected); roundTrip(new Document("a", 10.00), expected); roundTrip(new Document("a", 9.9999999999999992), expected); + roundTrip(new Document("a", Decimal128.parse("10")), expected); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMinRange() { - roundTrip(new Document("a", Long.MIN_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Long.MIN_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMaxRange() { - roundTrip(new Document("a", Long.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Long.MAX_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowWhenHandlingLossyDoubleValues() { - roundTrip(new Document("a", 
9.9999999999999991)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", 9.9999999999999991))); + } + + @Test + public void shouldThrowWhenHandlingLossyDecimal128Values() { + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Decimal128.parse("10.0")))); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/IterableCodecProviderSpecification.groovy b/bson/src/test/unit/org/bson/codecs/IterableCodecProviderSpecification.groovy index b0eae796fc4..b5217676871 100644 --- a/bson/src/test/unit/org/bson/codecs/IterableCodecProviderSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/IterableCodecProviderSpecification.groovy @@ -16,6 +16,7 @@ package org.bson.codecs +import org.bson.BsonType import spock.lang.Specification import static org.bson.codecs.configuration.CodecRegistries.fromProviders @@ -57,7 +58,7 @@ class IterableCodecProviderSpecification extends Specification { def 'unidentical instances should not be equal'() { given: def first = new IterableCodecProvider() - def second = new IterableCodecProvider(new BsonTypeClassMap([BOOLEAN: String])) + def second = new IterableCodecProvider(new BsonTypeClassMap([(BsonType.BOOLEAN): String])) def third = new IterableCodecProvider(new BsonTypeClassMap(), { Object from -> from }) diff --git a/bson/src/test/unit/org/bson/codecs/IterableCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/IterableCodecSpecification.groovy index 0427a4b6a32..6af13dfc2ac 100644 --- a/bson/src/test/unit/org/bson/codecs/IterableCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/IterableCodecSpecification.groovy @@ -21,18 +21,29 @@ import org.bson.BsonDocumentReader import org.bson.BsonDocumentWriter import org.bson.types.Binary import spock.lang.Specification +import spock.lang.Unroll + +import java.time.Instant import static org.bson.BsonDocument.parse +import static org.bson.UuidRepresentation.C_SHARP_LEGACY +import static 
org.bson.UuidRepresentation.JAVA_LEGACY +import static org.bson.UuidRepresentation.PYTHON_LEGACY +import static org.bson.UuidRepresentation.STANDARD +import static org.bson.UuidRepresentation.UNSPECIFIED +import static org.bson.codecs.configuration.CodecRegistries.fromCodecs import static org.bson.codecs.configuration.CodecRegistries.fromProviders +import static org.bson.codecs.configuration.CodecRegistries.fromRegistries class IterableCodecSpecification extends Specification { - static final REGISTRY = fromProviders(new ValueCodecProvider(), new DocumentCodecProvider(), new BsonValueCodecProvider(), - new IterableCodecProvider()) + static final REGISTRY = fromRegistries(fromCodecs(new UuidCodec(JAVA_LEGACY)), + fromProviders(new ValueCodecProvider(), new DocumentCodecProvider(), new BsonValueCodecProvider(), + new IterableCodecProvider(), new MapCodecProvider())) def 'should have Iterable encoding class'() { given: - def codec = new IterableCodec(REGISTRY, new BsonTypeClassMap()) + def codec = new IterableCodec(REGISTRY, new BsonTypeClassMap(), null) expect: codec.getEncoderClass() == Iterable @@ -40,7 +51,7 @@ class IterableCodecSpecification extends Specification { def 'should encode an Iterable to a BSON array'() { given: - def codec = new IterableCodec(REGISTRY, new BsonTypeClassMap()) + def codec = new IterableCodec(REGISTRY, new BsonTypeClassMap(), null) def writer = new BsonDocumentWriter(new BsonDocument()) when: @@ -55,7 +66,7 @@ class IterableCodecSpecification extends Specification { def 'should decode a BSON array to an Iterable'() { given: - def codec = new IterableCodec(REGISTRY, new BsonTypeClassMap()) + def codec = new IterableCodec(REGISTRY, new BsonTypeClassMap(), null) def reader = new BsonDocumentReader(parse('{array : [1, 2, 3, null]}')) when: @@ -70,7 +81,7 @@ class IterableCodecSpecification extends Specification { def 'should decode a BSON array of arrays to an Iterable of Iterables'() { given: - def codec = new IterableCodec(REGISTRY, new 
BsonTypeClassMap()) + def codec = new IterableCodec(REGISTRY, new BsonTypeClassMap(), null) def reader = new BsonDocumentReader(parse('{array : [[1, 2], [3, 4, 5]]}')) when: @@ -100,10 +111,39 @@ class IterableCodecSpecification extends Specification { iterable == ['1', '2', '3'] } - def 'should decode binary subtypes for UUID'() { + @SuppressWarnings(['LineLength']) + @Unroll + def 'should decode binary subtype 3 for UUID'() { + given: + def reader = new BsonDocumentReader(parse(document)) + def codec = new IterableCodec(fromCodecs(new UuidCodec(representation), new BinaryCodec()), new BsonTypeClassMap(), null) + .withUuidRepresentation(representation) + + when: + reader.readStartDocument() + reader.readName('array') + def iterable = codec.decode(reader, DecoderContext.builder().build()) + reader.readEndDocument() + + then: + value == iterable + + where: + representation | value | document + JAVA_LEGACY | [UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09')] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + C_SHARP_LEGACY | [UUID.fromString('04030201-0605-0807-090a-0b0c0d0e0f10')] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + PYTHON_LEGACY | [UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10')] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + STANDARD | [new Binary((byte) 3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + UNSPECIFIED | [new Binary((byte) 3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' + } + + @SuppressWarnings(['LineLength']) + @Unroll + def 'should decode binary subtype 4 for UUID'() { given: - def codec = new IterableCodec(REGISTRY, new BsonTypeClassMap(), null) def reader = new BsonDocumentReader(parse(document)) + def codec = new IterableCodec(fromCodecs(new 
UuidCodec(representation), new BinaryCodec()), new BsonTypeClassMap(), null) + .withUuidRepresentation(representation) when: reader.readStartDocument() @@ -112,14 +152,23 @@ class IterableCodecSpecification extends Specification { reader.readEndDocument() then: - iterable == value + value == iterable where: - document | value - '{"array": [{ "$binary" : "c3QL", "$type" : "3" }]}' | [new Binary((byte) 0x03, (byte[]) [115, 116, 11])] - '{"array": [{ "$binary" : "c3QL", "$type" : "4" }]}' | [new Binary((byte) 0x04, (byte[]) [115, 116, 11])] - '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "3" }]}' | [UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09')] - '{"array": [{ "$binary" : "CAcGBQQDAgEQDw4NDAsKCQ==", "$type" : "3" }]}' | [UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10')] + representation | value | document + STANDARD | [UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10')] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "4" }]}' + JAVA_LEGACY | [UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10')] | '{"array": [{ "$binary" : "CAcGBQQDAgEQDw4NDAsKCQ==", "$type" : "3" }]}' + C_SHARP_LEGACY | [new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "4" }]}' + PYTHON_LEGACY | [new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "4" }]}' + UNSPECIFIED | [new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[])] | '{"array": [{ "$binary" : "AQIDBAUGBwgJCgsMDQ4PEA==", "$type" : "4" }]}' } + @SuppressWarnings('unused') + static class Container { + private final List>> instants = [] + + List>> getInstants() { + instants + } + } } diff --git a/bson/src/test/unit/org/bson/codecs/JsonObjectCodecProviderTest.java b/bson/src/test/unit/org/bson/codecs/JsonObjectCodecProviderTest.java new file 
mode 100644 index 00000000000..f3af17ceefb --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/JsonObjectCodecProviderTest.java @@ -0,0 +1,37 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.codecs.configuration.CodecProvider; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.json.JsonObject; +import org.junit.jupiter.api.Test; + +import static org.bson.codecs.configuration.CodecRegistries.fromProviders; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +public class JsonObjectCodecProviderTest { + + @Test + public void testJsonObjectCodecProvider() { + CodecProvider provider = new JsonObjectCodecProvider(); + CodecRegistry registry = fromProviders(provider); + assertEquals(provider.get(JsonObject.class, registry).getClass(), JsonObjectCodec.class); + assertNull(provider.get(Integer.class, registry)); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/JsonObjectCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/JsonObjectCodecSpecification.groovy new file mode 100644 index 00000000000..3c4a9c79723 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/JsonObjectCodecSpecification.groovy @@ -0,0 +1,73 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs + +import org.bson.BsonDocument +import org.bson.BsonDocumentReader +import org.bson.BsonDocumentWriter +import org.bson.json.JsonMode +import org.bson.json.JsonObject +import org.bson.json.JsonWriterSettings +import spock.lang.Specification + +import static org.bson.BsonDocument.parse + +class JsonObjectCodecSpecification extends Specification { + def 'should have JsonObject encoding class'() { + given: + def codec = new JsonObjectCodec() + + expect: + codec.getEncoderClass() == JsonObject + } + + def 'should encode JsonObject correctly'() { + given: + def codec = new JsonObjectCodec() + def writer = new BsonDocumentWriter(new BsonDocument()) + + when: + codec.encode(writer, new JsonObject('{hello: {world: 1}}'), EncoderContext.builder().build()) + + then: + writer.document == parse('{hello: {world: 1}}') + } + + def 'should decode JsonObject correctly'() { + given: + def codec = new JsonObjectCodec() + def reader = new BsonDocumentReader(parse('{hello: {world: 1}}')) + + when: + def jsonObject = codec.decode(reader, DecoderContext.builder().build()) + + then: + jsonObject.getJson() == '{"hello": {"world": 1}}' + } + + def 'should use JsonWriterSettings'() { + given: + def codec = new JsonObjectCodec(JsonWriterSettings.builder().outputMode(JsonMode.EXTENDED).build()) + def reader = new BsonDocumentReader(parse('{hello: 1}')) + + when: + def jsonObject = codec.decode(reader, 
DecoderContext.builder().build()) + + then: + jsonObject.getJson() == '{"hello": {"$numberInt": "1"}}' + } +} diff --git a/bson/src/test/unit/org/bson/codecs/LongCodecTest.java b/bson/src/test/unit/org/bson/codecs/LongCodecTest.java index 2ac54222ee6..2005718a05d 100644 --- a/bson/src/test/unit/org/bson/codecs/LongCodecTest.java +++ b/bson/src/test/unit/org/bson/codecs/LongCodecTest.java @@ -18,7 +18,10 @@ import org.bson.BsonInvalidOperationException; import org.bson.Document; -import org.junit.Test; +import org.bson.types.Decimal128; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public final class LongCodecTest extends CodecTestCase { @@ -34,16 +37,22 @@ public void shouldHandleAlternativeNumberValues() { roundTrip(new Document("a", 10), expected); roundTrip(new Document("a", 10.00), expected); roundTrip(new Document("a", 9.9999999999999992), expected); + roundTrip(new Document("a", Decimal128.parse("10")), expected); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowWhenHandlingLossyValues() { - roundTrip(new Document("a", Double.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () ->roundTrip(new Document("a", Double.MAX_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowWhenHandlingLossyDoubleValues() { - roundTrip(new Document("a", 9.9999999999999991)); + assertThrows(BsonInvalidOperationException.class, () ->roundTrip(new Document("a", 9.9999999999999991))); + } + + @Test + public void shouldThrowWhenHandlingLossyDecimal128Values() { + assertThrows(BsonInvalidOperationException.class, () ->roundTrip(new Document("a", Decimal128.parse("10.0")))); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/MapCodecProviderTest.java b/bson/src/test/unit/org/bson/codecs/MapCodecProviderTest.java new file mode 100644 index 00000000000..6437334675a --- /dev/null +++ 
b/bson/src/test/unit/org/bson/codecs/MapCodecProviderTest.java @@ -0,0 +1,53 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.conversions.Bson; +import org.junit.jupiter.api.Test; + +import java.util.Map; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +final class MapCodecProviderTest { + @Test + void shouldReturnNullForNonMap() { + MapCodecProvider provider = new MapCodecProvider(); + assertNull(provider.get(String.class, Bson.DEFAULT_CODEC_REGISTRY)); + } + + @Test + void shouldReturnMapCodecForMap() { + MapCodecProvider provider = new MapCodecProvider(); + @SuppressWarnings({"rawtypes", "unchecked"}) + Codec> codec = (Codec>) (Codec) provider.get(Map.class, Bson.DEFAULT_CODEC_REGISTRY); + assertTrue(codec instanceof MapCodec); + MapCodec> recordCodec = (MapCodec>) codec; + assertEquals(Map.class, recordCodec.getEncoderClass()); + } + + @Test + public void shouldReturnMapCodecForMapUsingDefaultRegistry() { + @SuppressWarnings({"rawtypes", "unchecked"}) + Codec> codec = (Codec>) (Codec) Bson.DEFAULT_CODEC_REGISTRY.get(Map.class); + assertTrue(codec instanceof MapCodec); + MapCodec> recordCodec = (MapCodec>) codec; + assertEquals(Map.class, recordCodec.getEncoderClass()); + } +} diff --git 
a/bson/src/test/unit/org/bson/codecs/MapCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/MapCodecSpecification.groovy index 3bc478ff988..ffe66e32d10 100644 --- a/bson/src/test/unit/org/bson/codecs/MapCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/MapCodecSpecification.groovy @@ -16,8 +16,10 @@ package org.bson.codecs +import org.bson.BsonArray import org.bson.BsonBinaryReader import org.bson.BsonBinaryWriter +import org.bson.BsonDateTime import org.bson.BsonDbPointer import org.bson.BsonDocument import org.bson.BsonDocumentReader @@ -30,6 +32,7 @@ import org.bson.BsonUndefined import org.bson.BsonWriter import org.bson.ByteBufNIO import org.bson.Document +import org.bson.codecs.jsr310.Jsr310CodecProvider import org.bson.io.BasicOutputBuffer import org.bson.io.ByteBufferBsonInput import org.bson.json.JsonReader @@ -42,16 +45,31 @@ import org.bson.types.ObjectId import org.bson.types.Symbol import spock.lang.Shared import spock.lang.Specification +import spock.lang.Unroll +import java.lang.reflect.ParameterizedType import java.nio.ByteBuffer +import java.time.Instant import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.atomic.AtomicLong import static java.util.Arrays.asList +import static org.bson.UuidRepresentation.C_SHARP_LEGACY +import static org.bson.UuidRepresentation.JAVA_LEGACY +import static org.bson.UuidRepresentation.PYTHON_LEGACY +import static org.bson.UuidRepresentation.STANDARD +import static org.bson.UuidRepresentation.UNSPECIFIED +import static org.bson.codecs.configuration.CodecRegistries.fromCodecs import static org.bson.codecs.configuration.CodecRegistries.fromProviders +import static org.bson.codecs.configuration.CodecRegistries.fromRegistries class MapCodecSpecification extends Specification { + + static final REGISTRY = fromRegistries(fromCodecs(new UuidCodec(JAVA_LEGACY)), + fromProviders(asList(new ValueCodecProvider(), new 
BsonValueCodecProvider(), + new DocumentCodecProvider(), new CollectionCodecProvider(), new MapCodecProvider()))) + @Shared BsonDocument bsonDoc = new BsonDocument() @Shared @@ -81,7 +99,6 @@ class MapCodecSpecification extends Specification { put('undefined', new BsonUndefined()) put('binary', new Binary((byte) 0x80, [5, 4, 3, 2, 1] as byte[])) put('array', asList(1, 1L, true, [1, 2, 3], new Document('a', 1), null)) - put('uuid', new UUID(1L, 2L)) put('document', new Document('a', 2)) put('map', [a:1, b:2]) put('atomicLong', new AtomicLong(1)) @@ -90,18 +107,18 @@ class MapCodecSpecification extends Specification { } when: - new MapCodec().encode(writer, originalDocument, EncoderContext.builder().build()) + new MapCodec(REGISTRY, new BsonTypeClassMap(), null, Map).encode(writer, originalDocument, EncoderContext.builder().build()) BsonReader reader if (writer instanceof BsonDocumentWriter) { reader = new BsonDocumentReader(bsonDoc) } else if (writer instanceof BsonBinaryWriter) { - BasicOutputBuffer buffer = (BasicOutputBuffer)writer.getBsonOutput(); + BasicOutputBuffer buffer = (BasicOutputBuffer)writer.getBsonOutput() reader = new BsonBinaryReader(new ByteBufferBsonInput(new ByteBufNIO( ByteBuffer.wrap(buffer.toByteArray())))) } else { reader = new JsonReader(stringWriter.toString()) } - def decodedDoc = new MapCodec().decode(reader, DecoderContext.builder().build()) + def decodedDoc = new MapCodec(REGISTRY, new BsonTypeClassMap(), null, Map).decode(reader, DecoderContext.builder().build()) then: decodedDoc.get('null') == originalDocument.get('null') @@ -123,7 +140,6 @@ class MapCodecSpecification extends Specification { decodedDoc.get('timestamp') == originalDocument.get('timestamp') decodedDoc.get('undefined') == originalDocument.get('undefined') decodedDoc.get('binary') == originalDocument.get('binary') - decodedDoc.get('uuid') == originalDocument.get('uuid') decodedDoc.get('array') == originalDocument.get('array') decodedDoc.get('document') == 
originalDocument.get('document') decodedDoc.get('map') == originalDocument.get('map') @@ -138,34 +154,143 @@ class MapCodecSpecification extends Specification { ] } - @SuppressWarnings(['LineLength']) - def 'should decode binary subtypes for UUID'() { + def 'should decode binary subtypes for UUID that are not 16 bytes into Binary'() { given: def reader = new BsonBinaryReader(ByteBuffer.wrap(bytes as byte[])) when: - def document = new MapCodec().decode(reader, DecoderContext.builder().build()) + def document = new DocumentCodec().decode(reader, DecoderContext.builder().build()) then: value == document.get('f') where: - value | bytes - new Binary((byte) 0x03, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 3, 115, 116, 11, 0] - new Binary((byte) 0x04, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 4, 115, 116, 11, 0] - UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] - UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + value | bytes + new Binary((byte) 0x03, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 3, 115, 116, 11, 0] + new Binary((byte) 0x04, (byte[]) [115, 116, 11]) | [16, 0, 0, 0, 5, 102, 0, 3, 0, 0, 0, 4, 115, 116, 11, 0] + } + + @SuppressWarnings(['LineLength']) + @Unroll + def 'should decode binary subtype 3 for UUID'() { + given: + def reader = new BsonBinaryReader(ByteBuffer.wrap(bytes as byte[])) + + when: + def map = new MapCodec(fromCodecs(new UuidCodec(representation), new BinaryCodec()), new BsonTypeClassMap(), null, Map) + .withUuidRepresentation(representation) + .decode(reader, DecoderContext.builder().build()) + + then: + value == map.get('f') + + where: + representation | value | bytes + JAVA_LEGACY | UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09') | [29, 0, 0, 0, 5, 102, 
0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + C_SHARP_LEGACY | UUID.fromString('04030201-0605-0807-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + PYTHON_LEGACY | UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + STANDARD | new Binary((byte) 3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + UNSPECIFIED | new Binary((byte) 3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 3, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] } + @SuppressWarnings(['LineLength']) + @Unroll + def 'should decode binary subtype 4 for UUID'() { + given: + def reader = new BsonBinaryReader(ByteBuffer.wrap(bytes as byte[])) + + when: + def map = new MapCodec(fromCodecs(new UuidCodec(representation), new BinaryCodec()), new BsonTypeClassMap(), null, Map) + .withUuidRepresentation(representation) + .decode(reader, DecoderContext.builder().build()) + + then: + value == map.get('f') + + where: + representation | value | bytes + STANDARD | UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + JAVA_LEGACY | new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + C_SHARP_LEGACY | new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + PYTHON_LEGACY | new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as 
byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + UNSPECIFIED | new Binary((byte) 4, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[]) | [29, 0, 0, 0, 5, 102, 0, 16, 0, 0, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0] + } + + def 'should apply transformer to decoded values'() { given: def codec = new MapCodec(fromProviders([new ValueCodecProvider(), new DocumentCodecProvider(), new BsonValueCodecProvider()]), new BsonTypeClassMap(), - { Object value -> 5 }) + { Object value -> 5 }, Map) when: def doc = codec.decode(new BsonDocumentReader(new BsonDocument('_id', new BsonInt32(1))), DecoderContext.builder().build()) then: doc['_id'] == 5 } + + def 'should decode to specified generic class'() { + given: + def doc = new BsonDocument('_id', new BsonInt32(1)) + + when: + def codec = new MapCodec(fromProviders([new ValueCodecProvider()]), new BsonTypeClassMap(), null, mapType) + def map = codec.decode(new BsonDocumentReader(doc), DecoderContext.builder().build()) + + then: + codec.getEncoderClass() == mapType + map.getClass() == actualType + + where: + mapType | actualType + Map | HashMap + NavigableMap | TreeMap + AbstractMap | HashMap + HashMap | HashMap + TreeMap | TreeMap + WeakHashMap | WeakHashMap + } + + + def 'should parameterize'() { + given: + def codec = fromProviders(new Jsr310CodecProvider(), REGISTRY).get( + Map, + asList(((ParameterizedType) Container.getMethod('getInstants').genericReturnType).actualTypeArguments)) + + def writer = new BsonDocumentWriter(new BsonDocument()) + def reader = new BsonDocumentReader(writer.getDocument()) + def instants = + ['firstMap': [Instant.ofEpochMilli(1), Instant.ofEpochMilli(2)], + 'secondMap': [Instant.ofEpochMilli(3), Instant.ofEpochMilli(4)]] + when: + writer.writeStartDocument() + writer.writeName('instants') + codec.encode(writer, instants, EncoderContext.builder().build()) + writer.writeEndDocument() + + then: + 
writer.getDocument() == new BsonDocument() + .append('instants', + new BsonDocument() + .append('firstMap', new BsonArray([new BsonDateTime(1), new BsonDateTime(2)])) + .append('secondMap', new BsonArray([new BsonDateTime(3), new BsonDateTime(4)]))) + + when: + reader.readStartDocument() + reader.readName('instants') + def decodedInstants = codec.decode(reader, DecoderContext.builder().build()) + + then: + decodedInstants == instants + } + + @SuppressWarnings('unused') + static class Container { + private final Map> instants = [:] + + Map> getInstants() { + instants + } + } } diff --git a/bson/src/test/unit/org/bson/codecs/OverridableUuidRepresentationUuidCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/OverridableUuidRepresentationUuidCodecSpecification.groovy new file mode 100644 index 00000000000..4f52409c8d7 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/OverridableUuidRepresentationUuidCodecSpecification.groovy @@ -0,0 +1,39 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs + +import org.bson.UuidRepresentation +import spock.lang.Specification + +class OverridableUuidRepresentationUuidCodecSpecification extends Specification{ + + def 'should change uuid representation'() { + when: + def codec = new OverridableUuidRepresentationUuidCodec() + + then: + codec.getUuidRepresentation() == UuidRepresentation.UNSPECIFIED + + when: + def newCodec = codec.withUuidRepresentation(UuidRepresentation.STANDARD) + + then: + newCodec instanceof OverridableUuidRepresentationCodec + (newCodec as OverridableUuidRepresentationCodec).getUuidRepresentation() == UuidRepresentation.STANDARD + } + +} diff --git a/bson/src/test/unit/org/bson/codecs/RawBsonDocumentCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/RawBsonDocumentCodecSpecification.groovy index eb3f1e7a9c3..ee6dd3125af 100644 --- a/bson/src/test/unit/org/bson/codecs/RawBsonDocumentCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/RawBsonDocumentCodecSpecification.groovy @@ -32,7 +32,7 @@ class RawBsonDocumentCodecSpecification extends Specification { def codec = new RawBsonDocumentCodec() def document = new BsonDocument([new BsonElement('b1', BsonBoolean.TRUE), new BsonElement('b2', BsonBoolean.FALSE)]) - def documentBytes = [15, 0, 0, 0, 8, 98, 49, 0, 1, 8, 98, 50, 0, 0, 0] as byte[]; + def documentBytes = [15, 0, 0, 0, 8, 98, 49, 0, 1, 8, 98, 50, 0, 0, 0] as byte[] def 'should get encoder class'() { expect: diff --git a/bson/src/test/unit/org/bson/codecs/ShortCodecTest.java b/bson/src/test/unit/org/bson/codecs/ShortCodecTest.java index 3712f35176b..6bfb41fbb1a 100644 --- a/bson/src/test/unit/org/bson/codecs/ShortCodecTest.java +++ b/bson/src/test/unit/org/bson/codecs/ShortCodecTest.java @@ -18,7 +18,9 @@ import org.bson.BsonInvalidOperationException; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public final class ShortCodecTest 
extends CodecTestCase { @@ -37,14 +39,14 @@ public void shouldHandleAlternativeNumberValues() { roundTrip(new Document("a", 9.9999999999999992), expected); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMinRange() { - roundTrip(new Document("a", Integer.MIN_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Integer.MIN_VALUE))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldErrorDecodingOutsideMaxRange() { - roundTrip(new Document("a", Integer.MAX_VALUE)); + assertThrows(BsonInvalidOperationException.class, () -> roundTrip(new Document("a", Integer.MAX_VALUE))); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleEnum.java b/bson/src/test/unit/org/bson/codecs/SimpleEnum.java similarity index 94% rename from bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleEnum.java rename to bson/src/test/unit/org/bson/codecs/SimpleEnum.java index a7830c1991a..bd0ff19188a 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleEnum.java +++ b/bson/src/test/unit/org/bson/codecs/SimpleEnum.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.bson.codecs.pojo.entities; +package org.bson.codecs; public enum SimpleEnum { ALPHA, diff --git a/bson/src/test/unit/org/bson/codecs/StringCodecTest.java b/bson/src/test/unit/org/bson/codecs/StringCodecTest.java new file mode 100644 index 00000000000..2c9ae408c11 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/StringCodecTest.java @@ -0,0 +1,125 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs; + +import org.bson.BsonInvalidOperationException; +import org.bson.BsonReader; +import org.bson.BsonType; +import org.bson.BsonWriter; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.json.JsonReader; +import org.bson.json.JsonWriter; +import org.junit.jupiter.api.Test; + +import java.io.StringWriter; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +public class StringCodecTest { + + private final DecoderContext decoderContext = DecoderContext.builder().build(); + private final EncoderContext encoderContext = EncoderContext.builder().build(); + private final Codec parent = new StringCodec(); + @SuppressWarnings("unchecked") + private final Codec child = ((RepresentationConfigurable) parent).withRepresentation(BsonType.OBJECT_ID); + + @Test + public void testSettingRepresentation() { + assertEquals(((RepresentationConfigurable) parent).getRepresentation(), BsonType.STRING); + assertEquals(((RepresentationConfigurable) child).getRepresentation(), BsonType.OBJECT_ID); + } + + @Test + public void testStringRepresentation() { + @SuppressWarnings("unchecked") + Codec child = ((RepresentationConfigurable) parent).withRepresentation(BsonType.STRING); + assertEquals(((RepresentationConfigurable) child).getRepresentation(), BsonType.STRING); + } + + @Test + public void testInvalidRepresentation() { + assertThrows(CodecConfigurationException.class, () -> ((RepresentationConfigurable) 
parent).withRepresentation(BsonType.INT32)); + } + + + @Test + public void testDecodeOnObjectIdWithObjectIdRep() { + BsonReader reader = new JsonReader("{'_id': ObjectId('5f5a6cc03237b5e06d6b887b'), 'name': 'Brian'}"); + reader.readStartDocument(); + reader.readName(); + String stringId = child.decode(reader, decoderContext); + + assertEquals(stringId, "5f5a6cc03237b5e06d6b887b"); + } + + @Test + public void testDecodeOnObjectIdWithStringRep() { + assertThrows(BsonInvalidOperationException.class, () -> { + BsonReader reader = new JsonReader("{'_id': ObjectId('5f5a6cc03237b5e06d6b887b'), 'name': 'Brian'}"); + reader.readStartDocument(); + reader.readName(); + parent.decode(reader, decoderContext); + }); + } + + @Test + public void testDecodeOnStringWithObjectIdRep() { + assertThrows(BsonInvalidOperationException.class, () -> { + BsonReader reader = new JsonReader("{'name': 'Brian'"); + reader.readStartDocument(); + reader.readName(); + child.decode(reader, decoderContext); + }); + } + + @Test + public void testDecodeOnStringWithStringRep() { + BsonReader reader = new JsonReader("{'name': 'Brian'"); + reader.readStartDocument(); + reader.readName(); + assertEquals(parent.decode(reader, decoderContext), "Brian"); + } + + @Test + public void testEncodeWithObjectIdRep() { + StringWriter writer = new StringWriter(); + BsonWriter jsonWriter = new JsonWriter(writer); + jsonWriter.writeStartDocument(); + jsonWriter.writeName("_id"); + + child.encode(jsonWriter, "5f5a6cc03237b5e06d6b887b", encoderContext); + + jsonWriter.writeEndDocument(); + + assertEquals(writer.toString(), "{\"_id\": {\"$oid\": \"5f5a6cc03237b5e06d6b887b\"}}"); + } + + @Test + public void testEncodeWithStringRep() { + StringWriter writer = new StringWriter(); + BsonWriter jsonWriter = new JsonWriter(writer); + jsonWriter.writeStartDocument(); + jsonWriter.writeName("_id"); + + parent.encode(jsonWriter, "5f5a6cc03237b5e06d6b887b", EncoderContext.builder().build()); + + jsonWriter.writeEndDocument(); + + 
assertEquals(writer.toString(), "{\"_id\": \"5f5a6cc03237b5e06d6b887b\"}"); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/UndefinedCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/UndefinedCodecSpecification.groovy index a6074f88731..ac95db63efe 100644 --- a/bson/src/test/unit/org/bson/codecs/UndefinedCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/UndefinedCodecSpecification.groovy @@ -24,7 +24,7 @@ import spock.lang.Subject class UndefinedCodecSpecification extends Specification { @Subject - BsonUndefinedCodec codec = new BsonUndefinedCodec(); + BsonUndefinedCodec codec = new BsonUndefinedCodec() def 'should return Undefined class'() { expect: diff --git a/bson/src/test/unit/org/bson/codecs/UuidCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/UuidCodecSpecification.groovy index 1028050a86e..8bafd639882 100644 --- a/bson/src/test/unit/org/bson/codecs/UuidCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/UuidCodecSpecification.groovy @@ -18,8 +18,11 @@ package org.bson.codecs import org.bson.BsonBinaryReader import org.bson.BsonBinaryWriter +import org.bson.BsonDocument +import org.bson.BsonDocumentWriter import org.bson.ByteBufNIO import org.bson.UuidRepresentation +import org.bson.codecs.configuration.CodecConfigurationException import org.bson.io.BasicOutputBuffer import org.bson.io.ByteBufferBsonInput import spock.lang.Shared @@ -32,12 +35,17 @@ import java.nio.ByteBuffer */ class UuidCodecSpecification extends Specification { - @Shared private UuidCodec uuidCodec; - @Shared private BasicOutputBuffer outputBuffer; + @Shared private UuidCodec uuidCodec + @Shared private BasicOutputBuffer outputBuffer def setup() { - uuidCodec = new UuidCodec(); - outputBuffer = new BasicOutputBuffer(); + uuidCodec = new UuidCodec() + outputBuffer = new BasicOutputBuffer() + } + + def 'should default to unspecified representation'() { + expect: + new UuidCodec().getUuidRepresentation() == 
UuidRepresentation.UNSPECIFIED } def 'should decode different types of UUID'(UuidCodec codec, byte[] list) throws IOException { @@ -62,7 +70,7 @@ class UuidCodecSpecification extends Specification { where: codec << [ - new UuidCodec(), + new UuidCodec(UuidRepresentation.JAVA_LEGACY), new UuidCodec(UuidRepresentation.STANDARD), new UuidCodec(UuidRepresentation.PYTHON_LEGACY), new UuidCodec(UuidRepresentation.C_SHARP_LEGACY), @@ -136,7 +144,7 @@ class UuidCodecSpecification extends Specification { bsonSubType << [3, 4, 3, 3] codec << [ - new UuidCodec(), + new UuidCodec(UuidRepresentation.JAVA_LEGACY), new UuidCodec(UuidRepresentation.STANDARD), new UuidCodec(UuidRepresentation.PYTHON_LEGACY), new UuidCodec(UuidRepresentation.C_SHARP_LEGACY), @@ -149,4 +157,15 @@ class UuidCodecSpecification extends Specification { UUID.fromString('04030201-0605-0807-090a-0b0c0d0e0f10') // simulated C# UUID ] } + + def 'should throw if representation is unspecified'() { + given: + def codec = new UuidCodec(UuidRepresentation.UNSPECIFIED) + + when: + codec.encode(new BsonDocumentWriter(new BsonDocument()), UUID.randomUUID(), EncoderContext.builder().build()) + + then: + thrown(CodecConfigurationException) + } } diff --git a/bson/src/test/unit/org/bson/codecs/ValueCodecProviderSpecification.groovy b/bson/src/test/unit/org/bson/codecs/ValueCodecProviderSpecification.groovy index b4eb7a131fb..1fd738b5c5c 100644 --- a/bson/src/test/unit/org/bson/codecs/ValueCodecProviderSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/ValueCodecProviderSpecification.groovy @@ -17,6 +17,10 @@ package org.bson.codecs import org.bson.Document +import org.bson.Float32BinaryVector +import org.bson.Int8BinaryVector +import org.bson.PackedBitBinaryVector +import org.bson.BinaryVector import org.bson.codecs.configuration.CodecRegistries import org.bson.types.Binary import org.bson.types.Code @@ -32,6 +36,8 @@ import java.util.concurrent.atomic.AtomicInteger import 
java.util.concurrent.atomic.AtomicLong import java.util.regex.Pattern +//Codenarc +@SuppressWarnings("VectorIsObsolete") class ValueCodecProviderSpecification extends Specification { private final provider = new ValueCodecProvider() private final registry = CodecRegistries.fromProviders(provider) @@ -56,6 +62,10 @@ class ValueCodecProviderSpecification extends Specification { provider.get(Short, registry) instanceof ShortCodec provider.get(byte[], registry) instanceof ByteArrayCodec provider.get(Float, registry) instanceof FloatCodec + provider.get(BinaryVector, registry) instanceof BinaryVectorCodec + provider.get(Float32BinaryVector, registry) instanceof Float32BinaryVectorCodec + provider.get(Int8BinaryVector, registry) instanceof Int8VectorCodec + provider.get(PackedBitBinaryVector, registry) instanceof PackedBitBinaryVectorCodec provider.get(Binary, registry) instanceof BinaryCodec provider.get(MinKey, registry) instanceof MinKeyCodec @@ -63,7 +73,7 @@ class ValueCodecProviderSpecification extends Specification { provider.get(Code, registry) instanceof CodeCodec provider.get(ObjectId, registry) instanceof ObjectIdCodec provider.get(Symbol, registry) instanceof SymbolCodec - provider.get(UUID, registry) instanceof UuidCodec + provider.get(UUID, registry) instanceof OverridableUuidRepresentationCodec provider.get(Document, registry) == null } diff --git a/bson/src/test/unit/org/bson/codecs/configuration/CodeRegistriesSpecification.groovy b/bson/src/test/unit/org/bson/codecs/configuration/CodeRegistriesSpecification.groovy index 26bc027b81a..9cae58f7468 100644 --- a/bson/src/test/unit/org/bson/codecs/configuration/CodeRegistriesSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/configuration/CodeRegistriesSpecification.groovy @@ -16,18 +16,36 @@ package org.bson.codecs.configuration +import org.bson.BsonArray +import org.bson.BsonDateTime +import org.bson.BsonDocument +import org.bson.BsonDocumentReader +import org.bson.BsonDocumentWriter import 
org.bson.BsonInt32 import org.bson.codecs.BsonInt32Codec import org.bson.codecs.BsonValueCodecProvider +import org.bson.codecs.CollectionCodecProvider +import org.bson.codecs.DecoderContext +import org.bson.codecs.EncoderContext import org.bson.codecs.IntegerCodec import org.bson.codecs.LongCodec +import org.bson.codecs.MapCodecProvider import org.bson.codecs.UuidCodec import org.bson.codecs.ValueCodecProvider +import org.bson.codecs.jsr310.Jsr310CodecProvider +import org.bson.internal.ProvidersCodecRegistry import spock.lang.Specification +import java.lang.reflect.ParameterizedType +import java.time.Instant + import static CodecRegistries.fromCodecs import static CodecRegistries.fromProviders import static CodecRegistries.fromRegistries +import static java.util.Arrays.asList +import static org.bson.UuidRepresentation.STANDARD +import static org.bson.UuidRepresentation.UNSPECIFIED +import static org.bson.codecs.configuration.CodecRegistries.withUuidRepresentation class CodeRegistriesSpecification extends Specification { def 'fromCodec should return a SingleCodecRegistry'() { @@ -69,4 +87,66 @@ class CodeRegistriesSpecification extends Specification { registry.get(UUID).is(uuidCodec) registry.get(Integer) instanceof IntegerCodec } + + def 'withUuidRepresentation should apply uuid representation'() { + given: + def registry = fromProviders(new ValueCodecProvider()) + def registryWithStandard = withUuidRepresentation(registry, STANDARD) + + when: + def uuidCodec = registry.get(UUID) as UuidCodec + + then: + uuidCodec.getUuidRepresentation() == UNSPECIFIED + + when: + uuidCodec = registryWithStandard.get(UUID) as UuidCodec + + then: + uuidCodec.getUuidRepresentation() == STANDARD + } + + def 'withUuidRepresentation should not break parameterization'() { + given: + def registry = fromProviders( + new Jsr310CodecProvider(), + new ValueCodecProvider(), + withUuidRepresentation(fromProviders(new CollectionCodecProvider()), STANDARD), + 
withUuidRepresentation(fromProviders(new MapCodecProvider()), STANDARD) + ) + def codec = registry.get(Collection, asList( + ((ParameterizedType) CodeRegistriesSpecification.getMethod('parameterizedTypeProvider').genericReturnType) + .actualTypeArguments)) + def writer = new BsonDocumentWriter(new BsonDocument()) + def reader = new BsonDocumentReader(writer.getDocument()) + def value = [ + ['firstMap': [Instant.ofEpochMilli(1), Instant.ofEpochMilli(2)]], + ['secondMap': [Instant.ofEpochMilli(3), Instant.ofEpochMilli(4)]]] + when: + writer.writeStartDocument() + writer.writeName('value') + codec.encode(writer, value, EncoderContext.builder().build()) + writer.writeEndDocument() + + then: + writer.getDocument() == new BsonDocument() + .append('value', new BsonArray( + [ + new BsonDocument('firstMap', new BsonArray([new BsonDateTime(1), new BsonDateTime(2)])), + new BsonDocument('secondMap', new BsonArray([new BsonDateTime(3), new BsonDateTime(4)])) + ])) + + when: + reader.readStartDocument() + reader.readName('value') + def decodedValue = codec.decode(reader, DecoderContext.builder().build()) + + then: + decodedValue == value + } + + @SuppressWarnings('unused') + List>> parameterizedTypeProvider() { + [] + } } diff --git a/bson/src/test/unit/org/bson/codecs/configuration/CodecCacheSpecification.groovy b/bson/src/test/unit/org/bson/codecs/configuration/CodecCacheSpecification.groovy deleted file mode 100644 index b9e37e6885b..00000000000 --- a/bson/src/test/unit/org/bson/codecs/configuration/CodecCacheSpecification.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.codecs.configuration - -import org.bson.codecs.MinKeyCodec -import org.bson.types.MinKey -import spock.lang.Specification - -class CodecCacheSpecification extends Specification { - - def 'should return the cached codec if a codec for the class exists'() { - when: - def codec = new MinKeyCodec() - def cache = new CodecCache() - cache.put(MinKey, codec) - - then: - cache.getOrThrow(MinKey).is(codec) - } - - def 'should throw if codec for class does not exist'() { - when: - def cache = new CodecCache() - cache.getOrThrow(MinKey) - - then: - thrown(CodecConfigurationException) - } -} diff --git a/bson/src/test/unit/org/bson/codecs/jsr310/InstantCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/jsr310/InstantCodecSpecification.groovy index 3ece437ce0a..bdf6aa3f3a6 100644 --- a/bson/src/test/unit/org/bson/codecs/jsr310/InstantCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/jsr310/InstantCodecSpecification.groovy @@ -19,13 +19,11 @@ package org.bson.codecs.jsr310 import org.bson.BsonDocument import org.bson.codecs.Codec import org.bson.codecs.configuration.CodecConfigurationException -import spock.lang.IgnoreIf import java.time.Instant import java.time.LocalDateTime import java.time.ZoneOffset -@IgnoreIf({ javaVersion < 1.8 }) class InstantCodecSpecification extends JsrSpecification { def 'should round trip Instant successfully'() { diff --git a/bson/src/test/unit/org/bson/codecs/jsr310/Jsr310CodecProviderSpecification.groovy 
b/bson/src/test/unit/org/bson/codecs/jsr310/Jsr310CodecProviderSpecification.groovy index bf8999cf535..e4f7a31281d 100644 --- a/bson/src/test/unit/org/bson/codecs/jsr310/Jsr310CodecProviderSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/jsr310/Jsr310CodecProviderSpecification.groovy @@ -17,12 +17,10 @@ package org.bson.codecs.jsr310 import org.bson.codecs.configuration.CodecRegistry -import spock.lang.IgnoreIf import spock.lang.Specification class Jsr310CodecProviderSpecification extends Specification { - @IgnoreIf({ javaVersion < 1.8 }) def 'should provide a codec for all JSR-310 classes'() { given: def codecRegistry = Stub(CodecRegistry) @@ -39,14 +37,4 @@ class Jsr310CodecProviderSpecification extends Specification { java.time.LocalTime, ] } - - @IgnoreIf({ javaVersion > 1.7 }) - def 'should not error when used on pre java 8'() { - given: - def codecRegistry = Stub(CodecRegistry) - def provider = new Jsr310CodecProvider() - - expect: - provider.get(Integer, codecRegistry) == null - } } diff --git a/bson/src/test/unit/org/bson/codecs/jsr310/LocalDateCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/jsr310/LocalDateCodecSpecification.groovy index e418056cb8f..a94753992b6 100644 --- a/bson/src/test/unit/org/bson/codecs/jsr310/LocalDateCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/jsr310/LocalDateCodecSpecification.groovy @@ -19,11 +19,9 @@ package org.bson.codecs.jsr310 import org.bson.BsonDocument import org.bson.codecs.Codec import org.bson.codecs.configuration.CodecConfigurationException -import spock.lang.IgnoreIf import java.time.LocalDate -@IgnoreIf({ javaVersion < 1.8 }) class LocalDateCodecSpecification extends JsrSpecification { def 'should round trip LocalDate successfully'() { diff --git a/bson/src/test/unit/org/bson/codecs/jsr310/LocalDateTimeCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/jsr310/LocalDateTimeCodecSpecification.groovy index 1ba101af7ae..0140c9ba13d 100644 --- 
a/bson/src/test/unit/org/bson/codecs/jsr310/LocalDateTimeCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/jsr310/LocalDateTimeCodecSpecification.groovy @@ -19,14 +19,12 @@ package org.bson.codecs.jsr310 import org.bson.BsonDocument import org.bson.codecs.Codec import org.bson.codecs.configuration.CodecConfigurationException -import spock.lang.IgnoreIf import java.time.LocalDate import java.time.LocalDateTime import java.time.LocalTime import java.time.ZoneOffset -@IgnoreIf({ javaVersion < 1.8 }) class LocalDateTimeCodecSpecification extends JsrSpecification { def 'should round trip LocalDateTime successfully'() { diff --git a/bson/src/test/unit/org/bson/codecs/jsr310/LocalTimeCodecSpecification.groovy b/bson/src/test/unit/org/bson/codecs/jsr310/LocalTimeCodecSpecification.groovy index 1ad1534ae00..609e9bc899d 100644 --- a/bson/src/test/unit/org/bson/codecs/jsr310/LocalTimeCodecSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/jsr310/LocalTimeCodecSpecification.groovy @@ -19,11 +19,9 @@ package org.bson.codecs.jsr310 import org.bson.BsonDocument import org.bson.codecs.Codec import org.bson.codecs.configuration.CodecConfigurationException -import spock.lang.IgnoreIf import java.time.LocalTime -@IgnoreIf({ javaVersion < 1.8 }) class LocalTimeCodecSpecification extends JsrSpecification { def 'should round trip LocalTime successfully'() { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/ClassModelBuilderTest.java b/bson/src/test/unit/org/bson/codecs/pojo/ClassModelBuilderTest.java index aa386c95cfd..83c9c432a07 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/ClassModelBuilderTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/ClassModelBuilderTest.java @@ -22,9 +22,10 @@ import org.bson.codecs.pojo.entities.GenericHolderModel; import org.bson.codecs.pojo.entities.NestedGenericHolderModel; import org.bson.codecs.pojo.entities.SimpleGenericsModel; -import org.bson.codecs.pojo.entities.UpperBoundsModel; +import 
org.bson.codecs.pojo.entities.SimpleIdModel; import org.bson.codecs.pojo.entities.UpperBoundsConcreteModel; -import org.junit.Test; +import org.bson.codecs.pojo.entities.UpperBoundsModel; +import org.junit.jupiter.api.Test; import java.lang.annotation.Annotation; import java.lang.reflect.Field; @@ -36,10 +37,11 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import static junit.framework.TestCase.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; @SuppressWarnings("rawtypes") public final class ClassModelBuilderTest { @@ -53,14 +55,15 @@ public void testDefaults() { assertEquals(field.getName(), builder.getProperty(field.getName()).getWriteName()); } - Map fieldNameToTypeParameterMap = new HashMap(); + Map fieldNameToTypeParameterMap = new HashMap<>(); fieldNameToTypeParameterMap.put("myIntegerField", TypeParameterMap.builder().build()); fieldNameToTypeParameterMap.put("myGenericField", TypeParameterMap.builder().addIndex(0).build()); fieldNameToTypeParameterMap.put("myListField", TypeParameterMap.builder().addIndex(0, 1).build()); - fieldNameToTypeParameterMap.put("myMapField", TypeParameterMap.builder().addIndex(1, 2).build()); + fieldNameToTypeParameterMap.put("myMapField", TypeParameterMap.builder().addIndex(0, TypeParameterMap.builder().build()) + .addIndex(1, 2).build()); assertEquals(fieldNameToTypeParameterMap, builder.getPropertyNameToTypeParameterMap()); - assertEquals(2, builder.getConventions().size()); + assertEquals(3, builder.getConventions().size()); assertTrue(builder.getAnnotations().isEmpty()); assertEquals(clazz, 
builder.getType()); assertNull(builder.getIdPropertyName()); @@ -75,7 +78,7 @@ public void testCanReflectObjectClass() { assertEquals(0, builder.getPropertyModelBuilders().size()); assertTrue(builder.getPropertyNameToTypeParameterMap().isEmpty()); - assertEquals(2, builder.getConventions().size()); + assertEquals(3, builder.getConventions().size()); assertTrue(builder.getAnnotations().isEmpty()); assertEquals(clazz, builder.getType()); assertNull(builder.getIdPropertyName()); @@ -115,7 +118,7 @@ public void testFieldsMappedClassTypes() { @Test public void testOverrides() throws NoSuchFieldException { - ClassModelBuilder builder = ClassModel.builder(SimpleGenericsModel.class) + ClassModelBuilder builder = ClassModel.builder(SimpleGenericsModel.class) .annotations(TEST_ANNOTATIONS) .conventions(TEST_CONVENTIONS) .discriminatorKey("_cls") @@ -146,19 +149,39 @@ public void testCanRemoveField() { assertEquals(3, builder.getPropertyModelBuilders().size()); } - @Test(expected = CodecConfigurationException.class) + @Test() public void testValidationIdProperty() { - ClassModel.builder(SimpleGenericsModel.class).idPropertyName("ID").build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(SimpleGenericsModel.class).idPropertyName("ID").build()); } - @Test(expected = CodecConfigurationException.class) + @Test() public void testValidationDuplicateDocumentFieldName() { - ClassModelBuilder builder = ClassModel.builder(SimpleGenericsModel.class); - builder.getProperty("myIntegerField").writeName("myGenericField"); - builder.build(); + assertThrows(CodecConfigurationException.class, () -> { + ClassModelBuilder builder = ClassModel.builder(SimpleGenericsModel.class); + builder.getProperty("myIntegerField").writeName("myGenericField"); + builder.build(); + }); + } + + @Test() + public void testDifferentTypeIdGenerator() { + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(SimpleIdModel.class) + .idGenerator(new IdGenerator() { + 
@Override + public String generate() { + return "id"; + } + + @Override + public Class getType() { + return String.class; + } + }).build()); } - private static final List TEST_ANNOTATIONS = Collections.singletonList( + private static final List TEST_ANNOTATIONS = Collections.singletonList( new BsonProperty() { @Override public Class annotationType() { @@ -176,18 +199,10 @@ public boolean useDiscriminator() { } }); - private static final List TEST_CONVENTIONS = Collections.singletonList( - new Convention() { - @Override - public void apply(final ClassModelBuilder builder) { - } + private static final List TEST_CONVENTIONS = Collections.singletonList( + builder -> { }); private static final InstanceCreatorFactory TEST_INSTANCE_CREATOR_FACTORY = - new InstanceCreatorFactory() { - @Override - public InstanceCreator create() { - return null; - } - }; + () -> null; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/ClassModelTest.java b/bson/src/test/unit/org/bson/codecs/pojo/ClassModelTest.java index 0e904393433..d0ee3cb1cc7 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/ClassModelTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/ClassModelTest.java @@ -16,30 +16,45 @@ package org.bson.codecs.pojo; +import java.util.SortedSet; + import org.bson.codecs.pojo.entities.CollectionNestedPojoModel; +import org.bson.codecs.pojo.entities.ConcreteAndNestedAbstractInterfaceModel; import org.bson.codecs.pojo.entities.GenericHolderModel; +import org.bson.codecs.pojo.entities.InterfaceBasedModel; +import org.bson.codecs.pojo.entities.ListGenericExtendedModel; +import org.bson.codecs.pojo.entities.ListListGenericExtendedModel; +import org.bson.codecs.pojo.entities.ListMapGenericExtendedModel; +import org.bson.codecs.pojo.entities.MapGenericExtendedModel; +import org.bson.codecs.pojo.entities.MapListGenericExtendedModel; +import org.bson.codecs.pojo.entities.MapMapGenericExtendedModel; +import org.bson.codecs.pojo.entities.MultipleBoundsModel; import 
org.bson.codecs.pojo.entities.NestedGenericHolderMapModel; import org.bson.codecs.pojo.entities.PropertySelectionModel; +import org.bson.codecs.pojo.entities.ShapeHolderCircleModel; +import org.bson.codecs.pojo.entities.ShapeHolderModel; +import org.bson.codecs.pojo.entities.ShapeModelAbstract; +import org.bson.codecs.pojo.entities.ShapeModelCircle; import org.bson.codecs.pojo.entities.SimpleGenericsModel; import org.bson.codecs.pojo.entities.SimpleModel; +import org.bson.codecs.pojo.entities.SimpleWithStaticModel; import org.bson.codecs.pojo.entities.conventions.AnnotationInheritedModel; import org.bson.codecs.pojo.entities.conventions.AnnotationModel; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; -import static junit.framework.TestCase.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; public final class ClassModelTest { @Test - @SuppressWarnings("rawtypes") public void testSimpleGenericsModel() { ClassModel classModel = ClassModel.builder(SimpleGenericsModel.class).build(); @@ -49,45 +64,65 @@ public void testSimpleGenericsModel() { assertEquals("_t", classModel.getDiscriminatorKey()); assertEquals("org.bson.codecs.pojo.entities.SimpleGenericsModel", classModel.getDiscriminator()); assertNull(classModel.getIdPropertyModel()); - assertEquals(4, classModel.getPropertyModels().size()); assertTrue(classModel.getInstanceCreatorFactory() instanceof InstanceCreatorFactoryImpl); + + assertEquals(4, classModel.getPropertyModels().size()); + 
assertEquals(classModel.getPropertyModel("myIntegerField").getTypeData(), createTypeData(Integer.class)); + assertEquals(classModel.getPropertyModel("myGenericField").getTypeData(), createTypeData(Object.class)); + assertEquals(classModel.getPropertyModel("myListField").getTypeData(), createTypeData(List.class, Object.class)); + assertEquals(classModel.getPropertyModel("myMapField").getTypeData(), createTypeData(Map.class, String.class, Object.class)); } @Test @SuppressWarnings("rawtypes") public void testCollectionNestedPojoModelPropertyTypes() { - TypeData string = TypeData.builder(String.class).build(); - TypeData simple = TypeData.builder(SimpleModel.class).build(); - TypeData list = TypeData.builder(List.class).addTypeParameter(simple).build(); - TypeData listList = TypeData.builder(List.class).addTypeParameter(list).build(); - TypeData set = TypeData.builder(Set.class).addTypeParameter(simple).build(); - TypeData setSet = TypeData.builder(Set.class).addTypeParameter(set).build(); - TypeData map = TypeData.builder(Map.class).addTypeParameter(string).addTypeParameter(simple).build(); - TypeData listMap = TypeData.builder(List.class).addTypeParameter(map).build(); - TypeData mapMap = TypeData.builder(Map.class).addTypeParameter(string).addTypeParameter(map).build(); - TypeData mapList = TypeData.builder(Map.class).addTypeParameter(string).addTypeParameter(list).build(); - TypeData mapListMap = TypeData.builder(Map.class).addTypeParameter(string).addTypeParameter(listMap).build(); - TypeData mapSet = TypeData.builder(Map.class).addTypeParameter(string).addTypeParameter(set).build(); - TypeData listMapList = TypeData.builder(List.class).addTypeParameter(mapList).build(); - TypeData listMapSet = TypeData.builder(List.class).addTypeParameter(mapSet).build(); + TypeData string = createTypeData(String.class); + TypeData simple = createTypeData(SimpleModel.class); + TypeData list = createBuilder(List.class).addTypeParameter(simple).build(); + TypeData listList = 
createBuilder(List.class).addTypeParameter(list).build(); + TypeData set = createBuilder(Set.class).addTypeParameter(simple).build(); + TypeData setSet = createBuilder(Set.class).addTypeParameter(set).build(); + TypeData sortedSet = createBuilder(SortedSet.class).addTypeParameter(simple).build(); + TypeData map = createBuilder(Map.class).addTypeParameter(string).addTypeParameter(simple).build(); + TypeData listMap = createBuilder(List.class).addTypeParameter(map).build(); + TypeData mapMap = createBuilder(Map.class).addTypeParameter(string).addTypeParameter(map).build(); + TypeData mapList = createBuilder(Map.class).addTypeParameter(string).addTypeParameter(list).build(); + TypeData mapListMap = createBuilder(Map.class).addTypeParameter(string).addTypeParameter(listMap).build(); + TypeData mapSet = createBuilder(Map.class).addTypeParameter(string).addTypeParameter(set).build(); + TypeData listMapList = createBuilder(List.class).addTypeParameter(mapList).build(); + TypeData listMapSet = createBuilder(List.class).addTypeParameter(mapSet).build(); ClassModel classModel = ClassModel.builder(CollectionNestedPojoModel.class).build(); - assertEquals(list, classModel.getPropertyModel("listSimple").getTypeData()); - assertEquals(listList, classModel.getPropertyModel("listListSimple").getTypeData()); - assertEquals(set, classModel.getPropertyModel("setSimple").getTypeData()); - assertEquals(setSet, classModel.getPropertyModel("setSetSimple").getTypeData()); + assertEquals(13, classModel.getPropertyModels().size()); + assertEquals(classModel.getPropertyModel("listSimple").getTypeData(), list); + assertEquals(classModel.getPropertyModel("listListSimple").getTypeData(), listList); + + assertEquals(classModel.getPropertyModel("setSimple").getTypeData(), set); + assertEquals(classModel.getPropertyModel("setSetSimple").getTypeData(), setSet); + + assertEquals(classModel.getPropertyModel("sortedSetSimple").getTypeData(), sortedSet); + + 
assertEquals(classModel.getPropertyModel("mapSimple").getTypeData(), map); + assertEquals(classModel.getPropertyModel("mapMapSimple").getTypeData(), mapMap); + + assertEquals(classModel.getPropertyModel("mapListSimple").getTypeData(), mapList); + assertEquals(classModel.getPropertyModel("mapListMapSimple").getTypeData(), mapListMap); + assertEquals(classModel.getPropertyModel("mapSetSimple").getTypeData(), mapSet); - assertEquals(map, classModel.getPropertyModel("mapSimple").getTypeData()); - assertEquals(mapMap, classModel.getPropertyModel("mapMapSimple").getTypeData()); + assertEquals(classModel.getPropertyModel("listMapSimple").getTypeData(), listMap); + assertEquals(classModel.getPropertyModel("listMapListSimple").getTypeData(), listMapList); + assertEquals(classModel.getPropertyModel("listMapSetSimple").getTypeData(), listMapSet); + } - assertEquals(mapList, classModel.getPropertyModel("mapListSimple").getTypeData()); - assertEquals(mapListMap, classModel.getPropertyModel("mapListMapSimple").getTypeData()); - assertEquals(mapSet, classModel.getPropertyModel("mapSetSimple").getTypeData()); + @Test + public void testWildcardModel() { + ClassModel classModel = ClassModel.builder(ConcreteAndNestedAbstractInterfaceModel.class).build(); - assertEquals(listMap, classModel.getPropertyModel("listMapSimple").getTypeData()); - assertEquals(listMapList, classModel.getPropertyModel("listMapListSimple").getTypeData()); - assertEquals(listMapSet, classModel.getPropertyModel("listMapSetSimple").getTypeData()); + assertEquals(3, classModel.getPropertyModels().size()); + assertEquals(classModel.getPropertyModel("name").getTypeData(), createTypeData(String.class)); + assertEquals(classModel.getPropertyModel("child").getTypeData(), createTypeData(InterfaceBasedModel.class)); + assertEquals(classModel.getPropertyModel("wildcardList").getTypeData(), createTypeData(List.class, InterfaceBasedModel.class)); } @Test @@ -95,36 +130,17 @@ public void testPropertySelection() { ClassModel 
classModel = ClassModel.builder(PropertySelectionModel.class).build(); assertEquals(2, classModel.getPropertyModels().size()); - assertNotNull(classModel.getPropertyModel("stringField")); - assertNotNull(classModel.getPropertyModel("finalStringField")); + assertEquals(classModel.getPropertyModel("stringField").getTypeData(), createTypeData(String.class)); + assertEquals(classModel.getPropertyModel("finalStringField").getTypeData(), createTypeData(String.class)); } @Test - @SuppressWarnings("rawtypes") public void testMappingConcreteGenericTypes() { - TypeData string = TypeData.builder(String.class).build(); - TypeData simple = TypeData.builder(SimpleModel.class).build(); - TypeData map = TypeData.builder(Map.class).addTypeParameter(string).addTypeParameter(simple).build(); - TypeData genericHolder = TypeData.builder(GenericHolderModel.class).addTypeParameter(map).build(); - ClassModel classModel = ClassModel.builder(NestedGenericHolderMapModel.class).build(); - assertEquals(genericHolder, classModel.getPropertyModels().get(0).getTypeData()); - } - - @Test - @SuppressWarnings("rawtypes") - public void testMappingSimpleGenericsModelTypes() { - TypeData object = TypeData.builder(Object.class).build(); - TypeData integer = TypeData.builder(Integer.class).build(); - TypeData string = TypeData.builder(String.class).build(); - TypeData list = TypeData.builder(List.class).addTypeParameter(object).build(); - TypeData map = TypeData.builder(Map.class).addTypeParameter(string).addTypeParameter(object).build(); - ClassModel classModel = ClassModel.builder(SimpleGenericsModel.class).build(); - assertEquals(integer, classModel.getPropertyModel("myIntegerField").getTypeData()); - assertEquals(object, classModel.getPropertyModel("myGenericField").getTypeData()); - assertEquals(list, classModel.getPropertyModel("myListField").getTypeData()); - assertEquals(map, classModel.getPropertyModel("myMapField").getTypeData()); + assertEquals(1, classModel.getPropertyModels().size()); + 
assertEquals(classModel.getPropertyModels().get(0).getTypeData(), createBuilder(GenericHolderModel.class) + .addTypeParameter(createTypeData(Map.class, String.class, SimpleModel.class)).build()); } @Test @@ -137,10 +153,14 @@ public void testAnnotationModel() { assertTrue(classModel.useDiscriminator()); assertEquals("_cls", classModel.getDiscriminatorKey()); assertEquals("MyAnnotationModel", classModel.getDiscriminator()); - assertEquals(propertyModel, classModel.getIdPropertyModel()); - assertEquals(3, classModel.getPropertyModels().size()); + assertEquals(propertyModel, classModel.getPropertyModel("customId")); assertTrue(classModel.getInstanceCreatorFactory() instanceof InstanceCreatorFactoryImpl); + + assertEquals(3, classModel.getPropertyModels().size()); + assertEquals(createTypeData(String.class), classModel.getPropertyModel("customId").getTypeData()); + assertEquals(createTypeData(AnnotationModel.class), classModel.getPropertyModel("child").getTypeData()); + assertEquals(createTypeData(AnnotationModel.class), classModel.getPropertyModel("alternative").getTypeData()); } @Test @@ -151,6 +171,8 @@ public void testInheritedClassAnnotations() { assertEquals("org.bson.codecs.pojo.entities.conventions.AnnotationInheritedModel", classModel.getDiscriminator()); assertEquals(2, classModel.getPropertyModels().size()); + assertEquals(createTypeData(String.class), classModel.getPropertyModel("customId").getTypeData()); + assertEquals(createTypeData(AnnotationModel.class), classModel.getPropertyModel("child").getTypeData()); PropertyModel propertyModel = classModel.getPropertyModel("customId"); assertEquals(propertyModel, classModel.getIdPropertyModel()); @@ -159,4 +181,103 @@ public void testInheritedClassAnnotations() { assertTrue(propertyModel.useDiscriminator()); } + @Test + public void testOverridePropertyWithSubclass() { + ClassModel classModel = ClassModel.builder(ShapeHolderModel.class).build(); + assertEquals(1, classModel.getPropertyModels().size()); + 
assertEquals(createTypeData(ShapeModelAbstract.class), classModel.getPropertyModel("shape").getTypeData()); + + ClassModel overriddenClassModel = ClassModel.builder(ShapeHolderCircleModel.class).build(); + assertEquals(1, overriddenClassModel.getPropertyModels().size()); + assertEquals(createTypeData(ShapeModelCircle.class), overriddenClassModel.getPropertyModel("shape").getTypeData()); + } + + @Test + public void testListGenericExtendedModel() { + ClassModel classModel = ClassModel.builder(ListGenericExtendedModel.class).build(); + + assertEquals(1, classModel.getPropertyModels().size()); + assertEquals(createTypeData(List.class, Integer.class), classModel.getPropertyModel("values").getTypeData()); + } + + @Test + public void testListListGenericExtendedModel() { + ClassModel classModel = ClassModel.builder(ListListGenericExtendedModel.class).build(); + + assertEquals(1, classModel.getPropertyModels().size()); + assertEquals(createBuilder(List.class).addTypeParameter(createTypeData(List.class, Integer.class)).build(), classModel.getPropertyModel("values").getTypeData()); + } + + @Test + public void testMapGenericExtendedModel() { + ClassModel classModel = ClassModel.builder(MapGenericExtendedModel.class).build(); + + assertEquals(1, classModel.getPropertyModels().size()); + assertEquals(createTypeData(Map.class, String.class, Integer.class), classModel.getPropertyModel("values").getTypeData()); + } + + @Test + public void testMapMapGenericExtendedModel() { + ClassModel classModel = ClassModel.builder(MapMapGenericExtendedModel.class).build(); + + assertEquals(1, classModel.getPropertyModels().size()); + assertEquals(createBuilder(Map.class).addTypeParameter(createTypeData(String.class)) + .addTypeParameter(createTypeData(Map.class, String.class, Integer.class)).build(), classModel.getPropertyModel("values").getTypeData()); + } + + @Test + public void testListMapGenericExtendedModel() { + ClassModel classModel = 
ClassModel.builder(ListMapGenericExtendedModel.class).build(); + + assertEquals(1, classModel.getPropertyModels().size()); + assertEquals(createBuilder(List.class).addTypeParameter(createTypeData(Map.class, String.class, Integer.class)).build(), classModel.getPropertyModel("values").getTypeData()); + } + + + @Test + public void testMapListGenericExtendedModel() { + ClassModel classModel = ClassModel.builder(MapListGenericExtendedModel.class).build(); + + assertEquals(1, classModel.getPropertyModels().size()); + assertEquals(createBuilder(Map.class) + .addTypeParameter(createTypeData(String.class)) + .addTypeParameter(createTypeData(List.class, Integer.class)).build(), classModel.getPropertyModel("values").getTypeData()); + } + + + @Test + public void testMultipleBoundsModel() { + ClassModel classModel = ClassModel.builder(MultipleBoundsModel.class).build(); + + assertEquals(3, classModel.getPropertyModels().size()); + + assertEquals(createTypeData(Double.class), classModel.getPropertyModel("level1").getTypeData()); + assertEquals(createTypeData(List.class, Integer.class), classModel.getPropertyModel("level2").getTypeData()); + assertEquals(createTypeData(Map.class, String.class, String.class), classModel.getPropertyModel("level3").getTypeData()); + } + + @Test + public void testSimpleWithStaticModel() { + ClassModel classModel = ClassModel.builder(SimpleWithStaticModel.class).build(); + + assertEquals(2, classModel.getPropertyModels().size()); + assertEquals(createTypeData(Integer.class), classModel.getPropertyModel("integerField").getTypeData()); + assertEquals(createTypeData(String.class), classModel.getPropertyModel("stringField").getTypeData()); + + } + + TypeData.Builder createBuilder(final Class clazz, final Class... 
types) { + TypeData.Builder builder = TypeData.builder(clazz); + List> subTypes = new ArrayList<>(); + for (final Class type : types) { + subTypes.add(TypeData.builder(type).build()); + } + builder.addTypeParameters(subTypes); + return builder; + } + + TypeData createTypeData(final Class clazz, final Class... types) { + return createBuilder(clazz, types).build(); + } + } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/ConventionsTest.java b/bson/src/test/unit/org/bson/codecs/pojo/ConventionsTest.java index 32233ea1ea6..6554ab318ec 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/ConventionsTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/ConventionsTest.java @@ -16,12 +16,20 @@ package org.bson.codecs.pojo; +import org.bson.BsonType; import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.pojo.entities.BsonIdModel; +import org.bson.codecs.pojo.entities.ConventionModel; import org.bson.codecs.pojo.entities.SimpleModel; import org.bson.codecs.pojo.entities.conventions.AnnotationBsonPropertyIdModel; +import org.bson.codecs.pojo.entities.conventions.AnnotationBsonRepresentation; +import org.bson.codecs.pojo.entities.conventions.AnnotationCollision; import org.bson.codecs.pojo.entities.conventions.AnnotationDefaultsModel; -import org.bson.codecs.pojo.entities.conventions.AnnotationModel; import org.bson.codecs.pojo.entities.conventions.AnnotationNameCollision; +import org.bson.codecs.pojo.entities.conventions.AnnotationWithObjectIdModel; +import org.bson.codecs.pojo.entities.conventions.AnnotationWriteCollision; +import org.bson.codecs.pojo.entities.conventions.BsonIgnoreDuplicatePropertyMultipleTypes; +import org.bson.codecs.pojo.entities.conventions.CreatorConstructorNoKnownIdModel; import org.bson.codecs.pojo.entities.conventions.CreatorInvalidConstructorModel; import org.bson.codecs.pojo.entities.conventions.CreatorInvalidMethodModel; import 
org.bson.codecs.pojo.entities.conventions.CreatorInvalidMethodReturnTypeModel; @@ -30,23 +38,25 @@ import org.bson.codecs.pojo.entities.conventions.CreatorInvalidMultipleStaticCreatorsModel; import org.bson.codecs.pojo.entities.conventions.CreatorInvalidTypeConstructorModel; import org.bson.codecs.pojo.entities.conventions.CreatorInvalidTypeMethodModel; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static java.util.Collections.singletonList; -import static junit.framework.TestCase.assertFalse; -import static junit.framework.TestCase.assertTrue; import static org.bson.codecs.pojo.Conventions.ANNOTATION_CONVENTION; import static org.bson.codecs.pojo.Conventions.CLASS_AND_PROPERTY_CONVENTION; import static org.bson.codecs.pojo.Conventions.DEFAULT_CONVENTIONS; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.bson.codecs.pojo.Conventions.NO_CONVENTIONS; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public final class ConventionsTest { @Test public void testDefaultConventions() { - ClassModel classModel = ClassModel.builder(AnnotationModel.class) + ClassModel classModel = ClassModel.builder(AnnotationWithObjectIdModel.class) .conventions(DEFAULT_CONVENTIONS).build(); assertTrue(classModel.useDiscriminator()); @@ -58,6 +68,7 @@ public void testDefaultConventions() { assertNotNull(idPropertyModel); assertEquals("customId", idPropertyModel.getName()); assertEquals("_id", idPropertyModel.getWriteName()); + assertEquals(classModel.getIdPropertyModelHolder().getIdGenerator(), IdGenerators.OBJECT_ID_GENERATOR); PropertyModel 
childPropertyModel = classModel.getPropertyModel("child"); assertNotNull(childPropertyModel); @@ -98,6 +109,28 @@ public void testBsonPropertyIdModelModel() { assertNull(classModel.getIdPropertyModel()); } + @Test + public void testBsonRepresentation() { + ClassModel classModel = ClassModel.builder(AnnotationBsonRepresentation.class).build(); + assertEquals(classModel.getPropertyModel("id").getBsonRepresentation(), BsonType.OBJECT_ID); + assertEquals(classModel.getPropertyModel("parentId").getBsonRepresentation(), BsonType.OBJECT_ID); + assertNull(classModel.getPropertyModel("friendId").getBsonRepresentation()); + assertNull(classModel.getPropertyModel("age").getBsonRepresentation()); + } + + @Test + public void testIdGeneratorChoice() { + ClassModel stringIdObjectRep = ClassModel.builder(AnnotationBsonRepresentation.class).build(); + assertEquals(stringIdObjectRep.getIdPropertyModelHolder().getIdGenerator(), IdGenerators.STRING_ID_GENERATOR); + + ClassModel stringIdStringRep = ClassModel.builder(ConventionModel.class).build(); + assertNull(stringIdStringRep.getIdPropertyModelHolder().getIdGenerator()); + + ClassModel bsonId = ClassModel.builder(BsonIdModel.class).build(); + assertEquals(bsonId.getIdPropertyModelHolder().getIdGenerator(), IdGenerators.BSON_OBJECT_ID_GENERATOR); + } + + @Test @SuppressWarnings("unchecked") public void testClassAndFieldConventionDoesNotOverwrite() { @@ -106,24 +139,19 @@ public void testClassAndFieldConventionDoesNotOverwrite() { .discriminatorKey("_cls") .discriminator("Simples") .conventions(singletonList(CLASS_AND_PROPERTY_CONVENTION)) - .instanceCreatorFactory(new InstanceCreatorFactory() { - @Override - public InstanceCreator create() { - return null; - } - }); + .instanceCreatorFactory(() -> null); PropertyModelBuilder propertyModelBuilder = (PropertyModelBuilder) builder.getProperty("integerField"); propertyModelBuilder.writeName("id") - .propertySerialization(new PropertyModelSerializationImpl()) - .propertyAccessor(new 
PropertyAccessorTest()); + .propertySerialization(new PropertyModelSerializationImpl<>()) + .propertyAccessor(new PropertyAccessorTest<>()); PropertyModelBuilder propertyModelBuilder2 = (PropertyModelBuilder) builder.getProperty("stringField"); propertyModelBuilder2.writeName("_id") - .propertySerialization(new PropertyModelSerializationImpl()) - .propertyAccessor(new PropertyAccessorTest()); + .propertySerialization(new PropertyModelSerializationImpl<>()) + .propertyAccessor(new PropertyAccessorTest<>()); - ClassModel classModel = builder.idPropertyName("stringField").build(); + ClassModel classModel = builder.idPropertyName("stringField").build(); assertTrue(classModel.useDiscriminator()); assertEquals("_cls", classModel.getDiscriminatorKey()); @@ -136,58 +164,93 @@ public InstanceCreator create() { assertNull(idPropertyModel.useDiscriminator()); } - @Test(expected = CodecConfigurationException.class) + @Test + public void testAnnotationCollision() { + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(AnnotationCollision.class).conventions(DEFAULT_CONVENTIONS).build()); + } + + @Test + public void testAnnotationWriteCollision() { + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(AnnotationWriteCollision.class).conventions(DEFAULT_CONVENTIONS).build()); + } + + @Test public void testAnnotationNameCollision() { - ClassModel.builder(AnnotationNameCollision.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(AnnotationNameCollision.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorInvalidConstructorModel() { - ClassModel.builder(CreatorInvalidConstructorModel.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + 
ClassModel.builder(CreatorInvalidConstructorModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorInvalidMethodModel() { - ClassModel.builder(CreatorInvalidMethodModel.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(CreatorInvalidMethodModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorInvalidMultipleConstructorsModel() { - ClassModel.builder(CreatorInvalidMultipleConstructorsModel.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(CreatorInvalidMultipleConstructorsModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorInvalidMultipleCreatorsModel() { - ClassModel.builder(CreatorInvalidMultipleCreatorsModel.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(CreatorInvalidMultipleCreatorsModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorInvalidMultipleStaticCreatorsModel() { - ClassModel.builder(CreatorInvalidMultipleStaticCreatorsModel.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(CreatorInvalidMultipleStaticCreatorsModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorInvalidMethodReturnTypeModel() { - 
ClassModel.builder(CreatorInvalidMethodReturnTypeModel.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(CreatorInvalidMethodReturnTypeModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorInvalidTypeConstructorModel() { - ClassModel.builder(CreatorInvalidTypeConstructorModel.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(CreatorInvalidTypeConstructorModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorInvalidTypeMethodModel() { - ClassModel.builder(CreatorInvalidTypeMethodModel.class) - .conventions(singletonList(ANNOTATION_CONVENTION)).build(); + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(CreatorInvalidTypeMethodModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); + } + + @Test + public void testCreatorConstructorNoKnownIdModel() { + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(CreatorConstructorNoKnownIdModel.class) + .conventions(singletonList(ANNOTATION_CONVENTION)).build()); + } + + @Test + public void testBsonIgnoreDuplicatePropertyMultipleTypesModel() { + assertThrows(CodecConfigurationException.class, () -> + ClassModel.builder(BsonIgnoreDuplicatePropertyMultipleTypes.class) + .conventions(NO_CONVENTIONS).build()); } private class PropertyAccessorTest implements PropertyAccessor { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/IdGeneratorsTest.java b/bson/src/test/unit/org/bson/codecs/pojo/IdGeneratorsTest.java new file mode 100644 index 00000000000..fe812ba8fe9 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/IdGeneratorsTest.java @@ -0,0 +1,51 @@ 
+/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo; + +import org.bson.BsonObjectId; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class IdGeneratorsTest { + + @Test + public void testObjectIdGenerator() { + IdGenerator idGenerator = IdGenerators.OBJECT_ID_GENERATOR; + + assertEquals(ObjectId.class, idGenerator.getType()); + assertEquals(ObjectId.class, idGenerator.generate().getClass()); + } + + @Test + public void testBsonObjectIdGenerator() { + IdGenerator idGenerator = IdGenerators.BSON_OBJECT_ID_GENERATOR; + + assertEquals(BsonObjectId.class, idGenerator.getType()); + assertEquals(BsonObjectId.class, idGenerator.generate().getClass()); + } + + @Test + public void testStringIdGenerator() { + IdGenerator idGenerator = IdGenerators.STRING_ID_GENERATOR; + + assertEquals(String.class, idGenerator.getType()); + assertEquals(String.class, idGenerator.generate().getClass()); + } + +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecCyclicalLookupTest.java b/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecCyclicalLookupTest.java new file mode 100644 index 00000000000..161a54fd902 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecCyclicalLookupTest.java @@ -0,0 +1,166 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.bson.codecs.pojo; + +import org.bson.codecs.BsonValueCodecProvider; +import org.bson.codecs.Codec; +import org.bson.codecs.ValueCodecProvider; +import org.bson.codecs.configuration.CodecProvider; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.codecs.pojo.entities.ConventionModel; +import org.bson.codecs.pojo.entities.GenericHolderModel; +import org.bson.codecs.pojo.entities.GenericTreeModel; +import org.bson.codecs.pojo.entities.ListListGenericExtendedModel; +import org.bson.codecs.pojo.entities.NestedGenericHolderFieldWithMultipleTypeParamsModel; +import org.bson.codecs.pojo.entities.NestedGenericTreeModel; +import org.bson.codecs.pojo.entities.PropertyWithMultipleTypeParamsModel; +import org.bson.codecs.pojo.entities.SimpleGenericsModel; +import org.bson.codecs.pojo.entities.SimpleModel; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import static java.util.Arrays.asList; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class PojoCodecCyclicalLookupTest extends PojoTestCase { + + @Test + void testSimpleModel() { + SimpleModel model = getSimpleModel(); + LookupCountingCodecRegistry registry = createRegistry(SimpleModel.class); + roundTrip(registry, model, SIMPLE_MODEL_JSON); + + assertEquals(2, 
registry.counters.get(SimpleModel.class).get()); // Looked up in encodesTo & decodesTo + assertEquals(1, registry.counters.get(String.class).get()); // Lookup on encode then cached (PropertyCodecRegistry) + assertEquals(1, registry.counters.get(Integer.class).get()); // Lookup on encode then cached (PropertyCodecRegistry) + } + + @Test + void testConventionModel() { + ConventionModel model = getConventionModel(); + String json = "{'_id': 'id', '_cls': 'AnnotatedConventionModel', 'myFinalField': 10, 'myIntField': 10," + + "'child': {'_id': 'child', 'myFinalField': 10, 'myIntField': 10," + + "'model': {'integerField': 42, 'stringField': 'myString'}}}"; + LookupCountingCodecRegistry registry = createRegistry(ConventionModel.class, SimpleModel.class); + roundTrip(registry, model, json); + + assertEquals(2, registry.counters.get(ConventionModel.class).get()); // Looked up in encodesTo & decodesTo + assertEquals(1, registry.counters.get(SimpleModel.class).get()); // Lookup on encode then cached (PropertyCodecRegistry) + assertEquals(2, registry.counters.get(String.class).get()); // Once for ConventionModel & once for SimpleModel + assertEquals(2, registry.counters.get(Integer.class).get()); // Once for ConventionModel & once for SimpleModel + } + + @Test + void testNestedGenericTreeModel() { + NestedGenericTreeModel model = new NestedGenericTreeModel(42, getGenericTreeModel()); + String json = "{'intField': 42, 'nested': {'field1': 'top', 'field2': 1, " + + "'left': {'field1': 'left', 'field2': 2, 'left': {'field1': 'left', 'field2': 3}}, " + + "'right': {'field1': 'right', 'field2': 4, 'left': {'field1': 'left', 'field2': 5}}}}"; + LookupCountingCodecRegistry registry = createRegistry(NestedGenericTreeModel.class, GenericTreeModel.class); + roundTrip(registry, model, json); + + assertEquals(2, registry.counters.get(NestedGenericTreeModel.class).get()); + assertEquals(1, registry.counters.get(GenericTreeModel.class).get()); + assertEquals(1, 
registry.counters.get(String.class).get()); + assertEquals(1, registry.counters.get(Integer.class).get()); + } + + @Test + void testNestedGenericHolderFieldWithMultipleTypeParamsModel() { + NestedGenericHolderFieldWithMultipleTypeParamsModel model = getNestedGenericHolderFieldWithMultipleTypeParamsModel(); + LookupCountingCodecRegistry registry = createRegistry(NestedGenericHolderFieldWithMultipleTypeParamsModel.class, + PropertyWithMultipleTypeParamsModel.class, SimpleGenericsModel.class, GenericHolderModel.class); + String json = "{'nested': {'myGenericField': {_t: 'PropertyWithMultipleTypeParamsModel', " + + "'simpleGenericsModel': {_t: 'org.bson.codecs.pojo.entities.SimpleGenericsModel', 'myIntegerField': 42, " + + "'myGenericField': {'$numberLong': '101'}, 'myListField': ['B', 'C'], 'myMapField': {'D': 2, 'E': 3, 'F': 4 }}}," + + "'myLongField': {'$numberLong': '42'}}}"; + + + roundTrip(registry, model, json); + + assertEquals(2, registry.counters.get(NestedGenericHolderFieldWithMultipleTypeParamsModel.class).get()); + assertEquals(1, registry.counters.get(PropertyWithMultipleTypeParamsModel.class).get()); + assertEquals(1, registry.counters.get(SimpleGenericsModel.class).get()); + assertEquals(1, registry.counters.get(GenericHolderModel.class).get()); + assertEquals(1, registry.counters.get(Long.class).get()); + assertEquals(1, registry.counters.get(String.class).get()); + assertEquals(1, registry.counters.get(Integer.class).get()); + } + + @Test + void testListListGenericExtendedModel() { + ListListGenericExtendedModel model = new ListListGenericExtendedModel(asList(asList(1, 2, 3), asList(4, 5, 6))); + LookupCountingCodecRegistry registry = createRegistry(ListListGenericExtendedModel .class); + String json = "{values: [[1, 2, 3], [4, 5, 6]]}"; + roundTrip(registry, model, json); + + assertEquals(2, registry.counters.get(ListListGenericExtendedModel.class).get()); + assertEquals(1, registry.counters.get(Integer.class).get()); + } + + + 
LookupCountingCodecRegistry createRegistry(final Class... classes) { + return new LookupCountingCodecRegistry( + new BsonValueCodecProvider(), + new ValueCodecProvider(), + getPojoCodecProviderBuilder(classes).build() + ); + } + + + static class LookupCountingCodecRegistry implements CodecRegistry { + + private final ConcurrentHashMap, AtomicInteger> counters; + private final List codecProviders; + + LookupCountingCodecRegistry(final CodecProvider... providers) { + this.codecProviders = asList(providers); + this.counters = new ConcurrentHashMap<>(); + } + + @Override + public Codec get(final Class clazz) { + incrementCount(clazz); + for (CodecProvider provider : codecProviders) { + Codec codec = provider.get(clazz, this); + if (codec != null) { + return codec; + } + } + return null; + } + + public Codec get(final Class clazz, final CodecRegistry registry) { + incrementCount(clazz); + for (CodecProvider provider : codecProviders) { + Codec codec = provider.get(clazz, registry); + if (codec != null) { + return codec; + } + } + return null; + } + + private synchronized void incrementCount(final Class clazz) { + AtomicInteger atomicInteger = counters.computeIfAbsent(clazz, k -> new AtomicInteger()); + atomicInteger.incrementAndGet(); + } + } + +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecDiscriminatorTest.java b/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecDiscriminatorTest.java new file mode 100644 index 00000000000..b95e7bcefda --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecDiscriminatorTest.java @@ -0,0 +1,82 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo; + +import org.bson.codecs.pojo.entities.DiscriminatorModel; +import org.bson.codecs.pojo.entities.DiscriminatorWithGetterModel; +import org.bson.codecs.pojo.entities.DiscriminatorWithProperty; +import org.bson.codecs.pojo.entities.DiscriminatorWithPropertyAndIgnore; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; + +public final class PojoCodecDiscriminatorTest extends PojoTestCase { + + @Test + public void testDiscriminatorEncodedOnceWhenItIsAlsoAGetter() { + byte[] encodedDiscriminatorModel = encode( + getCodec(DiscriminatorModel.class), + new DiscriminatorModel(), + false + ).toByteArray(); + byte[] encodedDiscriminatorWithGetter = encode( + getCodec(DiscriminatorWithGetterModel.class), + new DiscriminatorWithGetterModel(), + false + ).toByteArray(); + assertArrayEquals(encodedDiscriminatorModel, encodedDiscriminatorWithGetter); + } + + @Test + public void testDiscriminatorRoundTripWhenItIsAlsoAGetter() { + roundTrip( + new DiscriminatorWithGetterModel(), + "{discriminatorKey:'discriminatorValue'}" + ); + } + + @Test + public void testDiscriminatorEncodedOnceWhenItIsAlsoAProperty() { + byte[] encodedDiscriminatorModel = encode( + getCodec(DiscriminatorModel.class), + new DiscriminatorModel(), + false + ).toByteArray(); + byte[] encodedDiscriminatorWithProperty = encode( + getCodec(DiscriminatorWithProperty.class), + new DiscriminatorWithProperty(), + false + ).toByteArray(); + assertArrayEquals(encodedDiscriminatorModel, 
encodedDiscriminatorWithProperty); + } + + @Test + public void testDiscriminatorRoundTripWhenItIsAlsoAProperty() { + roundTrip( + new DiscriminatorWithProperty(), + "{discriminatorKey:'discriminatorValue'}" + ); + } + + @Test + public void testDiscriminatorRoundTripWhenItIsAlsoAPropertyWithIgnoredProperty() { + roundTrip( + new DiscriminatorWithPropertyAndIgnore(), + "{discriminatorKey:'discriminatorValue'}" + ); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecProviderTest.java b/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecProviderTest.java index 22ce1ef19c4..1921e161854 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecProviderTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/PojoCodecProviderTest.java @@ -21,11 +21,11 @@ import org.bson.codecs.configuration.CodecRegistry; import org.bson.codecs.pojo.entities.SimpleModel; import org.bson.codecs.pojo.entities.conventions.CreatorInvalidMethodModel; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; public final class PojoCodecProviderTest extends PojoTestCase { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/PojoCustomTest.java b/bson/src/test/unit/org/bson/codecs/pojo/PojoCustomTest.java index c1bb44ba04c..7b38e16ef2e 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/PojoCustomTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/PojoCustomTest.java @@ -19,38 +19,62 @@ import org.bson.BsonReader; import org.bson.BsonWriter; import org.bson.Document; +import org.bson.codecs.BsonValueCodecProvider; import org.bson.codecs.Codec; import org.bson.codecs.DecoderContext; import org.bson.codecs.EncoderContext; +import org.bson.codecs.IterableCodecProvider; import 
org.bson.codecs.LongCodec; -import org.bson.codecs.MapCodec; +import org.bson.codecs.MapCodecProvider; +import org.bson.codecs.SimpleEnum; +import org.bson.codecs.ValueCodecProvider; import org.bson.codecs.configuration.CodecConfigurationException; import org.bson.codecs.configuration.CodecRegistry; +import org.bson.codecs.pojo.entities.AbstractInterfaceModel; import org.bson.codecs.pojo.entities.AsymmetricalCreatorModel; import org.bson.codecs.pojo.entities.AsymmetricalIgnoreModel; import org.bson.codecs.pojo.entities.AsymmetricalModel; +import org.bson.codecs.pojo.entities.BsonRepresentationUnsupportedInt; +import org.bson.codecs.pojo.entities.BsonRepresentationUnsupportedString; +import org.bson.codecs.pojo.entities.ComposeInterfaceModel; +import org.bson.codecs.pojo.entities.ConcreteAndNestedAbstractInterfaceModel; import org.bson.codecs.pojo.entities.ConcreteCollectionsModel; +import org.bson.codecs.pojo.entities.ConcreteModel; +import org.bson.codecs.pojo.entities.ConcreteField; +import org.bson.codecs.pojo.entities.ConcreteStandAloneAbstractInterfaceModel; import org.bson.codecs.pojo.entities.ConstructorNotPublicModel; import org.bson.codecs.pojo.entities.ConventionModel; import org.bson.codecs.pojo.entities.ConverterModel; import org.bson.codecs.pojo.entities.CustomPropertyCodecOptionalModel; +import org.bson.codecs.pojo.entities.GenericBaseModel; +import org.bson.codecs.pojo.entities.GenericHolderModel; import org.bson.codecs.pojo.entities.GenericTreeModel; +import org.bson.codecs.pojo.entities.InterfaceBasedModel; +import org.bson.codecs.pojo.entities.InterfaceModelB; +import org.bson.codecs.pojo.entities.InterfaceModelImpl; import org.bson.codecs.pojo.entities.InvalidCollectionModel; import org.bson.codecs.pojo.entities.InvalidGetterAndSetterModel; import org.bson.codecs.pojo.entities.InvalidMapModel; import org.bson.codecs.pojo.entities.InvalidMapPropertyCodecProvider; import org.bson.codecs.pojo.entities.InvalidSetterArgsModel; import 
org.bson.codecs.pojo.entities.MapStringObjectModel; +import org.bson.codecs.pojo.entities.NestedGenericHolderFieldWithMultipleTypeParamsModel; +import org.bson.codecs.pojo.entities.NestedSimpleIdModel; import org.bson.codecs.pojo.entities.Optional; import org.bson.codecs.pojo.entities.OptionalPropertyCodecProvider; import org.bson.codecs.pojo.entities.PrimitivesModel; import org.bson.codecs.pojo.entities.PrivateSetterFieldModel; -import org.bson.codecs.pojo.entities.SimpleEnum; +import org.bson.codecs.pojo.entities.PropertyWithMultipleTypeParamsModel; import org.bson.codecs.pojo.entities.SimpleEnumModel; +import org.bson.codecs.pojo.entities.SimpleGenericsModel; +import org.bson.codecs.pojo.entities.SimpleIdImmutableModel; +import org.bson.codecs.pojo.entities.SimpleIdModel; import org.bson.codecs.pojo.entities.SimpleModel; import org.bson.codecs.pojo.entities.SimpleNestedPojoModel; import org.bson.codecs.pojo.entities.UpperBoundsModel; import org.bson.codecs.pojo.entities.conventions.AnnotationModel; +import org.bson.codecs.pojo.entities.conventions.BsonExtraElementsInvalidModel; +import org.bson.codecs.pojo.entities.conventions.BsonRepresentationModel; import org.bson.codecs.pojo.entities.conventions.CollectionsGetterImmutableModel; import org.bson.codecs.pojo.entities.conventions.CollectionsGetterMutableModel; import org.bson.codecs.pojo.entities.conventions.CollectionsGetterNonEmptyModel; @@ -58,12 +82,13 @@ import org.bson.codecs.pojo.entities.conventions.CreatorConstructorPrimitivesModel; import org.bson.codecs.pojo.entities.conventions.CreatorConstructorThrowsExceptionModel; import org.bson.codecs.pojo.entities.conventions.CreatorMethodThrowsExceptionModel; +import org.bson.codecs.pojo.entities.conventions.InterfaceModelBInstanceCreatorConvention; import org.bson.codecs.pojo.entities.conventions.MapGetterImmutableModel; import org.bson.codecs.pojo.entities.conventions.MapGetterMutableModel; import 
org.bson.codecs.pojo.entities.conventions.MapGetterNonEmptyModel; import org.bson.codecs.pojo.entities.conventions.MapGetterNullModel; import org.bson.types.ObjectId; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Collection; @@ -74,14 +99,19 @@ import static java.lang.String.format; import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; import static org.bson.codecs.configuration.CodecRegistries.fromCodecs; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; import static org.bson.codecs.configuration.CodecRegistries.fromRegistries; +import static org.bson.codecs.pojo.Conventions.CLASS_AND_PROPERTY_CONVENTION; import static org.bson.codecs.pojo.Conventions.DEFAULT_CONVENTIONS; import static org.bson.codecs.pojo.Conventions.NO_CONVENTIONS; import static org.bson.codecs.pojo.Conventions.SET_PRIVATE_FIELDS_CONVENTION; import static org.bson.codecs.pojo.Conventions.USE_GETTERS_FOR_SETTERS; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public final class PojoCustomTest extends PojoTestCase { @@ -144,27 +174,24 @@ public void testConventionsEmpty() { @Test public void testConventionsCustom() { - List conventions = Collections.singletonList( - new Convention() { - @Override - public void apply(final ClassModelBuilder classModelBuilder) { - for (PropertyModelBuilder fieldModelBuilder : classModelBuilder.getPropertyModelBuilders()) { - fieldModelBuilder.discriminatorEnabled(false); - fieldModelBuilder.readName( - fieldModelBuilder.getName() - .replaceAll("([^_A-Z])([A-Z])", "$1_$2").toLowerCase()); - fieldModelBuilder.writeName( - fieldModelBuilder.getName() - .replaceAll("([^_A-Z])([A-Z])", "$1_$2").toLowerCase()); - } 
- if (classModelBuilder.getProperty("customId") != null) { - classModelBuilder.idPropertyName("customId"); - } - classModelBuilder.enableDiscriminator(true); - classModelBuilder.discriminatorKey("_cls"); - classModelBuilder.discriminator(classModelBuilder.getType().getSimpleName() - .replaceAll("([^_A-Z])([A-Z])", "$1_$2").toLowerCase()); + List conventions = singletonList( + classModelBuilder -> { + for (PropertyModelBuilder fieldModelBuilder : classModelBuilder.getPropertyModelBuilders()) { + fieldModelBuilder.discriminatorEnabled(false); + fieldModelBuilder.readName( + fieldModelBuilder.getName() + .replaceAll("([^_A-Z])([A-Z])", "$1_$2").toLowerCase()); + fieldModelBuilder.writeName( + fieldModelBuilder.getName() + .replaceAll("([^_A-Z])([A-Z])", "$1_$2").toLowerCase()); + } + if (classModelBuilder.getProperty("customId") != null) { + classModelBuilder.idPropertyName("customId"); } + classModelBuilder.enableDiscriminator(true); + classModelBuilder.discriminatorKey("_cls"); + classModelBuilder.discriminator(classModelBuilder.getType().getSimpleName() + .replaceAll("([^_A-Z])([A-Z])", "$1_$2").toLowerCase()); }); ClassModelBuilder classModel = ClassModel.builder(ConventionModel.class).conventions(conventions); @@ -176,15 +203,67 @@ public void apply(final ClassModelBuilder classModelBuilder) { + " 'simple_model': {'integer_field': 42, 'string_field': 'myString' } } }"); } + @Test + public void testIdGeneratorMutable() { + SimpleIdModel simpleIdModel = new SimpleIdModel(42, "myString"); + assertNull(simpleIdModel.getId()); + ClassModelBuilder builder = ClassModel.builder(SimpleIdModel.class).idGenerator(new ObjectIdGenerator()); + + roundTrip(getPojoCodecProviderBuilder(builder), simpleIdModel, "{'integerField': 42, 'stringField': 'myString'}"); + assertNull(simpleIdModel.getId()); + + encodesTo(getPojoCodecProviderBuilder(builder), simpleIdModel, + "{'_id': {'$oid': '123412341234123412341234'}, 'integerField': 42, 'stringField': 'myString'}", true); + 
assertEquals(new ObjectId("123412341234123412341234"), simpleIdModel.getId()); + } + + @Test + public void testIdGeneratorImmutable() { + SimpleIdImmutableModel simpleIdModelNoId = new SimpleIdImmutableModel(42, "myString"); + SimpleIdImmutableModel simpleIdModelWithId = new SimpleIdImmutableModel(new ObjectId("123412341234123412341234"), 42, "myString"); + ClassModelBuilder builder = ClassModel.builder(SimpleIdImmutableModel.class) + .idGenerator(new ObjectIdGenerator()); + String json = "{'_id': {'$oid': '123412341234123412341234'}, 'integerField': 42, 'stringField': 'myString'}"; + + encodesTo(getPojoCodecProviderBuilder(builder), simpleIdModelNoId, json, true); + decodesTo(getPojoCodecProviderBuilder(builder), json, simpleIdModelWithId); + } + + @Test + public void testIdGeneratorNonObjectId() { + NestedSimpleIdModel nestedSimpleIdModel = new NestedSimpleIdModel(new SimpleIdModel(42, "myString")); + assertNull(nestedSimpleIdModel.getId()); + ClassModelBuilder builder = ClassModel.builder(NestedSimpleIdModel.class) + .idGenerator(new IdGenerator() { + @Override + public String generate() { + return "a"; + } + + @Override + public Class getType() { + return String.class; + } + }); + + roundTrip(getPojoCodecProviderBuilder(builder, ClassModel.builder(SimpleIdModel.class)), nestedSimpleIdModel, + "{'nestedSimpleIdModel': {'integerField': 42, 'stringField': 'myString'}}"); + assertNull(nestedSimpleIdModel.getId()); + + encodesTo(getPojoCodecProviderBuilder(builder, ClassModel.builder(SimpleIdModel.class)), nestedSimpleIdModel, + "{'_id': 'a', 'nestedSimpleIdModel': {'integerField': 42, 'stringField': 'myString'}}", true); + assertEquals("a", nestedSimpleIdModel.getId()); + } + @Test public void testSetPrivateFieldConvention() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(PrivateSetterFieldModel.class); - ArrayList conventions = new ArrayList(DEFAULT_CONVENTIONS); + ArrayList conventions = new ArrayList<>(DEFAULT_CONVENTIONS); 
conventions.add(SET_PRIVATE_FIELDS_CONVENTION); builder.conventions(conventions); roundTrip(builder, new PrivateSetterFieldModel(1, "2", asList("a", "b")), - "{'integerField': 1, 'stringField': '2', listField: ['a', 'b']}"); + "{'someMethod': 'some method', 'integerField': 1, 'stringField': '2', listField: ['a', 'b']}"); } @Test @@ -196,77 +275,102 @@ public void testUseGettersForSettersConvention() { roundTrip(builder, new MapGetterMutableModel(Collections.singletonMap("a", 3)), "{mapField: {a: 3}}"); } - @Test(expected = CodecConfigurationException.class) + @Test + public void testWithWildcardListField() { + ClassModel interfaceBasedModelClassModel = + ClassModel.builder(InterfaceBasedModel.class).enableDiscriminator(true).build(); + PojoCodecProvider.Builder builder = PojoCodecProvider.builder().automatic(true) + .register(interfaceBasedModelClassModel) + .register(AbstractInterfaceModel.class, ConcreteStandAloneAbstractInterfaceModel.class, + ConcreteAndNestedAbstractInterfaceModel.class); + + roundTrip(builder, + new ConcreteAndNestedAbstractInterfaceModel("A", + singletonList(new ConcreteStandAloneAbstractInterfaceModel("B"))), + "{'_t': 'org.bson.codecs.pojo.entities.ConcreteAndNestedAbstractInterfaceModel', 'name': 'A', " + + " 'wildcardList': [{'_t': 'org.bson.codecs.pojo.entities.ConcreteStandAloneAbstractInterfaceModel', " + + "'name': 'B'}]}"); + } + + @Test public void testUseGettersForSettersConventionInvalidTypeForCollection() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(CollectionsGetterMutableModel.class) .conventions(getDefaultAndUseGettersConvention()); - - decodingShouldFail(getCodec(builder, CollectionsGetterMutableModel.class), "{listField: ['1', '2']}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(builder, CollectionsGetterMutableModel.class), "{listField: ['1', '2']}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void 
testUseGettersForSettersConventionInvalidTypeForMap() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(MapGetterMutableModel.class) .conventions(getDefaultAndUseGettersConvention()); - - decodingShouldFail(getCodec(builder, MapGetterMutableModel.class), "{mapField: {a: '1'}}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(builder, MapGetterMutableModel.class), "{mapField: {a: '1'}}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testUseGettersForSettersConventionImmutableCollection() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(CollectionsGetterImmutableModel.class) .conventions(getDefaultAndUseGettersConvention()); - - roundTrip(builder, new CollectionsGetterImmutableModel(asList(1, 2)), "{listField: [1, 2]}"); + assertThrows(CodecConfigurationException.class, () -> + roundTrip(builder, new CollectionsGetterImmutableModel(asList(1, 2)), "{listField: [1, 2]}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testUseGettersForSettersConventionImmutableMap() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(MapGetterImmutableModel.class) .conventions(getDefaultAndUseGettersConvention()); - - roundTrip(builder, new MapGetterImmutableModel(Collections.singletonMap("a", 3)), "{mapField: {a: 3}}"); + assertThrows(CodecConfigurationException.class, () -> + roundTrip(builder, new MapGetterImmutableModel(Collections.singletonMap("a", 3)), "{mapField: {a: 3}}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testUseGettersForSettersConventionNullCollection() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(CollectionsGetterNullModel.class) .conventions(getDefaultAndUseGettersConvention()); - - roundTrip(builder, new CollectionsGetterNullModel(asList(1, 2)), "{listField: [1, 2]}"); + assertThrows(CodecConfigurationException.class, () -> + roundTrip(builder, new 
CollectionsGetterNullModel(asList(1, 2)), "{listField: [1, 2]}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testUseGettersForSettersConventionNullMap() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(MapGetterNullModel.class) .conventions(getDefaultAndUseGettersConvention()); - - roundTrip(builder, new MapGetterNullModel(Collections.singletonMap("a", 3)), "{mapField: {a: 3}}"); + assertThrows(CodecConfigurationException.class, () -> + roundTrip(builder, new MapGetterNullModel(Collections.singletonMap("a", 3)), "{mapField: {a: 3}}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testUseGettersForSettersConventionNotEmptyCollection() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(CollectionsGetterNonEmptyModel.class) .conventions(getDefaultAndUseGettersConvention()); - - roundTrip(builder, new CollectionsGetterNonEmptyModel(asList(1, 2)), "{listField: [1, 2]}"); + assertThrows(CodecConfigurationException.class, () -> + roundTrip(builder, new CollectionsGetterNonEmptyModel(asList(1, 2)), "{listField: [1, 2]}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testUseGettersForSettersConventionNotEmptyMap() { PojoCodecProvider.Builder builder = getPojoCodecProviderBuilder(MapGetterNonEmptyModel.class) .conventions(getDefaultAndUseGettersConvention()); - - roundTrip(builder, new MapGetterNonEmptyModel(Collections.singletonMap("a", 3)), "{mapField: {a: 3}}"); + assertThrows(CodecConfigurationException.class, () -> + roundTrip(builder, new MapGetterNonEmptyModel(Collections.singletonMap("a", 3)), "{mapField: {a: 3}}")); } @Test public void testEnumSupportWithCustomCodec() { - CodecRegistry registry = fromRegistries(getCodecRegistry(getPojoCodecProviderBuilder(SimpleEnumModel.class)), - fromCodecs(new SimpleEnumCodec())); + CodecRegistry registry = fromRegistries(fromCodecs(new SimpleEnumCodec()), + 
getCodecRegistry(getPojoCodecProviderBuilder(SimpleEnumModel.class))); roundTrip(registry, new SimpleEnumModel(SimpleEnum.BRAVO), "{ 'myEnum': 1 }"); } + @Test + public void testEnumSupportWithFallback() { + // Create a registry without EnumCodecProvider, to test the fallback in EnumPropertyCodecProvider#get + CodecRegistry registry = fromRegistries(fromProviders(new ValueCodecProvider(), + getPojoCodecProviderBuilder(SimpleEnumModel.class).build())); + roundTrip(registry, new SimpleEnumModel(SimpleEnum.BRAVO), "{ 'myEnum': 'BRAVO' }"); + } + @Test @SuppressWarnings("unchecked") public void testCustomCodec() { @@ -288,12 +392,7 @@ public void testCustomPropertySerializer() { model.setIntegerField(null); ClassModelBuilder classModel = ClassModel.builder(SimpleModel.class); ((PropertyModelBuilder) classModel.getProperty("integerField")) - .propertySerialization(new PropertySerialization() { - @Override - public boolean shouldSerialize(final Integer value) { - return true; - } - }); + .propertySerialization(value -> true); roundTrip(getPojoCodecProviderBuilder(classModel), model, "{'integerField': null, 'stringField': 'myString'}"); } @@ -305,12 +404,7 @@ public void testCanHandleNullValuesForNestedModels() { model.setSimple(null); ClassModelBuilder classModel = ClassModel.builder(SimpleNestedPojoModel.class); ((PropertyModelBuilder) classModel.getProperty("simple")) - .propertySerialization(new PropertySerialization() { - @Override - public boolean shouldSerialize(final SimpleModel value) { - return true; - } - }); + .propertySerialization(value -> true); ClassModelBuilder classModelSimple = ClassModel.builder(SimpleModel.class); roundTrip(getPojoCodecProviderBuilder(classModel, classModelSimple), model, "{'simple': null}"); @@ -326,19 +420,9 @@ public void testCanHandleNullValuesForCollectionsAndMaps() { ClassModelBuilder classModel = ClassModel.builder(ConcreteCollectionsModel.class); ((PropertyModelBuilder>) classModel.getProperty("collection")) - 
.propertySerialization(new PropertySerialization>() { - @Override - public boolean shouldSerialize(final Collection value) { - return true; - } - }); + .propertySerialization(value -> true); ((PropertyModelBuilder>) classModel.getProperty("map")) - .propertySerialization(new PropertySerialization>() { - @Override - public boolean shouldSerialize(final Map value) { - return true; - } - }); + .propertySerialization(value -> true); roundTrip(getPojoCodecProviderBuilder(classModel), model, "{'collection': null, 'list': [4, 5, 6], 'linked': [7, 8, 9], 'map': null," @@ -362,7 +446,7 @@ public void testDataCanHandleMissingData() { @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void testCanHandleTopLevelGenericIfHasCodec() { - UpperBoundsModel model = new UpperBoundsModel(5L); + UpperBoundsModel model = new UpperBoundsModel<>(5L); ClassModelBuilder classModelBuilder = ClassModel.builder(UpperBoundsModel.class); ((PropertyModelBuilder) classModelBuilder.getProperty("myGenericField")).codec(new LongCodec()); @@ -380,53 +464,47 @@ public void testCustomRegisteredPropertyCodecWithValue() { @Test public void testCustomRegisteredPropertyCodecOmittedValue() { - CustomPropertyCodecOptionalModel model = new CustomPropertyCodecOptionalModel(Optional.empty()); + CustomPropertyCodecOptionalModel model = new CustomPropertyCodecOptionalModel(Optional.empty()); roundTrip(getPojoCodecProviderBuilder(CustomPropertyCodecOptionalModel.class).register(new OptionalPropertyCodecProvider()), model, "{'optionalField': null}"); } @Test public void testMapStringObjectModel() { - MapStringObjectModel model = new MapStringObjectModel(new HashMap(Document.parse("{a : 1, b: 'b', c: [1, 2, 3]}"))); - CodecRegistry registry = fromRegistries(fromCodecs(new MapCodec()), - fromProviders(getPojoCodecProviderBuilder(MapStringObjectModel.class).build())); + MapStringObjectModel model = new MapStringObjectModel(new HashMap<>(Document.parse("{a : 1, b: 'b', c: [1, 2, 3]}"))); + CodecRegistry registry 
= fromRegistries(fromProviders(new MapCodecProvider(), new IterableCodecProvider(), new ValueCodecProvider(), + getPojoCodecProviderBuilder(MapStringObjectModel.class).build())); roundTrip(registry, model, "{ map: {a : 1, b: 'b', c: [1, 2, 3]}}"); } - @Test(expected = UnsupportedOperationException.class) + @Test public void testMapStringObjectModelWithObjectCodec() { - MapStringObjectModel model = new MapStringObjectModel(new HashMap(Document.parse("{a : 1, b: 'b', c: [1, 2, 3]}"))); - CodecRegistry registry = fromRegistries(fromCodecs(new MapCodec()), fromCodecs(new ObjectCodec()), + MapStringObjectModel model = new MapStringObjectModel(new HashMap<>(Document.parse("{a : 1, b: 'b', c: [1, 2, 3]}"))); + CodecRegistry registry = fromRegistries(fromProviders(new MapCodecProvider()), fromCodecs(new ObjectCodec()), fromProviders(getPojoCodecProviderBuilder(MapStringObjectModel.class).build())); - roundTrip(registry, model, "{ map: {a : 1, b: 'b', c: [1, 2, 3]}}"); + assertThrows(UnsupportedOperationException.class, () -> + roundTrip(registry, model, "{ map: {a : 1, b: 'b', c: [1, 2, 3]}}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testEncodingInvalidMapModel() { - encodesTo(getPojoCodecProviderBuilder(InvalidMapModel.class), getInvalidMapModel(), "{'invalidMap': {'1': 1, '2': 2}}"); + assertThrows(CodecConfigurationException.class, () -> + encodesTo(getPojoCodecProviderBuilder(InvalidMapModel.class), getInvalidMapModel(), "{'invalidMap': {'1': 1, '2': 2}}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testDecodingInvalidMapModel() { - try { - decodingShouldFail(getCodec(InvalidMapModel.class), "{'invalidMap': {'1': 1, '2': 2}}"); - } catch (CodecConfigurationException e) { - assertTrue(e.getMessage().startsWith("Could not create a PojoCodec for 'InvalidMapModel'." 
- + " Property 'invalidMap' errored with:")); - throw e; - } + CodecConfigurationException e = assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(InvalidMapModel.class), "{'invalidMap': {'1': 1, '2': 2}}")); + assertTrue(e.getMessage().startsWith("Failed to decode 'InvalidMapModel'. Decoding 'invalidMap' errored with:")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testEncodingInvalidCollectionModel() { - try { - encodesTo(getPojoCodecProviderBuilder(InvalidCollectionModel.class), new InvalidCollectionModel(asList(1, 2, 3)), - "{collectionField: [1, 2, 3]}"); - } catch (CodecConfigurationException e) { - assertTrue(e.getMessage().startsWith("Could not create a PojoCodec for 'InvalidCollectionModel'." - + " Property 'collectionField' errored with:")); - throw e; - } + CodecConfigurationException e = assertThrows(CodecConfigurationException.class, () -> + encodesTo(getPojoCodecProviderBuilder(InvalidCollectionModel.class), new InvalidCollectionModel(asList(1, 2, 3)), + "{collectionField: [1, 2, 3]}")); + assertTrue(e.getMessage().startsWith("Failed to encode 'InvalidCollectionModel'. 
Encoding 'collectionField' errored with:")); } @Test @@ -435,91 +513,180 @@ public void testInvalidMapModelWithCustomPropertyCodecProvider() { "{'invalidMap': {'1': 1, '2': 2}}"); } - @Test(expected = CodecConfigurationException.class) + @Test + public void testInterfaceModelCreatorMadeInConvention() { + roundTrip( + getPojoCodecProviderBuilder(ComposeInterfaceModel.class, InterfaceModelB.class, InterfaceModelImpl.class) + .conventions(Collections.singletonList(new InterfaceModelBInstanceCreatorConvention())), + new ComposeInterfaceModel("someTitle", + new InterfaceModelImpl("a", "b")), + "{'title': 'someTitle', 'nestedModel': {'propertyA': 'a', 'propertyB': 'b'}}" + ); + } + + @Test public void testConstructorNotPublicModel() { - decodingShouldFail(getCodec(ConstructorNotPublicModel.class), "{'integerField': 99}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(ConstructorNotPublicModel.class), "{'integerField': 99}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testDataUnknownClass() { ClassModel classModel = ClassModel.builder(SimpleModel.class).enableDiscriminator(true).build(); - try { - decodingShouldFail(getCodec(PojoCodecProvider.builder().register(classModel), SimpleModel.class), "{'_t': 'FakeModel'}"); - } catch (CodecConfigurationException e) { - assertTrue(e.getMessage().startsWith("Failed to decode 'SimpleModel'. Decoding errored with:")); - throw e; - } + CodecConfigurationException e = assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(PojoCodecProvider.builder().register(classModel), SimpleModel.class), "{'_t': 'FakeModel'}")); + assertTrue(e.getMessage().startsWith("Failed to decode 'SimpleModel'. 
Decoding errored with:")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testInvalidTypeForField() { - decodingShouldFail(getCodec(SimpleModel.class), "{'_t': 'SimpleModel', 'stringField': 123}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(SimpleModel.class), "{'_t': 'SimpleModel', 'stringField': 123}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testInvalidTypeForPrimitiveField() { - decodingShouldFail(getCodec(PrimitivesModel.class), "{ '_t': 'PrimitivesModel', 'myBoolean': null}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(PrimitivesModel.class), "{ '_t': 'PrimitivesModel', 'myBoolean': null}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testInvalidTypeForModelField() { - decodingShouldFail(getCodec(SimpleNestedPojoModel.class), "{ '_t': 'SimpleNestedPojoModel', 'simple': 123}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(SimpleNestedPojoModel.class), "{ '_t': 'SimpleNestedPojoModel', 'simple': 123}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testInvalidDiscriminatorInNestedModel() { - decodingShouldFail(getCodec(SimpleNestedPojoModel.class), "{ '_t': 'SimpleNestedPojoModel'," - + "'simple': {'_t': 'FakeModel', 'integerField': 42, 'stringField': 'myString'}}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(SimpleNestedPojoModel.class), "{ '_t': 'SimpleNestedPojoModel'," + + "'simple': {'_t': 'FakeModel', 'integerField': 42, 'stringField': 'myString'}}")); + } + + @Test + public void testGenericBaseClass() { + CodecRegistry registry = fromProviders(new ValueCodecProvider(), PojoCodecProvider.builder().automatic(true).build()); + + ConcreteModel model = new ConcreteModel(new ConcreteField("name1")); + + String json = "{\"_t\": 
\"org.bson.codecs.pojo.entities.ConcreteModel\", \"field\": {\"name\": \"name1\"}}"; + roundTrip(PojoCodecProvider.builder().automatic(true), GenericBaseModel.class, model, json); } - @Test(expected = CodecConfigurationException.class) + + @Test public void testCannotEncodeUnspecializedClasses() { CodecRegistry registry = fromProviders(getPojoCodecProviderBuilder(GenericTreeModel.class).build()); - encode(registry.get(GenericTreeModel.class), getGenericTreeModel()); + assertThrows(CodecConfigurationException.class, () -> + encode(registry.get(GenericTreeModel.class), getGenericTreeModel(), false)); } - @Test(expected = CodecConfigurationException.class) - public void testCannotDecodeUnspecializedClasses() { - decodingShouldFail(getCodec(GenericTreeModel.class), - "{'field1': 'top', 'field2': 1, " - + "'left': {'field1': 'left', 'field2': 2, 'left': {'field1': 'left', 'field2': 3}}, " - + "'right': {'field1': 'right', 'field2': 4, 'left': {'field1': 'left', 'field2': 5}}}"); + @Test + public void testCannotDecodeUnspecializedClassesWithoutADiscriminator() { + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(GenericTreeModel.class), + "{'field1': 'top', 'field2': 1, " + + "'left': {'field1': 'left', 'field2': 2, 'left': {'field1': 'left', 'field2': 3}}, " + + "'right': {'field1': 'right', 'field2': 4, 'left': {'field1': 'left', 'field2': 5}}}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testBsonCreatorPrimitivesAndNullValues() { - decodingShouldFail(getCodec(CreatorConstructorPrimitivesModel.class), "{intField: 100, stringField: 'test'}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(CreatorConstructorPrimitivesModel.class), "{intField: 100, stringField: 'test'}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorMethodThrowsExceptionModel() { - decodingShouldFail(getCodec(CreatorMethodThrowsExceptionModel.class), - 
"{'integerField': 10, 'stringField': 'eleven', 'longField': {$numberLong: '12'}}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(CreatorMethodThrowsExceptionModel.class), + "{'integerField': 10, 'stringField': 'eleven', 'longField': {$numberLong: '12'}}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testCreatorConstructorThrowsExceptionModel() { - decodingShouldFail(getCodec(CreatorConstructorThrowsExceptionModel.class), "{}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(CreatorConstructorThrowsExceptionModel.class), "{}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testInvalidSetterModel() { - decodingShouldFail(getCodec(InvalidSetterArgsModel.class), "{'integerField': 42, 'stringField': 'myString'}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(InvalidSetterArgsModel.class), "{'integerField': 42, 'stringField': 'myString'}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testInvalidGetterAndSetterModelEncoding() { InvalidGetterAndSetterModel model = new InvalidGetterAndSetterModel(42, "myString"); - roundTrip(getPojoCodecProviderBuilder(InvalidGetterAndSetterModel.class), model, "{'integerField': 42, 'stringField': 'myString'}"); + assertThrows(CodecConfigurationException.class, () -> + roundTrip(getPojoCodecProviderBuilder(InvalidGetterAndSetterModel.class), model, "{'integerField': 42, 'stringField': 'myString'}")); } - @Test(expected = CodecConfigurationException.class) + @Test public void testInvalidGetterAndSetterModelDecoding() { - decodingShouldFail(getCodec(InvalidGetterAndSetterModel.class), "{'integerField': 42, 'stringField': 'myString'}"); + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(InvalidGetterAndSetterModel.class), "{'integerField': 42, 'stringField': 'myString'}")); + } + + @Test + 
public void testInvalidBsonRepresentationStringDecoding() { + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(BsonRepresentationUnsupportedString.class), "{'id': 'hello', s: 3}")); + } + + @Test + public void testInvalidBsonRepresentationStringEncoding() { + assertThrows(CodecConfigurationException.class, () -> + encodesTo(getPojoCodecProviderBuilder(BsonRepresentationUnsupportedString.class), + new BsonRepresentationUnsupportedString("1"), "")); + } + + @Test + public void testInvalidBsonRepresentationIntDecoding() { + assertThrows(CodecConfigurationException.class, () -> + decodingShouldFail(getCodec(BsonRepresentationUnsupportedInt.class), "{'id': 'hello', age: '3'}")); + } + + @Test + public void testStringIdIsNotObjectId() { + assertThrows(IllegalArgumentException.class, () -> + encodesTo(getCodec(BsonRepresentationModel.class), new BsonRepresentationModel("notanobjectid", 1), null)); + } + + @Test + public void testRoundTripWithoutBsonAnnotation() { + roundTrip(getPojoCodecProviderBuilder(BsonRepresentationModel.class).conventions(asList(CLASS_AND_PROPERTY_CONVENTION)), + new BsonRepresentationModel("hello", 1), "{'_id': 'hello', 'age': 1}"); + } + + @Test + public void testMultiplePojoProviders() { + NestedGenericHolderFieldWithMultipleTypeParamsModel model = getNestedGenericHolderFieldWithMultipleTypeParamsModel(); + PojoCodecProvider provider1 = PojoCodecProvider.builder().register(NestedGenericHolderFieldWithMultipleTypeParamsModel.class) + .build(); + PojoCodecProvider provider2 = PojoCodecProvider.builder().register(PropertyWithMultipleTypeParamsModel.class).build(); + PojoCodecProvider provider3 = PojoCodecProvider.builder().register(SimpleGenericsModel.class).build(); + PojoCodecProvider provider4 = PojoCodecProvider.builder().register(GenericHolderModel.class).build(); + + CodecRegistry registry = fromProviders(provider1, provider2, provider3, provider4); + CodecRegistry actualRegistry = 
fromRegistries(fromProviders(new BsonValueCodecProvider(), new ValueCodecProvider()), registry); + + String json = "{'nested': {'myGenericField': {_t: 'PropertyWithMultipleTypeParamsModel', " + + "'simpleGenericsModel': {_t: 'org.bson.codecs.pojo.entities.SimpleGenericsModel', 'myIntegerField': 42, " + + "'myGenericField': {'$numberLong': '101'}, 'myListField': ['B', 'C'], 'myMapField': {'D': 2, 'E': 3, 'F': 4 }}}," + + "'myLongField': {'$numberLong': '42'}}}"; + + roundTrip(actualRegistry, model, json); + } + + @Test + public void testBsonExtraElementsInvalidModel() { + assertThrows(CodecConfigurationException.class, () -> + getPojoCodecProviderBuilder(BsonExtraElementsInvalidModel.class).build()); } private List getDefaultAndUseGettersConvention() { - List conventions = new ArrayList(DEFAULT_CONVENTIONS); + List conventions = new ArrayList<>(DEFAULT_CONVENTIONS); conventions.add(USE_GETTERS_FOR_SETTERS); return conventions; } @@ -541,4 +708,16 @@ public Class getEncoderClass() { return Object.class; } } + + class ObjectIdGenerator implements IdGenerator { + @Override + public ObjectId generate() { + return new ObjectId("123412341234123412341234"); + } + + @Override + public Class getType() { + return ObjectId.class; + } + } } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/PojoRoundTripTest.java b/bson/src/test/unit/org/bson/codecs/pojo/PojoRoundTripTest.java index 79f807edbc8..53f5d363535 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/PojoRoundTripTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/PojoRoundTripTest.java @@ -17,6 +17,7 @@ package org.bson.codecs.pojo; import org.bson.BsonDocument; +import org.bson.codecs.SimpleEnum; import org.bson.codecs.pojo.entities.AbstractInterfaceModel; import org.bson.codecs.pojo.entities.CollectionNestedPojoModel; import org.bson.codecs.pojo.entities.CollectionSpecificReturnTypeCreatorModel; @@ -27,12 +28,21 @@ import org.bson.codecs.pojo.entities.ConcreteStandAloneAbstractInterfaceModel; import 
org.bson.codecs.pojo.entities.ContainsAlternativeMapAndCollectionModel; import org.bson.codecs.pojo.entities.ConventionModel; +import org.bson.codecs.pojo.entities.DuplicateAnnotationAllowedModel; import org.bson.codecs.pojo.entities.FieldAndPropertyTypeMismatchModel; import org.bson.codecs.pojo.entities.GenericHolderModel; import org.bson.codecs.pojo.entities.GenericTreeModel; import org.bson.codecs.pojo.entities.InterfaceBasedModel; import org.bson.codecs.pojo.entities.InterfaceModelImpl; import org.bson.codecs.pojo.entities.InterfaceUpperBoundsModelAbstractImpl; +import org.bson.codecs.pojo.entities.InterfaceWithDefaultMethodModelImpl; +import org.bson.codecs.pojo.entities.InterfaceWithOverrideDefaultMethodModelImpl; +import org.bson.codecs.pojo.entities.ListGenericExtendedModel; +import org.bson.codecs.pojo.entities.ListListGenericExtendedModel; +import org.bson.codecs.pojo.entities.ListMapGenericExtendedModel; +import org.bson.codecs.pojo.entities.MapGenericExtendedModel; +import org.bson.codecs.pojo.entities.MapListGenericExtendedModel; +import org.bson.codecs.pojo.entities.MapMapGenericExtendedModel; import org.bson.codecs.pojo.entities.MultipleBoundsModel; import org.bson.codecs.pojo.entities.MultipleLevelGenericModel; import org.bson.codecs.pojo.entities.NestedFieldReusingClassTypeParameter; @@ -45,24 +55,34 @@ import org.bson.codecs.pojo.entities.NestedReusedGenericsModel; import org.bson.codecs.pojo.entities.NestedSelfReferentialGenericHolderModel; import org.bson.codecs.pojo.entities.NestedSelfReferentialGenericModel; +import org.bson.codecs.pojo.entities.NestedSimpleIdModel; import org.bson.codecs.pojo.entities.PrimitivesModel; import org.bson.codecs.pojo.entities.PropertyReusingClassTypeParameter; import org.bson.codecs.pojo.entities.PropertySelectionModel; import org.bson.codecs.pojo.entities.PropertyWithMultipleTypeParamsModel; import org.bson.codecs.pojo.entities.ReusedGenericsModel; import org.bson.codecs.pojo.entities.SelfReferentialGenericModel; 
+import org.bson.codecs.pojo.entities.ShapeHolderCircleModel; import org.bson.codecs.pojo.entities.ShapeHolderModel; import org.bson.codecs.pojo.entities.ShapeModelAbstract; import org.bson.codecs.pojo.entities.ShapeModelCircle; import org.bson.codecs.pojo.entities.ShapeModelRectangle; -import org.bson.codecs.pojo.entities.SimpleEnum; import org.bson.codecs.pojo.entities.SimpleEnumModel; import org.bson.codecs.pojo.entities.SimpleGenericsModel; +import org.bson.codecs.pojo.entities.SimpleIdImmutableModel; +import org.bson.codecs.pojo.entities.SimpleIdModel; import org.bson.codecs.pojo.entities.SimpleModel; import org.bson.codecs.pojo.entities.SimpleNestedPojoModel; +import org.bson.codecs.pojo.entities.SimpleWithStaticModel; +import org.bson.codecs.pojo.entities.TreeWithIdModel; import org.bson.codecs.pojo.entities.UpperBoundsConcreteModel; import org.bson.codecs.pojo.entities.conventions.AnnotationBsonPropertyIdModel; +import org.bson.codecs.pojo.entities.conventions.BsonExtraElementsMapModel; +import org.bson.codecs.pojo.entities.conventions.BsonExtraElementsModel; +import org.bson.codecs.pojo.entities.conventions.BsonIgnoreDuplicatePropertyMultipleTypes; import org.bson.codecs.pojo.entities.conventions.BsonIgnoreInvalidMapModel; +import org.bson.codecs.pojo.entities.conventions.BsonIgnoreSyntheticProperty; +import org.bson.codecs.pojo.entities.conventions.BsonRepresentationModel; import org.bson.codecs.pojo.entities.conventions.CollectionDiscriminatorAbstractClassesModel; import org.bson.codecs.pojo.entities.conventions.CollectionDiscriminatorInterfacesModel; import org.bson.codecs.pojo.entities.conventions.CreatorAllFinalFieldsModel; @@ -81,44 +101,39 @@ import org.bson.codecs.pojo.entities.conventions.Subclass1Model; import org.bson.codecs.pojo.entities.conventions.Subclass2Model; import org.bson.codecs.pojo.entities.conventions.SuperClassModel; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import 
org.bson.types.ObjectId; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.stream.Stream; import static java.lang.String.format; import static java.util.Arrays.asList; -@RunWith(Parameterized.class) public final class PojoRoundTripTest extends PojoTestCase { - private final String name; - private final Object model; - private final PojoCodecProvider.Builder builder; - private final String json; - - public PojoRoundTripTest(final String name, final Object model, final String json, final PojoCodecProvider.Builder builder) { - this.name = name; - this.model = model; - this.json = json; - this.builder = builder; - } - - @Test - public void test() { + @ParameterizedTest(name = "{0}") + @MethodSource("data") + public void test(final String name, final Object model, final String json, final PojoCodecProvider.Builder builder) { roundTrip(builder, model, json); + threadedRoundTrip(builder, model, json); } private static List testCases() { - List data = new ArrayList(); + List data = new ArrayList<>(); data.add(new TestData("Simple model", getSimpleModel(), PojoCodecProvider.builder().register(SimpleModel.class), SIMPLE_MODEL_JSON)); + data.add(new TestData("Simple model with statics", new SimpleWithStaticModel(42, "myString"), + PojoCodecProvider.builder().register(SimpleWithStaticModel.class), + SIMPLE_MODEL_JSON)); + data.add(new TestData("Property selection model", new PropertySelectionModel(), getPojoCodecProviderBuilder(PropertySelectionModel.class), "{'finalStringField': 'finalStringField', 'stringField': 'stringField'}")); @@ -137,6 +152,16 @@ private static List testCases() { getPojoCodecProviderBuilder(InterfaceModelImpl.class), "{'propertyA': 'a', 
'propertyB': 'b'}")); + data.add(new TestData("Interfaced based model with default method", new InterfaceWithDefaultMethodModelImpl("a", + "c"), + getPojoCodecProviderBuilder(InterfaceWithDefaultMethodModelImpl.class), + "{'propertyA': 'a', 'propertyC': 'c'}")); + + data.add(new TestData("Interfaced based model with override default method", + new InterfaceWithOverrideDefaultMethodModelImpl("a", "c-override"), + getPojoCodecProviderBuilder(InterfaceWithOverrideDefaultMethodModelImpl.class), + "{'propertyA': 'a', 'propertyC': 'c-override'}")); + data.add(new TestData("Interfaced based model with bound", new InterfaceUpperBoundsModelAbstractImpl("someName", new InterfaceModelImpl("a", "b")), getPojoCodecProviderBuilder(InterfaceUpperBoundsModelAbstractImpl.class, InterfaceModelImpl.class), @@ -169,12 +194,12 @@ private static List testCases() { "{'collection': [1, null, 3], 'list': [4, null, 6], 'linked': [null, 8, 9], 'map': {'A': 1.1, 'B': null, 'C': 3.3}}")); data.add(new TestData("Concrete specific return collection type model through BsonCreator", - new CollectionSpecificReturnTypeCreatorModel(Arrays.asList("foo", "bar")), + new CollectionSpecificReturnTypeCreatorModel(asList("foo", "bar")), getPojoCodecProviderBuilder(CollectionSpecificReturnTypeCreatorModel.class), "{'properties': ['foo', 'bar']}")); data.add(new TestData("Concrete specific return collection type model through getter and setter", - new CollectionSpecificReturnTypeModel(Arrays.asList("foo", "bar")), + new CollectionSpecificReturnTypeModel(asList("foo", "bar")), getPojoCodecProviderBuilder(CollectionSpecificReturnTypeModel.class), "{'properties': ['foo', 'bar']}")); @@ -193,6 +218,7 @@ private static List testCases() { + "'listListSimple': [[" + SIMPLE_MODEL_JSON + "]]," + "'setSimple': [" + SIMPLE_MODEL_JSON + "]," + "'setSetSimple': [[" + SIMPLE_MODEL_JSON + "]]," + + "'sortedSetSimple': [" + SIMPLE_MODEL_JSON + "]," + "'mapSimple': {'s': " + SIMPLE_MODEL_JSON + "}," + "'mapMapSimple': {'ms': 
{'s': " + SIMPLE_MODEL_JSON + "}}," + "'mapListSimple': {'ls': [" + SIMPLE_MODEL_JSON + "]}," @@ -248,7 +274,7 @@ private static List testCases() { + "'right': {'field1': 'right', 'field2': 4, 'left': {'field1': 'left', 'field2': 5}}}}")); data.add(new TestData("Nested multiple level", - new NestedMultipleLevelGenericModel(42, new MultipleLevelGenericModel("string", getGenericTreeModel())), + new NestedMultipleLevelGenericModel(42, new MultipleLevelGenericModel<>("string", getGenericTreeModel())), getPojoCodecProviderBuilder(NestedMultipleLevelGenericModel.class, MultipleLevelGenericModel.class, GenericTreeModel.class), "{'intField': 42, 'nested': {'stringField': 'string', 'nested': {'field1': 'top', 'field2': 1, " + "'left': {'field1': 'left', 'field2': 2, 'left': {'field1': 'left', 'field2': 3}}, " @@ -263,7 +289,7 @@ private static List testCases() { + " 'myLongField': {'$numberLong': '42' }}}")); data.add(new TestData("Nested property reusing type parameter", - new NestedFieldReusingClassTypeParameter(new PropertyReusingClassTypeParameter(getGenericTreeModelStrings())), + new NestedFieldReusingClassTypeParameter(new PropertyReusingClassTypeParameter<>(getGenericTreeModelStrings())), getPojoCodecProviderBuilder(NestedFieldReusingClassTypeParameter.class, PropertyReusingClassTypeParameter.class, GenericTreeModel.class), "{'nested': {'tree': {'field1': 'top', 'field2': '1', " @@ -354,7 +380,7 @@ private static List testCases() { data.add(new TestData("Collection of discriminators interfaces", new CollectionDiscriminatorInterfacesModel().setList( asList(new InterfaceModelImplA().setName("abc").setValue(true), new InterfaceModelImplB().setInteger(234).setValue(false))).setMap( - Collections.singletonMap("key", new InterfaceModelImplB().setInteger(123).setValue(true))), + Collections.singletonMap("key", new InterfaceModelImplB().setInteger(123).setValue(true))), getPojoCodecProviderBuilder(CollectionDiscriminatorInterfacesModel.class, InterfaceModelImplA.class, 
InterfaceModelImplB.class, InterfaceModel.class), "{list: [{_t: 'org.bson.codecs.pojo.entities.conventions.InterfaceModelImplA', value: true, name: 'abc'}," @@ -379,19 +405,139 @@ private static List testCases() { data.add(new TestData("AnnotationBsonPropertyIdModel", new AnnotationBsonPropertyIdModel(99L), getPojoCodecProviderBuilder(AnnotationBsonPropertyIdModel.class), "{'id': {'$numberLong': '99' }}")); + + data.add(new TestData("Shape model - circle", + new ShapeHolderCircleModel(getShapeModelCircle()), + getPojoCodecProviderBuilder(ShapeModelCircle.class, ShapeHolderCircleModel.class), + "{'shape': {'_t': 'org.bson.codecs.pojo.entities.ShapeModelCircle', 'color': 'orange', 'radius': 4.2}}")); + + data.add(new TestData("BsonIgnore synthentic property", + new BsonIgnoreSyntheticProperty("string value"), + getPojoCodecProviderBuilder(BsonIgnoreSyntheticProperty.class).conventions(Conventions.DEFAULT_CONVENTIONS), + "{stringField: 'string value'}")); + + data.add(new TestData("SimpleIdModel with existing id", + new SimpleIdModel(new ObjectId("123412341234123412341234"), 42, "myString"), + getPojoCodecProviderBuilder(SimpleIdModel.class).conventions(Conventions.DEFAULT_CONVENTIONS), + "{'_id': {'$oid': '123412341234123412341234'}, 'integerField': 42, 'stringField': 'myString'}")); + + + data.add(new TestData("SimpleIdImmutableModel with existing id", + new SimpleIdImmutableModel(new ObjectId("123412341234123412341234"), 42, "myString"), + getPojoCodecProviderBuilder(SimpleIdImmutableModel.class).conventions(Conventions.DEFAULT_CONVENTIONS), + "{'_id': {'$oid': '123412341234123412341234'}, 'integerField': 42, 'stringField': 'myString'}")); + + data.add(new TestData("NestedSimpleIdModel", + new NestedSimpleIdModel(new SimpleIdModel(42, "myString")), + getPojoCodecProviderBuilder(NestedSimpleIdModel.class, SimpleIdModel.class).conventions(Conventions.DEFAULT_CONVENTIONS), + "{'nestedSimpleIdModel': {'integerField': 42, 'stringField': 'myString'}}")); + + data.add(new 
TestData("TreeWithIdModel", + new TreeWithIdModel(new ObjectId("123412341234123412341234"), "top", + new TreeWithIdModel("left-1", new TreeWithIdModel("left-2"), null), new TreeWithIdModel("right-1")), + getPojoCodecProviderBuilder(TreeWithIdModel.class).conventions(Conventions.DEFAULT_CONVENTIONS), + "{'_id': {'$oid': '123412341234123412341234'}, 'level': 'top'," + + "'left': {'level': 'left-1', 'left': {'level': 'left-2'}}," + + "'right': {'level': 'right-1'}}")); + + data.add(new TestData("DuplicateAnnotationAllowedModel", + new DuplicateAnnotationAllowedModel("abc"), + getPojoCodecProviderBuilder(DuplicateAnnotationAllowedModel.class).conventions(Conventions.DEFAULT_CONVENTIONS), + "{'_id': 'abc'}")); + + data.add(new TestData("BsonIgnore duplicate property with multiple types", + new BsonIgnoreDuplicatePropertyMultipleTypes("string value"), + getPojoCodecProviderBuilder(BsonIgnoreDuplicatePropertyMultipleTypes.class).conventions(Conventions.DEFAULT_CONVENTIONS), + "{stringField: 'string value'}")); + + data.add(new TestData("Can handle concrete generic list types", + new ListGenericExtendedModel(asList(1, 2, 3)), + getPojoCodecProviderBuilder(ListGenericExtendedModel.class), + "{values: [1, 2, 3]}")); + + data.add(new TestData("Can handle concrete nested generic list types", + new ListListGenericExtendedModel(asList(asList(1, 2, 3), asList(4, 5, 6))), + getPojoCodecProviderBuilder(ListListGenericExtendedModel.class), + "{values: [[1, 2, 3], [4, 5, 6]]}")); + + + data.add(new TestData("Can handle concrete generic map types", + new MapGenericExtendedModel(new HashMap() {{ + put("a", 1); + put("b", 2); + }}), + getPojoCodecProviderBuilder(MapGenericExtendedModel.class), + "{values: {a: 1, b: 2}}")); + + data.add(new TestData("Can handle concrete nested generic map types", + new MapMapGenericExtendedModel(new HashMap>() {{ + put("a", new HashMap() {{ + put("aa", 1); + put("ab", 2); + }}); + put("b", new HashMap() {{ + put("ba", 1); + put("bb", 2); + }}); + }} + ), 
+ getPojoCodecProviderBuilder(MapMapGenericExtendedModel.class), + "{values: {a: {aa: 1, ab: 2}, b: {ba: 1, bb: 2}}}")); + + data.add(new TestData("Can handle concrete lists with generic map types", + new ListMapGenericExtendedModel(asList(new HashMap() {{ + put("a", 1); + put("b", 2); + }}, new HashMap() {{ + put("c", 3); + put("d", 4); + }})), + getPojoCodecProviderBuilder(ListMapGenericExtendedModel.class), + "{values: [{a: 1, b: 2}, {c: 3, d: 4}]}")); + + + data.add(new TestData("Can handle concrete maps with generic list types", + new MapListGenericExtendedModel(new HashMap>() {{ + put("a", asList(1, 2, 3)); + put("b", asList(4, 5, 6)); + }}), + getPojoCodecProviderBuilder(MapListGenericExtendedModel.class), + "{values: {a: [1, 2, 3], b: [4, 5, 6]}}")); + + data.add(new TestData("BsonRepresentation is encoded and decoded correctly", new BsonRepresentationModel(1), + getPojoCodecProviderBuilder(BsonRepresentationModel.class), + "{'_id': {'$oid': '111111111111111111111111'}, 'age': 1}")); + + data.add(new TestData("BsonExtraElements with no extra data", + new BsonExtraElementsModel(42, "myString", null), + getPojoCodecProviderBuilder(BsonExtraElementsModel.class), + "{'integerField': 42, 'stringField': 'myString'}")); + + data.add(new TestData("BsonExtraElements are encoded and decoded correctly", + new BsonExtraElementsModel(42, "myString", BsonDocument.parse("{a: 1, b: 2, c: [1, 2, {a: 1}]}")), + getPojoCodecProviderBuilder(BsonExtraElementsModel.class), + "{'integerField': 42, 'stringField': 'myString', 'a': 1, 'b': 2, c: [1, 2, {a: 1}]}")); + + Map stringMap = new HashMap<>(); + stringMap.put("a", "a"); + stringMap.put("b", "b"); + data.add(new TestData("BsonExtraElements are encoded and decoded correctly to a Map", + new BsonExtraElementsMapModel(42, "myString", stringMap), + getPojoCodecProviderBuilder(BsonExtraElementsMapModel.class), + "{'integerField': 42, 'stringField': 'myString', 'a': 'a', 'b': 'b'}")); + return data; } - 
@Parameterized.Parameters(name = "{0}") - public static Collection data() { - List data = new ArrayList(); + public static Stream data() { + List data = new ArrayList<>(); for (TestData testData : testCases()) { - data.add(new Object[]{format("%s", testData.getName()), testData.getModel(), testData.getJson(), testData.getBuilder()}); - data.add(new Object[]{format("%s [Auto]", testData.getName()), testData.getModel(), testData.getJson(), AUTOMATIC_BUILDER}); - data.add(new Object[]{format("%s [Package]", testData.getName()), testData.getModel(), testData.getJson(), PACKAGE_BUILDER}); + data.add(Arguments.of(format("%s", testData.getName()), testData.getModel(), testData.getJson(), testData.getBuilder())); + data.add(Arguments.of(format("%s [Auto]", testData.getName()), testData.getModel(), testData.getJson(), AUTOMATIC_BUILDER)); + data.add(Arguments.of(format("%s [Package]", testData.getName()), testData.getModel(), testData.getJson(), PACKAGE_BUILDER)); } - return data; + return data.stream(); } private static final PojoCodecProvider.Builder AUTOMATIC_BUILDER = PojoCodecProvider.builder().automatic(true); diff --git a/bson/src/test/unit/org/bson/codecs/pojo/PojoTestCase.java b/bson/src/test/unit/org/bson/codecs/pojo/PojoTestCase.java index 356ab22e3d8..eb380bb7986 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/PojoTestCase.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/PojoTestCase.java @@ -16,6 +16,9 @@ package org.bson.codecs.pojo; +import java.util.SortedSet; +import java.util.TreeSet; + import org.bson.BsonBinaryReader; import org.bson.BsonBinaryWriter; import org.bson.BsonDocument; @@ -27,8 +30,11 @@ import org.bson.codecs.Codec; import org.bson.codecs.DecoderContext; import org.bson.codecs.EncoderContext; +import org.bson.codecs.EnumCodecProvider; +import org.bson.codecs.SimpleEnum; import org.bson.codecs.ValueCodecProvider; import org.bson.codecs.configuration.CodecRegistry; +import org.bson.codecs.jsr310.Jsr310CodecProvider; import 
org.bson.codecs.pojo.entities.CollectionNestedPojoModel; import org.bson.codecs.pojo.entities.ConcreteCollectionsModel; import org.bson.codecs.pojo.entities.ConventionModel; @@ -49,7 +55,6 @@ import org.bson.codecs.pojo.entities.SelfReferentialGenericModel; import org.bson.codecs.pojo.entities.ShapeModelCircle; import org.bson.codecs.pojo.entities.ShapeModelRectangle; -import org.bson.codecs.pojo.entities.SimpleEnum; import org.bson.codecs.pojo.entities.SimpleGenericsModel; import org.bson.codecs.pojo.entities.SimpleModel; import org.bson.codecs.pojo.entities.SimpleNestedPojoModel; @@ -73,45 +78,77 @@ import static java.util.Collections.singletonList; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; import static org.bson.codecs.pojo.Conventions.DEFAULT_CONVENTIONS; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static util.ThreadTestHelpers.executeAll; abstract class PojoTestCase { static final BsonDocumentCodec DOCUMENT_CODEC = new BsonDocumentCodec(); - @SuppressWarnings("unchecked") void roundTrip(final T value, final String json) { roundTrip(PojoCodecProvider.builder().automatic(true), value, json); } - @SuppressWarnings("unchecked") void roundTrip(final PojoCodecProvider.Builder builder, final T value, final String json) { - encodesTo(getCodecRegistry(builder), value, json); - decodesTo(getCodecRegistry(builder), json, value); + roundTrip(builder, value.getClass(), value, json); + } + + void roundTrip(final PojoCodecProvider.Builder builder, final Class clazz, final T value, final String json) { + encodesTo(getCodecRegistry(builder), clazz, value, json); + decodesTo(getCodecRegistry(builder), clazz, json, value); + } + + void threadedRoundTrip(final PojoCodecProvider.Builder builder, final T value, final String json) { + int numberOfThreads = 5; + CodecRegistry codecRegistry = getCodecRegistry(builder); + + executeAll(numberOfThreads, () -> { + 
encodesTo(codecRegistry, value, json); + decodesTo(codecRegistry, json, value); + }); } - @SuppressWarnings("unchecked") void roundTrip(final CodecRegistry registry, final T value, final String json) { encodesTo(registry, value, json); decodesTo(registry, json, value); } + void roundTrip(final CodecRegistry registry, final Class clazz, final T value, final String json) { + encodesTo(registry, clazz, value, json); + decodesTo(registry, clazz, json, value); + } + void encodesTo(final PojoCodecProvider.Builder builder, final T value, final String json) { - encodesTo(getCodecRegistry(builder), value, json); + encodesTo(builder, value, json, false); + } + + void encodesTo(final PojoCodecProvider.Builder builder, final T value, final String json, final boolean collectible) { + encodesTo(getCodecRegistry(builder), value.getClass(), value, json, collectible); } - @SuppressWarnings("unchecked") void encodesTo(final CodecRegistry registry, final T value, final String json) { - Codec codec = (Codec) registry.get(value.getClass()); - encodesTo(codec, value, json); + encodesTo(registry, value.getClass(), value, json, false); + } + + void encodesTo(final CodecRegistry registry, final Class clazz, final T value, final String json) { + encodesTo(registry, clazz, value, json, false); } @SuppressWarnings("unchecked") + void encodesTo(final CodecRegistry registry, final Class clazz, final T value, final String json, final boolean collectible) { + Codec codec = (Codec) registry.get(clazz); + encodesTo(codec, value, json, collectible); + } + void encodesTo(final Codec codec, final T value, final String json) { - OutputBuffer encoded = encode(codec, value); + encodesTo(codec, value, json, false); + } + + void encodesTo(final Codec codec, final T value, final String json, final boolean collectible) { + OutputBuffer encoded = encode(codec, value, collectible); BsonDocument asBsonDocument = decode(DOCUMENT_CODEC, encoded); - assertEquals("Encoded value", BsonDocument.parse(json), 
asBsonDocument); + assertEquals(BsonDocument.parse(json), asBsonDocument); } void decodesTo(final PojoCodecProvider.Builder builder, final String json, final T expected) { @@ -120,24 +157,29 @@ void decodesTo(final PojoCodecProvider.Builder builder, final String json, f @SuppressWarnings("unchecked") void decodesTo(final CodecRegistry registry, final String json, final T expected) { - Codec codec = (Codec) registry.get(expected.getClass()); + decodesTo(registry, expected.getClass(), json, expected); + } + + @SuppressWarnings("unchecked") + void decodesTo(final CodecRegistry registry, final Class clazz, final String json, final T expected) { + Codec codec = (Codec) registry.get(clazz); decodesTo(codec, json, expected); } void decodesTo(final Codec codec, final String json, final T expected) { - OutputBuffer encoded = encode(DOCUMENT_CODEC, BsonDocument.parse(json)); + OutputBuffer encoded = encode(DOCUMENT_CODEC, BsonDocument.parse(json), false); T result = decode(codec, encoded); - assertEquals("Decoded value", expected, result); + assertEquals(expected, result); } void decodingShouldFail(final Codec codec, final String json) { decodesTo(codec, json, null); } - OutputBuffer encode(final Codec codec, final T value) { + OutputBuffer encode(final Codec codec, final T value, final boolean collectible) { OutputBuffer buffer = new BasicOutputBuffer(); BsonWriter writer = new BsonBinaryWriter(buffer); - codec.encode(writer, value, EncoderContext.builder().build()); + codec.encode(writer, value, EncoderContext.builder().isEncodingCollectibleDocument(collectible).build()); return buffer; } @@ -155,16 +197,16 @@ static PojoCodecProvider.Builder getPojoCodecProviderBuilder(final Class... 
c return builder; } - PojoCodecImpl getCodec(final PojoCodecProvider.Builder builder, final Class clazz) { - return (PojoCodecImpl) getCodecRegistry(builder).get(clazz); + PojoCodec getCodec(final PojoCodecProvider.Builder builder, final Class clazz) { + return (PojoCodec) getCodecRegistry(builder).get(clazz); } - PojoCodecImpl getCodec(final Class clazz) { + PojoCodec getCodec(final Class clazz) { return getCodec(getPojoCodecProviderBuilder(clazz), clazz); } PojoCodecProvider.Builder getPojoCodecProviderBuilder(final ClassModelBuilder... classModelBuilders) { - List> builders = new ArrayList>(); + List> builders = new ArrayList<>(); for (ClassModelBuilder classModelBuilder : classModelBuilders) { builders.add(classModelBuilder.build()); } @@ -172,7 +214,8 @@ PojoCodecProvider.Builder getPojoCodecProviderBuilder(final ClassModelBuilder } CodecRegistry getCodecRegistry(final PojoCodecProvider.Builder builder) { - return fromProviders(new BsonValueCodecProvider(), new ValueCodecProvider(), builder.build()); + return fromProviders(new BsonValueCodecProvider(), new ValueCodecProvider(), new Jsr310CodecProvider(), new EnumCodecProvider(), + builder.build()); } static SimpleModel getSimpleModel() { @@ -184,32 +227,32 @@ static PrimitivesModel getPrimitivesModel() { } SimpleGenericsModel getSimpleGenericsModel() { - HashMap map = new HashMap(); + HashMap map = new HashMap<>(); map.put("D", 2); map.put("E", 3); map.put("F", 4); - return new SimpleGenericsModel(42, "A", asList("B", "C"), map); + return new SimpleGenericsModel<>(42, "A", asList("B", "C"), map); } static SimpleGenericsModel getSimpleGenericsModelAlt() { - HashMap map = new HashMap(); + HashMap map = new HashMap<>(); map.put("D", 2); map.put("E", 3); map.put("F", 4); - return new SimpleGenericsModel(42, 101L, asList("B", "C"), map); + return new SimpleGenericsModel<>(42, 101L, asList("B", "C"), map); } static ConcreteCollectionsModel getConcreteCollectionsModel() { Collection collection = asList(1, 2, 3); List 
list = asList(4, 5, 6); - LinkedList linked = new LinkedList(asList(7, 8, 9)); - Map map = new HashMap(); + LinkedList linked = new LinkedList<>(asList(7, 8, 9)); + Map map = new HashMap<>(); map.put("A", 1.1); map.put("B", 2.2); map.put("C", 3.3); - ConcurrentHashMap concurrent = new ConcurrentHashMap(); + ConcurrentHashMap concurrent = new ConcurrentHashMap<>(); concurrent.put("D", 4.4); concurrent.put("E", 5.5); concurrent.put("F", 6.6); @@ -221,8 +264,8 @@ static ConcreteCollectionsModel getConcreteCollectionsModel() { static ConcreteCollectionsModel getConcreteCollectionsModelWithNulls() { Collection collection = asList(1, null, 3); List list = asList(4, null, 6); - LinkedList linked = new LinkedList(asList(null, 8, 9)); - Map map = new HashMap(); + LinkedList linked = new LinkedList<>(asList(null, 8, 9)); + Map map = new HashMap<>(); map.put("A", 1.1); map.put("B", null); map.put("C", 3.3); @@ -246,41 +289,44 @@ static CollectionNestedPojoModel getCollectionNestedPojoModelWithNulls() { private static CollectionNestedPojoModel getCollectionNestedPojoModel(final boolean useNulls) { List listSimple; Set setSimple; + SortedSet sortedSetSimple; Map mapSimple; if (useNulls) { listSimple = null; setSimple = null; + sortedSetSimple = null; mapSimple = null; } else { SimpleModel simpleModel = getSimpleModel(); listSimple = singletonList(simpleModel); - setSimple = new HashSet(listSimple); - mapSimple = new HashMap(); + setSimple = new HashSet<>(listSimple); + sortedSetSimple = new TreeSet<>(listSimple); + mapSimple = new HashMap<>(); mapSimple.put("s", simpleModel); } List> listListSimple = singletonList(listSimple); - Set> setSetSimple = new HashSet>(singletonList(setSimple)); + Set> setSetSimple = new HashSet<>(singletonList(setSimple)); - Map> mapMapSimple = new HashMap>(); + Map> mapMapSimple = new HashMap<>(); mapMapSimple.put("ms", mapSimple); - Map> mapListSimple = new HashMap>(); + Map> mapListSimple = new HashMap<>(); mapListSimple.put("ls", listSimple); - 
Map>> mapListMapSimple = new HashMap>>(); + Map>> mapListMapSimple = new HashMap<>(); mapListMapSimple.put("lm", singletonList(mapSimple)); - Map> mapSetSimple = new HashMap>(); + Map> mapSetSimple = new HashMap<>(); mapSetSimple.put("s", setSimple); List> listMapSimple = singletonList(mapSimple); List>> listMapListSimple = singletonList(mapListSimple); List>> listMapSetSimple = singletonList(mapSetSimple); - return new CollectionNestedPojoModel(listSimple, listListSimple, setSimple, setSetSimple, mapSimple, mapMapSimple, mapListSimple, - mapListMapSimple, mapSetSimple, listMapSimple, listMapListSimple, listMapSetSimple); + return new CollectionNestedPojoModel(listSimple, listListSimple, setSimple, setSetSimple, sortedSetSimple, + mapSimple, mapMapSimple, mapListSimple, mapListMapSimple, mapSetSimple, listMapSimple, listMapListSimple, listMapSetSimple); } static ConventionModel getConventionModel() { @@ -298,7 +344,7 @@ static ShapeModelRectangle getShapeModelRectangle() { } static MultipleBoundsModel getMultipleBoundsModel() { - HashMap map = new HashMap(); + HashMap map = new HashMap<>(); map.put("key", "value"); List list = asList(1, 2, 3); return new MultipleBoundsModel(map, list, 2.2); @@ -307,70 +353,70 @@ static MultipleBoundsModel getMultipleBoundsModel() { static NestedGenericHolderFieldWithMultipleTypeParamsModel getNestedGenericHolderFieldWithMultipleTypeParamsModel() { SimpleGenericsModel simple = getSimpleGenericsModelAlt(); PropertyWithMultipleTypeParamsModel field = - new PropertyWithMultipleTypeParamsModel(simple); + new PropertyWithMultipleTypeParamsModel<>(simple); GenericHolderModel> nested = new - GenericHolderModel>(field, 42L); + GenericHolderModel<>(field, 42L); return new NestedGenericHolderFieldWithMultipleTypeParamsModel(nested); } static NestedGenericHolderSimpleGenericsModel getNestedGenericHolderSimpleGenericsModel() { SimpleModel simpleModel = getSimpleModel(); - Map map = new HashMap(); + Map map = new HashMap<>(); map.put("A", 
simpleModel); - Map> mapB = new HashMap>(); + Map> mapB = new HashMap<>(); mapB.put("A", map); SimpleGenericsModel, Map> simpleGenericsModel = - new SimpleGenericsModel, Map>(42, 42, + new SimpleGenericsModel<>(42, 42, singletonList(singletonList(simpleModel)), mapB); GenericHolderModel, Map>> nested = - new GenericHolderModel, Map>>(simpleGenericsModel, 42L); + new GenericHolderModel<>(simpleGenericsModel, 42L); return new NestedGenericHolderSimpleGenericsModel(nested); } static NestedSelfReferentialGenericHolderModel getNestedSelfReferentialGenericHolderModel() { - SelfReferentialGenericModel selfRef1 = new SelfReferentialGenericModel(true, 33L, - new SelfReferentialGenericModel(44L, false, null)); - SelfReferentialGenericModel selfRef2 = new SelfReferentialGenericModel(true, 3.14, - new SelfReferentialGenericModel(3.42, true, null)); + SelfReferentialGenericModel selfRef1 = new SelfReferentialGenericModel<>(true, 33L, + new SelfReferentialGenericModel<>(44L, false, null)); + SelfReferentialGenericModel selfRef2 = new SelfReferentialGenericModel<>(true, 3.14, + new SelfReferentialGenericModel<>(3.42, true, null)); NestedSelfReferentialGenericModel nested = - new NestedSelfReferentialGenericModel(true, 42L, 44.0, selfRef1, selfRef2); + new NestedSelfReferentialGenericModel<>(true, 42L, 44.0, selfRef1, selfRef2); return new NestedSelfReferentialGenericHolderModel(nested); } static NestedGenericHolderModel getNestedGenericHolderModel() { - return new NestedGenericHolderModel(new GenericHolderModel("generic", 1L)); + return new NestedGenericHolderModel(new GenericHolderModel<>("generic", 1L)); } static NestedGenericHolderMapModel getNestedGenericHolderMapModel() { - Map mapSimple = new HashMap(); + Map mapSimple = new HashMap<>(); mapSimple.put("s", getSimpleModel()); - return new NestedGenericHolderMapModel(new GenericHolderModel>(mapSimple, 1L)); + return new NestedGenericHolderMapModel(new GenericHolderModel<>(mapSimple, 1L)); } static NestedReusedGenericsModel 
getNestedReusedGenericsModel() { - return new NestedReusedGenericsModel(new ReusedGenericsModel, String>(1L, + return new NestedReusedGenericsModel(new ReusedGenericsModel<>(1L, singletonList(getSimpleModel()), "field3", 42, "field5", asList(getSimpleModel(), getSimpleModel()), 2L, "field8")); } static GenericTreeModel getGenericTreeModel() { - return new GenericTreeModel("top", 1, - new GenericTreeModel("left", 2, - new GenericTreeModel("left", 3, null, null), null), - new GenericTreeModel("right", 4, - new GenericTreeModel("left", 5, null, null), null)); + return new GenericTreeModel<>("top", 1, + new GenericTreeModel<>("left", 2, + new GenericTreeModel<>("left", 3, null, null), null), + new GenericTreeModel<>("right", 4, + new GenericTreeModel<>("left", 5, null, null), null)); } static GenericTreeModel getGenericTreeModelStrings() { - return new GenericTreeModel("top", "1", - new GenericTreeModel("left", "2", - new GenericTreeModel("left", "3", null, null), null), - new GenericTreeModel("right", "4", - new GenericTreeModel("left", "5", null, null), null)); + return new GenericTreeModel<>("top", "1", + new GenericTreeModel<>("left", "2", + new GenericTreeModel<>("left", "3", null, null), null), + new GenericTreeModel<>("right", "4", + new GenericTreeModel<>("left", "5", null, null), null)); } static InvalidMapModel getInvalidMapModel() { - Map map = new HashMap(); + Map map = new HashMap<>(); map.put(1, 1); map.put(2, 2); return new InvalidMapModel(map); diff --git a/bson/src/test/unit/org/bson/codecs/pojo/PropertyModelBuilderTest.java b/bson/src/test/unit/org/bson/codecs/pojo/PropertyModelBuilderTest.java index 3bc54a9cdb8..9ec8ffb96f7 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/PropertyModelBuilderTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/PropertyModelBuilderTest.java @@ -18,23 +18,24 @@ import org.bson.codecs.IntegerCodec; import org.bson.codecs.pojo.annotations.BsonProperty; -import org.junit.Test; +import org.junit.jupiter.api.Test; 
import java.lang.annotation.Annotation; import java.util.Collections; import java.util.List; -import static junit.framework.TestCase.assertTrue; import static org.bson.codecs.pojo.PojoBuilderHelper.createPropertyModelBuilder; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public final class PropertyModelBuilderTest { private static final String FIELD_NAME = "myFieldName"; private static final PropertyMetadata PROPERTY_METADATA = - new PropertyMetadata(FIELD_NAME, "MyClass", TypeData.builder(Integer.class).build()); + new PropertyMetadata<>(FIELD_NAME, "MyClass", TypeData.builder(Integer.class).build()); @Test public void testFieldMapping() throws NoSuchFieldException { @@ -67,15 +68,16 @@ public void testFieldOverrides() throws NoSuchFieldException { assertFalse(propertyModelBuilder.isDiscriminatorEnabled()); } - @Test(expected = IllegalStateException.class) + @Test public void testMustBeReadableOrWritable() { - createPropertyModelBuilder(PROPERTY_METADATA) + assertThrows(IllegalStateException.class, () -> + createPropertyModelBuilder(PROPERTY_METADATA) .readName(null) .writeName(null) - .build(); + .build()); } - private static final List ANNOTATIONS = Collections.singletonList( + private static final List ANNOTATIONS = Collections.singletonList( new BsonProperty() { @Override public Class annotationType() { @@ -93,12 +95,7 @@ public boolean useDiscriminator() { } }); - private static final PropertySerialization CUSTOM_SERIALIZATION = new PropertySerialization() { - @Override - public boolean shouldSerialize(final Integer value) { - return false; - } - }; + private static final 
PropertySerialization CUSTOM_SERIALIZATION = value -> false; private static final PropertyAccessor FIELD_ACCESSOR = new PropertyAccessor() { @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/PropertyModelTest.java b/bson/src/test/unit/org/bson/codecs/pojo/PropertyModelTest.java index be99fb56298..bd7f2160a4d 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/PropertyModelTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/PropertyModelTest.java @@ -18,27 +18,27 @@ import org.bson.codecs.IntegerCodec; import org.bson.codecs.pojo.annotations.BsonProperty; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.lang.annotation.Annotation; import java.util.Collections; import java.util.List; -import static junit.framework.TestCase.assertFalse; import static org.bson.codecs.pojo.PojoBuilderHelper.createPropertyModelBuilder; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; public final class PropertyModelTest { private static final String FIELD_NAME = "myFieldName"; private static final PropertyMetadata PROPERTY_METADATA = - new PropertyMetadata(FIELD_NAME, "MyClass", TypeData.builder(Integer.class).build()); + new PropertyMetadata<>(FIELD_NAME, "MyClass", TypeData.builder(Integer.class).build()); @Test public void testPropertyMapping() throws NoSuchFieldException { - PropertySerialization serializer = new PropertyModelSerializationImpl(); - PropertyAccessor accessor = new PropertyAccessorImpl(PROPERTY_METADATA); + PropertySerialization serializer = new PropertyModelSerializationImpl<>(); + PropertyAccessor accessor = new PropertyAccessorImpl<>(PROPERTY_METADATA); PropertyModel propertyModel = createPropertyModelBuilder(PROPERTY_METADATA) .propertySerialization(serializer) .propertyAccessor(accessor) @@ -75,7 +75,7 
@@ public void testPropertyOverrides() throws NoSuchFieldException { assertFalse(propertyModel.useDiscriminator()); } - private static final List ANNOTATIONS = Collections.singletonList( + private static final List ANNOTATIONS = Collections.singletonList( new BsonProperty() { @Override public Class annotationType() { @@ -93,12 +93,7 @@ public boolean useDiscriminator() { } }); - private static final PropertySerialization CUSTOM_SERIALIZATION = new PropertySerialization() { - @Override - public boolean shouldSerialize(final Integer value) { - return false; - } - }; + private static final PropertySerialization CUSTOM_SERIALIZATION = value -> false; private static final PropertyAccessor FIELD_ACCESSOR = new PropertyAccessor() { @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/TypeDataTest.java b/bson/src/test/unit/org/bson/codecs/pojo/TypeDataTest.java index 03c45dc4b1a..ee52e7e7bcf 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/TypeDataTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/TypeDataTest.java @@ -17,15 +17,15 @@ package org.bson.codecs.pojo; import org.bson.codecs.pojo.entities.GenericHolderModel; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.List; import java.util.Map; import static java.util.Collections.singletonList; -import static junit.framework.TestCase.assertTrue; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; @SuppressWarnings("rawtypes") public final class TypeDataTest { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/TypeParameterMapTest.java b/bson/src/test/unit/org/bson/codecs/pojo/TypeParameterMapTest.java index 95acff41ae0..6b743da53a2 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/TypeParameterMapTest.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/TypeParameterMapTest.java @@ -16,13 +16,14 @@ package org.bson.codecs.pojo; 
-import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.HashMap; import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public final class TypeParameterMapTest { @@ -35,22 +36,23 @@ public void testDefault() { @Test public void testClassParamMapsToField() { TypeParameterMap typeParameterMap = TypeParameterMap.builder().addIndex(1).build(); - Map expected = new HashMap(); - expected.put(-1, 1); + Map> expected = new HashMap<>(); + expected.put(-1, Either.left(1)); assertEquals(expected, typeParameterMap.getPropertyToClassParamIndexMap()); } @Test public void testMapsClassAndFieldIndices() { TypeParameterMap typeParameterMap = TypeParameterMap.builder().addIndex(1, 2).addIndex(2, 2).build(); - Map expected = new HashMap(); - expected.put(1, 2); - expected.put(2, 2); + Map> expected = new HashMap<>(); + expected.put(1, Either.left(2)); + expected.put(2, Either.left(2)); assertEquals(expected, typeParameterMap.getPropertyToClassParamIndexMap()); } - @Test(expected = IllegalStateException.class) + @Test public void testFieldCannotBeGenericAndContainTypeParameters() { - TypeParameterMap.builder().addIndex(1).addIndex(2, 2).build(); + assertThrows(IllegalStateException.class, () -> + TypeParameterMap.builder().addIndex(1).addIndex(2, 2).build()); } } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/BaseField.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/BaseField.java new file mode 100644 index 00000000000..4393c5f2d7f --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/BaseField.java @@ -0,0 +1,55 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import java.util.Objects; + +public abstract class BaseField { + private String name; + + public BaseField(final String name) { + this.name = name; + } + + protected BaseField() { + } + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BaseField baseField = (BaseField) o; + return Objects.equals(name, baseField.name); + } + + @Override + public int hashCode() { + return Objects.hashCode(name); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonIdModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonIdModel.java new file mode 100644 index 00000000000..8bf98785c61 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonIdModel.java @@ -0,0 +1,50 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import org.bson.BsonObjectId; + +import java.util.Objects; + +public class BsonIdModel { + private BsonObjectId id; + + public BsonObjectId getId() { + return id; + } + + public void setId(final BsonObjectId id) { + this.id = id; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BsonIdModel bsonId = (BsonIdModel) o; + return Objects.equals(id, bsonId.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonRepresentationUnsupportedInt.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonRepresentationUnsupportedInt.java new file mode 100644 index 00000000000..379c904f26f --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonRepresentationUnsupportedInt.java @@ -0,0 +1,69 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import org.bson.BsonType; +import org.bson.codecs.pojo.annotations.BsonRepresentation; + +import java.util.Objects; + +public class BsonRepresentationUnsupportedInt { + private String id; + + @BsonRepresentation(BsonType.STRING) + private int age; + + public BsonRepresentationUnsupportedInt() {} + + public BsonRepresentationUnsupportedInt(final int age) { + this.id = "1"; + this.age = age; + } + + public String getId() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + public int getAge() { + return age; + } + + public void setAge(final int age) { + this.age = age; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BsonRepresentationUnsupportedInt that = (BsonRepresentationUnsupportedInt) o; + return age == that.age && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id, age); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonRepresentationUnsupportedString.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonRepresentationUnsupportedString.java new file mode 100644 index 00000000000..f9ddbbf61e1 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/BsonRepresentationUnsupportedString.java @@ -0,0 +1,72 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import org.bson.BsonType; +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.codecs.pojo.annotations.BsonRepresentation; + +import java.util.Objects; + +public class BsonRepresentationUnsupportedString { + @BsonId + private String id; + + @BsonRepresentation(BsonType.INT32) + private String s; + + public BsonRepresentationUnsupportedString() { + } + + public BsonRepresentationUnsupportedString(final String s) { + this.id = "1"; + this.s = s; + } + + public void setId(final String id) { + this.id = id; + } + + public void setS(final String s) { + this.s = s; + } + + public String getId() { + return id; + } + + public String getS() { + return s; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BsonRepresentationUnsupportedString that = (BsonRepresentationUnsupportedString) o; + return Objects.equals(id, that.id) && Objects.equals(s, that.s); + } + + @Override + public int hashCode() { + return Objects.hash(id, s); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionNestedPojoModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionNestedPojoModel.java index ce81e18897d..554469249d8 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionNestedPojoModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionNestedPojoModel.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Map; import 
java.util.Set; +import java.util.SortedSet; import static java.util.Collections.singletonList; @@ -32,6 +33,8 @@ public final class CollectionNestedPojoModel { private Set setSimple; private Set> setSetSimple; + private SortedSet sortedSetSimple; + private Map mapSimple; private Map> mapMapSimple; @@ -47,7 +50,8 @@ public CollectionNestedPojoModel() { } public CollectionNestedPojoModel(final List listSimple, final List> listListSimple, final - Set setSimple, final Set> setSetSimple, final Map mapSimple, final Map setSimple, final Set> setSetSimple, final SortedSet sortedSetSimple, + final Map mapSimple, final Map> mapMapSimple, final Map> mapListSimple, final Map>> mapListMapSimple, final Map> mapSetSimple, final List> listMapSimple, final List>> listMapListSimple, final List listSimple, final List< this.listListSimple = listListSimple; this.setSimple = setSimple; this.setSetSimple = setSetSimple; + this.sortedSetSimple = sortedSetSimple; this.mapSimple = mapSimple; this.mapMapSimple = mapMapSimple; this.mapListSimple = mapListSimple; @@ -98,6 +103,14 @@ public void setSetSimple(final Set setSimple) { this.setSimple = setSimple; } + public SortedSet getSortedSetSimple() { + return sortedSetSimple; + } + + public void setSortedSetSimple(final SortedSet sortedSetSimple) { + this.sortedSetSimple = sortedSetSimple; + } + public Set> getSetSetSimple() { return setSetSimple; } @@ -193,6 +206,9 @@ public boolean equals(final Object o) { if (getSetSetSimple() != null ? !getSetSetSimple().equals(that.getSetSetSimple()) : that.getSetSetSimple() != null) { return false; } + if (getSortedSetSimple() != null ? !getSortedSetSimple().equals(that.getSortedSetSimple()) : that.getSortedSetSimple() != null) { + return false; + } if (getMapSimple() != null ? !getMapSimple().equals(that.getMapSimple()) : that.getMapSimple() != null) { return false; } @@ -230,6 +246,7 @@ public int hashCode() { result = 31 * result + (getListListSimple() != null ? 
getListListSimple().hashCode() : 0); result = 31 * result + (getSetSimple() != null ? getSetSimple().hashCode() : 0); result = 31 * result + (getSetSetSimple() != null ? getSetSetSimple().hashCode() : 0); + result = 31 * result + (getSortedSetSimple() != null ? getSortedSetSimple().hashCode() : 0); result = 31 * result + (getMapSimple() != null ? getMapSimple().hashCode() : 0); result = 31 * result + (getMapMapSimple() != null ? getMapMapSimple().hashCode() : 0); result = 31 * result + (getMapListSimple() != null ? getMapListSimple().hashCode() : 0); @@ -248,6 +265,7 @@ public String toString() { + ", listListSimple=" + listListSimple + ", setSimple=" + setSimple + ", setSetSimple=" + setSetSimple + + ", sortedSetSimple=" + sortedSetSimple + ", mapSimple=" + mapSimple + ", mapMapSimple=" + mapMapSimple + ", mapListSimple=" + mapListSimple diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionSpecificReturnTypeCreatorModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionSpecificReturnTypeCreatorModel.java index 761b7fe9a7b..b14027304e0 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionSpecificReturnTypeCreatorModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionSpecificReturnTypeCreatorModel.java @@ -20,6 +20,7 @@ import org.bson.codecs.pojo.annotations.BsonProperty; import java.util.List; +import java.util.Objects; public class CollectionSpecificReturnTypeCreatorModel extends AbstractCollectionSpecificReturnTypeCreatorModel { private final ImmutableList properties; @@ -44,7 +45,7 @@ public boolean equals(final Object o) { CollectionSpecificReturnTypeCreatorModel that = (CollectionSpecificReturnTypeCreatorModel) o; - return properties != null ?
properties.equals(that.properties) : that.properties == null; + return Objects.equals(properties, that.properties); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionSpecificReturnTypeModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionSpecificReturnTypeModel.java index cf984f6c731..877e35ac91d 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionSpecificReturnTypeModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/CollectionSpecificReturnTypeModel.java @@ -17,6 +17,7 @@ package org.bson.codecs.pojo.entities; import java.util.List; +import java.util.Objects; public class CollectionSpecificReturnTypeModel { private ImmutableList properties; @@ -47,7 +48,7 @@ public boolean equals(final Object o) { CollectionSpecificReturnTypeModel that = (CollectionSpecificReturnTypeModel) o; - return properties != null ? properties.equals(that.properties) : that.properties == null; + return Objects.equals(properties, that.properties); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ComposeInterfaceModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ComposeInterfaceModel.java new file mode 100644 index 00000000000..b2d7beb765d --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ComposeInterfaceModel.java @@ -0,0 +1,74 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.Objects; + +public class ComposeInterfaceModel { + private String title; + private InterfaceModelB nestedModel; + + public ComposeInterfaceModel() { + } + + public ComposeInterfaceModel(final String title, final InterfaceModelB nestedModel) { + this.title = title; + this.nestedModel = nestedModel; + } + + public String getTitle() { + return title; + } + + public void setTitle(final String title) { + this.title = title; + } + + public InterfaceModelB getNestedModel() { + return nestedModel; + } + + public void setNestedModel(final InterfaceModelB nestedModel) { + this.nestedModel = nestedModel; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ComposeInterfaceModel that = (ComposeInterfaceModel) o; + return Objects.equals(title, that.title) + && Objects.equals(nestedModel, that.nestedModel); + } + + @Override + public int hashCode() { + return Objects.hash(title, nestedModel); + } + + @Override + public String toString() { + return "ComposeInterfaceModel{" + + "title='" + title + '\'' + + ", nestedModel=" + nestedModel + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteAndNestedAbstractInterfaceModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteAndNestedAbstractInterfaceModel.java index f453adc6dc2..c907a58dda2 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteAndNestedAbstractInterfaceModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteAndNestedAbstractInterfaceModel.java @@ -19,12 +19,14 @@ import org.bson.codecs.pojo.annotations.BsonProperty; +import java.util.List; + public final class ConcreteAndNestedAbstractInterfaceModel extends AbstractInterfaceModel { @BsonProperty(useDiscriminator = true) private InterfaceBasedModel child; + private List wildcardList; public 
ConcreteAndNestedAbstractInterfaceModel() { - super(); } public ConcreteAndNestedAbstractInterfaceModel(final String name, final InterfaceBasedModel child) { @@ -32,6 +34,12 @@ public ConcreteAndNestedAbstractInterfaceModel(final String name, final Interfac this.child = child; } + public ConcreteAndNestedAbstractInterfaceModel(final String name, final List wildcardList) { + super(name); + this.child = null; + this.wildcardList = wildcardList; + } + public InterfaceBasedModel getChild() { return child; } @@ -40,6 +48,14 @@ public void setChild(final InterfaceBasedModel child) { this.child = child; } + public List getWildcardList() { + return wildcardList; + } + + public void setWildcardList(final List wildcardList) { + this.wildcardList = wildcardList; + } + @Override public boolean equals(final Object o) { if (this == o) { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteField.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteField.java new file mode 100644 index 00000000000..6fb06a70de9 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteField.java @@ -0,0 +1,27 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +public class ConcreteField extends BaseField { + + public ConcreteField() { + } + + public ConcreteField(final String name) { + super(name); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteInterfaceGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteInterfaceGenericModel.java index de73e8d46a7..9f2799db6eb 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteInterfaceGenericModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteInterfaceGenericModel.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import java.util.Objects; + public class ConcreteInterfaceGenericModel implements InterfaceGenericModel { private String property; @@ -47,7 +49,7 @@ public boolean equals(final Object o) { ConcreteInterfaceGenericModel that = (ConcreteInterfaceGenericModel) o; - return property != null ? property.equals(that.property) : that.property == null; + return Objects.equals(property, that.property); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteModel.java new file mode 100644 index 00000000000..cd406fa1392 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteModel.java @@ -0,0 +1,27 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +public class ConcreteModel extends GenericBaseModel { + + public ConcreteModel() { + } + + public ConcreteModel(final ConcreteField field) { + super(field); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteStandAloneAbstractInterfaceModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteStandAloneAbstractInterfaceModel.java index 701897aef60..aaf71875f08 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteStandAloneAbstractInterfaceModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConcreteStandAloneAbstractInterfaceModel.java @@ -19,7 +19,6 @@ public final class ConcreteStandAloneAbstractInterfaceModel extends AbstractInterfaceModel { public ConcreteStandAloneAbstractInterfaceModel() { - super(); } public ConcreteStandAloneAbstractInterfaceModel(final String name) { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConventionModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConventionModel.java index dbf4ac9ab21..20417f075c4 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/ConventionModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ConventionModel.java @@ -41,7 +41,6 @@ public ConventionModel(){ } public ConventionModel(final String customId, final ConventionModel child, final SimpleModel simpleModel) { - this.myIntField = myIntField; this.customId = customId; this.child = child; this.simpleModel = simpleModel; diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/CustomPropertyCodecOptionalModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/CustomPropertyCodecOptionalModel.java index c28da6da915..2e5359b8500 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/CustomPropertyCodecOptionalModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/CustomPropertyCodecOptionalModel.java @@ -18,12 +18,14 @@ import org.bson.codecs.pojo.annotations.BsonCreator; 
import org.bson.codecs.pojo.annotations.BsonProperty; +import java.util.Objects; + public class CustomPropertyCodecOptionalModel { private final Optional optionalField; @BsonCreator - public CustomPropertyCodecOptionalModel(final @BsonProperty("optionalField") Optional optionalField) { - this.optionalField = optionalField == null ? Optional.empty() : optionalField; + public CustomPropertyCodecOptionalModel(@BsonProperty("optionalField") final Optional optionalField) { + this.optionalField = optionalField == null ? Optional.empty() : optionalField; } public Optional getOptionalField() { @@ -41,7 +43,7 @@ public boolean equals(final Object o) { CustomPropertyCodecOptionalModel that = (CustomPropertyCodecOptionalModel) o; - return optionalField != null ? optionalField.equals(that.optionalField) : that.optionalField == null; + return Objects.equals(optionalField, that.optionalField); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorModel.java new file mode 100644 index 00000000000..1ef419540bd --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorModel.java @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import org.bson.codecs.pojo.annotations.BsonDiscriminator; + +@BsonDiscriminator(key = "discriminatorKey", value = "discriminatorValue") +public class DiscriminatorModel { + + public DiscriminatorModel() { + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithGetterModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithGetterModel.java new file mode 100644 index 00000000000..53b57dabff7 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithGetterModel.java @@ -0,0 +1,51 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import org.bson.codecs.pojo.annotations.BsonDiscriminator; + +import java.util.Objects; + +@BsonDiscriminator(key = "discriminatorKey", value = "discriminatorValue") +public class DiscriminatorWithGetterModel { + + public DiscriminatorWithGetterModel() { + } + + public String getDiscriminatorKey() { + return "discriminatorValue"; + } + + @Override + public boolean equals(final Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + final DiscriminatorWithGetterModel that = (DiscriminatorWithGetterModel) o; + return Objects.equals(getDiscriminatorKey(), that.getDiscriminatorKey()); + } + + @Override + public int hashCode() { + return Objects.hashCode(getDiscriminatorKey()); + } + + @Override + public String toString() { + return "DiscriminatorWithGetterModel{}"; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithProperty.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithProperty.java new file mode 100644 index 00000000000..a7a5b4060f5 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithProperty.java @@ -0,0 +1,53 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import org.bson.codecs.pojo.annotations.BsonDiscriminator; +import org.bson.codecs.pojo.annotations.BsonProperty; + +import java.util.Objects; + +@BsonDiscriminator(key = "discriminatorKey", value = "discriminatorValue") +public class DiscriminatorWithProperty { + + public DiscriminatorWithProperty() { + } + + @BsonProperty("discriminatorKey") + public String getDiscriminator() { + return "discriminatorValue"; + } + + @Override + public boolean equals(final Object o) { + if (o == null || getClass() != o.getClass()){ + return false; + } + final DiscriminatorWithProperty that = (DiscriminatorWithProperty) o; + return Objects.equals(getDiscriminator(), that.getDiscriminator()); + } + + @Override + public int hashCode() { + return Objects.hashCode(getDiscriminator()); + } + + @Override + public String toString() { + return "DiscriminatorWithProperty{}"; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithPropertyAndIgnore.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithPropertyAndIgnore.java new file mode 100644 index 00000000000..ea92fbea928 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/DiscriminatorWithPropertyAndIgnore.java @@ -0,0 +1,59 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import org.bson.codecs.pojo.annotations.BsonDiscriminator; +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.codecs.pojo.annotations.BsonProperty; + +import java.util.Objects; + +@BsonDiscriminator(key = "discriminatorKey", value = "discriminatorValue") +public class DiscriminatorWithPropertyAndIgnore { + + public DiscriminatorWithPropertyAndIgnore() { + } + + @BsonIgnore + public String getFoo() { + return "foo"; + } + + @BsonProperty("discriminatorKey") + public String getDiscriminator() { + return "discriminatorValue"; + } + + @Override + public boolean equals(final Object o) { + if (o == null || getClass() != o.getClass()){ + return false; + } + final DiscriminatorWithPropertyAndIgnore that = (DiscriminatorWithPropertyAndIgnore) o; + return Objects.equals(getDiscriminator(), that.getDiscriminator()); + } + + @Override + public int hashCode() { + return Objects.hashCode(getDiscriminator()); + } + + @Override + public String toString() { + return "DiscriminatorWithPropertyAndIgnore{}"; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/DuplicateAnnotationAllowedModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/DuplicateAnnotationAllowedModel.java new file mode 100644 index 00000000000..211b1e12b50 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/DuplicateAnnotationAllowedModel.java @@ -0,0 +1,90 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.codecs.pojo.annotations.BsonProperty; + +import javax.annotation.Nullable; +import java.util.Objects; + +public class DuplicateAnnotationAllowedModel { + + @Nullable + private String id; + + @BsonIgnore + private String ignoredString; + + @BsonProperty("property") + private String propertyString; + + public DuplicateAnnotationAllowedModel() { + } + + public DuplicateAnnotationAllowedModel(final String id) { + this.id = id; + } + + @Nullable + public String getId() { + return id; + } + + public void setId(@Nullable final String id) { + this.id = id; + } + + @BsonIgnore + public String getIgnoredString() { + return ignoredString; + } + + @BsonIgnore + public void setIgnoredString(final String ignoredString) { + this.ignoredString = ignoredString; + } + + @BsonProperty("property") + public String getPropertyString() { + return propertyString; + } + + @BsonProperty("property") + public void setPropertyString(final String propertyString) { + this.propertyString = propertyString; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + DuplicateAnnotationAllowedModel that = (DuplicateAnnotationAllowedModel) o; + + return (Objects.equals(id, that.id)); + } + + @Override + public int hashCode() { + return id != null ? id.hashCode() : 0; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/GenericBaseModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/GenericBaseModel.java new file mode 100644 index 00000000000..5164f9703e5 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/GenericBaseModel.java @@ -0,0 +1,59 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import org.bson.codecs.pojo.annotations.BsonDiscriminator; + +import java.util.Objects; + +@BsonDiscriminator() +public class GenericBaseModel { + + private T field; + + public GenericBaseModel(final T field) { + this.field = field; + } + + public GenericBaseModel() { + } + + public T getField() { + return field; + } + + public void setField(final T field) { + this.field = field; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GenericBaseModel that = (GenericBaseModel) o; + return Objects.equals(field, that.field); + } + + @Override + public int hashCode() { + return Objects.hashCode(field); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ImmutableList.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ImmutableList.java index de65db6537e..044e9f8e20e 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/ImmutableList.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ImmutableList.java @@ -31,7 +31,7 @@ public static ImmutableList copyOf(final List list) { if (list instanceof ImmutableList) { return (ImmutableList) list; } else { - return new ImmutableList(new ArrayList(list)); + return new ImmutableList<>(new ArrayList<>(list)); } } @@ -161,6 +161,6 @@ public ListIterator listIterator(final int 
index) { @Override public List subList(final int fromIndex, final int toIndex) { - return new ImmutableList(list.subList(fromIndex, toIndex)); + return new ImmutableList<>(list.subList(fromIndex, toIndex)); } } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceModelC.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceModelC.java new file mode 100644 index 00000000000..e1bf81788dc --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceModelC.java @@ -0,0 +1,27 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + + +public interface InterfaceModelC extends InterfaceModelA { + + default String getPropertyC() { + return "c"; + } + + void setPropertyC(String propertyC); +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceModelImpl.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceModelImpl.java index 90828dc8e6e..9db110c6115 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceModelImpl.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceModelImpl.java @@ -63,7 +63,15 @@ public boolean equals(final Object o) { @Override public int hashCode() { int result = getPropertyA() != null ? getPropertyA().hashCode() : 0; - result = 31 * result + getPropertyB() != null ? getPropertyB().hashCode() : 0; + result = 31 * result + (getPropertyB() != null ? 
getPropertyB().hashCode() : 0); return result; } + + @Override + public String toString() { + return "InterfaceModelImpl{" + + "propertyA='" + getPropertyA() + "', " + + "propertyB='" + getPropertyB() + '\'' + + '}'; + } } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceUpperBoundsModelAbstractImpl.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceUpperBoundsModelAbstractImpl.java index 88f2df94325..9766c70e70c 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceUpperBoundsModelAbstractImpl.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceUpperBoundsModelAbstractImpl.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import java.util.Objects; + public class InterfaceUpperBoundsModelAbstractImpl extends InterfaceUpperBoundsModelAbstract { private String name; private InterfaceModelImpl nestedModel; @@ -57,10 +59,10 @@ public boolean equals(final Object o) { InterfaceUpperBoundsModelAbstractImpl that = (InterfaceUpperBoundsModelAbstractImpl) o; - if (name != null ? !name.equals(that.name) : that.name != null) { + if (!Objects.equals(name, that.name)) { return false; } - return nestedModel != null ? nestedModel.equals(that.nestedModel) : that.nestedModel == null; + return Objects.equals(nestedModel, that.nestedModel); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceWithDefaultMethodModelImpl.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceWithDefaultMethodModelImpl.java new file mode 100644 index 00000000000..1f9a104be5c --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceWithDefaultMethodModelImpl.java @@ -0,0 +1,64 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +public class InterfaceWithDefaultMethodModelImpl extends InterfaceModelAbstract implements InterfaceModelC { + + private String propertyC; + + public InterfaceWithDefaultMethodModelImpl() { + } + + public InterfaceWithDefaultMethodModelImpl(final String propertyA, final String propertyC) { + super(propertyA); + this.propertyC = propertyC; + } + + @Override + public void setPropertyC(final String propertyC) { + this.propertyC = propertyC; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + InterfaceWithDefaultMethodModelImpl that = (InterfaceWithDefaultMethodModelImpl) o; + + if (getPropertyA() != null ? !getPropertyA().equals(that.getPropertyA()) : that.getPropertyA() != null) { + return false; + } + + if (getPropertyC() != null ? !getPropertyC().equals(that.getPropertyC()) : that.getPropertyC() != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = getPropertyA() != null ? getPropertyA().hashCode() : 0; + result = 31 * result + (getPropertyC() != null ?
getPropertyC().hashCode() : 0); + return result; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceWithOverrideDefaultMethodModelImpl.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceWithOverrideDefaultMethodModelImpl.java new file mode 100644 index 00000000000..6e6c9b19761 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/InterfaceWithOverrideDefaultMethodModelImpl.java @@ -0,0 +1,69 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +public class InterfaceWithOverrideDefaultMethodModelImpl extends InterfaceModelAbstract implements InterfaceModelC { + + private String propertyC; + + public InterfaceWithOverrideDefaultMethodModelImpl() { + } + + public InterfaceWithOverrideDefaultMethodModelImpl(final String propertyA, final String propertyC) { + super(propertyA); + this.propertyC = propertyC; + } + + @Override + public String getPropertyC() { + return propertyC; + } + + @Override + public void setPropertyC(final String propertyC) { + this.propertyC = propertyC; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + InterfaceWithOverrideDefaultMethodModelImpl that = (InterfaceWithOverrideDefaultMethodModelImpl) o; + + if (getPropertyA() != null ?
!getPropertyA().equals(that.getPropertyA()) : that.getPropertyA() != null) { + return false; + } + + if (getPropertyC() != null ? !getPropertyC().equals(that.getPropertyC()) : that.getPropertyC() != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = getPropertyA() != null ? getPropertyA().hashCode() : 0; + result = 31 * result + getPropertyC() != null ? getPropertyC().hashCode() : 0; + return result; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidCollection.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidCollection.java index 47df811674e..f6951e7a5e1 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidCollection.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidCollection.java @@ -26,7 +26,7 @@ public class InvalidCollection implements Collection { private final List wrapped; public InvalidCollection(final List wrapped) { - this.wrapped = new ArrayList(wrapped); + this.wrapped = new ArrayList<>(wrapped); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidMapModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidMapModel.java index c7e7ea67e58..8e7990da785 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidMapModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidMapModel.java @@ -17,6 +17,7 @@ package org.bson.codecs.pojo.entities; import java.util.Map; +import java.util.Objects; public final class InvalidMapModel { private Map invalidMap; @@ -47,7 +48,7 @@ public boolean equals(final Object o) { InvalidMapModel that = (InvalidMapModel) o; - return invalidMap != null ? 
invalidMap.equals(that.invalidMap) : that.invalidMap == null; + return Objects.equals(invalidMap, that.invalidMap); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidMapPropertyCodecProvider.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidMapPropertyCodecProvider.java index 4a66f47fe97..3716c00a179 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidMapPropertyCodecProvider.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/InvalidMapPropertyCodecProvider.java @@ -44,7 +44,7 @@ public Codec get(final TypeWithTypeParameters type, final PropertyCode } private static final class InvalidMapModelCodec implements Codec> { - private Class> encoderClass; + private final Class> encoderClass; private InvalidMapModelCodec(final Class> encoderClass) { this.encoderClass = encoderClass; @@ -52,7 +52,7 @@ private InvalidMapModelCodec(final Class> encoderClass) { @Override public Map decode(final BsonReader reader, final DecoderContext decoderContext) { - Map map = new HashMap(); + Map map = new HashMap<>(); reader.readStartDocument(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ListGenericExtendedModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListGenericExtendedModel.java new file mode 100644 index 00000000000..5d7072caad8 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListGenericExtendedModel.java @@ -0,0 +1,34 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import java.util.List; + +public class ListGenericExtendedModel extends ListGenericModel { + + public ListGenericExtendedModel() { + } + + public ListGenericExtendedModel(final List values) { + super(values); + } + + @Override + public String toString() { + return "ListGenericExtendedModel{} " + super.toString(); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ListGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListGenericModel.java new file mode 100644 index 00000000000..012395bddda --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListGenericModel.java @@ -0,0 +1,64 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.List; +import java.util.Objects; + +public class ListGenericModel { + + private List values; + + public ListGenericModel() { + } + + public ListGenericModel(final List values) { + this.values = values; + } + + public List getValues() { + return values; + } + + public void setValues(final List values) { + this.values = values; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ListGenericModel that = (ListGenericModel) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return "ListGenericModel{" + + "values=" + values + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ListListGenericExtendedModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListListGenericExtendedModel.java new file mode 100644 index 00000000000..4310785ac45 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListListGenericExtendedModel.java @@ -0,0 +1,34 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.List; + +public class ListListGenericExtendedModel extends ListListGenericModel { + + public ListListGenericExtendedModel() { + } + + public ListListGenericExtendedModel(final List> values) { + super(values); + } + + @Override + public String toString() { + return "ListListGenericExtendedModel{} " + super.toString(); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ListListGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListListGenericModel.java new file mode 100644 index 00000000000..7fe97c79b5f --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListListGenericModel.java @@ -0,0 +1,64 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.List; +import java.util.Objects; + +public class ListListGenericModel { + + private List> values; + + public ListListGenericModel() { + } + + public ListListGenericModel(final List> values) { + this.values = values; + } + + public List> getValues() { + return values; + } + + public void setValues(final List> values) { + this.values = values; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ListListGenericModel that = (ListListGenericModel) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return "ListListGenericModel{" + + "values=" + values + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ListMapGenericExtendedModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListMapGenericExtendedModel.java new file mode 100644 index 00000000000..d0f3c471fae --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListMapGenericExtendedModel.java @@ -0,0 +1,35 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.List; +import java.util.Map; + +public class ListMapGenericExtendedModel extends ListMapGenericModel { + + public ListMapGenericExtendedModel() { + } + + public ListMapGenericExtendedModel(final List> values) { + super(values); + } + + @Override + public String toString() { + return "ListMapGenericExtendedModel{} " + super.toString(); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ListMapGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListMapGenericModel.java new file mode 100644 index 00000000000..ef913c88920 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ListMapGenericModel.java @@ -0,0 +1,65 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class ListMapGenericModel { + + private List> values; + + public ListMapGenericModel() { + } + + public ListMapGenericModel(final List> values) { + this.values = values; + } + + public List> getValues() { + return values; + } + + public void setValues(final List> values) { + this.values = values; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ListMapGenericModel that = (ListMapGenericModel) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return "ListMapGenericModel{" + + "values=" + values + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MapGenericExtendedModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapGenericExtendedModel.java new file mode 100644 index 00000000000..cdda1325248 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapGenericExtendedModel.java @@ -0,0 +1,34 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.Map; + +public class MapGenericExtendedModel extends MapGenericModel { + + public MapGenericExtendedModel() { + } + + public MapGenericExtendedModel(final Map values) { + super(values); + } + + @Override + public String toString() { + return "MapGenericExtendedModel{} " + super.toString(); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MapGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapGenericModel.java new file mode 100644 index 00000000000..a4db52b4fa9 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapGenericModel.java @@ -0,0 +1,64 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.Map; +import java.util.Objects; + +public class MapGenericModel { + + private Map values; + + public MapGenericModel() { + } + + public MapGenericModel(final Map values) { + this.values = values; + } + + public Map getValues() { + return values; + } + + public void setValues(final Map values) { + this.values = values; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MapGenericModel that = (MapGenericModel) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return "MapGenericModel{" + + "values=" + values + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MapListGenericExtendedModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapListGenericExtendedModel.java new file mode 100644 index 00000000000..776bd1f6a9b --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapListGenericExtendedModel.java @@ -0,0 +1,35 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.List; +import java.util.Map; + +public class MapListGenericExtendedModel extends MapListGenericModel { + + public MapListGenericExtendedModel() { + } + + public MapListGenericExtendedModel(final Map> values) { + super(values); + } + + @Override + public String toString() { + return "MapListGenericExtendedModel{} " + super.toString(); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MapListGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapListGenericModel.java new file mode 100644 index 00000000000..e75019c2247 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapListGenericModel.java @@ -0,0 +1,65 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class MapListGenericModel { + + private Map> values; + + public MapListGenericModel() { + } + + public MapListGenericModel(final Map> values) { + this.values = values; + } + + public Map> getValues() { + return values; + } + + public void setValues(final Map> values) { + this.values = values; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MapListGenericModel that = (MapListGenericModel) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return "MapListGenericModel{" + + "values=" + values + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MapMapGenericExtendedModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapMapGenericExtendedModel.java new file mode 100644 index 00000000000..8b68dddd151 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapMapGenericExtendedModel.java @@ -0,0 +1,34 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.Map; + +public class MapMapGenericExtendedModel extends MapMapGenericModel { + + public MapMapGenericExtendedModel() { + } + + public MapMapGenericExtendedModel(final Map> values) { + super(values); + } + + @Override + public String toString() { + return "MapMapGenericExtendedModel{} " + super.toString(); + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MapMapGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapMapGenericModel.java new file mode 100644 index 00000000000..190291d74d0 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MapMapGenericModel.java @@ -0,0 +1,64 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import java.util.Map; +import java.util.Objects; + +public class MapMapGenericModel { + + private Map> values; + + public MapMapGenericModel() { + } + + public MapMapGenericModel(final Map> values) { + this.values = values; + } + + public Map> getValues() { + return values; + } + + public void setValues(final Map> values) { + this.values = values; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MapMapGenericModel that = (MapMapGenericModel) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return "MapMapGenericModel{" + + "values=" + values + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel1.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel1.java index 39fe4e156b2..b61b3243745 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel1.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel1.java @@ -18,12 +18,12 @@ import java.util.List; import java.util.Map; +import java.util.Objects; public class MultipleBoundsLevel1 extends MultipleBoundsLevel2 { private T level1; public MultipleBoundsLevel1() { - super(); } public MultipleBoundsLevel1(final Map level3, final List level2, final T level1) { @@ -53,7 +53,7 @@ public boolean equals(final Object o) { MultipleBoundsLevel1 that = (MultipleBoundsLevel1) o; - if (level1 != null ? 
!level1.equals(that.level1) : that.level1 != null) { + if (!Objects.equals(level1, that.level1)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel2.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel2.java index 67406dae3aa..6be7ea478ac 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel2.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel2.java @@ -18,12 +18,12 @@ import java.util.List; import java.util.Map; +import java.util.Objects; public class MultipleBoundsLevel2 extends MultipleBoundsLevel3 { private List level2; public MultipleBoundsLevel2() { - super(); } public MultipleBoundsLevel2(final Map level3, final List level2) { @@ -53,7 +53,7 @@ public boolean equals(final Object o) { MultipleBoundsLevel2 that = (MultipleBoundsLevel2) o; - if (level2 != null ? !level2.equals(that.level2) : that.level2 != null) { + if (!Objects.equals(level2, that.level2)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel3.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel3.java index f5313c05e71..b3f7ff03fef 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel3.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsLevel3.java @@ -17,6 +17,7 @@ package org.bson.codecs.pojo.entities; import java.util.Map; +import java.util.Objects; public class MultipleBoundsLevel3 { private Map level3; @@ -47,7 +48,7 @@ public boolean equals(final Object o) { MultipleBoundsLevel3 that = (MultipleBoundsLevel3) o; - if (level3 != null ? 
!level3.equals(that.level3) : that.level3 != null) { + if (!Objects.equals(level3, that.level3)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsModel.java index 66c8e080c09..ae3910f8819 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/MultipleBoundsModel.java @@ -22,7 +22,6 @@ public final class MultipleBoundsModel extends MultipleBoundsLevel1 { public MultipleBoundsModel() { - super(); } public MultipleBoundsModel(final Map level3, final List level2, final Double level1) { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedFieldReusingClassTypeParameter.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedFieldReusingClassTypeParameter.java index 98313294865..114eead120e 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedFieldReusingClassTypeParameter.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedFieldReusingClassTypeParameter.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import java.util.Objects; + public final class NestedFieldReusingClassTypeParameter { public PropertyReusingClassTypeParameter nested; @@ -37,7 +39,7 @@ public boolean equals(final Object o) { NestedFieldReusingClassTypeParameter that = (NestedFieldReusingClassTypeParameter) o; - if (nested != null ? 
!nested.equals(that.nested) : that.nested != null) { + if (!Objects.equals(nested, that.nested)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedGenericHolderSimpleGenericsModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedGenericHolderSimpleGenericsModel.java index 46285ed0411..0ce36b8a8cb 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedGenericHolderSimpleGenericsModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedGenericHolderSimpleGenericsModel.java @@ -18,6 +18,7 @@ import java.util.List; import java.util.Map; +import java.util.Objects; public final class NestedGenericHolderSimpleGenericsModel { private GenericHolderModel, Map>> nested; @@ -49,7 +50,7 @@ public boolean equals(final Object o) { NestedGenericHolderSimpleGenericsModel that = (NestedGenericHolderSimpleGenericsModel) o; - if (nested != null ? !nested.equals(that.nested) : that.nested != null) { + if (!Objects.equals(nested, that.nested)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSelfReferentialGenericHolderModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSelfReferentialGenericHolderModel.java index 4374ba92077..9ff6f8c5d68 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSelfReferentialGenericHolderModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSelfReferentialGenericHolderModel.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import java.util.Objects; + public final class NestedSelfReferentialGenericHolderModel { private NestedSelfReferentialGenericModel nested; @@ -45,7 +47,7 @@ public boolean equals(final Object o) { NestedSelfReferentialGenericHolderModel that = (NestedSelfReferentialGenericHolderModel) o; - if (nested != null ? 
!nested.equals(that.nested) : that.nested != null) { + if (!Objects.equals(nested, that.nested)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSelfReferentialGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSelfReferentialGenericModel.java index 37f201bd088..01fb1685ae5 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSelfReferentialGenericModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSelfReferentialGenericModel.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import java.util.Objects; + public final class NestedSelfReferentialGenericModel { private T t; private V v; @@ -86,19 +88,19 @@ public boolean equals(final Object o) { NestedSelfReferentialGenericModel that = (NestedSelfReferentialGenericModel) o; - if (t != null ? !t.equals(that.t) : that.t != null) { + if (!Objects.equals(t, that.t)) { return false; } - if (v != null ? !v.equals(that.v) : that.v != null) { + if (!Objects.equals(v, that.v)) { return false; } - if (z != null ? !z.equals(that.z) : that.z != null) { + if (!Objects.equals(z, that.z)) { return false; } - if (selfRef1 != null ? !selfRef1.equals(that.selfRef1) : that.selfRef1 != null) { + if (!Objects.equals(selfRef1, that.selfRef1)) { return false; } - if (selfRef2 != null ? !selfRef2.equals(that.selfRef2) : that.selfRef2 != null) { + if (!Objects.equals(selfRef2, that.selfRef2)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSimpleIdModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSimpleIdModel.java new file mode 100644 index 00000000000..9a5462acd30 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/NestedSimpleIdModel.java @@ -0,0 +1,84 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import java.util.Objects; + +public class NestedSimpleIdModel { + private String id; + private SimpleIdModel nestedSimpleIdModel; + + public NestedSimpleIdModel(){ + } + + public NestedSimpleIdModel(final SimpleIdModel nestedSimpleIdModel) { + this(null, nestedSimpleIdModel); + } + + public NestedSimpleIdModel(final String id, final SimpleIdModel nestedSimpleIdModel) { + this.id = id; + this.nestedSimpleIdModel = nestedSimpleIdModel; + } + + public String getId() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + public SimpleIdModel getNestedSimpleIdModel() { + return nestedSimpleIdModel; + } + + public void setNestedSimpleIdModel(final SimpleIdModel nestedSimpleIdModel) { + this.nestedSimpleIdModel = nestedSimpleIdModel; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + NestedSimpleIdModel that = (NestedSimpleIdModel) o; + + if (!Objects.equals(id, that.id)) { + return false; + } + return Objects.equals(nestedSimpleIdModel, that.nestedSimpleIdModel); + } + + @Override + public int hashCode() { + int result = id != null ? id.hashCode() : 0; + result = 31 * result + (nestedSimpleIdModel != null ? 
nestedSimpleIdModel.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return "NestedSimpleIdModel{" + + "id='" + id + '\'' + + ", nestedSimpleIdModel=" + nestedSimpleIdModel + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/Optional.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/Optional.java index 4ac1ae0e325..e70b47a08cf 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/Optional.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/Optional.java @@ -43,7 +43,7 @@ public static Optional of(final T it) { if (it == null) { return (Optional) Optional.NONE; } else { - return new Optional.Some(it); + return new Optional.Some<>(it); } } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/PrivateSetterFieldModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/PrivateSetterFieldModel.java index eb2da3f6e1b..8580aec4dec 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/PrivateSetterFieldModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/PrivateSetterFieldModel.java @@ -33,6 +33,10 @@ public PrivateSetterFieldModel(final Integer integerField, final String stringFi this.listField = listField; } + public String getSomeMethod() { + return "some method"; + } + public Integer getIntegerField() { return integerField; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/PropertyReusingClassTypeParameter.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/PropertyReusingClassTypeParameter.java index 219c1e3ed14..0e1d5d68c12 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/PropertyReusingClassTypeParameter.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/PropertyReusingClassTypeParameter.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import java.util.Objects; + public final class PropertyReusingClassTypeParameter { public GenericTreeModel tree; @@ -38,7 +40,7 @@ public boolean equals(final Object 
o) { PropertyReusingClassTypeParameter that = (PropertyReusingClassTypeParameter) o; - if (tree != null ? !tree.equals(that.tree) : that.tree != null) { + if (!Objects.equals(tree, that.tree)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/PropertySelectionModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/PropertySelectionModel.java index 3b935ccb2de..c7e44bcb776 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/PropertySelectionModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/PropertySelectionModel.java @@ -30,9 +30,9 @@ public final class PropertySelectionModel { private static final String staticFinalStringField = "staticFinalStringField"; - private static String staticStringField = "staticStringField"; + private static final String staticStringField = "staticStringField"; - private transient String transientString = "transientString"; + private final transient String transientString = "transientString"; public PropertySelectionModel() { } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/SelfReferentialGenericModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/SelfReferentialGenericModel.java index c6bb12f9b9d..2558b3fbf24 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/SelfReferentialGenericModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/SelfReferentialGenericModel.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import java.util.Objects; + public final class SelfReferentialGenericModel { private T t; private V v; @@ -65,13 +67,13 @@ public boolean equals(final Object o) { SelfReferentialGenericModel that = (SelfReferentialGenericModel) o; - if (t != null ? !t.equals(that.t) : that.t != null) { + if (!Objects.equals(t, that.t)) { return false; } - if (v != null ? !v.equals(that.v) : that.v != null) { + if (!Objects.equals(v, that.v)) { return false; } - if (child != null ? 
!child.equals(that.child) : that.child != null) { + if (!Objects.equals(child, that.child)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ShapeHolderCircleModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ShapeHolderCircleModel.java new file mode 100644 index 00000000000..ca2dd40afa6 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ShapeHolderCircleModel.java @@ -0,0 +1,33 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +public class ShapeHolderCircleModel extends ShapeHolderModel { + + public ShapeHolderCircleModel() { + } + + public ShapeHolderCircleModel(final ShapeModelCircle shape) { + super(shape); + } + + @Override + public ShapeModelCircle getShape() { + return (ShapeModelCircle) super.getShape(); + } +} + diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/ShapeHolderModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/ShapeHolderModel.java index 7e59f684a89..2ccd6c4f477 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/ShapeHolderModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/ShapeHolderModel.java @@ -16,7 +16,7 @@ package org.bson.codecs.pojo.entities; -public final class ShapeHolderModel { +public class ShapeHolderModel { private ShapeModelAbstract shape; diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleEnumModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleEnumModel.java index 394f2a72b24..3d65dc1ea8c 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleEnumModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleEnumModel.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import org.bson.codecs.SimpleEnum; + public final class SimpleEnumModel { private SimpleEnum myEnum; diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleIdImmutableModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleIdImmutableModel.java new file mode 100644 index 00000000000..15c34c1a16c --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleIdImmutableModel.java @@ -0,0 +1,91 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import org.bson.codecs.pojo.annotations.BsonCreator; +import org.bson.codecs.pojo.annotations.BsonProperty; +import org.bson.types.ObjectId; + +import java.util.Objects; + +public class SimpleIdImmutableModel { + private final ObjectId id; + private final Integer integerField; + private final String stringField; + + public SimpleIdImmutableModel(final Integer integerField, final String stringField){ + this(null, integerField, stringField); + } + + @BsonCreator + public SimpleIdImmutableModel(@BsonProperty("id") final ObjectId id, + @BsonProperty("integerField") final Integer integerField, + @BsonProperty("stringField") final String stringField) { + this.id = id; + this.integerField = integerField; + this.stringField = stringField; + } + + public ObjectId getId() { + return id; + } + + public Integer getIntegerField() { + return integerField; + } + + public String getStringField() { + return stringField; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + SimpleIdImmutableModel that = (SimpleIdImmutableModel) o; + + if (!Objects.equals(id, that.id)) { + return false; + } + if (!Objects.equals(integerField, that.integerField)) { + return false; + } + return Objects.equals(stringField, that.stringField); + } + + @Override + public int hashCode() { + int result = id != null ? id.hashCode() : 0; + result = 31 * result + (integerField != null ? 
integerField.hashCode() : 0); + result = 31 * result + (stringField != null ? stringField.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return "SimpleIdImmutableModel{" + + "id=" + id + + ", integerField=" + integerField + + ", stringField='" + stringField + '\'' + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleIdModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleIdModel.java new file mode 100644 index 00000000000..65e9aa1fb90 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleIdModel.java @@ -0,0 +1,101 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities; + +import org.bson.types.ObjectId; + +import java.util.Objects; + +public class SimpleIdModel { + private ObjectId id; + private Integer integerField; + private String stringField; + + public SimpleIdModel(){ + } + + public SimpleIdModel(final Integer integerField, final String stringField) { + this(null, integerField, stringField); + } + + public SimpleIdModel(final ObjectId objectId, final Integer integerField, final String stringField) { + this.id = objectId; + this.integerField = integerField; + this.stringField = stringField; + } + + public ObjectId getId() { + return id; + } + + public void setId(final ObjectId id) { + this.id = id; + } + + public Integer getIntegerField() { + return integerField; + } + + public void setIntegerField(final Integer integerField) { + this.integerField = integerField; + } + + public String getStringField() { + return stringField; + } + + public void setStringField(final String stringField) { + this.stringField = stringField; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + SimpleIdModel that = (SimpleIdModel) o; + + if (!Objects.equals(id, that.id)) { + return false; + } + if (!Objects.equals(integerField, that.integerField)) { + return false; + } + return Objects.equals(stringField, that.stringField); + } + + @Override + public int hashCode() { + int result = id != null ? id.hashCode() : 0; + result = 31 * result + (integerField != null ? integerField.hashCode() : 0); + result = 31 * result + (stringField != null ? 
stringField.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return "SimpleIdModel{" + + "id=" + id + + ", integerField=" + integerField + + ", stringField='" + stringField + '\'' + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleModel.java index 91e43e1e415..7566066eef5 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleModel.java @@ -16,7 +16,7 @@ package org.bson.codecs.pojo.entities; -public final class SimpleModel { +public final class SimpleModel implements Comparable { private Integer integerField; private String stringField; @@ -79,4 +79,10 @@ public String toString() { + ", stringField='" + stringField + "'" + "}"; } + + @Override + public int compareTo(final SimpleModel o) { + int integerFieldCompareResult = this.integerField.compareTo(o.integerField); + return integerFieldCompareResult == 0 ? this.stringField.compareTo(o.stringField) : integerFieldCompareResult; + } } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleWithStaticModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleWithStaticModel.java new file mode 100644 index 00000000000..81eee44c74e --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/SimpleWithStaticModel.java @@ -0,0 +1,85 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import java.util.Objects; + +public final class SimpleWithStaticModel { + private Integer integerField; + private String stringField; + + public SimpleWithStaticModel(){ + } + + public SimpleWithStaticModel(final Integer integerField, final String stringField) { + this.integerField = integerField; + this.stringField = stringField; + } + + public Integer getIntegerField() { + return integerField; + } + + public void setIntegerField(final Integer integerField) { + this.integerField = integerField; + } + + public String getStringField() { + return stringField; + } + + public static void getStringField$Annotations() { + // Mimics the static kotlin synthetic annotations field + } + + public static void setIntegerField$Annotations() { + // Mimics the static kotlin synthetic annotations field + } + + public void getStringField$Alternative() { + // Non static void getter field + } + + public void setStringField(final String stringField) { + this.stringField = stringField; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SimpleWithStaticModel that = (SimpleWithStaticModel) o; + return Objects.equals(integerField, that.integerField) && Objects.equals(stringField, that.stringField); + } + + @Override + public int hashCode() { + return Objects.hash(integerField, stringField); + } + + @Override + public String toString() { + return "SimpleWithStaticModel{" + + "integerField=" + integerField + + ", stringField='" + stringField + "'" + + "}"; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/TreeWithIdModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/TreeWithIdModel.java new file mode 100644 index 00000000000..01937a5a3f2 --- /dev/null +++ 
b/bson/src/test/unit/org/bson/codecs/pojo/entities/TreeWithIdModel.java @@ -0,0 +1,120 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities; + +import org.bson.types.ObjectId; + +import java.util.Objects; + +public class TreeWithIdModel { + private ObjectId id; + private String level; + private TreeWithIdModel left; + private TreeWithIdModel right; + + public TreeWithIdModel() { + } + + public TreeWithIdModel(final String level) { + this(null, level, null, null); + } + + public TreeWithIdModel(final String level, final TreeWithIdModel left, final TreeWithIdModel right) { + this(null, level, left, right); + } + + public TreeWithIdModel(final ObjectId id, final String level, final TreeWithIdModel left, final TreeWithIdModel right) { + this.id = id; + this.level = level; + this.left = left; + this.right = right; + } + + public ObjectId getId() { + return id; + } + + public void setId(final ObjectId id) { + this.id = id; + } + + public String getLevel() { + return level; + } + + public void setLevel(final String level) { + this.level = level; + } + + public TreeWithIdModel getLeft() { + return left; + } + + public void setLeft(final TreeWithIdModel left) { + this.left = left; + } + + public TreeWithIdModel getRight() { + return right; + } + + public void setRight(final TreeWithIdModel right) { + this.right = right; + } + + @Override + public boolean equals(final Object o) { 
+ if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TreeWithIdModel that = (TreeWithIdModel) o; + + if (!Objects.equals(id, that.id)) { + return false; + } + if (!Objects.equals(level, that.level)) { + return false; + } + if (!Objects.equals(left, that.left)) { + return false; + } + return Objects.equals(right, that.right); + } + + @Override + public int hashCode() { + int result = id != null ? id.hashCode() : 0; + result = 31 * result + (level != null ? level.hashCode() : 0); + result = 31 * result + (left != null ? left.hashCode() : 0); + result = 31 * result + (right != null ? right.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return "TreeWithIdModel{" + + "id=" + id + + ", level=" + level + + ", left=" + left + + ", right=" + right + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/UpperBoundsConcreteModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/UpperBoundsConcreteModel.java index b0b6964334c..b68a05d4507 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/UpperBoundsConcreteModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/UpperBoundsConcreteModel.java @@ -19,7 +19,6 @@ public final class UpperBoundsConcreteModel extends UpperBoundsModel { public UpperBoundsConcreteModel() { - super(); } public UpperBoundsConcreteModel(final Long myGenericField) { diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/UpperBoundsModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/UpperBoundsModel.java index 31cfe6acc53..2161dfe5d0c 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/UpperBoundsModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/UpperBoundsModel.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities; +import java.util.Objects; + public class UpperBoundsModel { private T myGenericField; @@ -45,7 +47,7 @@ public boolean equals(final Object o) { 
UpperBoundsModel that = (UpperBoundsModel) o; - if (myGenericField != null ? !myGenericField.equals(that.myGenericField) : that.myGenericField != null) { + if (!Objects.equals(myGenericField, that.myGenericField)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationBsonRepresentation.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationBsonRepresentation.java new file mode 100644 index 00000000000..58df622fc57 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationBsonRepresentation.java @@ -0,0 +1,101 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.BsonType; +import org.bson.codecs.pojo.annotations.BsonRepresentation; +import org.bson.types.ObjectId; + +import java.util.Objects; + +public class AnnotationBsonRepresentation { + private String id; + private String friendId; + + @BsonRepresentation(BsonType.OBJECT_ID) + private String parentId; + + private int age; + + public AnnotationBsonRepresentation() {} + + public AnnotationBsonRepresentation(final int age) { + id = new ObjectId("111111111111111111111111").toHexString(); + friendId = ""; + parentId = ""; + this.age = age; + } + + public AnnotationBsonRepresentation(final String id, final String friendId, final String parentId, final int age) { + this.id = id; + this.friendId = friendId; + this.parentId = parentId; + this.age = age; + } + + @BsonRepresentation(BsonType.OBJECT_ID) + public String getId() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + public String getFriendId() { + return friendId; + } + + @BsonRepresentation(BsonType.OBJECT_ID) + public void setFriendId(final String friendId) { + this.friendId = friendId; + } + + public String getParentId() { + return parentId; + } + + public void setParentId(final String parentId) { + this.parentId = parentId; + } + + public int getAge() { + return age; + } + + public void setAge(final int age) { + this.age = age; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + AnnotationBsonRepresentation that = (AnnotationBsonRepresentation) o; + return age == that.age && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id, age); + } + +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationCollision.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationCollision.java new file mode 100644 
index 00000000000..8d6a2856e39 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationCollision.java @@ -0,0 +1,44 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.annotations.BsonProperty; + +public final class AnnotationCollision { + + public String id; + + @BsonProperty("color") + private String color; + + public String getId() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + @BsonProperty("theme") + public String getColor() { + return color; + } + + public void setColor(final String color) { + this.color = color; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationWithObjectIdModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationWithObjectIdModel.java new file mode 100644 index 00000000000..8cddf1e6160 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationWithObjectIdModel.java @@ -0,0 +1,101 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.annotations.BsonDiscriminator; +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.codecs.pojo.annotations.BsonProperty; +import org.bson.types.ObjectId; + +@BsonDiscriminator(value = "MyAnnotationModel", key = "_cls") +public final class AnnotationWithObjectIdModel { + + @BsonId() + public ObjectId customId; + + @BsonProperty(useDiscriminator = false) + public AnnotationWithObjectIdModel child; + + @BsonProperty("renamed") + public AnnotationWithObjectIdModel alternative; + + public AnnotationWithObjectIdModel() { + } + + public AnnotationWithObjectIdModel(final ObjectId customId, final AnnotationWithObjectIdModel child, + final AnnotationWithObjectIdModel alternative) { + this.customId = customId; + this.child = child; + this.alternative = alternative; + } + + public ObjectId getCustomId() { + return customId; + } + + public void setCustomId(final ObjectId customId) { + this.customId = customId; + } + + public AnnotationWithObjectIdModel getChild() { + return child; + } + + public void setChild(final AnnotationWithObjectIdModel child) { + this.child = child; + } + + public AnnotationWithObjectIdModel getAlternative() { + return alternative; + } + + public void setAlternative(final AnnotationWithObjectIdModel alternative) { + this.alternative = alternative; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + 
AnnotationWithObjectIdModel that = (AnnotationWithObjectIdModel) o; + + if (getCustomId() != null ? !getCustomId().equals(that.getCustomId()) : that.getCustomId() != null) { + return false; + } + if (getChild() != null ? !getChild().equals(that.getChild()) : that.getChild() != null) { + return false; + } + if (getAlternative() != null ? !getAlternative().equals(that.getAlternative()) : that.getAlternative() != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = getCustomId() != null ? getCustomId().hashCode() : 0; + result = 31 * result + (getChild() != null ? getChild().hashCode() : 0); + result = 31 * result + (getAlternative() != null ? getAlternative().hashCode() : 0); + return result; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationWriteCollision.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationWriteCollision.java new file mode 100644 index 00000000000..1d826cc5689 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/AnnotationWriteCollision.java @@ -0,0 +1,45 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.annotations.BsonProperty; + +public final class AnnotationWriteCollision { + + public String id; + + @BsonProperty("color") + private String color; + + + public String getId() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + public String getColor() { + return color; + } + + @BsonProperty("theme") + public void setColor(final String color) { + this.color = color; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsInvalidModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsInvalidModel.java new file mode 100644 index 00000000000..d456a6c2333 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsInvalidModel.java @@ -0,0 +1,88 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.annotations.BsonExtraElements; + +import java.util.Objects; + +public class BsonExtraElementsInvalidModel { + + private Integer integerField; + private String stringField; + @BsonExtraElements + private Integer extraElements; + + public BsonExtraElementsInvalidModel(){ + } + + public BsonExtraElementsInvalidModel(final Integer integerField, final String stringField, final Integer extraElements) { + this.integerField = integerField; + this.stringField = stringField; + this.extraElements = extraElements; + } + + public Integer getIntegerField() { + return integerField; + } + + public BsonExtraElementsInvalidModel setIntegerField(final Integer integerField) { + this.integerField = integerField; + return this; + } + + public String getStringField() { + return stringField; + } + + public BsonExtraElementsInvalidModel setStringField(final String stringField) { + this.stringField = stringField; + return this; + } + + public Integer getExtraElements() { + return extraElements; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BsonExtraElementsInvalidModel that = (BsonExtraElementsInvalidModel) o; + return Objects.equals(integerField, that.integerField) + && Objects.equals(stringField, that.stringField) + && Objects.equals(extraElements, that.extraElements); + } + + @Override + public int hashCode() { + return Objects.hash(integerField, stringField, extraElements); + } + + @Override + public String toString() { + return "BsonExtraElementsModel{" + + "integerField=" + integerField + + ", stringField='" + stringField + '\'' + + ", extraElements=" + extraElements + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsMapModel.java 
b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsMapModel.java new file mode 100644 index 00000000000..678f021fa2f --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsMapModel.java @@ -0,0 +1,98 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.annotations.BsonExtraElements; + +import java.util.Map; +import java.util.Objects; + +public class BsonExtraElementsMapModel { + + private Integer integerField; + private String stringField; + @BsonExtraElements + private Map extraElements; + + public BsonExtraElementsMapModel(){ + } + + public BsonExtraElementsMapModel(final Integer integerField, final String stringField, final Map extraElements) { + this.integerField = integerField; + this.stringField = stringField; + this.extraElements = extraElements; + } + + public Integer getIntegerField() { + return integerField; + } + + public BsonExtraElementsMapModel setIntegerField(final Integer integerField) { + this.integerField = integerField; + return this; + } + + public String getStringField() { + return stringField; + } + + public BsonExtraElementsMapModel setStringField(final String stringField) { + this.stringField = stringField; + return this; + } + + public Map getExtraElements() { + return extraElements; + } + + public BsonExtraElementsMapModel 
setExtraElement(final String key, final String value) { + extraElements.put(key, value); + return this; + } + + public Object get(final String key) { + return extraElements.get(key); + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BsonExtraElementsMapModel that = (BsonExtraElementsMapModel) o; + return Objects.equals(integerField, that.integerField) + && Objects.equals(stringField, that.stringField) + && Objects.equals(extraElements, that.extraElements); + } + + @Override + public int hashCode() { + return Objects.hash(integerField, stringField, extraElements); + } + + @Override + public String toString() { + return "BsonExtraElementsModel{" + + "integerField=" + integerField + + ", stringField='" + stringField + '\'' + + ", extraElements=" + extraElements + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsModel.java new file mode 100644 index 00000000000..2d1b8b1f554 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonExtraElementsModel.java @@ -0,0 +1,99 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.BsonDocument; +import org.bson.BsonValue; +import org.bson.codecs.pojo.annotations.BsonExtraElements; + +import java.util.Objects; + +public class BsonExtraElementsModel { + + private Integer integerField; + private String stringField; + @BsonExtraElements + private BsonDocument extraElements; + + public BsonExtraElementsModel(){ + } + + public BsonExtraElementsModel(final Integer integerField, final String stringField, final BsonDocument extraElements) { + this.integerField = integerField; + this.stringField = stringField; + this.extraElements = extraElements; + } + + public Integer getIntegerField() { + return integerField; + } + + public BsonExtraElementsModel setIntegerField(final Integer integerField) { + this.integerField = integerField; + return this; + } + + public String getStringField() { + return stringField; + } + + public BsonExtraElementsModel setStringField(final String stringField) { + this.stringField = stringField; + return this; + } + + public BsonDocument getExtraElements() { + return extraElements; + } + + public BsonExtraElementsModel setExtraElement(final String key, final BsonValue value) { + extraElements.append(key, value); + return this; + } + + public Object get(final String key) { + return extraElements.get(key); + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BsonExtraElementsModel that = (BsonExtraElementsModel) o; + return Objects.equals(integerField, that.integerField) + && Objects.equals(stringField, that.stringField) + && Objects.equals(extraElements, that.extraElements); + } + + @Override + public int hashCode() { + return Objects.hash(integerField, stringField, extraElements); + } + + @Override + public String toString() { + return "BsonExtraElementsModel{" + + "integerField=" + integerField + + ", stringField='" + stringField + '\'' 
+ + ", extraElements=" + extraElements + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreDuplicatePropertyMultipleTypes.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreDuplicatePropertyMultipleTypes.java new file mode 100644 index 00000000000..89b6c16d934 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreDuplicatePropertyMultipleTypes.java @@ -0,0 +1,84 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.annotations.BsonCreator; +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.codecs.pojo.annotations.BsonProperty; + +import java.util.Objects; + +public class BsonIgnoreDuplicatePropertyMultipleTypes { + private final String stringField; + private String altStringField; + + @BsonCreator + public BsonIgnoreDuplicatePropertyMultipleTypes(@BsonProperty("stringField") final String stringField) { + this.stringField = stringField; + } + + public String getStringField() { + return stringField; + } + + @BsonIgnore + public String getAltStringField() { + return altStringField; + } + + @BsonIgnore + public void setAltStringField(final String altStringField) { + this.altStringField = altStringField; + } + + @BsonIgnore + public void setAltStringField(final Integer i) { + this.altStringField = i.toString(); + } + + @Override + public String toString() { + return "BsonIgnoreDuplicatePropertyMultipleTypes{" + + "stringField='" + stringField + '\'' + + ", altStringField='" + altStringField + '\'' + + '}'; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + BsonIgnoreDuplicatePropertyMultipleTypes that = (BsonIgnoreDuplicatePropertyMultipleTypes) o; + + if (!Objects.equals(stringField, that.stringField)) { + return false; + } + return Objects.equals(altStringField, that.altStringField); + } + + @Override + public int hashCode() { + int result = stringField != null ? stringField.hashCode() : 0; + result = 31 * result + (altStringField != null ? 
altStringField.hashCode() : 0); + return result; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreInvalidMapModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreInvalidMapModel.java index 9a964811b48..33f7601b28f 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreInvalidMapModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreInvalidMapModel.java @@ -19,6 +19,7 @@ import org.bson.codecs.pojo.annotations.BsonIgnore; import java.util.Map; +import java.util.Objects; public class BsonIgnoreInvalidMapModel { @@ -61,10 +62,10 @@ public boolean equals(final Object o) { BsonIgnoreInvalidMapModel that = (BsonIgnoreInvalidMapModel) o; - if (stringField != null ? !stringField.equals(that.stringField) : that.stringField != null) { + if (!Objects.equals(stringField, that.stringField)) { return false; } - return invalidMap != null ? invalidMap.equals(that.invalidMap) : that.invalidMap == null; + return Objects.equals(invalidMap, that.invalidMap); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreSyntheticProperty.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreSyntheticProperty.java new file mode 100644 index 00000000000..0d6544f64fa --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonIgnoreSyntheticProperty.java @@ -0,0 +1,67 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.annotations.BsonCreator; +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.codecs.pojo.annotations.BsonProperty; + +import java.util.Objects; + +public class BsonIgnoreSyntheticProperty { + private final String stringField; + + @BsonCreator + public BsonIgnoreSyntheticProperty(@BsonProperty("stringField") final String stringField) { + this.stringField = stringField; + } + + public String getStringField() { + return stringField; + } + + @BsonIgnore + public Object getSyntheticProperty() { + return null; + } + + @Override + public String toString() { + return "BsonIgnoreSyntheticProperty{" + + "stringField='" + stringField + '\'' + + '}'; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + BsonIgnoreSyntheticProperty that = (BsonIgnoreSyntheticProperty) o; + + return Objects.equals(stringField, that.stringField); + } + + @Override + public int hashCode() { + return stringField != null ? stringField.hashCode() : 0; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonRepresentationModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonRepresentationModel.java new file mode 100644 index 00000000000..c9a52908167 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/BsonRepresentationModel.java @@ -0,0 +1,76 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.BsonType; +import org.bson.codecs.pojo.annotations.BsonRepresentation; +import org.bson.types.ObjectId; + +import java.util.Objects; + +public class BsonRepresentationModel { + @BsonRepresentation(BsonType.OBJECT_ID) + private String id; + + private int age; + + public BsonRepresentationModel() {} + + public BsonRepresentationModel(final int age) { + id = new ObjectId("111111111111111111111111").toHexString(); + this.age = age; + } + + public BsonRepresentationModel(final String id, final int age) { + this.id = id; + this.age = age; + } + + public String getId() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + public int getAge() { + return age; + } + + public void setAge(final int age) { + this.age = age; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BsonRepresentationModel that = (BsonRepresentationModel) o; + return age == that.age && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id, age); + } + +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterImmutableModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterImmutableModel.java index 2b9ff3cfc7c..31bcd3d62e9 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterImmutableModel.java +++ 
b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterImmutableModel.java @@ -18,13 +18,14 @@ import java.util.Collections; import java.util.List; +import java.util.Objects; public class CollectionsGetterImmutableModel { private final List listField; public CollectionsGetterImmutableModel() { - this(Collections.emptyList()); + this(Collections.emptyList()); } public CollectionsGetterImmutableModel(final List listField) { @@ -46,7 +47,7 @@ public boolean equals(final Object o) { CollectionsGetterImmutableModel that = (CollectionsGetterImmutableModel) o; - return listField != null ? listField.equals(that.listField) : that.listField == null; + return Objects.equals(listField, that.listField); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterMutableModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterMutableModel.java index ed320f74f99..bc928b37c34 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterMutableModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterMutableModel.java @@ -18,13 +18,14 @@ import java.util.ArrayList; import java.util.List; +import java.util.Objects; public class CollectionsGetterMutableModel { private final List listField; public CollectionsGetterMutableModel() { - this(new ArrayList()); + this(new ArrayList<>()); } public CollectionsGetterMutableModel(final List listField) { @@ -45,7 +46,7 @@ public boolean equals(final Object o) { } CollectionsGetterMutableModel that = (CollectionsGetterMutableModel) o; - return listField != null ? 
listField.equals(that.listField) : that.listField == null; + return Objects.equals(listField, that.listField); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterNonEmptyModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterNonEmptyModel.java index 4a8bee9aa93..87f832124e5 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterNonEmptyModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterNonEmptyModel.java @@ -17,6 +17,7 @@ package org.bson.codecs.pojo.entities.conventions; import java.util.List; +import java.util.Objects; import static java.util.Arrays.asList; @@ -47,7 +48,7 @@ public boolean equals(final Object o) { CollectionsGetterNonEmptyModel that = (CollectionsGetterNonEmptyModel) o; - return listField != null ? listField.equals(that.listField) : that.listField == null; + return Objects.equals(listField, that.listField); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterNullModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterNullModel.java index f827c0a37bd..df670c4570a 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterNullModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CollectionsGetterNullModel.java @@ -17,6 +17,7 @@ package org.bson.codecs.pojo.entities.conventions; import java.util.List; +import java.util.Objects; public class CollectionsGetterNullModel { @@ -44,7 +45,7 @@ public boolean equals(final Object o) { } CollectionsGetterNullModel that = (CollectionsGetterNullModel) o; - return listField != null ? 
listField.equals(that.listField) : that.listField == null; + return Objects.equals(listField, that.listField); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorAllFinalFieldsModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorAllFinalFieldsModel.java index d7c3215f943..07d76c6291c 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorAllFinalFieldsModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorAllFinalFieldsModel.java @@ -21,6 +21,8 @@ import org.bson.codecs.pojo.annotations.BsonId; import org.bson.codecs.pojo.annotations.BsonProperty; +import java.util.Objects; + @BsonDiscriminator public final class CreatorAllFinalFieldsModel { private final String pid; @@ -60,13 +62,13 @@ public boolean equals(final Object o) { CreatorAllFinalFieldsModel that = (CreatorAllFinalFieldsModel) o; - if (pid != null ? !pid.equals(that.pid) : that.pid != null) { + if (!Objects.equals(pid, that.pid)) { return false; } - if (fName != null ? !fName.equals(that.fName) : that.fName != null) { + if (!Objects.equals(fName, that.fName)) { return false; } - if (lName != null ? 
!lName.equals(that.lName) : that.lName != null) { + if (!Objects.equals(lName, that.lName)) { return false; } diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorConstructorIdModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorConstructorIdModel.java index aeb84821e18..d3520e9f02f 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorConstructorIdModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorConstructorIdModel.java @@ -29,8 +29,8 @@ public class CreatorConstructorIdModel { public long longField; @BsonCreator - public CreatorConstructorIdModel(final @BsonId String id, @BsonProperty("integersField") final List integerField, - @BsonProperty("longField") final long longField) { + public CreatorConstructorIdModel(@BsonId final String id, @BsonProperty("integersField") final List integerField, + @BsonProperty("longField") final long longField) { this.id = id; this.integersField = integerField; this.longField = longField; diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorConstructorNoKnownIdModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorConstructorNoKnownIdModel.java new file mode 100644 index 00000000000..2027aefd605 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorConstructorNoKnownIdModel.java @@ -0,0 +1,69 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.annotations.BsonCreator; +import org.bson.codecs.pojo.annotations.BsonId; +import org.bson.codecs.pojo.annotations.BsonProperty; + +import java.util.Objects; + +public class CreatorConstructorNoKnownIdModel { + private final String stringField; + private final long longField; + + @BsonCreator + public CreatorConstructorNoKnownIdModel( + @BsonId final String stringField, + @BsonProperty("longField") final long longField) { + this.stringField = stringField; + this.longField = longField; + } + + public String getStringField() { + return stringField; + } + + public long getLongField() { + return longField; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final CreatorConstructorNoKnownIdModel that = (CreatorConstructorNoKnownIdModel) o; + return longField == that.longField && Objects.equals(stringField, that.stringField); + } + + @Override + public int hashCode() { + return Objects.hash(stringField, longField); + } + + @Override + public String toString() { + return "CreatorConstructorNoKnownIdModel{" + + ", stringField='" + stringField + '\'' + + ", longField=" + longField + + '}'; + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorInSuperClassModelImpl.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorInSuperClassModelImpl.java index 72461cdabd5..12ea066b6c7 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorInSuperClassModelImpl.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorInSuperClassModelImpl.java @@ -16,6 +16,8 @@ package org.bson.codecs.pojo.entities.conventions; +import java.util.Objects; + public class 
CreatorInSuperClassModelImpl extends CreatorInSuperClassModel { private final String propertyA; private final String propertyB; @@ -46,10 +48,10 @@ public boolean equals(final Object o) { CreatorInSuperClassModelImpl that = (CreatorInSuperClassModelImpl) o; - if (propertyA != null ? !propertyA.equals(that.propertyA) : that.propertyA != null) { + if (!Objects.equals(propertyA, that.propertyA)) { return false; } - return propertyB != null ? propertyB.equals(that.propertyB) : that.propertyB == null; + return Objects.equals(propertyB, that.propertyB); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/InterfaceModelBInstanceCreatorConvention.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/InterfaceModelBInstanceCreatorConvention.java new file mode 100644 index 00000000000..88781c40513 --- /dev/null +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/InterfaceModelBInstanceCreatorConvention.java @@ -0,0 +1,54 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.codecs.pojo.entities.conventions; + +import org.bson.codecs.pojo.ClassModelBuilder; +import org.bson.codecs.pojo.Convention; +import org.bson.codecs.pojo.InstanceCreator; +import org.bson.codecs.pojo.PropertyModel; +import org.bson.codecs.pojo.entities.InterfaceModelB; +import org.bson.codecs.pojo.entities.InterfaceModelImpl; + +public class InterfaceModelBInstanceCreatorConvention implements Convention { + @Override + @SuppressWarnings("unchecked") + public void apply(final ClassModelBuilder classModelBuilder) { + if (classModelBuilder.getType().equals(InterfaceModelB.class)) { + // Simulate a custom implementation of InstanceCreator factory + // (This one can be generated automatically, but, a real use case can have an advanced reflection based + // solution that the POJO Codec doesn't support out of the box) + ((ClassModelBuilder) classModelBuilder).instanceCreatorFactory(() -> { + InterfaceModelB interfaceModelB = new InterfaceModelImpl(); + return new InstanceCreator() { + @Override + public void set(final S value, final PropertyModel propertyModel) { + if (propertyModel.getName().equals("propertyA")) { + interfaceModelB.setPropertyA((String) value); + } else if (propertyModel.getName().equals("propertyB")) { + interfaceModelB.setPropertyB((String) value); + } + } + + @Override + public InterfaceModelB getInstance() { + return interfaceModelB; + } + }; + }); + } + } +} diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/InterfaceModelImplA.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/InterfaceModelImplA.java index 3b6d5c7f4d0..553ac977e55 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/InterfaceModelImplA.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/InterfaceModelImplA.java @@ -18,6 +18,8 @@ import org.bson.codecs.pojo.annotations.BsonDiscriminator; +import java.util.Objects; + @BsonDiscriminator public class InterfaceModelImplA implements 
InterfaceModel { private boolean value; @@ -55,7 +57,7 @@ public boolean equals(final Object o) { if (value != that.value) { return false; } - return name != null ? name.equals(that.name) : that.name == null; + return Objects.equals(name, that.name); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterImmutableModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterImmutableModel.java index 440f2109798..6e17a1778c7 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterImmutableModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterImmutableModel.java @@ -18,13 +18,14 @@ import java.util.Collections; import java.util.Map; +import java.util.Objects; public class MapGetterImmutableModel { private final Map mapField; public MapGetterImmutableModel() { - this(Collections.emptyMap()); + this(Collections.emptyMap()); } public MapGetterImmutableModel(final Map mapField) { @@ -46,7 +47,7 @@ public boolean equals(final Object o) { MapGetterImmutableModel that = (MapGetterImmutableModel) o; - return mapField != null ? 
mapField.equals(that.mapField) : that.mapField == null; + return Objects.equals(mapField, that.mapField); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterMutableModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterMutableModel.java index 1f4eb1a7391..61366762f14 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterMutableModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterMutableModel.java @@ -18,13 +18,14 @@ import java.util.HashMap; import java.util.Map; +import java.util.Objects; public class MapGetterMutableModel { private final Map mapField; public MapGetterMutableModel() { - this.mapField = new HashMap(); + this.mapField = new HashMap<>(); } public MapGetterMutableModel(final Map mapField) { @@ -45,7 +46,7 @@ public boolean equals(final Object o) { } MapGetterMutableModel that = (MapGetterMutableModel) o; - return mapField != null ? mapField.equals(that.mapField) : that.mapField == null; + return Objects.equals(mapField, that.mapField); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterNonEmptyModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterNonEmptyModel.java index e4d5aa712bc..8c1e77022b4 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterNonEmptyModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterNonEmptyModel.java @@ -18,6 +18,7 @@ import java.util.Collections; import java.util.Map; +import java.util.Objects; public class MapGetterNonEmptyModel { @@ -46,7 +47,7 @@ public boolean equals(final Object o) { MapGetterNonEmptyModel that = (MapGetterNonEmptyModel) o; - return mapField != null ? 
mapField.equals(that.mapField) : that.mapField == null; + return Objects.equals(mapField, that.mapField); } @Override diff --git a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterNullModel.java b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterNullModel.java index c1f60b07d37..f67a621af11 100644 --- a/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterNullModel.java +++ b/bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/MapGetterNullModel.java @@ -17,6 +17,7 @@ package org.bson.codecs.pojo.entities.conventions; import java.util.Map; +import java.util.Objects; public class MapGetterNullModel { @@ -45,7 +46,7 @@ public boolean equals(final Object o) { MapGetterNullModel that = (MapGetterNullModel) o; - return mapField != null ? mapField.equals(that.mapField) : that.mapField == null; + return Objects.equals(mapField, that.mapField); } @Override diff --git a/bson/src/test/unit/org/bson/internal/BsonUtilTest.java b/bson/src/test/unit/org/bson/internal/BsonUtilTest.java new file mode 100644 index 00000000000..f0ed7c24b26 --- /dev/null +++ b/bson/src/test/unit/org/bson/internal/BsonUtilTest.java @@ -0,0 +1,132 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.bson.internal; + +import org.bson.BsonArray; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonDocumentWrapper; +import org.bson.BsonJavaScriptWithScope; +import org.bson.BsonValue; +import org.bson.RawBsonArray; +import org.bson.RawBsonDocument; +import org.bson.conversions.Bson; +import org.junit.jupiter.api.Test; + +import java.nio.charset.StandardCharsets; +import java.util.AbstractMap.SimpleImmutableEntry; +import java.util.Map.Entry; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static org.bson.assertions.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; + +final class BsonUtilTest { + @Test + public void mutableDeepCopy() { + Entry originalBsonBinaryEntry = new SimpleImmutableEntry<>( + "bsonBinary", + new BsonBinary("bsonBinary".getBytes(StandardCharsets.UTF_8)) + ); + Entry originalBsonJavaScriptWithScopeEntry = new SimpleImmutableEntry<>( + "bsonJavaScriptWithScopeEntry", + new BsonJavaScriptWithScope("\"use strict\";", new BsonDocument()) + ); + Entry originalRawBsonDocumentEntry = new SimpleImmutableEntry<>( + "rawBsonDocument", + RawBsonDocument.parse("{rawBsonDocument: 'rawBsonDocument_value'}") + ); + Entry> originalBsonDocumentWrapperEntry = new SimpleImmutableEntry<>( + "bsonDocumentWrapper", + new BsonDocumentWrapper<>(originalRawBsonDocumentEntry.getValue(), Bson.DEFAULT_CODEC_REGISTRY.get(RawBsonDocument.class)) + ); + Entry originalBsonDocumentEntry = new SimpleImmutableEntry<>( + "bsonDocument", + new BsonDocument() + .append(originalBsonBinaryEntry.getKey(), originalBsonBinaryEntry.getValue()) + .append(originalBsonJavaScriptWithScopeEntry.getKey(), originalBsonJavaScriptWithScopeEntry.getValue()) + .append(originalRawBsonDocumentEntry.getKey(), originalRawBsonDocumentEntry.getValue()) + .append(originalBsonDocumentWrapperEntry.getKey(), 
originalBsonDocumentWrapperEntry.getValue()) + ); + Entry originalBsonArrayEntry = new SimpleImmutableEntry<>( + "bsonArray", + new BsonArray(singletonList(new BsonArray())) + ); + Entry originalRawBsonArrayEntry = new SimpleImmutableEntry<>( + "rawBsonArray", + rawBsonArray( + originalBsonBinaryEntry.getValue(), + originalBsonJavaScriptWithScopeEntry.getValue(), + originalRawBsonDocumentEntry.getValue(), + originalBsonDocumentWrapperEntry.getValue(), + originalBsonDocumentEntry.getValue(), + originalBsonArrayEntry.getValue()) + ); + BsonDocument original = new BsonDocument() + .append(originalBsonBinaryEntry.getKey(), originalBsonBinaryEntry.getValue()) + .append(originalBsonJavaScriptWithScopeEntry.getKey(), originalBsonJavaScriptWithScopeEntry.getValue()) + .append(originalRawBsonDocumentEntry.getKey(), originalRawBsonDocumentEntry.getValue()) + .append(originalBsonDocumentWrapperEntry.getKey(), originalBsonDocumentWrapperEntry.getValue()) + .append(originalBsonDocumentEntry.getKey(), originalBsonDocumentEntry.getValue()) + .append(originalBsonArrayEntry.getKey(), originalBsonArrayEntry.getValue()) + .append(originalRawBsonArrayEntry.getKey(), originalRawBsonArrayEntry.getValue()); + BsonDocument copy = BsonUtil.mutableDeepCopy(original); + assertEqualNotSameAndMutable(original, copy); + original.forEach((key, value) -> assertEqualNotSameAndMutable(value, copy.get(key))); + // check nested document + String nestedDocumentKey = originalBsonDocumentEntry.getKey(); + BsonDocument originalNestedDocument = original.getDocument(nestedDocumentKey); + BsonDocument copyNestedDocument = copy.getDocument(nestedDocumentKey); + assertEqualNotSameAndMutable(originalNestedDocument, copyNestedDocument); + originalNestedDocument.forEach((key, value) -> assertEqualNotSameAndMutable(value, copyNestedDocument.get(key))); + // check nested array + String nestedArrayKey = originalRawBsonArrayEntry.getKey(); + BsonArray originalNestedArray = original.getArray(nestedArrayKey); + 
BsonArray copyNestedArray = copy.getArray(nestedArrayKey); + assertEqualNotSameAndMutable(originalNestedArray, copyNestedArray); + for (int i = 0; i < originalNestedArray.size(); i++) { + assertEqualNotSameAndMutable(originalNestedArray.get(i), copyNestedArray.get(i)); + } + } + + private static RawBsonArray rawBsonArray(final BsonValue... elements) { + return (RawBsonArray) new RawBsonDocument( + new BsonDocument("a", new BsonArray(asList(elements))), Bson.DEFAULT_CODEC_REGISTRY.get(BsonDocument.class)) + .get("a"); + } + + private static void assertEqualNotSameAndMutable(final Object expected, final Object actual) { + assertEquals(expected, actual); + assertNotSame(expected, actual); + Class actualClass = actual.getClass(); + if (expected instanceof BsonDocument) { + assertEquals(BsonDocument.class, actualClass); + } else if (expected instanceof BsonArray) { + assertEquals(BsonArray.class, actualClass); + } else if (expected instanceof BsonBinary) { + assertEquals(BsonBinary.class, actualClass); + } else if (expected instanceof BsonJavaScriptWithScope) { + assertEquals(BsonJavaScriptWithScope.class, actualClass); + } else { + fail("Unexpected " + expected.getClass().toString()); + } + } + + private BsonUtilTest() { + } +} diff --git a/bson/src/test/unit/org/bson/internal/CodecCacheSpecification.groovy b/bson/src/test/unit/org/bson/internal/CodecCacheSpecification.groovy new file mode 100644 index 00000000000..09b40735f1f --- /dev/null +++ b/bson/src/test/unit/org/bson/internal/CodecCacheSpecification.groovy @@ -0,0 +1,64 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.internal + +import org.bson.codecs.MinKeyCodec +import org.bson.types.MinKey +import spock.lang.Specification + +class CodecCacheSpecification extends Specification { + + def 'should return the cached codec if a codec for the class exists'() { + when: + def codec = new MinKeyCodec() + def cache = new CodecCache() + def cacheKey = new CodecCache.CodecCacheKey(MinKey, null) + cache.putIfAbsent(cacheKey, codec) + + then: + cache.get(cacheKey).get().is(codec) + } + + def 'should return empty if codec for class does not exist'() { + when: + def cache = new CodecCache() + def cacheKey = new CodecCache.CodecCacheKey(MinKey, null) + + then: + !cache.get(cacheKey).isPresent() + } + + def 'should return the cached codec if a codec for the parameterized class exists'() { + when: + def codec = new MinKeyCodec() + def cache = new CodecCache() + def cacheKey = new CodecCache.CodecCacheKey(List, [Integer]) + cache.putIfAbsent(cacheKey, codec) + + then: + cache.get(cacheKey).get().is(codec) + } + + def 'should return empty if codec for the parameterized class does not exist'() { + when: + def cache = new CodecCache() + def cacheKey = new CodecCache.CodecCacheKey(List, [Integer]) + + then: + !cache.get(cacheKey).isPresent() + } +} diff --git a/bson/src/test/unit/org/bson/internal/Holder.java b/bson/src/test/unit/org/bson/internal/Holder.java new file mode 100644 index 00000000000..afbf2f3f78c --- /dev/null +++ b/bson/src/test/unit/org/bson/internal/Holder.java @@ -0,0 +1,24 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.internal; + +import java.util.Collection; + +public class Holder { + @SuppressWarnings("VisibilityModifier") + public Collection> c; +} diff --git a/bson/src/test/unit/org/bson/codecs/configuration/ProvidersCodecRegistrySpecification.groovy b/bson/src/test/unit/org/bson/internal/ProvidersCodecRegistrySpecification.groovy similarity index 90% rename from bson/src/test/unit/org/bson/codecs/configuration/ProvidersCodecRegistrySpecification.groovy rename to bson/src/test/unit/org/bson/internal/ProvidersCodecRegistrySpecification.groovy index bfc354f9e3b..40897b3a5aa 100644 --- a/bson/src/test/unit/org/bson/codecs/configuration/ProvidersCodecRegistrySpecification.groovy +++ b/bson/src/test/unit/org/bson/internal/ProvidersCodecRegistrySpecification.groovy @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.bson.codecs.configuration +package org.bson.internal import org.bson.BsonBinaryReader import org.bson.BsonBinaryWriter @@ -26,6 +26,9 @@ import org.bson.codecs.Codec import org.bson.codecs.DecoderContext import org.bson.codecs.EncoderContext import org.bson.codecs.MinKeyCodec +import org.bson.codecs.configuration.CodecConfigurationException +import org.bson.codecs.configuration.CodecProvider +import org.bson.codecs.configuration.CodecRegistry import org.bson.io.BasicOutputBuffer import org.bson.io.ByteBufferBsonInput import org.bson.types.MaxKey @@ -95,8 +98,8 @@ class ProvidersCodecRegistrySpecification extends Specification { new Nested('George', new Top('Joe', null, null))) def writer = new BsonBinaryWriter(new BasicOutputBuffer()) topCodec.encode(writer, top, EncoderContext.builder().build()) - ByteArrayOutputStream os = new ByteArrayOutputStream(); - writer.getBsonOutput().pipe(os); + ByteArrayOutputStream os = new ByteArrayOutputStream() + writer.getBsonOutput().pipe(os) writer.close() then: @@ -106,22 +109,32 @@ class ProvidersCodecRegistrySpecification extends Specification { def 'get should use the codecCache'() { given: - def provider = Mock(CodecProvider) + def codec = Mock(Codec) + def provider = new CodecProvider() { + private int counter = 0 + + @Override + Codec get(final Class clazz, final CodecRegistry registry) { + if (counter == 0) { + counter++ + return codec + } + throw new AssertionError((Object)'Must not be called more than once.') + } + } when: def registry = new ProvidersCodecRegistry([provider]) - registry.get(MinKey) + def codecFromRegistry = registry.get(MinKey) then: - thrown(CodecConfigurationException) - 1 * provider.get(MinKey, _) + codecFromRegistry == codec when: - registry.get(MinKey) + codecFromRegistry = registry.get(MinKey) then: - thrown(CodecConfigurationException) - 0 * provider.get(MinKey, _) + codecFromRegistry == codec } def 'get with codec registry should return the codec from the first source that has 
one'() { @@ -161,7 +174,7 @@ class SingleCodecProvider implements CodecProvider { } @Override - def Codec get(final Class clazz, final CodecRegistry registry) { + Codec get(final Class clazz, final CodecRegistry registry) { if (clazz == codec.getEncoderClass()) { return codec } @@ -183,7 +196,7 @@ class ClassModelCodecProvider implements CodecProvider { @Override @SuppressWarnings('ReturnNullFromCatchBlock') - def Codec get(final Class clazz, final CodecRegistry registry) { + Codec get(final Class clazz, final CodecRegistry registry) { if (!supportedClasses.contains(clazz)) { null } else if (clazz == Top) { @@ -262,7 +275,7 @@ class TopCodec implements Codec { nested = codecForNested.decode(reader, decoderContext) } reader.readEndDocument() - new Top(name, other, nested); + new Top(name, other, nested) } } @@ -306,7 +319,7 @@ class NestedCodec implements Codec { top = codecForTop.decode(reader, decoderContext) } reader.readEndDocument() - new Nested(name, top); + new Nested(name, top) } } @@ -441,4 +454,3 @@ class Nested { class Simple { int value = 0 } - diff --git a/bson/src/test/unit/org/bson/internal/UnsignedLongsTest.java b/bson/src/test/unit/org/bson/internal/UnsignedLongsTest.java deleted file mode 100644 index 4dcc50fed37..00000000000 --- a/bson/src/test/unit/org/bson/internal/UnsignedLongsTest.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * Copyright 2010 The Guava Authors - * Copyright 2011 The Guava Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.bson.internal; - -import org.junit.Test; - -import java.math.BigInteger; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class UnsignedLongsTest { - - @Test - public void testCompare() { - // max value - assertTrue(UnsignedLongs.compare(0, 0xffffffffffffffffL) < 0); - assertTrue(UnsignedLongs.compare(0xffffffffffffffffL, 0) > 0); - - // both with high bit set - assertTrue(UnsignedLongs.compare(0xff1a618b7f65ea12L, 0xffffffffffffffffL) < 0); - assertTrue(UnsignedLongs.compare(0xffffffffffffffffL, 0xff1a618b7f65ea12L) > 0); - - // one with high bit set - assertTrue(UnsignedLongs.compare(0x5a4316b8c153ac4dL, 0xff1a618b7f65ea12L) < 0); - assertTrue(UnsignedLongs.compare(0xff1a618b7f65ea12L, 0x5a4316b8c153ac4dL) > 0); - - // neither with high bit set - assertTrue(UnsignedLongs.compare(0x5a4316b8c153ac4dL, 0x6cf78a4b139a4e2aL) < 0); - assertTrue(UnsignedLongs.compare(0x6cf78a4b139a4e2aL, 0x5a4316b8c153ac4dL) > 0); - - // same value - assertTrue(UnsignedLongs.compare(0xff1a618b7f65ea12L, 0xff1a618b7f65ea12L) == 0); - } - - @Test - public void testParseLong() { - assertEquals(0xffffffffffffffffL, UnsignedLongs.parse("18446744073709551615")); - assertEquals(0x7fffffffffffffffL, UnsignedLongs.parse("9223372036854775807")); - assertEquals(0xff1a618b7f65ea12L, UnsignedLongs.parse("18382112080831834642")); - assertEquals(0x5a4316b8c153ac4dL, UnsignedLongs.parse("6504067269626408013")); - assertEquals(0x6cf78a4b139a4e2aL, UnsignedLongs.parse("7851896530399809066")); - } - - @Test - public void testToString() { - String[] tests = { - "ffffffffffffffff", - "7fffffffffffffff", - "ff1a618b7f65ea12", - "5a4316b8c153ac4d", - "6cf78a4b139a4e2a" - }; - for (String x : tests) { - BigInteger xValue = new BigInteger(x, 16); - long xLong = xValue.longValue(); // signed - assertEquals(xValue.toString(10), 
UnsignedLongs.toString(xLong)); - } - } - -} diff --git a/bson/src/test/unit/org/bson/internal/UuidHelperSpecification.groovy b/bson/src/test/unit/org/bson/internal/UuidHelperSpecification.groovy new file mode 100644 index 00000000000..a42d09d6d9e --- /dev/null +++ b/bson/src/test/unit/org/bson/internal/UuidHelperSpecification.groovy @@ -0,0 +1,53 @@ +package org.bson.internal + +import org.bson.BSONException +import org.bson.UuidRepresentation +import spock.lang.Specification +import spock.lang.Unroll + +class UuidHelperSpecification extends Specification { + + @Unroll + def 'should encode different types of UUID'() { + given: + def expectedUuid = UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09') + + expect: + bytes == UuidHelper.encodeUuidToBinary(expectedUuid, uuidRepresentation) + + where: + bytes | uuidRepresentation + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] | UuidRepresentation.JAVA_LEGACY + [8, 7, 6, 5, 4, 3, 2, 1, 16, 15, 14, 13, 12, 11, 10, 9] | UuidRepresentation.STANDARD + [8, 7, 6, 5, 4, 3, 2, 1, 16, 15, 14, 13, 12, 11, 10, 9] | UuidRepresentation.PYTHON_LEGACY + [5, 6, 7, 8, 3, 4, 1, 2, 16, 15, 14, 13, 12, 11, 10, 9] | UuidRepresentation.C_SHARP_LEGACY + } + + @Unroll + def 'should decode different types of UUID'() { + given: + byte[] expectedBytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[] + + expect: + uuid == UuidHelper.decodeBinaryToUuid(expectedBytes, (byte) type, uuidRepresentation) + expectedBytes == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] as byte[] + + where: + uuid | type | uuidRepresentation + UUID.fromString('08070605-0403-0201-100f-0e0d0c0b0a09') | 3 | UuidRepresentation.JAVA_LEGACY + UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | 3 | UuidRepresentation.PYTHON_LEGACY + UUID.fromString('04030201-0605-0807-090a-0b0c0d0e0f10') | 3 | UuidRepresentation.C_SHARP_LEGACY + UUID.fromString('01020304-0506-0708-090a-0b0c0d0e0f10') | 4 | UuidRepresentation.STANDARD + } + + def 
'should error when decoding a subtype 3 binary to standard representation'() { + given: + byte[] expectedBytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + + when: + UuidHelper.decodeBinaryToUuid(expectedBytes, (byte) 3, UuidRepresentation.STANDARD) + + then: + thrown(BSONException) + } +} diff --git a/bson/src/test/unit/org/bson/io/BasicOutputBufferSpecification.groovy b/bson/src/test/unit/org/bson/io/BasicOutputBufferSpecification.groovy index 38de06bf8cf..758d4fc1cfd 100644 --- a/bson/src/test/unit/org/bson/io/BasicOutputBufferSpecification.groovy +++ b/bson/src/test/unit/org/bson/io/BasicOutputBufferSpecification.groovy @@ -44,9 +44,22 @@ class BasicOutputBufferSpecification extends Specification { bsonOutput.size == 1 } + def 'writeBytes shorthand should extend buffer'() { + given: + def bsonOutput = new BasicOutputBuffer(3) + + when: + bsonOutput.write([1, 2, 3, 4] as byte[]) + + then: + getBytes(bsonOutput) == [1, 2, 3, 4] as byte[] + bsonOutput.position == 4 + bsonOutput.size == 4 + } + def 'should write bytes'() { given: - def bsonOutput = new BasicOutputBuffer() + def bsonOutput = new BasicOutputBuffer(3) when: bsonOutput.writeBytes([1, 2, 3, 4] as byte[]) @@ -59,7 +72,7 @@ class BasicOutputBufferSpecification extends Specification { def 'should write bytes from offset until length'() { given: - def bsonOutput = new BasicOutputBuffer() + def bsonOutput = new BasicOutputBuffer(5) when: bsonOutput.writeBytes([0, 1, 2, 3, 4, 5] as byte[], 1, 4) @@ -70,9 +83,40 @@ class BasicOutputBufferSpecification extends Specification { bsonOutput.size == 4 } + def 'toByteArray should be idempotent'() { + given: + def bsonOutput = new BasicOutputBuffer(10) + bsonOutput.writeBytes([1, 2, 3, 4] as byte[]) + + when: + def first = bsonOutput.toByteArray() + def second = bsonOutput.toByteArray() + + then: + getBytes(bsonOutput) == [1, 2, 3, 4] as byte[] + first == [1, 2, 3, 4] as byte[] + second == [1, 2, 3, 4] as byte[] + bsonOutput.position == 4 + 
bsonOutput.size == 4 + } + + def 'toByteArray creates a copy'() { + given: + def bsonOutput = new BasicOutputBuffer(10) + bsonOutput.writeBytes([1, 2, 3, 4] as byte[]) + + when: + def first = bsonOutput.toByteArray() + def second = bsonOutput.toByteArray() + + then: + first !== second + first == [1, 2, 3, 4] as byte[] + second == [1, 2, 3, 4] as byte[] + } def 'should write a little endian Int32'() { given: - def bsonOutput = new BasicOutputBuffer() + def bsonOutput = new BasicOutputBuffer(3) when: bsonOutput.writeInt32(0x1020304) @@ -85,7 +129,7 @@ class BasicOutputBufferSpecification extends Specification { def 'should write a little endian Int64'() { given: - def bsonOutput = new BasicOutputBuffer() + def bsonOutput = new BasicOutputBuffer(7) when: bsonOutput.writeInt64(0x102030405060708L) @@ -98,7 +142,7 @@ class BasicOutputBufferSpecification extends Specification { def 'should write a double'() { given: - def bsonOutput = new BasicOutputBuffer() + def bsonOutput = new BasicOutputBuffer(7) when: bsonOutput.writeDouble(Double.longBitsToDouble(0x102030405060708L)) @@ -112,7 +156,7 @@ class BasicOutputBufferSpecification extends Specification { def 'should write an ObjectId'() { given: def objectIdAsByteArray = [12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1] as byte[] - def bsonOutput = new BasicOutputBuffer() + def bsonOutput = new BasicOutputBuffer(11) when: bsonOutput.writeObjectId(new ObjectId(objectIdAsByteArray)) @@ -123,6 +167,19 @@ class BasicOutputBufferSpecification extends Specification { bsonOutput.size == 12 } + def 'write ObjectId should throw after close'() { + given: + def objectIdAsByteArray = [12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1] as byte[] + def bsonOutput = new BasicOutputBuffer() + bsonOutput.close() + + when: + bsonOutput.writeObjectId(new ObjectId(objectIdAsByteArray)) + + then: + thrown(IllegalStateException) + } + def 'should write an empty string'() { given: def bsonOutput = new BasicOutputBuffer() @@ -151,7 +208,7 @@ class 
BasicOutputBufferSpecification extends Specification { def 'should write a UTF-8 string'() { given: - def bsonOutput = new BasicOutputBuffer() + def bsonOutput = new BasicOutputBuffer(7) when: bsonOutput.writeString('\u0900') @@ -263,6 +320,46 @@ class BasicOutputBufferSpecification extends Specification { bsonOutput.size == 8 } + def 'absolute write should throw with invalid position'() { + given: + def bsonOutput = new BasicOutputBuffer() + bsonOutput.writeBytes([1, 2, 3, 4] as byte[]) + + when: + bsonOutput.write(-1, 0x1020304) + + then: + thrown(IllegalArgumentException) + + when: + bsonOutput.write(4, 0x1020304) + + then: + thrown(IllegalArgumentException) + } + + def 'absolute write should write lower byte at position'() { + given: + def bsonOutput = new BasicOutputBuffer() + bsonOutput.writeBytes([0, 0, 0, 0, 1, 2, 3, 4] as byte[]) + + when: + bsonOutput.write(0, 0x1020304) + + then: + getBytes(bsonOutput) == [4, 0, 0, 0, 1, 2, 3, 4] as byte[] + bsonOutput.position == 8 + bsonOutput.size == 8 + + when: + bsonOutput.write(7, 0x1020304) + + then: + getBytes(bsonOutput) == [4, 0, 0, 0, 1, 2, 3, 4] as byte[] + bsonOutput.position == 8 + bsonOutput.size == 8 + } + def 'truncate should throw with invalid position'() { given: def bsonOutput = new BasicOutputBuffer() @@ -320,6 +417,20 @@ class BasicOutputBufferSpecification extends Specification { bsonOutput.getByteBuffers()[0].getInt() == 1 } + def 'should get byte buffer with limit'() { + given: + def bsonOutput = new BasicOutputBuffer(8) + bsonOutput.writeBytes([1, 0, 0, 0] as byte[]) + + when: + def buffers = bsonOutput.getByteBuffers() + + then: + buffers.size() == 1 + buffers[0].position() == 0 + buffers[0].limit() == 4 + } + def 'should get internal buffer'() { given: def bsonOutput = new BasicOutputBuffer(4) diff --git a/bson/src/test/unit/org/bson/io/BasicOutputBufferTest.java b/bson/src/test/unit/org/bson/io/BasicOutputBufferTest.java index 1531c7d0678..795df289876 100644 --- 
a/bson/src/test/unit/org/bson/io/BasicOutputBufferTest.java +++ b/bson/src/test/unit/org/bson/io/BasicOutputBufferTest.java @@ -16,13 +16,14 @@ package org.bson.io; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Arrays; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; // for tests that are too slow to run in Groovy public class BasicOutputBufferTest { @@ -42,7 +43,7 @@ public void shouldEncodeAllCodePointsThatAreLettersOrDigits() throws IOException // then byte[] bytes = getBytes(bsonOutput); - assertArrayEquals("failed with code point " + codePoint, str.getBytes("UTF-8"), Arrays.copyOfRange(bytes, 0, bytes.length - 1)); + assertArrayEquals(str.getBytes(StandardCharsets.UTF_8), Arrays.copyOfRange(bytes, 0, bytes.length - 1), "failed with code point " + codePoint); } } diff --git a/bson/src/test/unit/org/bson/io/ByteBufferBsonInputSpecification.groovy b/bson/src/test/unit/org/bson/io/ByteBufferBsonInputSpecification.groovy index 80623280c80..a6ff9e1d609 100644 --- a/bson/src/test/unit/org/bson/io/ByteBufferBsonInputSpecification.groovy +++ b/bson/src/test/unit/org/bson/io/ByteBufferBsonInputSpecification.groovy @@ -207,6 +207,26 @@ class ByteBufferBsonInputSpecification extends Specification { stream.position == 4 } + def 'should handle invalid CString not null terminated'() { + when: + def stream = new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap([0xe0, 0xa4, 0x80] as byte[]))) + stream.readCString() + + then: + def e = thrown(BsonSerializationException) + e.getMessage() == 'Found a BSON string that is not null-terminated' + } + + def 'should handle invalid CString not null terminated when skipping value'() { + when: + def stream = new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap([0xe0, 0xa4, 0x80] as byte[]))) + stream.skipCString() + + then: + def 
e = thrown(BsonSerializationException) + e.getMessage() == 'Found a BSON string that is not null-terminated' + } + def 'should read from position'() { given: def stream = new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap([4, 3, 2, 1] as byte[]))) @@ -240,35 +260,6 @@ class ByteBufferBsonInputSpecification extends Specification { stream.position == 5 } - def 'reset should throw when there is no mark'() { - given: - def stream = new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap([0x4a, 0x61, 0x76, 0x61, 0] as byte[]))) - - when: - stream.reset() - - then: - thrown(IllegalStateException) - } - - def 'should reset to the mark'() { - given: - def stream = new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap([0x4a, 0x61, 0x76, 0x61, 0] as byte[]))) - - when: - stream.with { - readByte() - readByte() - mark(1024) - readByte() - readByte() - readByte() - reset() - } - then: - stream.position == 2 - } - def 'should reset to the BsonInputMark'() { given: def stream = new ByteBufferBsonInput(new ByteBufNIO(ByteBuffer.wrap([0x4a, 0x61, 0x76, 0x61, 0] as byte[]))) diff --git a/bson/src/test/unit/org/bson/json/Base64Specification.groovy b/bson/src/test/unit/org/bson/json/Base64Specification.groovy deleted file mode 100644 index 0bc2566bbcc..00000000000 --- a/bson/src/test/unit/org/bson/json/Base64Specification.groovy +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson.json - -import org.bson.internal.Base64 -import spock.lang.Specification -import spock.lang.Unroll - -class Base64Specification extends Specification { - - @Unroll - def 'encodes #encoded into #decoded'() { - expect: - Base64.encode(encoded.getBytes()) == decoded - Base64.decode(decoded) == encoded.getBytes() - - where: - encoded | decoded - '' | '' - 'f' | 'Zg==' - 'fo' | 'Zm8=' - 'foo' | 'Zm9v' - 'foob' | 'Zm9vYg==' - 'fooba' | 'Zm9vYmE=' - 'foobar' | 'Zm9vYmFy' - } - -} diff --git a/bson/src/test/unit/org/bson/json/JsonBufferTest.java b/bson/src/test/unit/org/bson/json/JsonBufferTest.java deleted file mode 100644 index e647cfd6acf..00000000000 --- a/bson/src/test/unit/org/bson/json/JsonBufferTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson.json; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - -public class JsonBufferTest { - - @Test - public void testRead() { - JsonBuffer buffer = new JsonBuffer("ABC"); - assertEquals('A', buffer.read()); - assertEquals('B', buffer.read()); - assertEquals('C', buffer.read()); - assertEquals(-1, buffer.read()); - } - - @Test - public void testUnRead() { - JsonBuffer buffer = new JsonBuffer("A"); - buffer.unread(buffer.read()); - assertEquals('A', buffer.read()); - assertEquals(-1, buffer.read()); - } - - @Test - public void testPosition() { - JsonBuffer buffer = new JsonBuffer("ABC"); - - buffer.setPosition(2); - assertEquals(2, buffer.getPosition()); - } - - @Test(expected = JsonParseException.class) - public void testEOFCheck() { - JsonBuffer buffer = new JsonBuffer(""); - - buffer.read(); - buffer.read(); - } -} diff --git a/bson/src/test/unit/org/bson/json/JsonObjectTest.java b/bson/src/test/unit/org/bson/json/JsonObjectTest.java new file mode 100644 index 00000000000..80cfe07196c --- /dev/null +++ b/bson/src/test/unit/org/bson/json/JsonObjectTest.java @@ -0,0 +1,127 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.json; + +import org.bson.BsonDocument; +import org.bson.BsonInt32; +import org.bson.codecs.BsonCodecProvider; +import org.bson.codecs.JsonObjectCodecProvider; +import org.junit.jupiter.api.Test; + +import static org.bson.codecs.configuration.CodecRegistries.fromProviders; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +public class JsonObjectTest { + + @Test + public void testNull() { + assertThrows(IllegalArgumentException.class, () -> new JsonObject(null)); + } + + @Test + public void testArray() { + assertThrows(IllegalArgumentException.class, () ->new JsonObject("['A', 'B', 'C']")); + } + + @Test + public void testSpaceInvalidObject() { + assertThrows(IllegalArgumentException.class, () ->new JsonObject(" ['A']")); + } + + @Test + public void testLineFeedInvalidObject() { + assertThrows(IllegalArgumentException.class, () ->new JsonObject("\nvalue")); + } + + @Test + public void testCarriageReturnInvalidObject() { + assertThrows(IllegalArgumentException.class, () ->new JsonObject("\r123")); + } + + @Test + public void testHorizontalTabInvalidObject() { + assertThrows(IllegalArgumentException.class, () ->new JsonObject("\t123")); + } + + @Test + public void testOnlyWhitespace() { + assertThrows(IllegalArgumentException.class, () ->new JsonObject(" \t\n \r ")); + } + + @Test + public void testSpaceValidObject() { + String json = " {hello: 2}"; + assertEquals(new JsonObject(json).getJson(), json); + } + + @Test + public void testLineFeedValidObject() { + String json = "\n{hello: 2}"; + assertEquals(new JsonObject(json).getJson(), json); + } + + @Test + public void testCarriageReturnValidObject() { + String json = "\r{hello: 2}"; + assertEquals(new JsonObject(json).getJson(), json); + } + + @Test + public void testHorizontalTabValidObject() { + String json = "\t{hello: 2}"; + assertEquals(new 
JsonObject(json).getJson(), json); + } + + @Test + public void testLeadingAndTrailingWhitespace() { + String json = "\n\t\r {hello: 2} \n"; + assertEquals(new JsonObject(json).getJson(), json); + } + + @Test + public void testEqualsAndHashCode() { + JsonObject j1 = new JsonObject("{hello: 1}"); + JsonObject j2 = new JsonObject("{hello: 1}"); + JsonObject j3 = new JsonObject("{world: 2}"); + + assertEquals(j1, j1); + assertEquals(j1, j2); + assertEquals(j2, j1); + assertNotEquals(j1, j3); + assertNotEquals(j3, j1); + assertNotEquals(null, j1); + assertNotEquals("{hello: 1}", j1); + + assertEquals(j1.hashCode(), j1.hashCode()); + assertEquals(j1.hashCode(), j2.hashCode()); + } + + @Test + public void testGetJson() { + JsonObject j1 = new JsonObject("{hello: 1}"); + assertEquals(j1.getJson(), "{hello: 1}"); + } + + @Test + public void testToBsonDocument() { + JsonObject j1 = new JsonObject("{hello: 1}"); + BsonDocument b1 = new BsonDocument("hello", new BsonInt32(1)); + assertEquals(j1.toBsonDocument(null, fromProviders(new JsonObjectCodecProvider(), new BsonCodecProvider())), b1); + } +} diff --git a/bson/src/test/unit/org/bson/json/JsonReaderTest.java b/bson/src/test/unit/org/bson/json/JsonReaderTest.java index 8fde26e429f..27e1980a3e3 100644 --- a/bson/src/test/unit/org/bson/json/JsonReaderTest.java +++ b/bson/src/test/unit/org/bson/json/JsonReaderTest.java @@ -20,289 +20,356 @@ import org.bson.BsonBinary; import org.bson.BsonBinarySubType; import org.bson.BsonDbPointer; +import org.bson.BsonReaderMark; import org.bson.BsonRegularExpression; import org.bson.BsonTimestamp; import org.bson.BsonType; import org.bson.types.Decimal128; import org.bson.types.ObjectId; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.nio.charset.StandardCharsets; import java.text.ParsePosition; import java.text.SimpleDateFormat; import 
java.util.Date; import java.util.List; import java.util.Locale; +import java.util.function.Function; import static java.util.Arrays.asList; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class JsonReaderTest { - private AbstractBsonReader bsonReader; @Test public void testArrayEmpty() { String json = "[]"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.ARRAY, bsonReader.readBsonType()); - bsonReader.readStartArray(); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndArray(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.ARRAY, bsonReader.readBsonType()); + bsonReader.readStartArray(); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + bsonReader.readEndArray(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testArrayOneElement() { String json = "[1]"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.ARRAY, bsonReader.readBsonType()); - bsonReader.readStartArray(); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals(1, bsonReader.readInt32()); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndArray(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.ARRAY, bsonReader.readBsonType()); + 
bsonReader.readStartArray(); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals(1, bsonReader.readInt32()); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + bsonReader.readEndArray(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testArrayTwoElements() { String json = "[1, 2]"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.ARRAY, bsonReader.readBsonType()); - bsonReader.readStartArray(); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals(1, bsonReader.readInt32()); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals(2, bsonReader.readInt32()); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndArray(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.ARRAY, bsonReader.readBsonType()); + bsonReader.readStartArray(); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals(1, bsonReader.readInt32()); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals(2, bsonReader.readInt32()); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + bsonReader.readEndArray(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testBooleanFalse() { String json = "false"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BOOLEAN, bsonReader.readBsonType()); - assertEquals(false, bsonReader.readBoolean()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BOOLEAN, bsonReader.readBsonType()); + assertFalse(bsonReader.readBoolean()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testBooleanTrue() { String json = "true"; - 
bsonReader = new JsonReader(json); - assertEquals(BsonType.BOOLEAN, bsonReader.readBsonType()); - assertEquals(true, bsonReader.readBoolean()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BOOLEAN, bsonReader.readBsonType()); + assertTrue(bsonReader.readBoolean()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeMinBson() { String json = "new Date(-9223372036854775808)"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(-9223372036854775808L, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(-9223372036854775808L, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeMaxBson() { String json = "new Date(9223372036854775807)"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - long k = bsonReader.readDateTime(); - assertEquals(9223372036854775807L, k); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + long k = bsonReader.readDateTime(); + assertEquals(9223372036854775807L, k); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); + } + + @Test + public void testDateTimeShellDateOnly() { + String json = "ISODate(\"1970-01-01\")"; + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(0, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + 
}); } @Test public void testDateTimeShell() { String json = "ISODate(\"1970-01-01T00:00:00Z\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(0, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(0, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeShellWith24HourTimeSpecification() { String json = "ISODate(\"2013-10-04T12:07:30.443Z\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(1380888450443L, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(1380888450443L, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeStrict() { String json = "{ \"$date\" : 0 }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(0, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(0, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testNestedDateTimeStrict() { String json = "{d1 : { \"$date\" : 0 }, d2 : { \"$date\" : 1 } }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals(0L, bsonReader.readDateTime("d1")); - assertEquals(1L, bsonReader.readDateTime("d2")); - 
bsonReader.readEndDocument(); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals(0L, bsonReader.readDateTime("d1")); + assertEquals(1L, bsonReader.readDateTime("d2")); + bsonReader.readEndDocument(); + return null; + }); } @Test public void testDateTimeISOString() { String json = "{ \"$date\" : \"2015-04-16T14:55:57.626Z\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(1429196157626L, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(1429196157626L, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeISOStringWithTimeOffset() { String json = "{ \"$date\" : \"2015-04-16T16:55:57.626+02:00\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(1429196157626L, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(1429196157626L, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeTengen() { String json = "new Date(0)"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(0, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(0, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + 
}); } @Test public void testDocumentEmpty() { String json = "{ }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - bsonReader.readStartDocument(); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndDocument(); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + bsonReader.readStartDocument(); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + bsonReader.readEndDocument(); + return null; + }); } @Test public void testDocumentNested() { String json = "{ \"a\" : { \"x\" : 1 }, \"y\" : 2 }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - bsonReader.readStartDocument(); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - assertEquals("a", bsonReader.readName()); - bsonReader.readStartDocument(); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals("x", bsonReader.readName()); - assertEquals(1, bsonReader.readInt32()); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndDocument(); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals("y", bsonReader.readName()); - assertEquals(2, bsonReader.readInt32()); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + bsonReader.readStartDocument(); + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + assertEquals("a", bsonReader.readName()); + bsonReader.readStartDocument(); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals("x", bsonReader.readName()); + assertEquals(1, bsonReader.readInt32()); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + 
bsonReader.readEndDocument(); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals("y", bsonReader.readName()); + assertEquals(2, bsonReader.readInt32()); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDocumentOneElement() { String json = "{ \"x\" : 1 }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - bsonReader.readStartDocument(); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals("x", bsonReader.readName()); - assertEquals(1, bsonReader.readInt32()); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + bsonReader.readStartDocument(); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals("x", bsonReader.readName()); + assertEquals(1, bsonReader.readInt32()); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDocumentTwoElements() { String json = "{ \"x\" : 1, \"y\" : 2 }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - bsonReader.readStartDocument(); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals("x", bsonReader.readName()); - assertEquals(1, bsonReader.readInt32()); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals("y", bsonReader.readName()); - assertEquals(2, bsonReader.readInt32()); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndDocument(); - 
assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + bsonReader.readStartDocument(); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals("x", bsonReader.readName()); + assertEquals(1, bsonReader.readInt32()); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals("y", bsonReader.readName()); + assertEquals(2, bsonReader.readInt32()); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDouble() { String json = "1.5"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOUBLE, bsonReader.readBsonType()); - assertEquals(1.5, bsonReader.readDouble(), 0); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOUBLE, bsonReader.readBsonType()); + assertEquals(1.5, bsonReader.readDouble(), 0); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testHexData() { - byte[] expectedBytes = new byte[]{0x01, 0x23}; + byte[] expectedBytes = {0x01, 0x23}; String json = "HexData(0, \"0123\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertArrayEquals(expectedBytes, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertArrayEquals(expectedBytes, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testHexDataWithNew() { - byte[] 
expectedBytes = new byte[]{0x01, 0x23}; + byte[] expectedBytes = {0x01, 0x23}; String json = "new HexData(0, \"0123\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertArrayEquals(expectedBytes, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertArrayEquals(expectedBytes, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testInt32() { String json = "123"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals(123, bsonReader.readInt32()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals(123, bsonReader.readInt32()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testInt64() { String json = String.valueOf(Long.MAX_VALUE); - bsonReader = new JsonReader(json); - assertEquals(BsonType.INT64, bsonReader.readBsonType()); - assertEquals(Long.MAX_VALUE, bsonReader.readInt64()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.INT64, bsonReader.readBsonType()); + assertEquals(Long.MAX_VALUE, bsonReader.readInt64()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testNumberLongExtendedJson() { String json = "{\"$numberLong\":\"123\"}"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.INT64, bsonReader.readBsonType()); - assertEquals(123, bsonReader.readInt64()); - 
assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.INT64, bsonReader.readBsonType()); + assertEquals(123, bsonReader.readInt64()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test @@ -313,10 +380,12 @@ public void testNumberLong() { "new NumberLong(123)", "new NumberLong(\"123\")"); for (String json : jsonTexts) { - bsonReader = new JsonReader(json); - assertEquals(BsonType.INT64, bsonReader.readBsonType()); - assertEquals(123, bsonReader.readInt64()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.INT64, bsonReader.readBsonType()); + assertEquals(123, bsonReader.readInt64()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } } @@ -328,117 +397,138 @@ public void testNumberInt() { "new NumberInt(123)", "new NumberInt(\"123\")"); for (String json : jsonTexts) { - bsonReader = new JsonReader(json); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals(123, bsonReader.readInt32()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals(123, bsonReader.readInt32()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } } @Test public void testDecimal128StringConstructor() { String json = "NumberDecimal(\"314E-2\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); - assertEquals(Decimal128.parse("314E-2"), bsonReader.readDecimal128()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); + assertEquals(Decimal128.parse("314E-2"), 
bsonReader.readDecimal128()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDecimal128Int32Constructor() { String json = "NumberDecimal(" + Integer.MAX_VALUE + ")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); - assertEquals(new Decimal128(Integer.MAX_VALUE), bsonReader.readDecimal128()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); + assertEquals(new Decimal128(Integer.MAX_VALUE), bsonReader.readDecimal128()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDecimal128Int64Constructor() { String json = "NumberDecimal(" + Long.MAX_VALUE + ")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); - assertEquals(new Decimal128(Long.MAX_VALUE), bsonReader.readDecimal128()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); + assertEquals(new Decimal128(Long.MAX_VALUE), bsonReader.readDecimal128()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDecimal128DoubleConstructor() { String json = "NumberDecimal(" + 1.0 + ")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); - assertEquals(Decimal128.parse("1"), bsonReader.readDecimal128()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); + assertEquals(Decimal128.parse("1"), bsonReader.readDecimal128()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + 
}); } @Test public void testDecimal128BooleanConstructor() { String json = "NumberDecimal(true)"; - bsonReader = new JsonReader(json); - try { - bsonReader.readBsonType(); - fail("Should fail to parse NumberDecimal constructor with a string"); - } catch (JsonParseException e) { - // all good - } + testStringAndStream(json, bsonReader -> { + try { + bsonReader.readBsonType(); + fail("Should fail to parse NumberDecimal constructor with a string"); + } catch (JsonParseException e) { + // all good + } + return null; + }); } @Test public void testDecimal128WithNew() { String json = "new NumberDecimal(\"314E-2\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); - assertEquals(Decimal128.parse("314E-2"), bsonReader.readDecimal128()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); + assertEquals(Decimal128.parse("314E-2"), bsonReader.readDecimal128()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDecimal128ExtendedJson() { String json = "{\"$numberDecimal\":\"314E-2\"}"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); - assertEquals(Decimal128.parse("314E-2"), bsonReader.readDecimal128()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DECIMAL128, bsonReader.readBsonType()); + assertEquals(Decimal128.parse("314E-2"), bsonReader.readDecimal128()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testJavaScript() { String json = "{ \"$code\" : \"function f() { return 1; }\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.JAVASCRIPT, bsonReader.readBsonType()); - assertEquals("function f() { return 1; }", 
bsonReader.readJavaScript()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); - + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.JAVASCRIPT, bsonReader.readBsonType()); + assertEquals("function f() { return 1; }", bsonReader.readJavaScript()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testJavaScriptWithScope() { String json = "{\"codeWithScope\": { \"$code\" : \"function f() { return n; }\", \"$scope\" : { \"n\" : 1 } } }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals(BsonType.JAVASCRIPT_WITH_SCOPE, bsonReader.readBsonType()); - assertEquals("codeWithScope", bsonReader.readName()); - assertEquals("function f() { return n; }", bsonReader.readJavaScriptWithScope()); - bsonReader.readStartDocument(); - assertEquals(BsonType.INT32, bsonReader.readBsonType()); - assertEquals("n", bsonReader.readName()); - assertEquals(1, bsonReader.readInt32()); - bsonReader.readEndDocument(); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals(BsonType.JAVASCRIPT_WITH_SCOPE, bsonReader.readBsonType()); + assertEquals("codeWithScope", bsonReader.readName()); + assertEquals("function f() { return n; }", bsonReader.readJavaScriptWithScope()); + bsonReader.readStartDocument(); + assertEquals(BsonType.INT32, bsonReader.readBsonType()); + assertEquals("n", bsonReader.readName()); + assertEquals(1, bsonReader.readInt32()); + bsonReader.readEndDocument(); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testMaxKey() { for (String maxKeyJson : asList("{ \"$maxKey\" : 1 }", "MaxKey", "MaxKey()", "new MaxKey", "new MaxKey()")) { String json = "{ maxKey : " + maxKeyJson + " }"; - bsonReader = new JsonReader(json); - 
bsonReader.readStartDocument(); - assertEquals("maxKey", bsonReader.readName()); - assertEquals(BsonType.MAX_KEY, bsonReader.getCurrentBsonType()); - bsonReader.readMaxKey(); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals("maxKey", bsonReader.readName()); + assertEquals(BsonType.MAX_KEY, bsonReader.getCurrentBsonType()); + bsonReader.readMaxKey(); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } } @@ -446,555 +536,799 @@ public void testMaxKey() { public void testMinKey() { for (String minKeyJson : asList("{ \"$minKey\" : 1 }", "MinKey", "MinKey()", "new MinKey", "new MinKey()")) { String json = "{ minKey : " + minKeyJson + " }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals("minKey", bsonReader.readName()); - assertEquals(BsonType.MIN_KEY, bsonReader.getCurrentBsonType()); - bsonReader.readMinKey(); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals("minKey", bsonReader.readName()); + assertEquals(BsonType.MIN_KEY, bsonReader.getCurrentBsonType()); + bsonReader.readMinKey(); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } } @Test public void testNestedArray() { String json = "{ \"a\" : [1, 2] }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - bsonReader.readStartDocument(); - assertEquals(BsonType.ARRAY, bsonReader.readBsonType()); - assertEquals("a", bsonReader.readName()); - bsonReader.readStartArray(); - assertEquals(1, bsonReader.readInt32()); - assertEquals(2, bsonReader.readInt32()); - bsonReader.readEndArray(); - 
bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); - + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + bsonReader.readStartDocument(); + assertEquals(BsonType.ARRAY, bsonReader.readBsonType()); + assertEquals("a", bsonReader.readName()); + bsonReader.readStartArray(); + assertEquals(1, bsonReader.readInt32()); + assertEquals(2, bsonReader.readInt32()); + bsonReader.readEndArray(); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testNestedDocument() { String json = "{ \"a\" : { \"b\" : 1, \"c\" : 2 } }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - bsonReader.readStartDocument(); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - assertEquals("a", bsonReader.readName()); - bsonReader.readStartDocument(); - assertEquals("b", bsonReader.readName()); - assertEquals(1, bsonReader.readInt32()); - assertEquals("c", bsonReader.readName()); - assertEquals(2, bsonReader.readInt32()); - bsonReader.readEndDocument(); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); - + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + bsonReader.readStartDocument(); + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + assertEquals("a", bsonReader.readName()); + bsonReader.readStartDocument(); + assertEquals("b", bsonReader.readName()); + assertEquals(1, bsonReader.readInt32()); + assertEquals("c", bsonReader.readName()); + assertEquals(2, bsonReader.readInt32()); + bsonReader.readEndDocument(); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testNull() { String json = "null"; - bsonReader = new JsonReader(json); - 
assertEquals(BsonType.NULL, bsonReader.readBsonType()); - bsonReader.readNull(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); - + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.NULL, bsonReader.readBsonType()); + bsonReader.readNull(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testObjectIdShell() { String json = "ObjectId(\"4d0ce088e447ad08b4721a37\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.OBJECT_ID, bsonReader.readBsonType()); - ObjectId objectId = bsonReader.readObjectId(); - assertEquals("4d0ce088e447ad08b4721a37", objectId.toString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.OBJECT_ID, bsonReader.readBsonType()); + ObjectId objectId = bsonReader.readObjectId(); + assertEquals("4d0ce088e447ad08b4721a37", objectId.toString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testObjectIdWithNew() { String json = "new ObjectId(\"4d0ce088e447ad08b4721a37\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.OBJECT_ID, bsonReader.readBsonType()); - ObjectId objectId = bsonReader.readObjectId(); - assertEquals("4d0ce088e447ad08b4721a37", objectId.toString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.OBJECT_ID, bsonReader.readBsonType()); + ObjectId objectId = bsonReader.readObjectId(); + assertEquals("4d0ce088e447ad08b4721a37", objectId.toString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testObjectIdStrict() { String json = "{ \"$oid\" : \"4d0ce088e447ad08b4721a37\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.OBJECT_ID, bsonReader.readBsonType()); - ObjectId objectId = 
bsonReader.readObjectId(); - assertEquals("4d0ce088e447ad08b4721a37", objectId.toString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.OBJECT_ID, bsonReader.readBsonType()); + ObjectId objectId = bsonReader.readObjectId(); + assertEquals("4d0ce088e447ad08b4721a37", objectId.toString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testObjectIdTenGen() { String json = "ObjectId(\"4d0ce088e447ad08b4721a37\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.OBJECT_ID, bsonReader.readBsonType()); - ObjectId objectId = bsonReader.readObjectId(); - assertEquals("4d0ce088e447ad08b4721a37", objectId.toString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.OBJECT_ID, bsonReader.readBsonType()); + ObjectId objectId = bsonReader.readObjectId(); + assertEquals("4d0ce088e447ad08b4721a37", objectId.toString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testRegularExpressionShell() { String json = "/pattern/imxs"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); - BsonRegularExpression regex = bsonReader.readRegularExpression(); - assertEquals("pattern", regex.getPattern()); - assertEquals("imsx", regex.getOptions()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); - + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); + BsonRegularExpression regex = bsonReader.readRegularExpression(); + assertEquals("pattern", regex.getPattern()); + assertEquals("imsx", regex.getOptions()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void 
testRegularExpressionStrict() { String json = "{ \"$regex\" : \"pattern\", \"$options\" : \"imxs\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); - BsonRegularExpression regex = bsonReader.readRegularExpression(); - assertEquals("pattern", regex.getPattern()); - assertEquals("imsx", regex.getOptions()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); + BsonRegularExpression regex = bsonReader.readRegularExpression(); + assertEquals("pattern", regex.getPattern()); + assertEquals("imsx", regex.getOptions()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testRegularExpressionCanonical() { String json = "{ \"$regularExpression\" : { \"pattern\" : \"pattern\", \"options\" : \"imxs\" }}"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); - BsonRegularExpression regex = bsonReader.readRegularExpression(); - assertEquals("pattern", regex.getPattern()); - assertEquals("imsx", regex.getOptions()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); + BsonRegularExpression regex = bsonReader.readRegularExpression(); + assertEquals("pattern", regex.getPattern()); + assertEquals("imsx", regex.getOptions()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testRegularExpressionQuery() { String json = "{ \"$regex\" : { \"$regularExpression\" : { \"pattern\" : \"pattern\", \"options\" : \"imxs\" }}}"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - BsonRegularExpression regex = bsonReader.readRegularExpression("$regex"); - 
assertEquals("pattern", regex.getPattern()); - assertEquals("imsx", regex.getOptions()); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + BsonRegularExpression regex = bsonReader.readRegularExpression("$regex"); + assertEquals("pattern", regex.getPattern()); + assertEquals("imsx", regex.getOptions()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testRegularExpressionQueryShell() { String json = "{ \"$regex\" : /pattern/imxs}"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - BsonRegularExpression regex = bsonReader.readRegularExpression("$regex"); - assertEquals("pattern", regex.getPattern()); - assertEquals("imsx", regex.getOptions()); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + BsonRegularExpression regex = bsonReader.readRegularExpression("$regex"); + assertEquals("pattern", regex.getPattern()); + assertEquals("imsx", regex.getOptions()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); + } + + /** + * Test a $regularExpression extended json with unquoted keys + */ + @Test + public void testRegularExpressionCanonicalWithUnquotedKeys() { + String json = "{$regularExpression: {pattern: \"[a-z]\", options: \"imxs\"}}"; + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); + assertEquals(new BsonRegularExpression("[a-z]", "imxs"), bsonReader.readRegularExpression()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); + } + + /** + * Test a $regex extended json query version with unquoted keys + */ + @Test + 
public void testRegularExpressionQueryWithUnquotedKeys() { + String json = "{$regex : { $regularExpression : { pattern : \"[a-z]\", options : \"imxs\" }}}"; + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + BsonRegularExpression regex = bsonReader.readRegularExpression("$regex"); + assertEquals("[a-z]", regex.getPattern()); + assertEquals("imsx", regex.getOptions()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testString() { - String str = "abc"; - String json = '"' + str + '"'; - bsonReader = new JsonReader(json); - assertEquals(BsonType.STRING, bsonReader.readBsonType()); - assertEquals(str, bsonReader.readString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); - - str = "\ud806\udc5c"; - json = '"' + str + '"'; - bsonReader = new JsonReader(json); - assertEquals(BsonType.STRING, bsonReader.readBsonType()); - assertEquals(str, bsonReader.readString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); - - str = "\\ud806\\udc5c"; - json = '"' + str + '"'; - bsonReader = new JsonReader(json); - assertEquals(BsonType.STRING, bsonReader.readBsonType()); - assertEquals("\ud806\udc5c", bsonReader.readString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); - - str = "꼢𑡜ᳫ鉠鮻罖᧭䆔瘉"; - json = '"' + str + '"'; - bsonReader = new JsonReader(json); - assertEquals(BsonType.STRING, bsonReader.readBsonType()); - assertEquals(str, bsonReader.readString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + final String str = "abc"; + final String json = '"' + str + '"'; + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.STRING, bsonReader.readBsonType()); + assertEquals(str, bsonReader.readString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); + + final String str2 = "\ud806\udc5c"; + final String json2 
= '"' + str2 + '"'; + testStringAndStream(json2, bsonReader -> { + assertEquals(BsonType.STRING, bsonReader.readBsonType()); + assertEquals(str2, bsonReader.readString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); + + final String str3 = "\\ud806\\udc5c"; + final String json3 = '"' + str3 + '"'; + testStringAndStream(json3, bsonReader -> { + assertEquals(BsonType.STRING, bsonReader.readBsonType()); + assertEquals("\ud806\udc5c", bsonReader.readString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); + + final String str4 = "꼢𑡜ᳫ鉠鮻罖᧭䆔瘉"; + final String json4 = '"' + str4 + '"'; + testStringAndStream(json4, bsonReader -> { + assertEquals(BsonType.STRING, bsonReader.readBsonType()); + assertEquals(str4, bsonReader.readString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testStringEmpty() { String json = "\"\""; - bsonReader = new JsonReader(json); - assertEquals(BsonType.STRING, bsonReader.readBsonType()); - assertEquals("", bsonReader.readString()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.STRING, bsonReader.readBsonType()); + assertEquals("", bsonReader.readString()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testSymbol() { String json = "{ \"$symbol\" : \"symbol\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.SYMBOL, bsonReader.readBsonType()); - assertEquals("symbol", bsonReader.readSymbol()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.SYMBOL, bsonReader.readBsonType()); + assertEquals("symbol", bsonReader.readSymbol()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void 
testTimestampStrict() { String json = "{ \"$timestamp\" : { \"t\" : 1234, \"i\" : 1 } }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.TIMESTAMP, bsonReader.readBsonType()); - assertEquals(new BsonTimestamp(1234, 1), bsonReader.readTimestamp()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.TIMESTAMP, bsonReader.readBsonType()); + assertEquals(new BsonTimestamp(1234, 1), bsonReader.readTimestamp()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testTimestampStrictWithOutOfOrderFields() { String json = "{ \"$timestamp\" : { \"i\" : 1, \"t\" : 1234 } }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.TIMESTAMP, bsonReader.readBsonType()); - assertEquals(new BsonTimestamp(1234, 1), bsonReader.readTimestamp()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.TIMESTAMP, bsonReader.readBsonType()); + assertEquals(new BsonTimestamp(1234, 1), bsonReader.readTimestamp()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testTimestampShell() { String json = "Timestamp(1234, 1)"; - bsonReader = new JsonReader(json); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.TIMESTAMP, bsonReader.readBsonType()); + assertEquals(new BsonTimestamp(1234, 1), bsonReader.readTimestamp()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); + } - assertEquals(BsonType.TIMESTAMP, bsonReader.readBsonType()); - assertEquals(new BsonTimestamp(1234, 1), bsonReader.readTimestamp()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + /** + * Test a $timestamp extended json with unquoted keys + */ + @Test + public void testTimestampStrictWithUnquotedKeys() { + String json = "{$timestamp : { 
t : 1234, i : 1 }}"; + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.TIMESTAMP, bsonReader.readBsonType()); + assertEquals(new BsonTimestamp(1234, 1), bsonReader.readTimestamp()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testUndefined() { String json = "undefined"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.UNDEFINED, bsonReader.readBsonType()); - bsonReader.readUndefined(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.UNDEFINED, bsonReader.readBsonType()); + bsonReader.readUndefined(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testUndefinedExtended() { String json = "{ \"$undefined\" : true }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.UNDEFINED, bsonReader.readBsonType()); - bsonReader.readUndefined(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.UNDEFINED, bsonReader.readBsonType()); + bsonReader.readUndefined(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } - @Test(expected = IllegalStateException.class) + @Test public void testClosedState() { - bsonReader = new JsonReader(""); + AbstractBsonReader bsonReader = new JsonReader(""); bsonReader.close(); - bsonReader.readBinaryData(); + assertThrows(IllegalStateException.class, () -> bsonReader.readBinaryData()); } - @Test(expected = JsonParseException.class) + @Test public void testEndOfFile0() { String json = "{"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - bsonReader.readStartDocument(); - bsonReader.readBsonType(); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + 
bsonReader.readStartDocument(); + bsonReader.readBsonType(); + return null; + }, JsonParseException.class); } - @Test(expected = JsonParseException.class) + @Test public void testEndOfFile1() { String json = "{ test : "; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); - bsonReader.readStartDocument(); - bsonReader.readBsonType(); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DOCUMENT, bsonReader.readBsonType()); + bsonReader.readStartDocument(); + bsonReader.readBsonType(); + return null; + }, JsonParseException.class); } @Test public void testLegacyBinary() { String json = "{ \"$binary\" : \"AQID\", \"$type\" : \"0\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(BsonBinarySubType.BINARY.getValue(), binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.BINARY.getValue(), binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testLegacyBinaryWithNumericType() { String json = "{ \"$binary\" : \"AQID\", \"$type\" : 0 }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(BsonBinarySubType.BINARY.getValue(), binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + 
BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.BINARY.getValue(), binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testLegacyUserDefinedBinary() { String json = "{ \"$binary\" : \"AQID\", \"$type\" : \"80\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testLegacyUserDefinedBinaryWithKeyOrderReversed() { String json = "{ \"$type\" : \"80\", \"$binary\" : \"AQID\" }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); 
} @Test public void testLegacyUserDefinedBinaryWithNumericType() { String json = "{ \"$binary\" : \"AQID\", \"$type\" : 128 }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testCanonicalExtendedJsonBinary() { String json = "{ \"$binary\" : { \"base64\" : \"AQID\", \"subType\" : \"80\" } }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testCanonicalExtendedJsonBinaryWithKeysReversed() { String json = "{ \"$binary\" : { \"subType\" : \"80\", \"base64\" : \"AQID\" } }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - 
BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } - @Test(expected = JsonParseException.class) + @Test public void testCanonicalExtendedJsonBinaryWithIncorrectFirstKey() { String json = "{ \"$binary\" : { \"badKey\" : \"80\", \"base64\" : \"AQID\" } }"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + return null; + }, JsonParseException.class); + } + + @Test + public void testUuid() { + String json = "{ \"$uuid\" : \"b5f21e0c-2a0d-42d6-ad03-d827008d8ab6\"}}"; + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.UUID_STANDARD.getValue(), binary.getType()); + assertArrayEquals(new byte[]{-75, -14, 30, 12, 42, 13, 66, -42, -83, 3, -40, 39, 0, -115, -118, -74}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); + } + + // testing that JsonReader uses internal UuidStringValidator, as UUID.fromString accepts this UUID + @Test + public void testInvalidUuid() { + // first hyphen out of place + String json = "{ \"$uuid\" : \"73ff-d26444b-34c6-990e8e-7d1dfc035d4\"}}"; + assertThrows(JsonParseException.class, () -> + testStringAndStream(json, bsonReader 
-> { + bsonReader.readBinaryData(); + return null; + })); + } + + @Test + public void testUuidConstructor() { + String json = "UUID(\"b5f21e0c-2a0d-42d6-ad03-d827008d8ab6\")"; + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.UUID_STANDARD.getValue(), binary.getType()); + assertArrayEquals(new byte[]{-75, -14, 30, 12, 42, 13, 66, -42, -83, 3, -40, 39, 0, -115, -118, -74}, binary.getData()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testInfinity() { String json = "{ \"value\" : Infinity }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals(BsonType.DOUBLE, bsonReader.readBsonType()); - bsonReader.readName(); - assertEquals(Double.POSITIVE_INFINITY, bsonReader.readDouble(), 0.0001); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals(BsonType.DOUBLE, bsonReader.readBsonType()); + bsonReader.readName(); + assertEquals(Double.POSITIVE_INFINITY, bsonReader.readDouble(), 0.0001); + return null; + }); } @Test public void testNaN() { String json = "{ \"value\" : NaN }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals(BsonType.DOUBLE, bsonReader.readBsonType()); - bsonReader.readName(); - assertEquals(Double.NaN, bsonReader.readDouble(), 0.0001); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals(BsonType.DOUBLE, bsonReader.readBsonType()); + bsonReader.readName(); + assertEquals(Double.NaN, bsonReader.readDouble(), 0.0001); + return null; + }); } @Test public void testBinData() { String json = "{ \"a\" : BinData(3, AQID) }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - 
assertEquals(3, binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(3, binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testBinDataUserDefined() { String json = "{ \"a\" : BinData(128, AQID) }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(BsonBinarySubType.USER_DEFINED.getValue(), binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testBinDataWithNew() { String json = "{ \"a\" : new BinData(3, AQID) }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(3, binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); - bsonReader.readEndDocument(); - 
assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(3, binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3}, binary.getData()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testBinDataQuoted() { String json = "{ \"a\" : BinData(3, \"AQIDBA==\") }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - assertEquals(BsonType.BINARY, bsonReader.readBsonType()); - BsonBinary binary = bsonReader.readBinaryData(); - assertEquals(3, binary.getType()); - assertArrayEquals(new byte[]{1, 2, 3, 4}, binary.getData()); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + assertEquals(BsonType.BINARY, bsonReader.readBsonType()); + BsonBinary binary = bsonReader.readBinaryData(); + assertEquals(3, binary.getType()); + assertArrayEquals(new byte[]{1, 2, 3, 4}, binary.getData()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateWithNumbers() { String json = "new Date(1988, 06, 13 , 22 , 1)"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(584834460000L, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(584834460000L, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeConstructorWithNew() 
{ String json = "new Date(\"Sat Jul 13 2013 11:10:05 UTC\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertEquals(1373713805000L, bsonReader.readDateTime()); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertEquals(1373713805000L, bsonReader.readDateTime()); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testEmptyDateTimeConstructorWithNew() { long currentTime = new Date().getTime(); String json = "new Date()"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertTrue(bsonReader.readDateTime() >= currentTime); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertTrue(bsonReader.readDateTime() >= currentTime); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeWithOutNew() { long currentTime = currentTimeWithoutMillis(); String json = "Date()"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.STRING, bsonReader.readBsonType()); - assertTrue(dateStringToTime(bsonReader.readString()) >= currentTime); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.STRING, bsonReader.readBsonType()); + assertTrue(dateStringToTime(bsonReader.readString()) >= currentTime); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDateTimeWithOutNewContainingJunk() { long currentTime = currentTimeWithoutMillis(); String json = "Date({ok: 1}, 1234)"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.STRING, 
bsonReader.readBsonType()); - assertTrue(dateStringToTime(bsonReader.readString()) >= currentTime); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.STRING, bsonReader.readBsonType()); + assertTrue(dateStringToTime(bsonReader.readString()) >= currentTime); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testEmptyISODateTimeConstructorWithNew() { long currentTime = new Date().getTime(); String json = "new ISODate()"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertTrue(bsonReader.readDateTime() >= currentTime); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertTrue(bsonReader.readDateTime() >= currentTime); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testEmptyISODateTimeConstructor() { long currentTime = new Date().getTime(); String json = "ISODate()"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); - assertTrue(bsonReader.readDateTime() >= currentTime); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DATE_TIME, bsonReader.readBsonType()); + assertTrue(bsonReader.readDateTime() >= currentTime); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testRegExp() { String json = "RegExp(\"abc\",\"im\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); - BsonRegularExpression regularExpression = bsonReader.readRegularExpression(); - assertEquals("abc", regularExpression.getPattern()); - assertEquals("im", 
regularExpression.getOptions()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); + BsonRegularExpression regularExpression = bsonReader.readRegularExpression(); + assertEquals("abc", regularExpression.getPattern()); + assertEquals("im", regularExpression.getOptions()); + return null; + }); } @Test public void testRegExpWithNew() { String json = "new RegExp(\"abc\",\"im\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); - BsonRegularExpression regularExpression = bsonReader.readRegularExpression(); - assertEquals("abc", regularExpression.getPattern()); - assertEquals("im", regularExpression.getOptions()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.REGULAR_EXPRESSION, bsonReader.readBsonType()); + BsonRegularExpression regularExpression = bsonReader.readRegularExpression(); + assertEquals("abc", regularExpression.getPattern()); + assertEquals("im", regularExpression.getOptions()); + return null; + }); } @Test public void testSkip() { String json = "{ \"a\" : 2 }"; - bsonReader = new JsonReader(json); - bsonReader.readStartDocument(); - bsonReader.readBsonType(); - bsonReader.skipName(); - bsonReader.skipValue(); - assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); - bsonReader.readEndDocument(); - assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + bsonReader.readBsonType(); + bsonReader.skipName(); + bsonReader.skipValue(); + assertEquals(BsonType.END_OF_DOCUMENT, bsonReader.readBsonType()); + bsonReader.readEndDocument(); + assertEquals(AbstractBsonReader.State.DONE, bsonReader.getState()); + return null; + }); } @Test public void testDBPointer() { String json = "DBPointer(\"b\",\"5209296cd6c4e38cf96fffdc\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DB_POINTER, 
bsonReader.readBsonType()); - BsonDbPointer dbPointer = bsonReader.readDBPointer(); - assertEquals("b", dbPointer.getNamespace()); - assertEquals(new ObjectId("5209296cd6c4e38cf96fffdc"), dbPointer.getId()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DB_POINTER, bsonReader.readBsonType()); + BsonDbPointer dbPointer = bsonReader.readDBPointer(); + assertEquals("b", dbPointer.getNamespace()); + assertEquals(new ObjectId("5209296cd6c4e38cf96fffdc"), dbPointer.getId()); + return null; + }); } @Test public void testDBPointerWithNew() { String json = "new DBPointer(\"b\",\"5209296cd6c4e38cf96fffdc\")"; - bsonReader = new JsonReader(json); - assertEquals(BsonType.DB_POINTER, bsonReader.readBsonType()); - BsonDbPointer dbPointer = bsonReader.readDBPointer(); - assertEquals("b", dbPointer.getNamespace()); - assertEquals(new ObjectId("5209296cd6c4e38cf96fffdc"), dbPointer.getId()); + testStringAndStream(json, bsonReader -> { + assertEquals(BsonType.DB_POINTER, bsonReader.readBsonType()); + BsonDbPointer dbPointer = bsonReader.readDBPointer(); + assertEquals("b", dbPointer.getNamespace()); + assertEquals(new ObjectId("5209296cd6c4e38cf96fffdc"), dbPointer.getId()); + return null; + }); + } + + @Test + public void testMultipleMarks() { + String json = "{a : { b : 1 }}"; + testStringAndStream(json, bsonReader -> { + bsonReader.readStartDocument(); + BsonReaderMark markOne = bsonReader.getMark(); + bsonReader.readName("a"); + bsonReader.readStartDocument(); + BsonReaderMark markTwo = bsonReader.getMark(); + bsonReader.readName("b"); + bsonReader.readInt32(); + bsonReader.readEndDocument(); + markTwo.reset(); + bsonReader.readName("b"); + markOne.reset(); + bsonReader.readName("a"); + return null; + }); + } + + @Test + public void testTwoDocuments() { + Reader reader = new BufferedReader(new InputStreamReader(new ByteArrayInputStream("{a : 1}{b : 1}".getBytes()))); + + JsonReader jsonReader = new JsonReader(reader); + jsonReader.readStartDocument(); + 
jsonReader.readName("a"); + jsonReader.readInt32(); + jsonReader.readEndDocument(); + + jsonReader = new JsonReader(reader); + jsonReader.readStartDocument(); + jsonReader.readName("b"); + jsonReader.readInt32(); + jsonReader.readEndDocument(); + } + + private void testStringAndStream(final String json, final Function testFunc, + final Class exClass) { + try { + testFunc.apply(new JsonReader(json)); + } catch (Exception e) { + if (exClass == null) { + throw e; + } + assertEquals(exClass, e.getClass()); + } + try { + testFunc.apply(new JsonReader(new InputStreamReader(new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)), + StandardCharsets.UTF_8))); + } catch (Exception e) { + if (exClass == null) { + throw e; + } + assertEquals(exClass, e.getClass()); + } + } + + private void testStringAndStream(final String json, final Function testFunc) { + testStringAndStream(json, testFunc, null); } private long dateStringToTime(final String date) { diff --git a/bson/src/test/unit/org/bson/json/JsonScannerTest.java b/bson/src/test/unit/org/bson/json/JsonScannerTest.java index 0b48c5a2c99..cf0647b08de 100644 --- a/bson/src/test/unit/org/bson/json/JsonScannerTest.java +++ b/bson/src/test/unit/org/bson/json/JsonScannerTest.java @@ -17,16 +17,17 @@ package org.bson.json; import org.bson.BsonRegularExpression; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; public class JsonScannerTest { @Test public void testEndOfFile() { String json = "\t "; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.END_OF_FILE, token.getType()); @@ -36,7 +37,7 @@ public void testEndOfFile() { @Test public void testBeginObject() { String json = "\t {x"; - 
JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.BEGIN_OBJECT, token.getType()); @@ -47,7 +48,7 @@ public void testBeginObject() { @Test public void testEndObject() { String json = "\t }x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.END_OBJECT, token.getType()); @@ -58,7 +59,7 @@ public void testEndObject() { @Test public void testBeginArray() { String json = "\t [x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.BEGIN_ARRAY, token.getType()); @@ -69,7 +70,7 @@ public void testBeginArray() { @Test public void testEndArray() { String json = "\t ]x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.END_ARRAY, token.getType()); @@ -80,7 +81,7 @@ public void testEndArray() { @Test public void testParentheses() { String json = "\t (jj)x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.LEFT_PAREN, token.getType()); @@ -95,7 +96,7 @@ public void testParentheses() { @Test public void testNameSeparator() { String json = "\t :x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.COLON, token.getType()); @@ -106,7 +107,7 @@ public void 
testNameSeparator() { @Test public void testValueSeparator() { String json = "\t ,x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.COMMA, token.getType()); @@ -117,7 +118,7 @@ public void testValueSeparator() { @Test public void testEmptyString() { String json = "\t \"\"x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.STRING, token.getType()); @@ -128,7 +129,7 @@ public void testEmptyString() { @Test public void test1CharacterString() { String json = "\t \"1\"x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.STRING, token.getType()); @@ -139,7 +140,7 @@ public void test1CharacterString() { @Test public void test2CharacterString() { String json = "\t \"12\"x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.STRING, token.getType()); @@ -150,7 +151,7 @@ public void test2CharacterString() { @Test public void test3CharacterString() { String json = "\t \"123\"x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.STRING, token.getType()); @@ -161,7 +162,7 @@ public void test3CharacterString() { @Test public void testEscapeSequences() { String json = "\t \"x\\\"\\\\\\/\\b\\f\\n\\r\\t\\u0030y\"x"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new 
JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.STRING, token.getType()); @@ -173,7 +174,7 @@ public void testEscapeSequences() { @Test public void testTrue() { String json = "\t true,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.UNQUOTED_STRING, token.getType()); @@ -184,7 +185,7 @@ public void testTrue() { @Test public void testMinusInfinity() { String json = "\t -Infinity]"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -195,7 +196,7 @@ public void testMinusInfinity() { @Test public void testFalse() { String json = "\t false,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.UNQUOTED_STRING, token.getType()); @@ -206,7 +207,7 @@ public void testFalse() { @Test public void testNull() { String json = "\t null,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.UNQUOTED_STRING, token.getType()); @@ -217,7 +218,7 @@ public void testNull() { @Test public void testUndefined() { String json = "\t undefined,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.UNQUOTED_STRING, token.getType()); @@ -228,7 +229,7 @@ public void testUndefined() { @Test public void 
testUnquotedStringWithSeparator() { String json = "\t name123:1"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.UNQUOTED_STRING, token.getType()); @@ -239,7 +240,7 @@ public void testUnquotedStringWithSeparator() { @Test public void testUnquotedString() { String json = "name123"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.UNQUOTED_STRING, token.getType()); @@ -250,7 +251,7 @@ public void testUnquotedString() { @Test public void testZero() { String json = "\t 0,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.INT32, token.getType()); @@ -261,7 +262,7 @@ public void testZero() { @Test public void testMinusZero() { String json = "\t -0,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.INT32, token.getType()); @@ -272,7 +273,7 @@ public void testMinusZero() { @Test public void testOne() { String json = "\t 1,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.INT32, token.getType()); @@ -283,7 +284,7 @@ public void testOne() { @Test public void testMinusOne() { String json = "\t -1,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); 
assertEquals(JsonTokenType.INT32, token.getType()); @@ -294,7 +295,7 @@ public void testMinusOne() { @Test public void testTwelve() { String json = "\t 12,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.INT32, token.getType()); @@ -305,7 +306,7 @@ public void testTwelve() { @Test public void testMinusTwelve() { String json = "\t -12,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.INT32, token.getType()); @@ -316,7 +317,7 @@ public void testMinusTwelve() { @Test public void testZeroPointZero() { String json = "\t 0.0,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -327,7 +328,7 @@ public void testZeroPointZero() { @Test public void testMinusZeroPointZero() { String json = "\t -0.0,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -338,7 +339,7 @@ public void testMinusZeroPointZero() { @Test public void testZeroExponentOne() { String json = "\t 0e1,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -349,7 +350,7 @@ public void testZeroExponentOne() { @Test public void testMinusZeroExponentOne() { String json = "\t -0e1,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new 
JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -360,7 +361,7 @@ public void testMinusZeroExponentOne() { @Test public void testZeroExponentMinusOne() { String json = "\t 0e-1,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -371,7 +372,7 @@ public void testZeroExponentMinusOne() { @Test public void testMinusZeroExponentMinusOne() { String json = "\t -0e-1,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -382,7 +383,7 @@ public void testMinusZeroExponentMinusOne() { @Test public void testOnePointTwo() { String json = "\t 1.2,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -393,7 +394,7 @@ public void testOnePointTwo() { @Test public void testMinusOnePointTwo() { String json = "\t -1.2,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -404,7 +405,7 @@ public void testMinusOnePointTwo() { @Test public void testOneExponentTwelve() { String json = "\t 1e12,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -415,7 +416,7 @@ 
public void testOneExponentTwelve() { @Test public void testMinusZeroExponentTwelve() { String json = "\t -1e12,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -426,7 +427,7 @@ public void testMinusZeroExponentTwelve() { @Test public void testOneExponentMinuesTwelve() { String json = "\t 1e-12,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -437,7 +438,7 @@ public void testOneExponentMinuesTwelve() { @Test public void testMinusZeroExponentMinusTwelve() { String json = "\t -1e-12,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.DOUBLE, token.getType()); @@ -448,7 +449,7 @@ public void testMinusZeroExponentMinusTwelve() { @Test public void testRegularExpressionEmpty() { String json = "\t //,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.REGULAR_EXPRESSION, token.getType()); @@ -464,7 +465,7 @@ public void testRegularExpressionEmpty() { public void testRegularExpressionPattern() { String json = "\t /pattern/,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.REGULAR_EXPRESSION, token.getType()); @@ -475,7 +476,7 @@ public void testRegularExpressionPattern() { @Test public void testRegularExpressionPatternAndOptions() { 
String json = "\t /pattern/im,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.REGULAR_EXPRESSION, token.getType()); @@ -489,7 +490,7 @@ public void testRegularExpressionPatternAndOptions() { @Test public void testRegularExpressionPatternAndEscapeSequence() { String json = "\t /patte\\.n/,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); JsonToken token = scanner.nextToken(); assertEquals(JsonTokenType.REGULAR_EXPRESSION, token.getType()); @@ -497,41 +498,41 @@ public void testRegularExpressionPatternAndEscapeSequence() { assertEquals(',', buffer.read()); } - @Test(expected = JsonParseException.class) + @Test public void testInvalidRegularExpression() { String json = "\t /pattern/nsk,"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); - scanner.nextToken(); + assertThrows(JsonParseException.class, () -> scanner.nextToken()); } - @Test(expected = JsonParseException.class) + @Test public void testInvalidRegularExpressionNoEnd() { String json = "/b"; - JsonBuffer buffer = new JsonBuffer(json); + JsonBuffer buffer = new JsonStringBuffer(json); JsonScanner scanner = new JsonScanner(buffer); - scanner.nextToken(); + assertThrows(JsonParseException.class, () ->scanner.nextToken()); } - @Test(expected = JsonParseException.class) + @Test public void testInvalidInput() { String json = "\t &&"; JsonScanner scanner = new JsonScanner(json); - scanner.nextToken(); + assertThrows(JsonParseException.class, () -> scanner.nextToken()); } - @Test(expected = JsonParseException.class) + @Test public void testInvalidNumber() { String json = "\t 123a]"; JsonScanner scanner = new JsonScanner(json); - scanner.nextToken(); + 
assertThrows(JsonParseException.class, () -> scanner.nextToken()); } - @Test(expected = JsonParseException.class) + @Test public void testInvalidInfinity() { String json = "\t -Infinnity]"; JsonScanner scanner = new JsonScanner(json); - scanner.nextToken(); + assertThrows(JsonParseException.class, () -> scanner.nextToken()); } } diff --git a/bson/src/test/unit/org/bson/json/JsonStreamBufferTest.java b/bson/src/test/unit/org/bson/json/JsonStreamBufferTest.java new file mode 100644 index 00000000000..c8bdfb42449 --- /dev/null +++ b/bson/src/test/unit/org/bson/json/JsonStreamBufferTest.java @@ -0,0 +1,107 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.json; + +import org.junit.jupiter.api.Test; + +import java.io.ByteArrayInputStream; +import java.io.InputStreamReader; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +public class JsonStreamBufferTest { + + @Test + public void testRead() { + JsonStreamBuffer buffer = new JsonStreamBuffer(new InputStreamReader(new ByteArrayInputStream("ABC".getBytes()))); + assertEquals('A', buffer.read()); + assertEquals('B', buffer.read()); + assertEquals('C', buffer.read()); + assertEquals(-1, buffer.read()); + } + + @Test + public void testUnRead() { + JsonStreamBuffer buffer = new JsonStreamBuffer(new InputStreamReader(new ByteArrayInputStream("A".getBytes()))); + buffer.unread(buffer.read()); + assertEquals('A', buffer.read()); + assertEquals(-1, buffer.read()); + } + + @Test + public void testPosition() { + JsonStreamBuffer buffer = new JsonStreamBuffer(new InputStreamReader(new ByteArrayInputStream("ABC".getBytes()))); + + buffer.read(); + buffer.read(); + assertEquals(2, buffer.getPosition()); + } + + @Test + public void testEOFCheck() { + JsonStreamBuffer buffer = new JsonStreamBuffer(new InputStreamReader(new ByteArrayInputStream("".getBytes()))); + + buffer.read(); + assertThrows(JsonParseException.class, () -> buffer.read()); + } + + @Test + public void testMarkAndReset() { + JsonStreamBuffer buffer = + new JsonStreamBuffer(new InputStreamReader(new ByteArrayInputStream("ABCDEFGHIJKLMNOPQRSTUPWXYZ".getBytes())), 4); + + int pos = buffer.mark(); + assertEquals(0, pos); + assertEquals('A', buffer.read()); + + buffer.reset(pos); + assertEquals('A', buffer.read()); + + pos = buffer.mark(); + buffer.discard(pos); + assertEquals('B', buffer.read()); + + pos = buffer.mark(); + assertEquals(2, pos); + + buffer.read(); + buffer.mark(); + + buffer.read(); + buffer.mark(); + + buffer.reset(pos + 1); + assertEquals(pos + 1, buffer.getPosition()); + assertEquals('D', 
buffer.read()); + + pos = buffer.mark(); + buffer.read(); + buffer.read(); + buffer.read(); + buffer.read(); + buffer.read(); + + buffer.reset(pos); + assertEquals('E', buffer.read()); + assertEquals('F', buffer.read()); + assertEquals('G', buffer.read()); + assertEquals('H', buffer.read()); + assertEquals('I', buffer.read()); + assertEquals('J', buffer.read()); + } +} diff --git a/bson/src/test/unit/org/bson/json/JsonStringBufferTest.java b/bson/src/test/unit/org/bson/json/JsonStringBufferTest.java new file mode 100644 index 00000000000..058a27cd4d0 --- /dev/null +++ b/bson/src/test/unit/org/bson/json/JsonStringBufferTest.java @@ -0,0 +1,59 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.json; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +public class JsonStringBufferTest { + + @Test + public void testRead() { + JsonBuffer buffer = new JsonStringBuffer("ABC"); + assertEquals('A', buffer.read()); + assertEquals('B', buffer.read()); + assertEquals('C', buffer.read()); + assertEquals(-1, buffer.read()); + } + + @Test + public void testUnRead() { + JsonStringBuffer buffer = new JsonStringBuffer("A"); + buffer.unread(buffer.read()); + assertEquals('A', buffer.read()); + assertEquals(-1, buffer.read()); + } + + @Test + public void testPosition() { + JsonStringBuffer buffer = new JsonStringBuffer("ABC"); + + buffer.read(); + buffer.read(); + assertEquals(2, buffer.getPosition()); + } + + @Test + public void testEOFCheck() { + JsonStringBuffer buffer = new JsonStringBuffer(""); + + buffer.read(); + assertThrows(JsonParseException.class, () -> buffer.read()); + } +} diff --git a/bson/src/test/unit/org/bson/json/JsonWriterSettingsSpecification.groovy b/bson/src/test/unit/org/bson/json/JsonWriterSettingsSpecification.groovy index f09c134e902..b2568b0bae6 100644 --- a/bson/src/test/unit/org/bson/json/JsonWriterSettingsSpecification.groovy +++ b/bson/src/test/unit/org/bson/json/JsonWriterSettingsSpecification.groovy @@ -22,14 +22,7 @@ class JsonWriterSettingsSpecification extends Specification { def 'test defaults'() { when: - def settings = new JsonWriterSettings(); - - then: - !settings.isIndent() - settings.getOutputMode() == JsonMode.STRICT - - when: - settings = JsonWriterSettings.builder().build(); + def settings = JsonWriterSettings.builder().build() then: !settings.isIndent() @@ -75,52 +68,7 @@ class JsonWriterSettingsSpecification extends Specification { settings.getMaxLength() == 100 } - def 'test constructors'() { - when: - def settings = new JsonWriterSettings() - - then: - !settings.isIndent() - 
settings.getOutputMode() == JsonMode.STRICT - settings.getMaxLength() == 0 - - when: - settings = new JsonWriterSettings(JsonMode.SHELL) - - then: - settings.getOutputMode() == JsonMode.SHELL - - when: - settings = new JsonWriterSettings(true) - - then: - settings.isIndent() - - when: - settings = new JsonWriterSettings(JsonMode.SHELL, true) - - then: - settings.getOutputMode() == JsonMode.SHELL - settings.isIndent() - - when: - settings = new JsonWriterSettings(JsonMode.SHELL, '\t') - - then: - settings.getOutputMode() == JsonMode.SHELL - settings.isIndent() - settings.getIndentCharacters() == '\t' - - when: - settings = new JsonWriterSettings(JsonMode.SHELL, '\t', '\r') - - then: - settings.getOutputMode() == JsonMode.SHELL - settings.isIndent() - settings.getIndentCharacters() == '\t' - settings.getNewLineCharacters() == '\r' - } - + @SuppressWarnings('deprecation') def 'should use legacy extended json converters for strict mode'() { when: def settings = JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build() diff --git a/bson/src/test/unit/org/bson/json/JsonWriterSpecification.groovy b/bson/src/test/unit/org/bson/json/JsonWriterSpecification.groovy index c1dafb2306a..8cb8ecbea4b 100644 --- a/bson/src/test/unit/org/bson/json/JsonWriterSpecification.groovy +++ b/bson/src/test/unit/org/bson/json/JsonWriterSpecification.groovy @@ -29,8 +29,8 @@ import static org.bson.BsonHelper.documentWithValuesOfEveryType class JsonWriterSpecification extends Specification { - def stringWriter = new StringWriter(); - def writer = new JsonWriter(stringWriter) + def stringWriter = new StringWriter() + def writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()) def jsonWithValuesOfEveryType = documentWithValuesOfEveryType().toJson(JsonWriterSettings.builder().build()) def 'should pipe all types'() { diff --git a/bson/src/test/unit/org/bson/json/JsonWriterTest.java b/bson/src/test/unit/org/bson/json/JsonWriterTest.java index 
5a19d6ce52c..00777a3dfec 100644 --- a/bson/src/test/unit/org/bson/json/JsonWriterTest.java +++ b/bson/src/test/unit/org/bson/json/JsonWriterTest.java @@ -23,22 +23,22 @@ import org.bson.BsonTimestamp; import org.bson.types.Decimal128; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.StringWriter; import java.util.Date; import java.util.List; import static java.util.Arrays.asList; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; -@SuppressWarnings("unchecked") public class JsonWriterTest { private StringWriter stringWriter; private JsonWriter writer; - @Before + @BeforeEach public void before() { stringWriter = new StringWriter(); writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().build()); @@ -54,130 +54,147 @@ private static class TestData { } } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowExceptionForBooleanWhenWritingBeforeStartingDocument() { - writer.writeBoolean("b1", true); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeBoolean("b1", true)); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowExceptionForNameWhenWritingBeforeStartingDocument() { - writer.writeName("name"); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeName("name")); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowExceptionForStringWhenStateIsValue() { writer.writeStartDocument(); - writer.writeString("SomeString"); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeString("SomeString")); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowExceptionWhenEndingAnArrayWhenStateIsValue() { 
writer.writeStartDocument(); - writer.writeEndArray(); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeEndArray()); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowExceptionWhenWritingASecondName() { writer.writeStartDocument(); writer.writeName("f1"); - writer.writeName("i2"); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeName("i2")); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowExceptionWhenEndingADocumentBeforeValueIsWritten() { writer.writeStartDocument(); writer.writeName("f1"); - writer.writeEndDocument(); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeEndDocument()); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnExceptionWhenTryingToWriteASecondValue() { + writer.writeStartDocument(); + writer.writeName("f1"); writer.writeDouble(100); - writer.writeString("i2"); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeString("i2")); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnExceptionWhenTryingToWriteJavaScript() { + writer.writeStartDocument(); + writer.writeName("f1"); writer.writeDouble(100); - writer.writeJavaScript("var i"); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeJavaScript("var i")); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnExceptionWhenWritingANameInAnArray() { writer.writeStartDocument(); writer.writeStartArray("f2"); - writer.writeName("i3"); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeName("i3")); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnExceptionWhenEndingDocumentInTheMiddleOfWritingAnArray() { writer.writeStartDocument(); writer.writeStartArray("f2"); - writer.writeEndDocument(); + 
assertThrows(BsonInvalidOperationException.class, () -> + writer.writeEndDocument()); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnExceptionWhenEndingAnArrayInASubDocument() { writer.writeStartDocument(); writer.writeStartArray("f2"); writer.writeStartDocument(); - writer.writeEndArray(); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeEndArray()); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnExceptionWhenWritingANameInAnArrayEvenWhenSubDocumentExistsInArray() { writer.writeStartDocument(); writer.writeStartArray("f2"); writer.writeStartDocument(); writer.writeEndDocument(); - writer.writeName("i3"); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeName("i3")); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnExceptionWhenAttemptingToEndAnArrayThatWasNotStarted() { writer.writeStartDocument(); writer.writeStartArray("f2"); writer.writeEndArray(); - writer.writeEndArray(); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeEndArray()); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnErrorIfTryingToWriteNameIntoAJavascriptScope() { writer.writeStartDocument(); writer.writeJavaScriptWithScope("js1", "var i = 1"); - - writer.writeName("b1"); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeName("b1")); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnErrorIfTryingToWriteValueIntoAJavascriptScope() { writer.writeStartDocument(); writer.writeJavaScriptWithScope("js1", "var i = 1"); - - writer.writeBinaryData(new BsonBinary(new byte[]{0, 0, 1, 0})); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeBinaryData(new BsonBinary(new byte[]{0, 0, 1, 0}))); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void 
shouldThrowAnErrorIfTryingToWriteArrayIntoAJavascriptScope() { writer.writeStartDocument(); writer.writeJavaScriptWithScope("js1", "var i = 1"); - - writer.writeStartArray(); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeStartArray()); } - @Test(expected = BsonInvalidOperationException.class) + @Test public void shouldThrowAnErrorIfTryingToWriteEndDocumentIntoAJavascriptScope() { writer.writeStartDocument(); writer.writeJavaScriptWithScope("js1", "var i = 1"); - - writer.writeEndDocument(); + assertThrows(BsonInvalidOperationException.class, () -> + writer.writeEndDocument()); } @Test public void testEmptyDocument() { writer.writeStartDocument(); writer.writeEndDocument(); - String expected = "{ }"; + String expected = "{}"; assertEquals(expected, stringWriter.toString()); } @@ -187,7 +204,7 @@ public void testSingleElementDocument() { writer.writeName("s"); writer.writeString("str"); writer.writeEndDocument(); - String expected = "{ \"s\" : \"str\" }"; + String expected = "{\"s\": \"str\"}"; assertEquals(expected, stringWriter.toString()); } @@ -199,7 +216,7 @@ public void testTwoElementDocument() { writer.writeName("d"); writer.writeString("str2"); writer.writeEndDocument(); - String expected = "{ \"s\" : \"str\", \"d\" : \"str2\" }"; + String expected = "{\"s\": \"str\", \"d\": \"str2\"}"; assertEquals(expected, stringWriter.toString()); } @@ -215,7 +232,7 @@ public void testNestedDocument() { writer.writeEndDocument(); writer.writeEndDocument(); writer.writeEndDocument(); - String expected = "{ \"doc\" : { \"doc\" : { \"s\" : \"str\" } } }"; + String expected = "{\"doc\": {\"doc\": {\"s\": \"str\"}}}"; assertEquals(expected, stringWriter.toString()); } @@ -224,7 +241,7 @@ public void testSingleString() { writer.writeStartDocument(); writer.writeString("abc", "xyz"); writer.writeEndDocument(); - String expected = "{ \"abc\" : \"xyz\" }"; + String expected = "{\"abc\": \"xyz\"}"; assertEquals(expected, stringWriter.toString()); } @@ 
-234,80 +251,79 @@ public void testBoolean() { writer.writeStartDocument(); writer.writeBoolean("abc", true); writer.writeEndDocument(); - String expected = "{ \"abc\" : true }"; + String expected = "{\"abc\": true}"; assertEquals(expected, stringWriter.toString()); } @Test public void testDouble() { - List> tests = asList(new TestData(0.0, "0.0"), new TestData(0.0005, "5.0E-4"), - new TestData(0.5, "0.5"), new TestData(1.0, "1.0"), - new TestData(1.5, "1.5"), new TestData(1.5E+40, "1.5E40"), - new TestData(1.5E-40, "1.5E-40"), - new TestData(1234567890.1234568E+123, "1.2345678901234568E132"), - new TestData(Double.MAX_VALUE, "1.7976931348623157E308"), - new TestData(Double.MIN_VALUE, "4.9E-324"), - - new TestData(-0.0005, "-5.0E-4"), - new TestData(-0.5, "-0.5"), - new TestData(-1.0, "-1.0"), - new TestData(-1.5, "-1.5"), - new TestData(-1.5E+40, "-1.5E40"), - new TestData(-1.5E-40, "-1.5E-40"), - new TestData(-1234567890.1234568E+123, "-1.2345678901234568E132"), - - new TestData(Double.NaN, "NaN"), - new TestData(Double.NEGATIVE_INFINITY, "-Infinity"), - new TestData(Double.POSITIVE_INFINITY, "Infinity")); + List> tests = asList(new TestData<>(0.0, "0.0"), new TestData<>(0.0005, "5.0E-4"), + new TestData<>(0.5, "0.5"), new TestData<>(1.0, "1.0"), + new TestData<>(1.5, "1.5"), new TestData<>(1.5E+40, "1.5E40"), + new TestData<>(1.5E-40, "1.5E-40"), + new TestData<>(1234567890.1234568E+123, "1.2345678901234568E132"), + new TestData<>(Double.MAX_VALUE, "1.7976931348623157E308"), + new TestData<>(Double.MIN_VALUE, "4.9E-324"), + + new TestData<>(-0.0005, "-5.0E-4"), + new TestData<>(-0.5, "-0.5"), + new TestData<>(-1.0, "-1.0"), + new TestData<>(-1.5, "-1.5"), + new TestData<>(-1.5E+40, "-1.5E40"), + new TestData<>(-1.5E-40, "-1.5E-40"), + new TestData<>(-1234567890.1234568E+123, "-1.2345678901234568E132"), + + new TestData<>(Double.NaN, "NaN"), + new TestData<>(Double.NEGATIVE_INFINITY, "-Infinity"), + new TestData<>(Double.POSITIVE_INFINITY, "Infinity")); for 
(final TestData cur : tests) { stringWriter = new StringWriter(); writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.EXTENDED).build()); writer.writeStartDocument(); writer.writeDouble("d", cur.value); writer.writeEndDocument(); - String expected = "{ \"d\" : { \"$numberDouble\" : \"" + cur.expected + "\" } }"; + String expected = "{\"d\": {\"$numberDouble\": \"" + cur.expected + "\"}}"; assertEquals(expected, stringWriter.toString()); } } @Test public void testInt64Shell() { - List> tests = asList(new TestData(Long.MIN_VALUE, "NumberLong(\"-9223372036854775808\")"), - new TestData(Integer.MIN_VALUE - 1L, "NumberLong(\"-2147483649\")"), - new TestData(Integer.MIN_VALUE + 0L, "NumberLong(-2147483648)"), - new TestData(0L, "NumberLong(0)"), - new TestData(Integer.MAX_VALUE + 0L, "NumberLong(2147483647)"), - new TestData(Integer.MAX_VALUE + 1L, "NumberLong(\"2147483648\")"), - new TestData(Long.MAX_VALUE, "NumberLong(\"9223372036854775807\")")); + List> tests = asList(new TestData<>(Long.MIN_VALUE, "NumberLong(\"-9223372036854775808\")"), + new TestData<>(Integer.MIN_VALUE - 1L, "NumberLong(\"-2147483649\")"), + new TestData<>((long) Integer.MIN_VALUE, "NumberLong(-2147483648)"), + new TestData<>(0L, "NumberLong(0)"), + new TestData<>((long) Integer.MAX_VALUE, "NumberLong(2147483647)"), + new TestData<>(Integer.MAX_VALUE + 1L, "NumberLong(\"2147483648\")"), + new TestData<>(Long.MAX_VALUE, "NumberLong(\"9223372036854775807\")")); for (final TestData cur : tests) { stringWriter = new StringWriter(); writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.SHELL).build()); writer.writeStartDocument(); writer.writeInt64("l", cur.value); writer.writeEndDocument(); - String expected = "{ \"l\" : " + cur.expected + " }"; + String expected = "{\"l\": " + cur.expected + "}"; assertEquals(expected, stringWriter.toString()); } } @Test - @SuppressWarnings("deprecation") - public void testInt64Strict() { - List> 
tests = asList(new TestData(Long.MIN_VALUE, "-9223372036854775808"), - new TestData(Integer.MIN_VALUE - 1L, "-2147483649"), - new TestData(Integer.MIN_VALUE - 0L, "-2147483648"), - new TestData(0L, "0"), - new TestData(Integer.MAX_VALUE + 0L, "2147483647"), - new TestData(Integer.MAX_VALUE + 1L, "2147483648"), - new TestData(Long.MAX_VALUE, "9223372036854775807")); + public void testInt64Relaxed() { + List> tests = asList(new TestData<>(Long.MIN_VALUE, "-9223372036854775808"), + new TestData<>(Integer.MIN_VALUE - 1L, "-2147483649"), + new TestData<>((long) Integer.MIN_VALUE, "-2147483648"), + new TestData<>(0L, "0"), + new TestData<>((long) Integer.MAX_VALUE, "2147483647"), + new TestData<>(Integer.MAX_VALUE + 1L, "2147483648"), + new TestData<>(Long.MAX_VALUE, "9223372036854775807")); for (final TestData cur : tests) { stringWriter = new StringWriter(); - writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()); + writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); writer.writeStartDocument(); writer.writeInt64("l", cur.value); writer.writeEndDocument(); - String expected = "{ \"l\" : { \"$numberLong\" : \"" + cur.expected + "\" } }"; + String expected = "{\"l\": " + cur.expected + "}"; assertEquals(expected, stringWriter.toString()); } } @@ -315,8 +331,8 @@ public void testInt64Strict() { @Test public void testDecimal128SShell() { List> tests = asList( - new TestData(Decimal128.parse("1.0"), "1.0"), - new TestData(Decimal128.POSITIVE_INFINITY, Decimal128.POSITIVE_INFINITY.toString())); + new TestData<>(Decimal128.parse("1.0"), "1.0"), + new TestData<>(Decimal128.POSITIVE_INFINITY, Decimal128.POSITIVE_INFINITY.toString())); for (final TestData cur : tests) { @@ -325,26 +341,25 @@ public void testDecimal128SShell() { writer.writeStartDocument(); writer.writeDecimal128("d", cur.value); writer.writeEndDocument(); - String expected = "{ \"d\" : NumberDecimal(\"" + 
cur.expected + "\") }"; + String expected = "{\"d\": NumberDecimal(\"" + cur.expected + "\")}"; assertEquals(expected, stringWriter.toString()); } } @Test - @SuppressWarnings("deprecation") - public void testDecimal128Strict() { + public void testDecimal128Relaxed() { List> tests = asList( - new TestData(Decimal128.parse("1.0"), "1.0"), - new TestData(Decimal128.POSITIVE_INFINITY, Decimal128.POSITIVE_INFINITY.toString())); + new TestData<>(Decimal128.parse("1.0"), "1.0"), + new TestData<>(Decimal128.POSITIVE_INFINITY, Decimal128.POSITIVE_INFINITY.toString())); for (final TestData cur : tests) { stringWriter = new StringWriter(); - writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()); + writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); writer.writeStartDocument(); writer.writeDecimal128("d", cur.value); writer.writeEndDocument(); - String expected = "{ \"d\" : { \"$numberDecimal\" : \"" + cur.expected + "\" } }"; + String expected = "{\"d\": {\"$numberDecimal\": \"" + cur.expected + "\"}}"; assertEquals(expected, stringWriter.toString()); } } @@ -358,89 +373,87 @@ public void testArray() { writer.writeInt32(3); writer.writeEndArray(); writer.writeEndDocument(); - String expected = "{ \"array\" : [1, 2, 3] }"; + String expected = "{\"array\": [1, 2, 3]}"; assertEquals(expected, stringWriter.toString()); } @Test - @SuppressWarnings("deprecation") - public void testBinaryStrict() { - List> tests = asList(new TestData(new BsonBinary(new byte[0]), - "{ \"$binary\" : \"\", " - + "\"$type\" : \"00\" }"), - new TestData(new BsonBinary(new byte[]{1}), - "{ \"$binary\" : \"AQ==\", " - + "\"$type\" : \"00\" }"), - new TestData(new BsonBinary(new byte[]{1, 2}), - "{ \"$binary\" : \"AQI=\", " - + "\"$type\" : \"00\" }"), - new TestData(new BsonBinary(new byte[]{1, 2, 3}), - "{ \"$binary\" : \"AQID\", " - + "\"$type\" : \"00\" }"), - new TestData(new BsonBinary((byte) 0x80, 
new byte[]{1, 2, 3}), - "{ \"$binary\" : \"AQID\", " - + "\"$type\" : \"80\" }")); + public void testBinaryRelaxed() { + List> tests = asList(new TestData<>(new BsonBinary(new byte[0]), + "{\"$binary\": {\"base64\": \"\", " + + "\"subType\": \"00\"}}"), + new TestData<>(new BsonBinary(new byte[]{1}), + "{\"$binary\": {\"base64\": \"AQ==\", " + + "\"subType\": \"00\"}}"), + new TestData<>(new BsonBinary(new byte[]{1, 2}), + "{\"$binary\": {\"base64\": \"AQI=\", " + + "\"subType\": \"00\"}}"), + new TestData<>(new BsonBinary(new byte[]{1, 2, 3}), + "{\"$binary\": {\"base64\": \"AQID\", " + + "\"subType\": \"00\"}}"), + new TestData<>(new BsonBinary((byte) 0x80, new byte[]{1, 2, 3}), + "{\"$binary\": {\"base64\": \"AQID\", " + + "\"subType\": \"80\"}}")); for (final TestData cur : tests) { stringWriter = new StringWriter(); - writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()); + writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); writer.writeStartDocument(); writer.writeBinaryData("binary", cur.value); writer.writeEndDocument(); - String expected = "{ \"binary\" : " + cur.expected + " }"; + String expected = "{\"binary\": " + cur.expected + "}"; assertEquals(expected, stringWriter.toString()); } } @Test public void testBinaryShell() { - List> tests = asList(new TestData(new BsonBinary(new byte[0]), "new BinData(0, \"\")"), - new TestData(new BsonBinary(new byte[]{1}), "new BinData(0, \"AQ==\")"), - new TestData(new BsonBinary(new byte[]{1, 2}), "new BinData(0, \"AQI=\")"), - new TestData(new BsonBinary(new byte[]{1, 2, 3}), "new BinData(0, \"AQID\")"), - new TestData(new BsonBinary((byte) 0x80, new byte[]{1, 2, 3}), - "new BinData(128, \"AQID\")")); + List> tests = asList(new TestData<>(new BsonBinary(new byte[0]), "new BinData(0, \"\")"), + new TestData<>(new BsonBinary(new byte[]{1}), "new BinData(0, \"AQ==\")"), + new TestData<>(new BsonBinary(new byte[]{1, 
2}), "new BinData(0, \"AQI=\")"), + new TestData<>(new BsonBinary(new byte[]{1, 2, 3}), "new BinData(0, \"AQID\")"), + new TestData<>(new BsonBinary((byte) 0x80, new byte[]{1, 2, 3}), + "new BinData(128, \"AQID\")")); for (final TestData cur : tests) { stringWriter = new StringWriter(); writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.SHELL).build()); writer.writeStartDocument(); writer.writeBinaryData("binary", cur.value); writer.writeEndDocument(); - String expected = "{ \"binary\" : " + cur.expected + " }"; + String expected = "{\"binary\": " + cur.expected + "}"; assertEquals(expected, stringWriter.toString()); } } @Test - @SuppressWarnings("deprecation") - public void testDateTimeStrict() { - List> tests = asList(new TestData(new Date(0), "{ \"$date\" : 0 }"), - new TestData(new Date(Long.MAX_VALUE), "{ \"$date\" : 9223372036854775807 }"), - new TestData(new Date(Long.MIN_VALUE), "{ \"$date\" : -9223372036854775808 }")); + public void testDateTimeRelaxed() { + List> tests = asList(new TestData<>(new Date(0), "{\"$date\": \"1970-01-01T00:00:00Z\"}"), + new TestData<>(new Date(Long.MAX_VALUE), "{\"$date\": {\"$numberLong\": \"9223372036854775807\"}}"), + new TestData<>(new Date(Long.MIN_VALUE), "{\"$date\": {\"$numberLong\": \"-9223372036854775808\"}}")); for (final TestData cur : tests) { stringWriter = new StringWriter(); - writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()); + writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); writer.writeStartDocument(); writer.writeDateTime("date", cur.value.getTime()); writer.writeEndDocument(); - String expected = "{ \"date\" : " + cur.expected + " }"; + String expected = "{\"date\": " + cur.expected + "}"; assertEquals(expected, stringWriter.toString()); } } @Test public void testDateTimeShell() { - List> tests = asList(new TestData(new Date(0), 
"ISODate(\"1970-01-01T00:00:00.000Z\")"), - new TestData(new Date(1), "ISODate(\"1970-01-01T00:00:00.001Z\")"), - new TestData(new Date(-1), "ISODate(\"1969-12-31T23:59:59.999Z\")"), - new TestData(new Date(Long.MAX_VALUE), "new Date(9223372036854775807)"), - new TestData(new Date(Long.MIN_VALUE), "new Date(-9223372036854775808)")); + List> tests = asList(new TestData<>(new Date(0), "ISODate(\"1970-01-01T00:00:00.000Z\")"), + new TestData<>(new Date(1), "ISODate(\"1970-01-01T00:00:00.001Z\")"), + new TestData<>(new Date(-1), "ISODate(\"1969-12-31T23:59:59.999Z\")"), + new TestData<>(new Date(Long.MAX_VALUE), "new Date(9223372036854775807)"), + new TestData<>(new Date(Long.MIN_VALUE), "new Date(-9223372036854775808)")); for (final TestData cur : tests) { stringWriter = new StringWriter(); writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.SHELL).build()); writer.writeStartDocument(); writer.writeDateTime("date", cur.value.getTime()); writer.writeEndDocument(); - String expected = "{ \"date\" : " + cur.expected + " }"; + String expected = "{\"date\": " + cur.expected + "}"; assertEquals(expected, stringWriter.toString()); } } @@ -450,7 +463,7 @@ public void testJavaScript() { writer.writeStartDocument(); writer.writeJavaScript("f", "function f() { return 1; }"); writer.writeEndDocument(); - String expected = "{ \"f\" : { \"$code\" : \"function f() { return 1; }\" } }"; + String expected = "{\"f\": {\"$code\": \"function f() { return 1; }\"}}"; assertEquals(expected, stringWriter.toString()); } @@ -463,7 +476,7 @@ public void testJavaScriptWithScope() { writer.writeEndDocument(); writer.writeEndDocument(); String expected = - "{ \"f\" : { \"$code\" : \"function f() { return n; }\", " + "\"$scope\" : { \"n\" : 1 } } }"; + "{\"f\": {\"$code\": \"function f() { return n; }\", " + "\"$scope\": {\"n\": 1}}}"; assertEquals(expected, stringWriter.toString()); } @@ -472,7 +485,7 @@ public void testMaxKeyStrict() { 
writer.writeStartDocument(); writer.writeMaxKey("maxkey"); writer.writeEndDocument(); - String expected = "{ \"maxkey\" : { \"$maxKey\" : 1 } }"; + String expected = "{\"maxkey\": {\"$maxKey\": 1}}"; assertEquals(expected, stringWriter.toString()); } @@ -481,7 +494,7 @@ public void testMinKeyStrict() { writer.writeStartDocument(); writer.writeMinKey("minkey"); writer.writeEndDocument(); - String expected = "{ \"minkey\" : { \"$minKey\" : 1 } }"; + String expected = "{\"minkey\": {\"$minKey\": 1}}"; assertEquals(expected, stringWriter.toString()); } @@ -492,7 +505,7 @@ public void testMaxKeyShell() { writer.writeStartDocument(); writer.writeMaxKey("maxkey"); writer.writeEndDocument(); - String expected = "{ \"maxkey\" : MaxKey }"; + String expected = "{\"maxkey\": MaxKey}"; assertEquals(expected, stringWriter.toString()); } @@ -502,7 +515,7 @@ public void testMinKeyShell() { writer.writeStartDocument(); writer.writeMinKey("minkey"); writer.writeEndDocument(); - String expected = "{ \"minkey\" : MinKey }"; + String expected = "{\"minkey\": MinKey}"; assertEquals(expected, stringWriter.toString()); } @@ -511,7 +524,7 @@ public void testNull() { writer.writeStartDocument(); writer.writeNull("null"); writer.writeEndDocument(); - String expected = "{ \"null\" : null }"; + String expected = "{\"null\": null}"; assertEquals(expected, stringWriter.toString()); } @@ -524,7 +537,7 @@ public void testObjectIdShell() { writer.writeObjectId("_id", objectId); writer.writeEndDocument(); - String expected = "{ \"_id\" : ObjectId(\"4d0ce088e447ad08b4721a37\") }"; + String expected = "{\"_id\": ObjectId(\"4d0ce088e447ad08b4721a37\")}"; assertEquals(expected, stringWriter.toString()); } @@ -536,72 +549,61 @@ public void testObjectIdStrict() { writer.writeObjectId("_id", objectId); writer.writeEndDocument(); - String expected = "{ \"_id\" : { \"$oid\" : \"4d0ce088e447ad08b4721a37\" } }"; + String expected = "{\"_id\": {\"$oid\": \"4d0ce088e447ad08b4721a37\"}}"; assertEquals(expected, 
stringWriter.toString()); } @Test public void testRegularExpressionShell() { List> tests; - tests = asList(new TestData(new BsonRegularExpression(""), "/(?:)/"), - new TestData(new BsonRegularExpression("a"), "/a/"), - new TestData(new BsonRegularExpression("a/b"), "/a\\/b/"), - new TestData(new BsonRegularExpression("a\\b"), "/a\\b/"), - new TestData(new BsonRegularExpression("a", "i"), "/a/i"), - new TestData(new BsonRegularExpression("a", "m"), "/a/m"), - new TestData(new BsonRegularExpression("a", "x"), "/a/x"), - new TestData(new BsonRegularExpression("a", "s"), "/a/s"), - new TestData(new BsonRegularExpression("a", "imxs"), "/a/imsx")); + tests = asList(new TestData<>(new BsonRegularExpression(""), "/(?:)/"), + new TestData<>(new BsonRegularExpression("a"), "/a/"), + new TestData<>(new BsonRegularExpression("a/b"), "/a\\/b/"), + new TestData<>(new BsonRegularExpression("a\\b"), "/a\\b/"), + new TestData<>(new BsonRegularExpression("a", "i"), "/a/i"), + new TestData<>(new BsonRegularExpression("a", "m"), "/a/m"), + new TestData<>(new BsonRegularExpression("a", "x"), "/a/x"), + new TestData<>(new BsonRegularExpression("a", "s"), "/a/s"), + new TestData<>(new BsonRegularExpression("a", "imxs"), "/a/imsx")); for (final TestData cur : tests) { stringWriter = new StringWriter(); writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.SHELL).build()); writer.writeStartDocument(); writer.writeRegularExpression("regex", cur.value); writer.writeEndDocument(); - String expected = "{ \"regex\" : " + cur.expected + " }"; + String expected = "{\"regex\": " + cur.expected + "}"; assertEquals(expected, stringWriter.toString()); } } @Test - @SuppressWarnings("deprecation") - public void testRegularExpressionStrict() { + public void testRegularExpressionRelaxed() { List> tests; - tests = asList(new TestData(new BsonRegularExpression(""), "{ \"$regex\" : \"\", " - + "\"$options\" : \"\" " - + "}"), - new TestData(new BsonRegularExpression("a"), "{ 
\"$regex\" : \"a\"," - + " \"$options\" : \"\" " - + "}"), - new TestData(new BsonRegularExpression("a/b"), "{ \"$regex\" : " - + "\"a/b\", " - + "\"$options\" : \"\" " - + "}"), - new TestData(new BsonRegularExpression("a\\b"), "{ \"$regex\" : " - + "\"a\\\\b\", " - + "\"$options\" : \"\" " - + "}"), - new TestData(new BsonRegularExpression("a", "i"), "{ \"$regex\" : \"a\"," - + " \"$options\" : \"i\"" - + " }"), - new TestData(new BsonRegularExpression("a", "m"), "{ \"$regex\" : \"a\"," - + " \"$options\" : \"m\"" - + " }"), - new TestData(new BsonRegularExpression("a", "x"), "{ \"$regex\" : \"a\"," - + " \"$options\" : \"x\"" - + " }"), - new TestData(new BsonRegularExpression("a", "s"), "{ \"$regex\" : \"a\"," - + " \"$options\" : \"s\"" - + " }"), - new TestData(new BsonRegularExpression("a", "imxs"), - "{ \"$regex\" : \"a\"," + " \"$options\" : \"imsx\" }")); + tests = asList(new TestData<>(new BsonRegularExpression(""), + "{\"$regularExpression\": {\"pattern\": \"\", \"options\": \"\"}}"), + new TestData<>(new BsonRegularExpression("a"), + "{\"$regularExpression\": {\"pattern\": \"a\", \"options\": \"\"}}"), + new TestData<>(new BsonRegularExpression("a/b"), + "{\"$regularExpression\": {\"pattern\": \"a/b\", \"options\": \"\"}}"), + new TestData<>(new BsonRegularExpression("a\\b"), + "{\"$regularExpression\": {\"pattern\": \"a\\\\b\", \"options\": \"\"}}"), + new TestData<>(new BsonRegularExpression("a", "i"), + "{\"$regularExpression\": {\"pattern\": \"a\", \"options\": \"i\"}}"), + new TestData<>(new BsonRegularExpression("a", "m"), + "{\"$regularExpression\": {\"pattern\": \"a\", \"options\": \"m\"}}"), + new TestData<>(new BsonRegularExpression("a", "x"), + "{\"$regularExpression\": {\"pattern\": \"a\", \"options\": \"x\"}}"), + new TestData<>(new BsonRegularExpression("a", "s"), + "{\"$regularExpression\": {\"pattern\": \"a\", \"options\": \"s\"}}"), + new TestData<>(new BsonRegularExpression("a", "imxs"), + "{\"$regularExpression\": {\"pattern\": 
\"a\", \"options\": \"imsx\"}}")); for (final TestData cur : tests) { stringWriter = new StringWriter(); - writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()); + writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); writer.writeStartDocument(); writer.writeRegularExpression("regex", cur.value); writer.writeEndDocument(); - String expected = "{ \"regex\" : " + cur.expected + " }"; + String expected = "{\"regex\": " + cur.expected + "}"; assertEquals(expected, stringWriter.toString()); } } @@ -611,7 +613,7 @@ public void testSymbol() { writer.writeStartDocument(); writer.writeSymbol("symbol", "name"); writer.writeEndDocument(); - String expected = "{ \"symbol\" : { \"$symbol\" : \"name\" } }"; + String expected = "{\"symbol\": {\"$symbol\": \"name\"}}"; assertEquals(expected, stringWriter.toString()); } @@ -621,7 +623,7 @@ public void testTimestampStrict() { writer.writeStartDocument(); writer.writeTimestamp("timestamp", new BsonTimestamp(1000, 1)); writer.writeEndDocument(); - String expected = "{ \"timestamp\" : { \"$timestamp\" : { \"t\" : 1000, \"i\" : 1 } } }"; + String expected = "{\"timestamp\": {\"$timestamp\": {\"t\": 1000, \"i\": 1}}}"; assertEquals(expected, stringWriter.toString()); } @@ -631,18 +633,17 @@ public void testTimestampShell() { writer.writeStartDocument(); writer.writeTimestamp("timestamp", new BsonTimestamp(1000, 1)); writer.writeEndDocument(); - String expected = "{ \"timestamp\" : Timestamp(1000, 1) }"; + String expected = "{\"timestamp\": Timestamp(1000, 1)}"; assertEquals(expected, stringWriter.toString()); } @Test - @SuppressWarnings("deprecation") - public void testUndefinedStrict() { - writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build()); + public void testUndefinedRelaxed() { + writer = new JsonWriter(stringWriter, JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build()); 
writer.writeStartDocument(); writer.writeUndefined("undefined"); writer.writeEndDocument(); - String expected = "{ \"undefined\" : { \"$undefined\" : true } }"; + String expected = "{\"undefined\": {\"$undefined\": true}}"; assertEquals(expected, stringWriter.toString()); } @@ -652,7 +653,7 @@ public void testUndefinedShell() { writer.writeStartDocument(); writer.writeUndefined("undefined"); writer.writeEndDocument(); - String expected = "{ \"undefined\" : undefined }"; + String expected = "{\"undefined\": undefined}"; assertEquals(expected, stringWriter.toString()); } @@ -661,7 +662,7 @@ public void testDBPointer() { writer.writeStartDocument(); writer.writeDBPointer("dbPointer", new BsonDbPointer("my.test", new ObjectId("4d0ce088e447ad08b4721a37"))); writer.writeEndDocument(); - String expected = "{ \"dbPointer\" : { \"$ref\" : \"my.test\", \"$id\" : { \"$oid\" : \"4d0ce088e447ad08b4721a37\" } } }"; + String expected = "{\"dbPointer\": {\"$ref\": \"my.test\", \"$id\": {\"$oid\": \"4d0ce088e447ad08b4721a37\"}}}"; assertEquals(expected, stringWriter.toString()); } } diff --git a/bson/src/test/unit/org/bson/json/StrictCharacterStreamJsonWriterSpecification.groovy b/bson/src/test/unit/org/bson/json/StrictCharacterStreamJsonWriterSpecification.groovy index 0196e55cfba..8a3d16036f3 100644 --- a/bson/src/test/unit/org/bson/json/StrictCharacterStreamJsonWriterSpecification.groovy +++ b/bson/src/test/unit/org/bson/json/StrictCharacterStreamJsonWriterSpecification.groovy @@ -37,7 +37,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ }' + stringWriter.toString() == '{}' } def 'should write empty array'() { @@ -56,18 +56,17 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ "n" : null }' + stringWriter.toString() == '{"n": null}' } def 'should write boolean'() { when: writer.writeStartObject() 
writer.writeBoolean('b1', true) - writer.writeBoolean('b2', false) writer.writeEndObject() then: - stringWriter.toString() == '{ "b1" : true, "b2" : false }' + stringWriter.toString() == '{"b1": true}' } def 'should write number'() { @@ -77,7 +76,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ "n" : 42 }' + stringWriter.toString() == '{"n": 42}' } def 'should write string'() { @@ -87,7 +86,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ "n" : "42" }' + stringWriter.toString() == '{"n": "42"}' } def 'should write unquoted string'() { @@ -97,7 +96,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ "s" : NumberDecimal("42.0") }' + stringWriter.toString() == '{"s": NumberDecimal("42.0")}' } def 'should write document'() { @@ -108,7 +107,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ "d" : { } }' + stringWriter.toString() == '{"d": {}}' } def 'should write array'() { @@ -119,7 +118,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ "a" : [] }' + stringWriter.toString() == '{"a": []}' } def 'should write array of values'() { @@ -133,7 +132,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ "a" : [1, null, "str"] }' + stringWriter.toString() == '{"a": [1, null, "str"]}' } def 'should write strings'() { @@ -143,7 +142,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == '{ "str" : ' + expected + ' }' + stringWriter.toString() == '{"str": ' + 
expected + '}' where: value | expected @@ -167,6 +166,17 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { '\u0080\u0081\u0082' | '"\\u0080\\u0081\\u0082"' } + def 'should write two object elements'() { + when: + writer.writeStartObject() + writer.writeBoolean('b1', true) + writer.writeBoolean('b2', false) + writer.writeEndObject() + + then: + stringWriter.toString() == '{"b1": true, "b2": false}' + } + def 'should indent one element'() { given: writer = new StrictCharacterStreamJsonWriter(stringWriter, StrictCharacterStreamJsonWriterSettings.builder().indent(true).build()) @@ -177,7 +187,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == format('{%n "name" : "value"%n}') + stringWriter.toString() == format('{%n "name": "value"%n}') } def 'should indent one element with indent and newline characters'() { @@ -194,7 +204,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == format('{\r\t"name" : "value"\r}') + stringWriter.toString() == format('{\r\t"name": "value"\r}') } def 'should indent two elements'() { @@ -208,7 +218,43 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == format('{%n "a" : "x",%n "b" : "y"%n}') + stringWriter.toString() == format('{%n "a": "x",%n "b": "y"%n}') + } + + def 'should indent two array elements'() { + given: + writer = new StrictCharacterStreamJsonWriter(stringWriter, StrictCharacterStreamJsonWriterSettings.builder().indent(true).build()) + + when: + writer.writeStartObject() + writer.writeStartArray('a') + writer.writeNull() + writer.writeNumber('4') + writer.writeEndArray() + writer.writeEndObject() + + then: + stringWriter.toString() == format('{%n "a": [%n null,%n 4%n ]%n}') + } + + def 'should indent two document elements'() { + given: + writer = 
new StrictCharacterStreamJsonWriter(stringWriter, StrictCharacterStreamJsonWriterSettings.builder().indent(true).build()) + + when: + writer.writeStartObject() + writer.writeStartArray('a') + writer.writeStartObject() + writer.writeNull('a') + writer.writeEndObject() + writer.writeStartObject() + writer.writeNull('a') + writer.writeEndObject() + writer.writeEndArray() + writer.writeEndObject() + + then: + stringWriter.toString() == format('{%n "a": [%n {%n "a": null%n },%n {%n "a": null%n }%n ]%n}') } def 'should indent embedded document'() { @@ -224,7 +270,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { writer.writeEndObject() then: - stringWriter.toString() == format('{%n "doc" : {%n "a" : 1,%n "b" : 2%n }%n}') + stringWriter.toString() == format('{%n "doc": {%n "a": 1,%n "b": 2%n }%n}') } def shouldThrowExceptionForBooleanWhenWritingBeforeStartingDocument() { @@ -360,6 +406,29 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { thrown(BsonInvalidOperationException) } + def shouldThrowAnExceptionWhenStartingAnObjectWhenDone() { + given: + writer.writeStartObject() + writer.writeEndObject() + + when: + writer.writeStartObject() + + then: + thrown(BsonInvalidOperationException) + } + + def shouldThrowAnExceptionWhenStartingAnObjectWhenNameIsExpected() { + given: + writer.writeStartObject() + + when: + writer.writeStartObject() + + then: + thrown(BsonInvalidOperationException) + } + def shouldThrowAnExceptionWhenAttemptingToEndAnArrayThatWasNotStarted() { given: writer.writeStartObject() @@ -480,7 +549,7 @@ class StrictCharacterStreamJsonWriterSpecification extends Specification { def shouldStopAtMaxLength() { given: - def fullJsonText = '{ "n" : null }' + def fullJsonText = '{"n": null}' writer = new StrictCharacterStreamJsonWriter(stringWriter, StrictCharacterStreamJsonWriterSettings.builder().maxLength(maxLength).build()) diff --git a/bson/src/test/unit/org/bson/json/UuidStringValidatorTest.java 
b/bson/src/test/unit/org/bson/json/UuidStringValidatorTest.java new file mode 100644 index 00000000000..e14abe66464 --- /dev/null +++ b/bson/src/test/unit/org/bson/json/UuidStringValidatorTest.java @@ -0,0 +1,103 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.json; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import static org.bson.json.UuidStringValidator.validate; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertThrows; + +public class UuidStringValidatorTest { + + @ParameterizedTest + @ValueSource(strings = { + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", + "cccccccc-cccc-cccc-cccc-cccccccccccc", + "dddddddd-dddd-dddd-dddd-dddddddddddd", + "eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee", + "ffffffff-ffff-ffff-ffff-ffffffffffff", + "AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA", + "BBBBBBBB-BBBB-BBBB-BBBB-BBBBBBBBBBBB", + "CCCCCCCC-CCCC-CCCC-CCCC-CCCCCCCCCCCC", + "DDDDDDDD-DDDD-DDDD-DDDD-DDDDDDDDDDDD", + "EEEEEEEE-EEEE-EEEE-EEEE-EEEEEEEEEEEE", + "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF", + "00000000-0000-0000-0000-000000000000", + "11111111-1111-1111-1111-111111111111", + "22222222-2222-2222-2222-222222222222", + "33333333-3333-3333-3333-333333333333", + "44444444-4444-4444-4444-444444444444", + 
"55555555-5555-5555-5555-555555555555", + "66666666-6666-6666-6666-666666666666", + "77777777-7777-7777-7777-777777777777", + "88888888-8888-8888-8888-888888888888", + "99999999-9999-9999-9999-999999999999"}) + public void testValidUuidStrings(final String uuidString) { + assertDoesNotThrow(() -> validate(uuidString)); + } + + @ParameterizedTest + @ValueSource(strings = { + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaaa", + "aaaaaaaa+aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa+aaaa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa+aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa+aaaaaaaaaaaa", + "`aaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "{aaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "@aaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "[aaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "/aaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + ":aaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "a:aaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "aa:aaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "aaa:aaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "aaaa:aaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "aaaaa:aa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "aaaaaa:a-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-:aaa-aaaa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-a:aa-aaaa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aa:a-aaaa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaa:-aaaa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-:aaa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-a:aa-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aa:a-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aaa:-aaaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-:aaa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-a:aa-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aa:a-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaa:-aaaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-:aaaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-a:aaaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-aa:aaaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-aaa:aaaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-aaaa:aaaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaa:aaaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaa:aaaaa", + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaa:aaaa", + 
"aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaa:aaa", + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaa:aa", + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaa:a", + "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaa:"}) + public void testInvalidUuidStrings(final String uuidString) { + assertThrows(IllegalArgumentException.class, () -> validate(uuidString)); + } +} diff --git a/bson/src/test/unit/org/bson/types/BSONBsonTimestampTest.java b/bson/src/test/unit/org/bson/types/BSONBsonTimestampTest.java index 28d864c9465..f2a210d1d3e 100644 --- a/bson/src/test/unit/org/bson/types/BSONBsonTimestampTest.java +++ b/bson/src/test/unit/org/bson/types/BSONBsonTimestampTest.java @@ -16,10 +16,10 @@ package org.bson.types; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class BSONBsonTimestampTest { diff --git a/bson/src/test/unit/org/bson/types/Decimal128Specification.groovy b/bson/src/test/unit/org/bson/types/Decimal128Specification.groovy deleted file mode 100644 index e94a4afee71..00000000000 --- a/bson/src/test/unit/org/bson/types/Decimal128Specification.groovy +++ /dev/null @@ -1,380 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bson.types - -import spock.lang.Specification - -import static org.bson.types.Decimal128.NEGATIVE_INFINITY -import static org.bson.types.Decimal128.NEGATIVE_NaN -import static org.bson.types.Decimal128.NEGATIVE_ZERO -import static org.bson.types.Decimal128.NaN -import static org.bson.types.Decimal128.POSITIVE_INFINITY -import static org.bson.types.Decimal128.POSITIVE_ZERO -import static org.bson.types.Decimal128.fromIEEE754BIDEncoding -import static org.bson.types.Decimal128.parse - -class Decimal128Specification extends Specification { - - def 'should have correct constants'() { - expect: - POSITIVE_ZERO == fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000000L) - NEGATIVE_ZERO == fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000000L) - POSITIVE_INFINITY == fromIEEE754BIDEncoding(0x7800000000000000L, 0x0000000000000000L) - NEGATIVE_INFINITY == fromIEEE754BIDEncoding(0xf800000000000000L, 0x0000000000000000L) - NaN == fromIEEE754BIDEncoding(0x7c00000000000000L, 0x0000000000000000L) - } - - def 'should construct from high and low'() { - given: - def decimal = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L) - - expect: - decimal.high == 0x3040000000000000L - decimal.low == 0x0000000000000001L - } - - def 'should construct from simple string'() { - expect: - parse('0') == fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000000L) - parse('-0') == fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000000L) - parse('1') == fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L) - parse('-1') == fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000001L) - parse('12345678901234567') == fromIEEE754BIDEncoding(0x3040000000000000L, 0x002bdc545d6b4b87L) - parse('989898983458') == fromIEEE754BIDEncoding(0x3040000000000000L, 0x000000e67a93c822L) - parse('-12345678901234567') == fromIEEE754BIDEncoding(0xb040000000000000L, 0x002bdc545d6b4b87L) - parse('0.12345') == 
fromIEEE754BIDEncoding(0x3036000000000000L, 0x0000000000003039L) - parse('0.0012345') == fromIEEE754BIDEncoding(0x3032000000000000L, 0x0000000000003039L) - parse('00012345678901234567') == fromIEEE754BIDEncoding(0x3040000000000000L, 0x002bdc545d6b4b87L) - } - - def 'should round exactly'() { - expect: - parse('1.234567890123456789012345678901234') == parse('1.234567890123456789012345678901234') - parse('1.2345678901234567890123456789012340') == parse('1.234567890123456789012345678901234') - parse('1.23456789012345678901234567890123400') == parse('1.234567890123456789012345678901234') - parse('1.234567890123456789012345678901234000') == parse('1.234567890123456789012345678901234') - } - - def 'should clamp positive exponents'() { - expect: - parse('1E6112') == parse('10E6111') - parse('1E6113') == parse('100E6111') - parse('1E6143') == parse('100000000000000000000000000000000E+6111') - parse('1E6144') == parse('1000000000000000000000000000000000E+6111') - parse('11E6143') == parse('1100000000000000000000000000000000E+6111') - parse('0E8000') == parse('0E6111') - parse('0E2147483647') == parse('0E6111') - - parse('-1E6112') == parse('-10E6111') - parse('-1E6113') == parse('-100E6111') - parse('-1E6143') == parse('-100000000000000000000000000000000E+6111') - parse('-1E6144') == parse('-1000000000000000000000000000000000E+6111') - parse('-11E6143') == parse('-1100000000000000000000000000000000E+6111') - parse('-0E8000') == parse('-0E6111') - parse('-0E2147483647') == parse('-0E6111') - } - - def 'should clamp negative exponents'() { - expect: - parse('0E-8000') == parse('0E-6176') - parse('0E-2147483647') == parse('0E-6176') - parse('10E-6177') == parse('1E-6176') - parse('100E-6178') == parse('1E-6176') - parse('110E-6177') == parse('11E-6176') - - parse('-0E-8000') == parse('-0E-6176') - parse('-0E-2147483647') == parse('-0E-6176') - parse('-10E-6177') == parse('-1E-6176') - parse('-100E-6178') == parse('-1E-6176') - parse('-110E-6177') == parse('-11E-6176') - } - - 
def 'should construct from long'() { - expect: - new Decimal128(1L) == new Decimal128(new BigDecimal('1')) - new Decimal128(Long.MIN_VALUE) == new Decimal128(new BigDecimal(Long.MIN_VALUE)) - new Decimal128(Long.MAX_VALUE) == new Decimal128(new BigDecimal(Long.MAX_VALUE)) - } - - def 'should construct from large BigDecimal'() { - expect: - parse('12345689012345789012345') == fromIEEE754BIDEncoding(0x304000000000029dL, 0x42da3a76f9e0d979L) - parse('1234567890123456789012345678901234') == fromIEEE754BIDEncoding(0x30403cde6fff9732L, 0xde825cd07e96aff2L) - parse('9.999999999999999999999999999999999E+6144') == fromIEEE754BIDEncoding(0x5fffed09bead87c0L, 0x378d8e63ffffffffL) - parse('9.999999999999999999999999999999999E-6143') == fromIEEE754BIDEncoding(0x0001ed09bead87c0L, 0x378d8e63ffffffffL) - parse('5.192296858534827628530496329220095E+33') == fromIEEE754BIDEncoding(0x3040ffffffffffffL, 0xffffffffffffffffL) - } - - def 'should convert to simple BigDecimal'() { - expect: - fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000000L).bigDecimalValue() == new BigDecimal('0') - fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L).bigDecimalValue() == new BigDecimal('1') - fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000001L).bigDecimalValue() == new BigDecimal('-1') - fromIEEE754BIDEncoding(0x3040000000000000L, 0x002bdc545d6b4b87L).bigDecimalValue() == new BigDecimal('12345678901234567') - fromIEEE754BIDEncoding(0x3040000000000000L, 0x000000e67a93c822L).bigDecimalValue() == new BigDecimal('989898983458') - fromIEEE754BIDEncoding(0xb040000000000000L, 0x002bdc545d6b4b87L).bigDecimalValue() == new BigDecimal('-12345678901234567') - fromIEEE754BIDEncoding(0x3036000000000000L, 0x0000000000003039L).bigDecimalValue() == new BigDecimal('0.12345') - fromIEEE754BIDEncoding(0x3032000000000000L, 0x0000000000003039L).bigDecimalValue() == new BigDecimal('0.0012345') - fromIEEE754BIDEncoding(0x3040000000000000L, 0x002bdc545d6b4b87L).bigDecimalValue() == new 
BigDecimal('00012345678901234567') - } - - def 'should convert to large BigDecimal'() { - expect: - fromIEEE754BIDEncoding(0x304000000000029dL, 0x42da3a76f9e0d979L).bigDecimalValue() == - new BigDecimal('12345689012345789012345') - - fromIEEE754BIDEncoding(0x30403cde6fff9732L, 0xde825cd07e96aff2L).bigDecimalValue() == - new BigDecimal('1234567890123456789012345678901234') - - fromIEEE754BIDEncoding(0x5fffed09bead87c0L, 0x378d8e63ffffffffL).bigDecimalValue() == - new BigDecimal('9.999999999999999999999999999999999E+6144') - - fromIEEE754BIDEncoding(0x0001ed09bead87c0L, 0x378d8e63ffffffffL).bigDecimalValue() == - new BigDecimal('9.999999999999999999999999999999999E-6143') - - fromIEEE754BIDEncoding(0x3040ffffffffffffL, 0xffffffffffffffffL).bigDecimalValue() == - new BigDecimal('5.192296858534827628530496329220095E+33') - } - - def 'should convert invalid representations of 0 as BigDecimal 0'() { - expect: - fromIEEE754BIDEncoding(0x6C10000000000000, 0x0).bigDecimalValue() == new BigDecimal('0') - fromIEEE754BIDEncoding(0x6C11FFFFFFFFFFFF, 0xffffffffffffffffL).bigDecimalValue() == new BigDecimal('0E+3') - } - - def 'should detect infinity'() { - expect: - POSITIVE_INFINITY.isInfinite() - NEGATIVE_INFINITY.isInfinite() - !parse('0').isInfinite() - !parse('9.999999999999999999999999999999999E+6144').isInfinite() - !parse('9.999999999999999999999999999999999E-6143').isInfinite() - !POSITIVE_INFINITY.isFinite() - !NEGATIVE_INFINITY.isFinite() - parse('0').isFinite() - parse('9.999999999999999999999999999999999E+6144').isFinite() - parse('9.999999999999999999999999999999999E-6143').isFinite() - } - - def 'should detect NaN'() { - expect: - NaN.isNaN() - fromIEEE754BIDEncoding(0x7e00000000000000L, 0).isNaN() // SNaN - !POSITIVE_INFINITY.isNaN() - !NEGATIVE_INFINITY.isNaN() - !parse('0').isNaN() - !parse('9.999999999999999999999999999999999E+6144').isNaN() - !parse('9.999999999999999999999999999999999E-6143').isNaN() - } - - def 'should convert NaN to string'() { - expect: - 
NaN.toString() == 'NaN' - } - - def 'should convert NaN from string'() { - expect: - parse('NaN') == NaN - parse('nan') == NaN - parse('nAn') == NaN - parse('-NaN') == NEGATIVE_NaN - parse('-nan') == NEGATIVE_NaN - parse('-nAn') == NEGATIVE_NaN - } - - def 'should not convert NaN to BigDecimal'() { - when: - NaN.bigDecimalValue() - - then: - thrown(ArithmeticException) - } - - def 'should convert infinity to string'() { - expect: - POSITIVE_INFINITY.toString() == 'Infinity' - NEGATIVE_INFINITY.toString() == '-Infinity' - } - - def 'should convert infinity from string'() { - expect: - parse('Inf') == POSITIVE_INFINITY - parse('inf') == POSITIVE_INFINITY - parse('inF') == POSITIVE_INFINITY - parse('+Inf') == POSITIVE_INFINITY - parse('+inf') == POSITIVE_INFINITY - parse('+inF') == POSITIVE_INFINITY - parse('Infinity') == POSITIVE_INFINITY - parse('infinity') == POSITIVE_INFINITY - parse('infiniTy') == POSITIVE_INFINITY - parse('+Infinity') == POSITIVE_INFINITY - parse('+infinity') == POSITIVE_INFINITY - parse('+infiniTy') == POSITIVE_INFINITY - parse('-Inf') == NEGATIVE_INFINITY - parse('-inf') == NEGATIVE_INFINITY - parse('-inF') == NEGATIVE_INFINITY - parse('-Infinity') == NEGATIVE_INFINITY - parse('-infinity') == NEGATIVE_INFINITY - parse('-infiniTy') == NEGATIVE_INFINITY - } - - def 'should convert finite to string'() { - expect: - parse('0').toString() == '0' - parse('-0').toString() == '-0' - parse('0E10').toString() == '0E+10' - parse('-0E10').toString() == '-0E+10' - parse('1').toString() == '1' - parse('-1').toString() == '-1' - parse('-1.1').toString() == '-1.1' - - parse('123E-9').toString() == '1.23E-7' - parse('123E-8').toString() == '0.00000123' - parse('123E-7').toString() == '0.0000123' - parse('123E-6').toString() == '0.000123' - parse('123E-5').toString() == '0.00123' - parse('123E-4').toString() == '0.0123' - parse('123E-3').toString() == '0.123' - parse('123E-2').toString() == '1.23' - parse('123E-1').toString() == '12.3' - 
parse('123E0').toString() == '123' - parse('123E1').toString() == '1.23E+3' - - parse('1234E-7').toString() == '0.0001234' - parse('1234E-6').toString() == '0.001234' - - parse('1E6').toString() == '1E+6' - } - - def 'should convert invalid representations of 0 to string'() { - expect: - fromIEEE754BIDEncoding(0x6C10000000000000, 0x0).bigDecimalValue().toString() == '0' - fromIEEE754BIDEncoding(0x6C11FFFFFFFFFFFF, 0xffffffffffffffffL).toString() == '0E+3' - } - - - def 'test equals'() { - given: - def d1 = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L) - def d2 = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L) - def d3 = fromIEEE754BIDEncoding(0x3040000000000001L, 0x0000000000000001L) - def d4 = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000011L) - - expect: - d1.equals(d1) - d1.equals(d2) - !d1.equals(d3) - !d1.equals(d4) - !d1.equals(null) - !d1.equals(0L) - } - - def 'test hashCode'() { - expect: - fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L).hashCode() == 809500703 - } - - def 'should not convert infinity to BigDecimal'() { - when: - decimal.bigDecimalValue() - - then: - thrown(ArithmeticException) - - where: - decimal << [POSITIVE_INFINITY, NEGATIVE_INFINITY] - } - - def 'should not convert negative zero to BigDecimal'() { - when: - decimal.bigDecimalValue() - - then: - thrown(ArithmeticException) - - where: - decimal << [parse('-0'), parse('-0E+1'), parse('-0E-1')] - } - - def 'should not round inexactly'() { - when: - parse(val) - - then: - thrown(IllegalArgumentException) - - where: - val << [ - '12345678901234567890123456789012345E+6111', - '123456789012345678901234567890123456E+6111', - '1234567890123456789012345678901234567E+6111', - '12345678901234567890123456789012345E-6176', - '123456789012345678901234567890123456E-6176', - '1234567890123456789012345678901234567E-6176', - '-12345678901234567890123456789012345E+6111', - '-123456789012345678901234567890123456E+6111', - 
'-1234567890123456789012345678901234567E+6111', - '-12345678901234567890123456789012345E-6176', - '-123456789012345678901234567890123456E-6176', - '-1234567890123456789012345678901234567E-6176', - ] - } - - def 'should not clamp large exponents if no extra precision is available'() { - when: - parse(val) - - then: - thrown(IllegalArgumentException) - - where: - val << [ - '1234567890123456789012345678901234E+6112', - '1234567890123456789012345678901234E+6113', - '1234567890123456789012345678901234E+6114', - '-1234567890123456789012345678901234E+6112', - '-1234567890123456789012345678901234E+6113', - '-1234567890123456789012345678901234E+6114', - ] - } - - def 'should not clamp small exponents if no extra precision can be discarded'() { - when: - parse(val) - - then: - thrown(IllegalArgumentException) - - where: - val << [ - '1234567890123456789012345678901234E-6177', - '1234567890123456789012345678901234E-6178', - '1234567890123456789012345678901234E-6179', - '-1234567890123456789012345678901234E-6177', - '-1234567890123456789012345678901234E-6178', - '-1234567890123456789012345678901234E-6179', - ] - } - - def 'should throw IllegalArgumentException if BigDecimal is too large'() { - when: - new Decimal128(new BigDecimal('12345678901234567890123456789012345')) - - then: - thrown(IllegalArgumentException) - } -} diff --git a/bson/src/test/unit/org/bson/types/Decimal128Test.java b/bson/src/test/unit/org/bson/types/Decimal128Test.java new file mode 100644 index 00000000000..4d662aefb37 --- /dev/null +++ b/bson/src/test/unit/org/bson/types/Decimal128Test.java @@ -0,0 +1,603 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.bson.types; + +import org.junit.jupiter.api.Test; + +import java.math.BigDecimal; + +import static org.bson.types.Decimal128.NEGATIVE_INFINITY; +import static org.bson.types.Decimal128.NEGATIVE_NaN; +import static org.bson.types.Decimal128.NEGATIVE_ZERO; +import static org.bson.types.Decimal128.NaN; +import static org.bson.types.Decimal128.POSITIVE_INFINITY; +import static org.bson.types.Decimal128.POSITIVE_ZERO; +import static org.bson.types.Decimal128.fromIEEE754BIDEncoding; +import static org.bson.types.Decimal128.parse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +public class Decimal128Test { + + @Test + public void shouldHaveCorrectConstants() { + // expect + assertEquals(fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000000L), POSITIVE_ZERO); + assertEquals(fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000000L), NEGATIVE_ZERO); + assertEquals(fromIEEE754BIDEncoding(0x7800000000000000L, 0x0000000000000000L), POSITIVE_INFINITY); + assertEquals(fromIEEE754BIDEncoding(0xf800000000000000L, 0x0000000000000000L), NEGATIVE_INFINITY); + assertEquals(fromIEEE754BIDEncoding(0x7c00000000000000L, 0x0000000000000000L), NaN); + } + + @Test + public void shouldConstructFromHighAndLow() { + // given + 
Decimal128 val = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L); + + // then + assertEquals(0x3040000000000000L, val.getHigh()); + assertEquals(0x0000000000000001L, val.getLow()); + } + + @Test + public void shouldConstructFromSimpleString() { + // expect + assertEquals(fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000000L), parse("0")); + assertEquals(fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000000L), parse("-0")); + assertEquals(fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L), parse("1")); + assertEquals(fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000001L), parse("-1")); + assertEquals(fromIEEE754BIDEncoding(0x3040000000000000L, 0x002bdc545d6b4b87L), parse("12345678901234567")); + assertEquals(fromIEEE754BIDEncoding(0x3040000000000000L, 0x000000e67a93c822L), parse("989898983458")); + assertEquals(fromIEEE754BIDEncoding(0xb040000000000000L, 0x002bdc545d6b4b87L), parse("-12345678901234567")); + assertEquals(fromIEEE754BIDEncoding(0x3036000000000000L, 0x0000000000003039L), parse("0.12345")); + assertEquals(fromIEEE754BIDEncoding(0x3032000000000000L, 0x0000000000003039L), parse("0.0012345")); + assertEquals(fromIEEE754BIDEncoding(0x3040000000000000L, 0x002bdc545d6b4b87L), parse("00012345678901234567")); + } + + @Test + public void shouldRoundExactly() { + // expect + assertEquals(parse("1.234567890123456789012345678901234"), parse("1.234567890123456789012345678901234")); + assertEquals(parse("1.234567890123456789012345678901234"), parse("1.2345678901234567890123456789012340")); + assertEquals(parse("1.234567890123456789012345678901234"), parse("1.23456789012345678901234567890123400")); + assertEquals(parse("1.234567890123456789012345678901234"), parse("1.234567890123456789012345678901234000")); + } + + @Test + public void shouldClampPositiveExponents() { + // expect + assertEquals(parse("10E6111"), parse("1E6112")); + assertEquals(parse("100E6111"), parse("1E6113")); + 
assertEquals(parse("100000000000000000000000000000000E+6111"), parse("1E6143")); + assertEquals(parse("1000000000000000000000000000000000E+6111"), parse("1E6144")); + assertEquals(parse("1100000000000000000000000000000000E+6111"), parse("11E6143")); + assertEquals(parse("0E6111"), parse("0E8000")); + assertEquals(parse("0E6111"), parse("0E2147483647")); + + assertEquals(parse("-10E6111"), parse("-1E6112")); + assertEquals(parse("-100E6111"), parse("-1E6113")); + assertEquals(parse("-100000000000000000000000000000000E+6111"), parse("-1E6143")); + assertEquals(parse("-1000000000000000000000000000000000E+6111"), parse("-1E6144")); + assertEquals(parse("-1100000000000000000000000000000000E+6111"), parse("-11E6143")); + assertEquals(parse("-0E6111"), parse("-0E8000")); + assertEquals(parse("-0E6111"), parse("-0E2147483647")); + } + + @Test + public void shouldClampNegativeExponents() { + // expect + assertEquals(parse("0E-6176"), parse("0E-8000")); + assertEquals(parse("0E-6176"), parse("0E-2147483647")); + assertEquals(parse("1E-6176"), parse("10E-6177")); + assertEquals(parse("1E-6176"), parse("100E-6178")); + assertEquals(parse("11E-6176"), parse("110E-6177")); + + assertEquals(parse("-0E-6176"), parse("-0E-8000")); + assertEquals(parse("-0E-6176"), parse("-0E-2147483647")); + assertEquals(parse("-1E-6176"), parse("-10E-6177")); + assertEquals(parse("-1E-6176"), parse("-100E-6178")); + assertEquals(parse("-11E-6176"), parse("-110E-6177")); + } + + @Test + public void shouldConstructFromLong() { + // expect + assertEquals(new Decimal128(new BigDecimal("1")), new Decimal128(1L)); + assertEquals(new Decimal128(new BigDecimal(Long.MIN_VALUE)), new Decimal128(Long.MIN_VALUE)); + assertEquals(new Decimal128(new BigDecimal(Long.MAX_VALUE)), new Decimal128(Long.MAX_VALUE)); + } + + @Test + public void shouldConstructFromLargeBigDecimal() { + // expect + assertEquals(fromIEEE754BIDEncoding(0x304000000000029dL, 0x42da3a76f9e0d979L), parse("12345689012345789012345")); + 
assertEquals(fromIEEE754BIDEncoding(0x30403cde6fff9732L, 0xde825cd07e96aff2L), parse("1234567890123456789012345678901234")); + assertEquals(fromIEEE754BIDEncoding(0x5fffed09bead87c0L, 0x378d8e63ffffffffL), parse("9.999999999999999999999999999999999E+6144")); + assertEquals(fromIEEE754BIDEncoding(0x0001ed09bead87c0L, 0x378d8e63ffffffffL), parse("9.999999999999999999999999999999999E-6143")); + assertEquals(fromIEEE754BIDEncoding(0x3040ffffffffffffL, 0xffffffffffffffffL), parse("5.192296858534827628530496329220095E+33")); + } + + @Test + public void shouldConvertToSimpleBigDecimal() { + // expect + assertEquals(new BigDecimal("0"), fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000000L).bigDecimalValue()); + assertEquals(new BigDecimal("1"), fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L).bigDecimalValue()); + assertEquals(new BigDecimal("-1"), fromIEEE754BIDEncoding(0xb040000000000000L, 0x0000000000000001L).bigDecimalValue()); + assertEquals(new BigDecimal("12345678901234567"), + fromIEEE754BIDEncoding(0x3040000000000000L, 0x002bdc545d6b4b87L).bigDecimalValue()); + assertEquals(new BigDecimal("989898983458"), fromIEEE754BIDEncoding(0x3040000000000000L, 0x000000e67a93c822L).bigDecimalValue()); + assertEquals(new BigDecimal("-12345678901234567"), + fromIEEE754BIDEncoding(0xb040000000000000L, 0x002bdc545d6b4b87L).bigDecimalValue()); + assertEquals(new BigDecimal("0.12345"), fromIEEE754BIDEncoding(0x3036000000000000L, 0x0000000000003039L).bigDecimalValue()); + assertEquals(new BigDecimal("0.0012345"), fromIEEE754BIDEncoding(0x3032000000000000L, 0x0000000000003039L).bigDecimalValue()); + assertEquals(new BigDecimal("00012345678901234567"), + fromIEEE754BIDEncoding(0x3040000000000000L, 0x002bdc545d6b4b87L).bigDecimalValue()); + } + + @Test + public void shouldConvertToLargeBigDecimal() { + // expect + assertEquals(new BigDecimal("12345689012345789012345"), + fromIEEE754BIDEncoding(0x304000000000029dL, 0x42da3a76f9e0d979L).bigDecimalValue()); + + 
assertEquals(new BigDecimal("1234567890123456789012345678901234"), fromIEEE754BIDEncoding(0x30403cde6fff9732L, + 0xde825cd07e96aff2L).bigDecimalValue()); + + assertEquals(new BigDecimal("9.999999999999999999999999999999999E+6144"), + fromIEEE754BIDEncoding(0x5fffed09bead87c0L, 0x378d8e63ffffffffL).bigDecimalValue()); + + assertEquals(new BigDecimal("9.999999999999999999999999999999999E-6143"), + fromIEEE754BIDEncoding(0x0001ed09bead87c0L, 0x378d8e63ffffffffL).bigDecimalValue()); + + assertEquals(new BigDecimal("5.192296858534827628530496329220095E+33"), + fromIEEE754BIDEncoding(0x3040ffffffffffffL, 0xffffffffffffffffL).bigDecimalValue()); + } + + @Test + public void shouldConvertInvalidRepresentationsOfZeroAsBigDecimalZero() { + // expect + assertEquals(new BigDecimal("0"), fromIEEE754BIDEncoding(0x6C10000000000000L, 0x0).bigDecimalValue()); + assertEquals(new BigDecimal("0E+3"), fromIEEE754BIDEncoding(0x6C11FFFFFFFFFFFFL, 0xffffffffffffffffL).bigDecimalValue()); + } + + @Test + public void shouldDetectInfinity() { + // expect + assertTrue(POSITIVE_INFINITY.isInfinite()); + assertTrue(NEGATIVE_INFINITY.isInfinite()); + assertFalse(parse("0").isInfinite()); + assertFalse(parse("9.999999999999999999999999999999999E+6144").isInfinite()); + assertFalse(parse("9.999999999999999999999999999999999E-6143").isInfinite()); + assertFalse(POSITIVE_INFINITY.isFinite()); + assertFalse(NEGATIVE_INFINITY.isFinite()); + assertTrue(parse("0").isFinite()); + assertTrue(parse("9.999999999999999999999999999999999E+6144").isFinite()); + assertTrue(parse("9.999999999999999999999999999999999E-6143").isFinite()); + } + + @Test + public void shouldDetectNaN() { + // expect + assertTrue(NaN.isNaN()); + assertTrue(fromIEEE754BIDEncoding(0x7e00000000000000L, 0).isNaN()); // SNaN + assertFalse(POSITIVE_INFINITY.isNaN()); + assertFalse(NEGATIVE_INFINITY.isNaN()); + assertFalse(parse("0").isNaN()); + assertFalse(parse("9.999999999999999999999999999999999E+6144").isNaN()); + 
assertFalse(parse("9.999999999999999999999999999999999E-6143").isNaN()); + } + + @Test + public void shouldConvertNaNToString() { + // expect + assertEquals("NaN", NaN.toString()); + } + + @Test + public void shouldConvertNaNFromString() { + // expect + assertEquals(NaN, parse("NaN")); + assertEquals(NaN, parse("nan")); + assertEquals(NaN, parse("nAn")); + assertEquals(NEGATIVE_NaN, parse("-NaN")); + assertEquals(NEGATIVE_NaN, parse("-nan")); + assertEquals(NEGATIVE_NaN, parse("-nAn")); + } + + @Test + public void shouldNotConvertNaNToBigDecimal() { + assertThrows(ArithmeticException.class, () -> + // when + NaN.bigDecimalValue()); + } + + @Test + public void shouldConvertInfinityToString() { + // expect + assertEquals("Infinity", POSITIVE_INFINITY.toString()); + assertEquals("-Infinity", NEGATIVE_INFINITY.toString()); + } + + @Test + public void shouldConvertInfinityFromString() { + // expect + assertEquals(POSITIVE_INFINITY, parse("Inf")); + assertEquals(POSITIVE_INFINITY, parse("inf")); + assertEquals(POSITIVE_INFINITY, parse("inF")); + assertEquals(POSITIVE_INFINITY, parse("+Inf")); + assertEquals(POSITIVE_INFINITY, parse("+inf")); + assertEquals(POSITIVE_INFINITY, parse("+inF")); + assertEquals(POSITIVE_INFINITY, parse("Infinity")); + assertEquals(POSITIVE_INFINITY, parse("infinity")); + assertEquals(POSITIVE_INFINITY, parse("infiniTy")); + assertEquals(POSITIVE_INFINITY, parse("+Infinity")); + assertEquals(POSITIVE_INFINITY, parse("+infinity")); + assertEquals(POSITIVE_INFINITY, parse("+infiniTy")); + assertEquals(NEGATIVE_INFINITY, parse("-Inf")); + assertEquals(NEGATIVE_INFINITY, parse("-inf")); + assertEquals(NEGATIVE_INFINITY, parse("-inF")); + assertEquals(NEGATIVE_INFINITY, parse("-Infinity")); + assertEquals(NEGATIVE_INFINITY, parse("-infinity")); + assertEquals(NEGATIVE_INFINITY, parse("-infiniTy")); + } + + @Test + public void shouldConvertFiniteToString() { + // expect + assertEquals("0", parse("0").toString()); + assertEquals("-0", 
parse("-0").toString()); + assertEquals("0E+10", parse("0E10").toString()); + assertEquals("-0E+10", parse("-0E10").toString()); + assertEquals("1", parse("1").toString()); + assertEquals("-1", parse("-1").toString()); + assertEquals("-1.1", parse("-1.1").toString()); + + assertEquals("1.23E-7", parse("123E-9").toString()); + assertEquals("0.00000123", parse("123E-8").toString()); + assertEquals("0.0000123", parse("123E-7").toString()); + assertEquals("0.000123", parse("123E-6").toString()); + assertEquals("0.00123", parse("123E-5").toString()); + assertEquals("0.0123", parse("123E-4").toString()); + assertEquals("0.123", parse("123E-3").toString()); + assertEquals("1.23", parse("123E-2").toString()); + assertEquals("12.3", parse("123E-1").toString()); + assertEquals("123", parse("123E0").toString()); + assertEquals("1.23E+3", parse("123E1").toString()); + + assertEquals("0.0001234", parse("1234E-7").toString()); + assertEquals("0.001234", parse("1234E-6").toString()); + + assertEquals("1E+6", parse("1E6").toString()); + } + + @Test + public void shouldConvertInvalidRepresentationsOfZeroToString() { + // expect + assertEquals("0", fromIEEE754BIDEncoding(0x6C10000000000000L, 0x0).bigDecimalValue().toString()); + assertEquals("0E+3", fromIEEE754BIDEncoding(0x6C11FFFFFFFFFFFFL, 0xffffffffffffffffL).toString()); + } + + @Test + public void testEquals() { + // given + Decimal128 d1 = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L); + Decimal128 d2 = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L); + Decimal128 d3 = fromIEEE754BIDEncoding(0x3040000000000001L, 0x0000000000000001L); + Decimal128 d4 = fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000011L); + + // expect + assertEquals(d1, d1); + assertEquals(d1, d2); + assertNotEquals(d1, d3); + assertNotEquals(d1, d4); + assertNotEquals(null, d1); + assertNotEquals(0L, d1); + } + + @Test + public void testHashCode() { + // expect + assertEquals(809500703, 
fromIEEE754BIDEncoding(0x3040000000000000L, 0x0000000000000001L).hashCode()); + } + + @Test + public void shouldNotConvertPositiveInfinityToBigDecimal() { + assertThrows(ArithmeticException.class, () -> POSITIVE_INFINITY.bigDecimalValue()); + } + + @Test + public void shouldNotConvertNegativeInfinityToBigDecimal() { + assertThrows(ArithmeticException.class, () ->NEGATIVE_INFINITY.bigDecimalValue()); + } + + @Test + public void shouldNotConvertNegativeZeroToBigDecimal() { + try { + parse("-0").bigDecimalValue(); + fail(); + } catch (ArithmeticException e) { + // pass + } + + try { + parse("-0E+1").bigDecimalValue(); + fail(); + } catch (ArithmeticException e) { + // pass + } + + try { + parse("-0E-1").bigDecimalValue(); + fail(); + } catch (ArithmeticException e) { + // pass + } + } + + @Test + public void shouldNotRoundInexactly() { + try { + parse("12345678901234567890123456789012345E+6111"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("123456789012345678901234567890123456E+6111"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("1234567890123456789012345678901234567E+6111"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("12345678901234567890123456789012345E-6176"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("123456789012345678901234567890123456E-6176"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("1234567890123456789012345678901234567E-6176"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-12345678901234567890123456789012345E+6111"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-123456789012345678901234567890123456E+6111"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-1234567890123456789012345678901234567E+6111"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } 
+ try { + parse("-12345678901234567890123456789012345E-6176"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-123456789012345678901234567890123456E-6176"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-1234567890123456789012345678901234567E-6176"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + } + + @Test + public void shouldNotClampLargeExponentsIfNoExtraPrecisionIsAvailable() { + try { + parse("1234567890123456789012345678901234E+6112"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("1234567890123456789012345678901234E+6113"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("1234567890123456789012345678901234E+6114"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-1234567890123456789012345678901234E+6112"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-1234567890123456789012345678901234E+6113"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-1234567890123456789012345678901234E+6114"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + } + + @Test + public void shouldNotClampSmallExponentsIfNoExtraPrecisionCanBeDiscarded() { + try { + parse("1234567890123456789012345678901234E-6177"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("1234567890123456789012345678901234E-6178"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("1234567890123456789012345678901234E-6179"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-1234567890123456789012345678901234E-6177"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + parse("-1234567890123456789012345678901234E-6178"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + try { + 
parse("-1234567890123456789012345678901234E-6179"); + fail(); + } catch (IllegalArgumentException e) { + // pass + } + } + + @Test + public void shouldThrowIllegalArgumentExceptionIfBigDecimalIsTooLarge() { + assertThrows(IllegalArgumentException.class, () -> new Decimal128(new BigDecimal("12345678901234567890123456789012345"))); + } + + @Test + public void shouldExtendNumber() { + // expect + assertEquals(Double.POSITIVE_INFINITY, POSITIVE_INFINITY.doubleValue(), 0); + assertEquals(Float.POSITIVE_INFINITY, POSITIVE_INFINITY.floatValue(), 0); + assertEquals(Long.MAX_VALUE, POSITIVE_INFINITY.longValue()); + assertEquals(Integer.MAX_VALUE, POSITIVE_INFINITY.intValue()); + + assertEquals(Double.NEGATIVE_INFINITY, NEGATIVE_INFINITY.doubleValue(), 0); + assertEquals(Float.NEGATIVE_INFINITY, NEGATIVE_INFINITY.floatValue(), 0); + assertEquals(Long.MIN_VALUE, NEGATIVE_INFINITY.longValue()); + assertEquals(Integer.MIN_VALUE, NEGATIVE_INFINITY.intValue()); + + assertEquals(Double.NaN, NaN.doubleValue(), 0); + assertEquals(Double.NaN, NaN.floatValue(), 0); + assertEquals(0, NaN.longValue()); + assertEquals(0, NaN.intValue()); + + assertEquals(Double.NaN, NEGATIVE_NaN.doubleValue(), 0); + assertEquals(Float.NaN, NEGATIVE_NaN.floatValue(), 0); + assertEquals(0, NEGATIVE_NaN.longValue()); + assertEquals(0, NEGATIVE_NaN.intValue()); + + assertEquals(0.0d, POSITIVE_ZERO.doubleValue(), 0); + assertEquals(0.0f, POSITIVE_ZERO.floatValue(), 0); + assertEquals(0L, POSITIVE_ZERO.longValue()); + assertEquals(0, POSITIVE_ZERO.intValue()); + + assertEquals(NEGATIVE_ZERO.doubleValue(), -0d, 0); + assertEquals(NEGATIVE_ZERO.floatValue(), -0f, 0); + assertEquals(0L, NEGATIVE_ZERO.longValue()); + assertEquals(0, NEGATIVE_ZERO.intValue()); + + assertEquals(parse("-0.0").doubleValue(), -0d, 0); + assertEquals(parse("-0.0").floatValue(), -0f, 0); + assertEquals(0L, parse("-0.0").longValue()); + assertEquals(0, parse("-0.0").intValue()); + + assertEquals(5.4d, parse("5.4").doubleValue(), 0); + 
assertEquals(5.4f, parse("5.4").floatValue(), 0); + assertEquals(5L, parse("5.4").longValue()); + assertEquals(5, parse("5.4").intValue()); + + assertEquals(1.2345678901234568E33d, parse("1234567890123456789012345678901234").doubleValue(), 0); + assertEquals(1.2345679E33f, parse("1234567890123456789012345678901234").floatValue(), 0); + assertEquals(Long.MAX_VALUE, parse("1234567890123456789012345678901234").longValue()); + assertEquals(Integer.MAX_VALUE, parse("1234567890123456789012345678901234").intValue()); + + assertEquals(-1.2345678901234568E33d, parse("-1234567890123456789012345678901234").doubleValue(), 0); + assertEquals(-1.2345679E33f, parse("-1234567890123456789012345678901234").floatValue(), 0); + assertEquals(Long.MIN_VALUE, parse("-1234567890123456789012345678901234").longValue()); + assertEquals(Integer.MIN_VALUE, parse("-1234567890123456789012345678901234").intValue()); + } + + @Test + public void shouldImplementComparable() { + assertEquals(1, NaN.compareTo(NEGATIVE_ZERO)); + assertEquals(0, NaN.compareTo(NaN)); + assertEquals(1, NaN.compareTo(POSITIVE_INFINITY)); + assertEquals(1, NaN.compareTo(NEGATIVE_INFINITY)); + assertEquals(1, NaN.compareTo(parse("1"))); + assertEquals(1, POSITIVE_INFINITY.compareTo(NEGATIVE_INFINITY)); + assertEquals(0, POSITIVE_INFINITY.compareTo(POSITIVE_INFINITY)); + assertEquals(-1, POSITIVE_INFINITY.compareTo(NaN)); + assertEquals(1, POSITIVE_INFINITY.compareTo(NEGATIVE_ZERO)); + assertEquals(1, POSITIVE_INFINITY.compareTo(parse("1"))); + assertEquals(-1, NEGATIVE_INFINITY.compareTo(POSITIVE_INFINITY)); + assertEquals(0, NEGATIVE_INFINITY.compareTo(NEGATIVE_INFINITY)); + assertEquals(-1, NEGATIVE_INFINITY.compareTo(NaN)); + assertEquals(-1, NEGATIVE_INFINITY.compareTo(NEGATIVE_ZERO)); + assertEquals(-1, NEGATIVE_INFINITY.compareTo(parse("1"))); + assertEquals(-1, parse("1").compareTo(NaN)); + assertEquals(-1, parse("1").compareTo(POSITIVE_INFINITY)); + assertEquals(1, parse("1").compareTo(NEGATIVE_INFINITY)); + 
assertEquals(1, parse("1").compareTo(NEGATIVE_ZERO)); + assertEquals(-1, parse("-0").compareTo(parse("0"))); + assertEquals(0, parse("-0").compareTo(parse("-0"))); + assertEquals(-1, parse("-0").compareTo(NaN)); + assertEquals(-1, parse("-0").compareTo(POSITIVE_INFINITY)); + assertEquals(1, parse("-0").compareTo(NEGATIVE_INFINITY)); + assertEquals(1, parse("0").compareTo(parse("-0"))); + assertEquals(0, parse("0").compareTo(parse("0"))); + assertEquals(0, parse("5.4").compareTo(parse("5.4"))); + assertEquals(1, parse("5.4").compareTo(parse("5.3"))); + assertEquals(-1, parse("5.3").compareTo(parse("5.4"))); + assertEquals(0, parse("5.4").compareTo(parse("5.40"))); + } +} diff --git a/bson/src/test/unit/org/bson/types/DocumentSpecification.groovy b/bson/src/test/unit/org/bson/types/DocumentSpecification.groovy index 0dba1ea09d5..1066edc5317 100644 --- a/bson/src/test/unit/org/bson/types/DocumentSpecification.groovy +++ b/bson/src/test/unit/org/bson/types/DocumentSpecification.groovy @@ -28,13 +28,13 @@ class DocumentSpecification extends Specification { def 'should return correct type for each typed method'() { given: - Date date = new Date(); - ObjectId objectId = new ObjectId(); + Date date = new Date() + ObjectId objectId = new ObjectId() when: Document doc = new Document() .append('int', 1).append('long', 2L).append('double', 3.0 as double).append('string', 'hi').append('boolean', true) - .append('objectId', objectId).append('date', date); + .append('objectId', objectId).append('date', date) then: doc.getInteger('int') == 1 @@ -65,42 +65,204 @@ class DocumentSpecification extends Specification { doc.get('noVal', objectId) == objectId } + def 'should return a list with elements of the specified class'() { + when: + Document doc = Document.parse("{x: 1, y: ['two', 'three'], z: [{a: 'one'}, {b:2}], w: {a: ['One', 'Two']}}") + .append('numberList', [10, 20.5d, 30L]) + .append('listWithNullElement', [10, null, 20]) + List defaultList = ['a', 'b', 'c'] + + then: + 
doc.getList('y', String).get(0) == 'two' + doc.getList('y', String).get(1) == 'three' + doc.getList('z', Document).get(0).getString('a') == 'one' + doc.getList('z', Document).get(1).getInteger('b') == 2 + doc.get('w', Document).getList('a', String).get(0) == 'One' + doc.get('w', Document).getList('a', String).get(1) == 'Two' + doc.getList('invalidKey', Document, defaultList).get(0) == 'a' + doc.getList('invalidKey', Document, defaultList).get(1) == 'b' + doc.getList('invalidKey', Document, defaultList).get(2) == 'c' + doc.getList('numberList', Number).get(0) == 10 + doc.getList('numberList', Number).get(1) == 20.5d + doc.getList('numberList', Number).get(2) == 30L + doc.getList('listWithNullElement', Number).get(0) == 10 + doc.getList('listWithNullElement', Number).get(1) == null + doc.getList('listWithNullElement', Number).get(2) == 20 + } + + def 'should return null list when key is not found'() { + when: + Document doc = Document.parse('{x: 1}') + + then: + doc.getList('a', String) == null + } + + def 'should return specified default value when key is not found'() { + when: + Document doc = Document.parse('{x: 1}') + List defaultList = ['a', 'b', 'c'] + + then: + doc.getList('a', String, defaultList) == defaultList + } + + + def 'should throw an exception when the list elements are not objects of the specified class'() { + given: + Document doc = Document.parse('{x: 1, y: [{a: 1}, {b: 2}], z: [1, 2]}') + + when: + doc.getList('x', String) + + then: + thrown(ClassCastException) + + when: + doc.getList('y', String) + + then: + thrown(ClassCastException) + + when: + doc.getList('z', String) + + then: + thrown(ClassCastException) + } + + def 'should return null when getting embedded value'() { + when: + Document document = Document.parse("{a: 1, b: {x: [2, 3, 4], y: {m: 'one', len: 3}}, 'a.b': 'two'}") + + then: + document.getEmbedded(['notAKey'], String) == null + document.getEmbedded(['b', 'y', 'notAKey'], String) == null + document.getEmbedded(['b', 'b', 'm'], 
String) == null + Document.parse('{}').getEmbedded(['a', 'b'], Integer) == null + Document.parse('{b: 1}').getEmbedded(['a'], Integer) == null + Document.parse('{b: 1}').getEmbedded(['a', 'b'], Integer) == null + Document.parse('{a: {c: 1}}').getEmbedded(['a', 'b'], Integer) == null + Document.parse('{a: {c: 1}}').getEmbedded(['a', 'b', 'c'], Integer) == null + } + + def 'should return embedded value'() { + given: + Date date = new Date() + ObjectId objectId = new ObjectId() + + when: + Document document = Document.parse("{a: 1, b: {x: [2, 3, 4], y: {m: 'one', len: 3}}, 'a.b': 'two'}") + .append('l', new Document('long', 2L)) + .append('d', new Document('double', 3.0 as double)) + .append('t', new Document('boolean', true)) + .append('o', new Document('objectId', objectId)) + .append('n', new Document('date', date)) + + then: + document.getEmbedded(['a'], Integer) == 1 + document.getEmbedded(['b', 'x'], List).get(0) == 2 + document.getEmbedded(['b', 'x'], List).get(1) == 3 + document.getEmbedded(['b', 'x'], List).get(2) == 4 + document.getEmbedded(['b', 'y', 'm'], String) == 'one' + document.getEmbedded(['b', 'y', 'len'], Integer) == 3 + document.getEmbedded(['a.b'], String) == 'two' + document.getEmbedded(['b', 'y'], Document).getString('m') == 'one' + document.getEmbedded(['b', 'y'], Document).getInteger('len') == 3 + + document.getEmbedded(['l', 'long'], Long) == 2L + document.getEmbedded(['d', 'double'], Double) == 3.0d + document.getEmbedded(['l', 'long'], Number) == 2L + document.getEmbedded(['d', 'double'], Number) == 3.0d + document.getEmbedded(['t', 'boolean'], Boolean) == true + document.getEmbedded(['t', 'x'], false) == false + document.getEmbedded(['o', 'objectId'], ObjectId) == objectId + document.getEmbedded(['n', 'date'], Date) == date + } + + def 'should throw an exception getting an embedded value'() { + given: + Document document = Document.parse("{a: 1, b: {x: [2, 3, 4], y: {m: 'one', len: 3}}, 'a.b': 'two'}") + + when: + 
document.getEmbedded(null, String) == null + + then: + thrown(IllegalArgumentException) + + when: + document.getEmbedded([], String) == null + + then: + thrown(IllegalStateException) + + when: + document.getEmbedded(['a', 'b'], Integer) + + then: + thrown(ClassCastException) + + when: + document.getEmbedded(['b', 'y', 'm'], Integer) + + then: + thrown(ClassCastException) + + when: + document.getEmbedded(['b', 'x'], Document) + + then: + thrown(ClassCastException) + + when: + document.getEmbedded(['b', 'x', 'm'], String) + + then: + thrown(ClassCastException) + + when: + document.getEmbedded(['b', 'x', 'm'], 'invalid') + + then: + thrown(ClassCastException) + } + def 'should parse a valid JSON string to a Document'() { when: - Document document = Document.parse("{ 'int' : 1, 'string' : 'abc' }"); + Document document = Document.parse("{ 'int' : 1, 'string' : 'abc' }") then: - document != null; - document.keySet().size() == 2; - document.getInteger('int') == 1; - document.getString('string') == 'abc'; + document != null + document.keySet().size() == 2 + document.getInteger('int') == 1 + document.getString('string') == 'abc' when: - document = Document.parse("{ 'int' : 1, 'string' : 'abc' }", new DocumentCodec()); + document = Document.parse("{ 'int' : 1, 'string' : 'abc' }", new DocumentCodec()) then: - document != null; - document.keySet().size() == 2; - document.getInteger('int') == 1; - document.getString('string') == 'abc'; + document != null + document.keySet().size() == 2 + document.getInteger('int') == 1 + document.getString('string') == 'abc' } def 'test parse method with mode'() { when: - Document document = Document.parse("{'regex' : /abc/im }"); + Document document = Document.parse("{'regex' : /abc/im }") then: - document != null; - document.keySet().size() == 1; + document != null + document.keySet().size() == 1 - BsonRegularExpression regularExpression = (BsonRegularExpression) document.get('regex'); + BsonRegularExpression regularExpression = 
(BsonRegularExpression) document.get('regex') regularExpression.options == 'im' regularExpression.pattern == 'abc' } def 'should throw an exception when parsing an invalid JSON String'() { when: - Document.parse("{ 'int' : 1, 'string' : }"); + Document.parse("{ 'int' : 1, 'string' : }") then: thrown(JsonParseException) diff --git a/bson/src/test/unit/org/bson/types/ObjectIdTest.java b/bson/src/test/unit/org/bson/types/ObjectIdTest.java index 8e2d4ab5ae6..cfe04623b90 100644 --- a/bson/src/test/unit/org/bson/types/ObjectIdTest.java +++ b/bson/src/test/unit/org/bson/types/ObjectIdTest.java @@ -16,44 +16,106 @@ package org.bson.types; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.nio.Buffer; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; import java.util.Date; +import java.util.List; +import java.util.Locale; import java.util.Random; -import java.nio.ByteBuffer; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class ObjectIdTest { - @Test - public void testToBytes() { - ObjectId objectId = new ObjectId(0x5106FC9A, 0x00BC8237, (short) 0x5581, 
0x0036D289); - byte[] expectedBytes = new byte[]{81, 6, -4, -102, -68, -126, 55, 85, -127, 54, -46, -119}; + + /** Calls the base method of ByteBuffer.position(int) since the override is not available in jdk8. */ + private static ByteBuffer setPosition(final ByteBuffer buf, final int pos) { + ((Buffer) buf).position(pos); + return buf; + } + + /** + * MethodSource for valid ByteBuffers that can hold an ObjectID + */ + public static List validOutputBuffers() { + List result = new ArrayList<>(); + result.add(ByteBuffer.allocate(12)); + result.add(ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN)); + result.add(ByteBuffer.allocate(24).put(new byte[12])); + result.add(ByteBuffer.allocateDirect(12)); + result.add(ByteBuffer.allocateDirect(12).order(ByteOrder.LITTLE_ENDIAN)); + return result; + } + + @MethodSource("validOutputBuffers") + @ParameterizedTest + public void testToBytes(final ByteBuffer output) { + int originalPosition = output.position(); + ByteOrder originalOrder = output.order(); + byte[] expectedBytes = {81, 6, -4, -102, -68, -126, 55, 85, -127, 54, -46, -119}; + byte[] result = new byte[12]; + ObjectId objectId = new ObjectId(expectedBytes); assertArrayEquals(expectedBytes, objectId.toByteArray()); - ByteBuffer buffer = ByteBuffer.allocate(12); - objectId.putToByteBuffer(buffer); - assertArrayEquals(expectedBytes, buffer.array()); + objectId.putToByteBuffer(output); + ((Buffer) output).position(output.position() - 12); + output.get(result); // read last 12 bytes leaving position intact + + assertArrayEquals(expectedBytes, result); + assertEquals(originalPosition + 12, output.position()); + assertEquals(originalOrder, output.order()); } @Test public void testFromBytes() { - byte[] bytes = new byte[]{81, 6, -4, -102, -68, -126, 55, 85, -127, 54, -46, -119}; + + try { + new ObjectId((byte[]) null); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertEquals("bytes can not be null", e.getMessage()); + } + + try { 
+ new ObjectId(new byte[11]); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertEquals("state should be: bytes has length of 12", e.getMessage()); + } + + try { + new ObjectId(new byte[13]); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertEquals("state should be: bytes has length of 12", e.getMessage()); + } + + byte[] bytes = {81, 6, -4, -102, -68, -126, 55, 85, -127, 54, -46, -119}; ObjectId objectId1 = new ObjectId(bytes); assertEquals(0x5106FC9A, objectId1.getTimestamp()); - assertEquals(0x00BC8237, objectId1.getMachineIdentifier()); - assertEquals((short) 0x5581, objectId1.getProcessIdentifier()); - assertEquals(0x0036D289, objectId1.getCounter()); ObjectId objectId2 = new ObjectId(ByteBuffer.wrap(bytes)); assertEquals(0x5106FC9A, objectId2.getTimestamp()); - assertEquals(0x00BC8237, objectId2.getMachineIdentifier()); - assertEquals((short) 0x5581, objectId2.getProcessIdentifier()); - assertEquals(0x0036D289, objectId2.getCounter()); } @Test @@ -72,6 +134,36 @@ public void testBytesRoundtrip() { assertEquals("41d91c58988b09375cc1fe9f", expected.toString()); } + @Test + public void testGetSmallestWithDate() { + Date date = new Date(1588467737760L); + byte[] expectedBytes = {94, -82, 24, 25, 0, 0, 0, 0, 0, 0, 0, 0}; + ObjectId objectId = ObjectId.getSmallestWithDate(date); + assertArrayEquals(expectedBytes, objectId.toByteArray()); + assertEquals(date.getTime() / 1000 * 1000, objectId.getDate().getTime()); + assertEquals(-1, objectId.compareTo(new ObjectId(date))); + } + + @Test + public void testGetTimeZero() { + assertEquals(0L, new ObjectId(0, 0).getDate().getTime()); + } + + @Test + public void testGetTimeMaxSignedInt() { + assertEquals(0x7FFFFFFFL * 1000, new ObjectId(0x7FFFFFFF, 0).getDate().getTime()); + } + + @Test + public void testGetTimeMaxSignedIntPlusOne() { + assertEquals(0x80000000L * 1000, new ObjectId(0x80000000, 0).getDate().getTime()); + } + + @Test + 
public void testGetTimeMaxInt() { + assertEquals(0xFFFFFFFFL * 1000, new ObjectId(0xFFFFFFFF, 0).getDate().getTime()); + } + @Test public void testTime() { long a = System.currentTimeMillis(); @@ -80,88 +172,161 @@ public void testTime() { } @Test - public void testDateCons() { - Date d = new Date(); - ObjectId a = new ObjectId(d); - assertEquals(d.getTime() / 1000, a.getDate().getTime() / 1000); + public void testDateConstructor() { + assertEquals(new Date().getTime() / 1000, new ObjectId(new Date()).getDate().getTime() / 1000); + assertNotEquals(new ObjectId(new Date(1_000)), new ObjectId(new Date(1_000))); + assertEquals("00000001", new ObjectId(new Date(1_000)).toHexString().substring(0, 8)); } @Test - public void testMachineIdentifier() { - assertTrue(ObjectId.getGeneratedMachineIdentifier() > 0); - assertEquals(0, ObjectId.getGeneratedMachineIdentifier() & 0xff000000); - - assertEquals(5, new ObjectId(0, 5, (short) 0, 0).getMachineIdentifier()); - assertEquals(0x00ffffff, new ObjectId(0, 0x00ffffff, (short) 0, 0).getMachineIdentifier()); - assertEquals(ObjectId.getGeneratedMachineIdentifier(), new ObjectId().getMachineIdentifier()); + public void testDateConstructorWithCounter() { + assertEquals(new ObjectId(new Date(1_000), 1), new ObjectId(new Date(1_000), 1)); + assertEquals("00000001", new ObjectId(new Date(1_000), 1).toHexString().substring(0, 8)); + assertThrows(NullPointerException.class, () -> new ObjectId(null, Integer.MAX_VALUE)); + assertThrows(IllegalArgumentException.class, () -> new ObjectId(new Date(1_000), Integer.MAX_VALUE)); } - @Test(expected = IllegalArgumentException.class) - public void shouldThrowIfMachineIdentifierIsTooLarge() { - new ObjectId(0, 0x00ffffff + 1, (short) 0, 0); + @Test + public void testTimestampConstructor() { + assertEquals(1_000, new ObjectId(1_000, 1).getTimestamp()); + assertEquals(new ObjectId(1_000, 1), new ObjectId(1_000, 1)); + assertEquals("7fffffff", new ObjectId(Integer.MAX_VALUE, 
1).toHexString().substring(0, 8)); + assertThrows(IllegalArgumentException.class, () -> new ObjectId(Integer.MAX_VALUE, Integer.MAX_VALUE)); } - @Test - public void testProcessIdentifier() { - assertEquals(5, new ObjectId(0, 0, (short) 5, 0).getProcessIdentifier()); - assertEquals(ObjectId.getGeneratedProcessIdentifier(), new ObjectId().getProcessIdentifier()); + /** + * MethodSource for valid ByteBuffers containing an ObjectID at the current position. + */ + public static List validInputBuffers() { + byte[] data = new byte[12]; + for (byte i = 0; i < data.length; ++i) { + data[i] = i; + } + + List result = new ArrayList<>(); + result.add(ByteBuffer.wrap(data)); + result.add(ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN)); + result.add(setPosition(ByteBuffer.allocateDirect(data.length).put(data), 0)); + result.add(setPosition(ByteBuffer.allocateDirect(data.length).put(data).order(ByteOrder.LITTLE_ENDIAN), 0)); + result.add(setPosition(ByteBuffer.allocate(2 * data.length).put(data), 0)); + result.add(setPosition(ByteBuffer.allocate(2 * data.length).put(new byte[12]).put(data), 12)); + return result; } - @Test - public void testCounter() { - assertEquals(new ObjectId().getCounter() + 1, new ObjectId().getCounter()); + @ParameterizedTest + @MethodSource(value = "validInputBuffers") + public void testByteBufferConstructor(final ByteBuffer input) { + ByteOrder order = input.order(); + int position = input.position(); + + byte[] result = new ObjectId(input).toByteArray(); + + assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, result); + assertEquals(order, input.order()); + assertEquals(position + 12, input.position()); } - @Test(expected = IllegalArgumentException.class) - public void shouldThrowIfCounterIsTooLarge() { - new ObjectId(0, 0, (short) 0, 0x00ffffff + 1); + @Test + public void testInvalidByteBufferConstructor() { + assertThrows(IllegalArgumentException.class, () -> new ObjectId((ByteBuffer) null)); + 
assertThrows(IllegalArgumentException.class, () -> new ObjectId(ByteBuffer.allocate(11))); } @Test public void testHexStringConstructor() { ObjectId id = new ObjectId(); assertEquals(id, new ObjectId(id.toHexString())); + assertEquals(id, new ObjectId(id.toHexString().toUpperCase(Locale.US))); + assertThrows(IllegalArgumentException.class, () -> new ObjectId((String) null)); + assertThrows(IllegalArgumentException.class, () -> new ObjectId(id.toHexString().substring(0, 23))); + assertThrows(IllegalArgumentException.class, () -> new ObjectId(id.toHexString().substring(0, 23) + '%')); } @Test public void testCompareTo() { - assertEquals(-1, new ObjectId(0, 0, (short) 0, 0).compareTo(new ObjectId(1, 0, (short) 0, 0))); - assertEquals(-1, new ObjectId(0, 0, (short) 0, 0).compareTo(new ObjectId(0, 1, (short) 0, 0))); - assertEquals(-1, new ObjectId(0, 0, (short) 0, 0).compareTo(new ObjectId(0, 0, (short) 1, 0))); - assertEquals(-1, new ObjectId(0, 0, (short) 1, 0).compareTo(new ObjectId(0, 0, (short) -1, 0))); - assertEquals(-1, new ObjectId(0, 0, (short) 0, 0).compareTo(new ObjectId(0, 0, (short) 0, 1))); - assertEquals(0, new ObjectId(0, 0, (short) 0, 0).compareTo(new ObjectId(0, 0, (short) 0, 0))); - assertEquals(1, new ObjectId(1, 0, (short) 0, 0).compareTo(new ObjectId(0, 0, (short) 0, 0))); - assertEquals(1, new ObjectId(0, 1, (short) 0, 0).compareTo(new ObjectId(0, 0, (short) 0, 0))); - assertEquals(1, new ObjectId(0, 0, (short) 1, 0).compareTo(new ObjectId(0, 0, (short) 0, 0))); - assertEquals(1, new ObjectId(0, 0, (short) -1, 0).compareTo(new ObjectId(0, 0, (short) 1, 0))); - assertEquals(1, new ObjectId(0, 0, (short) 0, 1).compareTo(new ObjectId(0, 0, (short) 0, 0))); + Date dateOne = new Date(); + Date dateTwo = new Date(dateOne.getTime() + 10000); + ObjectId first = new ObjectId(dateOne, 0); + ObjectId second = new ObjectId(dateOne, 1); + ObjectId third = new ObjectId(dateTwo, 0); + assertEquals(0, first.compareTo(first)); + assertEquals(-1, 
first.compareTo(second)); + assertEquals(-1, first.compareTo(third)); + assertEquals(1, second.compareTo(first)); + assertEquals(1, third.compareTo(first)); + assertThrows(NullPointerException.class, () -> first.compareTo(null)); + } + + @Test + public void testEquals() { + Date dateOne = new Date(); + Date dateTwo = new Date(dateOne.getTime() + 10000); + ObjectId first = new ObjectId(dateOne, 0); + ObjectId second = new ObjectId(dateOne, 1); + ObjectId third = new ObjectId(dateTwo, 0); + ObjectId fourth = new ObjectId(first.toByteArray()); + assertEquals(first, first); + assertEquals(first, fourth); + assertNotEquals(first, second); + assertNotEquals(first, third); + assertNotEquals(second, third); + assertFalse(first.equals(null)); } @Test public void testToHexString() { - assertEquals("000000000000000000000000", new ObjectId(0, 0, (short) 0, 0).toHexString()); - assertEquals("7fffffff007fff7fff007fff", - new ObjectId(Integer.MAX_VALUE, Short.MAX_VALUE, Short.MAX_VALUE, Short.MAX_VALUE).toHexString()); + assertEquals("000000000000000000000000", new ObjectId(new byte[12]).toHexString()); + assertEquals("7fffffff007fff7fff007fff", new ObjectId(new byte[]{127, -1, -1, -1, 0, 127, -1, 127, -1, 0, 127, -1}).toHexString()); + } + + private Date getDate(final String s) throws ParseException { + return new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss Z").parse(s); } - @SuppressWarnings("deprecation") @Test - public void testDeprecatedMethods() { + public void testTimeZero() throws ParseException { + assertEquals(getDate("01-Jan-1970 00:00:00 -0000"), new ObjectId(0, 0).getDate()); + } - ObjectId id = new ObjectId(); - assertEquals(id.getTimestamp(), id.getTimeSecond()); - assertEquals(id.getDate().getTime(), id.getTime()); - assertEquals(id.toHexString(), id.toStringMongod()); - assertArrayEquals(new byte[]{0x12, 0x34, 0x56, 0x78, 0x43, 0x21, 0xffffff87, 0x65, 0x74, 0xffffff92, 0xffffff87, 0x56}, - new ObjectId(0x12345678, 0x43218765, 0x74928756).toByteArray()); + @Test + 
public void testTimeMaxSignedInt() throws ParseException { + assertEquals(getDate("19-Jan-2038 03:14:07 -0000"), new ObjectId(0x7FFFFFFF, 0).getDate()); } - // Got these values from 2.12.0 driver. This test is ensuring that we properly round-trip old and new format ObjectIds. @Test - public void testCreateFromLegacy() { - assertArrayEquals(new byte[]{82, 23, -82, -78, -80, -58, -95, -92, -75, -38, 118, -16}, - ObjectId.createFromLegacyFormat(1377283762, -1329159772, -1243973904).toByteArray()); + public void testTimeMaxSignedIntPlusOne() throws ParseException { + assertEquals(getDate("19-Jan-2038 03:14:08 -0000"), new ObjectId(0x80000000, 0).getDate()); + } + + @Test + public void testTimeMaxInt() throws ParseException { + assertEquals(getDate("07-Feb-2106 06:28:15 -0000"), new ObjectId(0xFFFFFFFF, 0).getDate()); } -} + @Test + public void testObjectSerialization() throws IOException, ClassNotFoundException { + // given + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + ObjectOutputStream oos = new ObjectOutputStream(baos); + ObjectId objectId = new ObjectId("5f8f4fcf27516f05e7eae5be"); + + // when + oos.writeObject(objectId); + + // then + assertTrue(baos.toString().contains("org.bson.types.ObjectId$SerializationProxy")); + assertArrayEquals(new byte[] {-84, -19, 0, 5, 115, 114, 0, 42, 111, 114, 103, 46, 98, 115, 111, 110, 46, 116, 121, 112, 101, 115, + 46, 79, 98, 106, 101, 99, 116, 73, 100, 36, 83, 101, 114, 105, 97, 108, 105, 122, 97, 116, 105, 111, 110, 80, 114, + 111, 120, 121, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 1, 91, 0, 5, 98, 121, 116, 101, 115, 116, 0, 2, 91, 66, 120, 112, 117, + 114, 0, 2, 91, 66, -84, -13, 23, -8, 6, 8, 84, -32, 2, 0, 0, 120, 112, 0, 0, 0, 12, 95, -113, 79, -49, 39, 81, 111, + 5, -25, -22, -27, -66}, baos.toByteArray()); + + // when + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + ObjectInputStream ois = new ObjectInputStream(bais); + ObjectId deserializedObjectId = (ObjectId) ois.readObject(); + + // 
then + assertEquals(objectId, deserializedObjectId); + } +} diff --git a/bson/src/test/unit/org/bson/types/StringRangeSetSpecification.groovy b/bson/src/test/unit/org/bson/types/StringRangeSetSpecification.groovy index 191057b4cb4..3f19df94b7b 100644 --- a/bson/src/test/unit/org/bson/types/StringRangeSetSpecification.groovy +++ b/bson/src/test/unit/org/bson/types/StringRangeSetSpecification.groovy @@ -66,7 +66,7 @@ class StringRangeSetSpecification extends Specification { def 'set should be ordered string representations of the range'() { given: - def size = 2000; + def size = 2000 def expectedKeys = [] for (def i : (0..() - def expected = new Animal() - classMap.put(Animal, expected) - - when: - def actual = classMap.get(Animal) - - then: - actual == expected - } - - def 'should return null if there is no matching class or superclass in the class map'() { - given: - def classMap = new ClassMap() - def expected = new Animal() - classMap.put(Animal, expected) - - when: - def actual = classMap.get(Object) - - then: - actual == null - } - - def 'should get the value of the most specific class'() { - given: - def classMap = new ClassMap() - def expected = new Dog() - classMap.put(Animal, new Animal()) - classMap.put(Dog, expected) - - when: - def actual = classMap.get(Dog) - - then: - actual == expected - } - - def 'should get the value of the superclass if specific class is not in the map'() { - given: - def classMap = new ClassMap() - def expected = new Animal() - classMap.put(Animal, expected) - classMap.put(Dog, new Dog()) - - when: - def actual = classMap.get(Duck) - - then: - actual == expected - } - - def 'should get the value of the closest superclass if specific class is not in the map'() { - given: - def classMap = new ClassMap() - def expected = new Dog() - classMap.put(Animal, new Animal()) - classMap.put(Dog, expected) - - when: - def actual = classMap.get(Labrador) - - then: - actual == expected - } - - def 'should get the hierarchy of all superclasses'() 
{ - when: - def actual = ClassMap.getAncestry(Labrador) - - then: - actual == [Labrador, Dog, Animal, GroovyObject, Object] - } - - def 'should return the size for the number of class keys explicitly added to the map'() { - // i.e. I don't expect it to add cached superclasses to the size - given: - def classMap = new ClassMap() - classMap.put(Animal, new Animal()) - classMap.put(Labrador, new Labrador()) - - when: - def actualSize = classMap.size() - - then: - actualSize == 2 - } - - @SuppressWarnings('EmptyClass') - private class Animal { - - } - - private class Dog extends Animal { - - } - - private class Labrador extends Dog { - - } - - private class Duck extends Animal { - - } -} diff --git a/bson/src/test/unit/org/bson/vector/BinaryVectorGenericBsonTest.java b/bson/src/test/unit/org/bson/vector/BinaryVectorGenericBsonTest.java new file mode 100644 index 00000000000..35326281c66 --- /dev/null +++ b/bson/src/test/unit/org/bson/vector/BinaryVectorGenericBsonTest.java @@ -0,0 +1,272 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bson.vector; + +import org.bson.BinaryVector; +import org.bson.BsonArray; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.bson.BsonValue; +import org.bson.Float32BinaryVector; +import org.bson.PackedBitBinaryVector; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import util.JsonPoweredTestHelper; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Stream; + +import static java.lang.String.format; +import static org.bson.BsonHelper.decodeToDocument; +import static org.bson.BsonHelper.encodeToHex; +import static org.bson.internal.vector.BinaryVectorHelper.determineVectorDType; +import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeFalse; + +/** + * See + * JSON-based tests that included in test resources. + */ +class BinaryVectorGenericBsonTest { + + private static final List TEST_NAMES_TO_IGNORE = Arrays.asList( + //NO API to set padding for floats available. + "FLOAT32 with padding", + //NO API to set padding for floats available. + "INT8 with padding", + //It is impossible to provide float inputs for INT8 in the API. + "INT8 with float inputs", + //It is impossible to provide float inputs for INT8. + "Underflow Vector PACKED_BIT", + //It is impossible to provide float inputs for PACKED_BIT in the API. + "Vector with float values PACKED_BIT", + //It is impossible to provide float inputs for INT8. + "Overflow Vector PACKED_BIT", + //It is impossible to overflow byte with values higher than 127 in the API. + "Overflow Vector INT8", + //It is impossible to underflow byte with values lower than -128 in the API. 
+ "Underflow Vector INT8"); + + + @ParameterizedTest(name = "{0}") + @MethodSource("data") + void shouldPassAllOutcomes(@SuppressWarnings("unused") final String description, + final BsonDocument testDefinition, final BsonDocument testCase) { + assumeFalse(TEST_NAMES_TO_IGNORE.contains(testCase.get("description").asString().getValue())); + + String testKey = testDefinition.getString("test_key").getValue(); + boolean isValidVector = testCase.getBoolean("valid").getValue(); + if (isValidVector) { + runValidTestCase(testKey, testCase); + } else { + runInvalidTestCase(testCase); + } + } + + private static void runInvalidTestCase(final BsonDocument testCase) { + BsonArray arrayVector = testCase.getArray("vector"); + byte expectedPadding = (byte) testCase.getInt32("padding").getValue(); + byte dtypeByte = Byte.decode(testCase.getString("dtype_hex").getValue()); + BinaryVector.DataType expectedDType = determineVectorDType(dtypeByte); + + switch (expectedDType) { + case INT8: + byte[] expectedVectorData = toByteArray(arrayVector); + assertValidationException(assertThrows(RuntimeException.class, + () -> BinaryVector.int8Vector(expectedVectorData))); + break; + case PACKED_BIT: + byte[] expectedVectorPackedBitData = toByteArray(arrayVector); + assertValidationException(assertThrows(RuntimeException.class, + () -> BinaryVector.packedBitVector(expectedVectorPackedBitData, expectedPadding))); + break; + case FLOAT32: + float[] expectedFloatVector = toFloatArray(arrayVector); + assertValidationException(assertThrows(RuntimeException.class, () -> BinaryVector.floatVector(expectedFloatVector))); + break; + default: + throw new IllegalArgumentException("Unsupported vector data type: " + expectedDType); + } + } + + private static void runValidTestCase(final String testKey, final BsonDocument testCase) { + String description = testCase.getString("description").getValue(); + byte dtypeByte = Byte.decode(testCase.getString("dtype_hex").getValue()); + + byte expectedPadding = (byte) 
testCase.getInt32("padding").getValue(); + BinaryVector.DataType expectedDType = determineVectorDType(dtypeByte); + String expectedCanonicalBsonHex = testCase.getString("canonical_bson").getValue().toUpperCase(); + + BsonArray arrayVector = testCase.getArray("vector"); + BsonDocument actualDecodedDocument = decodeToDocument(expectedCanonicalBsonHex, description); + BinaryVector actualVector = actualDecodedDocument.getBinary("vector").asVector(); + + switch (expectedDType) { + case INT8: + byte[] expectedVectorData = toByteArray(arrayVector); + byte[] actualVectorData = actualVector.asInt8Vector().getData(); + assertVectorDecoding( + expectedVectorData, + expectedDType, + actualVectorData, + actualVector); + + assertThatVectorCreationResultsInCorrectBinary(BinaryVector.int8Vector(expectedVectorData), + testKey, + actualDecodedDocument, + expectedCanonicalBsonHex, + description); + break; + case PACKED_BIT: + PackedBitBinaryVector actualPackedBitVector = actualVector.asPackedBitVector(); + byte[] expectedVectorPackedBitData = toByteArray(arrayVector); + assertVectorDecoding( + expectedVectorPackedBitData, + expectedDType, expectedPadding, + actualPackedBitVector); + + assertThatVectorCreationResultsInCorrectBinary( + BinaryVector.packedBitVector(expectedVectorPackedBitData, expectedPadding), + testKey, + actualDecodedDocument, + expectedCanonicalBsonHex, + description); + break; + case FLOAT32: + Float32BinaryVector actualFloat32Vector = actualVector.asFloat32Vector(); + float[] expectedFloatVector = toFloatArray(arrayVector); + assertVectorDecoding( + expectedFloatVector, + expectedDType, + actualFloat32Vector); + assertThatVectorCreationResultsInCorrectBinary( + BinaryVector.floatVector(expectedFloatVector), + testKey, + actualDecodedDocument, + expectedCanonicalBsonHex, + description); + break; + default: + throw new IllegalArgumentException("Unsupported vector data type: " + expectedDType); + } + } + + private static void assertValidationException(final 
RuntimeException runtimeException) { + assertTrue(runtimeException instanceof IllegalArgumentException || runtimeException instanceof IllegalStateException); + } + + private static void assertThatVectorCreationResultsInCorrectBinary(final BinaryVector expectedVectorData, + final String testKey, + final BsonDocument actualDecodedDocument, + final String expectedCanonicalBsonHex, + final String description) { + BsonDocument documentToEncode = new BsonDocument(testKey, new BsonBinary(expectedVectorData)); + assertEquals(documentToEncode, actualDecodedDocument); + assertEquals(expectedCanonicalBsonHex, encodeToHex(documentToEncode), + format("Failed to create expected BSON for document with description '%s'", description)); + } + + private static void assertVectorDecoding(final byte[] expectedVectorData, + final BinaryVector.DataType expectedDType, + final byte[] actualVectorData, + final BinaryVector actualVector) { + Assertions.assertArrayEquals(actualVectorData, expectedVectorData, + () -> "Actual: " + Arrays.toString(actualVectorData) + " != Expected:" + Arrays.toString(expectedVectorData)); + assertEquals(expectedDType, actualVector.getDataType()); + } + + private static void assertVectorDecoding(final byte[] expectedVectorData, + final BinaryVector.DataType expectedDType, + final byte expectedPadding, + final PackedBitBinaryVector actualVector) { + byte[] actualVectorData = actualVector.getData(); + assertVectorDecoding( + expectedVectorData, + expectedDType, + actualVectorData, + actualVector); + assertEquals(expectedPadding, actualVector.getPadding()); + } + + private static void assertVectorDecoding(final float[] expectedVectorData, + final BinaryVector.DataType expectedDType, + final Float32BinaryVector actualVector) { + float[] actualVectorArray = actualVector.getData(); + Assertions.assertArrayEquals(actualVectorArray, expectedVectorData, + () -> "Actual: " + Arrays.toString(actualVectorArray) + " != Expected:" + Arrays.toString(expectedVectorData)); + 
assertEquals(expectedDType, actualVector.getDataType()); + } + + private static byte[] toByteArray(final BsonArray arrayVector) { + byte[] bytes = new byte[arrayVector.size()]; + for (int i = 0; i < arrayVector.size(); i++) { + bytes[i] = (byte) arrayVector.get(i).asInt32().getValue(); + } + return bytes; + } + + private static float[] toFloatArray(final BsonArray arrayVector) { + float[] floats = new float[arrayVector.size()]; + for (int i = 0; i < arrayVector.size(); i++) { + BsonValue bsonValue = arrayVector.get(i); + if (bsonValue.isString()) { + floats[i] = parseFloat(bsonValue.asString()); + } else { + floats[i] = (float) arrayVector.get(i).asDouble().getValue(); + } + } + return floats; + } + + private static float parseFloat(final BsonString bsonValue) { + String floatValue = bsonValue.getValue(); + switch (floatValue) { + case "-inf": + return Float.NEGATIVE_INFINITY; + case "inf": + return Float.POSITIVE_INFINITY; + default: + return Float.parseFloat(floatValue); + } + } + + private static Stream data() { + List data = new ArrayList<>(); + for (BsonDocument testDocument : JsonPoweredTestHelper.getTestDocuments("/bson-binary-vector")) { + for (BsonValue curValue : testDocument.getArray("tests", new BsonArray())) { + BsonDocument testCaseDocument = curValue.asDocument(); + data.add(Arguments.of(createTestCaseDescription(testDocument, testCaseDocument), testDocument, testCaseDocument)); + } + } + return data.stream(); + } + + private static String createTestCaseDescription(final BsonDocument testDocument, + final BsonDocument testCaseDocument) { + boolean isValidTestCase = testCaseDocument.getBoolean("valid").getValue(); + String fileDescription = testDocument.getString("description").getValue(); + String testDescription = testCaseDocument.getString("description").getValue(); + return "[Valid input: " + isValidTestCase + "] " + fileDescription + ": " + testDescription; + } +} diff --git a/bson/src/test/unit/util/JsonPoweredTestHelper.java 
b/bson/src/test/unit/util/JsonPoweredTestHelper.java index e0f15e198c8..e261e132ab4 100644 --- a/bson/src/test/unit/util/JsonPoweredTestHelper.java +++ b/bson/src/test/unit/util/JsonPoweredTestHelper.java @@ -17,60 +17,119 @@ package util; import org.bson.BsonDocument; -import org.bson.codecs.BsonDocumentCodec; -import org.bson.codecs.DecoderContext; -import org.bson.json.JsonReader; +import org.bson.BsonString; +import org.bson.BsonValue; +import org.bson.assertions.Assertions; import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; -import java.net.URISyntaxException; -import java.nio.charset.Charset; +import java.net.URI; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.List; public final class JsonPoweredTestHelper { - public static BsonDocument getTestDocument(final File file) throws IOException { - return new BsonDocumentCodec().decode(new JsonReader(getFileAsString(file)), DecoderContext.builder().build()); + private static final String SPECIFICATIONS_PREFIX = "/specifications/source/"; + + public static BsonDocument getTestDocument(final String resourcePath) { + BsonDocument testDocument = getTestDocumentWithMetaData(SPECIFICATIONS_PREFIX + resourcePath); + testDocument.remove("resourcePath"); + testDocument.remove("fileName"); + return testDocument; } - public static List getTestFiles(final String resourcePath) throws URISyntaxException { - List files = new ArrayList(); - addFilesFromDirectory(new 
File(JsonPoweredTestHelper.class.getResource(resourcePath).toURI()), files); - return files; + public static Collection getTestData(final String resourcePath) { + List data = new ArrayList<>(); + for (BsonDocument document : getSpecTestDocuments(resourcePath)) { + for (BsonValue test : document.getArray("tests")) { + BsonDocument testDocument = test.asDocument(); + data.add(new Object[]{document.getString("fileName").getValue(), + testDocument.getString("description").getValue(), + testDocument.getString("uri", new BsonString("")).getValue(), + testDocument}); + } + } + return data; } - private static String getFileAsString(final File file) throws IOException { - StringBuilder stringBuilder = new StringBuilder(); - String line; - String ls = System.getProperty("line.separator"); - BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), Charset.forName("UTF-8"))); + public static List getSpecTestDocuments(final String resourcePath) { + return getTestDocuments(SPECIFICATIONS_PREFIX + resourcePath); + } + + public static List getTestDocuments(final String resourcePath) { + List files = new ArrayList<>(); try { - while ((line = reader.readLine()) != null) { - stringBuilder.append(line); - stringBuilder.append(ls); + URL urlResource = JsonPoweredTestHelper.class.getResource(resourcePath); + if (urlResource == null) { + Assertions.fail("No such resource: " + resourcePath); + } + + URI resource = urlResource.toURI(); + try (FileSystem fileSystem = (resource.getScheme().equals("jar") ? 
FileSystems.newFileSystem(resource, Collections.emptyMap()) : null)) { + Path myPath = Paths.get(resource); + Files.walkFileTree(myPath, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(final Path filePath, final BasicFileAttributes attrs) throws IOException { + if (filePath.toString().endsWith(".json")) { + if (fileSystem == null) { + files.add(getTestDocumentWithMetaData(filePath.toString().substring(filePath.toString().lastIndexOf(resourcePath)))); + } else { + files.add(getTestDocumentWithMetaData(filePath.toString())); + } + } + return super.visitFile(filePath, attrs); + } + }); } - } finally { - reader.close(); + } catch (Exception e) { + Assertions.fail("Unable to load resource: " + resourcePath, e); } - return stringBuilder.toString(); + + if (files.isEmpty()) { + Assertions.fail("No test documents found in: " + resourcePath); + } + return files; + } + + private static BsonDocument getTestDocumentWithMetaData(final String resourcePath) { + BsonDocument testDocument = BsonDocument.parse(resourcePathToString(resourcePath)); + testDocument.append("resourcePath", new BsonString(resourcePath)) + .append("fileName", new BsonString(resourcePath.substring(resourcePath.lastIndexOf('/') + 1))); + return testDocument; } - private static void addFilesFromDirectory(final File directory, final List files) { - String[] fileNames = directory.list(); - if (fileNames != null) { - for (String fileName : fileNames) { - File file = new File(directory, fileName); - if (file.isDirectory()) { - addFilesFromDirectory(file, files); - } else if (file.getName().endsWith(".json")) { - files.add(file); + private static String resourcePathToString(final String resourcePath) { + StringBuilder stringBuilder = new StringBuilder(); + String line; + String ls = System.lineSeparator(); + try (InputStream inputStream = JsonPoweredTestHelper.class.getResourceAsStream(resourcePath)) { + if (inputStream == null) { + Assertions.fail("Unable to load resource: " + 
resourcePath); + } + try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { + while ((line = reader.readLine()) != null) { + stringBuilder.append(line); + stringBuilder.append(ls); } } + } catch (Exception e) { + Assertions.fail("Unable to load resource", e); } + return stringBuilder.toString(); } private JsonPoweredTestHelper() { diff --git a/bson/src/test/unit/util/ThreadTestHelpers.java b/bson/src/test/unit/util/ThreadTestHelpers.java new file mode 100644 index 00000000000..e2115da079f --- /dev/null +++ b/bson/src/test/unit/util/ThreadTestHelpers.java @@ -0,0 +1,71 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package util; + +import org.opentest4j.MultipleFailuresError; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +public final class ThreadTestHelpers { + + private ThreadTestHelpers() { + } + + public static void executeAll(final int nThreads, final Runnable c) { + executeAll(Collections.nCopies(nThreads, c).toArray(new Runnable[0])); + } + + public static void executeAll(final Runnable... 
runnables) { + ExecutorService service = null; + try { + service = Executors.newFixedThreadPool(runnables.length); + CountDownLatch latch = new CountDownLatch(runnables.length); + List failures = Collections.synchronizedList(new ArrayList<>()); + for (final Runnable runnable : runnables) { + service.submit(() -> { + try { + runnable.run(); + } catch (Throwable e) { + failures.add(e); + } finally { + latch.countDown(); + } + }); + } + try { + latch.await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException(e); + } + if (!failures.isEmpty()) { + MultipleFailuresError multipleFailuresError = new MultipleFailuresError("Failed to execute all", failures); + failures.forEach(multipleFailuresError::addSuppressed); + throw multipleFailuresError; + } + } finally { + if (service != null) { + service.shutdown(); + } + } + } +} diff --git a/build.gradle b/build.gradle deleted file mode 100644 index 9ee7e780ce9..00000000000 --- a/build.gradle +++ /dev/null @@ -1,332 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import static org.gradle.util.CollectionUtils.single - -apply plugin: 'eclipse' -apply plugin: 'idea' - -def configDir = new File(rootDir, 'config') -ext.jnrUnixsocketVersion = '0.18' -ext.nettyVersion = '4.1.17.Final' -ext.snappyVersion = '1.1.4' -ext.jnaVersion = '4.5.0' - -buildscript { - repositories { - maven { url 'https://oss.sonatype.org/content/repositories/snapshots' } - jcenter() - mavenCentral() - maven { url "https://plugins.gradle.org/m2/" } - mavenLocal() - } - dependencies { - classpath 'org.kordamp.gradle:clirr-gradle-plugin:0.2.2' - classpath 'com.netflix.nebula:gradle-extra-configurations-plugin:1.12.+' - classpath 'com.bmuschko:gradle-nexus-plugin:2.2' - classpath "gradle.plugin.com.github.spotbugs:spotbugs-gradle-plugin:1.6.1" - } -} - -////////////////////////////////////////// -// Common behavior // -////////////////////////////////////////// - -configure(subprojects.findAll { it.name != 'util' }) { - apply plugin: 'java' - apply plugin: 'optional-base' - - evaluationDependsOn(':util') - - group = 'org.mongodb' - version = '3.8.0-SNAPSHOT' - sourceCompatibility = JavaVersion.VERSION_1_6 - targetCompatibility = JavaVersion.VERSION_1_6 - - repositories { - mavenCentral() - maven { url 'https://oss.sonatype.org/content/repositories/snapshots' } - mavenLocal() - } - - dependencies { - compileOnly 'com.google.code.findbugs:jsr305:1.3.9' - compile 'org.slf4j:slf4j-api:1.7.6', optional - - testCompile 'com.google.code.findbugs:jsr305:1.3.9' - } - - /* Compiling */ - tasks.withType(AbstractCompile) { - options.encoding = 'ISO-8859-1' - options.fork = true - options.debug = true - options.compilerArgs = ['-Xlint:all', '-Xlint:-options'] - - onlyIf { JavaVersion.current().isJava7Compatible() } - } - - project.ext.buildingWith = { propertyName -> - project.hasProperty(propertyName) && project.property(propertyName).toBoolean() - } - - tasks.withType(Checkstyle) { - reports { - xml.enabled true - html.enabled true - } - } - - /* - For security we 
allow the signing-related project properties to be passed in as environment variables, which - Gradle enables if they are prefixed with "ORG_GRADLE_PROJECT_". But since environment variables can not contain - the '.' character and the signing-related properties contain '.', here we map signing-related project properties with '_' - to ones with '.' that are expected by the signing plugin. - */ - gradle.taskGraph.whenReady { taskGraph -> - if (taskGraph.allTasks.any { it instanceof Sign }) { - if (project.hasProperty("signing_keyId")) { - allprojects { ext."signing.keyId" = project.property("signing_keyId") } - } - if (project.hasProperty("signing_secretKeyRingFile")) { - allprojects { ext."signing.secretKeyRingFile" = project.property("signing_secretKeyRingFile") } - } - if (project.hasProperty("signing_password")) { - allprojects { ext."signing.password" = project.property("signing_password") } - } - } - } - - javadoc { - exclude "**/com/mongodb/**/internal/**" - exclude "**/org/bson/**/internal/**" - dependsOn project(':util').compileJava //We need taglets to be compiled - options { options -> setJavaDocOptions(options) } - } -} - -def getGitVersion() { - def outputAsString - new ByteArrayOutputStream().withStream { os -> - def result = exec { - executable 'git' - args 'describe', '--tags', '--always', '--dirty' - standardOutput = os - } - outputAsString = os.toString().substring(1).trim() - } - return outputAsString -} - -configure(subprojects.findAll { it.name != 'util' && it.name != 'mongo-java-driver' }) { - apply plugin: 'checkstyle' - apply plugin: "com.github.spotbugs" - apply plugin: 'jacoco' - apply plugin: 'groovy' - apply plugin: 'codenarc' - - dependencies { - testCompile 'org.codehaus.groovy:groovy-all:2.4.12' - testCompile 'org.spockframework:spock-core:1.1-groovy-2.4' - testCompile 'cglib:cglib-nodep:2.2.2' - testCompile 'org.objenesis:objenesis:1.3' - testCompile 'org.hamcrest:hamcrest-all:1.3' - testCompile 'ch.qos.logback:logback-classic:1.1.1' - 
testCompile project(':util') //Adding categories to classpath - } - - sourceSets { - main { - java.srcDirs = ['src/main'] - } - test { - groovy.srcDirs = ['src/test/functional', 'src/test/unit'] - } - } - - /* Testing */ - tasks.withType(Test) { - maxHeapSize = "3g" - maxParallelForks = 1 - - systemProperties( - 'org.mongodb.test.uri': System.getProperty('org.mongodb.test.uri'), - 'org.mongodb.test.embedded.path': System.getProperty('org.mongodb.test.embedded.path'), - 'org.mongodb.useSocket': System.getProperty('org.mongodb.useSocket', 'false'), - 'org.mongodb.disableAsync': System.getProperty('org.mongodb.disableAsync', 'false'), - 'org.mongodb.async.type': System.getProperty('org.mongodb.async.type', 'nio2'), - ) - - if (project.buildingWith('ssl.enabled')) { - systemProperties( - 'javax.net.ssl.keyStoreType': project.property('ssl.keyStoreType'), - 'javax.net.ssl.keyStore': project.property('ssl.keyStore'), - 'javax.net.ssl.keyStorePassword': project.property('ssl.keyStorePassword'), - 'javax.net.ssl.trustStoreType': project.property('ssl.trustStoreType'), - 'javax.net.ssl.trustStore': project.property('ssl.trustStore'), - 'javax.net.ssl.trustStorePassword': project.property('ssl.trustStorePassword') - ) - } - - if (project.buildingWith('gssapi.enabled')) { - systemProperties( - 'sun.security.krb5.debug': project.getProperty('sun.security.krb5.debug'), - 'javax.security.auth.useSubjectCredsOnly': "false", - 'java.security.krb5.kdc': project.getProperty('krb5.kdc'), - 'java.security.krb5.realm': project.getProperty('krb5.realm'), - 'java.security.auth.login.config': project.getProperty('auth.login.config'), - ) - } - - useJUnit { - if (!project.buildingWith('rs.enabled')) { - excludeCategories 'category.ReplicaSet' - } - if (project.buildingWith('quicktest')) { - excludeCategories 'category.SlowUnit' - } - if (project.buildingWith('travistest')) { - excludeCategories 'category.SlowUnit', 'category.Slow' - } - } - - jacoco { enabled = false } - - testLogging { 
exceptionFormat = 'full' } - } - - task testSlowUnit(type: Test) { - useJUnit { - includeCategories 'category.SlowUnit' - } - } - - gradle.taskGraph.whenReady { taskGraph -> - if (taskGraph.hasTask(testCoverage)) { - tasks.withType(Test) { jacoco { enabled = true } } - } - } - - task testCoverage(dependsOn: test) - - /* Code quality */ - - checkstyle { - toolVersion = "7.4" - configFile = new File(configDir, 'checkstyle.xml') - configProperties.checkstyleConfigDir = configDir - } - - spotbugs { - excludeFilter = new File(configDir, 'findbugs-exclude.xml') - sourceSets = [sourceSets.main] - toolVersion = '3.1.3' - } - - codenarc { - toolVersion = '1.1' - reportFormat = project.buildingWith('xmlReports.enabled') ? 'xml' : 'html' - } - - tasks.withType(com.github.spotbugs.SpotBugsTask) { - reports { - xml.enabled = project.buildingWith('xmlReports.enabled') - html.enabled = !project.buildingWith('xmlReports.enabled') - } - } - - tasks.withType(Test) { - def jdkHome = findProperty("jdkHome") - if (jdkHome) { - def javaExecutablesPath = new File(jdkHome, 'bin') - def javaExecutables = [:].withDefault { execName -> - def executable = new File(javaExecutablesPath, execName) - assert executable.exists() : "There is no ${execName} executable in ${javaExecutablesPath}" - executable - } - executable = javaExecutables.java - } - } -} - -task docs(type: Javadoc) { - destinationDir = new File(projectDir, 'build/docs') - options { options -> setJavaDocOptions(options) } - subprojects.grep({ it.name != 'util' }).each { proj -> - proj.tasks.withType(Javadoc).each { javadocTask -> - source += javadocTask.source - classpath += javadocTask.classpath - excludes += javadocTask.excludes - includes += javadocTask.includes - dependsOn +=javadocTask.dependsOn - } - } -} - -def setJavaDocOptions(MinimalJavadocOptions options) { - options.author = true - options.version = true - options.links 'https://docs.oracle.com/javase/9/docs/api/' - options.tagletPath 
single(project(':util').sourceSets.main.output.classesDirs) - options.taglets 'ManualTaglet' - options.taglets 'DochubTaglet' - options.taglets 'ServerReleaseTaglet' - options.encoding = 'UTF-8' - options.charSet 'UTF-8' - options.docEncoding 'UTF-8' - options.addBooleanOption("html4", true) - options.addBooleanOption("-allow-script-in-comments", true) - options.header = ''' - - ''' -} - -////////////////////////////////////////// -// Root project configuration // -////////////////////////////////////////// -task wrapper(type: Wrapper) { - gradleVersion = '4.5.1' -} - -gradle.buildFinished { BuildResult result -> - if (result.failure && !JavaVersion.current().isJava9Compatible()) { - gradle.rootProject.logger.error("\nWARNING:\nJDK ${JavaVersion.VERSION_1_9} is required to build the driver: " + - "you are using JDK ${JavaVersion.current()}.") - } -} - -apply from: 'gradle/deploy.gradle' -apply from: 'gradle/TestColorOutput.gradle' - diff --git a/build.gradle.kts b/build.gradle.kts new file mode 100644 index 00000000000..3112e2c59b9 --- /dev/null +++ b/build.gradle.kts @@ -0,0 +1,49 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import java.time.Duration + +plugins { + id("eclipse") + id("idea") + alias(libs.plugins.nexus.publish) +} + +val nexusUsername: Provider = providers.gradleProperty("nexusUsername") +val nexusPassword: Provider = providers.gradleProperty("nexusPassword") + +nexusPublishing { + packageGroup.set("org.mongodb") + repositories { + sonatype { + username.set(nexusUsername) + password.set(nexusPassword) + + // central portal URLs + nexusUrl.set(uri("https://ossrh-staging-api.central.sonatype.com/service/local/")) + snapshotRepositoryUrl.set(uri("https://central.sonatype.com/repository/maven-snapshots/")) + } + } + + connectTimeout.set(Duration.ofMinutes(5)) + clientTimeout.set(Duration.ofMinutes(30)) + + transitionCheckOptions { + // We have many artifacts and Maven Central can take a long time on its compliance checks. + // Set the timeout for waiting for the repository to close to a comfortable 50 minutes. + maxRetries.set(300) + delayBetween.set(Duration.ofSeconds(10)) + } +} diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts new file mode 100644 index 00000000000..33d758d0753 --- /dev/null +++ b/buildSrc/build.gradle.kts @@ -0,0 +1,69 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +plugins { + id("java-library") + `kotlin-dsl` + alias(libs.plugins.spotless) + alias(libs.plugins.detekt) apply false +} + +repositories { + gradlePluginPortal() + mavenCentral() + google() +} + +// Spotless configuration for `buildSrc` code. +spotless { + kotlinGradle { + target("**/*.gradle.kts") + ktfmt("0.39").dropboxStyle().configure { + it.setMaxWidth(120) + it.setRemoveUnusedImport(true) + } + trimTrailingWhitespace() + indentWithSpaces() + endWithNewline() + licenseHeaderFile( + "../config/mongodb.license", "(package|group|plugins|import|buildscript|rootProject|@Suppress)") + } + + kotlin { + target("**/*.kt") + ktfmt().dropboxStyle().configure { + it.setMaxWidth(120) + it.setRemoveUnusedImport(true) + } + trimTrailingWhitespace() + indentWithSpaces() + endWithNewline() + licenseHeaderFile(rootProject.file("../config/mongodb.license")) + } + + java { + palantirJavaFormat() + target("src/*/java/**/*.java") + removeUnusedImports() + trimTrailingWhitespace() + indentWithSpaces() + endWithNewline() + licenseHeaderFile(rootProject.file("../config/mongodb.license")) + } +} + +java { toolchain { languageVersion.set(JavaLanguageVersion.of("17")) } } + +tasks.findByName("check")?.dependsOn("spotlessCheck") diff --git a/buildSrc/settings.gradle.kts b/buildSrc/settings.gradle.kts new file mode 100644 index 00000000000..832331d3e3e --- /dev/null +++ b/buildSrc/settings.gradle.kts @@ -0,0 +1,21 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +plugins { + // Add support for `libs.versions.toml` within `buildSrc` + // https://github.com/radoslaw-panuszewski/typesafe-conventions-gradle-plugin + // https://github.com/gradle/gradle/issues/15383 + id("dev.panuszewski.typesafe-conventions") version "0.7.3" +} diff --git a/buildSrc/src/main/java/com/mongodb/doclet/AtlasManualTaglet.java b/buildSrc/src/main/java/com/mongodb/doclet/AtlasManualTaglet.java new file mode 100644 index 00000000000..673b55a6bf6 --- /dev/null +++ b/buildSrc/src/main/java/com/mongodb/doclet/AtlasManualTaglet.java @@ -0,0 +1,33 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.mongodb.doclet; + +public final class AtlasManualTaglet extends DocTaglet { + @Override + public String getName() { + return "mongodb.atlas.manual"; + } + + @Override + protected String getHeader() { + return "MongoDB Atlas documentation"; + } + + @Override + protected String getBaseDocURI() { + return "https://www.mongodb.com/docs/atlas/"; + } +} diff --git a/buildSrc/src/main/java/com/mongodb/doclet/DocTaglet.java b/buildSrc/src/main/java/com/mongodb/doclet/DocTaglet.java new file mode 100644 index 00000000000..0f51f45f197 --- /dev/null +++ b/buildSrc/src/main/java/com/mongodb/doclet/DocTaglet.java @@ -0,0 +1,77 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.mongodb.doclet; + +import static java.util.Arrays.asList; +import static jdk.javadoc.doclet.Taglet.Location.CONSTRUCTOR; +import static jdk.javadoc.doclet.Taglet.Location.FIELD; +import static jdk.javadoc.doclet.Taglet.Location.METHOD; +import static jdk.javadoc.doclet.Taglet.Location.OVERVIEW; +import static jdk.javadoc.doclet.Taglet.Location.PACKAGE; +import static jdk.javadoc.doclet.Taglet.Location.TYPE; + +import com.sun.source.doctree.DocTree; +import com.sun.source.doctree.UnknownBlockTagTree; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import javax.lang.model.element.Element; +import jdk.javadoc.doclet.Taglet; + +public abstract class DocTaglet implements Taglet { + + @Override + public Set getAllowedLocations() { + return new HashSet<>(asList(CONSTRUCTOR, METHOD, FIELD, OVERVIEW, PACKAGE, TYPE)); + } + + @Override + public boolean isInlineTag() { + return false; + } + + @Override + public String toString(List tags, Element element) { + if (tags.size() == 0) { + return null; + } + + StringBuilder buf = + new StringBuilder(String.format("
    %s
    ", getHeader())); + for (DocTree tag : tags) { + String text = ((UnknownBlockTagTree) tag).getContent().get(0).toString(); + buf.append("
    ").append(genLink(text)).append("
    "); + } + return buf.toString(); + } + + protected String genLink(final String text) { + String relativePath = text; + String display = text; + + int firstSpace = text.indexOf(' '); + if (firstSpace != -1) { + relativePath = text.substring(0, firstSpace); + display = text.substring(firstSpace).trim(); + } + + return String.format("%s", getBaseDocURI(), relativePath, display); + } + + protected abstract String getHeader(); + + protected abstract String getBaseDocURI(); +} diff --git a/util/src/main/DochubTaglet.java b/buildSrc/src/main/java/com/mongodb/doclet/DochubTaglet.java similarity index 92% rename from util/src/main/DochubTaglet.java rename to buildSrc/src/main/java/com/mongodb/doclet/DochubTaglet.java index 541775b1bff..a6b960eaa27 100644 --- a/util/src/main/DochubTaglet.java +++ b/buildSrc/src/main/java/com/mongodb/doclet/DochubTaglet.java @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package com.mongodb.doclet; public class DochubTaglet extends DocTaglet { @@ -28,7 +29,6 @@ protected String getHeader() { @Override protected String getBaseDocURI() { - return "http://dochub.mongodb.org/"; + return "https://dochub.mongodb.org/"; } - } diff --git a/util/src/main/ManualTaglet.java b/buildSrc/src/main/java/com/mongodb/doclet/ManualTaglet.java similarity index 91% rename from util/src/main/ManualTaglet.java rename to buildSrc/src/main/java/com/mongodb/doclet/ManualTaglet.java index 71312bd76e1..ff49c9ab37c 100644 --- a/util/src/main/ManualTaglet.java +++ b/buildSrc/src/main/java/com/mongodb/doclet/ManualTaglet.java @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +package com.mongodb.doclet; public class ManualTaglet extends DocTaglet { @@ -28,7 +29,6 @@ protected String getHeader() { @Override protected String getBaseDocURI() { - return "http://docs.mongodb.org/manual/"; + return "https://www.mongodb.com/docs/manual/"; } - } diff --git a/util/src/main/ServerReleaseTaglet.java b/buildSrc/src/main/java/com/mongodb/doclet/ServerReleaseTaglet.java similarity index 90% rename from util/src/main/ServerReleaseTaglet.java rename to buildSrc/src/main/java/com/mongodb/doclet/ServerReleaseTaglet.java index 0b7b69a3421..9b4f88fbf92 100644 --- a/util/src/main/ServerReleaseTaglet.java +++ b/buildSrc/src/main/java/com/mongodb/doclet/ServerReleaseTaglet.java @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package com.mongodb.doclet; public class ServerReleaseTaglet extends DocTaglet { @@ -28,7 +29,6 @@ protected String getHeader() { @Override protected String getBaseDocURI() { - return "http://docs.mongodb.org/manual/release-notes/"; + return "https://www.mongodb.com/docs/manual/release-notes/"; } - } diff --git a/buildSrc/src/main/kotlin/ProjectExtensions.kt b/buildSrc/src/main/kotlin/ProjectExtensions.kt new file mode 100644 index 00000000000..a369aefc9d2 --- /dev/null +++ b/buildSrc/src/main/kotlin/ProjectExtensions.kt @@ -0,0 +1,56 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import org.gradle.api.Project +import org.gradle.api.java.archives.Manifest +import org.gradle.api.publish.maven.MavenPublication +import org.gradle.api.tasks.bundling.Jar +import org.gradle.kotlin.dsl.named +import org.gradle.kotlin.dsl.withType + +object ProjectExtensions { + + /** + * Extension function to get and validate the current scala version + * + * See: gradle.properties for `supportedScalaVersions` and `defaultScalaVersion` + */ + fun Project.scalaVersion(): String { + val supportedScalaVersions = (project.property("supportedScalaVersions") as String).split(",") + val scalaVersion: String = + (project.findProperty("scalaVersion") ?: project.property("defaultScalaVersion")) as String + + if (!supportedScalaVersions.contains(scalaVersion)) { + throw UnsupportedOperationException( + """Scala version: $scalaVersion is not a supported scala version. + |Supported versions: $supportedScalaVersions + """ + .trimMargin()) + } + + return scalaVersion + } + + /** Extension function to configure the maven publication */ + fun Project.configureMavenPublication(configure: MavenPublication.() -> Unit = {}) { + val publishing = extensions.getByName("publishing") as org.gradle.api.publish.PublishingExtension + publishing.publications.named("maven") { configure() } + } + + /** Extension function to configure the jars manifest */ + fun Project.configureJarManifest(configure: Manifest.() -> Unit = {}) { + tasks.withType { manifest { afterEvaluate { configure() } } } + } +} diff --git a/buildSrc/src/main/kotlin/conventions/Companion.kt b/buildSrc/src/main/kotlin/conventions/Companion.kt new file mode 100644 index 00000000000..c28eef2a080 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/Companion.kt @@ -0,0 +1,29 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +import org.gradle.accessors.dm.LibrariesForLibs +import org.gradle.api.Project +import org.gradle.kotlin.dsl.getByType + +// Adds the `libs` value for use in conventions +internal val Project.libs: LibrariesForLibs + get() = extensions.getByType() + +/** Extension function to determine if a project property has been set. */ +fun Project.buildingWith(name: String): Boolean { + return this.findProperty(name)?.toString()?.toBoolean() ?: false +} diff --git a/buildSrc/src/main/kotlin/conventions/bnd.gradle.kts b/buildSrc/src/main/kotlin/conventions/bnd.gradle.kts new file mode 100644 index 00000000000..bbea4bf9878 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/bnd.gradle.kts @@ -0,0 +1,23 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +import libs + +// Gradle Plugin for developing OSGi bundles with Bnd. 
+// https://plugins.gradle.org/plugin/biz.aQute.bnd.builder + +plugins { alias(libs.plugins.bnd) } diff --git a/buildSrc/src/main/kotlin/conventions/codenarc.gradle.kts b/buildSrc/src/main/kotlin/conventions/codenarc.gradle.kts new file mode 100644 index 00000000000..25cd5c00cc8 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/codenarc.gradle.kts @@ -0,0 +1,25 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +// The CodeNarc plugin performs quality checks on your project’s Groovy source files +// https://docs.gradle.org/current/userguide/codenarc_plugin.html +plugins { id("codenarc") } + +codenarc { + toolVersion = "1.6.1" + reportFormat = if (project.buildingWith("xmlReports.enabled")) "xml" else "html" +} diff --git a/buildSrc/src/main/kotlin/conventions/detekt.gradle.kts b/buildSrc/src/main/kotlin/conventions/detekt.gradle.kts new file mode 100644 index 00000000000..4759138904a --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/detekt.gradle.kts @@ -0,0 +1,45 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +import io.gitlab.arturbosch.detekt.Detekt +import libs + +// Static code analysis for Kotlin +// https://plugins.gradle.org/plugin/io.gitlab.arturbosch.detekt +plugins { alias(libs.plugins.detekt) } + +detekt { + allRules = true // fail build on any finding + buildUponDefaultConfig = true // preconfigure defaults + config = rootProject.files("config/detekt/detekt.yml") // point to your custom config defining rules to run, + // overwriting default behavior + baseline = rootProject.file("config/detekt/baseline.xml") // a way of suppressing issues before introducing detekt + source = + files( + file("src/main/kotlin"), + file("src/test/kotlin"), + file("src/integrationTest/kotlin"), + ) +} + +tasks.withType().configureEach { + reports { + html.required.set(true) // observe findings in your browser with structure and code snippets + xml.required.set(true) // checkstyle like format mainly for integrations like Jenkins + txt.required.set(false) // similar to the console output, contains issue signature to manually edit + } +} diff --git a/buildSrc/src/main/kotlin/conventions/dokka.gradle.kts b/buildSrc/src/main/kotlin/conventions/dokka.gradle.kts new file mode 100644 index 00000000000..06b40161697 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/dokka.gradle.kts @@ -0,0 +1,49 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +import libs + +// Dokka, the documentation engine for Kotlin +// https://plugins.gradle.org/plugin/org.jetbrains.dokka +plugins { + alias(libs.plugins.dokka) + id("conventions.publishing") +} + +// Create a generic `docs` task +tasks.register("docs") { + group = "documentation" + dependsOn("dokkaHtml") +} + +val dokkaOutputDir: Provider = rootProject.layout.buildDirectory.dir("docs/${base.archivesName.get()}") + +tasks.dokkaHtml.configure { + outputDirectory.set(dokkaOutputDir.get().asFile) + moduleName.set(base.archivesName.get()) +} + +val cleanDokka by tasks.register("cleanDokka") { delete(dokkaOutputDir) } + +// Ensure dokka is used for the javadoc +afterEvaluate { + tasks.named("javadocJar").configure { + dependsOn("cleanDokka", "dokkaHtml") + archiveClassifier.set("javadoc") + from(dokkaOutputDir) + } +} diff --git a/buildSrc/src/main/kotlin/conventions/git-version.gradle.kts b/buildSrc/src/main/kotlin/conventions/git-version.gradle.kts new file mode 100644 index 00000000000..9ddfd25cab2 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/git-version.gradle.kts @@ -0,0 +1,35 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +// Provides the current git version for the build + +val gitVersion: Provider = + providers + .exec { + commandLine("git", "describe", "--tags", "--always", "--dirty") + isIgnoreExitValue = true + } + .standardOutput + .asText + .map { it.trim().removePrefix("r") } + .orElse("UNKNOWN") + +// Allows access to gitVersion extension to other conventions +extensions.add("gitVersion", gitVersion) + +// Debug task that outputs the gitVersion. +tasks.register("gitVersion") { doLast { println("Git version: ${gitVersion.get()}") } } diff --git a/buildSrc/src/main/kotlin/conventions/javadoc.gradle.kts b/buildSrc/src/main/kotlin/conventions/javadoc.gradle.kts new file mode 100644 index 00000000000..8ab2ef5bb5b --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/javadoc.gradle.kts @@ -0,0 +1,125 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +// Provides the Javadoc configuration for the build +plugins { + id("java-library") + id("maven-publish") +} + +// Create a generic `docs` task +tasks.register("docs") { + group = "documentation" + dependsOn("javadoc") +} + +tasks.withType { + exclude("**/com/mongodb/**/assertions/**") + exclude("**/com/mongodb/**/internal/**") + exclude("**/org/bson/**/internal/**") + + setDestinationDir(rootProject.file("build/docs/${project.base.archivesName.get()}")) + + val standardDocletOptions = options as StandardJavadocDocletOptions + standardDocletOptions.apply { + author(true) + version(true) + links = + listOf( + "https://docs.oracle.com/en/java/javase/11/docs/api/", + "https://www.reactive-streams.org/reactive-streams-1.0.3-javadoc/") + tagletPath(rootProject.projectDir.resolve("buildSrc/build/classes/java/main")) + taglets("com.mongodb.doclet.AtlasManualTaglet") + taglets("com.mongodb.doclet.ManualTaglet") + taglets("com.mongodb.doclet.DochubTaglet") + taglets("com.mongodb.doclet.ServerReleaseTaglet") + encoding = "UTF-8" + charSet("UTF-8") + docEncoding("UTF-8") + addBooleanOption("html5", true) + addBooleanOption("-allow-script-in-comments", true) + header( + """ + + """.trimIndent()) + } + + // Customizations for specific projects + afterEvaluate { + val docVersion = docVersion(project.version as String) + if (project.name != "bson") linksOfflineHelper(docVersion, "bson", standardDocletOptions) + if (!project.name.contains("bson") && project.name != "mongodb-driver-core") + linksOfflineHelper(docVersion, "mongodb-driver-core", standardDocletOptions) + if (!project.name.contains("bson") && project.name != "mongodb-driver-sync") + linksOfflineHelper(docVersion, "mongodb-driver-sync", standardDocletOptions) + } +} + +// Helper functions +internal fun docVersion(version: String): String { + val (major, minor, patch) = version.split("-").first().split(".").map { it.toInt() } + var docVersion = "${major}.${minor}" + if 
(version.contains("-SNAPSHOT") && patch == 0 && minor > 0) { + docVersion = "${major}.${minor - 1}" + } + return docVersion +} + +internal fun linksOfflineHelper(docVersion: String, packageName: String, options: StandardJavadocDocletOptions): Unit { + val docsPath = rootProject.file("build/docs/${packageName}") + if (docsPath.exists()) { + options.apply { + linksOffline( + "http://mongodb.github.io/mongo-java-driver/${docVersion}/apidocs/${packageName}/", docsPath.path) + } + } +} diff --git a/buildSrc/src/main/kotlin/conventions/optional.gradle.kts b/buildSrc/src/main/kotlin/conventions/optional.gradle.kts new file mode 100644 index 00000000000..1bf10321971 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/optional.gradle.kts @@ -0,0 +1,33 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +// Provides the optional dependencies support eg: optionalApi, optionalImplementation +plugins { + id("java-library") + id("maven-publish") +} + +java { registerFeature("optional") { usingSourceSet(sourceSets["main"]) } } + +// Suppress POM warnings for the optional features (eg: optionalApi, optionalImplementation) +afterEvaluate { + configurations + .filter { it.name.startsWith("optional") } + .forEach { optional -> + publishing.publications.named("maven") { suppressPomMetadataWarningsFor(optional.name) } + } +} diff --git a/buildSrc/src/main/kotlin/conventions/publishing.gradle.kts b/buildSrc/src/main/kotlin/conventions/publishing.gradle.kts new file mode 100644 index 00000000000..b243ce7df2e --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/publishing.gradle.kts @@ -0,0 +1,160 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +// Provides the publishing configuration for the build +// +// Note: Further configuration can be achieved using the `project.configureMavenPublication` and +// `project.configureJarManifest` helpers. 
+// See: `ProjectExtensions.kt` for more information +plugins { + id("conventions.git-version") + id("maven-publish") + id("signing") +} + +val signingKey: Provider = providers.gradleProperty("signingKey") +val signingPassword: Provider = providers.gradleProperty("signingPassword") +@Suppress("UNCHECKED_CAST") val gitVersion: Provider = project.findProperty("gitVersion") as Provider + +tasks.withType().configureEach { + // Gradle warns about some signing tasks using publishing task outputs without explicit + // dependencies. Here's a quick fix. + dependsOn(tasks.withType()) + mustRunAfter(tasks.withType()) + + doLast { + logger.lifecycle("[task: ${name}] ${publication.groupId}:${publication.artifactId}:${publication.version}") + } +} + +val localBuildRepo: Provider = rootProject.layout.buildDirectory.dir("repo") + +publishing { + repositories { + + // publish to local dir, for artifact tracking and testing + // `./gradlew publishMavenPublicationToLocalBuildRepository` + maven { + url = uri(localBuildRepo.get()) + name = "LocalBuild" + } + } + + publications.create("maven") { + components.findByName("java")?.let { from(it) } + + pom { + url.set("https://www.mongodb.com/") + scm { + url.set("https://github.com/mongodb/mongo-java-driver") + connection.set("scm:https://github.com/mongodb/mongo-java-driver.git") + developerConnection.set("scm:https://github.com/mongodb/mongo-java-driver.git") + } + + developers { + developer { + name.set("Various") + organization.set("MongoDB") + } + } + + licenses { + license { + name.set("The Apache License, Version 2.0") + url.set("https://www.apache.org/licenses/LICENSE-2.0.txt") + } + } + } + + // Ensure get the final set `base.archivesName` not the default one (project name). 
+ afterEvaluate { artifactId = base.archivesName.get() } + } +} + +tasks.withType { + manifest { attributes["-exportcontents"] = "*;-noimport:=true" } + + afterEvaluate { + manifest { + if (attributes.containsKey("-nomanifest")) { + attributes.remove("-exportcontents") + } else { + attributes["Bundle-Version"] = project.version + attributes["Bundle-SymbolicName"] = + "${project.findProperty("group")}.${project.findProperty("archivesBaseName")}" + attributes["Build-Version"] = gitVersion.get() + attributes["Bundle-Name"] = base.archivesName.get() + } + } + } +} + +signing { + if (signingKey.isPresent && signingPassword.isPresent) { + logger.info("[${project.displayName}] Signing is enabled") + useInMemoryPgpKeys(signingKey.get(), signingPassword.get()) + sign(publishing.publications["maven"]) + } else { + logger.info("[${project.displayName}] No Signing keys found, skipping signing configuration") + } +} + +tasks.named("clean") { delete.add(localBuildRepo) } + +tasks.withType { enabled = false } + +tasks.register("publishSnapshots") { + group = "publishing" + description = "Publishes snapshots to Sonatype" + + if (version.toString().endsWith("-SNAPSHOT")) { + dependsOn(tasks.named("publishAllPublicationsToLocalBuildRepository")) + dependsOn(tasks.named("publishToSonatype")) + } +} + +tasks.register("publishArchives") { + group = "publishing" + description = "Publishes a release and uploads to Sonatype / Maven Central" + + val currentGitVersion = gitVersion.get() + val gitVersionMatch = currentGitVersion == version + doFirst { + if (!gitVersionMatch) { + val cause = + """ + Version mismatch: + ================= + + $version != $currentGitVersion + + The project version does not match the git tag. 
+ """.trimMargin() + throw GradleException(cause) + } else { + println("Publishing: ${project.name} : $currentGitVersion") + } + } + if (gitVersionMatch) { + dependsOn(tasks.named("publishAllPublicationsToLocalBuildRepository")) + dependsOn(tasks.named("publishToSonatype")) + } +} + +// workaround for https://github.com/gradle/gradle/issues/16543 +inline fun TaskContainer.provider(taskName: String): Provider = + providers.provider { taskName }.flatMap { named(it) } diff --git a/buildSrc/src/main/kotlin/conventions/scaladoc.gradle.kts b/buildSrc/src/main/kotlin/conventions/scaladoc.gradle.kts new file mode 100644 index 00000000000..b14d9573e72 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/scaladoc.gradle.kts @@ -0,0 +1,34 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +// Provides the scaladoc configuration for the build +plugins { + id("scala") + id("conventions.publishing") +} + +// Create a generic `docs` task +tasks.register("docs") { + group = "documentation" + dependsOn("scaladoc") +} + +tasks.withType { + group = "documentation" + + destinationDir = rootProject.file("build/docs/${project.base.archivesName.get()}") +} diff --git a/buildSrc/src/main/kotlin/conventions/spotbugs.gradle.kts b/buildSrc/src/main/kotlin/conventions/spotbugs.gradle.kts new file mode 100644 index 00000000000..e7ea096fc33 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/spotbugs.gradle.kts @@ -0,0 +1,53 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +import com.github.spotbugs.snom.SpotBugsTask +import libs +import org.gradle.kotlin.dsl.dependencies + +// Performs quality checks on your project's Java source files using SpotBug +// https://plugins.gradle.org/plugin/com.github.spotbugs +plugins { + id("java-library") + alias(libs.plugins.spotbugs) +} + +dependencies { + compileOnly(libs.findbugs.jsr) + + testImplementation(libs.findbugs.jsr) +} + +spotbugs { + if (!project.buildingWith("ssdlcReport.enabled")) { + excludeFilter.set(rootProject.file("config/spotbugs/exclude.xml")) + } +} + +tasks.withType().configureEach { + if (name == "spotbugsMain") { + reports { + register("xml") { required.set(project.buildingWith("xmlReports.enabled")) } + register("html") { required.set(!project.buildingWith("xmlReports.enabled")) } + register("sarif") { required.set(project.buildingWith("ssdlcReport.enabled")) } + } + } else if (name == "spotbugsTest") { + enabled = false + } else if (name == "spotbugsIntegrationTest") { + enabled = false + } +} diff --git a/buildSrc/src/main/kotlin/conventions/spotless.gradle.kts b/buildSrc/src/main/kotlin/conventions/spotless.gradle.kts new file mode 100644 index 00000000000..7a148f57735 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/spotless.gradle.kts @@ -0,0 +1,70 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +import com.diffplug.gradle.spotless.SpotlessApply +import com.diffplug.gradle.spotless.SpotlessCheck +import libs + +// Spotless - keep your code spotless +// https://plugins.gradle.org/plugin/com.diffplug.spotless +plugins { alias(libs.plugins.spotless) } + +val doesNotHaveACustomLicenseHeader = "/^(?s)(?!.*@custom-license-header).*/" + +spotless { + kotlinGradle { + ktfmt("0.39").dropboxStyle().configure { it.setMaxWidth(120) } + trimTrailingWhitespace() + indentWithSpaces() + endWithNewline() + licenseHeaderFile(rootProject.file("config/mongodb.license"), "(group|plugins|import|buildscript|rootProject)") + } + + scala { + target("**/*.scala") + scalafmt().configFile(rootProject.file("config/scala/scalafmt.conf")) + } + + kotlin { + target("**/*.kt") + ktfmt().dropboxStyle().configure { it.setMaxWidth(120) } + trimTrailingWhitespace() + indentWithSpaces() + endWithNewline() + licenseHeaderFile(rootProject.file("config/mongodb.license")) + .named("standard") + .onlyIfContentMatches(doesNotHaveACustomLicenseHeader) + } + + format("extraneous") { + target("*.xml", "*.yml", "*.md") + trimTrailingWhitespace() + indentWithSpaces() + endWithNewline() + } +} + +tasks.named("check") { dependsOn("spotlessApply") } + +tasks { + withType().configureEach { + notCompatibleWithConfigurationCache("https://github.com/diffplug/spotless/issues/644") + } + withType().configureEach { + notCompatibleWithConfigurationCache("https://github.com/diffplug/spotless/issues/644") + } +} diff --git a/buildSrc/src/main/kotlin/conventions/test-artifacts-runtime-dependencies.gradle.kts b/buildSrc/src/main/kotlin/conventions/test-artifacts-runtime-dependencies.gradle.kts new file mode 100644 index 00000000000..73b2b891faa --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/test-artifacts-runtime-dependencies.gradle.kts @@ -0,0 +1,27 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +plugins { id("java-library") } + +// Also include test runtime dependencies +dependencies { + testRuntimeClasspath(platform(libs.netty.bom)) + testRuntimeClasspath(libs.netty.tcnative.boringssl.static) + listOf("linux-x86_64", "linux-aarch_64", "osx-x86_64", "osx-aarch_64", "windows-x86_64").forEach { arch -> + testRuntimeClasspath(variantOf(libs.netty.tcnative.boringssl.static) { classifier(arch) }) + } +} diff --git a/buildSrc/src/main/kotlin/conventions/test-artifacts.gradle.kts b/buildSrc/src/main/kotlin/conventions/test-artifacts.gradle.kts new file mode 100644 index 00000000000..f82a88c7df9 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/test-artifacts.gradle.kts @@ -0,0 +1,43 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +import project.DEFAULT_JAVA_VERSION + +// Allows test artifacts (resources and code) to be shared between projects +plugins { id("java-library") } + +/** Create a test artifact configuration so that test resources can be consumed by other projects. */ +val testArtifacts by configurations.creating +val testJar by + tasks.registering(Jar::class) { + archiveBaseName.set("${project.name}-test") + from(sourceSets.test.get().output) + setDuplicatesStrategy(DuplicatesStrategy.EXCLUDE) + } + +val testJavaVersion: Int = findProperty("javaVersion")?.toString()?.toInt() ?: DEFAULT_JAVA_VERSION + +tasks.withType() { + mustRunAfter(testJar) + + // Needed for OidcAuthenticationProseTests calls `field.setAccessible(true)` + if (testJavaVersion >= DEFAULT_JAVA_VERSION) { + jvmArgs("--add-opens=java.base/java.lang=ALL-UNNAMED") + } +} + +artifacts { add("testArtifacts", testJar) } diff --git a/buildSrc/src/main/kotlin/conventions/test-include-optionals.gradle.kts b/buildSrc/src/main/kotlin/conventions/test-include-optionals.gradle.kts new file mode 100644 index 00000000000..e7fde0b4c0f --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/test-include-optionals.gradle.kts @@ -0,0 +1,37 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +import org.gradle.kotlin.dsl.dependencies +import org.gradle.kotlin.dsl.project + +// Adds common optional dependencies to the testImplementations +dependencies { + + // Encryption testing + "testImplementation"(project(path = ":mongodb-crypt", configuration = "default")) + + // Netty stream type testing + "testImplementation"(platform(libs.netty.bom)) + "testImplementation"(libs.bundles.netty) + + // Snappy / zstd testing + "testImplementation"(libs.snappy.java) + "testImplementation"(libs.zstd.jni) + + // Socket testing + "testImplementation"(libs.jnr.unixsocket) +} diff --git a/buildSrc/src/main/kotlin/conventions/testing-base.gradle.kts b/buildSrc/src/main/kotlin/conventions/testing-base.gradle.kts new file mode 100644 index 00000000000..4708c742d40 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/testing-base.gradle.kts @@ -0,0 +1,110 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +import com.adarshr.gradle.testlogger.theme.ThemeType +import libs +import project.DEFAULT_JAVA_VERSION + +// Default test configuration for projects +// +// Utilizes the test-logger plugin: +// https://plugins.gradle.org/plugin/com.adarshr.test-logger +plugins { + id("java-library") + alias(libs.plugins.test.logger) +} + +tasks.withType { + maxHeapSize = "4g" + maxParallelForks = 1 + + useJUnitPlatform() + + jvmArgs.add("-Dio.netty.leakDetection.level=paranoid") + + // Pass any `org.mongodb.*` system settings + systemProperties = + System.getProperties() + .map { (key, value) -> Pair(key.toString(), value) } + .filter { it.first.startsWith("org.mongodb.") } + .toMap() + + // Convert any ssl based properties + if (project.buildingWith("ssl.enabled")) { + if (project.hasProperty("ssl.keyStoreType")) { + systemProperties( + mapOf( + "javax.net.ssl.keyStoreType" to project.property("ssl.keyStoreType"), + "javax.net.ssl.keyStore" to project.property("ssl.keyStore"), + "javax.net.ssl.keyStorePassword" to project.property("ssl.keyStorePassword"))) + } + if (project.hasProperty("ssl.trustStoreType")) { + systemProperties( + mapOf( + "javax.net.ssl.trustStoreType" to project.property("ssl.trustStoreType"), + "javax.net.ssl.trustStore" to project.property("ssl.trustStore"), + "javax.net.ssl.trustStorePassword" to project.property("ssl.trustStorePassword"))) + } + if (project.hasProperty("ocsp.property")) { + systemProperties( + mapOf( + "org.mongodb.test.ocsp.tls.should.succeed" to project.property("ocsp.tls.should.succeed"), + "java.security.properties" to file(project.property("ocsp.property").toString()), + "com.sun.net.ssl.checkRevocation" to project.property("ssl.checkRevocation"), + "jdk.tls.client.enableStatusRequestExtension" to + project.property("client.enableStatusRequestExtension"), + "jdk.tls.client.protocols" to project.property("client.protocols"))) + } + } + + // Convert gssapi properties + if 
(project.buildingWith("gssapi.enabled")) { + systemProperties( + mapOf( + "sun.security.krb5.debug" to project.property("sun.security.krb5.debug"), + "javax.security.auth.useSubjectCredsOnly" to "false", + "java.security.krb5.kdc" to project.property("krb5.kdc"), + "java.security.krb5.realm" to project.property("krb5.realm"), + "java.security.auth.login.config" to project.property("auth.login.config"), + )) + } + + // Allow testing with an alternative JDK version + val testJavaVersion: Int = findProperty("javaVersion")?.toString()?.toInt() ?: DEFAULT_JAVA_VERSION + javaLauncher.set(javaToolchains.launcherFor { languageVersion = JavaLanguageVersion.of(testJavaVersion) }) +} + +// Pretty test output +testlogger { + theme = ThemeType.STANDARD + showExceptions = true + showStackTraces = true + showFullStackTraces = false + showCauses = true + slowThreshold = 2000 + showSummary = true + showSimpleNames = false + showPassed = true + showSkipped = true + showFailed = true + showOnlySlow = false + showStandardStreams = false + showPassedStandardStreams = true + showSkippedStandardStreams = true + showFailedStandardStreams = true + logLevel = LogLevel.LIFECYCLE +} diff --git a/buildSrc/src/main/kotlin/conventions/testing-integration.gradle.kts b/buildSrc/src/main/kotlin/conventions/testing-integration.gradle.kts new file mode 100644 index 00000000000..bdd30028b18 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/testing-integration.gradle.kts @@ -0,0 +1,50 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +// Adds separate `integrationTest` configuration to a project +// Allows unit and integrations tests to be separate tasks +// +// See: +// https://docs.gradle.org/current/samples/sample_jvm_multi_project_with_additional_test_types.html +plugins { id("java-library") } + +val integrationTest by sourceSets.creating + +configurations[integrationTest.implementationConfigurationName].extendsFrom(configurations.testImplementation.get()) + +configurations[integrationTest.runtimeOnlyConfigurationName].extendsFrom(configurations.testRuntimeOnly.get()) + +val integrationTestTask = + tasks.register("integrationTest") { + description = "Runs integration tests." + group = "verification" + useJUnitPlatform() + + testClassesDirs = integrationTest.output.classesDirs + classpath = configurations[integrationTest.runtimeClasspathConfigurationName] + integrationTest.output + shouldRunAfter(tasks.test) + } + +tasks.findByName("check")?.dependsOn(integrationTestTask) + +dependencies { + "integrationTestImplementation"(project) + "integrationTestImplementation"(platform(libs.junit.bom)) + "integrationTestImplementation"(libs.bundles.junit.vintage) +} + +sourceSets["integrationTest"].java.srcDirs("src/integrationTest", "src/integrationTest/java") diff --git a/buildSrc/src/main/kotlin/conventions/testing-junit-vintage.gradle.kts b/buildSrc/src/main/kotlin/conventions/testing-junit-vintage.gradle.kts new file mode 100644 index 00000000000..48f6eee92eb --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/testing-junit-vintage.gradle.kts @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +// Default junit vintage (aka junit4) test configuration for projects +plugins { id("conventions.testing-base") } + +dependencies { + testImplementation(platform(libs.junit.bom)) + testImplementation(libs.bundles.junit.vintage) +} + +sourceSets["test"].java { srcDirs("src/test", "src/test/unit", "src/test/functional") } diff --git a/buildSrc/src/main/kotlin/conventions/testing-junit.gradle.kts b/buildSrc/src/main/kotlin/conventions/testing-junit.gradle.kts new file mode 100644 index 00000000000..7e72c5101bf --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/testing-junit.gradle.kts @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package conventions + +// Default junit test configuration for projects +plugins { id("conventions.testing-base") } + +dependencies { + testImplementation(platform(libs.junit.bom)) + testImplementation(libs.bundles.junit) +} + +sourceSets["test"].java { srcDirs("src/test", "src/test/unit", "src/test/functional") } diff --git a/buildSrc/src/main/kotlin/conventions/testing-mockito.gradle.kts b/buildSrc/src/main/kotlin/conventions/testing-mockito.gradle.kts new file mode 100644 index 00000000000..08c33262e9e --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/testing-mockito.gradle.kts @@ -0,0 +1,27 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +// Adds mockito support to a project +plugins { id("java-library") } + +dependencies { + if (project.findProperty("javaVersion")?.toString().equals("8")) { + testImplementation(libs.bundles.mockito.java8) + } else { + testImplementation(libs.bundles.mockito) + } +} diff --git a/buildSrc/src/main/kotlin/conventions/testing-spock-exclude-slow.gradle.kts b/buildSrc/src/main/kotlin/conventions/testing-spock-exclude-slow.gradle.kts new file mode 100644 index 00000000000..706bca27e8c --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/testing-spock-exclude-slow.gradle.kts @@ -0,0 +1,36 @@ +/* + * Copyright 2008-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +import org.gradle.api.tasks.testing.Test +import org.gradle.kotlin.dsl.withType + +// Adds groovy spock testing framework support +// See: https://spockframework.org/ +plugins { id("conventions.testing-spock") } + +tasks.withType().configureEach { + exclude("examples/**") + useJUnitPlatform { excludeTags("Slow") } + + systemProperty("spock.configuration", "${rootProject.file("config/spock/ExcludeSlow.groovy")}") +} + +tasks.register("testSlowOnly", Test::class.java) { + useJUnitPlatform { includeTags("Slow") } + + systemProperty("spock.configuration", "${rootProject.file("config/spock/OnlySlow.groovy")}") +} diff --git a/buildSrc/src/main/kotlin/conventions/testing-spock.gradle.kts b/buildSrc/src/main/kotlin/conventions/testing-spock.gradle.kts new file mode 100644 index 00000000000..e4d46007856 --- /dev/null +++ b/buildSrc/src/main/kotlin/conventions/testing-spock.gradle.kts @@ -0,0 +1,42 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package conventions + +import org.gradle.kotlin.dsl.dependencies +import project.libs + +// Adds groovy spock testing framework support +// See: https://spockframework.org/ +plugins { + id("groovy") + id("conventions.codenarc") + id("conventions.testing-base") + id("conventions.testing-junit-vintage") +} + +dependencies { + testImplementation(platform(libs.spock.bom)) + testImplementation(libs.bundles.spock) +} + +sourceSets { + test { + groovy { srcDirs("src/test", "src/test/unit", "src/test/functional", "src/examples") } + + // Disable java src directories - groovy will compile the mixed java and groovy test code + java { setSrcDirs(emptyList()) } + } +} diff --git a/buildSrc/src/main/kotlin/project/Companion.kt b/buildSrc/src/main/kotlin/project/Companion.kt new file mode 100644 index 00000000000..b4b9650031a --- /dev/null +++ b/buildSrc/src/main/kotlin/project/Companion.kt @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package project + +import org.gradle.accessors.dm.LibrariesForLibs +import org.gradle.api.Project +import org.gradle.kotlin.dsl.getByType + +// Adds the `libs` value for use in project +internal val Project.libs: LibrariesForLibs + get() = extensions.getByType() + +internal const val DEFAULT_JAVA_VERSION = 17 diff --git a/buildSrc/src/main/kotlin/project/base.gradle.kts b/buildSrc/src/main/kotlin/project/base.gradle.kts new file mode 100644 index 00000000000..ed13c40cb76 --- /dev/null +++ b/buildSrc/src/main/kotlin/project/base.gradle.kts @@ -0,0 +1,26 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package project + +plugins { id("conventions.git-version") } + +group = "org.mongodb" + +repositories { + mavenLocal() + google() + mavenCentral() +} diff --git a/buildSrc/src/main/kotlin/project/java.gradle.kts b/buildSrc/src/main/kotlin/project/java.gradle.kts new file mode 100644 index 00000000000..60861167f17 --- /dev/null +++ b/buildSrc/src/main/kotlin/project/java.gradle.kts @@ -0,0 +1,50 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package project + +plugins { + id("java-library") + id("checkstyle") + id("project.base") + id("conventions.bnd") + id("conventions.javadoc") + id("conventions.optional") + id("conventions.publishing") + id("conventions.spotbugs") + id("conventions.spotless") + id("conventions.testing-junit") +} + +dependencies { "optionalApi"(libs.slf4j) } + +logger.info("Compiling ${project.name} using JDK${DEFAULT_JAVA_VERSION}") + +java { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + + toolchain { languageVersion = JavaLanguageVersion.of(DEFAULT_JAVA_VERSION) } + + withSourcesJar() + withJavadocJar() +} + +tasks.withType { + options.encoding = "UTF-8" + options.release.set(8) +} + +sourceSets["main"].java { srcDir("src/main") } diff --git a/buildSrc/src/main/kotlin/project/kotlin.gradle.kts b/buildSrc/src/main/kotlin/project/kotlin.gradle.kts new file mode 100644 index 00000000000..a0f53e0ad28 --- /dev/null +++ b/buildSrc/src/main/kotlin/project/kotlin.gradle.kts @@ -0,0 +1,68 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package project + +import libs +import org.jetbrains.kotlin.gradle.dsl.JvmTarget +import org.jetbrains.kotlin.gradle.tasks.KotlinJvmCompile + +plugins { + alias(libs.plugins.kotlin.gradle) + id("project.base") + id("conventions.bnd") + id("conventions.detekt") + id("conventions.dokka") + id("conventions.optional") + id("conventions.publishing") + id("conventions.spotbugs") + id("conventions.spotless") + id("conventions.testing-integration") + id("conventions.testing-junit") +} + +/* Compiling */ +logger.info("Compiling ${project.name} using JDK${DEFAULT_JAVA_VERSION}") + +kotlin { + explicitApi() + jvmToolchain(DEFAULT_JAVA_VERSION) +} + +tasks.withType { compilerOptions { jvmTarget = JvmTarget.JVM_1_8 } } + +java { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + + withSourcesJar() + withJavadocJar() +} + +dependencies { + "optionalApi"(libs.slf4j) + + // Align versions of all Kotlin components + implementation(platform(libs.kotlin.bom)) + implementation(libs.kotlin.stdlib.jdk8) + + testImplementation(libs.kotlin.reflect) + testImplementation(libs.junit.kotlin) + testImplementation(libs.bundles.mockito.kotlin) + testImplementation(libs.assertj) + testImplementation(libs.classgraph) + + "integrationTestImplementation"(libs.junit.kotlin) +} diff --git a/buildSrc/src/main/kotlin/project/scala.gradle.kts b/buildSrc/src/main/kotlin/project/scala.gradle.kts new file mode 100644 index 00000000000..ff5918ae695 --- /dev/null +++ b/buildSrc/src/main/kotlin/project/scala.gradle.kts @@ -0,0 +1,113 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package project + +import ProjectExtensions.configureMavenPublication +import ProjectExtensions.scalaVersion + +plugins { + id("scala") + id("project.base") + id("conventions.bnd") + id("conventions.optional") + id("conventions.publishing") + id("conventions.scaladoc") + id("conventions.spotless") + id("conventions.testing-junit") + id("conventions.testing-integration") +} + +group = "org.mongodb.scala" + +val scalaVersion: String by lazy { project.scalaVersion() } + +sourceSets["integrationTest"].scala.srcDir("src/integrationTest/scala") + +tasks.register("scalaCheck") { + description = "Runs all the Scala checks" + group = "verification" + + dependsOn("clean", "compileTestScala", "check") + tasks.findByName("check")?.mustRunAfter("clean") +} + +tasks.withType { + doFirst { println("Running Test task using scala version: $scalaVersion") } + useJUnitPlatform() +} + +tasks.named("clean") { delete.add(rootProject.file("build/docs/")) } + +java { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + + withSourcesJar() + withJavadocJar() +} + +afterEvaluate { + configureMavenPublication { artifactId = "${base.archivesName.get()}_${scalaVersion}" } + + // ============================================ + // Scala version specific configuration + // ============================================ + val compileOptions = mutableListOf("-target:jvm-1.8") + when (scalaVersion) { + "2.13" -> { + dependencies { + api(libs.bundles.scala.v2.v13) + + testImplementation(libs.bundles.scala.test.v2.v13) + } + sourceSets { 
main { scala { setSrcDirs(listOf("src/main/scala", "src/main/scala-2.13+")) } } } + + compileOptions.addAll( + listOf( + "-feature", + "-unchecked", + "-language:reflectiveCalls", + "-Wconf:cat=deprecation:ws", + "-Xlint:strict-unsealed-patmat")) + } + "2.12" -> { + dependencies { + api(libs.bundles.scala.v2.v12) + + testImplementation(libs.bundles.scala.test.v2.v12) + } + sourceSets { main { scala { setSrcDirs(listOf("src/main/scala", "src/main/scala-2.13-")) } } } + } + "2.11" -> { + dependencies { + api(libs.bundles.scala.v2.v11) + + testImplementation(libs.bundles.scala.test.v2.v11) + } + // Reuse the scala-2.12 source as its compatible. + sourceSets { main { scala { setSrcDirs(listOf("src/main/scala", "src/main/scala-2.13-")) } } } + + compileOptions.add("-Xexperimental") + } + } + + tasks.withType { + doFirst { println("Compiling using scala version: $scalaVersion") } + + scalaCompileOptions.isDeprecation = false + scalaCompileOptions.additionalParameters = compileOptions + } +} diff --git a/config/checkstyle-exclude.xml b/config/checkstyle-exclude.xml deleted file mode 100644 index b879d8bb238..00000000000 --- a/config/checkstyle-exclude.xml +++ /dev/null @@ -1,134 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/config/checkstyle.xml b/config/checkstyle/checkstyle.xml similarity index 88% rename from config/checkstyle.xml rename to config/checkstyle/checkstyle.xml index f2d675871ee..3a88f90de8c 100644 --- a/config/checkstyle.xml +++ b/config/checkstyle/checkstyle.xml @@ -78,6 +78,13 @@ + + + + + + + @@ -85,14 +92,13 @@ - + - + - @@ -114,16 +120,9 @@ - - - - - - - - + @@ -154,7 +153,9 @@ - + + + @@ -196,8 +197,6 @@ - - @@ -218,16 +217,21 @@ + + + + + + + + + - - - - - - + + - + diff --git a/config/checkstyle/suppressions.xml 
b/config/checkstyle/suppressions.xml new file mode 100644 index 00000000000..6d24f861e08 --- /dev/null +++ b/config/checkstyle/suppressions.xml @@ -0,0 +1,164 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/clirr-exclude.yml b/config/clirr-exclude.yml deleted file mode 100644 index 40ecc8ffabc..00000000000 --- a/config/clirr-exclude.yml +++ /dev/null @@ -1,29 +0,0 @@ ---- -differenceTypes: [] - -packages: -- io.netty.* -- org.slf4j.* - -members: - org.bson.AbstractBsonReader: - - doReadDecimal128() - - doPeekBinarySize() - org.bson.AbstractBsonWriter: - - doWriteDecimal128(org.bson.types.Decimal128) - org.bson.BsonNumber: - - decimal128Value() - org.bson.BsonReader: - - readDecimal128() - - readDecimal128(java.lang.String) - - peekBinarySize() - - getMark() - - close() - org.bson.io.BsonInput: - - getMark(int) - org.bson.BsonWriter: - - writeDecimal128(org.bson.types.Decimal128) - - writeDecimal128(java.lang.String,org.bson.types.Decimal128) - - pipe(org.bson.BsonReader,java.util.List) - org.bson.diagnostics.Loggers: - - getLogger(java.lang.String) diff --git a/config/codenarc/codenarc.xml b/config/codenarc/codenarc.xml index bd1deeffde6..cfdd190abf6 100644 --- a/config/codenarc/codenarc.xml +++ b/config/codenarc/codenarc.xml @@ -34,12 +34,6 @@ - - - - - - @@ -47,6 +41,9 @@ + + + @@ -55,6 +52,13 @@ + + + + + + + @@ -72,10 +76,16 @@ - + + + + + + + @@ -86,6 +96,9 @@ + + + @@ -113,6 +126,9 @@ + + + @@ -125,6 +141,7 @@ + @@ -133,4 +150,3 @@ - diff --git a/config/detekt/baseline.xml b/config/detekt/baseline.xml new file mode 100644 index 00000000000..d462c314e9c --- /dev/null +++ b/config/detekt/baseline.xml @@ -0,0 +1,36 @@ + + + + + EmptyDefaultConstructor:UnifiedCrudTest.kt$UnifiedCrudTest$() + 
EmptyDefaultConstructor:UnifiedTest.kt$UnifiedTest$() + EmptyFunctionBlock:SyncMongoCursor.kt$SyncMongoCursor${} + IteratorNotThrowingNoSuchElementException:MongoCursor.kt$MongoCursor<T : Any> : IteratorCloseable + LargeClass:MongoCollectionTest.kt$MongoCollectionTest + LongMethod:FindFlowTest.kt$FindFlowTest$@Test fun shouldCallTheUnderlyingMethods() + LongMethod:FindIterableTest.kt$FindIterableTest$@Test fun shouldCallTheUnderlyingMethods() + LongMethod:KotlinSerializerCodecTest.kt$KotlinSerializerCodecTest$@Test fun testDataClassOptionalBsonValues() + MaxLineLength:ListCollectionNamesFlow.kt$ListCollectionNamesFlow$* + MaxLineLength:ListCollectionNamesIterable.kt$ListCollectionNamesIterable$* + MaxLineLength:ListCollectionsFlow.kt$ListCollectionsFlow$* + MaxLineLength:ListCollectionsIterable.kt$ListCollectionsIterable$* + MaxLineLength:MapReduceFlow.kt$MapReduceFlow$* + MaxLineLength:MapReduceIterable.kt$MapReduceIterable$* + SwallowedException:MockitoHelper.kt$MockitoHelper.DeepReflectionEqMatcher$e: Throwable + TooManyFunctions:ClientSession.kt$ClientSession : jClientSession + TooManyFunctions:FindFlow.kt$FindFlow<T : Any> : Flow + TooManyFunctions:FindIterable.kt$FindIterable<T : Any> : MongoIterable + TooManyFunctions:MongoCollection.kt$MongoCollection<T : Any> + TooManyFunctions:MongoDatabase.kt$MongoDatabase + TooManyFunctions:SyncClientSession.kt$SyncClientSession : JClientSession + TooManyFunctions:SyncFindIterable.kt$SyncFindIterable<T : Any> : JFindIterableSyncMongoIterable + TooManyFunctions:SyncMongoCluster.kt$SyncMongoCluster : JMongoCluster + TooManyFunctions:SyncMongoCollection.kt$SyncMongoCollection<T : Any> : JMongoCollection + TooManyFunctions:SyncMongoDatabase.kt$SyncMongoDatabase : JMongoDatabase + UnnecessaryAbstractClass:UnifiedTest.kt$UnifiedTest$UnifiedTest + UnsafeCallOnNullableType:SmokeTests.kt$SmokeTests$collection!! + UnusedPrivateMember:SyncMongoIterable.kt$SyncMongoIterable$private var timeoutMode: TimeoutMode? 
= null + VarCouldBeVal:SyncMongoIterable.kt$SyncMongoIterable$private var timeoutMode: TimeoutMode? = null + WildcardImport:SyncMongoDatabase.kt$import com.mongodb.client.* + + diff --git a/config/detekt/detekt.yml b/config/detekt/detekt.yml new file mode 100644 index 00000000000..4ac460b0738 --- /dev/null +++ b/config/detekt/detekt.yml @@ -0,0 +1,712 @@ +build: + maxIssues: 0 + excludeCorrectable: false + weights: + # complexity: 2 + # LongParameterList: 1 + # style: 1 + # comments: 1 + +config: + validation: true + warningsAsErrors: false + # when writing own rules with new properties, exclude the property path e.g.: 'my_rule_set,.*>.*>[my_property]' + excludes: '' + +processors: + active: true + exclude: + - 'DetektProgressListener' + # - 'KtFileCountProcessor' + # - 'PackageCountProcessor' + # - 'ClassCountProcessor' + # - 'FunctionCountProcessor' + # - 'PropertyCountProcessor' + # - 'ProjectComplexityProcessor' + # - 'ProjectCognitiveComplexityProcessor' + # - 'ProjectLLOCProcessor' + # - 'ProjectCLOCProcessor' + # - 'ProjectLOCProcessor' + # - 'ProjectSLOCProcessor' + # - 'LicenseHeaderLoaderExtension' + +console-reports: + active: true + exclude: + - 'ProjectStatisticsReport' + - 'ComplexityReport' + - 'NotificationReport' + - 'FindingsReport' + - 'FileBasedFindingsReport' + # - 'LiteFindingsReport' + +output-reports: + active: true + exclude: + # - 'TxtOutputReport' + # - 'XmlOutputReport' + # - 'HtmlOutputReport' + # - 'MdOutputReport' + +comments: + active: true + AbsentOrWrongFileLicense: + active: false + licenseTemplateFile: 'license.template' + licenseTemplateIsRegex: false + CommentOverPrivateFunction: + active: false + CommentOverPrivateProperty: + active: false + DeprecatedBlockTag: + active: false + EndOfSentenceFormat: + active: false + endOfSentenceFormat: '([.?!][ \t\n\r\f<])|([.?!:]$)' + KDocReferencesNonPublicProperty: + active: false + excludes: ['**/test/**'] + OutdatedDocumentation: + active: false + matchTypeParameters: true + 
matchDeclarationsOrder: true + allowParamOnConstructorProperties: false + UndocumentedPublicClass: + active: false + excludes: ['**/test/**'] + searchInNestedClass: true + searchInInnerClass: true + searchInInnerObject: true + searchInInnerInterface: true + UndocumentedPublicFunction: + active: false + excludes: ['**/test/**'] + UndocumentedPublicProperty: + active: false + excludes: ['**/test/**'] + +complexity: + active: true + ComplexCondition: + active: true + threshold: 4 + ComplexInterface: + active: false + threshold: 10 + includeStaticDeclarations: false + includePrivateDeclarations: false + ComplexMethod: + active: true + threshold: 15 + ignoreSingleWhenExpression: false + ignoreSimpleWhenEntries: false + ignoreNestingFunctions: false + nestingFunctions: + - 'also' + - 'apply' + - 'forEach' + - 'isNotNull' + - 'ifNull' + - 'let' + - 'run' + - 'use' + - 'with' + LabeledExpression: + active: false + ignoredLabels: [] + LargeClass: + active: true + threshold: 600 + LongMethod: + active: true + threshold: 60 + LongParameterList: + active: true + functionThreshold: 6 + constructorThreshold: 7 + ignoreDefaultParameters: false + ignoreDataClasses: true + ignoreAnnotatedParameter: [] + MethodOverloading: + active: false + threshold: 6 + NamedArguments: + active: false + threshold: 3 + ignoreArgumentsMatchingNames: false + NestedBlockDepth: + active: true + threshold: 4 + NestedScopeFunctions: + active: false + threshold: 1 + functions: + - 'kotlin.apply' + - 'kotlin.run' + - 'kotlin.with' + - 'kotlin.let' + - 'kotlin.also' + ReplaceSafeCallChainWithRun: + active: false + StringLiteralDuplication: + active: false + excludes: ['**/test/**'] + threshold: 3 + ignoreAnnotation: true + excludeStringsWithLessThan5Characters: true + ignoreStringsRegex: '$^' + TooManyFunctions: + active: true + excludes: ['**/test/**'] + thresholdInFiles: 25 + thresholdInClasses: 27 + thresholdInInterfaces: 25 + thresholdInObjects: 25 + thresholdInEnums: 25 + ignoreDeprecated: false + 
ignorePrivate: false + ignoreOverridden: false + +coroutines: + active: true + GlobalCoroutineUsage: + active: false + InjectDispatcher: + active: true + dispatcherNames: + - 'IO' + - 'Default' + - 'Unconfined' + RedundantSuspendModifier: + active: true + SleepInsteadOfDelay: + active: true + SuspendFunWithCoroutineScopeReceiver: + active: false + SuspendFunWithFlowReturnType: + active: true + +empty-blocks: + active: true + EmptyCatchBlock: + active: true + allowedExceptionNameRegex: '_|(ignore|expected).*' + EmptyClassBlock: + active: true + EmptyDefaultConstructor: + active: true + EmptyDoWhileBlock: + active: true + EmptyElseBlock: + active: true + EmptyFinallyBlock: + active: true + EmptyForBlock: + active: true + EmptyFunctionBlock: + active: true + ignoreOverridden: false + EmptyIfBlock: + active: true + EmptyInitBlock: + active: true + EmptyKtFile: + active: true + EmptySecondaryConstructor: + active: true + EmptyTryBlock: + active: true + EmptyWhenBlock: + active: true + EmptyWhileBlock: + active: true + +exceptions: + active: true + ExceptionRaisedInUnexpectedLocation: + active: true + methodNames: + - 'equals' + - 'finalize' + - 'hashCode' + - 'toString' + InstanceOfCheckForException: + active: true + excludes: ['**/test/**'] + NotImplementedDeclaration: + active: false + ObjectExtendsThrowable: + active: false + PrintStackTrace: + active: true + RethrowCaughtException: + active: true + ReturnFromFinally: + active: true + ignoreLabeled: false + SwallowedException: + active: true + ignoredExceptionTypes: + - 'InterruptedException' + - 'MalformedURLException' + - 'NumberFormatException' + - 'ParseException' + allowedExceptionNameRegex: '_|(ignore|expected).*' + ThrowingExceptionFromFinally: + active: true + ThrowingExceptionInMain: + active: false + ThrowingExceptionsWithoutMessageOrCause: + active: true + excludes: ['**/test/**'] + exceptions: + - 'ArrayIndexOutOfBoundsException' + - 'Exception' + - 'IllegalArgumentException' + - 
'IllegalMonitorStateException' + - 'IllegalStateException' + - 'IndexOutOfBoundsException' + - 'NullPointerException' + - 'RuntimeException' + - 'Throwable' + ThrowingNewInstanceOfSameException: + active: true + TooGenericExceptionCaught: + active: true + excludes: ['**/test/**'] + exceptionNames: + - 'ArrayIndexOutOfBoundsException' + - 'Error' + - 'Exception' + - 'IllegalMonitorStateException' + - 'IndexOutOfBoundsException' + - 'NullPointerException' + - 'RuntimeException' + - 'Throwable' + allowedExceptionNameRegex: '_|(ignore|expected).*' + TooGenericExceptionThrown: + active: true + exceptionNames: + - 'Error' + - 'Exception' + - 'RuntimeException' + - 'Throwable' + +naming: + active: true + BooleanPropertyNaming: + active: false + allowedPattern: '^(is|has|are)' + ignoreOverridden: true + ClassNaming: + active: true + classPattern: '[A-Z][a-zA-Z0-9]*' + ConstructorParameterNaming: + active: true + parameterPattern: '[a-z][A-Za-z0-9]*' + privateParameterPattern: '[a-z][A-Za-z0-9]*' + excludeClassPattern: '$^' + ignoreOverridden: true + EnumNaming: + active: true + enumEntryPattern: '[A-Z][_a-zA-Z0-9]*' + ForbiddenClassName: + active: false + forbiddenName: [] + FunctionMaxLength: + active: false + maximumFunctionNameLength: 30 + FunctionMinLength: + active: false + minimumFunctionNameLength: 3 + FunctionNaming: + active: true + excludes: ['**/test/**'] + functionPattern: '[a-z][a-zA-Z0-9]*' + excludeClassPattern: '$^' + ignoreOverridden: true + FunctionParameterNaming: + active: true + parameterPattern: '[a-z][A-Za-z0-9]*' + excludeClassPattern: '$^' + ignoreOverridden: true + InvalidPackageDeclaration: + active: true + rootPackage: '' + requireRootInDeclaration: false + LambdaParameterNaming: + active: false + parameterPattern: '[a-z][A-Za-z0-9]*|_' + MatchingDeclarationName: + active: true + mustBeFirst: true + MemberNameEqualsClassName: + active: true + ignoreOverridden: true + NoNameShadowing: + active: true + NonBooleanPropertyPrefixedWithIs: + active: 
false + ObjectPropertyNaming: + active: true + constantPattern: '[A-Za-z][_A-Za-z0-9]*' + propertyPattern: '[A-Za-z][_A-Za-z0-9]*' + privatePropertyPattern: '(_)?[A-Za-z][_A-Za-z0-9]*' + PackageNaming: + active: true + packagePattern: '[a-z]+(\.[a-z][A-Za-z0-9]*)*' + TopLevelPropertyNaming: + active: true + constantPattern: '[A-Z][_A-Z0-9]*' + propertyPattern: '[A-Za-z][_A-Za-z0-9]*' + privatePropertyPattern: '_?[A-Za-z][_A-Za-z0-9]*' + VariableMaxLength: + active: false + maximumVariableNameLength: 64 + VariableMinLength: + active: false + minimumVariableNameLength: 1 + VariableNaming: + active: true + variablePattern: '[a-z][A-Za-z0-9]*' + privateVariablePattern: '(_)?[a-z][A-Za-z0-9]*' + excludeClassPattern: '$^' + ignoreOverridden: true + +performance: + active: true + ArrayPrimitive: + active: true + CouldBeSequence: + active: false + threshold: 3 + ForEachOnRange: + active: true + excludes: ['**/test/**'] + SpreadOperator: + active: true + excludes: ['**/test/**'] + UnnecessaryTemporaryInstantiation: + active: true + +potential-bugs: + active: true + AvoidReferentialEquality: + active: true + forbiddenTypePatterns: + - 'kotlin.String' + CastToNullableType: + active: false + Deprecation: + active: false + DontDowncastCollectionTypes: + active: false + DoubleMutabilityForCollection: + active: true + mutableTypes: + - 'kotlin.collections.MutableList' + - 'kotlin.collections.MutableMap' + - 'kotlin.collections.MutableSet' + - 'java.util.ArrayList' + - 'java.util.LinkedHashSet' + - 'java.util.HashSet' + - 'java.util.LinkedHashMap' + - 'java.util.HashMap' + DuplicateCaseInWhenExpression: + active: true + ElseCaseInsteadOfExhaustiveWhen: + active: false + EqualsAlwaysReturnsTrueOrFalse: + active: true + EqualsWithHashCodeExist: + active: true + ExitOutsideMain: + active: false + ExplicitGarbageCollectionCall: + active: true + HasPlatformType: + active: true + IgnoredReturnValue: + active: true + restrictToAnnotatedMethods: true + returnValueAnnotations: + - 
'*.CheckResult' + - '*.CheckReturnValue' + ignoreReturnValueAnnotations: + - '*.CanIgnoreReturnValue' + ignoreFunctionCall: [] + ImplicitDefaultLocale: + active: true + ImplicitUnitReturnType: + active: false + allowExplicitReturnType: true + InvalidRange: + active: true + IteratorHasNextCallsNextMethod: + active: true + IteratorNotThrowingNoSuchElementException: + active: true + LateinitUsage: + active: false + excludes: ['**/test/**'] + ignoreOnClassesPattern: '' + MapGetWithNotNullAssertionOperator: + active: true + MissingPackageDeclaration: + active: false + excludes: ['**/*.kts'] + MissingWhenCase: + active: true + allowElseExpression: true + NullCheckOnMutableProperty: + active: false + NullableToStringCall: + active: false + RedundantElseInWhen: + active: true + UnconditionalJumpStatementInLoop: + active: false + UnnecessaryNotNullOperator: + active: true + UnnecessarySafeCall: + active: true + UnreachableCatchBlock: + active: true + UnreachableCode: + active: true + UnsafeCallOnNullableType: + active: true + excludes: ['**/test/**'] + UnsafeCast: + active: true + UnusedUnaryOperator: + active: true + UselessPostfixExpression: + active: true + WrongEqualsTypeParameter: + active: true + +style: + active: true + CanBeNonNullable: + active: false + CascadingCallWrapping: + active: false + includeElvis: true + ClassOrdering: + active: false + CollapsibleIfStatements: + active: false + DataClassContainsFunctions: + active: false + conversionFunctionPrefix: 'to' + DataClassShouldBeImmutable: + active: false + DestructuringDeclarationWithTooManyEntries: + active: true + maxDestructuringEntries: 3 + EqualsNullCall: + active: true + EqualsOnSignatureLine: + active: false + ExplicitCollectionElementAccessMethod: + active: false + ExplicitItLambdaParameter: + active: true + ExpressionBodySyntax: + active: false + includeLineWrapping: false + ForbiddenComment: + active: true + values: + - 'FIXME:' + - 'STOPSHIP:' + - 'TODO:' + allowedPatterns: '' + customMessage: '' + 
ForbiddenImport: + active: false + imports: [] + forbiddenPatterns: '' + ForbiddenMethodCall: + active: false + methods: + - 'kotlin.io.print' + - 'kotlin.io.println' + ForbiddenPublicDataClass: + active: true + excludes: ['**'] + ignorePackages: + - '*.internal' + - '*.internal.*' + ForbiddenSuppress: + active: false + rules: [] + ForbiddenVoid: + active: true + ignoreOverridden: false + ignoreUsageInGenerics: false + FunctionOnlyReturningConstant: + active: true + ignoreOverridableFunction: true + ignoreActualFunction: true + excludedFunctions: '' + LibraryCodeMustSpecifyReturnType: + active: true + excludes: ['**'] + LibraryEntitiesShouldNotBePublic: + active: true + excludes: ['**'] + LoopWithTooManyJumpStatements: + active: true + maxJumpCount: 1 + MagicNumber: + active: true + excludes: ['**/test/**', '**/*.kts'] + ignoreNumbers: + - '-1' + - '0' + - '1' + - '2' + ignoreHashCodeFunction: true + ignorePropertyDeclaration: false + ignoreLocalVariableDeclaration: false + ignoreConstantDeclaration: true + ignoreCompanionObjectPropertyDeclaration: true + ignoreAnnotation: false + ignoreNamedArgument: true + ignoreEnums: true + ignoreRanges: false + ignoreExtensionFunctions: true + MandatoryBracesIfStatements: + active: false + MandatoryBracesLoops: + active: false + MaxChainedCallsOnSameLine: + active: false + maxChainedCalls: 5 + MaxLineLength: + active: true + maxLineLength: 120 + excludePackageStatements: true + excludeImportStatements: true + excludeCommentStatements: false + MayBeConst: + active: true + ModifierOrder: + active: true + MultilineLambdaItParameter: + active: false + NestedClassesVisibility: + active: true + NewLineAtEndOfFile: + active: true + NoTabs: + active: false + NullableBooleanCheck: + active: false + ObjectLiteralToLambda: + active: true + OptionalAbstractKeyword: + active: true + OptionalUnit: + active: false + OptionalWhenBraces: + active: false + PreferToOverPairSyntax: + active: false + ProtectedMemberInFinalClass: + active: true + 
RedundantExplicitType: + active: false + RedundantHigherOrderMapUsage: + active: true + RedundantVisibilityModifierRule: + active: false + ReturnCount: + active: true + max: 2 + excludedFunctions: 'equals' + excludeLabeled: false + excludeReturnFromLambda: true + excludeGuardClauses: false + SafeCast: + active: true + SerialVersionUIDInSerializableClass: + active: true + SpacingBetweenPackageAndImports: + active: false + ThrowsCount: + active: true + max: 2 + excludeGuardClauses: false + TrailingWhitespace: + active: false + UnderscoresInNumericLiterals: + active: false + acceptableLength: 4 + allowNonStandardGrouping: false + UnnecessaryAbstractClass: + active: true + UnnecessaryAnnotationUseSiteTarget: + active: false + UnnecessaryApply: + active: true + UnnecessaryBackticks: + active: false + UnnecessaryFilter: + active: true + UnnecessaryInheritance: + active: true + UnnecessaryInnerClass: + active: false + UnnecessaryLet: + active: false + UnnecessaryParentheses: + active: false + UntilInsteadOfRangeTo: + active: false + UnusedImports: + active: false + UnusedPrivateClass: + active: true + UnusedPrivateMember: + active: true + allowedNames: '(_|ignored|expected|serialVersionUID)' + UseAnyOrNoneInsteadOfFind: + active: true + UseArrayLiteralsInAnnotations: + active: true + UseCheckNotNull: + active: true + UseCheckOrError: + active: true + UseDataClass: + active: false + allowVars: false + UseEmptyCounterpart: + active: false + UseIfEmptyOrIfBlank: + active: false + UseIfInsteadOfWhen: + active: false + UseIsNullOrEmpty: + active: true + UseOrEmpty: + active: true + UseRequire: + active: true + UseRequireNotNull: + active: true + UselessCallOnNotNull: + active: true + UtilityClassWithPublicConstructor: + active: true + VarCouldBeVal: + active: true + ignoreLateinitVar: false + WildcardImport: + active: true + excludes: ['**/test/**'] + excludeImports: + - 'java.util.*' diff --git a/config/findbugs-exclude.xml b/config/findbugs-exclude.xml deleted file mode 
100644 index af027f9b16f..00000000000 --- a/config/findbugs-exclude.xml +++ /dev/null @@ -1,161 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/config/mongodb.license b/config/mongodb.license new file mode 100644 index 00000000000..6a2444433a7 --- /dev/null +++ b/config/mongodb.license @@ -0,0 +1,15 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ diff --git a/config/scala/scalafmt.conf b/config/scala/scalafmt.conf new file mode 100644 index 00000000000..6c5e35eae69 --- /dev/null +++ b/config/scala/scalafmt.conf @@ -0,0 +1,16 @@ +version = "3.7.1" +runner.dialect = scala213 + +preset = default + +danglingParentheses.preset = true +docstrings.style = keep +#docstrings.style = Asterisk +#docstrings.wrap = no +maxColumn = 120 +rewrite.rules = [SortImports] +newlines.topLevelStatements = [] +newlines.source=keep +newlines.implicitParamListModifierPrefer=before + +spaces.inImportCurlyBraces = true diff --git a/config/spock/ExcludeSlow.groovy b/config/spock/ExcludeSlow.groovy new file mode 100644 index 00000000000..033fbdb2a7d --- /dev/null +++ b/config/spock/ExcludeSlow.groovy @@ -0,0 +1,6 @@ +package spock + +runner { + println "Excluding Slow Spock tests" + exclude com.mongodb.spock.Slow +} diff --git a/config/spock/OnlySlow.groovy b/config/spock/OnlySlow.groovy new file mode 100644 index 00000000000..d98c04bd826 --- /dev/null +++ b/config/spock/OnlySlow.groovy @@ -0,0 +1,6 @@ +package spock + +runner { + println "Only including Slow Spock tests" + include com.mongodb.spock.Slow +} diff --git a/config/spotbugs/exclude.xml b/config/spotbugs/exclude.xml new file mode 100644 index 00000000000..20684680865 --- /dev/null +++ b/config/spotbugs/exclude.xml @@ -0,0 +1,293 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index 883ef64b28a..00000000000 --- a/docs/README.md +++ 
/dev/null @@ -1,5 +0,0 @@ -# MongoDB Java Driver Documentation - - 1. landing - the front page of all the java docs - 2. reference - the reference site for the current version of the driver - diff --git a/docs/landing/README.md b/docs/landing/README.md deleted file mode 100644 index 5f1702b3a37..00000000000 --- a/docs/landing/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# MongoDB Java Driver Front page - -The static front page site for the Java documentation portal. - -## Requirements -Hugo version 0.25 [download here](https://github.com/spf13/hugo/releases/tag/v0.25) -
    -Check out the hugo [quickstart guide](http://gohugo.io/overview/quickstart/). - -## Running the server -To run the server call the hugo command: - - hugo server --baseUrl=http://localhost/ --buildDrafts --watch - -| Options explained || -| --------------------------- |--------------------------------------------------------------------------| -| server | Hugo runs its own webserver to render the files | -| --baseUrl=http://localhost/ | Normally the base url will be /mongo-java-driver for gh-pages | -| --buildDrafts | Include draft posts in the output - these won't be published to gh-pages | -| -- watch | Automatically reloads on file change | - - -All generated content will appear in the `./public` folder, so you can also check the filesystem and browse it locally.
    -For more hugo server options run: `hugo --help` - -### Data - -All dynamic / changing data lives in `./data/mongodb.toml` diff --git a/docs/landing/config.toml b/docs/landing/config.toml deleted file mode 100644 index 013e39ec8ba..00000000000 --- a/docs/landing/config.toml +++ /dev/null @@ -1,8 +0,0 @@ -baseurl = "/mongo-java-driver/" -languageCode = "en-us" -title = "MongoDB Java Driver" -canonifyurls = false -disableHugoGeneratorInject = true -disableKinds = ["section", "taxonomy", "taxonomyTerm", "404"] - -githubRepo = "mongo-java-driver" diff --git a/docs/landing/content/.readme b/docs/landing/content/.readme deleted file mode 100644 index c5c049d9211..00000000000 --- a/docs/landing/content/.readme +++ /dev/null @@ -1 +0,0 @@ -content dir diff --git a/docs/landing/data/releases.toml b/docs/landing/data/releases.toml deleted file mode 100644 index 6fb7631a4d3..00000000000 --- a/docs/landing/data/releases.toml +++ /dev/null @@ -1,82 +0,0 @@ -current = "3.8.0" - -[[versions]] - version = "3.8.0" - docs = "./3.8" - api = "./3.8/javadoc" - -[[versions]] - version = "3.7.1" - docs = "./3.7" - api = "./3.7/javadoc" - -[[versions]] - version = "3.6.4" - status = "current" - docs = "./3.6" - api = "./3.6/javadoc" - -[[versions]] - version = "3.5.0" - docs = "./3.5" - api = "./3.5/javadoc" - -[[versions]] - version = "3.4.3" - docs = "./3.4" - api = "./3.4/javadoc" - -[[versions]] - version = "3.3.0" - docs = "./3.3" - api = "http://api.mongodb.com/java/3.3" - -[[versions]] - version = "3.2.2" - docs = "./3.2" - api = "http://api.mongodb.com/java/3.2" - -[[versions]] - version = "3.1.1" - docs = "./3.1" - api = "http://api.mongodb.com/java/3.1" - -[[versions]] - version = "3.0.4" - docs = "./3.0" - api = "http://api.mongodb.com/java/3.0" - -[[versions]] - version = "2.14.2" - docs = "./2.14" - api = "http://api.mongodb.com/java/2.14" - -[[versions]] - version = "2.13.3" - docs = "./2.13" - api = "http://api.mongodb.com/java/2.13" - -[[drivers]] - name = 
"mongodb-driver-sync" - description = "The synchronous driver, new in 3.7.
    For older versions of the driver please use the `mongodb-driver` or `mongo-java-driver`." - versions = "3.8.0,3.7.1" - -[[drivers]] - name = "mongodb-driver" - description = "The synchronous driver plus the legacy driver, new in 3.0.
    For older versions of the driver or for OSGi-based applications please use the `mongo-java-driver`." - versions = "3.8.0,3.7.1,3.6.4,3.5.0,3.4.3,3.3.0,3.2.2,3.1.1,3.0.4" - -[[drivers]] - name = "mongo-java-driver" - description = "An uber jar containing the bson library, the core library and the mongodb-driver.
    This artifact is a valid OSGi bundle." - versions = "3.8.0,3.7.1,3.6.4,3.5.0,3.4.3,3.3.0,3.2.2,3.1.1,3.0.4,2.14.2,2.13.3" - -[[drivers]] - name = "mongodb-driver-async" - description = "The new asynchronous driver, new in 3.0" - versions = "3.8.0,3.7.1,3.6.4,3.5.0,3.4.3,3.3.0,3.2.2,3.1.1,3.0.4" - -[[drivers]] - name = "mongodb-driver-core" - description = "The core library, new in 3.0" - versions = "3.8.0,3.7.1,3.6.4,3.5.0,3.4.3,3.3.0,3.2.2,3.1.1,3.0.4" diff --git a/docs/landing/layouts/.readme b/docs/landing/layouts/.readme deleted file mode 100644 index 8238d8e9f14..00000000000 --- a/docs/landing/layouts/.readme +++ /dev/null @@ -1 +0,0 @@ -custom layouts here diff --git a/docs/landing/layouts/404.html b/docs/landing/layouts/404.html deleted file mode 100644 index 634869dcb9b..00000000000 --- a/docs/landing/layouts/404.html +++ /dev/null @@ -1,45 +0,0 @@ - - - - {{ partial "meta.html"}} - - - {{.Title}} - - {{ partial "assets/css.html" . }} - - - - - {{ partial "header/main.html" . }} - - -
    -
    -
    -
    -
    -

    404: Page not found

    -
    - -
    -
    -
    - - - -
    -
    -
    - {{ partial "footer.html" .}} -
    -
    -
    - - - - -{{ partial "assets/javascripts.html" . }} -{{ partial "assets/analytics.html" . }} - - diff --git a/docs/landing/layouts/index.html b/docs/landing/layouts/index.html deleted file mode 100644 index 1164cfe59ad..00000000000 --- a/docs/landing/layouts/index.html +++ /dev/null @@ -1,52 +0,0 @@ - - - - {{- partial "meta.html" -}} - - - {{.Title}} - - {{- partial "assets/css.html" . -}} - - - - - {{- partial "header/main.html" . -}} - - - {{- partial "hero.html" . -}} - - -
    -
    -
    - {{- partial "introduction.html" . -}} - - {{- partial "features.html" . -}} - - {{- partial "quickStart.html" . -}} -
    -
    - {{- partial "releases.html" . -}} - {{- partial "mongodbUniversity.html" . -}} -
    -
    -
    - - - -
    -
    -
    - {{- partial "footer.html" . -}} -
    -
    -
    - - - - -{{- partial "assets/javascripts.html" . -}} -{{- partial "assets/analytics.html" . -}} - - diff --git a/docs/landing/layouts/partials/assets/analytics.html b/docs/landing/layouts/partials/assets/analytics.html deleted file mode 100644 index 9aafc9d499a..00000000000 --- a/docs/landing/layouts/partials/assets/analytics.html +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - diff --git a/docs/landing/layouts/partials/assets/css.html b/docs/landing/layouts/partials/assets/css.html deleted file mode 100644 index e053bda9e00..00000000000 --- a/docs/landing/layouts/partials/assets/css.html +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/docs/landing/layouts/partials/assets/javascripts.html b/docs/landing/layouts/partials/assets/javascripts.html deleted file mode 100644 index 6ef23295901..00000000000 --- a/docs/landing/layouts/partials/assets/javascripts.html +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/docs/landing/layouts/partials/features.html b/docs/landing/layouts/partials/features.html deleted file mode 100644 index 76dc1ad72d8..00000000000 --- a/docs/landing/layouts/partials/features.html +++ /dev/null @@ -1,14 +0,0 @@ -

    Features

    - -
    -
    BSON Library
    -
    A standalone BSON library, with a new Codec infrastructure that you can - use to build high-performance encoders and decoders without requiring an - intermediate Map instance.
    -
    MongoDB Driver
    -
    An updated Java driver that includes the legacy API as well as a new generic MongoCollection interface that complies with a new cross-driver CRUD specification.
    -
    MongoDB Async Driver
    -
    A new asynchronous API that can leverage either Netty or Java 7's AsynchronousSocketChannel for fast and non-blocking IO.
    -
    Core driver
    -
    The MongoDB Driver and Async Driver are both built on top of a new core library, which anyone can use to build alternative or experimental high-level APIs.
    -
    diff --git a/docs/landing/layouts/partials/footer.html b/docs/landing/layouts/partials/footer.html deleted file mode 100644 index bf9321d6f96..00000000000 --- a/docs/landing/layouts/partials/footer.html +++ /dev/null @@ -1,7 +0,0 @@ - diff --git a/docs/landing/layouts/partials/header/main.html b/docs/landing/layouts/partials/header/main.html deleted file mode 100644 index 3da110ccda2..00000000000 --- a/docs/landing/layouts/partials/header/main.html +++ /dev/null @@ -1,12 +0,0 @@ - diff --git a/docs/landing/layouts/partials/header/topRight.html b/docs/landing/layouts/partials/header/topRight.html deleted file mode 100644 index b447f7f3e5f..00000000000 --- a/docs/landing/layouts/partials/header/topRight.html +++ /dev/null @@ -1,9 +0,0 @@ - diff --git a/docs/landing/layouts/partials/hero.html b/docs/landing/layouts/partials/hero.html deleted file mode 100644 index 7b214b24c0e..00000000000 --- a/docs/landing/layouts/partials/hero.html +++ /dev/null @@ -1,22 +0,0 @@ -
    -
    -
    -
    -
    -

    MongoDB Java Driver

    -

    - The next generation Java driver for MongoDB -

    -

    - {{- range where $.Site.Data.releases.versions "version" $.Site.Data.releases.current -}} - {{- $.Scratch.Set "qs.currentReleasedVersion" . -}} - {{- end -}} - {{- $currentReleasedVersion := $.Scratch.Get "qs.currentReleasedVersion" -}} - Latest documentation -

    - -
    - -
    -
    -
    diff --git a/docs/landing/layouts/partials/introduction.html b/docs/landing/layouts/partials/introduction.html deleted file mode 100644 index dcffa95360a..00000000000 --- a/docs/landing/layouts/partials/introduction.html +++ /dev/null @@ -1,6 +0,0 @@ -

    Introduction

    - -

    - The official MongoDB Java Driver providing both synchronous and asynchronous interaction with MongoDB. - Powering the drivers is a new driver core and BSON library. -

    diff --git a/docs/landing/layouts/partials/meta.html b/docs/landing/layouts/partials/meta.html deleted file mode 100644 index 408b03e3337..00000000000 --- a/docs/landing/layouts/partials/meta.html +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/docs/landing/layouts/partials/mongodbUniversity.html b/docs/landing/layouts/partials/mongodbUniversity.html deleted file mode 100644 index fc6b3370581..00000000000 --- a/docs/landing/layouts/partials/mongodbUniversity.html +++ /dev/null @@ -1,10 +0,0 @@ -
    -

    MongoDB University

    - -

    M101J: MongoDB for Java Developers

    -

    Learn everything you need to know to get started building a MongoDB-based app. From basic installation, JSON, schema design, querying, insertion of data, indexing and working with the Java driver. -

    -

    -Learn More -

    -
    diff --git a/docs/landing/layouts/partials/quickStart.html b/docs/landing/layouts/partials/quickStart.html deleted file mode 100644 index 35d641c9e74..00000000000 --- a/docs/landing/layouts/partials/quickStart.html +++ /dev/null @@ -1,150 +0,0 @@ -

    Quick Start

    - -

    - The recommended way to get started using one of the drivers in your project is with a dependency management system. - Select the driver, version and dependency management system below and the snippet can be copied and pasted into your build. -

    -

    - Alternatively, head over to our documentation to learn more about getting started with Java and MongoDB. -

    - -{{- range where $.Site.Data.releases.versions "version" $.Site.Data.releases.current -}} - {{- $.Scratch.Set "qs.currentReleasedVersion" . -}} -{{- end -}} -{{- $currentReleasedVersion := $.Scratch.Get "qs.currentReleasedVersion" -}} - -
    -
    -
    -
    -
    - {{- $.Scratch.Set "qs.pos" 0 -}} - {{- $.Scratch.Set "qs.firstDriver" false -}} - {{- with $.Site.Data.releases.drivers -}} - - {{- end -}} -
    -
    - {{- $.Scratch.Set "qs.pos" 0 -}} - {{- $firstDriver := $.Scratch.Get "qs.firstDriver" -}} - {{- with $.Site.Data.releases.versions -}} - - {{- end -}} -
    -
    - -
    -
    - -
    - - -
    -
    - -
    - {{- $currentNode := . -}} - {{- $.Scratch.Set "qs.firstDriver" true -}} - {{- with $currentNode.Site.Data.releases.drivers -}} - {{- $.Scratch.Set "qs.driverPos" 0 -}} - {{- range . -}} - {{- $currentDriver := . -}} - {{- with $.Site.Data.releases.versions -}} - {{- $.Scratch.Set "qs.versionPos" 0 -}} - {{- range . -}} - {{- $currentVersion := . -}} - {{- $firstDriver := $.Scratch.Get "qs.firstDriver" -}} - {{- $driverPos := $.Scratch.Get "qs.driverPos" -}} - {{- $versionPos := $.Scratch.Get "qs.versionPos" -}} - {{- if in $currentDriver.versions $currentVersion.version -}} -{{- if in $currentVersion.version "SNAPSHOT" -}} -
    -
    
    -<dependencies>
    -    <dependency>
    -        <groupId>org.mongodb</groupId>
    -        <artifactId>{{$currentDriver.name}}</artifactId>
    -        <version>{{$currentVersion.version}}</version>
    -    </dependency>
    -</dependencies>
    -<repositories>
    -    <repository>
    -        <id>sonatype-snapshots</id>
    -        <name>Sontatype Snapshots</name>
    -        <url>https://oss.sonatype.org/content/repositories/snapshots</url>
    -        <snapshots>
    -            <enabled>true</enabled>
    -        </snapshots>
    -    </repository>
    -</repositories>
    -
    -
    -
    - -{{- else -}} -
    -
    
    -<dependencies>
    -    <dependency>
    -        <groupId>org.mongodb</groupId>
    -        <artifactId>{{$currentDriver.name}}</artifactId>
    -        <version>{{$currentVersion.version}}</version>
    -    </dependency>
    -</dependencies>
    -
    -
    -
    - -{{- end -}} - {{- if eq $currentVersion.status "current" -}} - {{- $.Scratch.Set "qs.firstDriver" false -}} - {{- end -}} - {{- $.Scratch.Add "qs.versionPos" 1 -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- $driverPos := $.Scratch.Get "qs.driverPos" -}} -
    - {{- $currentDriver.description | markdownify -}} -
    - {{- $.Scratch.Add "qs.driverPos" 1 -}} - {{- end -}} - {{- end -}} -
    -
    -
    diff --git a/docs/landing/layouts/partials/releases.html b/docs/landing/layouts/partials/releases.html deleted file mode 100644 index c4e94b9d6bd..00000000000 --- a/docs/landing/layouts/partials/releases.html +++ /dev/null @@ -1,16 +0,0 @@ -{{- with $.Site.Data.releases.versions -}} -
    -

    Releases

    - - - - {{- range . -}} - - - - - {{- end -}} - -
    ReleaseDocumentation
    {{.version}}Reference | API
    -
    -{{- end -}} diff --git a/docs/landing/static/.nojekyll b/docs/landing/static/.nojekyll deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/docs/landing/static/apple-touch-icon.png b/docs/landing/static/apple-touch-icon.png deleted file mode 100644 index 9f98bad32ca..00000000000 Binary files a/docs/landing/static/apple-touch-icon.png and /dev/null differ diff --git a/docs/landing/static/favicon.ico b/docs/landing/static/favicon.ico deleted file mode 100644 index 1cb9531f4b7..00000000000 Binary files a/docs/landing/static/favicon.ico and /dev/null differ diff --git a/docs/landing/static/s/css/frontpage.css b/docs/landing/static/s/css/frontpage.css deleted file mode 100644 index 189f5c4dee8..00000000000 --- a/docs/landing/static/s/css/frontpage.css +++ /dev/null @@ -1,393 +0,0 @@ -body { - font-family: "PT Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; - margin: 0; - padding: 0; - color: #494747; - font-size: 16px; - -webkit-font-smoothing: antialiased; - background-color: #f3f4eb; - color: #4c3a2c; - padding-top: 50px; -} - -h1, h2, h3, h4, h5, h6 { - font-family: "PT Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; - font-weight: normal; - color: #313030; -} - -a { - color: #006cbc; - text-decoration: none; -} - -a:hover { - text-decoration: underline; -} - -dt { - margin-top: 10px; - font-size: 18px; -} - -/* Header styles */ - -#header-db { - position: fixed; - -webkit-transform: translateZ(0); /* hack around Chrome bug: http://stackoverflow.com/questions/11258877/fixed-element-disappears-in-chrome */ - height: 50px; - top: 0; - left: 0; - width: 100%; - font-size: 31px; - background-color: #3b291f; - color: white; - margin: 0; - padding: 0; - z-index: 100; -} - -#header-db .header-content { - width: 100%; - padding: 0 12px; - - transition: width 0.4s cubic-bezier(.02,.01,.47,1); - -moz-transition: width 0.4s cubic-bezier(.02,.01,.47,1); - -webkit-transition: width 0.4s cubic-bezier(.02,.01,.47,1); -} - -#header-db .nav-items { 
- display: inline-block; -} - -#header-db .nav-items > a { - font-size: 14px; -} - -#header-db a { - color: white; -} - -#header-db .logo img { - height: 36px; - vertical-align: top; -} -#header-db .nav-items > a:not(:last-child) { - margin-right: 15px; -} - -div.gsc-control-cse-en, div.gsc-control-cse { padding: 0 !important; } - -form.gsc-search-box { - background-color: rgba(255,255,255,0.3); - border-radius: 6px; - border: 1px solid #3b2920; -} - -.gsc-search-box.gsc-search-box-tools .gsc-search-box .gsc-input { - padding: 0; -} - -div.gsc-input-box, -.gsc-search-box .gsc-input>input:hover, .gsc-input-box-hover, -.gsc-search-box .gsc-input>input:focus, .gsc-input-box-focus { - border: 0; - background: transparent; - box-shadow: none; -} - -/* [name] selector is a hack to override google's stylsheet */ -.gsc-input input.gsc-input[name="search"] { - background-color: transparent !important; - color: white; - font-weight: 300; - font-size: 15px; - height: 1.3em !important; -} - -.gsc-input input.gsc-input::-webkit-input-placeholder, -.gsc-input input.gsc-input:-moz-input-placeholder, -.gsc-input input.gsc-input::-moz-input-placeholder, -.gsc-input input.gsc-input:-ms-input-placeholder { - color: #CCC; -} - -/* Clear search button */ -.gsib_b { - display: none; -} - -/* [title] selector is a hack to override google's stylesheet */ -input.gsc-search-button[title], input.gsc-search-button:hover[title], input.gsc-search-button:focus[title] { - border-color: transparent; - background-color: transparent; - padding: 0 8px; - box-sizing: content-box; - -moz-box-sizing: content-box; - -webkit-box-sizing: content-box; -} - -#header-db .logo { - padding: 5px 0; -} - -#header-db .search-db { - transition: width 0.3s linear; - -webkit-transition: width 0.3s linear; - -moz-transition: width 0.3s linear; - -o-transition: width 0.3s linear; - margin-left: 45px; - margin-top: 11px; - display: inline-block; -} - -#header-db .search-db.narrow { - width: 98px; -} - -#header-db 
.search-db.wide { - width: 226px; -} - -/* Footer styles */ -.footer { - width:auto; - font-size:80%; - border:none; - padding: 20px 0; -} - -.footer .copyright { - text-align: center; -} - -.footer p { - margin: 1em 0; - padding: 0; - line-height:1.5; -} - -.footer a { - color: #989898; - text-decoration: underline; -} - -.footer h3 { - margin-top: 10px; - font-size: 16px; -} - -.footer .box { - background-color: #f5f6f7; - min-height: 20px; - padding: 15px; - margin-bottom: 20px; - text-align: left; -} - -.footer .box a { - color: #333; - text-decoration: none; -} - -.footer .box a:hover { - text-decoration: underline; -} - -.footer ul { - list-style-type: none; - padding-left: 20px; -} - -.footer .section-0 { - padding-left: 100px; -} - -.footer .section-1 { - padding-left: 46px; -} - -.footer .section-2 { - padding-left: 64px; -} - -.footer .section-3 { - padding-left: 49px; -} - - -/* Hero Styles */ - -#hero { - background-color: #666; - color: #f3f4eb; - border-bottom: 1px solid #313030; - padding: 12px 0px; -} - -#mongodbJVMlogo { - background-image: url("../img/mongoJVMlogo.png"); - background-position: left top; - background-repeat: no-repeat; - width: 83px; - height: 150px; -} - -#hero h2 { - border-top: 3px solid #6ca439; - color: #f3f4eb; - padding: 10px 0; - display: inline-block; -} - -.btn-mongo, .btn-mongo:visited, .btn-mongo:active { - background-image: none; - border-radius: 0; - color: white; - font-size: 14px; - font-weight: 800; - line-height: 14px; - border: 2px solid #6ca439; - text-shadow: none; - text-transform: uppercase; -} - -.btn-mongo:hover { - color: white; - border-color:#34302d; - box-shadow:none; - text-decoration:none; -} - -.btn-dark { - border: 2px solid #3b291f; - background-color: #3b291f; - color: white; - border-radius: 0; -} - -.btn-dark:hover { - color: white; - border: 2px solid #6ca439; - box-shadow:none; - border-radius: 0; -} - -.btn-download, .btn-download:visited, .btn-download:active { - margin-top: -10px; - 
padding: 15px 10px; - text-shadow: none; - text-transform: uppercase; -} - - -/* Releases styles */ -#releases { - padding-bottom: 20px; -} - -#releases h3 { - text-align: center; -} - -#releases table { - border: 1px solid #3b291f; - border-collapse: initial; - margin-bottom: 0px; -} - -#releases table thead { - font-size: 16px; - font-weight: 500; - text-transform: uppercase; -} - -#releases table td a { - padding: 0px 10px; -} - -/* Download widget */ -#downloadWidget { - border: 1px solid #3b291f; -} - -#downloadWidget .downloadForm { - margin-top: 10px; -} - -#downloadWidget .downloadLink { - padding-left: 0; - padding-right: 5px; -} - -#downloadWidget .description { - border-top: 1px solid #3b291f; - padding: 10px; - background-color: #666; - color: #f3f4eb; - font-weight: 800; -} - -#downloadWidget pre { - border-radius: 0; - margin: 10px; - padding: 0; -} - -#downloadWidget pre code { - padding: 10px; - min-height: 65px; -} - -#downloadWidget .clipboard { - position: relative; -} - -#downloadWidget .clipboard button { - position: absolute; - right: 10px; - top: 10px; - border-radius: 0; - color: white; - font-size: 2em; -} - -#downloadWidget .clipboard .zeroclipboard-is-hover { - color: white; - border: 2px solid #6ca439; -} - -/* University Promo */ -#universityPromo h3 { - background-image: url("../img/mongodb-university-logo.png"); - background-position: left top; - background-repeat: no-repeat; - height: 72px; -} - -#universityPromo h3 span { - display: none; -} - -#universityPromo h4 { - margin-top: 20px; -} - -#universityPromo .hero-container .h4-wrapper { - width: 100%; - margin-top: -30px; - padding-bottom: 40px; -} - -#universityPromo .hero-container .h4-wrapper h4 { - margin: 0 auto; - color: white; - font-weight: 500; - text-align: center; - background: #666; -} - -code { - background-color: #ddd; - color: #494747; -} diff --git a/docs/landing/static/s/img/24px-baseline-overlay.png b/docs/landing/static/s/img/24px-baseline-overlay.png deleted 
file mode 100644 index 9aa62dcdd83..00000000000 Binary files a/docs/landing/static/s/img/24px-baseline-overlay.png and /dev/null differ diff --git a/docs/landing/static/s/img/back-body.png b/docs/landing/static/s/img/back-body.png deleted file mode 100644 index 0eb9bc5e960..00000000000 Binary files a/docs/landing/static/s/img/back-body.png and /dev/null differ diff --git a/docs/landing/static/s/img/code-block-bg.png b/docs/landing/static/s/img/code-block-bg.png deleted file mode 100644 index aecf24d6d20..00000000000 Binary files a/docs/landing/static/s/img/code-block-bg.png and /dev/null differ diff --git a/docs/landing/static/s/img/code-block-bg@2x.png b/docs/landing/static/s/img/code-block-bg@2x.png deleted file mode 100644 index c0c4d806bc1..00000000000 Binary files a/docs/landing/static/s/img/code-block-bg@2x.png and /dev/null differ diff --git a/docs/landing/static/s/img/favicon.png b/docs/landing/static/s/img/favicon.png deleted file mode 100644 index f9f54468dd5..00000000000 Binary files a/docs/landing/static/s/img/favicon.png and /dev/null differ diff --git a/docs/landing/static/s/img/gray.png b/docs/landing/static/s/img/gray.png deleted file mode 100755 index 3807691d3fd..00000000000 Binary files a/docs/landing/static/s/img/gray.png and /dev/null differ diff --git a/docs/landing/static/s/img/logo-mongodb-header.png b/docs/landing/static/s/img/logo-mongodb-header.png deleted file mode 100755 index fa2dccfa620..00000000000 Binary files a/docs/landing/static/s/img/logo-mongodb-header.png and /dev/null differ diff --git a/docs/landing/static/s/img/mongoJVMlogo.png b/docs/landing/static/s/img/mongoJVMlogo.png deleted file mode 100644 index e2d5598c44e..00000000000 Binary files a/docs/landing/static/s/img/mongoJVMlogo.png and /dev/null differ diff --git a/docs/landing/static/s/img/mongodb-university-logo.png b/docs/landing/static/s/img/mongodb-university-logo.png deleted file mode 100644 index c2a288ecccb..00000000000 Binary files 
a/docs/landing/static/s/img/mongodb-university-logo.png and /dev/null differ diff --git a/docs/landing/static/s/img/social-facebook.png b/docs/landing/static/s/img/social-facebook.png deleted file mode 100644 index 4a8e6cf831a..00000000000 Binary files a/docs/landing/static/s/img/social-facebook.png and /dev/null differ diff --git a/docs/landing/static/s/img/social-facebook@2x.png b/docs/landing/static/s/img/social-facebook@2x.png deleted file mode 100644 index dcbd4074814..00000000000 Binary files a/docs/landing/static/s/img/social-facebook@2x.png and /dev/null differ diff --git a/docs/landing/static/s/img/social-gplus.png b/docs/landing/static/s/img/social-gplus.png deleted file mode 100644 index efbac7d18c9..00000000000 Binary files a/docs/landing/static/s/img/social-gplus.png and /dev/null differ diff --git a/docs/landing/static/s/img/social-gplus@2x.png b/docs/landing/static/s/img/social-gplus@2x.png deleted file mode 100644 index 45f130c1a3f..00000000000 Binary files a/docs/landing/static/s/img/social-gplus@2x.png and /dev/null differ diff --git a/docs/landing/static/s/img/social-twitter.png b/docs/landing/static/s/img/social-twitter.png deleted file mode 100644 index 05f534c47e0..00000000000 Binary files a/docs/landing/static/s/img/social-twitter.png and /dev/null differ diff --git a/docs/landing/static/s/img/social-twitter@2x.png b/docs/landing/static/s/img/social-twitter@2x.png deleted file mode 100644 index e84e6c0d332..00000000000 Binary files a/docs/landing/static/s/img/social-twitter@2x.png and /dev/null differ diff --git a/docs/landing/static/s/img/social-youtube.png b/docs/landing/static/s/img/social-youtube.png deleted file mode 100644 index 1cc3167b5b3..00000000000 Binary files a/docs/landing/static/s/img/social-youtube.png and /dev/null differ diff --git a/docs/landing/static/s/img/social-youtube@2x.png b/docs/landing/static/s/img/social-youtube@2x.png deleted file mode 100644 index 033e628ce65..00000000000 Binary files 
a/docs/landing/static/s/img/social-youtube@2x.png and /dev/null differ diff --git a/docs/landing/static/s/img/trans-user-back.png b/docs/landing/static/s/img/trans-user-back.png deleted file mode 100644 index 388216cfcc4..00000000000 Binary files a/docs/landing/static/s/img/trans-user-back.png and /dev/null differ diff --git a/docs/landing/static/s/img/trans-user-left.png b/docs/landing/static/s/img/trans-user-left.png deleted file mode 100644 index a96245e706d..00000000000 Binary files a/docs/landing/static/s/img/trans-user-left.png and /dev/null differ diff --git a/docs/landing/static/s/img/trans-user-right.png b/docs/landing/static/s/img/trans-user-right.png deleted file mode 100644 index e7069e161d7..00000000000 Binary files a/docs/landing/static/s/img/trans-user-right.png and /dev/null differ diff --git a/docs/landing/static/s/js/frontpage.js b/docs/landing/static/s/js/frontpage.js deleted file mode 100644 index 34d52005228..00000000000 --- a/docs/landing/static/s/js/frontpage.js +++ /dev/null @@ -1,73 +0,0 @@ -function initializeJS() { - jQuery('.driverPicker').selectpicker(); - jQuery('.driverPicker').change(toggleDownload); - jQuery('.releasePicker').selectpicker(); - jQuery('.releasePicker').change(toggleDownload); - jQuery('.distroPicker').bootstrapToggle(); - jQuery('.distroPicker').change(toggleDownload); - - var clipboard = new ZeroClipboard(jQuery(".clipboard button")); - var clipBridge = $('#global-zeroclipboard-html-bridge'); - clipBridge.tooltip({title: "copy to clipboard", placement: 'bottom'}); - clipboard.on( 'copy', function(event) { - clipBridge.attr('title', 'copied').tooltip('fixTitle').tooltip('show'); - $('#global-zeroclipboard-html-bridge').tooltip({title: "copied", placement: 'bottom'}); - var button = jQuery(".clipboard button"); - button.addClass('btn-success'); - clipboard.clearData(); - prefix = $('.distroPicker').prop('checked') ? 
"#maven" : "#gradle" - driverVersion = $('.driverPicker').selectpicker().val(); - releaseVersion = $('.releasePicker').selectpicker().val(); - activeSample = prefix + "-" + releaseVersion + "-" + driverVersion; - clipboard.setText($(activeSample).text()); - - button.animate({ opacity: 1 }, 400, function() { - button.removeClass('btn-success'); - clipBridge.attr('title', 'copy to clipboard').tooltip('hide').tooltip('fixTitle'); - }); - }); -}; - -var toggleDownload = function() { - downloadLink = 'https://oss.sonatype.org/content/repositories/releases/org/mongodb/'; - downloadSnapshotLink = 'https://oss.sonatype.org/content/repositories/snapshots/org/mongodb/'; - prefix = $('.distroPicker').prop('checked') ? "#maven" : "#gradle"; - driverVersion = $('.driverPicker').selectpicker().val(); - releaseVersion = $('.releasePicker').selectpicker().val(); - activeDriver = $('.driverPicker option:selected').text(); - activeVersion = $('.releasePicker option:selected').text(); - - driverVersions = $('.driverPicker option:selected').data('versions'); - $('.releasePicker option').each(function(){ - $(this).prop('disabled', driverVersions.indexOf($(this).text()) < 0); - }); - - $('.driverPicker option').each(function(){ - driverVersions = $(this).data('versions'); - $(this).prop('disabled', driverVersions.indexOf(activeVersion) < 0); - }); - - $('.driverPicker').selectpicker('refresh'); - $('.releasePicker').selectpicker('refresh'); - - activeSample = prefix + "-" + releaseVersion + "-" + driverVersion; - activeDescription = "#driver-" + driverVersion; - - if (activeVersion.indexOf("SNAPSHOT") > -1) { - activeLink = downloadSnapshotLink + activeDriver +'/' + activeVersion + '/'; - } else { - activeLink = downloadLink + activeDriver +'/' + activeVersion + '/'; - } - - $('.download').addClass('hidden'); - $(activeSample).removeClass('hidden'); - $(activeDescription).removeClass('hidden'); - $('#downloadLink').attr('href', activeLink); -}; - -jQuery(document).ready(function(){ - 
initializeJS(); - jQuery('[data-toggle="tooltip"]').tooltip(); - jQuery("body").addClass("hljsCode"); - hljs.initHighlightingOnLoad(); -}); diff --git a/docs/landing/static/s/js/jquery.js b/docs/landing/static/s/js/jquery.js deleted file mode 100644 index bfb2376d2ae..00000000000 --- a/docs/landing/static/s/js/jquery.js +++ /dev/null @@ -1,9789 +0,0 @@ -/*! - * jQuery JavaScript Library v1.10.2 - * http://jquery.com/ - * - * Includes Sizzle.js - * http://sizzlejs.com/ - * - * Copyright 2005, 2013 jQuery Foundation, Inc. and other contributors - * Released under the MIT license - * http://jquery.org/license - * - * Date: 2013-07-03T13:48Z - */ -(function( window, undefined ) { - -// Can't do this because several apps including ASP.NET trace -// the stack via arguments.caller.callee and Firefox dies if -// you try to trace through "use strict" call chains. (#13335) -// Support: Firefox 18+ -//"use strict"; -var - // The deferred used on DOM ready - readyList, - - // A central reference to the root jQuery(document) - rootjQuery, - - // Support: IE<10 - // For `typeof xmlNode.method` instead of `xmlNode.method !== undefined` - core_strundefined = typeof undefined, - - // Use the correct document accordingly with window argument (sandbox) - location = window.location, - document = window.document, - docElem = document.documentElement, - - // Map over jQuery in case of overwrite - _jQuery = window.jQuery, - - // Map over the $ in case of overwrite - _$ = window.$, - - // [[Class]] -> type pairs - class2type = {}, - - // List of deleted data cache ids, so we can reuse them - core_deletedIds = [], - - core_version = "1.10.2", - - // Save a reference to some core methods - core_concat = core_deletedIds.concat, - core_push = core_deletedIds.push, - core_slice = core_deletedIds.slice, - core_indexOf = core_deletedIds.indexOf, - core_toString = class2type.toString, - core_hasOwn = class2type.hasOwnProperty, - core_trim = core_version.trim, - - // Define a local copy of jQuery 
- jQuery = function( selector, context ) { - // The jQuery object is actually just the init constructor 'enhanced' - return new jQuery.fn.init( selector, context, rootjQuery ); - }, - - // Used for matching numbers - core_pnum = /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source, - - // Used for splitting on whitespace - core_rnotwhite = /\S+/g, - - // Make sure we trim BOM and NBSP (here's looking at you, Safari 5.0 and IE) - rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, - - // A simple way to check for HTML strings - // Prioritize #id over to avoid XSS via location.hash (#9521) - // Strict HTML recognition (#11290: must start with <) - rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/, - - // Match a standalone tag - rsingleTag = /^<(\w+)\s*\/?>(?:<\/\1>|)$/, - - // JSON RegExp - rvalidchars = /^[\],:{}\s]*$/, - rvalidbraces = /(?:^|:|,)(?:\s*\[)+/g, - rvalidescape = /\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g, - rvalidtokens = /"[^"\\\r\n]*"|true|false|null|-?(?:\d+\.|)\d+(?:[eE][+-]?\d+|)/g, - - // Matches dashed string for camelizing - rmsPrefix = /^-ms-/, - rdashAlpha = /-([\da-z])/gi, - - // Used by jQuery.camelCase as callback to replace() - fcamelCase = function( all, letter ) { - return letter.toUpperCase(); - }, - - // The ready event handler - completed = function( event ) { - - // readyState === "complete" is good enough for us to call the dom ready in oldIE - if ( document.addEventListener || event.type === "load" || document.readyState === "complete" ) { - detach(); - jQuery.ready(); - } - }, - // Clean-up method for dom ready events - detach = function() { - if ( document.addEventListener ) { - document.removeEventListener( "DOMContentLoaded", completed, false ); - window.removeEventListener( "load", completed, false ); - - } else { - document.detachEvent( "onreadystatechange", completed ); - window.detachEvent( "onload", completed ); - } - }; - -jQuery.fn = jQuery.prototype = { - // The current version of jQuery being used - jquery: core_version, - - 
constructor: jQuery, - init: function( selector, context, rootjQuery ) { - var match, elem; - - // HANDLE: $(""), $(null), $(undefined), $(false) - if ( !selector ) { - return this; - } - - // Handle HTML strings - if ( typeof selector === "string" ) { - if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) { - // Assume that strings that start and end with <> are HTML and skip the regex check - match = [ null, selector, null ]; - - } else { - match = rquickExpr.exec( selector ); - } - - // Match html or make sure no context is specified for #id - if ( match && (match[1] || !context) ) { - - // HANDLE: $(html) -> $(array) - if ( match[1] ) { - context = context instanceof jQuery ? context[0] : context; - - // scripts is true for back-compat - jQuery.merge( this, jQuery.parseHTML( - match[1], - context && context.nodeType ? context.ownerDocument || context : document, - true - ) ); - - // HANDLE: $(html, props) - if ( rsingleTag.test( match[1] ) && jQuery.isPlainObject( context ) ) { - for ( match in context ) { - // Properties of context are called as methods if possible - if ( jQuery.isFunction( this[ match ] ) ) { - this[ match ]( context[ match ] ); - - // ...and otherwise set as attributes - } else { - this.attr( match, context[ match ] ); - } - } - } - - return this; - - // HANDLE: $(#id) - } else { - elem = document.getElementById( match[2] ); - - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - if ( elem && elem.parentNode ) { - // Handle the case where IE and Opera return items - // by name instead of ID - if ( elem.id !== match[2] ) { - return rootjQuery.find( selector ); - } - - // Otherwise, we inject the element directly into the jQuery object - this.length = 1; - this[0] = elem; - } - - this.context = document; - this.selector = selector; - return this; - } - - // HANDLE: $(expr, $(...)) - } else if ( !context || context.jquery ) { - 
return ( context || rootjQuery ).find( selector ); - - // HANDLE: $(expr, context) - // (which is just equivalent to: $(context).find(expr) - } else { - return this.constructor( context ).find( selector ); - } - - // HANDLE: $(DOMElement) - } else if ( selector.nodeType ) { - this.context = this[0] = selector; - this.length = 1; - return this; - - // HANDLE: $(function) - // Shortcut for document ready - } else if ( jQuery.isFunction( selector ) ) { - return rootjQuery.ready( selector ); - } - - if ( selector.selector !== undefined ) { - this.selector = selector.selector; - this.context = selector.context; - } - - return jQuery.makeArray( selector, this ); - }, - - // Start with an empty selector - selector: "", - - // The default length of a jQuery object is 0 - length: 0, - - toArray: function() { - return core_slice.call( this ); - }, - - // Get the Nth element in the matched element set OR - // Get the whole matched element set as a clean array - get: function( num ) { - return num == null ? - - // Return a 'clean' array - this.toArray() : - - // Return just the object - ( num < 0 ? this[ this.length + num ] : this[ num ] ); - }, - - // Take an array of elements and push it onto the stack - // (returning the new matched element set) - pushStack: function( elems ) { - - // Build a new jQuery matched element set - var ret = jQuery.merge( this.constructor(), elems ); - - // Add the old object onto the stack (as a reference) - ret.prevObject = this; - ret.context = this.context; - - // Return the newly-formed element set - return ret; - }, - - // Execute a callback for every element in the matched set. - // (You can seed the arguments with an array of args, but this is - // only used internally.) 
- each: function( callback, args ) { - return jQuery.each( this, callback, args ); - }, - - ready: function( fn ) { - // Add the callback - jQuery.ready.promise().done( fn ); - - return this; - }, - - slice: function() { - return this.pushStack( core_slice.apply( this, arguments ) ); - }, - - first: function() { - return this.eq( 0 ); - }, - - last: function() { - return this.eq( -1 ); - }, - - eq: function( i ) { - var len = this.length, - j = +i + ( i < 0 ? len : 0 ); - return this.pushStack( j >= 0 && j < len ? [ this[j] ] : [] ); - }, - - map: function( callback ) { - return this.pushStack( jQuery.map(this, function( elem, i ) { - return callback.call( elem, i, elem ); - })); - }, - - end: function() { - return this.prevObject || this.constructor(null); - }, - - // For internal use only. - // Behaves like an Array's method, not like a jQuery method. - push: core_push, - sort: [].sort, - splice: [].splice -}; - -// Give the init function the jQuery prototype for later instantiation -jQuery.fn.init.prototype = jQuery.fn; - -jQuery.extend = jQuery.fn.extend = function() { - var src, copyIsArray, copy, name, options, clone, - target = arguments[0] || {}, - i = 1, - length = arguments.length, - deep = false; - - // Handle a deep copy situation - if ( typeof target === "boolean" ) { - deep = target; - target = arguments[1] || {}; - // skip the boolean and the target - i = 2; - } - - // Handle case when target is a string or something (possible in deep copy) - if ( typeof target !== "object" && !jQuery.isFunction(target) ) { - target = {}; - } - - // extend jQuery itself if only one argument is passed - if ( length === i ) { - target = this; - --i; - } - - for ( ; i < length; i++ ) { - // Only deal with non-null/undefined values - if ( (options = arguments[ i ]) != null ) { - // Extend the base object - for ( name in options ) { - src = target[ name ]; - copy = options[ name ]; - - // Prevent never-ending loop - if ( target === copy ) { - continue; - } - - // Recurse 
if we're merging plain objects or arrays - if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) { - if ( copyIsArray ) { - copyIsArray = false; - clone = src && jQuery.isArray(src) ? src : []; - - } else { - clone = src && jQuery.isPlainObject(src) ? src : {}; - } - - // Never move original objects, clone them - target[ name ] = jQuery.extend( deep, clone, copy ); - - // Don't bring in undefined values - } else if ( copy !== undefined ) { - target[ name ] = copy; - } - } - } - } - - // Return the modified object - return target; -}; - -jQuery.extend({ - // Unique for each copy of jQuery on the page - // Non-digits removed to match rinlinejQuery - expando: "jQuery" + ( core_version + Math.random() ).replace( /\D/g, "" ), - - noConflict: function( deep ) { - if ( window.$ === jQuery ) { - window.$ = _$; - } - - if ( deep && window.jQuery === jQuery ) { - window.jQuery = _jQuery; - } - - return jQuery; - }, - - // Is the DOM ready to be used? Set to true once it occurs. - isReady: false, - - // A counter to track how many items to wait for before - // the ready event fires. See #6781 - readyWait: 1, - - // Hold (or release) the ready event - holdReady: function( hold ) { - if ( hold ) { - jQuery.readyWait++; - } else { - jQuery.ready( true ); - } - }, - - // Handle when the DOM is ready - ready: function( wait ) { - - // Abort if there are pending holds or we're already ready - if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { - return; - } - - // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443). 
- if ( !document.body ) { - return setTimeout( jQuery.ready ); - } - - // Remember that the DOM is ready - jQuery.isReady = true; - - // If a normal DOM Ready event fired, decrement, and wait if need be - if ( wait !== true && --jQuery.readyWait > 0 ) { - return; - } - - // If there are functions bound, to execute - readyList.resolveWith( document, [ jQuery ] ); - - // Trigger any bound ready events - if ( jQuery.fn.trigger ) { - jQuery( document ).trigger("ready").off("ready"); - } - }, - - // See test/unit/core.js for details concerning isFunction. - // Since version 1.3, DOM methods and functions like alert - // aren't supported. They return false on IE (#2968). - isFunction: function( obj ) { - return jQuery.type(obj) === "function"; - }, - - isArray: Array.isArray || function( obj ) { - return jQuery.type(obj) === "array"; - }, - - isWindow: function( obj ) { - /* jshint eqeqeq: false */ - return obj != null && obj == obj.window; - }, - - isNumeric: function( obj ) { - return !isNaN( parseFloat(obj) ) && isFinite( obj ); - }, - - type: function( obj ) { - if ( obj == null ) { - return String( obj ); - } - return typeof obj === "object" || typeof obj === "function" ? - class2type[ core_toString.call(obj) ] || "object" : - typeof obj; - }, - - isPlainObject: function( obj ) { - var key; - - // Must be an Object. - // Because of IE, we also have to check the presence of the constructor property. 
- // Make sure that DOM nodes and window objects don't pass through, as well - if ( !obj || jQuery.type(obj) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) { - return false; - } - - try { - // Not own constructor property must be Object - if ( obj.constructor && - !core_hasOwn.call(obj, "constructor") && - !core_hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) { - return false; - } - } catch ( e ) { - // IE8,9 Will throw exceptions on certain host objects #9897 - return false; - } - - // Support: IE<9 - // Handle iteration over inherited properties before own properties. - if ( jQuery.support.ownLast ) { - for ( key in obj ) { - return core_hasOwn.call( obj, key ); - } - } - - // Own properties are enumerated firstly, so to speed up, - // if last one is own, then all properties are own. - for ( key in obj ) {} - - return key === undefined || core_hasOwn.call( obj, key ); - }, - - isEmptyObject: function( obj ) { - var name; - for ( name in obj ) { - return false; - } - return true; - }, - - error: function( msg ) { - throw new Error( msg ); - }, - - // data: string of html - // context (optional): If specified, the fragment will be created in this context, defaults to document - // keepScripts (optional): If true, will include scripts passed in the html string - parseHTML: function( data, context, keepScripts ) { - if ( !data || typeof data !== "string" ) { - return null; - } - if ( typeof context === "boolean" ) { - keepScripts = context; - context = false; - } - context = context || document; - - var parsed = rsingleTag.exec( data ), - scripts = !keepScripts && []; - - // Single tag - if ( parsed ) { - return [ context.createElement( parsed[1] ) ]; - } - - parsed = jQuery.buildFragment( [ data ], context, scripts ); - if ( scripts ) { - jQuery( scripts ).remove(); - } - return jQuery.merge( [], parsed.childNodes ); - }, - - parseJSON: function( data ) { - // Attempt to parse using the native JSON parser first - if ( window.JSON && 
window.JSON.parse ) { - return window.JSON.parse( data ); - } - - if ( data === null ) { - return data; - } - - if ( typeof data === "string" ) { - - // Make sure leading/trailing whitespace is removed (IE can't handle it) - data = jQuery.trim( data ); - - if ( data ) { - // Make sure the incoming data is actual JSON - // Logic borrowed from http://json.org/json2.js - if ( rvalidchars.test( data.replace( rvalidescape, "@" ) - .replace( rvalidtokens, "]" ) - .replace( rvalidbraces, "")) ) { - - return ( new Function( "return " + data ) )(); - } - } - } - - jQuery.error( "Invalid JSON: " + data ); - }, - - // Cross-browser xml parsing - parseXML: function( data ) { - var xml, tmp; - if ( !data || typeof data !== "string" ) { - return null; - } - try { - if ( window.DOMParser ) { // Standard - tmp = new DOMParser(); - xml = tmp.parseFromString( data , "text/xml" ); - } else { // IE - xml = new ActiveXObject( "Microsoft.XMLDOM" ); - xml.async = "false"; - xml.loadXML( data ); - } - } catch( e ) { - xml = undefined; - } - if ( !xml || !xml.documentElement || xml.getElementsByTagName( "parsererror" ).length ) { - jQuery.error( "Invalid XML: " + data ); - } - return xml; - }, - - noop: function() {}, - - // Evaluates a script in a global context - // Workarounds based on findings by Jim Driscoll - // http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context - globalEval: function( data ) { - if ( data && jQuery.trim( data ) ) { - // We use execScript on Internet Explorer - // We use an anonymous function so that context is window - // rather than jQuery in Firefox - ( window.execScript || function( data ) { - window[ "eval" ].call( window, data ); - } )( data ); - } - }, - - // Convert dashed to camelCase; used by the css and data modules - // Microsoft forgot to hump their vendor prefix (#9572) - camelCase: function( string ) { - return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); - }, - - nodeName: function( elem, 
name ) { - return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); - }, - - // args is for internal usage only - each: function( obj, callback, args ) { - var value, - i = 0, - length = obj.length, - isArray = isArraylike( obj ); - - if ( args ) { - if ( isArray ) { - for ( ; i < length; i++ ) { - value = callback.apply( obj[ i ], args ); - - if ( value === false ) { - break; - } - } - } else { - for ( i in obj ) { - value = callback.apply( obj[ i ], args ); - - if ( value === false ) { - break; - } - } - } - - // A special, fast, case for the most common use of each - } else { - if ( isArray ) { - for ( ; i < length; i++ ) { - value = callback.call( obj[ i ], i, obj[ i ] ); - - if ( value === false ) { - break; - } - } - } else { - for ( i in obj ) { - value = callback.call( obj[ i ], i, obj[ i ] ); - - if ( value === false ) { - break; - } - } - } - } - - return obj; - }, - - // Use native String.trim function wherever possible - trim: core_trim && !core_trim.call("\uFEFF\xA0") ? - function( text ) { - return text == null ? - "" : - core_trim.call( text ); - } : - - // Otherwise use our own trimming functionality - function( text ) { - return text == null ? - "" : - ( text + "" ).replace( rtrim, "" ); - }, - - // results is for internal usage only - makeArray: function( arr, results ) { - var ret = results || []; - - if ( arr != null ) { - if ( isArraylike( Object(arr) ) ) { - jQuery.merge( ret, - typeof arr === "string" ? - [ arr ] : arr - ); - } else { - core_push.call( ret, arr ); - } - } - - return ret; - }, - - inArray: function( elem, arr, i ) { - var len; - - if ( arr ) { - if ( core_indexOf ) { - return core_indexOf.call( arr, elem, i ); - } - - len = arr.length; - i = i ? i < 0 ? 
Math.max( 0, len + i ) : i : 0; - - for ( ; i < len; i++ ) { - // Skip accessing in sparse arrays - if ( i in arr && arr[ i ] === elem ) { - return i; - } - } - } - - return -1; - }, - - merge: function( first, second ) { - var l = second.length, - i = first.length, - j = 0; - - if ( typeof l === "number" ) { - for ( ; j < l; j++ ) { - first[ i++ ] = second[ j ]; - } - } else { - while ( second[j] !== undefined ) { - first[ i++ ] = second[ j++ ]; - } - } - - first.length = i; - - return first; - }, - - grep: function( elems, callback, inv ) { - var retVal, - ret = [], - i = 0, - length = elems.length; - inv = !!inv; - - // Go through the array, only saving the items - // that pass the validator function - for ( ; i < length; i++ ) { - retVal = !!callback( elems[ i ], i ); - if ( inv !== retVal ) { - ret.push( elems[ i ] ); - } - } - - return ret; - }, - - // arg is for internal usage only - map: function( elems, callback, arg ) { - var value, - i = 0, - length = elems.length, - isArray = isArraylike( elems ), - ret = []; - - // Go through the array, translating each of the items to their - if ( isArray ) { - for ( ; i < length; i++ ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret[ ret.length ] = value; - } - } - - // Go through every key on the object, - } else { - for ( i in elems ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret[ ret.length ] = value; - } - } - } - - // Flatten any nested arrays - return core_concat.apply( [], ret ); - }, - - // A global GUID counter for objects - guid: 1, - - // Bind a function to a context, optionally partially applying any - // arguments. - proxy: function( fn, context ) { - var args, proxy, tmp; - - if ( typeof context === "string" ) { - tmp = fn[ context ]; - context = fn; - fn = tmp; - } - - // Quick check to determine if target is callable, in the spec - // this throws a TypeError, but we will just return undefined. 
- if ( !jQuery.isFunction( fn ) ) { - return undefined; - } - - // Simulated bind - args = core_slice.call( arguments, 2 ); - proxy = function() { - return fn.apply( context || this, args.concat( core_slice.call( arguments ) ) ); - }; - - // Set the guid of unique handler to the same of original handler, so it can be removed - proxy.guid = fn.guid = fn.guid || jQuery.guid++; - - return proxy; - }, - - // Multifunctional method to get and set values of a collection - // The value/s can optionally be executed if it's a function - access: function( elems, fn, key, value, chainable, emptyGet, raw ) { - var i = 0, - length = elems.length, - bulk = key == null; - - // Sets many values - if ( jQuery.type( key ) === "object" ) { - chainable = true; - for ( i in key ) { - jQuery.access( elems, fn, i, key[i], true, emptyGet, raw ); - } - - // Sets one value - } else if ( value !== undefined ) { - chainable = true; - - if ( !jQuery.isFunction( value ) ) { - raw = true; - } - - if ( bulk ) { - // Bulk operations run against the entire set - if ( raw ) { - fn.call( elems, value ); - fn = null; - - // ...except when executing function values - } else { - bulk = fn; - fn = function( elem, key, value ) { - return bulk.call( jQuery( elem ), value ); - }; - } - } - - if ( fn ) { - for ( ; i < length; i++ ) { - fn( elems[i], key, raw ? value : value.call( elems[i], i, fn( elems[i], key ) ) ); - } - } - } - - return chainable ? - elems : - - // Gets - bulk ? - fn.call( elems ) : - length ? fn( elems[0], key ) : emptyGet; - }, - - now: function() { - return ( new Date() ).getTime(); - }, - - // A method for quickly swapping in/out CSS properties to get correct calculations. - // Note: this method belongs to the css module but it's needed here for the support module. - // If support gets modularized, this method should be moved back to the css module. 
- swap: function( elem, options, callback, args ) { - var ret, name, - old = {}; - - // Remember the old values, and insert the new ones - for ( name in options ) { - old[ name ] = elem.style[ name ]; - elem.style[ name ] = options[ name ]; - } - - ret = callback.apply( elem, args || [] ); - - // Revert the old values - for ( name in options ) { - elem.style[ name ] = old[ name ]; - } - - return ret; - } -}); - -jQuery.ready.promise = function( obj ) { - if ( !readyList ) { - - readyList = jQuery.Deferred(); - - // Catch cases where $(document).ready() is called after the browser event has already occurred. - // we once tried to use readyState "interactive" here, but it caused issues like the one - // discovered by ChrisS here: http://bugs.jquery.com/ticket/12282#comment:15 - if ( document.readyState === "complete" ) { - // Handle it asynchronously to allow scripts the opportunity to delay ready - setTimeout( jQuery.ready ); - - // Standards-based browsers support DOMContentLoaded - } else if ( document.addEventListener ) { - // Use the handy event callback - document.addEventListener( "DOMContentLoaded", completed, false ); - - // A fallback to window.onload, that will always work - window.addEventListener( "load", completed, false ); - - // If IE event model is used - } else { - // Ensure firing before onload, maybe late but safe also for iframes - document.attachEvent( "onreadystatechange", completed ); - - // A fallback to window.onload, that will always work - window.attachEvent( "onload", completed ); - - // If IE and not a frame - // continually check to see if the document is ready - var top = false; - - try { - top = window.frameElement == null && document.documentElement; - } catch(e) {} - - if ( top && top.doScroll ) { - (function doScrollCheck() { - if ( !jQuery.isReady ) { - - try { - // Use the trick by Diego Perini - // http://javascript.nwbox.com/IEContentLoaded/ - top.doScroll("left"); - } catch(e) { - return setTimeout( doScrollCheck, 50 ); - } - 
- // detach all dom ready events - detach(); - - // and execute any waiting functions - jQuery.ready(); - } - })(); - } - } - } - return readyList.promise( obj ); -}; - -// Populate the class2type map -jQuery.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function(i, name) { - class2type[ "[object " + name + "]" ] = name.toLowerCase(); -}); - -function isArraylike( obj ) { - var length = obj.length, - type = jQuery.type( obj ); - - if ( jQuery.isWindow( obj ) ) { - return false; - } - - if ( obj.nodeType === 1 && length ) { - return true; - } - - return type === "array" || type !== "function" && - ( length === 0 || - typeof length === "number" && length > 0 && ( length - 1 ) in obj ); -} - -// All jQuery objects should point back to these -rootjQuery = jQuery(document); -/*! - * Sizzle CSS Selector Engine v1.10.2 - * http://sizzlejs.com/ - * - * Copyright 2013 jQuery Foundation, Inc. and other contributors - * Released under the MIT license - * http://jquery.org/license - * - * Date: 2013-07-03 - */ -(function( window, undefined ) { - -var i, - support, - cachedruns, - Expr, - getText, - isXML, - compile, - outermostContext, - sortInput, - - // Local document vars - setDocument, - document, - docElem, - documentIsHTML, - rbuggyQSA, - rbuggyMatches, - matches, - contains, - - // Instance-specific data - expando = "sizzle" + -(new Date()), - preferredDoc = window.document, - dirruns = 0, - done = 0, - classCache = createCache(), - tokenCache = createCache(), - compilerCache = createCache(), - hasDuplicate = false, - sortOrder = function( a, b ) { - if ( a === b ) { - hasDuplicate = true; - return 0; - } - return 0; - }, - - // General-purpose constants - strundefined = typeof undefined, - MAX_NEGATIVE = 1 << 31, - - // Instance methods - hasOwn = ({}).hasOwnProperty, - arr = [], - pop = arr.pop, - push_native = arr.push, - push = arr.push, - slice = arr.slice, - // Use a stripped-down indexOf if we can't use a native one - indexOf = 
arr.indexOf || function( elem ) { - var i = 0, - len = this.length; - for ( ; i < len; i++ ) { - if ( this[i] === elem ) { - return i; - } - } - return -1; - }, - - booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped", - - // Regular expressions - - // Whitespace characters http://www.w3.org/TR/css3-selectors/#whitespace - whitespace = "[\\x20\\t\\r\\n\\f]", - // http://www.w3.org/TR/css3-syntax/#characters - characterEncoding = "(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+", - - // Loosely modeled on CSS identifier characters - // An unquoted value should be a CSS identifier http://www.w3.org/TR/css3-selectors/#attribute-selectors - // Proper syntax: http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier - identifier = characterEncoding.replace( "w", "w#" ), - - // Acceptable operators http://www.w3.org/TR/selectors/#attribute-selectors - attributes = "\\[" + whitespace + "*(" + characterEncoding + ")" + whitespace + - "*(?:([*^$|!~]?=)" + whitespace + "*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|(" + identifier + ")|)|)" + whitespace + "*\\]", - - // Prefer arguments quoted, - // then not containing pseudos/brackets, - // then attribute selectors/non-parenthetical expressions, - // then anything else - // These preferences are here to reduce the number of selectors - // needing tokenize in the PSEUDO preFilter - pseudos = ":(" + characterEncoding + ")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|" + attributes.replace( 3, 8 ) + ")*)|.*)\\)|)", - - // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter - rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ), - - rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), - rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + "*" ), - - rsibling = new RegExp( whitespace + "*[+~]" ), - 
rattributeQuotes = new RegExp( "=" + whitespace + "*([^\\]'\"]*)" + whitespace + "*\\]", "g" ), - - rpseudo = new RegExp( pseudos ), - ridentifier = new RegExp( "^" + identifier + "$" ), - - matchExpr = { - "ID": new RegExp( "^#(" + characterEncoding + ")" ), - "CLASS": new RegExp( "^\\.(" + characterEncoding + ")" ), - "TAG": new RegExp( "^(" + characterEncoding.replace( "w", "w*" ) + ")" ), - "ATTR": new RegExp( "^" + attributes ), - "PSEUDO": new RegExp( "^" + pseudos ), - "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + whitespace + - "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + whitespace + - "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), - "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), - // For use in libraries implementing .is() - // We use this for POS matching in `select` - "needsContext": new RegExp( "^" + whitespace + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + - whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) - }, - - rnative = /^[^{]+\{\s*\[native \w/, - - // Easily-parseable/retrievable ID or TAG or CLASS selectors - rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, - - rinputs = /^(?:input|select|textarea|button)$/i, - rheader = /^h\d$/i, - - rescape = /'|\\/g, - - // CSS escapes http://www.w3.org/TR/CSS21/syndata.html#escaped-characters - runescape = new RegExp( "\\\\([\\da-f]{1,6}" + whitespace + "?|(" + whitespace + ")|.)", "ig" ), - funescape = function( _, escaped, escapedWhitespace ) { - var high = "0x" + escaped - 0x10000; - // NaN means non-codepoint - // Support: Firefox - // Workaround erroneous numeric interpretation of +"0x" - return high !== high || escapedWhitespace ? - escaped : - // BMP codepoint - high < 0 ? 
- String.fromCharCode( high + 0x10000 ) : - // Supplemental Plane codepoint (surrogate pair) - String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); - }; - -// Optimize for push.apply( _, NodeList ) -try { - push.apply( - (arr = slice.call( preferredDoc.childNodes )), - preferredDoc.childNodes - ); - // Support: Android<4.0 - // Detect silently failing push.apply - arr[ preferredDoc.childNodes.length ].nodeType; -} catch ( e ) { - push = { apply: arr.length ? - - // Leverage slice if possible - function( target, els ) { - push_native.apply( target, slice.call(els) ); - } : - - // Support: IE<9 - // Otherwise append directly - function( target, els ) { - var j = target.length, - i = 0; - // Can't trust NodeList.length - while ( (target[j++] = els[i++]) ) {} - target.length = j - 1; - } - }; -} - -function Sizzle( selector, context, results, seed ) { - var match, elem, m, nodeType, - // QSA vars - i, groups, old, nid, newContext, newSelector; - - if ( ( context ? context.ownerDocument || context : preferredDoc ) !== document ) { - setDocument( context ); - } - - context = context || document; - results = results || []; - - if ( !selector || typeof selector !== "string" ) { - return results; - } - - if ( (nodeType = context.nodeType) !== 1 && nodeType !== 9 ) { - return []; - } - - if ( documentIsHTML && !seed ) { - - // Shortcuts - if ( (match = rquickExpr.exec( selector )) ) { - // Speed-up: Sizzle("#ID") - if ( (m = match[1]) ) { - if ( nodeType === 9 ) { - elem = context.getElementById( m ); - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - if ( elem && elem.parentNode ) { - // Handle the case where IE, Opera, and Webkit return items - // by name instead of ID - if ( elem.id === m ) { - results.push( elem ); - return results; - } - } else { - return results; - } - } else { - // Context is not a document - if ( context.ownerDocument && (elem = context.ownerDocument.getElementById( m )) && - 
contains( context, elem ) && elem.id === m ) { - results.push( elem ); - return results; - } - } - - // Speed-up: Sizzle("TAG") - } else if ( match[2] ) { - push.apply( results, context.getElementsByTagName( selector ) ); - return results; - - // Speed-up: Sizzle(".CLASS") - } else if ( (m = match[3]) && support.getElementsByClassName && context.getElementsByClassName ) { - push.apply( results, context.getElementsByClassName( m ) ); - return results; - } - } - - // QSA path - if ( support.qsa && (!rbuggyQSA || !rbuggyQSA.test( selector )) ) { - nid = old = expando; - newContext = context; - newSelector = nodeType === 9 && selector; - - // qSA works strangely on Element-rooted queries - // We can work around this by specifying an extra ID on the root - // and working up from there (Thanks to Andrew Dupont for the technique) - // IE 8 doesn't work on object elements - if ( nodeType === 1 && context.nodeName.toLowerCase() !== "object" ) { - groups = tokenize( selector ); - - if ( (old = context.getAttribute("id")) ) { - nid = old.replace( rescape, "\\$&" ); - } else { - context.setAttribute( "id", nid ); - } - nid = "[id='" + nid + "'] "; - - i = groups.length; - while ( i-- ) { - groups[i] = nid + toSelector( groups[i] ); - } - newContext = rsibling.test( selector ) && context.parentNode || context; - newSelector = groups.join(","); - } - - if ( newSelector ) { - try { - push.apply( results, - newContext.querySelectorAll( newSelector ) - ); - return results; - } catch(qsaError) { - } finally { - if ( !old ) { - context.removeAttribute("id"); - } - } - } - } - } - - // All others - return select( selector.replace( rtrim, "$1" ), context, results, seed ); -} - -/** - * Create key-value caches of limited size - * @returns {Function(string, Object)} Returns the Object data after storing it on itself with - * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) - * deleting the oldest entry - */ -function createCache() { - var keys 
= []; - - function cache( key, value ) { - // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) - if ( keys.push( key += " " ) > Expr.cacheLength ) { - // Only keep the most recent entries - delete cache[ keys.shift() ]; - } - return (cache[ key ] = value); - } - return cache; -} - -/** - * Mark a function for special use by Sizzle - * @param {Function} fn The function to mark - */ -function markFunction( fn ) { - fn[ expando ] = true; - return fn; -} - -/** - * Support testing using an element - * @param {Function} fn Passed the created div and expects a boolean result - */ -function assert( fn ) { - var div = document.createElement("div"); - - try { - return !!fn( div ); - } catch (e) { - return false; - } finally { - // Remove from its parent by default - if ( div.parentNode ) { - div.parentNode.removeChild( div ); - } - // release memory in IE - div = null; - } -} - -/** - * Adds the same handler for all of the specified attrs - * @param {String} attrs Pipe-separated list of attributes - * @param {Function} handler The method that will be applied - */ -function addHandle( attrs, handler ) { - var arr = attrs.split("|"), - i = attrs.length; - - while ( i-- ) { - Expr.attrHandle[ arr[i] ] = handler; - } -} - -/** - * Checks document order of two siblings - * @param {Element} a - * @param {Element} b - * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b - */ -function siblingCheck( a, b ) { - var cur = b && a, - diff = cur && a.nodeType === 1 && b.nodeType === 1 && - ( ~b.sourceIndex || MAX_NEGATIVE ) - - ( ~a.sourceIndex || MAX_NEGATIVE ); - - // Use IE sourceIndex if available on both nodes - if ( diff ) { - return diff; - } - - // Check if b follows a - if ( cur ) { - while ( (cur = cur.nextSibling) ) { - if ( cur === b ) { - return -1; - } - } - } - - return a ? 
1 : -1; -} - -/** - * Returns a function to use in pseudos for input types - * @param {String} type - */ -function createInputPseudo( type ) { - return function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && elem.type === type; - }; -} - -/** - * Returns a function to use in pseudos for buttons - * @param {String} type - */ -function createButtonPseudo( type ) { - return function( elem ) { - var name = elem.nodeName.toLowerCase(); - return (name === "input" || name === "button") && elem.type === type; - }; -} - -/** - * Returns a function to use in pseudos for positionals - * @param {Function} fn - */ -function createPositionalPseudo( fn ) { - return markFunction(function( argument ) { - argument = +argument; - return markFunction(function( seed, matches ) { - var j, - matchIndexes = fn( [], seed.length, argument ), - i = matchIndexes.length; - - // Match elements found at the specified indexes - while ( i-- ) { - if ( seed[ (j = matchIndexes[i]) ] ) { - seed[j] = !(matches[j] = seed[j]); - } - } - }); - }); -} - -/** - * Detect xml - * @param {Element|Object} elem An element or a document - */ -isXML = Sizzle.isXML = function( elem ) { - // documentElement is verified for cases where it doesn't yet exist - // (such as loading iframes in IE - #4833) - var documentElement = elem && (elem.ownerDocument || elem).documentElement; - return documentElement ? documentElement.nodeName !== "HTML" : false; -}; - -// Expose support vars for convenience -support = Sizzle.support = {}; - -/** - * Sets document-related variables once based on the current document - * @param {Element|Object} [doc] An element or document object to use to set the document - * @returns {Object} Returns the current document - */ -setDocument = Sizzle.setDocument = function( node ) { - var doc = node ? 
node.ownerDocument || node : preferredDoc, - parent = doc.defaultView; - - // If no document and documentElement is available, return - if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) { - return document; - } - - // Set our document - document = doc; - docElem = doc.documentElement; - - // Support tests - documentIsHTML = !isXML( doc ); - - // Support: IE>8 - // If iframe document is assigned to "document" variable and if iframe has been reloaded, - // IE will throw "permission denied" error when accessing "document" variable, see jQuery #13936 - // IE6-8 do not support the defaultView property so parent will be undefined - if ( parent && parent.attachEvent && parent !== parent.top ) { - parent.attachEvent( "onbeforeunload", function() { - setDocument(); - }); - } - - /* Attributes - ---------------------------------------------------------------------- */ - - // Support: IE<8 - // Verify that getAttribute really returns attributes and not properties (excepting IE8 booleans) - support.attributes = assert(function( div ) { - div.className = "i"; - return !div.getAttribute("className"); - }); - - /* getElement(s)By* - ---------------------------------------------------------------------- */ - - // Check if getElementsByTagName("*") returns only elements - support.getElementsByTagName = assert(function( div ) { - div.appendChild( doc.createComment("") ); - return !div.getElementsByTagName("*").length; - }); - - // Check if getElementsByClassName can be trusted - support.getElementsByClassName = assert(function( div ) { - div.innerHTML = "
    "; - - // Support: Safari<4 - // Catch class over-caching - div.firstChild.className = "i"; - // Support: Opera<10 - // Catch gEBCN failure to find non-leading classes - return div.getElementsByClassName("i").length === 2; - }); - - // Support: IE<10 - // Check if getElementById returns elements by name - // The broken getElementById methods don't pick up programatically-set names, - // so use a roundabout getElementsByName test - support.getById = assert(function( div ) { - docElem.appendChild( div ).id = expando; - return !doc.getElementsByName || !doc.getElementsByName( expando ).length; - }); - - // ID find and filter - if ( support.getById ) { - Expr.find["ID"] = function( id, context ) { - if ( typeof context.getElementById !== strundefined && documentIsHTML ) { - var m = context.getElementById( id ); - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - return m && m.parentNode ? [m] : []; - } - }; - Expr.filter["ID"] = function( id ) { - var attrId = id.replace( runescape, funescape ); - return function( elem ) { - return elem.getAttribute("id") === attrId; - }; - }; - } else { - // Support: IE6/7 - // getElementById is not reliable as a find shortcut - delete Expr.find["ID"]; - - Expr.filter["ID"] = function( id ) { - var attrId = id.replace( runescape, funescape ); - return function( elem ) { - var node = typeof elem.getAttributeNode !== strundefined && elem.getAttributeNode("id"); - return node && node.value === attrId; - }; - }; - } - - // Tag - Expr.find["TAG"] = support.getElementsByTagName ? 
- function( tag, context ) { - if ( typeof context.getElementsByTagName !== strundefined ) { - return context.getElementsByTagName( tag ); - } - } : - function( tag, context ) { - var elem, - tmp = [], - i = 0, - results = context.getElementsByTagName( tag ); - - // Filter out possible comments - if ( tag === "*" ) { - while ( (elem = results[i++]) ) { - if ( elem.nodeType === 1 ) { - tmp.push( elem ); - } - } - - return tmp; - } - return results; - }; - - // Class - Expr.find["CLASS"] = support.getElementsByClassName && function( className, context ) { - if ( typeof context.getElementsByClassName !== strundefined && documentIsHTML ) { - return context.getElementsByClassName( className ); - } - }; - - /* QSA/matchesSelector - ---------------------------------------------------------------------- */ - - // QSA and matchesSelector support - - // matchesSelector(:active) reports false when true (IE9/Opera 11.5) - rbuggyMatches = []; - - // qSa(:focus) reports false when true (Chrome 21) - // We allow this because of a bug in IE8/9 that throws an error - // whenever `document.activeElement` is accessed on an iframe - // So, we allow :focus to pass through QSA all the time to avoid the IE error - // See http://bugs.jquery.com/ticket/13378 - rbuggyQSA = []; - - if ( (support.qsa = rnative.test( doc.querySelectorAll )) ) { - // Build QSA regex - // Regex strategy adopted from Diego Perini - assert(function( div ) { - // Select is set to empty string on purpose - // This is to test IE's treatment of not explicitly - // setting a boolean content attribute, - // since its presence should be enough - // http://bugs.jquery.com/ticket/12359 - div.innerHTML = ""; - - // Support: IE8 - // Boolean attributes and "value" are not treated correctly - if ( !div.querySelectorAll("[selected]").length ) { - rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" ); - } - - // Webkit/Opera - :checked should return selected option elements - // 
http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked - // IE8 throws error here and will not see later tests - if ( !div.querySelectorAll(":checked").length ) { - rbuggyQSA.push(":checked"); - } - }); - - assert(function( div ) { - - // Support: Opera 10-12/IE8 - // ^= $= *= and empty values - // Should not select anything - // Support: Windows 8 Native Apps - // The type attribute is restricted during .innerHTML assignment - var input = doc.createElement("input"); - input.setAttribute( "type", "hidden" ); - div.appendChild( input ).setAttribute( "t", "" ); - - if ( div.querySelectorAll("[t^='']").length ) { - rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" ); - } - - // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) - // IE8 throws error here and will not see later tests - if ( !div.querySelectorAll(":enabled").length ) { - rbuggyQSA.push( ":enabled", ":disabled" ); - } - - // Opera 10-11 does not throw on post-comma invalid pseudos - div.querySelectorAll("*,:x"); - rbuggyQSA.push(",.*:"); - }); - } - - if ( (support.matchesSelector = rnative.test( (matches = docElem.webkitMatchesSelector || - docElem.mozMatchesSelector || - docElem.oMatchesSelector || - docElem.msMatchesSelector) )) ) { - - assert(function( div ) { - // Check to see if it's possible to do matchesSelector - // on a disconnected node (IE 9) - support.disconnectedMatch = matches.call( div, "div" ); - - // This should fail with an exception - // Gecko does not error, returns false instead - matches.call( div, "[s!='']:x" ); - rbuggyMatches.push( "!=", pseudos ); - }); - } - - rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join("|") ); - rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join("|") ); - - /* Contains - ---------------------------------------------------------------------- */ - - // Element contains another - // Purposefully does not implement inclusive descendent - // As in, an element does not contain itself - 
contains = rnative.test( docElem.contains ) || docElem.compareDocumentPosition ? - function( a, b ) { - var adown = a.nodeType === 9 ? a.documentElement : a, - bup = b && b.parentNode; - return a === bup || !!( bup && bup.nodeType === 1 && ( - adown.contains ? - adown.contains( bup ) : - a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 - )); - } : - function( a, b ) { - if ( b ) { - while ( (b = b.parentNode) ) { - if ( b === a ) { - return true; - } - } - } - return false; - }; - - /* Sorting - ---------------------------------------------------------------------- */ - - // Document order sorting - sortOrder = docElem.compareDocumentPosition ? - function( a, b ) { - - // Flag for duplicate removal - if ( a === b ) { - hasDuplicate = true; - return 0; - } - - var compare = b.compareDocumentPosition && a.compareDocumentPosition && a.compareDocumentPosition( b ); - - if ( compare ) { - // Disconnected nodes - if ( compare & 1 || - (!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) { - - // Choose the first element that is related to our preferred document - if ( a === doc || contains(preferredDoc, a) ) { - return -1; - } - if ( b === doc || contains(preferredDoc, b) ) { - return 1; - } - - // Maintain original order - return sortInput ? - ( indexOf.call( sortInput, a ) - indexOf.call( sortInput, b ) ) : - 0; - } - - return compare & 4 ? -1 : 1; - } - - // Not directly comparable, sort on existence of method - return a.compareDocumentPosition ? -1 : 1; - } : - function( a, b ) { - var cur, - i = 0, - aup = a.parentNode, - bup = b.parentNode, - ap = [ a ], - bp = [ b ]; - - // Exit early if the nodes are identical - if ( a === b ) { - hasDuplicate = true; - return 0; - - // Parentless nodes are either documents or disconnected - } else if ( !aup || !bup ) { - return a === doc ? -1 : - b === doc ? 1 : - aup ? -1 : - bup ? 1 : - sortInput ? 
- ( indexOf.call( sortInput, a ) - indexOf.call( sortInput, b ) ) : - 0; - - // If the nodes are siblings, we can do a quick check - } else if ( aup === bup ) { - return siblingCheck( a, b ); - } - - // Otherwise we need full lists of their ancestors for comparison - cur = a; - while ( (cur = cur.parentNode) ) { - ap.unshift( cur ); - } - cur = b; - while ( (cur = cur.parentNode) ) { - bp.unshift( cur ); - } - - // Walk down the tree looking for a discrepancy - while ( ap[i] === bp[i] ) { - i++; - } - - return i ? - // Do a sibling check if the nodes have a common ancestor - siblingCheck( ap[i], bp[i] ) : - - // Otherwise nodes in our document sort first - ap[i] === preferredDoc ? -1 : - bp[i] === preferredDoc ? 1 : - 0; - }; - - return doc; -}; - -Sizzle.matches = function( expr, elements ) { - return Sizzle( expr, null, null, elements ); -}; - -Sizzle.matchesSelector = function( elem, expr ) { - // Set document vars if needed - if ( ( elem.ownerDocument || elem ) !== document ) { - setDocument( elem ); - } - - // Make sure that attribute selectors are quoted - expr = expr.replace( rattributeQuotes, "='$1']" ); - - if ( support.matchesSelector && documentIsHTML && - ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && - ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { - - try { - var ret = matches.call( elem, expr ); - - // IE 9's matchesSelector returns false on disconnected nodes - if ( ret || support.disconnectedMatch || - // As well, disconnected nodes are said to be in a document - // fragment in IE 9 - elem.document && elem.document.nodeType !== 11 ) { - return ret; - } - } catch(e) {} - } - - return Sizzle( expr, document, null, [elem] ).length > 0; -}; - -Sizzle.contains = function( context, elem ) { - // Set document vars if needed - if ( ( context.ownerDocument || context ) !== document ) { - setDocument( context ); - } - return contains( context, elem ); -}; - -Sizzle.attr = function( elem, name ) { - // Set document vars if needed - if ( ( 
elem.ownerDocument || elem ) !== document ) { - setDocument( elem ); - } - - var fn = Expr.attrHandle[ name.toLowerCase() ], - // Don't get fooled by Object.prototype properties (jQuery #13807) - val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? - fn( elem, name, !documentIsHTML ) : - undefined; - - return val === undefined ? - support.attributes || !documentIsHTML ? - elem.getAttribute( name ) : - (val = elem.getAttributeNode(name)) && val.specified ? - val.value : - null : - val; -}; - -Sizzle.error = function( msg ) { - throw new Error( "Syntax error, unrecognized expression: " + msg ); -}; - -/** - * Document sorting and removing duplicates - * @param {ArrayLike} results - */ -Sizzle.uniqueSort = function( results ) { - var elem, - duplicates = [], - j = 0, - i = 0; - - // Unless we *know* we can detect duplicates, assume their presence - hasDuplicate = !support.detectDuplicates; - sortInput = !support.sortStable && results.slice( 0 ); - results.sort( sortOrder ); - - if ( hasDuplicate ) { - while ( (elem = results[i++]) ) { - if ( elem === results[ i ] ) { - j = duplicates.push( i ); - } - } - while ( j-- ) { - results.splice( duplicates[ j ], 1 ); - } - } - - return results; -}; - -/** - * Utility function for retrieving the text value of an array of DOM nodes - * @param {Array|Element} elem - */ -getText = Sizzle.getText = function( elem ) { - var node, - ret = "", - i = 0, - nodeType = elem.nodeType; - - if ( !nodeType ) { - // If no nodeType, this is expected to be an array - for ( ; (node = elem[i]); i++ ) { - // Do not traverse comment nodes - ret += getText( node ); - } - } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { - // Use textContent for elements - // innerText usage removed for consistency of new lines (see #11153) - if ( typeof elem.textContent === "string" ) { - return elem.textContent; - } else { - // Traverse its children - for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { - ret += getText( 
elem ); - } - } - } else if ( nodeType === 3 || nodeType === 4 ) { - return elem.nodeValue; - } - // Do not include comment or processing instruction nodes - - return ret; -}; - -Expr = Sizzle.selectors = { - - // Can be adjusted by the user - cacheLength: 50, - - createPseudo: markFunction, - - match: matchExpr, - - attrHandle: {}, - - find: {}, - - relative: { - ">": { dir: "parentNode", first: true }, - " ": { dir: "parentNode" }, - "+": { dir: "previousSibling", first: true }, - "~": { dir: "previousSibling" } - }, - - preFilter: { - "ATTR": function( match ) { - match[1] = match[1].replace( runescape, funescape ); - - // Move the given value to match[3] whether quoted or unquoted - match[3] = ( match[4] || match[5] || "" ).replace( runescape, funescape ); - - if ( match[2] === "~=" ) { - match[3] = " " + match[3] + " "; - } - - return match.slice( 0, 4 ); - }, - - "CHILD": function( match ) { - /* matches from matchExpr["CHILD"] - 1 type (only|nth|...) - 2 what (child|of-type) - 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) - 4 xn-component of xn+y argument ([+-]?\d*n|) - 5 sign of xn-component - 6 x of xn-component - 7 sign of y-component - 8 y of y-component - */ - match[1] = match[1].toLowerCase(); - - if ( match[1].slice( 0, 3 ) === "nth" ) { - // nth-* requires argument - if ( !match[3] ) { - Sizzle.error( match[0] ); - } - - // numeric x and y parameters for Expr.filter.CHILD - // remember that false/true cast respectively to 0/1 - match[4] = +( match[4] ? 
match[5] + (match[6] || 1) : 2 * ( match[3] === "even" || match[3] === "odd" ) ); - match[5] = +( ( match[7] + match[8] ) || match[3] === "odd" ); - - // other types prohibit arguments - } else if ( match[3] ) { - Sizzle.error( match[0] ); - } - - return match; - }, - - "PSEUDO": function( match ) { - var excess, - unquoted = !match[5] && match[2]; - - if ( matchExpr["CHILD"].test( match[0] ) ) { - return null; - } - - // Accept quoted arguments as-is - if ( match[3] && match[4] !== undefined ) { - match[2] = match[4]; - - // Strip excess characters from unquoted arguments - } else if ( unquoted && rpseudo.test( unquoted ) && - // Get excess from tokenize (recursively) - (excess = tokenize( unquoted, true )) && - // advance to the next closing parenthesis - (excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) { - - // excess is a negative index - match[0] = match[0].slice( 0, excess ); - match[2] = unquoted.slice( 0, excess ); - } - - // Return only captures needed by the pseudo filter method (type and argument) - return match.slice( 0, 3 ); - } - }, - - filter: { - - "TAG": function( nodeNameSelector ) { - var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); - return nodeNameSelector === "*" ? 
- function() { return true; } : - function( elem ) { - return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; - }; - }, - - "CLASS": function( className ) { - var pattern = classCache[ className + " " ]; - - return pattern || - (pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) && - classCache( className, function( elem ) { - return pattern.test( typeof elem.className === "string" && elem.className || typeof elem.getAttribute !== strundefined && elem.getAttribute("class") || "" ); - }); - }, - - "ATTR": function( name, operator, check ) { - return function( elem ) { - var result = Sizzle.attr( elem, name ); - - if ( result == null ) { - return operator === "!="; - } - if ( !operator ) { - return true; - } - - result += ""; - - return operator === "=" ? result === check : - operator === "!=" ? result !== check : - operator === "^=" ? check && result.indexOf( check ) === 0 : - operator === "*=" ? check && result.indexOf( check ) > -1 : - operator === "$=" ? check && result.slice( -check.length ) === check : - operator === "~=" ? ( " " + result + " " ).indexOf( check ) > -1 : - operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : - false; - }; - }, - - "CHILD": function( type, what, argument, first, last ) { - var simple = type.slice( 0, 3 ) !== "nth", - forward = type.slice( -4 ) !== "last", - ofType = what === "of-type"; - - return first === 1 && last === 0 ? - - // Shortcut for :nth-*(n) - function( elem ) { - return !!elem.parentNode; - } : - - function( elem, context, xml ) { - var cache, outerCache, node, diff, nodeIndex, start, - dir = simple !== forward ? "nextSibling" : "previousSibling", - parent = elem.parentNode, - name = ofType && elem.nodeName.toLowerCase(), - useCache = !xml && !ofType; - - if ( parent ) { - - // :(first|last|only)-(child|of-type) - if ( simple ) { - while ( dir ) { - node = elem; - while ( (node = node[ dir ]) ) { - if ( ofType ? 
node.nodeName.toLowerCase() === name : node.nodeType === 1 ) { - return false; - } - } - // Reverse direction for :only-* (if we haven't yet done so) - start = dir = type === "only" && !start && "nextSibling"; - } - return true; - } - - start = [ forward ? parent.firstChild : parent.lastChild ]; - - // non-xml :nth-child(...) stores cache data on `parent` - if ( forward && useCache ) { - // Seek `elem` from a previously-cached index - outerCache = parent[ expando ] || (parent[ expando ] = {}); - cache = outerCache[ type ] || []; - nodeIndex = cache[0] === dirruns && cache[1]; - diff = cache[0] === dirruns && cache[2]; - node = nodeIndex && parent.childNodes[ nodeIndex ]; - - while ( (node = ++nodeIndex && node && node[ dir ] || - - // Fallback to seeking `elem` from the start - (diff = nodeIndex = 0) || start.pop()) ) { - - // When found, cache indexes on `parent` and break - if ( node.nodeType === 1 && ++diff && node === elem ) { - outerCache[ type ] = [ dirruns, nodeIndex, diff ]; - break; - } - } - - // Use previously-cached element index if available - } else if ( useCache && (cache = (elem[ expando ] || (elem[ expando ] = {}))[ type ]) && cache[0] === dirruns ) { - diff = cache[1]; - - // xml :nth-child(...) or :nth-last-child(...) or :nth(-last)?-of-type(...) - } else { - // Use the same loop as above to seek `elem` from the start - while ( (node = ++nodeIndex && node && node[ dir ] || - (diff = nodeIndex = 0) || start.pop()) ) { - - if ( ( ofType ? 
node.nodeName.toLowerCase() === name : node.nodeType === 1 ) && ++diff ) { - // Cache the index of each encountered element - if ( useCache ) { - (node[ expando ] || (node[ expando ] = {}))[ type ] = [ dirruns, diff ]; - } - - if ( node === elem ) { - break; - } - } - } - } - - // Incorporate the offset, then check against cycle size - diff -= last; - return diff === first || ( diff % first === 0 && diff / first >= 0 ); - } - }; - }, - - "PSEUDO": function( pseudo, argument ) { - // pseudo-class names are case-insensitive - // http://www.w3.org/TR/selectors/#pseudo-classes - // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters - // Remember that setFilters inherits from pseudos - var args, - fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || - Sizzle.error( "unsupported pseudo: " + pseudo ); - - // The user may use createPseudo to indicate that - // arguments are needed to create the filter function - // just as Sizzle does - if ( fn[ expando ] ) { - return fn( argument ); - } - - // But maintain support for old signatures - if ( fn.length > 1 ) { - args = [ pseudo, pseudo, "", argument ]; - return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? - markFunction(function( seed, matches ) { - var idx, - matched = fn( seed, argument ), - i = matched.length; - while ( i-- ) { - idx = indexOf.call( seed, matched[i] ); - seed[ idx ] = !( matches[ idx ] = matched[i] ); - } - }) : - function( elem ) { - return fn( elem, 0, args ); - }; - } - - return fn; - } - }, - - pseudos: { - // Potentially complex pseudos - "not": markFunction(function( selector ) { - // Trim the selector passed to compile - // to avoid treating leading and trailing - // spaces as combinators - var input = [], - results = [], - matcher = compile( selector.replace( rtrim, "$1" ) ); - - return matcher[ expando ] ? 
- markFunction(function( seed, matches, context, xml ) { - var elem, - unmatched = matcher( seed, null, xml, [] ), - i = seed.length; - - // Match elements unmatched by `matcher` - while ( i-- ) { - if ( (elem = unmatched[i]) ) { - seed[i] = !(matches[i] = elem); - } - } - }) : - function( elem, context, xml ) { - input[0] = elem; - matcher( input, null, xml, results ); - return !results.pop(); - }; - }), - - "has": markFunction(function( selector ) { - return function( elem ) { - return Sizzle( selector, elem ).length > 0; - }; - }), - - "contains": markFunction(function( text ) { - return function( elem ) { - return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1; - }; - }), - - // "Whether an element is represented by a :lang() selector - // is based solely on the element's language value - // being equal to the identifier C, - // or beginning with the identifier C immediately followed by "-". - // The matching of C against the element's language value is performed case-insensitively. - // The identifier C does not have to be a valid language name." - // http://www.w3.org/TR/selectors/#lang-pseudo - "lang": markFunction( function( lang ) { - // lang value must be a valid identifier - if ( !ridentifier.test(lang || "") ) { - Sizzle.error( "unsupported lang: " + lang ); - } - lang = lang.replace( runescape, funescape ).toLowerCase(); - return function( elem ) { - var elemLang; - do { - if ( (elemLang = documentIsHTML ? 
- elem.lang : - elem.getAttribute("xml:lang") || elem.getAttribute("lang")) ) { - - elemLang = elemLang.toLowerCase(); - return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; - } - } while ( (elem = elem.parentNode) && elem.nodeType === 1 ); - return false; - }; - }), - - // Miscellaneous - "target": function( elem ) { - var hash = window.location && window.location.hash; - return hash && hash.slice( 1 ) === elem.id; - }, - - "root": function( elem ) { - return elem === docElem; - }, - - "focus": function( elem ) { - return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex); - }, - - // Boolean properties - "enabled": function( elem ) { - return elem.disabled === false; - }, - - "disabled": function( elem ) { - return elem.disabled === true; - }, - - "checked": function( elem ) { - // In CSS3, :checked should return both checked and selected elements - // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked - var nodeName = elem.nodeName.toLowerCase(); - return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected); - }, - - "selected": function( elem ) { - // Accessing this property makes selected-by-default - // options in Safari work properly - if ( elem.parentNode ) { - elem.parentNode.selectedIndex; - } - - return elem.selected === true; - }, - - // Contents - "empty": function( elem ) { - // http://www.w3.org/TR/selectors/#empty-pseudo - // :empty is only affected by element nodes and content nodes(including text(3), cdata(4)), - // not comment, processing instructions, or others - // Thanks to Diego Perini for the nodeName shortcut - // Greater than "@" means alpha characters (specifically not starting with "#" or "?") - for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { - if ( elem.nodeName > "@" || elem.nodeType === 3 || elem.nodeType === 4 ) { - return false; - } - } - return true; - }, - - "parent": function( elem 
) { - return !Expr.pseudos["empty"]( elem ); - }, - - // Element/input types - "header": function( elem ) { - return rheader.test( elem.nodeName ); - }, - - "input": function( elem ) { - return rinputs.test( elem.nodeName ); - }, - - "button": function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && elem.type === "button" || name === "button"; - }, - - "text": function( elem ) { - var attr; - // IE6 and 7 will map elem.type to 'text' for new HTML5 types (search, etc) - // use getAttribute instead to test this case - return elem.nodeName.toLowerCase() === "input" && - elem.type === "text" && - ( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === elem.type ); - }, - - // Position-in-collection - "first": createPositionalPseudo(function() { - return [ 0 ]; - }), - - "last": createPositionalPseudo(function( matchIndexes, length ) { - return [ length - 1 ]; - }), - - "eq": createPositionalPseudo(function( matchIndexes, length, argument ) { - return [ argument < 0 ? argument + length : argument ]; - }), - - "even": createPositionalPseudo(function( matchIndexes, length ) { - var i = 0; - for ( ; i < length; i += 2 ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "odd": createPositionalPseudo(function( matchIndexes, length ) { - var i = 1; - for ( ; i < length; i += 2 ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "lt": createPositionalPseudo(function( matchIndexes, length, argument ) { - var i = argument < 0 ? argument + length : argument; - for ( ; --i >= 0; ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "gt": createPositionalPseudo(function( matchIndexes, length, argument ) { - var i = argument < 0 ? 
argument + length : argument; - for ( ; ++i < length; ) { - matchIndexes.push( i ); - } - return matchIndexes; - }) - } -}; - -Expr.pseudos["nth"] = Expr.pseudos["eq"]; - -// Add button/input type pseudos -for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { - Expr.pseudos[ i ] = createInputPseudo( i ); -} -for ( i in { submit: true, reset: true } ) { - Expr.pseudos[ i ] = createButtonPseudo( i ); -} - -// Easy API for creating new setFilters -function setFilters() {} -setFilters.prototype = Expr.filters = Expr.pseudos; -Expr.setFilters = new setFilters(); - -function tokenize( selector, parseOnly ) { - var matched, match, tokens, type, - soFar, groups, preFilters, - cached = tokenCache[ selector + " " ]; - - if ( cached ) { - return parseOnly ? 0 : cached.slice( 0 ); - } - - soFar = selector; - groups = []; - preFilters = Expr.preFilter; - - while ( soFar ) { - - // Comma and first run - if ( !matched || (match = rcomma.exec( soFar )) ) { - if ( match ) { - // Don't consume trailing commas as valid - soFar = soFar.slice( match[0].length ) || soFar; - } - groups.push( tokens = [] ); - } - - matched = false; - - // Combinators - if ( (match = rcombinators.exec( soFar )) ) { - matched = match.shift(); - tokens.push({ - value: matched, - // Cast descendant combinators to space - type: match[0].replace( rtrim, " " ) - }); - soFar = soFar.slice( matched.length ); - } - - // Filters - for ( type in Expr.filter ) { - if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] || - (match = preFilters[ type ]( match ))) ) { - matched = match.shift(); - tokens.push({ - value: matched, - type: type, - matches: match - }); - soFar = soFar.slice( matched.length ); - } - } - - if ( !matched ) { - break; - } - } - - // Return the length of the invalid excess - // if we're just parsing - // Otherwise, throw an error or return tokens - return parseOnly ? - soFar.length : - soFar ? 
- Sizzle.error( selector ) : - // Cache the tokens - tokenCache( selector, groups ).slice( 0 ); -} - -function toSelector( tokens ) { - var i = 0, - len = tokens.length, - selector = ""; - for ( ; i < len; i++ ) { - selector += tokens[i].value; - } - return selector; -} - -function addCombinator( matcher, combinator, base ) { - var dir = combinator.dir, - checkNonElements = base && dir === "parentNode", - doneName = done++; - - return combinator.first ? - // Check against closest ancestor/preceding element - function( elem, context, xml ) { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - return matcher( elem, context, xml ); - } - } - } : - - // Check against all ancestor/preceding elements - function( elem, context, xml ) { - var data, cache, outerCache, - dirkey = dirruns + " " + doneName; - - // We can't set arbitrary data on XML nodes, so they don't benefit from dir caching - if ( xml ) { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - if ( matcher( elem, context, xml ) ) { - return true; - } - } - } - } else { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - outerCache = elem[ expando ] || (elem[ expando ] = {}); - if ( (cache = outerCache[ dir ]) && cache[0] === dirkey ) { - if ( (data = cache[1]) === true || data === cachedruns ) { - return data === true; - } - } else { - cache = outerCache[ dir ] = [ dirkey ]; - cache[1] = matcher( elem, context, xml ) || cachedruns; - if ( cache[1] === true ) { - return true; - } - } - } - } - } - }; -} - -function elementMatcher( matchers ) { - return matchers.length > 1 ? 
- function( elem, context, xml ) { - var i = matchers.length; - while ( i-- ) { - if ( !matchers[i]( elem, context, xml ) ) { - return false; - } - } - return true; - } : - matchers[0]; -} - -function condense( unmatched, map, filter, context, xml ) { - var elem, - newUnmatched = [], - i = 0, - len = unmatched.length, - mapped = map != null; - - for ( ; i < len; i++ ) { - if ( (elem = unmatched[i]) ) { - if ( !filter || filter( elem, context, xml ) ) { - newUnmatched.push( elem ); - if ( mapped ) { - map.push( i ); - } - } - } - } - - return newUnmatched; -} - -function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { - if ( postFilter && !postFilter[ expando ] ) { - postFilter = setMatcher( postFilter ); - } - if ( postFinder && !postFinder[ expando ] ) { - postFinder = setMatcher( postFinder, postSelector ); - } - return markFunction(function( seed, results, context, xml ) { - var temp, i, elem, - preMap = [], - postMap = [], - preexisting = results.length, - - // Get initial elements from seed or context - elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ), - - // Prefilter to get matcher input, preserving a map for seed-results synchronization - matcherIn = preFilter && ( seed || !selector ) ? - condense( elems, preMap, preFilter, context, xml ) : - elems, - - matcherOut = matcher ? - // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, - postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
- - // ...intermediate processing is necessary - [] : - - // ...otherwise use results directly - results : - matcherIn; - - // Find primary matches - if ( matcher ) { - matcher( matcherIn, matcherOut, context, xml ); - } - - // Apply postFilter - if ( postFilter ) { - temp = condense( matcherOut, postMap ); - postFilter( temp, [], context, xml ); - - // Un-match failing elements by moving them back to matcherIn - i = temp.length; - while ( i-- ) { - if ( (elem = temp[i]) ) { - matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem); - } - } - } - - if ( seed ) { - if ( postFinder || preFilter ) { - if ( postFinder ) { - // Get the final matcherOut by condensing this intermediate into postFinder contexts - temp = []; - i = matcherOut.length; - while ( i-- ) { - if ( (elem = matcherOut[i]) ) { - // Restore matcherIn since elem is not yet a final match - temp.push( (matcherIn[i] = elem) ); - } - } - postFinder( null, (matcherOut = []), temp, xml ); - } - - // Move matched elements from seed to results to keep them synchronized - i = matcherOut.length; - while ( i-- ) { - if ( (elem = matcherOut[i]) && - (temp = postFinder ? indexOf.call( seed, elem ) : preMap[i]) > -1 ) { - - seed[temp] = !(results[temp] = elem); - } - } - } - - // Add elements to results, through postFinder if defined - } else { - matcherOut = condense( - matcherOut === results ? - matcherOut.splice( preexisting, matcherOut.length ) : - matcherOut - ); - if ( postFinder ) { - postFinder( null, results, matcherOut, xml ); - } else { - push.apply( results, matcherOut ); - } - } - }); -} - -function matcherFromTokens( tokens ) { - var checkContext, matcher, j, - len = tokens.length, - leadingRelative = Expr.relative[ tokens[0].type ], - implicitRelative = leadingRelative || Expr.relative[" "], - i = leadingRelative ? 
1 : 0, - - // The foundational matcher ensures that elements are reachable from top-level context(s) - matchContext = addCombinator( function( elem ) { - return elem === checkContext; - }, implicitRelative, true ), - matchAnyContext = addCombinator( function( elem ) { - return indexOf.call( checkContext, elem ) > -1; - }, implicitRelative, true ), - matchers = [ function( elem, context, xml ) { - return ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( - (checkContext = context).nodeType ? - matchContext( elem, context, xml ) : - matchAnyContext( elem, context, xml ) ); - } ]; - - for ( ; i < len; i++ ) { - if ( (matcher = Expr.relative[ tokens[i].type ]) ) { - matchers = [ addCombinator(elementMatcher( matchers ), matcher) ]; - } else { - matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches ); - - // Return special upon seeing a positional matcher - if ( matcher[ expando ] ) { - // Find the next relative operator (if any) for proper handling - j = ++i; - for ( ; j < len; j++ ) { - if ( Expr.relative[ tokens[j].type ] ) { - break; - } - } - return setMatcher( - i > 1 && elementMatcher( matchers ), - i > 1 && toSelector( - // If the preceding token was a descendant combinator, insert an implicit any-element `*` - tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? 
"*" : "" }) - ).replace( rtrim, "$1" ), - matcher, - i < j && matcherFromTokens( tokens.slice( i, j ) ), - j < len && matcherFromTokens( (tokens = tokens.slice( j )) ), - j < len && toSelector( tokens ) - ); - } - matchers.push( matcher ); - } - } - - return elementMatcher( matchers ); -} - -function matcherFromGroupMatchers( elementMatchers, setMatchers ) { - // A counter to specify which element is currently being matched - var matcherCachedRuns = 0, - bySet = setMatchers.length > 0, - byElement = elementMatchers.length > 0, - superMatcher = function( seed, context, xml, results, expandContext ) { - var elem, j, matcher, - setMatched = [], - matchedCount = 0, - i = "0", - unmatched = seed && [], - outermost = expandContext != null, - contextBackup = outermostContext, - // We must always have either seed elements or context - elems = seed || byElement && Expr.find["TAG"]( "*", expandContext && context.parentNode || context ), - // Use integer dirruns iff this is the outermost matcher - dirrunsUnique = (dirruns += contextBackup == null ? 
1 : Math.random() || 0.1); - - if ( outermost ) { - outermostContext = context !== document && context; - cachedruns = matcherCachedRuns; - } - - // Add elements passing elementMatchers directly to results - // Keep `i` a string if there are no elements so `matchedCount` will be "00" below - for ( ; (elem = elems[i]) != null; i++ ) { - if ( byElement && elem ) { - j = 0; - while ( (matcher = elementMatchers[j++]) ) { - if ( matcher( elem, context, xml ) ) { - results.push( elem ); - break; - } - } - if ( outermost ) { - dirruns = dirrunsUnique; - cachedruns = ++matcherCachedRuns; - } - } - - // Track unmatched elements for set filters - if ( bySet ) { - // They will have gone through all possible matchers - if ( (elem = !matcher && elem) ) { - matchedCount--; - } - - // Lengthen the array for every element, matched or not - if ( seed ) { - unmatched.push( elem ); - } - } - } - - // Apply set filters to unmatched elements - matchedCount += i; - if ( bySet && i !== matchedCount ) { - j = 0; - while ( (matcher = setMatchers[j++]) ) { - matcher( unmatched, setMatched, context, xml ); - } - - if ( seed ) { - // Reintegrate element matches to eliminate the need for sorting - if ( matchedCount > 0 ) { - while ( i-- ) { - if ( !(unmatched[i] || setMatched[i]) ) { - setMatched[i] = pop.call( results ); - } - } - } - - // Discard index placeholder values to get only actual matches - setMatched = condense( setMatched ); - } - - // Add matches to results - push.apply( results, setMatched ); - - // Seedless set matches succeeding multiple successful matchers stipulate sorting - if ( outermost && !seed && setMatched.length > 0 && - ( matchedCount + setMatchers.length ) > 1 ) { - - Sizzle.uniqueSort( results ); - } - } - - // Override manipulation of globals by nested matchers - if ( outermost ) { - dirruns = dirrunsUnique; - outermostContext = contextBackup; - } - - return unmatched; - }; - - return bySet ? 
- markFunction( superMatcher ) : - superMatcher; -} - -compile = Sizzle.compile = function( selector, group /* Internal Use Only */ ) { - var i, - setMatchers = [], - elementMatchers = [], - cached = compilerCache[ selector + " " ]; - - if ( !cached ) { - // Generate a function of recursive functions that can be used to check each element - if ( !group ) { - group = tokenize( selector ); - } - i = group.length; - while ( i-- ) { - cached = matcherFromTokens( group[i] ); - if ( cached[ expando ] ) { - setMatchers.push( cached ); - } else { - elementMatchers.push( cached ); - } - } - - // Cache the compiled function - cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) ); - } - return cached; -}; - -function multipleContexts( selector, contexts, results ) { - var i = 0, - len = contexts.length; - for ( ; i < len; i++ ) { - Sizzle( selector, contexts[i], results ); - } - return results; -} - -function select( selector, context, results, seed ) { - var i, tokens, token, type, find, - match = tokenize( selector ); - - if ( !seed ) { - // Try to minimize operations if there is only one group - if ( match.length === 1 ) { - - // Take a shortcut and set the context if the root selector is an ID - tokens = match[0] = match[0].slice( 0 ); - if ( tokens.length > 2 && (token = tokens[0]).type === "ID" && - support.getById && context.nodeType === 9 && documentIsHTML && - Expr.relative[ tokens[1].type ] ) { - - context = ( Expr.find["ID"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0]; - if ( !context ) { - return results; - } - selector = selector.slice( tokens.shift().value.length ); - } - - // Fetch a seed set for right-to-left matching - i = matchExpr["needsContext"].test( selector ) ? 
0 : tokens.length; - while ( i-- ) { - token = tokens[i]; - - // Abort if we hit a combinator - if ( Expr.relative[ (type = token.type) ] ) { - break; - } - if ( (find = Expr.find[ type ]) ) { - // Search, expanding context for leading sibling combinators - if ( (seed = find( - token.matches[0].replace( runescape, funescape ), - rsibling.test( tokens[0].type ) && context.parentNode || context - )) ) { - - // If seed is empty or no tokens remain, we can return early - tokens.splice( i, 1 ); - selector = seed.length && toSelector( tokens ); - if ( !selector ) { - push.apply( results, seed ); - return results; - } - - break; - } - } - } - } - } - - // Compile and execute a filtering function - // Provide `match` to avoid retokenization if we modified the selector above - compile( selector, match )( - seed, - context, - !documentIsHTML, - results, - rsibling.test( selector ) - ); - return results; -} - -// One-time assignments - -// Sort stability -support.sortStable = expando.split("").sort( sortOrder ).join("") === expando; - -// Support: Chrome<14 -// Always assume duplicates if they aren't passed to the comparison function -support.detectDuplicates = hasDuplicate; - -// Initialize against the default document -setDocument(); - -// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27) -// Detached nodes confoundingly follow *each other* -support.sortDetached = assert(function( div1 ) { - // Should return 1, but returns 4 (following) - return div1.compareDocumentPosition( document.createElement("div") ) & 1; -}); - -// Support: IE<8 -// Prevent attribute/property "interpolation" -// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx -if ( !assert(function( div ) { - div.innerHTML = ""; - return div.firstChild.getAttribute("href") === "#" ; -}) ) { - addHandle( "type|href|height|width", function( elem, name, isXML ) { - if ( !isXML ) { - return elem.getAttribute( name, name.toLowerCase() === "type" ? 
1 : 2 ); - } - }); -} - -// Support: IE<9 -// Use defaultValue in place of getAttribute("value") -if ( !support.attributes || !assert(function( div ) { - div.innerHTML = ""; - div.firstChild.setAttribute( "value", "" ); - return div.firstChild.getAttribute( "value" ) === ""; -}) ) { - addHandle( "value", function( elem, name, isXML ) { - if ( !isXML && elem.nodeName.toLowerCase() === "input" ) { - return elem.defaultValue; - } - }); -} - -// Support: IE<9 -// Use getAttributeNode to fetch booleans when getAttribute lies -if ( !assert(function( div ) { - return div.getAttribute("disabled") == null; -}) ) { - addHandle( booleans, function( elem, name, isXML ) { - var val; - if ( !isXML ) { - return (val = elem.getAttributeNode( name )) && val.specified ? - val.value : - elem[ name ] === true ? name.toLowerCase() : null; - } - }); -} - -jQuery.find = Sizzle; -jQuery.expr = Sizzle.selectors; -jQuery.expr[":"] = jQuery.expr.pseudos; -jQuery.unique = Sizzle.uniqueSort; -jQuery.text = Sizzle.getText; -jQuery.isXMLDoc = Sizzle.isXML; -jQuery.contains = Sizzle.contains; - - -})( window ); -// String to Object options format cache -var optionsCache = {}; - -// Convert String-formatted options into Object-formatted ones and store in cache -function createOptions( options ) { - var object = optionsCache[ options ] = {}; - jQuery.each( options.match( core_rnotwhite ) || [], function( _, flag ) { - object[ flag ] = true; - }); - return object; -} - -/* - * Create a callback list using the following parameters: - * - * options: an optional list of space-separated options that will change how - * the callback list behaves or a more traditional option object - * - * By default a callback list will act like an event callback list and can be - * "fired" multiple times. 
- * - * Possible options: - * - * once: will ensure the callback list can only be fired once (like a Deferred) - * - * memory: will keep track of previous values and will call any callback added - * after the list has been fired right away with the latest "memorized" - * values (like a Deferred) - * - * unique: will ensure a callback can only be added once (no duplicate in the list) - * - * stopOnFalse: interrupt callings when a callback returns false - * - */ -jQuery.Callbacks = function( options ) { - - // Convert options from String-formatted to Object-formatted if needed - // (we check in cache first) - options = typeof options === "string" ? - ( optionsCache[ options ] || createOptions( options ) ) : - jQuery.extend( {}, options ); - - var // Flag to know if list is currently firing - firing, - // Last fire value (for non-forgettable lists) - memory, - // Flag to know if list was already fired - fired, - // End of the loop when firing - firingLength, - // Index of currently firing callback (modified by remove if needed) - firingIndex, - // First callback to fire (used internally by add and fireWith) - firingStart, - // Actual callback list - list = [], - // Stack of fire calls for repeatable lists - stack = !options.once && [], - // Fire callbacks - fire = function( data ) { - memory = options.memory && data; - fired = true; - firingIndex = firingStart || 0; - firingStart = 0; - firingLength = list.length; - firing = true; - for ( ; list && firingIndex < firingLength; firingIndex++ ) { - if ( list[ firingIndex ].apply( data[ 0 ], data[ 1 ] ) === false && options.stopOnFalse ) { - memory = false; // To prevent further calls using add - break; - } - } - firing = false; - if ( list ) { - if ( stack ) { - if ( stack.length ) { - fire( stack.shift() ); - } - } else if ( memory ) { - list = []; - } else { - self.disable(); - } - } - }, - // Actual Callbacks object - self = { - // Add a callback or a collection of callbacks to the list - add: function() { - if ( list 
) { - // First, we save the current length - var start = list.length; - (function add( args ) { - jQuery.each( args, function( _, arg ) { - var type = jQuery.type( arg ); - if ( type === "function" ) { - if ( !options.unique || !self.has( arg ) ) { - list.push( arg ); - } - } else if ( arg && arg.length && type !== "string" ) { - // Inspect recursively - add( arg ); - } - }); - })( arguments ); - // Do we need to add the callbacks to the - // current firing batch? - if ( firing ) { - firingLength = list.length; - // With memory, if we're not firing then - // we should call right away - } else if ( memory ) { - firingStart = start; - fire( memory ); - } - } - return this; - }, - // Remove a callback from the list - remove: function() { - if ( list ) { - jQuery.each( arguments, function( _, arg ) { - var index; - while( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { - list.splice( index, 1 ); - // Handle firing indexes - if ( firing ) { - if ( index <= firingLength ) { - firingLength--; - } - if ( index <= firingIndex ) { - firingIndex--; - } - } - } - }); - } - return this; - }, - // Check if a given callback is in the list. - // If no argument is given, return whether or not list has callbacks attached. - has: function( fn ) { - return fn ? jQuery.inArray( fn, list ) > -1 : !!( list && list.length ); - }, - // Remove all callbacks from the list - empty: function() { - list = []; - firingLength = 0; - return this; - }, - // Have the list do nothing anymore - disable: function() { - list = stack = memory = undefined; - return this; - }, - // Is it disabled? - disabled: function() { - return !list; - }, - // Lock the list in its current state - lock: function() { - stack = undefined; - if ( !memory ) { - self.disable(); - } - return this; - }, - // Is it locked? 
- locked: function() { - return !stack; - }, - // Call all callbacks with the given context and arguments - fireWith: function( context, args ) { - if ( list && ( !fired || stack ) ) { - args = args || []; - args = [ context, args.slice ? args.slice() : args ]; - if ( firing ) { - stack.push( args ); - } else { - fire( args ); - } - } - return this; - }, - // Call all the callbacks with the given arguments - fire: function() { - self.fireWith( this, arguments ); - return this; - }, - // To know if the callbacks have already been called at least once - fired: function() { - return !!fired; - } - }; - - return self; -}; -jQuery.extend({ - - Deferred: function( func ) { - var tuples = [ - // action, add listener, listener list, final state - [ "resolve", "done", jQuery.Callbacks("once memory"), "resolved" ], - [ "reject", "fail", jQuery.Callbacks("once memory"), "rejected" ], - [ "notify", "progress", jQuery.Callbacks("memory") ] - ], - state = "pending", - promise = { - state: function() { - return state; - }, - always: function() { - deferred.done( arguments ).fail( arguments ); - return this; - }, - then: function( /* fnDone, fnFail, fnProgress */ ) { - var fns = arguments; - return jQuery.Deferred(function( newDefer ) { - jQuery.each( tuples, function( i, tuple ) { - var action = tuple[ 0 ], - fn = jQuery.isFunction( fns[ i ] ) && fns[ i ]; - // deferred[ done | fail | progress ] for forwarding actions to newDefer - deferred[ tuple[1] ](function() { - var returned = fn && fn.apply( this, arguments ); - if ( returned && jQuery.isFunction( returned.promise ) ) { - returned.promise() - .done( newDefer.resolve ) - .fail( newDefer.reject ) - .progress( newDefer.notify ); - } else { - newDefer[ action + "With" ]( this === promise ? newDefer.promise() : this, fn ? 
[ returned ] : arguments ); - } - }); - }); - fns = null; - }).promise(); - }, - // Get a promise for this deferred - // If obj is provided, the promise aspect is added to the object - promise: function( obj ) { - return obj != null ? jQuery.extend( obj, promise ) : promise; - } - }, - deferred = {}; - - // Keep pipe for back-compat - promise.pipe = promise.then; - - // Add list-specific methods - jQuery.each( tuples, function( i, tuple ) { - var list = tuple[ 2 ], - stateString = tuple[ 3 ]; - - // promise[ done | fail | progress ] = list.add - promise[ tuple[1] ] = list.add; - - // Handle state - if ( stateString ) { - list.add(function() { - // state = [ resolved | rejected ] - state = stateString; - - // [ reject_list | resolve_list ].disable; progress_list.lock - }, tuples[ i ^ 1 ][ 2 ].disable, tuples[ 2 ][ 2 ].lock ); - } - - // deferred[ resolve | reject | notify ] - deferred[ tuple[0] ] = function() { - deferred[ tuple[0] + "With" ]( this === deferred ? promise : this, arguments ); - return this; - }; - deferred[ tuple[0] + "With" ] = list.fireWith; - }); - - // Make the deferred a promise - promise.promise( deferred ); - - // Call given func if any - if ( func ) { - func.call( deferred, deferred ); - } - - // All done! - return deferred; - }, - - // Deferred helper - when: function( subordinate /* , ..., subordinateN */ ) { - var i = 0, - resolveValues = core_slice.call( arguments ), - length = resolveValues.length, - - // the count of uncompleted subordinates - remaining = length !== 1 || ( subordinate && jQuery.isFunction( subordinate.promise ) ) ? length : 0, - - // the master Deferred. If resolveValues consist of only a single Deferred, just use that. - deferred = remaining === 1 ? subordinate : jQuery.Deferred(), - - // Update function for both resolve and progress values - updateFunc = function( i, contexts, values ) { - return function( value ) { - contexts[ i ] = this; - values[ i ] = arguments.length > 1 ? 
core_slice.call( arguments ) : value; - if( values === progressValues ) { - deferred.notifyWith( contexts, values ); - } else if ( !( --remaining ) ) { - deferred.resolveWith( contexts, values ); - } - }; - }, - - progressValues, progressContexts, resolveContexts; - - // add listeners to Deferred subordinates; treat others as resolved - if ( length > 1 ) { - progressValues = new Array( length ); - progressContexts = new Array( length ); - resolveContexts = new Array( length ); - for ( ; i < length; i++ ) { - if ( resolveValues[ i ] && jQuery.isFunction( resolveValues[ i ].promise ) ) { - resolveValues[ i ].promise() - .done( updateFunc( i, resolveContexts, resolveValues ) ) - .fail( deferred.reject ) - .progress( updateFunc( i, progressContexts, progressValues ) ); - } else { - --remaining; - } - } - } - - // if we're not waiting on anything, resolve the master - if ( !remaining ) { - deferred.resolveWith( resolveContexts, resolveValues ); - } - - return deferred.promise(); - } -}); -jQuery.support = (function( support ) { - - var all, a, input, select, fragment, opt, eventName, isSupported, i, - div = document.createElement("div"); - - // Setup - div.setAttribute( "className", "t" ); - div.innerHTML = "
    a"; - - // Finish early in limited (non-browser) environments - all = div.getElementsByTagName("*") || []; - a = div.getElementsByTagName("a")[ 0 ]; - if ( !a || !a.style || !all.length ) { - return support; - } - - // First batch of tests - select = document.createElement("select"); - opt = select.appendChild( document.createElement("option") ); - input = div.getElementsByTagName("input")[ 0 ]; - - a.style.cssText = "top:1px;float:left;opacity:.5"; - - // Test setAttribute on camelCase class. If it works, we need attrFixes when doing get/setAttribute (ie6/7) - support.getSetAttribute = div.className !== "t"; - - // IE strips leading whitespace when .innerHTML is used - support.leadingWhitespace = div.firstChild.nodeType === 3; - - // Make sure that tbody elements aren't automatically inserted - // IE will insert them into empty tables - support.tbody = !div.getElementsByTagName("tbody").length; - - // Make sure that link elements get serialized correctly by innerHTML - // This requires a wrapper element in IE - support.htmlSerialize = !!div.getElementsByTagName("link").length; - - // Get the style information from getAttribute - // (IE uses .cssText instead) - support.style = /top/.test( a.getAttribute("style") ); - - // Make sure that URLs aren't manipulated - // (IE normalizes it by default) - support.hrefNormalized = a.getAttribute("href") === "/a"; - - // Make sure that element opacity exists - // (IE uses filter instead) - // Use a regex to work around a WebKit issue. See #5145 - support.opacity = /^0.5/.test( a.style.opacity ); - - // Verify style float existence - // (IE uses styleFloat instead of cssFloat) - support.cssFloat = !!a.style.cssFloat; - - // Check the default checkbox/radio value ("" on WebKit; "on" elsewhere) - support.checkOn = !!input.value; - - // Make sure that a selected-by-default option has a working selected property. 
- // (WebKit defaults to false instead of true, IE too, if it's in an optgroup) - support.optSelected = opt.selected; - - // Tests for enctype support on a form (#6743) - support.enctype = !!document.createElement("form").enctype; - - // Makes sure cloning an html5 element does not cause problems - // Where outerHTML is undefined, this still works - support.html5Clone = document.createElement("nav").cloneNode( true ).outerHTML !== "<:nav>"; - - // Will be defined later - support.inlineBlockNeedsLayout = false; - support.shrinkWrapBlocks = false; - support.pixelPosition = false; - support.deleteExpando = true; - support.noCloneEvent = true; - support.reliableMarginRight = true; - support.boxSizingReliable = true; - - // Make sure checked status is properly cloned - input.checked = true; - support.noCloneChecked = input.cloneNode( true ).checked; - - // Make sure that the options inside disabled selects aren't marked as disabled - // (WebKit marks them as disabled) - select.disabled = true; - support.optDisabled = !opt.disabled; - - // Support: IE<9 - try { - delete div.test; - } catch( e ) { - support.deleteExpando = false; - } - - // Check if we can trust getAttribute("value") - input = document.createElement("input"); - input.setAttribute( "value", "" ); - support.input = input.getAttribute( "value" ) === ""; - - // Check if an input maintains its value after becoming a radio - input.value = "t"; - input.setAttribute( "type", "radio" ); - support.radioValue = input.value === "t"; - - // #11217 - WebKit loses check when the name is after the checked attribute - input.setAttribute( "checked", "t" ); - input.setAttribute( "name", "t" ); - - fragment = document.createDocumentFragment(); - fragment.appendChild( input ); - - // Check if a disconnected checkbox will retain its checked - // value of true after appended to the DOM (IE6/7) - support.appendChecked = input.checked; - - // WebKit doesn't clone checked state correctly in fragments - support.checkClone = 
fragment.cloneNode( true ).cloneNode( true ).lastChild.checked; - - // Support: IE<9 - // Opera does not clone events (and typeof div.attachEvent === undefined). - // IE9-10 clones events bound via attachEvent, but they don't trigger with .click() - if ( div.attachEvent ) { - div.attachEvent( "onclick", function() { - support.noCloneEvent = false; - }); - - div.cloneNode( true ).click(); - } - - // Support: IE<9 (lack submit/change bubble), Firefox 17+ (lack focusin event) - // Beware of CSP restrictions (https://developer.mozilla.org/en/Security/CSP) - for ( i in { submit: true, change: true, focusin: true }) { - div.setAttribute( eventName = "on" + i, "t" ); - - support[ i + "Bubbles" ] = eventName in window || div.attributes[ eventName ].expando === false; - } - - div.style.backgroundClip = "content-box"; - div.cloneNode( true ).style.backgroundClip = ""; - support.clearCloneStyle = div.style.backgroundClip === "content-box"; - - // Support: IE<9 - // Iteration over object's inherited properties before its own. 
- for ( i in jQuery( support ) ) { - break; - } - support.ownLast = i !== "0"; - - // Run tests that need a body at doc ready - jQuery(function() { - var container, marginDiv, tds, - divReset = "padding:0;margin:0;border:0;display:block;box-sizing:content-box;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;", - body = document.getElementsByTagName("body")[0]; - - if ( !body ) { - // Return for frameset docs that don't have a body - return; - } - - container = document.createElement("div"); - container.style.cssText = "border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px"; - - body.appendChild( container ).appendChild( div ); - - // Support: IE8 - // Check if table cells still have offsetWidth/Height when they are set - // to display:none and there are still other visible table cells in a - // table row; if so, offsetWidth/Height are not reliable for use when - // determining if an element has been hidden directly using - // display:none (it is still safe to use offsets if a parent element is - // hidden; don safety goggles and see bug #4512 for more information). - div.innerHTML = "
    t
    "; - tds = div.getElementsByTagName("td"); - tds[ 0 ].style.cssText = "padding:0;margin:0;border:0;display:none"; - isSupported = ( tds[ 0 ].offsetHeight === 0 ); - - tds[ 0 ].style.display = ""; - tds[ 1 ].style.display = "none"; - - // Support: IE8 - // Check if empty table cells still have offsetWidth/Height - support.reliableHiddenOffsets = isSupported && ( tds[ 0 ].offsetHeight === 0 ); - - // Check box-sizing and margin behavior. - div.innerHTML = ""; - div.style.cssText = "box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;"; - - // Workaround failing boxSizing test due to offsetWidth returning wrong value - // with some non-1 values of body zoom, ticket #13543 - jQuery.swap( body, body.style.zoom != null ? { zoom: 1 } : {}, function() { - support.boxSizing = div.offsetWidth === 4; - }); - - // Use window.getComputedStyle because jsdom on node.js will break without it. - if ( window.getComputedStyle ) { - support.pixelPosition = ( window.getComputedStyle( div, null ) || {} ).top !== "1%"; - support.boxSizingReliable = ( window.getComputedStyle( div, null ) || { width: "4px" } ).width === "4px"; - - // Check if div with explicit width and no margin-right incorrectly - // gets computed margin-right based on width of container. 
(#3333) - // Fails in WebKit before Feb 2011 nightlies - // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right - marginDiv = div.appendChild( document.createElement("div") ); - marginDiv.style.cssText = div.style.cssText = divReset; - marginDiv.style.marginRight = marginDiv.style.width = "0"; - div.style.width = "1px"; - - support.reliableMarginRight = - !parseFloat( ( window.getComputedStyle( marginDiv, null ) || {} ).marginRight ); - } - - if ( typeof div.style.zoom !== core_strundefined ) { - // Support: IE<8 - // Check if natively block-level elements act like inline-block - // elements when setting their display to 'inline' and giving - // them layout - div.innerHTML = ""; - div.style.cssText = divReset + "width:1px;padding:1px;display:inline;zoom:1"; - support.inlineBlockNeedsLayout = ( div.offsetWidth === 3 ); - - // Support: IE6 - // Check if elements with layout shrink-wrap their children - div.style.display = "block"; - div.innerHTML = "
    "; - div.firstChild.style.width = "5px"; - support.shrinkWrapBlocks = ( div.offsetWidth !== 3 ); - - if ( support.inlineBlockNeedsLayout ) { - // Prevent IE 6 from affecting layout for positioned elements #11048 - // Prevent IE from shrinking the body in IE 7 mode #12869 - // Support: IE<8 - body.style.zoom = 1; - } - } - - body.removeChild( container ); - - // Null elements to avoid leaks in IE - container = div = tds = marginDiv = null; - }); - - // Null elements to avoid leaks in IE - all = select = fragment = opt = a = input = null; - - return support; -})({}); - -var rbrace = /(?:\{[\s\S]*\}|\[[\s\S]*\])$/, - rmultiDash = /([A-Z])/g; - -function internalData( elem, name, data, pvt /* Internal Use Only */ ){ - if ( !jQuery.acceptData( elem ) ) { - return; - } - - var ret, thisCache, - internalKey = jQuery.expando, - - // We have to handle DOM nodes and JS objects differently because IE6-7 - // can't GC object references properly across the DOM-JS boundary - isNode = elem.nodeType, - - // Only DOM nodes need the global jQuery cache; JS object data is - // attached directly to the object so GC can occur automatically - cache = isNode ? jQuery.cache : elem, - - // Only defining an ID for JS objects if its cache already exists allows - // the code to shortcut on the same path as a DOM node with no cache - id = isNode ? 
elem[ internalKey ] : elem[ internalKey ] && internalKey; - - // Avoid doing any more work than we need to when trying to get data on an - // object that has no data at all - if ( (!id || !cache[id] || (!pvt && !cache[id].data)) && data === undefined && typeof name === "string" ) { - return; - } - - if ( !id ) { - // Only DOM nodes need a new unique ID for each element since their data - // ends up in the global cache - if ( isNode ) { - id = elem[ internalKey ] = core_deletedIds.pop() || jQuery.guid++; - } else { - id = internalKey; - } - } - - if ( !cache[ id ] ) { - // Avoid exposing jQuery metadata on plain JS objects when the object - // is serialized using JSON.stringify - cache[ id ] = isNode ? {} : { toJSON: jQuery.noop }; - } - - // An object can be passed to jQuery.data instead of a key/value pair; this gets - // shallow copied over onto the existing cache - if ( typeof name === "object" || typeof name === "function" ) { - if ( pvt ) { - cache[ id ] = jQuery.extend( cache[ id ], name ); - } else { - cache[ id ].data = jQuery.extend( cache[ id ].data, name ); - } - } - - thisCache = cache[ id ]; - - // jQuery data() is stored in a separate object inside the object's internal data - // cache in order to avoid key collisions between internal data and user-defined - // data. 
- if ( !pvt ) { - if ( !thisCache.data ) { - thisCache.data = {}; - } - - thisCache = thisCache.data; - } - - if ( data !== undefined ) { - thisCache[ jQuery.camelCase( name ) ] = data; - } - - // Check for both converted-to-camel and non-converted data property names - // If a data property was specified - if ( typeof name === "string" ) { - - // First Try to find as-is property data - ret = thisCache[ name ]; - - // Test for null|undefined property data - if ( ret == null ) { - - // Try to find the camelCased property - ret = thisCache[ jQuery.camelCase( name ) ]; - } - } else { - ret = thisCache; - } - - return ret; -} - -function internalRemoveData( elem, name, pvt ) { - if ( !jQuery.acceptData( elem ) ) { - return; - } - - var thisCache, i, - isNode = elem.nodeType, - - // See jQuery.data for more information - cache = isNode ? jQuery.cache : elem, - id = isNode ? elem[ jQuery.expando ] : jQuery.expando; - - // If there is already no cache entry for this object, there is no - // purpose in continuing - if ( !cache[ id ] ) { - return; - } - - if ( name ) { - - thisCache = pvt ? cache[ id ] : cache[ id ].data; - - if ( thisCache ) { - - // Support array or space separated string names for data keys - if ( !jQuery.isArray( name ) ) { - - // try the string as a key before any manipulation - if ( name in thisCache ) { - name = [ name ]; - } else { - - // split the camel cased version by spaces unless a key with the spaces exists - name = jQuery.camelCase( name ); - if ( name in thisCache ) { - name = [ name ]; - } else { - name = name.split(" "); - } - } - } else { - // If "name" is an array of keys... - // When data is initially created, via ("key", "val") signature, - // keys will be converted to camelCase. - // Since there is no way to tell _how_ a key was added, remove - // both plain key and camelCase key. #12786 - // This will only penalize the array argument path. 
- name = name.concat( jQuery.map( name, jQuery.camelCase ) ); - } - - i = name.length; - while ( i-- ) { - delete thisCache[ name[i] ]; - } - - // If there is no data left in the cache, we want to continue - // and let the cache object itself get destroyed - if ( pvt ? !isEmptyDataObject(thisCache) : !jQuery.isEmptyObject(thisCache) ) { - return; - } - } - } - - // See jQuery.data for more information - if ( !pvt ) { - delete cache[ id ].data; - - // Don't destroy the parent cache unless the internal data object - // had been the only thing left in it - if ( !isEmptyDataObject( cache[ id ] ) ) { - return; - } - } - - // Destroy the cache - if ( isNode ) { - jQuery.cleanData( [ elem ], true ); - - // Use delete when supported for expandos or `cache` is not a window per isWindow (#10080) - /* jshint eqeqeq: false */ - } else if ( jQuery.support.deleteExpando || cache != cache.window ) { - /* jshint eqeqeq: true */ - delete cache[ id ]; - - // When all else fails, null - } else { - cache[ id ] = null; - } -} - -jQuery.extend({ - cache: {}, - - // The following elements throw uncatchable exceptions if you - // attempt to add expando properties to them. - noData: { - "applet": true, - "embed": true, - // Ban all objects except for Flash (which handle expandos) - "object": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" - }, - - hasData: function( elem ) { - elem = elem.nodeType ? jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ]; - return !!elem && !isEmptyDataObject( elem ); - }, - - data: function( elem, name, data ) { - return internalData( elem, name, data ); - }, - - removeData: function( elem, name ) { - return internalRemoveData( elem, name ); - }, - - // For internal use only. 
- _data: function( elem, name, data ) { - return internalData( elem, name, data, true ); - }, - - _removeData: function( elem, name ) { - return internalRemoveData( elem, name, true ); - }, - - // A method for determining if a DOM node can handle the data expando - acceptData: function( elem ) { - // Do not set data on non-element because it will not be cleared (#8335). - if ( elem.nodeType && elem.nodeType !== 1 && elem.nodeType !== 9 ) { - return false; - } - - var noData = elem.nodeName && jQuery.noData[ elem.nodeName.toLowerCase() ]; - - // nodes accept data unless otherwise specified; rejection can be conditional - return !noData || noData !== true && elem.getAttribute("classid") === noData; - } -}); - -jQuery.fn.extend({ - data: function( key, value ) { - var attrs, name, - data = null, - i = 0, - elem = this[0]; - - // Special expections of .data basically thwart jQuery.access, - // so implement the relevant behavior ourselves - - // Gets all values - if ( key === undefined ) { - if ( this.length ) { - data = jQuery.data( elem ); - - if ( elem.nodeType === 1 && !jQuery._data( elem, "parsedAttrs" ) ) { - attrs = elem.attributes; - for ( ; i < attrs.length; i++ ) { - name = attrs[i].name; - - if ( name.indexOf("data-") === 0 ) { - name = jQuery.camelCase( name.slice(5) ); - - dataAttr( elem, name, data[ name ] ); - } - } - jQuery._data( elem, "parsedAttrs", true ); - } - } - - return data; - } - - // Sets multiple values - if ( typeof key === "object" ) { - return this.each(function() { - jQuery.data( this, key ); - }); - } - - return arguments.length > 1 ? - - // Sets one value - this.each(function() { - jQuery.data( this, key, value ); - }) : - - // Gets one value - // Try to fetch any internally stored data first - elem ? 
dataAttr( elem, key, jQuery.data( elem, key ) ) : null; - }, - - removeData: function( key ) { - return this.each(function() { - jQuery.removeData( this, key ); - }); - } -}); - -function dataAttr( elem, key, data ) { - // If nothing was found internally, try to fetch any - // data from the HTML5 data-* attribute - if ( data === undefined && elem.nodeType === 1 ) { - - var name = "data-" + key.replace( rmultiDash, "-$1" ).toLowerCase(); - - data = elem.getAttribute( name ); - - if ( typeof data === "string" ) { - try { - data = data === "true" ? true : - data === "false" ? false : - data === "null" ? null : - // Only convert to a number if it doesn't change the string - +data + "" === data ? +data : - rbrace.test( data ) ? jQuery.parseJSON( data ) : - data; - } catch( e ) {} - - // Make sure we set the data so it isn't changed later - jQuery.data( elem, key, data ); - - } else { - data = undefined; - } - } - - return data; -} - -// checks a cache object for emptiness -function isEmptyDataObject( obj ) { - var name; - for ( name in obj ) { - - // if the public data object is empty, the private is still empty - if ( name === "data" && jQuery.isEmptyObject( obj[name] ) ) { - continue; - } - if ( name !== "toJSON" ) { - return false; - } - } - - return true; -} -jQuery.extend({ - queue: function( elem, type, data ) { - var queue; - - if ( elem ) { - type = ( type || "fx" ) + "queue"; - queue = jQuery._data( elem, type ); - - // Speed up dequeue by getting out quickly if this is just a lookup - if ( data ) { - if ( !queue || jQuery.isArray(data) ) { - queue = jQuery._data( elem, type, jQuery.makeArray(data) ); - } else { - queue.push( data ); - } - } - return queue || []; - } - }, - - dequeue: function( elem, type ) { - type = type || "fx"; - - var queue = jQuery.queue( elem, type ), - startLength = queue.length, - fn = queue.shift(), - hooks = jQuery._queueHooks( elem, type ), - next = function() { - jQuery.dequeue( elem, type ); - }; - - // If the fx queue is 
dequeued, always remove the progress sentinel - if ( fn === "inprogress" ) { - fn = queue.shift(); - startLength--; - } - - if ( fn ) { - - // Add a progress sentinel to prevent the fx queue from being - // automatically dequeued - if ( type === "fx" ) { - queue.unshift( "inprogress" ); - } - - // clear up the last queue stop function - delete hooks.stop; - fn.call( elem, next, hooks ); - } - - if ( !startLength && hooks ) { - hooks.empty.fire(); - } - }, - - // not intended for public consumption - generates a queueHooks object, or returns the current one - _queueHooks: function( elem, type ) { - var key = type + "queueHooks"; - return jQuery._data( elem, key ) || jQuery._data( elem, key, { - empty: jQuery.Callbacks("once memory").add(function() { - jQuery._removeData( elem, type + "queue" ); - jQuery._removeData( elem, key ); - }) - }); - } -}); - -jQuery.fn.extend({ - queue: function( type, data ) { - var setter = 2; - - if ( typeof type !== "string" ) { - data = type; - type = "fx"; - setter--; - } - - if ( arguments.length < setter ) { - return jQuery.queue( this[0], type ); - } - - return data === undefined ? - this : - this.each(function() { - var queue = jQuery.queue( this, type, data ); - - // ensure a hooks for this queue - jQuery._queueHooks( this, type ); - - if ( type === "fx" && queue[0] !== "inprogress" ) { - jQuery.dequeue( this, type ); - } - }); - }, - dequeue: function( type ) { - return this.each(function() { - jQuery.dequeue( this, type ); - }); - }, - // Based off of the plugin by Clint Helfers, with permission. - // http://blindsignals.com/index.php/2009/07/jquery-delay/ - delay: function( time, type ) { - time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; - type = type || "fx"; - - return this.queue( type, function( next, hooks ) { - var timeout = setTimeout( next, time ); - hooks.stop = function() { - clearTimeout( timeout ); - }; - }); - }, - clearQueue: function( type ) { - return this.queue( type || "fx", [] ); - }, - // Get a promise resolved when queues of a certain type - // are emptied (fx is the type by default) - promise: function( type, obj ) { - var tmp, - count = 1, - defer = jQuery.Deferred(), - elements = this, - i = this.length, - resolve = function() { - if ( !( --count ) ) { - defer.resolveWith( elements, [ elements ] ); - } - }; - - if ( typeof type !== "string" ) { - obj = type; - type = undefined; - } - type = type || "fx"; - - while( i-- ) { - tmp = jQuery._data( elements[ i ], type + "queueHooks" ); - if ( tmp && tmp.empty ) { - count++; - tmp.empty.add( resolve ); - } - } - resolve(); - return defer.promise( obj ); - } -}); -var nodeHook, boolHook, - rclass = /[\t\r\n\f]/g, - rreturn = /\r/g, - rfocusable = /^(?:input|select|textarea|button|object)$/i, - rclickable = /^(?:a|area)$/i, - ruseDefault = /^(?:checked|selected)$/i, - getSetAttribute = jQuery.support.getSetAttribute, - getSetInput = jQuery.support.input; - -jQuery.fn.extend({ - attr: function( name, value ) { - return jQuery.access( this, jQuery.attr, name, value, arguments.length > 1 ); - }, - - removeAttr: function( name ) { - return this.each(function() { - jQuery.removeAttr( this, name ); - }); - }, - - prop: function( name, value ) { - return jQuery.access( this, jQuery.prop, name, value, arguments.length > 1 ); - }, - - removeProp: function( name ) { - name = jQuery.propFix[ name ] || name; - return this.each(function() { - // try/catch handles cases where IE balks (such as removing a property on window) - try { - this[ name ] = undefined; - delete this[ name ]; - } catch( e ) {} - }); - }, - - addClass: function( value ) { - var classes, elem, cur, clazz, j, - i = 0, - len = 
this.length, - proceed = typeof value === "string" && value; - - if ( jQuery.isFunction( value ) ) { - return this.each(function( j ) { - jQuery( this ).addClass( value.call( this, j, this.className ) ); - }); - } - - if ( proceed ) { - // The disjunction here is for better compressibility (see removeClass) - classes = ( value || "" ).match( core_rnotwhite ) || []; - - for ( ; i < len; i++ ) { - elem = this[ i ]; - cur = elem.nodeType === 1 && ( elem.className ? - ( " " + elem.className + " " ).replace( rclass, " " ) : - " " - ); - - if ( cur ) { - j = 0; - while ( (clazz = classes[j++]) ) { - if ( cur.indexOf( " " + clazz + " " ) < 0 ) { - cur += clazz + " "; - } - } - elem.className = jQuery.trim( cur ); - - } - } - } - - return this; - }, - - removeClass: function( value ) { - var classes, elem, cur, clazz, j, - i = 0, - len = this.length, - proceed = arguments.length === 0 || typeof value === "string" && value; - - if ( jQuery.isFunction( value ) ) { - return this.each(function( j ) { - jQuery( this ).removeClass( value.call( this, j, this.className ) ); - }); - } - if ( proceed ) { - classes = ( value || "" ).match( core_rnotwhite ) || []; - - for ( ; i < len; i++ ) { - elem = this[ i ]; - // This expression is here for better compressibility (see addClass) - cur = elem.nodeType === 1 && ( elem.className ? - ( " " + elem.className + " " ).replace( rclass, " " ) : - "" - ); - - if ( cur ) { - j = 0; - while ( (clazz = classes[j++]) ) { - // Remove *all* instances - while ( cur.indexOf( " " + clazz + " " ) >= 0 ) { - cur = cur.replace( " " + clazz + " ", " " ); - } - } - elem.className = value ? jQuery.trim( cur ) : ""; - } - } - } - - return this; - }, - - toggleClass: function( value, stateVal ) { - var type = typeof value; - - if ( typeof stateVal === "boolean" && type === "string" ) { - return stateVal ? 
this.addClass( value ) : this.removeClass( value ); - } - - if ( jQuery.isFunction( value ) ) { - return this.each(function( i ) { - jQuery( this ).toggleClass( value.call(this, i, this.className, stateVal), stateVal ); - }); - } - - return this.each(function() { - if ( type === "string" ) { - // toggle individual class names - var className, - i = 0, - self = jQuery( this ), - classNames = value.match( core_rnotwhite ) || []; - - while ( (className = classNames[ i++ ]) ) { - // check each className given, space separated list - if ( self.hasClass( className ) ) { - self.removeClass( className ); - } else { - self.addClass( className ); - } - } - - // Toggle whole class name - } else if ( type === core_strundefined || type === "boolean" ) { - if ( this.className ) { - // store className if set - jQuery._data( this, "__className__", this.className ); - } - - // If the element has a class name or if we're passed "false", - // then remove the whole classname (if there was one, the above saved it). - // Otherwise bring back whatever was previously saved (if anything), - // falling back to the empty string if nothing was stored. - this.className = this.className || value === false ? "" : jQuery._data( this, "__className__" ) || ""; - } - }); - }, - - hasClass: function( selector ) { - var className = " " + selector + " ", - i = 0, - l = this.length; - for ( ; i < l; i++ ) { - if ( this[i].nodeType === 1 && (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) >= 0 ) { - return true; - } - } - - return false; - }, - - val: function( value ) { - var ret, hooks, isFunction, - elem = this[0]; - - if ( !arguments.length ) { - if ( elem ) { - hooks = jQuery.valHooks[ elem.type ] || jQuery.valHooks[ elem.nodeName.toLowerCase() ]; - - if ( hooks && "get" in hooks && (ret = hooks.get( elem, "value" )) !== undefined ) { - return ret; - } - - ret = elem.value; - - return typeof ret === "string" ? 
- // handle most common string cases - ret.replace(rreturn, "") : - // handle cases where value is null/undef or number - ret == null ? "" : ret; - } - - return; - } - - isFunction = jQuery.isFunction( value ); - - return this.each(function( i ) { - var val; - - if ( this.nodeType !== 1 ) { - return; - } - - if ( isFunction ) { - val = value.call( this, i, jQuery( this ).val() ); - } else { - val = value; - } - - // Treat null/undefined as ""; convert numbers to string - if ( val == null ) { - val = ""; - } else if ( typeof val === "number" ) { - val += ""; - } else if ( jQuery.isArray( val ) ) { - val = jQuery.map(val, function ( value ) { - return value == null ? "" : value + ""; - }); - } - - hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; - - // If set returns undefined, fall back to normal setting - if ( !hooks || !("set" in hooks) || hooks.set( this, val, "value" ) === undefined ) { - this.value = val; - } - }); - } -}); - -jQuery.extend({ - valHooks: { - option: { - get: function( elem ) { - // Use proper attribute retrieval(#6932, #12072) - var val = jQuery.find.attr( elem, "value" ); - return val != null ? - val : - elem.text; - } - }, - select: { - get: function( elem ) { - var value, option, - options = elem.options, - index = elem.selectedIndex, - one = elem.type === "select-one" || index < 0, - values = one ? null : [], - max = one ? index + 1 : options.length, - i = index < 0 ? - max : - one ? index : 0; - - // Loop through all the selected options - for ( ; i < max; i++ ) { - option = options[ i ]; - - // oldIE doesn't update selected after form reset (#2551) - if ( ( option.selected || i === index ) && - // Don't return options that are disabled or in a disabled optgroup - ( jQuery.support.optDisabled ? 
!option.disabled : option.getAttribute("disabled") === null ) && - ( !option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" ) ) ) { - - // Get the specific value for the option - value = jQuery( option ).val(); - - // We don't need an array for one selects - if ( one ) { - return value; - } - - // Multi-Selects return an array - values.push( value ); - } - } - - return values; - }, - - set: function( elem, value ) { - var optionSet, option, - options = elem.options, - values = jQuery.makeArray( value ), - i = options.length; - - while ( i-- ) { - option = options[ i ]; - if ( (option.selected = jQuery.inArray( jQuery(option).val(), values ) >= 0) ) { - optionSet = true; - } - } - - // force browsers to behave consistently when non-matching value is set - if ( !optionSet ) { - elem.selectedIndex = -1; - } - return values; - } - } - }, - - attr: function( elem, name, value ) { - var hooks, ret, - nType = elem.nodeType; - - // don't get/set attributes on text, comment and attribute nodes - if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - // Fallback to prop when attributes are not supported - if ( typeof elem.getAttribute === core_strundefined ) { - return jQuery.prop( elem, name, value ); - } - - // All attributes are lowercase - // Grab necessary hook if one is defined - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - name = name.toLowerCase(); - hooks = jQuery.attrHooks[ name ] || - ( jQuery.expr.match.bool.test( name ) ? 
boolHook : nodeHook ); - } - - if ( value !== undefined ) { - - if ( value === null ) { - jQuery.removeAttr( elem, name ); - - } else if ( hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) { - return ret; - - } else { - elem.setAttribute( name, value + "" ); - return value; - } - - } else if ( hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ) { - return ret; - - } else { - ret = jQuery.find.attr( elem, name ); - - // Non-existent attributes return null, we normalize to undefined - return ret == null ? - undefined : - ret; - } - }, - - removeAttr: function( elem, value ) { - var name, propName, - i = 0, - attrNames = value && value.match( core_rnotwhite ); - - if ( attrNames && elem.nodeType === 1 ) { - while ( (name = attrNames[i++]) ) { - propName = jQuery.propFix[ name ] || name; - - // Boolean attributes get special treatment (#10870) - if ( jQuery.expr.match.bool.test( name ) ) { - // Set corresponding property to false - if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) { - elem[ propName ] = false; - // Support: IE<9 - // Also clear defaultChecked/defaultSelected (if appropriate) - } else { - elem[ jQuery.camelCase( "default-" + name ) ] = - elem[ propName ] = false; - } - - // See #9699 for explanation of this approach (setting first, then removal) - } else { - jQuery.attr( elem, name, "" ); - } - - elem.removeAttribute( getSetAttribute ? 
name : propName ); - } - } - }, - - attrHooks: { - type: { - set: function( elem, value ) { - if ( !jQuery.support.radioValue && value === "radio" && jQuery.nodeName(elem, "input") ) { - // Setting the type on a radio button after the value resets the value in IE6-9 - // Reset value to default in case type is set after value during creation - var val = elem.value; - elem.setAttribute( "type", value ); - if ( val ) { - elem.value = val; - } - return value; - } - } - } - }, - - propFix: { - "for": "htmlFor", - "class": "className" - }, - - prop: function( elem, name, value ) { - var ret, hooks, notxml, - nType = elem.nodeType; - - // don't get/set properties on text, comment and attribute nodes - if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); - - if ( notxml ) { - // Fix name and attach hooks - name = jQuery.propFix[ name ] || name; - hooks = jQuery.propHooks[ name ]; - } - - if ( value !== undefined ) { - return hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ? - ret : - ( elem[ name ] = value ); - - } else { - return hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ? - ret : - elem[ name ]; - } - }, - - propHooks: { - tabIndex: { - get: function( elem ) { - // elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set - // http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ - // Use proper attribute retrieval(#12072) - var tabindex = jQuery.find.attr( elem, "tabindex" ); - - return tabindex ? - parseInt( tabindex, 10 ) : - rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ? 
- 0 : - -1; - } - } - } -}); - -// Hooks for boolean attributes -boolHook = { - set: function( elem, value, name ) { - if ( value === false ) { - // Remove boolean attributes when set to false - jQuery.removeAttr( elem, name ); - } else if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) { - // IE<8 needs the *property* name - elem.setAttribute( !getSetAttribute && jQuery.propFix[ name ] || name, name ); - - // Use defaultChecked and defaultSelected for oldIE - } else { - elem[ jQuery.camelCase( "default-" + name ) ] = elem[ name ] = true; - } - - return name; - } -}; -jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( i, name ) { - var getter = jQuery.expr.attrHandle[ name ] || jQuery.find.attr; - - jQuery.expr.attrHandle[ name ] = getSetInput && getSetAttribute || !ruseDefault.test( name ) ? - function( elem, name, isXML ) { - var fn = jQuery.expr.attrHandle[ name ], - ret = isXML ? - undefined : - /* jshint eqeqeq: false */ - (jQuery.expr.attrHandle[ name ] = undefined) != - getter( elem, name, isXML ) ? - - name.toLowerCase() : - null; - jQuery.expr.attrHandle[ name ] = fn; - return ret; - } : - function( elem, name, isXML ) { - return isXML ? - undefined : - elem[ jQuery.camelCase( "default-" + name ) ] ? 
- name.toLowerCase() : - null; - }; -}); - -// fix oldIE attroperties -if ( !getSetInput || !getSetAttribute ) { - jQuery.attrHooks.value = { - set: function( elem, value, name ) { - if ( jQuery.nodeName( elem, "input" ) ) { - // Does not return so that setAttribute is also used - elem.defaultValue = value; - } else { - // Use nodeHook if defined (#1954); otherwise setAttribute is fine - return nodeHook && nodeHook.set( elem, value, name ); - } - } - }; -} - -// IE6/7 do not support getting/setting some attributes with get/setAttribute -if ( !getSetAttribute ) { - - // Use this for any attribute in IE6/7 - // This fixes almost every IE6/7 issue - nodeHook = { - set: function( elem, value, name ) { - // Set the existing or create a new attribute node - var ret = elem.getAttributeNode( name ); - if ( !ret ) { - elem.setAttributeNode( - (ret = elem.ownerDocument.createAttribute( name )) - ); - } - - ret.value = value += ""; - - // Break association with cloned elements by also using setAttribute (#9646) - return name === "value" || value === elem.getAttribute( name ) ? - value : - undefined; - } - }; - jQuery.expr.attrHandle.id = jQuery.expr.attrHandle.name = jQuery.expr.attrHandle.coords = - // Some attributes are constructed with empty-string values when not defined - function( elem, name, isXML ) { - var ret; - return isXML ? - undefined : - (ret = elem.getAttributeNode( name )) && ret.value !== "" ? - ret.value : - null; - }; - jQuery.valHooks.button = { - get: function( elem, name ) { - var ret = elem.getAttributeNode( name ); - return ret && ret.specified ? - ret.value : - undefined; - }, - set: nodeHook.set - }; - - // Set contenteditable to false on removals(#10429) - // Setting to empty string throws an error as an invalid value - jQuery.attrHooks.contenteditable = { - set: function( elem, value, name ) { - nodeHook.set( elem, value === "" ? 
false : value, name ); - } - }; - - // Set width and height to auto instead of 0 on empty string( Bug #8150 ) - // This is for removals - jQuery.each([ "width", "height" ], function( i, name ) { - jQuery.attrHooks[ name ] = { - set: function( elem, value ) { - if ( value === "" ) { - elem.setAttribute( name, "auto" ); - return value; - } - } - }; - }); -} - - -// Some attributes require a special call on IE -// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx -if ( !jQuery.support.hrefNormalized ) { - // href/src property should get the full normalized URL (#10299/#12915) - jQuery.each([ "href", "src" ], function( i, name ) { - jQuery.propHooks[ name ] = { - get: function( elem ) { - return elem.getAttribute( name, 4 ); - } - }; - }); -} - -if ( !jQuery.support.style ) { - jQuery.attrHooks.style = { - get: function( elem ) { - // Return undefined in the case of empty string - // Note: IE uppercases css property names, but if we were to .toLowerCase() - // .cssText, that would destroy case senstitivity in URL's, like in "background" - return elem.style.cssText || undefined; - }, - set: function( elem, value ) { - return ( elem.style.cssText = value + "" ); - } - }; -} - -// Safari mis-reports the default selected property of an option -// Accessing the parent's selectedIndex property fixes it -if ( !jQuery.support.optSelected ) { - jQuery.propHooks.selected = { - get: function( elem ) { - var parent = elem.parentNode; - - if ( parent ) { - parent.selectedIndex; - - // Make sure that it also works with optgroups, see #5701 - if ( parent.parentNode ) { - parent.parentNode.selectedIndex; - } - } - return null; - } - }; -} - -jQuery.each([ - "tabIndex", - "readOnly", - "maxLength", - "cellSpacing", - "cellPadding", - "rowSpan", - "colSpan", - "useMap", - "frameBorder", - "contentEditable" -], function() { - jQuery.propFix[ this.toLowerCase() ] = this; -}); - -// IE6/7 call enctype encoding -if ( !jQuery.support.enctype ) { - jQuery.propFix.enctype = 
"encoding"; -} - -// Radios and checkboxes getter/setter -jQuery.each([ "radio", "checkbox" ], function() { - jQuery.valHooks[ this ] = { - set: function( elem, value ) { - if ( jQuery.isArray( value ) ) { - return ( elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0 ); - } - } - }; - if ( !jQuery.support.checkOn ) { - jQuery.valHooks[ this ].get = function( elem ) { - // Support: Webkit - // "" is returned instead of "on" if a value isn't specified - return elem.getAttribute("value") === null ? "on" : elem.value; - }; - } -}); -var rformElems = /^(?:input|select|textarea)$/i, - rkeyEvent = /^key/, - rmouseEvent = /^(?:mouse|contextmenu)|click/, - rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, - rtypenamespace = /^([^.]*)(?:\.(.+)|)$/; - -function returnTrue() { - return true; -} - -function returnFalse() { - return false; -} - -function safeActiveElement() { - try { - return document.activeElement; - } catch ( err ) { } -} - -/* - * Helper functions for managing events -- not part of the public interface. - * Props to Dean Edwards' addEvent library for many of the ideas. 
- */ -jQuery.event = { - - global: {}, - - add: function( elem, types, handler, data, selector ) { - var tmp, events, t, handleObjIn, - special, eventHandle, handleObj, - handlers, type, namespaces, origType, - elemData = jQuery._data( elem ); - - // Don't attach events to noData or text/comment nodes (but allow plain objects) - if ( !elemData ) { - return; - } - - // Caller can pass in an object of custom data in lieu of the handler - if ( handler.handler ) { - handleObjIn = handler; - handler = handleObjIn.handler; - selector = handleObjIn.selector; - } - - // Make sure that the handler has a unique ID, used to find/remove it later - if ( !handler.guid ) { - handler.guid = jQuery.guid++; - } - - // Init the element's event structure and main handler, if this is the first - if ( !(events = elemData.events) ) { - events = elemData.events = {}; - } - if ( !(eventHandle = elemData.handle) ) { - eventHandle = elemData.handle = function( e ) { - // Discard the second event of a jQuery.event.trigger() and - // when an event is called after a page has unloaded - return typeof jQuery !== core_strundefined && (!e || jQuery.event.triggered !== e.type) ? - jQuery.event.dispatch.apply( eventHandle.elem, arguments ) : - undefined; - }; - // Add elem as a property of the handle fn to prevent a memory leak with IE non-native events - eventHandle.elem = elem; - } - - // Handle multiple events separated by a space - types = ( types || "" ).match( core_rnotwhite ) || [""]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[t] ) || []; - type = origType = tmp[1]; - namespaces = ( tmp[2] || "" ).split( "." ).sort(); - - // There *must* be a type, no attaching namespace-only handlers - if ( !type ) { - continue; - } - - // If event changes its type, use the special event handlers for the changed type - special = jQuery.event.special[ type ] || {}; - - // If selector defined, determine special event api type, otherwise given type - type = ( selector ? 
special.delegateType : special.bindType ) || type; - - // Update special based on newly reset type - special = jQuery.event.special[ type ] || {}; - - // handleObj is passed to all event handlers - handleObj = jQuery.extend({ - type: type, - origType: origType, - data: data, - handler: handler, - guid: handler.guid, - selector: selector, - needsContext: selector && jQuery.expr.match.needsContext.test( selector ), - namespace: namespaces.join(".") - }, handleObjIn ); - - // Init the event handler queue if we're the first - if ( !(handlers = events[ type ]) ) { - handlers = events[ type ] = []; - handlers.delegateCount = 0; - - // Only use addEventListener/attachEvent if the special events handler returns false - if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { - // Bind the global event handler to the element - if ( elem.addEventListener ) { - elem.addEventListener( type, eventHandle, false ); - - } else if ( elem.attachEvent ) { - elem.attachEvent( "on" + type, eventHandle ); - } - } - } - - if ( special.add ) { - special.add.call( elem, handleObj ); - - if ( !handleObj.handler.guid ) { - handleObj.handler.guid = handler.guid; - } - } - - // Add to the element's handler list, delegates in front - if ( selector ) { - handlers.splice( handlers.delegateCount++, 0, handleObj ); - } else { - handlers.push( handleObj ); - } - - // Keep track of which events have ever been used, for event optimization - jQuery.event.global[ type ] = true; - } - - // Nullify elem to prevent memory leaks in IE - elem = null; - }, - - // Detach an event or set of events from an element - remove: function( elem, types, handler, selector, mappedTypes ) { - var j, handleObj, tmp, - origCount, t, events, - special, handlers, type, - namespaces, origType, - elemData = jQuery.hasData( elem ) && jQuery._data( elem ); - - if ( !elemData || !(events = elemData.events) ) { - return; - } - - // Once for each type.namespace in types; type may be omitted - 
types = ( types || "" ).match( core_rnotwhite ) || [""]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[t] ) || []; - type = origType = tmp[1]; - namespaces = ( tmp[2] || "" ).split( "." ).sort(); - - // Unbind all events (on this namespace, if provided) for the element - if ( !type ) { - for ( type in events ) { - jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); - } - continue; - } - - special = jQuery.event.special[ type ] || {}; - type = ( selector ? special.delegateType : special.bindType ) || type; - handlers = events[ type ] || []; - tmp = tmp[2] && new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" ); - - // Remove matching events - origCount = j = handlers.length; - while ( j-- ) { - handleObj = handlers[ j ]; - - if ( ( mappedTypes || origType === handleObj.origType ) && - ( !handler || handler.guid === handleObj.guid ) && - ( !tmp || tmp.test( handleObj.namespace ) ) && - ( !selector || selector === handleObj.selector || selector === "**" && handleObj.selector ) ) { - handlers.splice( j, 1 ); - - if ( handleObj.selector ) { - handlers.delegateCount--; - } - if ( special.remove ) { - special.remove.call( elem, handleObj ); - } - } - } - - // Remove generic event handler if we removed something and no more handlers exist - // (avoids potential for endless recursion during removal of special event handlers) - if ( origCount && !handlers.length ) { - if ( !special.teardown || special.teardown.call( elem, namespaces, elemData.handle ) === false ) { - jQuery.removeEvent( elem, type, elemData.handle ); - } - - delete events[ type ]; - } - } - - // Remove the expando if it's no longer used - if ( jQuery.isEmptyObject( events ) ) { - delete elemData.handle; - - // removeData also checks for emptiness and clears the expando if empty - // so use it instead of delete - jQuery._removeData( elem, "events" ); - } - }, - - trigger: function( event, data, elem, onlyHandlers ) { - var handle, ontype, cur, 
- bubbleType, special, tmp, i, - eventPath = [ elem || document ], - type = core_hasOwn.call( event, "type" ) ? event.type : event, - namespaces = core_hasOwn.call( event, "namespace" ) ? event.namespace.split(".") : []; - - cur = tmp = elem = elem || document; - - // Don't do events on text and comment nodes - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - // focus/blur morphs to focusin/out; ensure we're not firing them right now - if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { - return; - } - - if ( type.indexOf(".") >= 0 ) { - // Namespaced trigger; create a regexp to match event type in handle() - namespaces = type.split("."); - type = namespaces.shift(); - namespaces.sort(); - } - ontype = type.indexOf(":") < 0 && "on" + type; - - // Caller can pass in a jQuery.Event object, Object, or just an event type string - event = event[ jQuery.expando ] ? - event : - new jQuery.Event( type, typeof event === "object" && event ); - - // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) - event.isTrigger = onlyHandlers ? 2 : 3; - event.namespace = namespaces.join("."); - event.namespace_re = event.namespace ? - new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" ) : - null; - - // Clean up the event in case it is being reused - event.result = undefined; - if ( !event.target ) { - event.target = elem; - } - - // Clone any incoming data and prepend the event, creating the handler arg list - data = data == null ? 
- [ event ] : - jQuery.makeArray( data, [ event ] ); - - // Allow special events to draw outside the lines - special = jQuery.event.special[ type ] || {}; - if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { - return; - } - - // Determine event propagation path in advance, per W3C events spec (#9951) - // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) - if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) { - - bubbleType = special.delegateType || type; - if ( !rfocusMorph.test( bubbleType + type ) ) { - cur = cur.parentNode; - } - for ( ; cur; cur = cur.parentNode ) { - eventPath.push( cur ); - tmp = cur; - } - - // Only add window if we got to document (e.g., not plain obj or detached DOM) - if ( tmp === (elem.ownerDocument || document) ) { - eventPath.push( tmp.defaultView || tmp.parentWindow || window ); - } - } - - // Fire handlers on the event path - i = 0; - while ( (cur = eventPath[i++]) && !event.isPropagationStopped() ) { - - event.type = i > 1 ? - bubbleType : - special.bindType || type; - - // jQuery handler - handle = ( jQuery._data( cur, "events" ) || {} )[ event.type ] && jQuery._data( cur, "handle" ); - if ( handle ) { - handle.apply( cur, data ); - } - - // Native handler - handle = ontype && cur[ ontype ]; - if ( handle && jQuery.acceptData( cur ) && handle.apply && handle.apply( cur, data ) === false ) { - event.preventDefault(); - } - } - event.type = type; - - // If nobody prevented the default action, do it now - if ( !onlyHandlers && !event.isDefaultPrevented() ) { - - if ( (!special._default || special._default.apply( eventPath.pop(), data ) === false) && - jQuery.acceptData( elem ) ) { - - // Call a native DOM method on the target with the same name name as the event. - // Can't use an .isFunction() check here because IE6/7 fails that test. 
- // Don't do default actions on window, that's where global variables be (#6170) - if ( ontype && elem[ type ] && !jQuery.isWindow( elem ) ) { - - // Don't re-trigger an onFOO event when we call its FOO() method - tmp = elem[ ontype ]; - - if ( tmp ) { - elem[ ontype ] = null; - } - - // Prevent re-triggering of the same event, since we already bubbled it above - jQuery.event.triggered = type; - try { - elem[ type ](); - } catch ( e ) { - // IE<9 dies on focus/blur to hidden element (#1486,#12518) - // only reproducible on winXP IE8 native, not IE9 in IE8 mode - } - jQuery.event.triggered = undefined; - - if ( tmp ) { - elem[ ontype ] = tmp; - } - } - } - } - - return event.result; - }, - - dispatch: function( event ) { - - // Make a writable jQuery.Event from the native event object - event = jQuery.event.fix( event ); - - var i, ret, handleObj, matched, j, - handlerQueue = [], - args = core_slice.call( arguments ), - handlers = ( jQuery._data( this, "events" ) || {} )[ event.type ] || [], - special = jQuery.event.special[ event.type ] || {}; - - // Use the fix-ed jQuery.Event rather than the (read-only) native event - args[0] = event; - event.delegateTarget = this; - - // Call the preDispatch hook for the mapped type, and let it bail if desired - if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { - return; - } - - // Determine handlers - handlerQueue = jQuery.event.handlers.call( this, event, handlers ); - - // Run delegates first; they may want to stop propagation beneath us - i = 0; - while ( (matched = handlerQueue[ i++ ]) && !event.isPropagationStopped() ) { - event.currentTarget = matched.elem; - - j = 0; - while ( (handleObj = matched.handlers[ j++ ]) && !event.isImmediatePropagationStopped() ) { - - // Triggered event must either 1) have no namespace, or - // 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace). 
- if ( !event.namespace_re || event.namespace_re.test( handleObj.namespace ) ) { - - event.handleObj = handleObj; - event.data = handleObj.data; - - ret = ( (jQuery.event.special[ handleObj.origType ] || {}).handle || handleObj.handler ) - .apply( matched.elem, args ); - - if ( ret !== undefined ) { - if ( (event.result = ret) === false ) { - event.preventDefault(); - event.stopPropagation(); - } - } - } - } - } - - // Call the postDispatch hook for the mapped type - if ( special.postDispatch ) { - special.postDispatch.call( this, event ); - } - - return event.result; - }, - - handlers: function( event, handlers ) { - var sel, handleObj, matches, i, - handlerQueue = [], - delegateCount = handlers.delegateCount, - cur = event.target; - - // Find delegate handlers - // Black-hole SVG instance trees (#13180) - // Avoid non-left-click bubbling in Firefox (#3861) - if ( delegateCount && cur.nodeType && (!event.button || event.type !== "click") ) { - - /* jshint eqeqeq: false */ - for ( ; cur != this; cur = cur.parentNode || this ) { - /* jshint eqeqeq: true */ - - // Don't check non-elements (#13208) - // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) - if ( cur.nodeType === 1 && (cur.disabled !== true || event.type !== "click") ) { - matches = []; - for ( i = 0; i < delegateCount; i++ ) { - handleObj = handlers[ i ]; - - // Don't conflict with Object.prototype properties (#13203) - sel = handleObj.selector + " "; - - if ( matches[ sel ] === undefined ) { - matches[ sel ] = handleObj.needsContext ? 
- jQuery( sel, this ).index( cur ) >= 0 : - jQuery.find( sel, this, null, [ cur ] ).length; - } - if ( matches[ sel ] ) { - matches.push( handleObj ); - } - } - if ( matches.length ) { - handlerQueue.push({ elem: cur, handlers: matches }); - } - } - } - } - - // Add the remaining (directly-bound) handlers - if ( delegateCount < handlers.length ) { - handlerQueue.push({ elem: this, handlers: handlers.slice( delegateCount ) }); - } - - return handlerQueue; - }, - - fix: function( event ) { - if ( event[ jQuery.expando ] ) { - return event; - } - - // Create a writable copy of the event object and normalize some properties - var i, prop, copy, - type = event.type, - originalEvent = event, - fixHook = this.fixHooks[ type ]; - - if ( !fixHook ) { - this.fixHooks[ type ] = fixHook = - rmouseEvent.test( type ) ? this.mouseHooks : - rkeyEvent.test( type ) ? this.keyHooks : - {}; - } - copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props; - - event = new jQuery.Event( originalEvent ); - - i = copy.length; - while ( i-- ) { - prop = copy[ i ]; - event[ prop ] = originalEvent[ prop ]; - } - - // Support: IE<9 - // Fix target property (#1925) - if ( !event.target ) { - event.target = originalEvent.srcElement || document; - } - - // Support: Chrome 23+, Safari? - // Target should not be a text node (#504, #13143) - if ( event.target.nodeType === 3 ) { - event.target = event.target.parentNode; - } - - // Support: IE<9 - // For mouse/key events, metaKey==false if it's undefined (#3368, #11328) - event.metaKey = !!event.metaKey; - - return fixHook.filter ? 
fixHook.filter( event, originalEvent ) : event; - }, - - // Includes some event props shared by KeyEvent and MouseEvent - props: "altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "), - - fixHooks: {}, - - keyHooks: { - props: "char charCode key keyCode".split(" "), - filter: function( event, original ) { - - // Add which for key events - if ( event.which == null ) { - event.which = original.charCode != null ? original.charCode : original.keyCode; - } - - return event; - } - }, - - mouseHooks: { - props: "button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "), - filter: function( event, original ) { - var body, eventDoc, doc, - button = original.button, - fromElement = original.fromElement; - - // Calculate pageX/Y if missing and clientX/Y available - if ( event.pageX == null && original.clientX != null ) { - eventDoc = event.target.ownerDocument || document; - doc = eventDoc.documentElement; - body = eventDoc.body; - - event.pageX = original.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && body.clientLeft || 0 ); - event.pageY = original.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && body.clientTop || 0 ); - } - - // Add relatedTarget, if necessary - if ( !event.relatedTarget && fromElement ) { - event.relatedTarget = fromElement === event.target ? original.toElement : fromElement; - } - - // Add which for click: 1 === left; 2 === middle; 3 === right - // Note: button is not normalized, so don't use it - if ( !event.which && button !== undefined ) { - event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 
2 : 0 ) ) ); - } - - return event; - } - }, - - special: { - load: { - // Prevent triggered image.load events from bubbling to window.load - noBubble: true - }, - focus: { - // Fire native event if possible so blur/focus sequence is correct - trigger: function() { - if ( this !== safeActiveElement() && this.focus ) { - try { - this.focus(); - return false; - } catch ( e ) { - // Support: IE<9 - // If we error on focus to hidden element (#1486, #12518), - // let .trigger() run the handlers - } - } - }, - delegateType: "focusin" - }, - blur: { - trigger: function() { - if ( this === safeActiveElement() && this.blur ) { - this.blur(); - return false; - } - }, - delegateType: "focusout" - }, - click: { - // For checkbox, fire native event so checked state will be right - trigger: function() { - if ( jQuery.nodeName( this, "input" ) && this.type === "checkbox" && this.click ) { - this.click(); - return false; - } - }, - - // For cross-browser consistency, don't fire native .click() on links - _default: function( event ) { - return jQuery.nodeName( event.target, "a" ); - } - }, - - beforeunload: { - postDispatch: function( event ) { - - // Even when returnValue equals to undefined Firefox will still show alert - if ( event.result !== undefined ) { - event.originalEvent.returnValue = event.result; - } - } - } - }, - - simulate: function( type, elem, event, bubble ) { - // Piggyback on a donor event to simulate a different one. - // Fake originalEvent to avoid donor's stopPropagation, but if the - // simulated event prevents default then we do the same on the donor. - var e = jQuery.extend( - new jQuery.Event(), - event, - { - type: type, - isSimulated: true, - originalEvent: {} - } - ); - if ( bubble ) { - jQuery.event.trigger( e, null, elem ); - } else { - jQuery.event.dispatch.call( elem, e ); - } - if ( e.isDefaultPrevented() ) { - event.preventDefault(); - } - } -}; - -jQuery.removeEvent = document.removeEventListener ? 
- function( elem, type, handle ) { - if ( elem.removeEventListener ) { - elem.removeEventListener( type, handle, false ); - } - } : - function( elem, type, handle ) { - var name = "on" + type; - - if ( elem.detachEvent ) { - - // #8545, #7054, preventing memory leaks for custom events in IE6-8 - // detachEvent needed property on element, by name of that event, to properly expose it to GC - if ( typeof elem[ name ] === core_strundefined ) { - elem[ name ] = null; - } - - elem.detachEvent( name, handle ); - } - }; - -jQuery.Event = function( src, props ) { - // Allow instantiation without the 'new' keyword - if ( !(this instanceof jQuery.Event) ) { - return new jQuery.Event( src, props ); - } - - // Event object - if ( src && src.type ) { - this.originalEvent = src; - this.type = src.type; - - // Events bubbling up the document may have been marked as prevented - // by a handler lower down the tree; reflect the correct value. - this.isDefaultPrevented = ( src.defaultPrevented || src.returnValue === false || - src.getPreventDefault && src.getPreventDefault() ) ? 
returnTrue : returnFalse; - - // Event type - } else { - this.type = src; - } - - // Put explicitly provided properties onto the event object - if ( props ) { - jQuery.extend( this, props ); - } - - // Create a timestamp if incoming event doesn't have one - this.timeStamp = src && src.timeStamp || jQuery.now(); - - // Mark it as fixed - this[ jQuery.expando ] = true; -}; - -// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding -// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html -jQuery.Event.prototype = { - isDefaultPrevented: returnFalse, - isPropagationStopped: returnFalse, - isImmediatePropagationStopped: returnFalse, - - preventDefault: function() { - var e = this.originalEvent; - - this.isDefaultPrevented = returnTrue; - if ( !e ) { - return; - } - - // If preventDefault exists, run it on the original event - if ( e.preventDefault ) { - e.preventDefault(); - - // Support: IE - // Otherwise set the returnValue property of the original event to false - } else { - e.returnValue = false; - } - }, - stopPropagation: function() { - var e = this.originalEvent; - - this.isPropagationStopped = returnTrue; - if ( !e ) { - return; - } - // If stopPropagation exists, run it on the original event - if ( e.stopPropagation ) { - e.stopPropagation(); - } - - // Support: IE - // Set the cancelBubble property of the original event to true - e.cancelBubble = true; - }, - stopImmediatePropagation: function() { - this.isImmediatePropagationStopped = returnTrue; - this.stopPropagation(); - } -}; - -// Create mouseenter/leave events using mouseover/out and event-time checks -jQuery.each({ - mouseenter: "mouseover", - mouseleave: "mouseout" -}, function( orig, fix ) { - jQuery.event.special[ orig ] = { - delegateType: fix, - bindType: fix, - - handle: function( event ) { - var ret, - target = this, - related = event.relatedTarget, - handleObj = event.handleObj; - - // For mousenter/leave call the handler if related 
is outside the target. - // NB: No relatedTarget if the mouse left/entered the browser window - if ( !related || (related !== target && !jQuery.contains( target, related )) ) { - event.type = handleObj.origType; - ret = handleObj.handler.apply( this, arguments ); - event.type = fix; - } - return ret; - } - }; -}); - -// IE submit delegation -if ( !jQuery.support.submitBubbles ) { - - jQuery.event.special.submit = { - setup: function() { - // Only need this for delegated form submit events - if ( jQuery.nodeName( this, "form" ) ) { - return false; - } - - // Lazy-add a submit handler when a descendant form may potentially be submitted - jQuery.event.add( this, "click._submit keypress._submit", function( e ) { - // Node name check avoids a VML-related crash in IE (#9807) - var elem = e.target, - form = jQuery.nodeName( elem, "input" ) || jQuery.nodeName( elem, "button" ) ? elem.form : undefined; - if ( form && !jQuery._data( form, "submitBubbles" ) ) { - jQuery.event.add( form, "submit._submit", function( event ) { - event._submit_bubble = true; - }); - jQuery._data( form, "submitBubbles", true ); - } - }); - // return undefined since we don't need an event listener - }, - - postDispatch: function( event ) { - // If form was submitted by the user, bubble the event up the tree - if ( event._submit_bubble ) { - delete event._submit_bubble; - if ( this.parentNode && !event.isTrigger ) { - jQuery.event.simulate( "submit", this.parentNode, event, true ); - } - } - }, - - teardown: function() { - // Only need this for delegated form submit events - if ( jQuery.nodeName( this, "form" ) ) { - return false; - } - - // Remove delegated handlers; cleanData eventually reaps submit handlers attached above - jQuery.event.remove( this, "._submit" ); - } - }; -} - -// IE change delegation and checkbox/radio fix -if ( !jQuery.support.changeBubbles ) { - - jQuery.event.special.change = { - - setup: function() { - - if ( rformElems.test( this.nodeName ) ) { - // IE doesn't fire change 
on a check/radio until blur; trigger it on click - // after a propertychange. Eat the blur-change in special.change.handle. - // This still fires onchange a second time for check/radio after blur. - if ( this.type === "checkbox" || this.type === "radio" ) { - jQuery.event.add( this, "propertychange._change", function( event ) { - if ( event.originalEvent.propertyName === "checked" ) { - this._just_changed = true; - } - }); - jQuery.event.add( this, "click._change", function( event ) { - if ( this._just_changed && !event.isTrigger ) { - this._just_changed = false; - } - // Allow triggered, simulated change events (#11500) - jQuery.event.simulate( "change", this, event, true ); - }); - } - return false; - } - // Delegated event; lazy-add a change handler on descendant inputs - jQuery.event.add( this, "beforeactivate._change", function( e ) { - var elem = e.target; - - if ( rformElems.test( elem.nodeName ) && !jQuery._data( elem, "changeBubbles" ) ) { - jQuery.event.add( elem, "change._change", function( event ) { - if ( this.parentNode && !event.isSimulated && !event.isTrigger ) { - jQuery.event.simulate( "change", this.parentNode, event, true ); - } - }); - jQuery._data( elem, "changeBubbles", true ); - } - }); - }, - - handle: function( event ) { - var elem = event.target; - - // Swallow native change events from checkbox/radio, we already triggered them above - if ( this !== elem || event.isSimulated || event.isTrigger || (elem.type !== "radio" && elem.type !== "checkbox") ) { - return event.handleObj.handler.apply( this, arguments ); - } - }, - - teardown: function() { - jQuery.event.remove( this, "._change" ); - - return !rformElems.test( this.nodeName ); - } - }; -} - -// Create "bubbling" focus and blur events -if ( !jQuery.support.focusinBubbles ) { - jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) { - - // Attach a single capturing handler while someone wants focusin/focusout - var attaches = 0, - handler = function( event ) { - 
jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ), true ); - }; - - jQuery.event.special[ fix ] = { - setup: function() { - if ( attaches++ === 0 ) { - document.addEventListener( orig, handler, true ); - } - }, - teardown: function() { - if ( --attaches === 0 ) { - document.removeEventListener( orig, handler, true ); - } - } - }; - }); -} - -jQuery.fn.extend({ - - on: function( types, selector, data, fn, /*INTERNAL*/ one ) { - var type, origFn; - - // Types can be a map of types/handlers - if ( typeof types === "object" ) { - // ( types-Object, selector, data ) - if ( typeof selector !== "string" ) { - // ( types-Object, data ) - data = data || selector; - selector = undefined; - } - for ( type in types ) { - this.on( type, selector, data, types[ type ], one ); - } - return this; - } - - if ( data == null && fn == null ) { - // ( types, fn ) - fn = selector; - data = selector = undefined; - } else if ( fn == null ) { - if ( typeof selector === "string" ) { - // ( types, selector, fn ) - fn = data; - data = undefined; - } else { - // ( types, data, fn ) - fn = data; - data = selector; - selector = undefined; - } - } - if ( fn === false ) { - fn = returnFalse; - } else if ( !fn ) { - return this; - } - - if ( one === 1 ) { - origFn = fn; - fn = function( event ) { - // Can use an empty set, since event contains the info - jQuery().off( event ); - return origFn.apply( this, arguments ); - }; - // Use same guid so caller can remove using origFn - fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); - } - return this.each( function() { - jQuery.event.add( this, types, fn, data, selector ); - }); - }, - one: function( types, selector, data, fn ) { - return this.on( types, selector, data, fn, 1 ); - }, - off: function( types, selector, fn ) { - var handleObj, type; - if ( types && types.preventDefault && types.handleObj ) { - // ( event ) dispatched jQuery.Event - handleObj = types.handleObj; - jQuery( types.delegateTarget ).off( - 
handleObj.namespace ? handleObj.origType + "." + handleObj.namespace : handleObj.origType, - handleObj.selector, - handleObj.handler - ); - return this; - } - if ( typeof types === "object" ) { - // ( types-object [, selector] ) - for ( type in types ) { - this.off( type, selector, types[ type ] ); - } - return this; - } - if ( selector === false || typeof selector === "function" ) { - // ( types [, fn] ) - fn = selector; - selector = undefined; - } - if ( fn === false ) { - fn = returnFalse; - } - return this.each(function() { - jQuery.event.remove( this, types, fn, selector ); - }); - }, - - trigger: function( type, data ) { - return this.each(function() { - jQuery.event.trigger( type, data, this ); - }); - }, - triggerHandler: function( type, data ) { - var elem = this[0]; - if ( elem ) { - return jQuery.event.trigger( type, data, elem, true ); - } - } -}); -var isSimple = /^.[^:#\[\.,]*$/, - rparentsprev = /^(?:parents|prev(?:Until|All))/, - rneedsContext = jQuery.expr.match.needsContext, - // methods guaranteed to produce a unique set when starting from a unique set - guaranteedUnique = { - children: true, - contents: true, - next: true, - prev: true - }; - -jQuery.fn.extend({ - find: function( selector ) { - var i, - ret = [], - self = this, - len = self.length; - - if ( typeof selector !== "string" ) { - return this.pushStack( jQuery( selector ).filter(function() { - for ( i = 0; i < len; i++ ) { - if ( jQuery.contains( self[ i ], this ) ) { - return true; - } - } - }) ); - } - - for ( i = 0; i < len; i++ ) { - jQuery.find( selector, self[ i ], ret ); - } - - // Needed because $( selector, context ) becomes $( context ).find( selector ) - ret = this.pushStack( len > 1 ? jQuery.unique( ret ) : ret ); - ret.selector = this.selector ? 
this.selector + " " + selector : selector; - return ret; - }, - - has: function( target ) { - var i, - targets = jQuery( target, this ), - len = targets.length; - - return this.filter(function() { - for ( i = 0; i < len; i++ ) { - if ( jQuery.contains( this, targets[i] ) ) { - return true; - } - } - }); - }, - - not: function( selector ) { - return this.pushStack( winnow(this, selector || [], true) ); - }, - - filter: function( selector ) { - return this.pushStack( winnow(this, selector || [], false) ); - }, - - is: function( selector ) { - return !!winnow( - this, - - // If this is a positional/relative selector, check membership in the returned set - // so $("p:first").is("p:last") won't return true for a doc with two "p". - typeof selector === "string" && rneedsContext.test( selector ) ? - jQuery( selector ) : - selector || [], - false - ).length; - }, - - closest: function( selectors, context ) { - var cur, - i = 0, - l = this.length, - ret = [], - pos = rneedsContext.test( selectors ) || typeof selectors !== "string" ? - jQuery( selectors, context || this.context ) : - 0; - - for ( ; i < l; i++ ) { - for ( cur = this[i]; cur && cur !== context; cur = cur.parentNode ) { - // Always skip document fragments - if ( cur.nodeType < 11 && (pos ? - pos.index(cur) > -1 : - - // Don't pass non-elements to Sizzle - cur.nodeType === 1 && - jQuery.find.matchesSelector(cur, selectors)) ) { - - cur = ret.push( cur ); - break; - } - } - } - - return this.pushStack( ret.length > 1 ? jQuery.unique( ret ) : ret ); - }, - - // Determine the position of an element within - // the matched set of elements - index: function( elem ) { - - // No argument, return index in parent - if ( !elem ) { - return ( this[0] && this[0].parentNode ) ? 
this.first().prevAll().length : -1; - } - - // index in selector - if ( typeof elem === "string" ) { - return jQuery.inArray( this[0], jQuery( elem ) ); - } - - // Locate the position of the desired element - return jQuery.inArray( - // If it receives a jQuery object, the first element is used - elem.jquery ? elem[0] : elem, this ); - }, - - add: function( selector, context ) { - var set = typeof selector === "string" ? - jQuery( selector, context ) : - jQuery.makeArray( selector && selector.nodeType ? [ selector ] : selector ), - all = jQuery.merge( this.get(), set ); - - return this.pushStack( jQuery.unique(all) ); - }, - - addBack: function( selector ) { - return this.add( selector == null ? - this.prevObject : this.prevObject.filter(selector) - ); - } -}); - -function sibling( cur, dir ) { - do { - cur = cur[ dir ]; - } while ( cur && cur.nodeType !== 1 ); - - return cur; -} - -jQuery.each({ - parent: function( elem ) { - var parent = elem.parentNode; - return parent && parent.nodeType !== 11 ? parent : null; - }, - parents: function( elem ) { - return jQuery.dir( elem, "parentNode" ); - }, - parentsUntil: function( elem, i, until ) { - return jQuery.dir( elem, "parentNode", until ); - }, - next: function( elem ) { - return sibling( elem, "nextSibling" ); - }, - prev: function( elem ) { - return sibling( elem, "previousSibling" ); - }, - nextAll: function( elem ) { - return jQuery.dir( elem, "nextSibling" ); - }, - prevAll: function( elem ) { - return jQuery.dir( elem, "previousSibling" ); - }, - nextUntil: function( elem, i, until ) { - return jQuery.dir( elem, "nextSibling", until ); - }, - prevUntil: function( elem, i, until ) { - return jQuery.dir( elem, "previousSibling", until ); - }, - siblings: function( elem ) { - return jQuery.sibling( ( elem.parentNode || {} ).firstChild, elem ); - }, - children: function( elem ) { - return jQuery.sibling( elem.firstChild ); - }, - contents: function( elem ) { - return jQuery.nodeName( elem, "iframe" ) ? 
- elem.contentDocument || elem.contentWindow.document : - jQuery.merge( [], elem.childNodes ); - } -}, function( name, fn ) { - jQuery.fn[ name ] = function( until, selector ) { - var ret = jQuery.map( this, fn, until ); - - if ( name.slice( -5 ) !== "Until" ) { - selector = until; - } - - if ( selector && typeof selector === "string" ) { - ret = jQuery.filter( selector, ret ); - } - - if ( this.length > 1 ) { - // Remove duplicates - if ( !guaranteedUnique[ name ] ) { - ret = jQuery.unique( ret ); - } - - // Reverse order for parents* and prev-derivatives - if ( rparentsprev.test( name ) ) { - ret = ret.reverse(); - } - } - - return this.pushStack( ret ); - }; -}); - -jQuery.extend({ - filter: function( expr, elems, not ) { - var elem = elems[ 0 ]; - - if ( not ) { - expr = ":not(" + expr + ")"; - } - - return elems.length === 1 && elem.nodeType === 1 ? - jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : [] : - jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { - return elem.nodeType === 1; - })); - }, - - dir: function( elem, dir, until ) { - var matched = [], - cur = elem[ dir ]; - - while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) { - if ( cur.nodeType === 1 ) { - matched.push( cur ); - } - cur = cur[dir]; - } - return matched; - }, - - sibling: function( n, elem ) { - var r = []; - - for ( ; n; n = n.nextSibling ) { - if ( n.nodeType === 1 && n !== elem ) { - r.push( n ); - } - } - - return r; - } -}); - -// Implement the identical functionality for filter and not -function winnow( elements, qualifier, not ) { - if ( jQuery.isFunction( qualifier ) ) { - return jQuery.grep( elements, function( elem, i ) { - /* jshint -W018 */ - return !!qualifier.call( elem, i, elem ) !== not; - }); - - } - - if ( qualifier.nodeType ) { - return jQuery.grep( elements, function( elem ) { - return ( elem === qualifier ) !== not; - }); - - } - - if ( typeof qualifier === "string" ) { - if ( 
isSimple.test( qualifier ) ) { - return jQuery.filter( qualifier, elements, not ); - } - - qualifier = jQuery.filter( qualifier, elements ); - } - - return jQuery.grep( elements, function( elem ) { - return ( jQuery.inArray( elem, qualifier ) >= 0 ) !== not; - }); -} -function createSafeFragment( document ) { - var list = nodeNames.split( "|" ), - safeFrag = document.createDocumentFragment(); - - if ( safeFrag.createElement ) { - while ( list.length ) { - safeFrag.createElement( - list.pop() - ); - } - } - return safeFrag; -} - -var nodeNames = "abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|" + - "header|hgroup|mark|meter|nav|output|progress|section|summary|time|video", - rinlinejQuery = / jQuery\d+="(?:null|\d+)"/g, - rnoshimcache = new RegExp("<(?:" + nodeNames + ")[\\s/>]", "i"), - rleadingWhitespace = /^\s+/, - rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi, - rtagName = /<([\w:]+)/, - rtbody = /\s*$/g, - - // We have to close these tags to support XHTML (#13200) - wrapMap = { - option: [ 1, "" ], - legend: [ 1, "
    ", "
    " ], - area: [ 1, "", "" ], - param: [ 1, "", "" ], - thead: [ 1, "", "
    " ], - tr: [ 2, "", "
    " ], - col: [ 2, "", "
    " ], - td: [ 3, "", "
    " ], - - // IE6-8 can't serialize link, script, style, or any html5 (NoScope) tags, - // unless wrapped in a div with non-breaking characters in front of it. - _default: jQuery.support.htmlSerialize ? [ 0, "", "" ] : [ 1, "X
    ", "
    " ] - }, - safeFragment = createSafeFragment( document ), - fragmentDiv = safeFragment.appendChild( document.createElement("div") ); - -wrapMap.optgroup = wrapMap.option; -wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; -wrapMap.th = wrapMap.td; - -jQuery.fn.extend({ - text: function( value ) { - return jQuery.access( this, function( value ) { - return value === undefined ? - jQuery.text( this ) : - this.empty().append( ( this[0] && this[0].ownerDocument || document ).createTextNode( value ) ); - }, null, value, arguments.length ); - }, - - append: function() { - return this.domManip( arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.appendChild( elem ); - } - }); - }, - - prepend: function() { - return this.domManip( arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.insertBefore( elem, target.firstChild ); - } - }); - }, - - before: function() { - return this.domManip( arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this ); - } - }); - }, - - after: function() { - return this.domManip( arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this.nextSibling ); - } - }); - }, - - // keepData is for internal use only--do not document - remove: function( selector, keepData ) { - var elem, - elems = selector ? 
jQuery.filter( selector, this ) : this, - i = 0; - - for ( ; (elem = elems[i]) != null; i++ ) { - - if ( !keepData && elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem ) ); - } - - if ( elem.parentNode ) { - if ( keepData && jQuery.contains( elem.ownerDocument, elem ) ) { - setGlobalEval( getAll( elem, "script" ) ); - } - elem.parentNode.removeChild( elem ); - } - } - - return this; - }, - - empty: function() { - var elem, - i = 0; - - for ( ; (elem = this[i]) != null; i++ ) { - // Remove element nodes and prevent memory leaks - if ( elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem, false ) ); - } - - // Remove any remaining nodes - while ( elem.firstChild ) { - elem.removeChild( elem.firstChild ); - } - - // If this is a select, ensure that it displays empty (#12336) - // Support: IE<9 - if ( elem.options && jQuery.nodeName( elem, "select" ) ) { - elem.options.length = 0; - } - } - - return this; - }, - - clone: function( dataAndEvents, deepDataAndEvents ) { - dataAndEvents = dataAndEvents == null ? false : dataAndEvents; - deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; - - return this.map( function () { - return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); - }); - }, - - html: function( value ) { - return jQuery.access( this, function( value ) { - var elem = this[0] || {}, - i = 0, - l = this.length; - - if ( value === undefined ) { - return elem.nodeType === 1 ? 
- elem.innerHTML.replace( rinlinejQuery, "" ) : - undefined; - } - - // See if we can take a shortcut and just use innerHTML - if ( typeof value === "string" && !rnoInnerhtml.test( value ) && - ( jQuery.support.htmlSerialize || !rnoshimcache.test( value ) ) && - ( jQuery.support.leadingWhitespace || !rleadingWhitespace.test( value ) ) && - !wrapMap[ ( rtagName.exec( value ) || ["", ""] )[1].toLowerCase() ] ) { - - value = value.replace( rxhtmlTag, "<$1>" ); - - try { - for (; i < l; i++ ) { - // Remove element nodes and prevent memory leaks - elem = this[i] || {}; - if ( elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem, false ) ); - elem.innerHTML = value; - } - } - - elem = 0; - - // If using innerHTML throws an exception, use the fallback method - } catch(e) {} - } - - if ( elem ) { - this.empty().append( value ); - } - }, null, value, arguments.length ); - }, - - replaceWith: function() { - var - // Snapshot the DOM in case .domManip sweeps something relevant into its fragment - args = jQuery.map( this, function( elem ) { - return [ elem.nextSibling, elem.parentNode ]; - }), - i = 0; - - // Make the changes, replacing each context element with the new content - this.domManip( arguments, function( elem ) { - var next = args[ i++ ], - parent = args[ i++ ]; - - if ( parent ) { - // Don't use the snapshot next if it has moved (#13810) - if ( next && next.parentNode !== parent ) { - next = this.nextSibling; - } - jQuery( this ).remove(); - parent.insertBefore( elem, next ); - } - // Allow new content to include elements from the context set - }, true ); - - // Force removal if there was no new content (e.g., from empty arguments) - return i ? 
this : this.remove(); - }, - - detach: function( selector ) { - return this.remove( selector, true ); - }, - - domManip: function( args, callback, allowIntersection ) { - - // Flatten any nested arrays - args = core_concat.apply( [], args ); - - var first, node, hasScripts, - scripts, doc, fragment, - i = 0, - l = this.length, - set = this, - iNoClone = l - 1, - value = args[0], - isFunction = jQuery.isFunction( value ); - - // We can't cloneNode fragments that contain checked, in WebKit - if ( isFunction || !( l <= 1 || typeof value !== "string" || jQuery.support.checkClone || !rchecked.test( value ) ) ) { - return this.each(function( index ) { - var self = set.eq( index ); - if ( isFunction ) { - args[0] = value.call( this, index, self.html() ); - } - self.domManip( args, callback, allowIntersection ); - }); - } - - if ( l ) { - fragment = jQuery.buildFragment( args, this[ 0 ].ownerDocument, false, !allowIntersection && this ); - first = fragment.firstChild; - - if ( fragment.childNodes.length === 1 ) { - fragment = first; - } - - if ( first ) { - scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); - hasScripts = scripts.length; - - // Use the original fragment for the last item instead of the first because it can end up - // being emptied incorrectly in certain situations (#8070). 
- for ( ; i < l; i++ ) { - node = fragment; - - if ( i !== iNoClone ) { - node = jQuery.clone( node, true, true ); - - // Keep references to cloned scripts for later restoration - if ( hasScripts ) { - jQuery.merge( scripts, getAll( node, "script" ) ); - } - } - - callback.call( this[i], node, i ); - } - - if ( hasScripts ) { - doc = scripts[ scripts.length - 1 ].ownerDocument; - - // Reenable scripts - jQuery.map( scripts, restoreScript ); - - // Evaluate executable scripts on first document insertion - for ( i = 0; i < hasScripts; i++ ) { - node = scripts[ i ]; - if ( rscriptType.test( node.type || "" ) && - !jQuery._data( node, "globalEval" ) && jQuery.contains( doc, node ) ) { - - if ( node.src ) { - // Hope ajax is available... - jQuery._evalUrl( node.src ); - } else { - jQuery.globalEval( ( node.text || node.textContent || node.innerHTML || "" ).replace( rcleanScript, "" ) ); - } - } - } - } - - // Fix #11809: Avoid leaking memory - fragment = first = null; - } - } - - return this; - } -}); - -// Support: IE<8 -// Manipulating tables requires a tbody -function manipulationTarget( elem, content ) { - return jQuery.nodeName( elem, "table" ) && - jQuery.nodeName( content.nodeType === 1 ? content : content.firstChild, "tr" ) ? 
- - elem.getElementsByTagName("tbody")[0] || - elem.appendChild( elem.ownerDocument.createElement("tbody") ) : - elem; -} - -// Replace/restore the type attribute of script elements for safe DOM manipulation -function disableScript( elem ) { - elem.type = (jQuery.find.attr( elem, "type" ) !== null) + "/" + elem.type; - return elem; -} -function restoreScript( elem ) { - var match = rscriptTypeMasked.exec( elem.type ); - if ( match ) { - elem.type = match[1]; - } else { - elem.removeAttribute("type"); - } - return elem; -} - -// Mark scripts as having already been evaluated -function setGlobalEval( elems, refElements ) { - var elem, - i = 0; - for ( ; (elem = elems[i]) != null; i++ ) { - jQuery._data( elem, "globalEval", !refElements || jQuery._data( refElements[i], "globalEval" ) ); - } -} - -function cloneCopyEvent( src, dest ) { - - if ( dest.nodeType !== 1 || !jQuery.hasData( src ) ) { - return; - } - - var type, i, l, - oldData = jQuery._data( src ), - curData = jQuery._data( dest, oldData ), - events = oldData.events; - - if ( events ) { - delete curData.handle; - curData.events = {}; - - for ( type in events ) { - for ( i = 0, l = events[ type ].length; i < l; i++ ) { - jQuery.event.add( dest, type, events[ type ][ i ] ); - } - } - } - - // make the cloned public data object a copy from the original - if ( curData.data ) { - curData.data = jQuery.extend( {}, curData.data ); - } -} - -function fixCloneNodeIssues( src, dest ) { - var nodeName, e, data; - - // We do not need to do anything for non-Elements - if ( dest.nodeType !== 1 ) { - return; - } - - nodeName = dest.nodeName.toLowerCase(); - - // IE6-8 copies events bound via attachEvent when using cloneNode. 
- if ( !jQuery.support.noCloneEvent && dest[ jQuery.expando ] ) { - data = jQuery._data( dest ); - - for ( e in data.events ) { - jQuery.removeEvent( dest, e, data.handle ); - } - - // Event data gets referenced instead of copied if the expando gets copied too - dest.removeAttribute( jQuery.expando ); - } - - // IE blanks contents when cloning scripts, and tries to evaluate newly-set text - if ( nodeName === "script" && dest.text !== src.text ) { - disableScript( dest ).text = src.text; - restoreScript( dest ); - - // IE6-10 improperly clones children of object elements using classid. - // IE10 throws NoModificationAllowedError if parent is null, #12132. - } else if ( nodeName === "object" ) { - if ( dest.parentNode ) { - dest.outerHTML = src.outerHTML; - } - - // This path appears unavoidable for IE9. When cloning an object - // element in IE9, the outerHTML strategy above is not sufficient. - // If the src has innerHTML and the destination does not, - // copy the src.innerHTML into the dest.innerHTML. #10324 - if ( jQuery.support.html5Clone && ( src.innerHTML && !jQuery.trim(dest.innerHTML) ) ) { - dest.innerHTML = src.innerHTML; - } - - } else if ( nodeName === "input" && manipulation_rcheckableType.test( src.type ) ) { - // IE6-8 fails to persist the checked state of a cloned checkbox - // or radio button. 
Worse, IE6-7 fail to give the cloned element - // a checked appearance if the defaultChecked value isn't also set - - dest.defaultChecked = dest.checked = src.checked; - - // IE6-7 get confused and end up setting the value of a cloned - // checkbox/radio button to an empty string instead of "on" - if ( dest.value !== src.value ) { - dest.value = src.value; - } - - // IE6-8 fails to return the selected option to the default selected - // state when cloning options - } else if ( nodeName === "option" ) { - dest.defaultSelected = dest.selected = src.defaultSelected; - - // IE6-8 fails to set the defaultValue to the correct value when - // cloning other types of input fields - } else if ( nodeName === "input" || nodeName === "textarea" ) { - dest.defaultValue = src.defaultValue; - } -} - -jQuery.each({ - appendTo: "append", - prependTo: "prepend", - insertBefore: "before", - insertAfter: "after", - replaceAll: "replaceWith" -}, function( name, original ) { - jQuery.fn[ name ] = function( selector ) { - var elems, - i = 0, - ret = [], - insert = jQuery( selector ), - last = insert.length - 1; - - for ( ; i <= last; i++ ) { - elems = i === last ? this : this.clone(true); - jQuery( insert[i] )[ original ]( elems ); - - // Modern browsers can apply jQuery collections as arrays, but oldIE needs a .get() - core_push.apply( ret, elems.get() ); - } - - return this.pushStack( ret ); - }; -}); - -function getAll( context, tag ) { - var elems, elem, - i = 0, - found = typeof context.getElementsByTagName !== core_strundefined ? context.getElementsByTagName( tag || "*" ) : - typeof context.querySelectorAll !== core_strundefined ? 
context.querySelectorAll( tag || "*" ) : - undefined; - - if ( !found ) { - for ( found = [], elems = context.childNodes || context; (elem = elems[i]) != null; i++ ) { - if ( !tag || jQuery.nodeName( elem, tag ) ) { - found.push( elem ); - } else { - jQuery.merge( found, getAll( elem, tag ) ); - } - } - } - - return tag === undefined || tag && jQuery.nodeName( context, tag ) ? - jQuery.merge( [ context ], found ) : - found; -} - -// Used in buildFragment, fixes the defaultChecked property -function fixDefaultChecked( elem ) { - if ( manipulation_rcheckableType.test( elem.type ) ) { - elem.defaultChecked = elem.checked; - } -} - -jQuery.extend({ - clone: function( elem, dataAndEvents, deepDataAndEvents ) { - var destElements, node, clone, i, srcElements, - inPage = jQuery.contains( elem.ownerDocument, elem ); - - if ( jQuery.support.html5Clone || jQuery.isXMLDoc(elem) || !rnoshimcache.test( "<" + elem.nodeName + ">" ) ) { - clone = elem.cloneNode( true ); - - // IE<=8 does not properly clone detached, unknown element nodes - } else { - fragmentDiv.innerHTML = elem.outerHTML; - fragmentDiv.removeChild( clone = fragmentDiv.firstChild ); - } - - if ( (!jQuery.support.noCloneEvent || !jQuery.support.noCloneChecked) && - (elem.nodeType === 1 || elem.nodeType === 11) && !jQuery.isXMLDoc(elem) ) { - - // We eschew Sizzle here for performance reasons: http://jsperf.com/getall-vs-sizzle/2 - destElements = getAll( clone ); - srcElements = getAll( elem ); - - // Fix all IE cloning issues - for ( i = 0; (node = srcElements[i]) != null; ++i ) { - // Ensure that the destination node is not null; Fixes #9587 - if ( destElements[i] ) { - fixCloneNodeIssues( node, destElements[i] ); - } - } - } - - // Copy the events from the original to the clone - if ( dataAndEvents ) { - if ( deepDataAndEvents ) { - srcElements = srcElements || getAll( elem ); - destElements = destElements || getAll( clone ); - - for ( i = 0; (node = srcElements[i]) != null; i++ ) { - cloneCopyEvent( node, 
destElements[i] ); - } - } else { - cloneCopyEvent( elem, clone ); - } - } - - // Preserve script evaluation history - destElements = getAll( clone, "script" ); - if ( destElements.length > 0 ) { - setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); - } - - destElements = srcElements = node = null; - - // Return the cloned set - return clone; - }, - - buildFragment: function( elems, context, scripts, selection ) { - var j, elem, contains, - tmp, tag, tbody, wrap, - l = elems.length, - - // Ensure a safe fragment - safe = createSafeFragment( context ), - - nodes = [], - i = 0; - - for ( ; i < l; i++ ) { - elem = elems[ i ]; - - if ( elem || elem === 0 ) { - - // Add nodes directly - if ( jQuery.type( elem ) === "object" ) { - jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem ); - - // Convert non-html into a text node - } else if ( !rhtml.test( elem ) ) { - nodes.push( context.createTextNode( elem ) ); - - // Convert html into DOM nodes - } else { - tmp = tmp || safe.appendChild( context.createElement("div") ); - - // Deserialize a standard representation - tag = ( rtagName.exec( elem ) || ["", ""] )[1].toLowerCase(); - wrap = wrapMap[ tag ] || wrapMap._default; - - tmp.innerHTML = wrap[1] + elem.replace( rxhtmlTag, "<$1>" ) + wrap[2]; - - // Descend through wrappers to the right content - j = wrap[0]; - while ( j-- ) { - tmp = tmp.lastChild; - } - - // Manually add leading whitespace removed by IE - if ( !jQuery.support.leadingWhitespace && rleadingWhitespace.test( elem ) ) { - nodes.push( context.createTextNode( rleadingWhitespace.exec( elem )[0] ) ); - } - - // Remove IE's autoinserted from table fragments - if ( !jQuery.support.tbody ) { - - // String was a , *may* have spurious - elem = tag === "table" && !rtbody.test( elem ) ? - tmp.firstChild : - - // String was a bare or - wrap[1] === "
    " && !rtbody.test( elem ) ? - tmp : - 0; - - j = elem && elem.childNodes.length; - while ( j-- ) { - if ( jQuery.nodeName( (tbody = elem.childNodes[j]), "tbody" ) && !tbody.childNodes.length ) { - elem.removeChild( tbody ); - } - } - } - - jQuery.merge( nodes, tmp.childNodes ); - - // Fix #12392 for WebKit and IE > 9 - tmp.textContent = ""; - - // Fix #12392 for oldIE - while ( tmp.firstChild ) { - tmp.removeChild( tmp.firstChild ); - } - - // Remember the top-level container for proper cleanup - tmp = safe.lastChild; - } - } - } - - // Fix #11356: Clear elements from fragment - if ( tmp ) { - safe.removeChild( tmp ); - } - - // Reset defaultChecked for any radios and checkboxes - // about to be appended to the DOM in IE 6/7 (#8060) - if ( !jQuery.support.appendChecked ) { - jQuery.grep( getAll( nodes, "input" ), fixDefaultChecked ); - } - - i = 0; - while ( (elem = nodes[ i++ ]) ) { - - // #4087 - If origin and destination elements are the same, and this is - // that element, do not do anything - if ( selection && jQuery.inArray( elem, selection ) !== -1 ) { - continue; - } - - contains = jQuery.contains( elem.ownerDocument, elem ); - - // Append to fragment - tmp = getAll( safe.appendChild( elem ), "script" ); - - // Preserve script evaluation history - if ( contains ) { - setGlobalEval( tmp ); - } - - // Capture executables - if ( scripts ) { - j = 0; - while ( (elem = tmp[ j++ ]) ) { - if ( rscriptType.test( elem.type || "" ) ) { - scripts.push( elem ); - } - } - } - } - - tmp = null; - - return safe; - }, - - cleanData: function( elems, /* internal */ acceptData ) { - var elem, type, id, data, - i = 0, - internalKey = jQuery.expando, - cache = jQuery.cache, - deleteExpando = jQuery.support.deleteExpando, - special = jQuery.event.special; - - for ( ; (elem = elems[i]) != null; i++ ) { - - if ( acceptData || jQuery.acceptData( elem ) ) { - - id = elem[ internalKey ]; - data = id && cache[ id ]; - - if ( data ) { - if ( data.events ) { - for ( type in 
data.events ) { - if ( special[ type ] ) { - jQuery.event.remove( elem, type ); - - // This is a shortcut to avoid jQuery.event.remove's overhead - } else { - jQuery.removeEvent( elem, type, data.handle ); - } - } - } - - // Remove cache only if it was not already removed by jQuery.event.remove - if ( cache[ id ] ) { - - delete cache[ id ]; - - // IE does not allow us to delete expando properties from nodes, - // nor does it have a removeAttribute function on Document nodes; - // we must handle all of these cases - if ( deleteExpando ) { - delete elem[ internalKey ]; - - } else if ( typeof elem.removeAttribute !== core_strundefined ) { - elem.removeAttribute( internalKey ); - - } else { - elem[ internalKey ] = null; - } - - core_deletedIds.push( id ); - } - } - } - } - }, - - _evalUrl: function( url ) { - return jQuery.ajax({ - url: url, - type: "GET", - dataType: "script", - async: false, - global: false, - "throws": true - }); - } -}); -jQuery.fn.extend({ - wrapAll: function( html ) { - if ( jQuery.isFunction( html ) ) { - return this.each(function(i) { - jQuery(this).wrapAll( html.call(this, i) ); - }); - } - - if ( this[0] ) { - // The elements to wrap the target around - var wrap = jQuery( html, this[0].ownerDocument ).eq(0).clone(true); - - if ( this[0].parentNode ) { - wrap.insertBefore( this[0] ); - } - - wrap.map(function() { - var elem = this; - - while ( elem.firstChild && elem.firstChild.nodeType === 1 ) { - elem = elem.firstChild; - } - - return elem; - }).append( this ); - } - - return this; - }, - - wrapInner: function( html ) { - if ( jQuery.isFunction( html ) ) { - return this.each(function(i) { - jQuery(this).wrapInner( html.call(this, i) ); - }); - } - - return this.each(function() { - var self = jQuery( this ), - contents = self.contents(); - - if ( contents.length ) { - contents.wrapAll( html ); - - } else { - self.append( html ); - } - }); - }, - - wrap: function( html ) { - var isFunction = jQuery.isFunction( html ); - - return 
this.each(function(i) { - jQuery( this ).wrapAll( isFunction ? html.call(this, i) : html ); - }); - }, - - unwrap: function() { - return this.parent().each(function() { - if ( !jQuery.nodeName( this, "body" ) ) { - jQuery( this ).replaceWith( this.childNodes ); - } - }).end(); - } -}); -var iframe, getStyles, curCSS, - ralpha = /alpha\([^)]*\)/i, - ropacity = /opacity\s*=\s*([^)]*)/, - rposition = /^(top|right|bottom|left)$/, - // swappable if display is none or starts with table except "table", "table-cell", or "table-caption" - // see here for display values: https://developer.mozilla.org/en-US/docs/CSS/display - rdisplayswap = /^(none|table(?!-c[ea]).+)/, - rmargin = /^margin/, - rnumsplit = new RegExp( "^(" + core_pnum + ")(.*)$", "i" ), - rnumnonpx = new RegExp( "^(" + core_pnum + ")(?!px)[a-z%]+$", "i" ), - rrelNum = new RegExp( "^([+-])=(" + core_pnum + ")", "i" ), - elemdisplay = { BODY: "block" }, - - cssShow = { position: "absolute", visibility: "hidden", display: "block" }, - cssNormalTransform = { - letterSpacing: 0, - fontWeight: 400 - }, - - cssExpand = [ "Top", "Right", "Bottom", "Left" ], - cssPrefixes = [ "Webkit", "O", "Moz", "ms" ]; - -// return a css property mapped to a potentially vendor prefixed property -function vendorPropName( style, name ) { - - // shortcut for names that are not vendor prefixed - if ( name in style ) { - return name; - } - - // check for vendor prefixed names - var capName = name.charAt(0).toUpperCase() + name.slice(1), - origName = name, - i = cssPrefixes.length; - - while ( i-- ) { - name = cssPrefixes[ i ] + capName; - if ( name in style ) { - return name; - } - } - - return origName; -} - -function isHidden( elem, el ) { - // isHidden might be called from jQuery#filter function; - // in that case, element will be second argument - elem = el || elem; - return jQuery.css( elem, "display" ) === "none" || !jQuery.contains( elem.ownerDocument, elem ); -} - -function showHide( elements, show ) { - var display, elem, 
hidden, - values = [], - index = 0, - length = elements.length; - - for ( ; index < length; index++ ) { - elem = elements[ index ]; - if ( !elem.style ) { - continue; - } - - values[ index ] = jQuery._data( elem, "olddisplay" ); - display = elem.style.display; - if ( show ) { - // Reset the inline display of this element to learn if it is - // being hidden by cascaded rules or not - if ( !values[ index ] && display === "none" ) { - elem.style.display = ""; - } - - // Set elements which have been overridden with display: none - // in a stylesheet to whatever the default browser style is - // for such an element - if ( elem.style.display === "" && isHidden( elem ) ) { - values[ index ] = jQuery._data( elem, "olddisplay", css_defaultDisplay(elem.nodeName) ); - } - } else { - - if ( !values[ index ] ) { - hidden = isHidden( elem ); - - if ( display && display !== "none" || !hidden ) { - jQuery._data( elem, "olddisplay", hidden ? display : jQuery.css( elem, "display" ) ); - } - } - } - } - - // Set the display of most of the elements in a second loop - // to avoid the constant reflow - for ( index = 0; index < length; index++ ) { - elem = elements[ index ]; - if ( !elem.style ) { - continue; - } - if ( !show || elem.style.display === "none" || elem.style.display === "" ) { - elem.style.display = show ? values[ index ] || "" : "none"; - } - } - - return elements; -} - -jQuery.fn.extend({ - css: function( name, value ) { - return jQuery.access( this, function( elem, name, value ) { - var len, styles, - map = {}, - i = 0; - - if ( jQuery.isArray( name ) ) { - styles = getStyles( elem ); - len = name.length; - - for ( ; i < len; i++ ) { - map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); - } - - return map; - } - - return value !== undefined ? 
- jQuery.style( elem, name, value ) : - jQuery.css( elem, name ); - }, name, value, arguments.length > 1 ); - }, - show: function() { - return showHide( this, true ); - }, - hide: function() { - return showHide( this ); - }, - toggle: function( state ) { - if ( typeof state === "boolean" ) { - return state ? this.show() : this.hide(); - } - - return this.each(function() { - if ( isHidden( this ) ) { - jQuery( this ).show(); - } else { - jQuery( this ).hide(); - } - }); - } -}); - -jQuery.extend({ - // Add in style property hooks for overriding the default - // behavior of getting and setting a style property - cssHooks: { - opacity: { - get: function( elem, computed ) { - if ( computed ) { - // We should always get a number back from opacity - var ret = curCSS( elem, "opacity" ); - return ret === "" ? "1" : ret; - } - } - } - }, - - // Don't automatically add "px" to these possibly-unitless properties - cssNumber: { - "columnCount": true, - "fillOpacity": true, - "fontWeight": true, - "lineHeight": true, - "opacity": true, - "order": true, - "orphans": true, - "widows": true, - "zIndex": true, - "zoom": true - }, - - // Add in properties whose names you wish to fix before - // setting or getting the value - cssProps: { - // normalize float css property - "float": jQuery.support.cssFloat ? 
"cssFloat" : "styleFloat" - }, - - // Get and set the style property on a DOM Node - style: function( elem, name, value, extra ) { - // Don't set styles on text and comment nodes - if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { - return; - } - - // Make sure that we're working with the right name - var ret, type, hooks, - origName = jQuery.camelCase( name ), - style = elem.style; - - name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( style, origName ) ); - - // gets hook for the prefixed version - // followed by the unprefixed version - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // Check if we're setting a value - if ( value !== undefined ) { - type = typeof value; - - // convert relative number strings (+= or -=) to relative numbers. #7345 - if ( type === "string" && (ret = rrelNum.exec( value )) ) { - value = ( ret[1] + 1 ) * ret[2] + parseFloat( jQuery.css( elem, name ) ); - // Fixes bug #9237 - type = "number"; - } - - // Make sure that NaN and null values aren't set. 
See: #7116 - if ( value == null || type === "number" && isNaN( value ) ) { - return; - } - - // If a number was passed in, add 'px' to the (except for certain CSS properties) - if ( type === "number" && !jQuery.cssNumber[ origName ] ) { - value += "px"; - } - - // Fixes #8908, it can be done more correctly by specifing setters in cssHooks, - // but it would mean to define eight (for every problematic property) identical functions - if ( !jQuery.support.clearCloneStyle && value === "" && name.indexOf("background") === 0 ) { - style[ name ] = "inherit"; - } - - // If a hook was provided, use that value, otherwise just set the specified value - if ( !hooks || !("set" in hooks) || (value = hooks.set( elem, value, extra )) !== undefined ) { - - // Wrapped to prevent IE from throwing errors when 'invalid' values are provided - // Fixes bug #5509 - try { - style[ name ] = value; - } catch(e) {} - } - - } else { - // If a hook was provided get the non-computed value from there - if ( hooks && "get" in hooks && (ret = hooks.get( elem, false, extra )) !== undefined ) { - return ret; - } - - // Otherwise just get the value from the style object - return style[ name ]; - } - }, - - css: function( elem, name, extra, styles ) { - var num, val, hooks, - origName = jQuery.camelCase( name ); - - // Make sure that we're working with the right name - name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( elem.style, origName ) ); - - // gets hook for the prefixed version - // followed by the unprefixed version - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // If a hook was provided get the computed value from there - if ( hooks && "get" in hooks ) { - val = hooks.get( elem, true, extra ); - } - - // Otherwise, if a way to get the computed value exists, use that - if ( val === undefined ) { - val = curCSS( elem, name, styles ); - } - - //convert "normal" to computed value - if ( val === "normal" && name in cssNormalTransform ) { - 
val = cssNormalTransform[ name ]; - } - - // Return, converting to number if forced or a qualifier was provided and val looks numeric - if ( extra === "" || extra ) { - num = parseFloat( val ); - return extra === true || jQuery.isNumeric( num ) ? num || 0 : val; - } - return val; - } -}); - -// NOTE: we've included the "window" in window.getComputedStyle -// because jsdom on node.js will break without it. -if ( window.getComputedStyle ) { - getStyles = function( elem ) { - return window.getComputedStyle( elem, null ); - }; - - curCSS = function( elem, name, _computed ) { - var width, minWidth, maxWidth, - computed = _computed || getStyles( elem ), - - // getPropertyValue is only needed for .css('filter') in IE9, see #12537 - ret = computed ? computed.getPropertyValue( name ) || computed[ name ] : undefined, - style = elem.style; - - if ( computed ) { - - if ( ret === "" && !jQuery.contains( elem.ownerDocument, elem ) ) { - ret = jQuery.style( elem, name ); - } - - // A tribute to the "awesome hack by Dean Edwards" - // Chrome < 17 and Safari 5.0 uses "computed value" instead of "used value" for margin-right - // Safari 5.1.7 (at least) returns percentage for a larger set of values, but width seems to be reliably pixels - // this is against the CSSOM draft spec: http://dev.w3.org/csswg/cssom/#resolved-values - if ( rnumnonpx.test( ret ) && rmargin.test( name ) ) { - - // Remember the original values - width = style.width; - minWidth = style.minWidth; - maxWidth = style.maxWidth; - - // Put in the new values to get a computed value out - style.minWidth = style.maxWidth = style.width = ret; - ret = computed.width; - - // Revert the changed values - style.width = width; - style.minWidth = minWidth; - style.maxWidth = maxWidth; - } - } - - return ret; - }; -} else if ( document.documentElement.currentStyle ) { - getStyles = function( elem ) { - return elem.currentStyle; - }; - - curCSS = function( elem, name, _computed ) { - var left, rs, rsLeft, - computed = _computed 
|| getStyles( elem ), - ret = computed ? computed[ name ] : undefined, - style = elem.style; - - // Avoid setting ret to empty string here - // so we don't default to auto - if ( ret == null && style && style[ name ] ) { - ret = style[ name ]; - } - - // From the awesome hack by Dean Edwards - // http://erik.eae.net/archives/2007/07/27/18.54.15/#comment-102291 - - // If we're not dealing with a regular pixel number - // but a number that has a weird ending, we need to convert it to pixels - // but not position css attributes, as those are proportional to the parent element instead - // and we can't measure the parent instead because it might trigger a "stacking dolls" problem - if ( rnumnonpx.test( ret ) && !rposition.test( name ) ) { - - // Remember the original values - left = style.left; - rs = elem.runtimeStyle; - rsLeft = rs && rs.left; - - // Put in the new values to get a computed value out - if ( rsLeft ) { - rs.left = elem.currentStyle.left; - } - style.left = name === "fontSize" ? "1em" : ret; - ret = style.pixelLeft + "px"; - - // Revert the changed values - style.left = left; - if ( rsLeft ) { - rs.left = rsLeft; - } - } - - return ret === "" ? "auto" : ret; - }; -} - -function setPositiveNumber( elem, value, subtract ) { - var matches = rnumsplit.exec( value ); - return matches ? - // Guard against undefined "subtract", e.g., when used as in cssHooks - Math.max( 0, matches[ 1 ] - ( subtract || 0 ) ) + ( matches[ 2 ] || "px" ) : - value; -} - -function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) { - var i = extra === ( isBorderBox ? "border" : "content" ) ? - // If we already have the right measurement, avoid augmentation - 4 : - // Otherwise initialize for horizontal or vertical properties - name === "width" ? 
1 : 0, - - val = 0; - - for ( ; i < 4; i += 2 ) { - // both box models exclude margin, so add it if we want it - if ( extra === "margin" ) { - val += jQuery.css( elem, extra + cssExpand[ i ], true, styles ); - } - - if ( isBorderBox ) { - // border-box includes padding, so remove it if we want content - if ( extra === "content" ) { - val -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - } - - // at this point, extra isn't border nor margin, so remove border - if ( extra !== "margin" ) { - val -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } else { - // at this point, extra isn't content, so add padding - val += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - - // at this point, extra isn't content nor padding, so add border - if ( extra !== "padding" ) { - val += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } - } - - return val; -} - -function getWidthOrHeight( elem, name, extra ) { - - // Start with offset property, which is equivalent to the border-box value - var valueIsBorderBox = true, - val = name === "width" ? elem.offsetWidth : elem.offsetHeight, - styles = getStyles( elem ), - isBorderBox = jQuery.support.boxSizing && jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; - - // some non-html elements return undefined for offsetWidth, so check for null/undefined - // svg - https://bugzilla.mozilla.org/show_bug.cgi?id=649285 - // MathML - https://bugzilla.mozilla.org/show_bug.cgi?id=491668 - if ( val <= 0 || val == null ) { - // Fall back to computed then uncomputed css if necessary - val = curCSS( elem, name, styles ); - if ( val < 0 || val == null ) { - val = elem.style[ name ]; - } - - // Computed unit is not pixels. Stop here and return. 
- if ( rnumnonpx.test(val) ) { - return val; - } - - // we need the check for style in case a browser which returns unreliable values - // for getComputedStyle silently falls back to the reliable elem.style - valueIsBorderBox = isBorderBox && ( jQuery.support.boxSizingReliable || val === elem.style[ name ] ); - - // Normalize "", auto, and prepare for extra - val = parseFloat( val ) || 0; - } - - // use the active box-sizing model to add/subtract irrelevant styles - return ( val + - augmentWidthOrHeight( - elem, - name, - extra || ( isBorderBox ? "border" : "content" ), - valueIsBorderBox, - styles - ) - ) + "px"; -} - -// Try to determine the default display value of an element -function css_defaultDisplay( nodeName ) { - var doc = document, - display = elemdisplay[ nodeName ]; - - if ( !display ) { - display = actualDisplay( nodeName, doc ); - - // If the simple way fails, read from inside an iframe - if ( display === "none" || !display ) { - // Use the already-created iframe if possible - iframe = ( iframe || - jQuery(" - - - - diff --git a/docs/reference/themes/mongodb/layouts/partials/assets/css.html b/docs/reference/themes/mongodb/layouts/partials/assets/css.html deleted file mode 100644 index 6a39d8ff648..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/assets/css.html +++ /dev/null @@ -1,6 +0,0 @@ - - - - - -{{ partial "assets/cssExtras.html" . 
}} diff --git a/docs/reference/themes/mongodb/layouts/partials/assets/cssExtras.html b/docs/reference/themes/mongodb/layouts/partials/assets/cssExtras.html deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/docs/reference/themes/mongodb/layouts/partials/assets/javascriptExtras.html b/docs/reference/themes/mongodb/layouts/partials/assets/javascriptExtras.html deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/docs/reference/themes/mongodb/layouts/partials/assets/javascripts.html b/docs/reference/themes/mongodb/layouts/partials/assets/javascripts.html deleted file mode 100644 index fcf67f0e4e4..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/assets/javascripts.html +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - -{{ partial "assets/javascriptExtras.html" . }} diff --git a/docs/reference/themes/mongodb/layouts/partials/footer.html b/docs/reference/themes/mongodb/layouts/partials/footer.html deleted file mode 100644 index c632933ed79..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/footer.html +++ /dev/null @@ -1,38 +0,0 @@ -{{- if .IsPage -}} - -{{- $menuPrev := $.Scratch.Get "menu.Prev" -}} -{{- $menuNext := $.Scratch.Get "menu.Next" -}} - -
    - {{- with $menuPrev -}} - - {{- end -}} - {{- with $menuNext -}} -
    - - {{- end -}} -
    -
    -{{- end -}} - - -{{- partial "rightColumn.html" . -}} - - - - - - - - - -{{- partial "assets/javascripts.html" . -}} -{{- partial "assets/analytics.html" . -}} - - diff --git a/docs/reference/themes/mongodb/layouts/partials/header.html b/docs/reference/themes/mongodb/layouts/partials/header.html deleted file mode 100644 index e29afe1b4af..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/header.html +++ /dev/null @@ -1,27 +0,0 @@ - - - - {{- partial "meta.html" -}} - - - {{.Title}} - - {{- partial "assets/css.html" . -}} - - - - -
    - - {{- partial "header/main.html" . -}} - - - {{- partial "menu.html" . -}} - - -
    -
    -
    -
    -
    -
    diff --git a/docs/reference/themes/mongodb/layouts/partials/header/contentHeader.html b/docs/reference/themes/mongodb/layouts/partials/header/contentHeader.html deleted file mode 100644 index f9e8246616d..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/header/contentHeader.html +++ /dev/null @@ -1,25 +0,0 @@ -{{- if .IsPage -}} -{{- $File := .File -}}{{ with $File.LogicalName }} {{ $srcref := add (add "docs/reference/content/" $File.Dir) $File.LogicalName }} - -{{- end -}} -{{- end -}} -{{- $menuItemL6 := $.Scratch.Get "menu.Item.L6" -}} -{{- $menuItemL5 := $.Scratch.Get "menu.Item.L5" -}} -{{- $menuItemL4 := $.Scratch.Get "menu.Item.L4" -}} -{{- $menuItemL3 := $.Scratch.Get "menu.Item.L3" -}} -{{- $menuItemL2 := $.Scratch.Get "menu.Item.L2" -}} -{{- $menuItemL1 := $.Scratch.Get "menu.Item.L1" -}} -{{- $menuItemL0 := $.Scratch.Get "menu.Item.L0" -}} -{{- if $menuItemL0 -}} -
    -
      - {{- with $menuItemL6 -}}
    • {{.Name}}
    • {{- end -}} - {{- with $menuItemL5 -}}
    • {{.Name}}
    • {{- end -}} - {{- with $menuItemL4 -}}
    • {{.Name}}
    • {{- end -}} - {{- with $menuItemL3 -}}
    • {{.Name}}
    • {{- end -}} - {{- with $menuItemL2 -}}
    • {{.Name}}
    • {{- end -}} - {{- with $menuItemL1 -}}
    • {{.Name}}
    • {{- end -}} -
    • {{ $menuItemL0.Name }}
    • -
    -
    -{{- end -}} diff --git a/docs/reference/themes/mongodb/layouts/partials/header/main.html b/docs/reference/themes/mongodb/layouts/partials/header/main.html deleted file mode 100644 index e37d07bc5f9..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/header/main.html +++ /dev/null @@ -1,16 +0,0 @@ - diff --git a/docs/reference/themes/mongodb/layouts/partials/header/search.html b/docs/reference/themes/mongodb/layouts/partials/header/search.html deleted file mode 100644 index ac41c4db1b2..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/header/search.html +++ /dev/null @@ -1,8 +0,0 @@ - diff --git a/docs/reference/themes/mongodb/layouts/partials/header/topRight.html b/docs/reference/themes/mongodb/layouts/partials/header/topRight.html deleted file mode 100644 index 6f1101c50d8..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/header/topRight.html +++ /dev/null @@ -1,10 +0,0 @@ -
    - -
    diff --git a/docs/reference/themes/mongodb/layouts/partials/menu.html b/docs/reference/themes/mongodb/layouts/partials/menu.html deleted file mode 100644 index 515ebe2fdf7..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/menu.html +++ /dev/null @@ -1,52 +0,0 @@ - - - -{{- partial "menu/options.html" . -}} diff --git a/docs/reference/themes/mongodb/layouts/partials/menu/currentItem.html b/docs/reference/themes/mongodb/layouts/partials/menu/currentItem.html deleted file mode 100644 index bfe5311981e..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/menu/currentItem.html +++ /dev/null @@ -1,75 +0,0 @@ -{{- $currentNode := . -}} -{{- $menuItemL6 := $.Scratch.Get "menu.Item.L6" -}} -{{- $menuItemL5 := $.Scratch.Get "menu.Item.L5" -}} -{{- $menuItemL4 := $.Scratch.Get "menu.Item.L4" -}} -{{- $menuItemL3 := $.Scratch.Get "menu.Item.L3" -}} -{{- $menuItemL2 := $.Scratch.Get "menu.Item.L2" -}} -{{- $menuItemL1 := $.Scratch.Get "menu.Item.L1" -}} -{{- $menuItem := $.Scratch.Get "menu.Item" -}} - -{{- if $currentNode.IsMenuCurrent "main" $menuItem -}} - {{- $.Scratch.Set "menu.Found" true -}} - {{- $.Scratch.Set "menu.Item.L6" false -}} - {{- $.Scratch.Set "menu.Item.L5" false -}} - {{- $.Scratch.Set "menu.Item.L4" false -}} - {{- $.Scratch.Set "menu.Item.L3" false -}} - {{- $.Scratch.Set "menu.Item.L2" false -}} - {{- $.Scratch.Set "menu.Item.L1" false -}} - {{- $.Scratch.Set "menu.Item.L0" $menuItem -}} - - {{- if $menuItemL1 -}} - {{- if eq $menuItemL1.Identifier $menuItem.Parent -}} - {{- $.Scratch.Set "menu.Item.L1" $menuItemL1 -}} - {{- if $menuItemL2 -}} - {{- if eq $menuItemL2.Identifier $menuItemL1.Parent -}} - {{- $.Scratch.Set "menu.Item.L2" $menuItemL2 -}} - {{- if $menuItemL3 -}} - {{- if eq $menuItemL3.Identifier $menuItemL2.Parent -}} - {{- $.Scratch.Set "menu.Item.L3" $menuItemL3 -}} - {{- if $menuItemL4 -}} - {{- if eq $menuItemL4.Identifier $menuItemL3.Parent -}} - {{- $.Scratch.Set "menu.Item.L4" $menuItemL4 -}} - {{- if 
$menuItemL5 -}} - {{- if eq $menuItemL5.Identifier $menuItemL4.Parent -}} - {{- $.Scratch.Set "menu.Item.L5" $menuItemL5 -}} - {{- if $menuItemL6 -}} - {{- if eq $menuItemL6.Identifier $menuItemL5.Parent -}} - {{- $.Scratch.Set "menu.Item.L6" $menuItemL6 -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} - {{- end -}} -{{- else -}} - {{- if $menuItem.HasChildren -}} - {{- $found := $.Scratch.Get "menu.Found" -}} - {{- range $menuItem.Children -}} - {{- $.Scratch.Set "menu.Item" . -}} - {{- if eq ($.Scratch.Get "menu.Found") false -}} - {{- $.Scratch.Set "menu.Item.L6" $menuItemL5 -}} - {{- $.Scratch.Set "menu.Item.L5" $menuItemL4 -}} - {{- $.Scratch.Set "menu.Item.L4" $menuItemL3 -}} - {{- $.Scratch.Set "menu.Item.L3" $menuItemL2 -}} - {{- $.Scratch.Set "menu.Item.L2" $menuItemL1 -}} - {{- $.Scratch.Set "menu.Item.L1" $menuItem -}} - {{- $.Scratch.Set "menu.Item" . -}} - {{- partial "menu/currentItem.html" $currentNode -}} - {{- if eq ($.Scratch.Get "menu.Found") false -}} - {{- $.Scratch.Set "menu.Prev" . -}} - {{- end -}} - {{- else if and (eq $found false) (eq ($.Scratch.Get "menu.Next") false) -}} - {{- $.Scratch.Set "menu.Next" . -}} - {{- end -}} - {{- end -}} - {{- if eq ($.Scratch.Get "menu.Found") false -}} - {{- $.Scratch.Set "menu.Prev" false -}} - {{- end -}} - {{- end -}} -{{- end -}} diff --git a/docs/reference/themes/mongodb/layouts/partials/menu/footer.html b/docs/reference/themes/mongodb/layouts/partials/menu/footer.html deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/docs/reference/themes/mongodb/layouts/partials/menu/item.html b/docs/reference/themes/mongodb/layouts/partials/menu/item.html deleted file mode 100644 index 319c599abd8..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/menu/item.html +++ /dev/null @@ -1,54 +0,0 @@ -{{- $currentNode := . 
-}} -{{- $menuItem := $.Scratch.Get "menu.Item" -}} -{{- $menuNodeLevel := $.Scratch.Get "menu.NodeLevel" -}} - -{{- $menuItemL6 := $.Scratch.Get "menu.Item.L6" -}} -{{- $menuItemL5 := $.Scratch.Get "menu.Item.L5" -}} -{{- $menuItemL4 := $.Scratch.Get "menu.Item.L4" -}} -{{- $menuItemL3 := $.Scratch.Get "menu.Item.L3" -}} -{{- $menuItemL2 := $.Scratch.Get "menu.Item.L2" -}} -{{- $menuItemL1 := $.Scratch.Get "menu.Item.L1" -}} -{{- $menuItemL0 := $.Scratch.Get "menu.Item.L0" -}} - -{{- $.Scratch.Set "menu.isCurrent" false -}} -{{- if eq $menuItem $menuItemL0 -}} - {{- $.Scratch.Set "menu.isCurrent" true -}} -{{- else if eq $menuItem $menuItemL1 -}} - {{- $.Scratch.Set "menu.isCurrent" true -}} -{{- else if eq $menuItem $menuItemL2 -}} - {{- $.Scratch.Set "menu.isCurrent" true -}} -{{- else if eq $menuItem $menuItemL3 -}} - {{- $.Scratch.Set "menu.isCurrent" true -}} -{{- else if eq $menuItem $menuItemL4 -}} - {{- $.Scratch.Set "menu.isCurrent" true -}} -{{- else if eq $menuItem $menuItemL5 -}} - {{- $.Scratch.Set "menu.isCurrent" true -}} -{{- else if eq $menuItem $menuItemL6 -}} - {{- $.Scratch.Set "menu.isCurrent" true -}} -{{- end -}} -{{- $isCurrent := $.Scratch.Get "menu.isCurrent" -}} - -{{- if $menuItem.HasChildren -}} -
  • - - {{- $menuItem.Pre -}} - {{- $menuItem.Name -}} - {{- if lt $menuNodeLevel 2 -}}{{- end -}} - -
      - {{- range $menuItem.Children -}} - {{- $menuItem := $.Scratch.Set "menu.Item" . -}} - {{- $menuNodeLevel := $.Scratch.Add "menu.NodeLevel" 1 -}} - {{- partial "menu/item.html" $currentNode -}} - {{- $menuNodeLevel := $.Scratch.Add "menu.NodeLevel" -1 -}} - {{- end -}} -
    -
  • -{{- else -}} -
  • - - {{- $menuItem.Pre -}} - {{- $menuItem.Name -}} - -
  • -{{- end -}} diff --git a/docs/reference/themes/mongodb/layouts/partials/menu/options.html b/docs/reference/themes/mongodb/layouts/partials/menu/options.html deleted file mode 100644 index fa7a3a02309..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/menu/options.html +++ /dev/null @@ -1,28 +0,0 @@ - diff --git a/docs/reference/themes/mongodb/layouts/partials/meta.html b/docs/reference/themes/mongodb/layouts/partials/meta.html deleted file mode 100644 index 408b03e3337..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/meta.html +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/docs/reference/themes/mongodb/layouts/partials/rightColumn.html b/docs/reference/themes/mongodb/layouts/partials/rightColumn.html deleted file mode 100644 index 5d872156b21..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/rightColumn.html +++ /dev/null @@ -1,10 +0,0 @@ -
    -
    - {{- if .IsPage -}} -
    - On this page - {{- .TableOfContents -}} -
    - {{- end -}} -
    -
    diff --git a/docs/reference/themes/mongodb/layouts/partials/welcome.html b/docs/reference/themes/mongodb/layouts/partials/welcome.html deleted file mode 100644 index 0eccdb42a93..00000000000 --- a/docs/reference/themes/mongodb/layouts/partials/welcome.html +++ /dev/null @@ -1,32 +0,0 @@ -

    Welcome to the hugo mongodb docs theme.

    - -

    You have two choices when it comes to managing this front page.

    - -

    1. Markdown:

    - -

    First create your own index page in content: hugo new ./content/index.md

    - -

    Set a type in the “front matter” to index eg:

    - -
    +++
    -date = "2015-03-17T15:36:56Z"
    -draft = false
    -title = "index"
    -type = "index"
    -+++
    -
    -## Add your markdown content to your frontpage.
    -
    -Hello welcome to my driver docs
    -
    - -

    2. HTML

    - -

    Create your own layouts\index.html eg:

    - -
    {{ partial "header.html" . }}
    -
    -<h2>Add your HTML content to your frontpage.</h2>
    -
    -{{ partial "footer.html" . }}
    -
    diff --git a/docs/reference/themes/mongodb/layouts/shortcodes/docsref.html b/docs/reference/themes/mongodb/layouts/shortcodes/docsref.html deleted file mode 100644 index 672197db9a8..00000000000 --- a/docs/reference/themes/mongodb/layouts/shortcodes/docsref.html +++ /dev/null @@ -1 +0,0 @@ -http://docs.mongodb.org/manual{{ if eq (hasPrefix (.Get 0) "/") false }}/{{ end }}{{ .Get 0 }} diff --git a/docs/reference/themes/mongodb/layouts/shortcodes/note.html b/docs/reference/themes/mongodb/layouts/shortcodes/note.html deleted file mode 100644 index 0988a049a3b..00000000000 --- a/docs/reference/themes/mongodb/layouts/shortcodes/note.html +++ /dev/null @@ -1,4 +0,0 @@ -
    -
    {{if .Get "class"}}{{.Get "class"}}{{else}}Note{{end}}
    -{{ .Inner }} -
    diff --git a/docs/reference/themes/mongodb/layouts/shortcodes/srcref.html b/docs/reference/themes/mongodb/layouts/shortcodes/srcref.html deleted file mode 100644 index 1f7dfc14567..00000000000 --- a/docs/reference/themes/mongodb/layouts/shortcodes/srcref.html +++ /dev/null @@ -1 +0,0 @@ -https://github.com/mongodb/{{ .Page.Site.Data.mongodb.githubRepo }}/blob/{{ .Page.Site.Data.mongodb.githubBranch }}/{{ if eq (in (.Get 0) "src/") false }}src/{{ end }}{{ .Get 0 }} diff --git a/docs/reference/themes/mongodb/static/.nojekyll b/docs/reference/themes/mongodb/static/.nojekyll deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/docs/reference/themes/mongodb/static/apple-touch-icon.png b/docs/reference/themes/mongodb/static/apple-touch-icon.png deleted file mode 100644 index 9f98bad32ca..00000000000 Binary files a/docs/reference/themes/mongodb/static/apple-touch-icon.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/css/basic.css b/docs/reference/themes/mongodb/static/css/basic.css deleted file mode 100644 index 8971e11aea0..00000000000 --- a/docs/reference/themes/mongodb/static/css/basic.css +++ /dev/null @@ -1,537 +0,0 @@ -/* - * basic.css - * ~~~~~~~~~ - * - * Sphinx stylesheet -- basic theme. - * - * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. 
- * - */ - -/* -- main layout ----------------------------------------------------------- */ - -div.clearer { - clear: both; -} - -/* -- relbar ---------------------------------------------------------------- */ - -div.related { - width: 100%; - font-size: 90%; -} - -div.related h3 { - display: none; -} - -div.related ul { - margin: 0; - padding: 0 0 0 10px; - list-style: none; -} - -div.related li { - display: inline; -} - -div.related li.right { - float: right; - margin-right: 5px; -} - -/* -- sidebar --------------------------------------------------------------- */ - -div.sphinxsidebarwrapper { - padding: 10px 5px 0 10px; -} - -div.sphinxsidebar { - font-size: 90%; -} - -div.sphinxsidebar ul { - list-style: none; -} - -div.sphinxsidebar ul ul, -div.sphinxsidebar ul.want-points { - margin-left: 20px; - list-style: square; -} - -div.sphinxsidebar ul ul { - margin-top: 0; - margin-bottom: 0; -} - -div.sphinxsidebar form { - margin-top: 10px; -} - -div.sphinxsidebar input { - border: 1px solid #98dbcc; - font-family: sans-serif; - font-size: 1em; -} - -div.sphinxsidebar #searchbox input[type="text"] { - width: 170px; -} - -div.sphinxsidebar #searchbox input[type="submit"] { - width: 30px; -} - -img { - border: 0; -} - -/* -- search page ----------------------------------------------------------- */ - -ul.search { - margin: 10px 0 0 20px; - padding: 0; -} - -ul.search li { - padding: 5px 0 5px 20px; - background-image: url(file.png); - background-repeat: no-repeat; - background-position: 0 7px; -} - -ul.search li a { - font-weight: bold; -} - -ul.search li div.context { - color: #888; - margin: 2px 0 0 30px; - text-align: left; -} - -ul.keywordmatches li.goodmatch a { - font-weight: bold; -} - -/* -- index page ------------------------------------------------------------ */ - -table.contentstable { - width: 90%; -} - -table.contentstable p.biglink { - line-height: 150%; -} - -a.biglink { - font-size: 1.3em; -} - -span.linkdescr { - font-style: italic; - padding-top: 
5px; - font-size: 90%; -} - -/* -- general index --------------------------------------------------------- */ - -table.indextable { - width: 100%; -} - -table.indextable td { - text-align: left; - vertical-align: top; -} - -table.indextable dl, table.indextable dd { - margin-top: 0; - margin-bottom: 0; -} - -table.indextable tr.pcap { - height: 10px; -} - -table.indextable tr.cap { - margin-top: 10px; - background-color: #f2f2f2; -} - -img.toggler { - margin-right: 3px; - margin-top: 3px; - cursor: pointer; -} - -div.modindex-jumpbox { - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; - margin: 1em 0 1em 0; - padding: 0.4em; -} - -div.genindex-jumpbox { - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; - margin: 1em 0 1em 0; - padding: 0.4em; -} - -/* -- general body styles --------------------------------------------------- */ - -a.headerlink { - visibility: hidden; -} - -h1:hover > a.headerlink, -h2:hover > a.headerlink, -h3:hover > a.headerlink, -h4:hover > a.headerlink, -h5:hover > a.headerlink, -h6:hover > a.headerlink, -dt:hover > a.headerlink { - visibility: visible; -} - -div.body p.caption { - text-align: inherit; -} - -div.body td { - text-align: left; -} - -.field-list ul { - padding-left: 1em; -} - -.first { - margin-top: 0 !important; -} - -p.rubric { - margin-top: 30px; - font-weight: bold; -} - -img.align-left, .figure.align-left, object.align-left { - clear: left; - float: left; - margin-right: 1em; -} - -img.align-right, .figure.align-right, object.align-right { - clear: right; - float: right; - margin-left: 1em; -} - -img.align-center, .figure.align-center, object.align-center { - display: block; - margin-left: auto; - margin-right: auto; -} - -.align-left { - text-align: left; -} - -.align-center { - text-align: center; -} - -.align-right { - text-align: right; -} - -/* -- sidebars -------------------------------------------------------------- */ - -div.sidebar { - margin: 0 0 0.5em 1em; - border: 1px solid #ddb; - 
padding: 7px 7px 0 7px; - background-color: #ffe; - width: 40%; - float: right; -} - -p.sidebar-title { - font-weight: bold; -} - -/* -- topics ---------------------------------------------------------------- */ - -div.topic { - border: 1px solid #ccc; - padding: 7px 7px 0 7px; - margin: 10px 0 10px 0; -} - -p.topic-title { - font-size: 1.1em; - font-weight: bold; - margin-top: 10px; -} - -/* -- admonitions ----------------------------------------------------------- */ - -div.admonition { - margin-top: 10px; - margin-bottom: 10px; - padding: 7px; -} - -div.admonition dt { - font-weight: bold; -} - -div.admonition dl { - margin-bottom: 0; -} - -p.admonition-title { - margin: 0px 10px 5px 0px; - font-weight: bold; -} - -div.body p.centered { - text-align: center; - margin-top: 25px; -} - -/* -- tables ---------------------------------------------------------------- */ - -table.docutils { - border: 0; - border-collapse: collapse; -} - -table.docutils td, table.docutils th { - padding: 1px 8px 1px 5px; - border-top: 0; - border-left: 0; - border-right: 0; - border-bottom: 1px solid #aaa; -} - -table.field-list td, table.field-list th { - border: 0 !important; -} - -table.footnote td, table.footnote th { - border: 0 !important; -} - -th { - text-align: left; - padding-right: 5px; -} - -table.citation { - border-left: solid 1px gray; - margin-left: 1px; -} - -table.citation td { - border-bottom: none; -} - -/* -- other body styles ----------------------------------------------------- */ - -ol.arabic { - list-style: decimal; -} - -ol.loweralpha { - list-style: lower-alpha; -} - -ol.upperalpha { - list-style: upper-alpha; -} - -ol.lowerroman { - list-style: lower-roman; -} - -ol.upperroman { - list-style: upper-roman; -} - -dl { - margin-bottom: 15px; -} - -dd p { - margin-top: 0px; -} - -dd ul, dd table { - margin-bottom: 10px; -} - -dd { - margin-top: 3px; - margin-bottom: 10px; - margin-left: 30px; -} - -dt:target, .highlighted { - background-color: #fbe54e; -} - 
-dl.glossary dt { - font-weight: bold; - font-size: 1.1em; -} - -.field-list ul { - margin: 0; - padding-left: 1em; -} - -.field-list p { - margin: 0; -} - -.refcount { - color: #060; -} - -.optional { - font-size: 1.3em; -} - -.versionmodified { - font-style: italic; -} - -.system-message { - background-color: #fda; - padding: 5px; - border: 3px solid red; -} - -.footnote:target { - background-color: #ffa; -} - -.line-block { - display: block; - margin-top: 1em; - margin-bottom: 1em; -} - -.line-block .line-block { - margin-top: 0; - margin-bottom: 0; - margin-left: 1.5em; -} - -.guilabel, .menuselection { - font-family: sans-serif; -} - -.accelerator { - text-decoration: underline; -} - -.classifier { - font-style: oblique; -} - -abbr, acronym { - border-bottom: dotted 1px; - cursor: help; -} - -/* -- code displays --------------------------------------------------------- */ - -pre { - overflow: auto; - overflow-y: hidden; /* fixes display issues on Chrome browsers */ -} - -td.linenos pre { - padding: 5px 0px; - border: 0; - background-color: transparent; - color: #aaa; -} - -table.highlighttable { - margin-left: 0.5em; -} - -table.highlighttable td { - padding: 0 0.5em 0 0.5em; -} - -tt.descname { - background-color: transparent; - font-weight: bold; - font-size: 1.2em; -} - -tt.descclassname { - background-color: transparent; -} - -tt.xref, a tt { - background-color: transparent; - font-weight: bold; -} - -h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { - background-color: transparent; -} - -.viewcode-link { - float: right; -} - -.viewcode-back { - float: right; - font-family: sans-serif; -} - -div.viewcode-block:target { - margin: -1px -10px; - padding: 0 10px; -} - -/* -- math display ---------------------------------------------------------- */ - -img.math { - vertical-align: middle; -} - -div.body div.math p { - text-align: center; -} - -span.eqno { - float: right; -} - -/* -- printout stylesheet --------------------------------------------------- */ - -@media 
print { - div.document, - div.documentwrapper, - div.bodywrapper { - margin: 0 !important; - width: 100%; - } - - div.sphinxsidebar, - div.related, - div.footer, - #top-link { - display: none; - } -} \ No newline at end of file diff --git a/docs/reference/themes/mongodb/static/css/bootstrap-custom.css b/docs/reference/themes/mongodb/static/css/bootstrap-custom.css deleted file mode 100644 index ebd6d579158..00000000000 --- a/docs/reference/themes/mongodb/static/css/bootstrap-custom.css +++ /dev/null @@ -1,5908 +0,0 @@ -/*! - * Bootstrap v3.0.0 - * - * Copyright 2013 Twitter, Inc - * Licensed under the Apache License v2.0 - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Designed and built with all the love in the world by @mdo and @fat. - */ -/*! normalize.css v2.1.0 | MIT License | git.io/normalize */ -article, -aside, -details, -figcaption, -figure, -footer, -header, -hgroup, -main, -nav, -section, -summary { - display: block; -} -audio, -canvas, -video { - display: inline-block; -} -audio:not([controls]) { - display: none; - height: 0; -} -[hidden] { - display: none; -} -html { - font-family: sans-serif; - -webkit-text-size-adjust: 100%; - -ms-text-size-adjust: 100%; -} -body { - margin: 0; -} -a:focus { - outline: thin dotted; -} -a:active, -a:hover { - outline: 0; -} -h1 { - font-size: 2em; - margin: 0.67em 0; -} -abbr[title] { - border-bottom: 1px dotted; -} -b, -strong { - font-weight: bold; -} -dfn { - font-style: italic; -} -hr { - -moz-box-sizing: content-box; - box-sizing: content-box; - height: 0; -} -mark { - background: #ff0; - color: #000; -} -code, -kbd, -pre, -samp { - font-family: monospace, serif; - font-size: 1em; -} -pre { - white-space: pre-wrap; -} -q { - quotes: "\201C" "\201D" "\2018" "\2019"; -} -small { - font-size: 80%; -} -sub, -sup { - font-size: 75%; - line-height: 0; - position: relative; - vertical-align: baseline; -} -sup { - top: -0.5em; -} -sub { - bottom: -0.25em; -} -img { - border: 0; -} -svg:not(:root) { - overflow: 
hidden; -} -figure { - margin: 0; -} -fieldset { - border: 1px solid #c0c0c0; - margin: 0 2px; - padding: 0.35em 0.625em 0.75em; -} -legend { - border: 0; - padding: 0; -} -button, -input, -select, -textarea { - font-family: inherit; - font-size: 100%; - margin: 0; -} -button, -input { - line-height: normal; -} -button, -select { - text-transform: none; -} -button, -html input[type="button"], -input[type="reset"], -input[type="submit"] { - -webkit-appearance: button; - cursor: pointer; -} -button[disabled], -html input[disabled] { - cursor: default; -} -input[type="checkbox"], -input[type="radio"] { - box-sizing: border-box; - padding: 0; -} -input[type="search"] { - -webkit-appearance: textfield; - -moz-box-sizing: content-box; - -webkit-box-sizing: content-box; - box-sizing: content-box; -} -input[type="search"]::-webkit-search-cancel-button, -input[type="search"]::-webkit-search-decoration { - -webkit-appearance: none; -} -button::-moz-focus-inner, -input::-moz-focus-inner { - border: 0; - padding: 0; -} -textarea { - overflow: auto; - vertical-align: top; -} -table { - border-collapse: collapse; - border-spacing: 0; -} -@media print { - * { - text-shadow: none !important; - color: #000 !important; - background: transparent !important; - box-shadow: none !important; - } - a, - a:visited { - text-decoration: underline; - } - a[href]:after { - content: " (" attr(href) ")"; - } - abbr[title]:after { - content: " (" attr(title) ")"; - } - .ir a:after, - a[href^="javascript:"]:after, - a[href^="#"]:after { - content: ""; - } - pre, - blockquote { - border: 1px solid #999; - page-break-inside: avoid; - } - thead { - display: table-header-group; - } - tr, - img { - page-break-inside: avoid; - } - img { - max-width: 100% !important; - } - @page { - margin: 2cm .5cm; - } - p, - h2, - h3 { - orphans: 3; - widows: 3; - } - h2, - h3 { - page-break-after: avoid; - } - .navbar { - display: none; - } - .table td, - .table th { - background-color: #fff !important; - } - .btn > 
.caret, - .dropup > .btn > .caret { - border-top-color: #000 !important; - } - .label { - border: 1px solid #000; - } - .table { - border-collapse: collapse !important; - } - .table-bordered th, - .table-bordered td { - border: 1px solid #ddd !important; - } -} -*, -*:before, -*:after { - -webkit-box-sizing: border-box; - -moz-box-sizing: border-box; - box-sizing: border-box; -} -html { - font-size: 62.5%; - -webkit-tap-highlight-color: rgba(0, 0, 0, 0); -} -body { - font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; - font-size: 14px; - line-height: 1.428571429; - color: #333333; - background-color: #ffffff; -} -input, -button, -select, -textarea { - font-family: inherit; - font-size: inherit; - line-height: inherit; -} -button, -input, -select[multiple], -textarea { - background-image: none; -} -a { - color: #428bca; - text-decoration: none; -} -a:hover, -a:focus { - color: #2a6496; - text-decoration: underline; -} -a:focus { - outline: thin dotted #333; - outline: 5px auto -webkit-focus-ring-color; - outline-offset: -2px; -} -img { - vertical-align: middle; -} -.img-responsive { - display: block; - max-width: 100%; - height: auto; -} -.img-rounded { - border-radius: 6px; -} -.img-thumbnail { - padding: 4px; - line-height: 1.428571429; - background-color: #ffffff; - border: 1px solid #dddddd; - border-radius: 4px; - -webkit-transition: all 0.2s ease-in-out; - transition: all 0.2s ease-in-out; - display: inline-block; - max-width: 100%; - height: auto; -} -.img-circle { - border-radius: 50%; -} -hr { - margin-top: 20px; - margin-bottom: 20px; - border: 0; - border-top: 1px solid #eeeeee; -} -.sr-only { - position: absolute; - width: 1px; - height: 1px; - margin: -1px; - padding: 0; - overflow: hidden; - clip: rect(0 0 0 0); - border: 0; -} -p { - margin: 0 0 10px; -} -.lead { - margin-bottom: 20px; - font-size: 16.099999999999998px; - font-weight: 200; - line-height: 1.4; -} -@media (min-width: 768px) { - .lead { - font-size: 21px; - } -} -small { - 
font-size: 85%; -} -cite { - font-style: normal; -} -.text-muted { - color: #999999; -} -.text-primary { - color: #428bca; -} -.text-warning { - color: #c09853; -} -.text-danger { - color: #b94a48; -} -.text-success { - color: #468847; -} -.text-info { - color: #3a87ad; -} -.text-left { - text-align: left; -} -.text-right { - text-align: right; -} -.text-center { - text-align: center; -} -h1, -h2, -h3, -h4, -h5, -h6, -.h1, -.h2, -.h3, -.h4, -.h5, -.h6 { - font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; - font-weight: 500; - line-height: 1.1; -} -h1 small, -h2 small, -h3 small, -h4 small, -h5 small, -h6 small, -.h1 small, -.h2 small, -.h3 small, -.h4 small, -.h5 small, -.h6 small { - font-weight: normal; - line-height: 1; - color: #999999; -} -h1, -h2, -h3 { - margin-top: 20px; - margin-bottom: 10px; -} -h4, -h5, -h6 { - margin-top: 10px; - margin-bottom: 10px; -} -h1, -.h1 { - font-size: 36px; -} -h2, -.h2 { - font-size: 30px; -} -h3, -.h3 { - font-size: 24px; -} -h4, -.h4 { - font-size: 18px; -} -h5, -.h5 { - font-size: 14px; -} -h6, -.h6 { - font-size: 12px; -} -h1 small, -.h1 small { - font-size: 24px; -} -h2 small, -.h2 small { - font-size: 18px; -} -h3 small, -.h3 small, -h4 small, -.h4 small { - font-size: 14px; -} -.page-header { - padding-bottom: 9px; - margin: 40px 0 20px; - border-bottom: 1px solid #eeeeee; -} -ul, -ol { - margin-top: 0; - margin-bottom: 10px; -} -ul ul, -ol ul, -ul ol, -ol ol { - margin-bottom: 0; -} -.list-unstyled { - padding-left: 0; - list-style: none; -} -.list-inline { - padding-left: 0; - list-style: none; -} -.list-inline > li { - display: inline-block; - padding-left: 5px; - padding-right: 5px; -} -dl { - margin-bottom: 20px; -} -dt, -dd { - line-height: 1.428571429; -} -dt { - font-weight: bold; -} -dd { - margin-left: 0; -} -@media (min-width: 992px) { - .dl-horizontal dt { - float: left; - width: 160px; - clear: left; - text-align: right; - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; - } - 
.dl-horizontal dd { - margin-left: 180px; - } - .dl-horizontal dd:before, - .dl-horizontal dd:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - - } - .dl-horizontal dd:after { - clear: both; - } - .dl-horizontal dd:before, - .dl-horizontal dd:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - - } - .dl-horizontal dd:after { - clear: both; - } -} -abbr[title], -abbr[data-original-title] { - cursor: help; - border-bottom: 1px dotted #999999; -} -abbr.initialism { - font-size: 90%; - text-transform: uppercase; -} -blockquote { - padding: 10px 20px; - margin: 0 0 20px; - border-left: 5px solid #eeeeee; -} -blockquote p { - font-size: 17.5px; - font-weight: 300; - line-height: 1.25; -} -blockquote p:last-child { - margin-bottom: 0; -} -blockquote small { - display: block; - line-height: 1.428571429; - color: #999999; -} -blockquote small:before { - content: '\2014 \00A0'; -} -blockquote.pull-right { - padding-right: 15px; - padding-left: 0; - border-right: 5px solid #eeeeee; - border-left: 0; -} -blockquote.pull-right p, -blockquote.pull-right small { - text-align: right; -} -blockquote.pull-right small:before { - content: ''; -} -blockquote.pull-right small:after { - content: '\00A0 \2014'; -} -q:before, -q:after, -blockquote:before, -blockquote:after { - content: ""; -} -address { - display: block; - margin-bottom: 20px; - font-style: normal; - line-height: 1.428571429; -} -code, -pre { - font-family: Monaco, Menlo, Consolas, "Courier New", monospace; -} -code { - padding: 2px 4px; - font-size: 90%; - color: #c7254e; - background-color: #f9f2f4; - white-space: nowrap; - border-radius: 4px; -} -pre { - display: block; - padding: 9.5px; - margin: 0 0 10px; - font-size: 13px; - line-height: 1.428571429; - word-break: break-all; - word-wrap: break-word; - color: #333333; - background-color: #f5f5f5; - border: 1px solid #cccccc; - border-radius: 4px; -} -pre.prettyprint { - margin-bottom: 20px; -} -pre code { - padding: 0; - font-size: 
inherit; - color: inherit; - white-space: pre-wrap; - background-color: transparent; - border: 0; -} -.pre-scrollable { - max-height: 340px; - overflow-y: scroll; -} -.container { - margin-right: auto; - margin-left: auto; - padding-left: 15px; - padding-right: 15px; -} -.container:before, -.container:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.container:after { - clear: both; -} -.container:before, -.container:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.container:after { - clear: both; -} -.row { - margin-left: -15px; - margin-right: -15px; -} -.row:before, -.row:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.row:after { - clear: both; -} -.row:before, -.row:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.row:after { - clear: both; -} -.col-xs-1, -.col-xs-2, -.col-xs-3, -.col-xs-4, -.col-xs-5, -.col-xs-6, -.col-xs-7, -.col-xs-8, -.col-xs-9, -.col-xs-10, -.col-xs-11, -.col-xs-12, -.col-sm-1, -.col-sm-2, -.col-sm-3, -.col-sm-4, -.col-sm-5, -.col-sm-6, -.col-sm-7, -.col-sm-8, -.col-sm-9, -.col-sm-10, -.col-sm-11, -.col-sm-12, -.col-md-1, -.col-md-2, -.col-md-3, -.col-md-4, -.col-md-5, -.col-md-6, -.col-md-7, -.col-md-8, -.col-md-9, -.col-md-10, -.col-md-11, -.col-md-12, -.col-lg-1, -.col-lg-2, -.col-lg-3, -.col-lg-4, -.col-lg-5, -.col-lg-6, -.col-lg-7, -.col-lg-8, -.col-lg-9, -.col-lg-10, -.col-lg-11, -.col-lg-12 { - position: relative; - min-height: 1px; - padding-left: 15px; - padding-right: 15px; -} -.col-xs-1, -.col-xs-2, -.col-xs-3, -.col-xs-4, -.col-xs-5, -.col-xs-6, -.col-xs-7, -.col-xs-8, -.col-xs-9, -.col-xs-10, -.col-xs-11 { - float: left; -} -.col-xs-1 { - width: 8.333333333333332%; -} -.col-xs-2 { - width: 16.666666666666664%; -} -.col-xs-3 { - width: 25%; -} -.col-xs-4 { - width: 33.33333333333333%; -} -.col-xs-5 { - width: 41.66666666666667%; -} -.col-xs-6 { - width: 50%; -} -.col-xs-7 { - width: 58.333333333333336%; -} -.col-xs-8 { - width: 
66.66666666666666%; -} -.col-xs-9 { - width: 75%; -} -.col-xs-10 { - width: 83.33333333333334%; -} -.col-xs-11 { - width: 91.66666666666666%; -} -.col-xs-12 { - width: 100%; -} -@media (min-width: 768px) { - .container { - max-width: 750px; - } - .col-sm-1, - .col-sm-2, - .col-sm-3, - .col-sm-4, - .col-sm-5, - .col-sm-6, - .col-sm-7, - .col-sm-8, - .col-sm-9, - .col-sm-10, - .col-sm-11 { - float: left; - } - .col-sm-1 { - width: 8.333333333333332%; - } - .col-sm-2 { - width: 16.666666666666664%; - } - .col-sm-3 { - width: 25%; - } - .col-sm-4 { - width: 33.33333333333333%; - } - .col-sm-5 { - width: 41.66666666666667%; - } - .col-sm-6 { - width: 50%; - } - .col-sm-7 { - width: 58.333333333333336%; - } - .col-sm-8 { - width: 66.66666666666666%; - } - .col-sm-9 { - width: 75%; - } - .col-sm-10 { - width: 83.33333333333334%; - } - .col-sm-11 { - width: 91.66666666666666%; - } - .col-sm-12 { - width: 100%; - } - .col-sm-push-1 { - left: 8.333333333333332%; - } - .col-sm-push-2 { - left: 16.666666666666664%; - } - .col-sm-push-3 { - left: 25%; - } - .col-sm-push-4 { - left: 33.33333333333333%; - } - .col-sm-push-5 { - left: 41.66666666666667%; - } - .col-sm-push-6 { - left: 50%; - } - .col-sm-push-7 { - left: 58.333333333333336%; - } - .col-sm-push-8 { - left: 66.66666666666666%; - } - .col-sm-push-9 { - left: 75%; - } - .col-sm-push-10 { - left: 83.33333333333334%; - } - .col-sm-push-11 { - left: 91.66666666666666%; - } - .col-sm-pull-1 { - right: 8.333333333333332%; - } - .col-sm-pull-2 { - right: 16.666666666666664%; - } - .col-sm-pull-3 { - right: 25%; - } - .col-sm-pull-4 { - right: 33.33333333333333%; - } - .col-sm-pull-5 { - right: 41.66666666666667%; - } - .col-sm-pull-6 { - right: 50%; - } - .col-sm-pull-7 { - right: 58.333333333333336%; - } - .col-sm-pull-8 { - right: 66.66666666666666%; - } - .col-sm-pull-9 { - right: 75%; - } - .col-sm-pull-10 { - right: 83.33333333333334%; - } - .col-sm-pull-11 { - right: 91.66666666666666%; - } - .col-sm-offset-1 { - 
margin-left: 8.333333333333332%; - } - .col-sm-offset-2 { - margin-left: 16.666666666666664%; - } - .col-sm-offset-3 { - margin-left: 25%; - } - .col-sm-offset-4 { - margin-left: 33.33333333333333%; - } - .col-sm-offset-5 { - margin-left: 41.66666666666667%; - } - .col-sm-offset-6 { - margin-left: 50%; - } - .col-sm-offset-7 { - margin-left: 58.333333333333336%; - } - .col-sm-offset-8 { - margin-left: 66.66666666666666%; - } - .col-sm-offset-9 { - margin-left: 75%; - } - .col-sm-offset-10 { - margin-left: 83.33333333333334%; - } - .col-sm-offset-11 { - margin-left: 91.66666666666666%; - } -} -@media (min-width: 992px) { - .container { - max-width: 970px; - } - .col-md-1, - .col-md-2, - .col-md-3, - .col-md-4, - .col-md-5, - .col-md-6, - .col-md-7, - .col-md-8, - .col-md-9, - .col-md-10, - .col-md-11 { - float: left; - } - .col-md-1 { - width: 8.333333333333332%; - } - .col-md-2 { - width: 16.666666666666664%; - } - .col-md-3 { - width: 25%; - } - .col-md-4 { - width: 33.33333333333333%; - } - .col-md-5 { - width: 41.66666666666667%; - } - .col-md-6 { - width: 50%; - } - .col-md-7 { - width: 58.333333333333336%; - } - .col-md-8 { - width: 66.66666666666666%; - } - .col-md-9 { - width: 75%; - } - .col-md-10 { - width: 83.33333333333334%; - } - .col-md-11 { - width: 91.66666666666666%; - } - .col-md-12 { - width: 100%; - } - .col-md-push-0 { - left: auto; - } - .col-md-push-1 { - left: 8.333333333333332%; - } - .col-md-push-2 { - left: 16.666666666666664%; - } - .col-md-push-3 { - left: 25%; - } - .col-md-push-4 { - left: 33.33333333333333%; - } - .col-md-push-5 { - left: 41.66666666666667%; - } - .col-md-push-6 { - left: 50%; - } - .col-md-push-7 { - left: 58.333333333333336%; - } - .col-md-push-8 { - left: 66.66666666666666%; - } - .col-md-push-9 { - left: 75%; - } - .col-md-push-10 { - left: 83.33333333333334%; - } - .col-md-push-11 { - left: 91.66666666666666%; - } - .col-md-pull-0 { - right: auto; - } - .col-md-pull-1 { - right: 8.333333333333332%; - } - 
.col-md-pull-2 { - right: 16.666666666666664%; - } - .col-md-pull-3 { - right: 25%; - } - .col-md-pull-4 { - right: 33.33333333333333%; - } - .col-md-pull-5 { - right: 41.66666666666667%; - } - .col-md-pull-6 { - right: 50%; - } - .col-md-pull-7 { - right: 58.333333333333336%; - } - .col-md-pull-8 { - right: 66.66666666666666%; - } - .col-md-pull-9 { - right: 75%; - } - .col-md-pull-10 { - right: 83.33333333333334%; - } - .col-md-pull-11 { - right: 91.66666666666666%; - } - .col-md-offset-0 { - margin-left: 0; - } - .col-md-offset-1 { - margin-left: 8.333333333333332%; - } - .col-md-offset-2 { - margin-left: 16.666666666666664%; - } - .col-md-offset-3 { - margin-left: 25%; - } - .col-md-offset-4 { - margin-left: 33.33333333333333%; - } - .col-md-offset-5 { - margin-left: 41.66666666666667%; - } - .col-md-offset-6 { - margin-left: 50%; - } - .col-md-offset-7 { - margin-left: 58.333333333333336%; - } - .col-md-offset-8 { - margin-left: 66.66666666666666%; - } - .col-md-offset-9 { - margin-left: 75%; - } - .col-md-offset-10 { - margin-left: 83.33333333333334%; - } - .col-md-offset-11 { - margin-left: 91.66666666666666%; - } -} -@media (min-width: 1200px) { - .container { - max-width: 1170px; - } - .col-lg-1, - .col-lg-2, - .col-lg-3, - .col-lg-4, - .col-lg-5, - .col-lg-6, - .col-lg-7, - .col-lg-8, - .col-lg-9, - .col-lg-10, - .col-lg-11 { - float: left; - } - .col-lg-1 { - width: 8.333333333333332%; - } - .col-lg-2 { - width: 16.666666666666664%; - } - .col-lg-3 { - width: 25%; - } - .col-lg-4 { - width: 33.33333333333333%; - } - .col-lg-5 { - width: 41.66666666666667%; - } - .col-lg-6 { - width: 50%; - } - .col-lg-7 { - width: 58.333333333333336%; - } - .col-lg-8 { - width: 66.66666666666666%; - } - .col-lg-9 { - width: 75%; - } - .col-lg-10 { - width: 83.33333333333334%; - } - .col-lg-11 { - width: 91.66666666666666%; - } - .col-lg-12 { - width: 100%; - } - .col-lg-push-0 { - left: auto; - } - .col-lg-push-1 { - left: 8.333333333333332%; - } - .col-lg-push-2 { - 
left: 16.666666666666664%; - } - .col-lg-push-3 { - left: 25%; - } - .col-lg-push-4 { - left: 33.33333333333333%; - } - .col-lg-push-5 { - left: 41.66666666666667%; - } - .col-lg-push-6 { - left: 50%; - } - .col-lg-push-7 { - left: 58.333333333333336%; - } - .col-lg-push-8 { - left: 66.66666666666666%; - } - .col-lg-push-9 { - left: 75%; - } - .col-lg-push-10 { - left: 83.33333333333334%; - } - .col-lg-push-11 { - left: 91.66666666666666%; - } - .col-lg-pull-0 { - right: auto; - } - .col-lg-pull-1 { - right: 8.333333333333332%; - } - .col-lg-pull-2 { - right: 16.666666666666664%; - } - .col-lg-pull-3 { - right: 25%; - } - .col-lg-pull-4 { - right: 33.33333333333333%; - } - .col-lg-pull-5 { - right: 41.66666666666667%; - } - .col-lg-pull-6 { - right: 50%; - } - .col-lg-pull-7 { - right: 58.333333333333336%; - } - .col-lg-pull-8 { - right: 66.66666666666666%; - } - .col-lg-pull-9 { - right: 75%; - } - .col-lg-pull-10 { - right: 83.33333333333334%; - } - .col-lg-pull-11 { - right: 91.66666666666666%; - } - .col-lg-offset-0 { - margin-left: 0; - } - .col-lg-offset-1 { - margin-left: 8.333333333333332%; - } - .col-lg-offset-2 { - margin-left: 16.666666666666664%; - } - .col-lg-offset-3 { - margin-left: 25%; - } - .col-lg-offset-4 { - margin-left: 33.33333333333333%; - } - .col-lg-offset-5 { - margin-left: 41.66666666666667%; - } - .col-lg-offset-6 { - margin-left: 50%; - } - .col-lg-offset-7 { - margin-left: 58.333333333333336%; - } - .col-lg-offset-8 { - margin-left: 66.66666666666666%; - } - .col-lg-offset-9 { - margin-left: 75%; - } - .col-lg-offset-10 { - margin-left: 83.33333333333334%; - } - .col-lg-offset-11 { - margin-left: 91.66666666666666%; - } -} -table { - max-width: 100%; - background-color: transparent; -} -th { - text-align: left; -} -.table { - width: 100%; - margin-bottom: 20px; -} -.table thead > tr > th, -.table tbody > tr > th, -.table tfoot > tr > th, -.table thead > tr > td, -.table tbody > tr > td, -.table tfoot > tr > td { - padding: 8px; - 
line-height: 1.428571429; - vertical-align: top; - border-top: 1px solid #dddddd; -} -.table thead > tr > th { - vertical-align: bottom; - border-bottom: 2px solid #dddddd; -} -.table caption + thead tr:first-child th, -.table colgroup + thead tr:first-child th, -.table thead:first-child tr:first-child th, -.table caption + thead tr:first-child td, -.table colgroup + thead tr:first-child td, -.table thead:first-child tr:first-child td { - border-top: 0; -} -.table tbody + tbody { - border-top: 2px solid #dddddd; -} -.table .table { - background-color: #ffffff; -} -.table-condensed thead > tr > th, -.table-condensed tbody > tr > th, -.table-condensed tfoot > tr > th, -.table-condensed thead > tr > td, -.table-condensed tbody > tr > td, -.table-condensed tfoot > tr > td { - padding: 5px; -} -.table-bordered { - border: 1px solid #dddddd; -} -.table-bordered > thead > tr > th, -.table-bordered > tbody > tr > th, -.table-bordered > tfoot > tr > th, -.table-bordered > thead > tr > td, -.table-bordered > tbody > tr > td, -.table-bordered > tfoot > tr > td { - border: 1px solid #dddddd; -} -.table-bordered > thead > tr > th, -.table-bordered > thead > tr > td { - border-bottom-width: 2px; -} -.table-striped > tbody > tr:nth-child(odd) > td, -.table-striped > tbody > tr:nth-child(odd) > th { - background-color: #f9f9f9; -} -.table-hover > tbody > tr:hover > td, -.table-hover > tbody > tr:hover > th { - background-color: #f5f5f5; -} -table col[class*="col-"] { - float: none; - display: table-column; -} -table td[class*="col-"], -table th[class*="col-"] { - float: none; - display: table-cell; -} -.table > thead > tr > td.active, -.table > tbody > tr > td.active, -.table > tfoot > tr > td.active, -.table > thead > tr > th.active, -.table > tbody > tr > th.active, -.table > tfoot > tr > th.active, -.table > thead > tr.active > td, -.table > tbody > tr.active > td, -.table > tfoot > tr.active > td, -.table > thead > tr.active > th, -.table > tbody > tr.active > th, -.table > 
tfoot > tr.active > th { - background-color: #f5f5f5; -} -.table > thead > tr > td.success, -.table > tbody > tr > td.success, -.table > tfoot > tr > td.success, -.table > thead > tr > th.success, -.table > tbody > tr > th.success, -.table > tfoot > tr > th.success, -.table > thead > tr.success > td, -.table > tbody > tr.success > td, -.table > tfoot > tr.success > td, -.table > thead > tr.success > th, -.table > tbody > tr.success > th, -.table > tfoot > tr.success > th { - background-color: #dff0d8; - border-color: #d6e9c6; -} -.table-hover > tbody > tr > td.success:hover, -.table-hover > tbody > tr > th.success:hover, -.table-hover > tbody > tr.success:hover > td { - background-color: #d0e9c6; - border-color: #c9e2b3; -} -.table > thead > tr > td.danger, -.table > tbody > tr > td.danger, -.table > tfoot > tr > td.danger, -.table > thead > tr > th.danger, -.table > tbody > tr > th.danger, -.table > tfoot > tr > th.danger, -.table > thead > tr.danger > td, -.table > tbody > tr.danger > td, -.table > tfoot > tr.danger > td, -.table > thead > tr.danger > th, -.table > tbody > tr.danger > th, -.table > tfoot > tr.danger > th { - background-color: #f2dede; - border-color: #eed3d7; -} -.table-hover > tbody > tr > td.danger:hover, -.table-hover > tbody > tr > th.danger:hover, -.table-hover > tbody > tr.danger:hover > td { - background-color: #ebcccc; - border-color: #e6c1c7; -} -.table > thead > tr > td.warning, -.table > tbody > tr > td.warning, -.table > tfoot > tr > td.warning, -.table > thead > tr > th.warning, -.table > tbody > tr > th.warning, -.table > tfoot > tr > th.warning, -.table > thead > tr.warning > td, -.table > tbody > tr.warning > td, -.table > tfoot > tr.warning > td, -.table > thead > tr.warning > th, -.table > tbody > tr.warning > th, -.table > tfoot > tr.warning > th { - background-color: #fcf8e3; - border-color: #fbeed5; -} -.table-hover > tbody > tr > td.warning:hover, -.table-hover > tbody > tr > th.warning:hover, -.table-hover > tbody > 
tr.warning:hover > td { - background-color: #faf2cc; - border-color: #f8e5be; -} -@media (max-width: 768px) { - .table-responsive { - width: 100%; - margin-bottom: 15px; - overflow-y: hidden; - overflow-x: scroll; - border: 1px solid #dddddd; - } - .table-responsive > .table { - margin-bottom: 0; - background-color: #fff; - } - .table-responsive > .table > thead > tr > th, - .table-responsive > .table > tbody > tr > th, - .table-responsive > .table > tfoot > tr > th, - .table-responsive > .table > thead > tr > td, - .table-responsive > .table > tbody > tr > td, - .table-responsive > .table > tfoot > tr > td { - white-space: nowrap; - } - .table-responsive > .table-bordered { - border: 0; - } - .table-responsive > .table-bordered > thead > tr > th:first-child, - .table-responsive > .table-bordered > tbody > tr > th:first-child, - .table-responsive > .table-bordered > tfoot > tr > th:first-child, - .table-responsive > .table-bordered > thead > tr > td:first-child, - .table-responsive > .table-bordered > tbody > tr > td:first-child, - .table-responsive > .table-bordered > tfoot > tr > td:first-child { - border-left: 0; - } - .table-responsive > .table-bordered > thead > tr > th:last-child, - .table-responsive > .table-bordered > tbody > tr > th:last-child, - .table-responsive > .table-bordered > tfoot > tr > th:last-child, - .table-responsive > .table-bordered > thead > tr > td:last-child, - .table-responsive > .table-bordered > tbody > tr > td:last-child, - .table-responsive > .table-bordered > tfoot > tr > td:last-child { - border-right: 0; - } - .table-responsive > .table-bordered > thead > tr:last-child > th, - .table-responsive > .table-bordered > tbody > tr:last-child > th, - .table-responsive > .table-bordered > tfoot > tr:last-child > th, - .table-responsive > .table-bordered > thead > tr:last-child > td, - .table-responsive > .table-bordered > tbody > tr:last-child > td, - .table-responsive > .table-bordered > tfoot > tr:last-child > td { - border-bottom: 0; 
- } -} -fieldset { - padding: 0; - margin: 0; - border: 0; -} -legend { - display: block; - width: 100%; - padding: 0; - margin-bottom: 20px; - font-size: 21px; - line-height: inherit; - color: #333333; - border: 0; - border-bottom: 1px solid #e5e5e5; -} -label { - display: inline-block; - margin-bottom: 5px; - font-weight: bold; -} -input[type="search"] { - -webkit-box-sizing: border-box; - -moz-box-sizing: border-box; - box-sizing: border-box; -} -input[type="radio"], -input[type="checkbox"] { - margin: 4px 0 0; - margin-top: 1px \9; - /* IE8-9 */ - - line-height: normal; -} -input[type="file"] { - display: block; -} -select[multiple], -select[size] { - height: auto; -} -select optgroup { - font-size: inherit; - font-style: inherit; - font-family: inherit; -} -input[type="file"]:focus, -input[type="radio"]:focus, -input[type="checkbox"]:focus { - outline: thin dotted #333; - outline: 5px auto -webkit-focus-ring-color; - outline-offset: -2px; -} -input[type="number"]::-webkit-outer-spin-button, -input[type="number"]::-webkit-inner-spin-button { - height: auto; -} -.form-control:-moz-placeholder { - color: #999999; -} -.form-control::-moz-placeholder { - color: #999999; -} -.form-control:-ms-input-placeholder { - color: #999999; -} -.form-control::-webkit-input-placeholder { - color: #999999; -} -.form-control { - display: block; - width: 100%; - height: 34px; - padding: 6px 12px; - font-size: 14px; - line-height: 1.428571429; - color: #555555; - vertical-align: middle; - background-color: #ffffff; - border: 1px solid #cccccc; - border-radius: 4px; - -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); - box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); - -webkit-transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s; - transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s; -} -.form-control:focus { - border-color: #66afe9; - outline: 0; - -webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6); - 
box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6); -} -.form-control[disabled], -.form-control[readonly], -fieldset[disabled] .form-control { - cursor: not-allowed; - background-color: #eeeeee; -} -textarea.form-control { - height: auto; -} -.form-group { - margin-bottom: 15px; -} -.radio, -.checkbox { - display: block; - min-height: 20px; - margin-top: 10px; - margin-bottom: 10px; - padding-left: 20px; - vertical-align: middle; -} -.radio label, -.checkbox label { - display: inline; - margin-bottom: 0; - font-weight: normal; - cursor: pointer; -} -.radio input[type="radio"], -.radio-inline input[type="radio"], -.checkbox input[type="checkbox"], -.checkbox-inline input[type="checkbox"] { - float: left; - margin-left: -20px; -} -.radio + .radio, -.checkbox + .checkbox { - margin-top: -5px; -} -.radio-inline, -.checkbox-inline { - display: inline-block; - padding-left: 20px; - margin-bottom: 0; - vertical-align: middle; - font-weight: normal; - cursor: pointer; -} -.radio-inline + .radio-inline, -.checkbox-inline + .checkbox-inline { - margin-top: 0; - margin-left: 10px; -} -input[type="radio"][disabled], -input[type="checkbox"][disabled], -.radio[disabled], -.radio-inline[disabled], -.checkbox[disabled], -.checkbox-inline[disabled], -fieldset[disabled] input[type="radio"], -fieldset[disabled] input[type="checkbox"], -fieldset[disabled] .radio, -fieldset[disabled] .radio-inline, -fieldset[disabled] .checkbox, -fieldset[disabled] .checkbox-inline { - cursor: not-allowed; -} -.input-sm { - height: 30px; - padding: 5px 10px; - font-size: 12px; - line-height: 1.5; - border-radius: 3px; -} -select.input-sm { - height: 30px; - line-height: 30px; -} -textarea.input-sm { - height: auto; -} -.input-lg { - height: 45px; - padding: 10px 16px; - font-size: 18px; - line-height: 1.33; - border-radius: 6px; -} -select.input-lg { - height: 45px; - line-height: 45px; -} -textarea.input-lg { - height: auto; -} -.has-warning .help-block, -.has-warning 
.control-label { - color: #c09853; -} -.has-warning .form-control { - border-color: #c09853; - -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); - box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -} -.has-warning .form-control:focus { - border-color: #a47e3c; - -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #dbc59e; - box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #dbc59e; -} -.has-warning .input-group-addon { - color: #c09853; - border-color: #c09853; - background-color: #fcf8e3; -} -.has-error .help-block, -.has-error .control-label { - color: #b94a48; -} -.has-error .form-control { - border-color: #b94a48; - -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); - box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -} -.has-error .form-control:focus { - border-color: #953b39; - -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #d59392; - box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #d59392; -} -.has-error .input-group-addon { - color: #b94a48; - border-color: #b94a48; - background-color: #f2dede; -} -.has-success .help-block, -.has-success .control-label { - color: #468847; -} -.has-success .form-control { - border-color: #468847; - -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); - box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -} -.has-success .form-control:focus { - border-color: #356635; - -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7aba7b; - box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7aba7b; -} -.has-success .input-group-addon { - color: #468847; - border-color: #468847; - background-color: #dff0d8; -} -.form-control-static { - margin-bottom: 0; - padding-top: 7px; -} -.help-block { - display: block; - margin-top: 5px; - margin-bottom: 10px; - color: #737373; -} -@media (min-width: 768px) { - .form-inline .form-group { - display: inline-block; - margin-bottom: 0; - vertical-align: middle; - } - .form-inline .form-control { - display: 
inline-block; - } - .form-inline .radio, - .form-inline .checkbox { - display: inline-block; - margin-top: 0; - margin-bottom: 0; - padding-left: 0; - } - .form-inline .radio input[type="radio"], - .form-inline .checkbox input[type="checkbox"] { - float: none; - margin-left: 0; - } -} -.form-horizontal .control-label, -.form-horizontal .radio, -.form-horizontal .checkbox, -.form-horizontal .radio-inline, -.form-horizontal .checkbox-inline { - margin-top: 0; - margin-bottom: 0; - padding-top: 7px; -} -.form-horizontal .form-group { - margin-left: -15px; - margin-right: -15px; -} -.form-horizontal .form-group:before, -.form-horizontal .form-group:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.form-horizontal .form-group:after { - clear: both; -} -.form-horizontal .form-group:before, -.form-horizontal .form-group:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.form-horizontal .form-group:after { - clear: both; -} -@media (min-width: 768px) { - .form-horizontal .control-label { - text-align: right; - } -} -.btn { - display: inline-block; - padding: 6px 12px; - margin-bottom: 0; - font-size: 14px; - font-weight: normal; - line-height: 1.428571429; - text-align: center; - vertical-align: middle; - cursor: pointer; - border: 1px solid transparent; - border-radius: 4px; - white-space: nowrap; - -webkit-user-select: none; - -moz-user-select: none; - -ms-user-select: none; - -o-user-select: none; - user-select: none; -} -.btn:focus { - outline: thin dotted #333; - outline: 5px auto -webkit-focus-ring-color; - outline-offset: -2px; -} -.btn:hover, -.btn:focus { - color: #333333; - text-decoration: none; -} -.btn:active, -.btn.active { - outline: 0; - background-image: none; - -webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125); - box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125); -} -.btn.disabled, -.btn[disabled], -fieldset[disabled] .btn { - cursor: not-allowed; - pointer-events: none; - opacity: 0.65; - filter: 
alpha(opacity=65); - -webkit-box-shadow: none; - box-shadow: none; -} -.btn-default { - color: #333333; - background-color: #ffffff; - border-color: #cccccc; -} -.btn-default:hover, -.btn-default:focus, -.btn-default:active, -.btn-default.active, -.open .dropdown-toggle.btn-default { - color: #333333; - background-color: #ebebeb; - border-color: #adadad; -} -.btn-default:active, -.btn-default.active, -.open .dropdown-toggle.btn-default { - background-image: none; -} -.btn-default.disabled, -.btn-default[disabled], -fieldset[disabled] .btn-default, -.btn-default.disabled:hover, -.btn-default[disabled]:hover, -fieldset[disabled] .btn-default:hover, -.btn-default.disabled:focus, -.btn-default[disabled]:focus, -fieldset[disabled] .btn-default:focus, -.btn-default.disabled:active, -.btn-default[disabled]:active, -fieldset[disabled] .btn-default:active, -.btn-default.disabled.active, -.btn-default[disabled].active, -fieldset[disabled] .btn-default.active { - background-color: #ffffff; - border-color: #cccccc; -} -.btn-primary { - color: #ffffff; - background-color: #428bca; - border-color: #357ebd; -} -.btn-primary:hover, -.btn-primary:focus, -.btn-primary:active, -.btn-primary.active, -.open .dropdown-toggle.btn-primary { - color: #ffffff; - background-color: #3276b1; - border-color: #285e8e; -} -.btn-primary:active, -.btn-primary.active, -.open .dropdown-toggle.btn-primary { - background-image: none; -} -.btn-primary.disabled, -.btn-primary[disabled], -fieldset[disabled] .btn-primary, -.btn-primary.disabled:hover, -.btn-primary[disabled]:hover, -fieldset[disabled] .btn-primary:hover, -.btn-primary.disabled:focus, -.btn-primary[disabled]:focus, -fieldset[disabled] .btn-primary:focus, -.btn-primary.disabled:active, -.btn-primary[disabled]:active, -fieldset[disabled] .btn-primary:active, -.btn-primary.disabled.active, -.btn-primary[disabled].active, -fieldset[disabled] .btn-primary.active { - background-color: #428bca; - border-color: #357ebd; -} -.btn-warning { - color: 
#ffffff; - background-color: #f0ad4e; - border-color: #eea236; -} -.btn-warning:hover, -.btn-warning:focus, -.btn-warning:active, -.btn-warning.active, -.open .dropdown-toggle.btn-warning { - color: #ffffff; - background-color: #ed9c28; - border-color: #d58512; -} -.btn-warning:active, -.btn-warning.active, -.open .dropdown-toggle.btn-warning { - background-image: none; -} -.btn-warning.disabled, -.btn-warning[disabled], -fieldset[disabled] .btn-warning, -.btn-warning.disabled:hover, -.btn-warning[disabled]:hover, -fieldset[disabled] .btn-warning:hover, -.btn-warning.disabled:focus, -.btn-warning[disabled]:focus, -fieldset[disabled] .btn-warning:focus, -.btn-warning.disabled:active, -.btn-warning[disabled]:active, -fieldset[disabled] .btn-warning:active, -.btn-warning.disabled.active, -.btn-warning[disabled].active, -fieldset[disabled] .btn-warning.active { - background-color: #f0ad4e; - border-color: #eea236; -} -.btn-danger { - color: #ffffff; - background-color: #d9534f; - border-color: #d43f3a; -} -.btn-danger:hover, -.btn-danger:focus, -.btn-danger:active, -.btn-danger.active, -.open .dropdown-toggle.btn-danger { - color: #ffffff; - background-color: #d2322d; - border-color: #ac2925; -} -.btn-danger:active, -.btn-danger.active, -.open .dropdown-toggle.btn-danger { - background-image: none; -} -.btn-danger.disabled, -.btn-danger[disabled], -fieldset[disabled] .btn-danger, -.btn-danger.disabled:hover, -.btn-danger[disabled]:hover, -fieldset[disabled] .btn-danger:hover, -.btn-danger.disabled:focus, -.btn-danger[disabled]:focus, -fieldset[disabled] .btn-danger:focus, -.btn-danger.disabled:active, -.btn-danger[disabled]:active, -fieldset[disabled] .btn-danger:active, -.btn-danger.disabled.active, -.btn-danger[disabled].active, -fieldset[disabled] .btn-danger.active { - background-color: #d9534f; - border-color: #d43f3a; -} -.btn-success { - color: #ffffff; - background-color: #5cb85c; - border-color: #4cae4c; -} -.btn-success:hover, -.btn-success:focus, 
-.btn-success:active, -.btn-success.active, -.open .dropdown-toggle.btn-success { - color: #ffffff; - background-color: #47a447; - border-color: #398439; -} -.btn-success:active, -.btn-success.active, -.open .dropdown-toggle.btn-success { - background-image: none; -} -.btn-success.disabled, -.btn-success[disabled], -fieldset[disabled] .btn-success, -.btn-success.disabled:hover, -.btn-success[disabled]:hover, -fieldset[disabled] .btn-success:hover, -.btn-success.disabled:focus, -.btn-success[disabled]:focus, -fieldset[disabled] .btn-success:focus, -.btn-success.disabled:active, -.btn-success[disabled]:active, -fieldset[disabled] .btn-success:active, -.btn-success.disabled.active, -.btn-success[disabled].active, -fieldset[disabled] .btn-success.active { - background-color: #5cb85c; - border-color: #4cae4c; -} -.btn-info { - color: #ffffff; - background-color: #5bc0de; - border-color: #46b8da; -} -.btn-info:hover, -.btn-info:focus, -.btn-info:active, -.btn-info.active, -.open .dropdown-toggle.btn-info { - color: #ffffff; - background-color: #39b3d7; - border-color: #269abc; -} -.btn-info:active, -.btn-info.active, -.open .dropdown-toggle.btn-info { - background-image: none; -} -.btn-info.disabled, -.btn-info[disabled], -fieldset[disabled] .btn-info, -.btn-info.disabled:hover, -.btn-info[disabled]:hover, -fieldset[disabled] .btn-info:hover, -.btn-info.disabled:focus, -.btn-info[disabled]:focus, -fieldset[disabled] .btn-info:focus, -.btn-info.disabled:active, -.btn-info[disabled]:active, -fieldset[disabled] .btn-info:active, -.btn-info.disabled.active, -.btn-info[disabled].active, -fieldset[disabled] .btn-info.active { - background-color: #5bc0de; - border-color: #46b8da; -} -.btn-link { - color: #428bca; - font-weight: normal; - cursor: pointer; - border-radius: 0; -} -.btn-link, -.btn-link:active, -.btn-link[disabled], -fieldset[disabled] .btn-link { - background-color: transparent; - -webkit-box-shadow: none; - box-shadow: none; -} -.btn-link, -.btn-link:hover, 
-.btn-link:focus, -.btn-link:active { - border-color: transparent; -} -.btn-link:hover, -.btn-link:focus { - color: #2a6496; - text-decoration: underline; - background-color: transparent; -} -.btn-link[disabled]:hover, -fieldset[disabled] .btn-link:hover, -.btn-link[disabled]:focus, -fieldset[disabled] .btn-link:focus { - color: #999999; - text-decoration: none; -} -.btn-lg { - padding: 10px 16px; - font-size: 18px; - line-height: 1.33; - border-radius: 6px; -} -.btn-sm, -.btn-xs { - padding: 5px 10px; - font-size: 12px; - line-height: 1.5; - border-radius: 3px; -} -.btn-xs { - padding: 1px 5px; -} -.btn-block { - display: block; - width: 100%; - padding-left: 0; - padding-right: 0; -} -.btn-block + .btn-block { - margin-top: 5px; -} -input[type="submit"].btn-block, -input[type="reset"].btn-block, -input[type="button"].btn-block { - width: 100%; -} -.fade { - opacity: 0; - -webkit-transition: opacity 0.15s linear; - transition: opacity 0.15s linear; -} -.fade.in { - opacity: 1; -} -.collapse { - display: none; -} -.collapse.in { - display: block; -} -.collapsing { - position: relative; - height: 0; - overflow: hidden; - -webkit-transition: height 0.35s ease; - transition: height 0.35s ease; -} -@font-face { - font-family: 'Glyphicons Halflings'; - src: url('../fonts/glyphicons-halflings-regular.eot'); - src: url('../fonts/glyphicons-halflings-regular.eot?#iefix') format('embedded-opentype'), url('../fonts/glyphicons-halflings-regular.woff') format('woff'), url('../fonts/glyphicons-halflings-regular.ttf') format('truetype'), url('../fonts/glyphicons-halflings-regular.svg#glyphicons-halflingsregular') format('svg'); -} -.glyphicon { - position: relative; - top: 1px; - display: inline-block; - font-family: 'Glyphicons Halflings'; - font-style: normal; - font-weight: normal; - line-height: 1; - -webkit-font-smoothing: antialiased; -} -.glyphicon-asterisk:before { - content: "\2a"; -} -.glyphicon-plus:before { - content: "\2b"; -} -.glyphicon-euro:before { - content: 
"\20ac"; -} -.glyphicon-minus:before { - content: "\2212"; -} -.glyphicon-cloud:before { - content: "\2601"; -} -.glyphicon-envelope:before { - content: "\2709"; -} -.glyphicon-pencil:before { - content: "\270f"; -} -.glyphicon-glass:before { - content: "\e001"; -} -.glyphicon-music:before { - content: "\e002"; -} -.glyphicon-search:before { - content: "\e003"; -} -.glyphicon-heart:before { - content: "\e005"; -} -.glyphicon-star:before { - content: "\e006"; -} -.glyphicon-star-empty:before { - content: "\e007"; -} -.glyphicon-user:before { - content: "\e008"; -} -.glyphicon-film:before { - content: "\e009"; -} -.glyphicon-th-large:before { - content: "\e010"; -} -.glyphicon-th:before { - content: "\e011"; -} -.glyphicon-th-list:before { - content: "\e012"; -} -.glyphicon-ok:before { - content: "\e013"; -} -.glyphicon-remove:before { - content: "\e014"; -} -.glyphicon-zoom-in:before { - content: "\e015"; -} -.glyphicon-zoom-out:before { - content: "\e016"; -} -.glyphicon-off:before { - content: "\e017"; -} -.glyphicon-signal:before { - content: "\e018"; -} -.glyphicon-cog:before { - content: "\e019"; -} -.glyphicon-trash:before { - content: "\e020"; -} -.glyphicon-home:before { - content: "\e021"; -} -.glyphicon-file:before { - content: "\e022"; -} -.glyphicon-time:before { - content: "\e023"; -} -.glyphicon-road:before { - content: "\e024"; -} -.glyphicon-download-alt:before { - content: "\e025"; -} -.glyphicon-download:before { - content: "\e026"; -} -.glyphicon-upload:before { - content: "\e027"; -} -.glyphicon-inbox:before { - content: "\e028"; -} -.glyphicon-play-circle:before { - content: "\e029"; -} -.glyphicon-repeat:before { - content: "\e030"; -} -.glyphicon-refresh:before { - content: "\e031"; -} -.glyphicon-list-alt:before { - content: "\e032"; -} -.glyphicon-flag:before { - content: "\e034"; -} -.glyphicon-headphones:before { - content: "\e035"; -} -.glyphicon-volume-off:before { - content: "\e036"; -} -.glyphicon-volume-down:before { - content: 
"\e037"; -} -.glyphicon-volume-up:before { - content: "\e038"; -} -.glyphicon-qrcode:before { - content: "\e039"; -} -.glyphicon-barcode:before { - content: "\e040"; -} -.glyphicon-tag:before { - content: "\e041"; -} -.glyphicon-tags:before { - content: "\e042"; -} -.glyphicon-book:before { - content: "\e043"; -} -.glyphicon-print:before { - content: "\e045"; -} -.glyphicon-font:before { - content: "\e047"; -} -.glyphicon-bold:before { - content: "\e048"; -} -.glyphicon-italic:before { - content: "\e049"; -} -.glyphicon-text-height:before { - content: "\e050"; -} -.glyphicon-text-width:before { - content: "\e051"; -} -.glyphicon-align-left:before { - content: "\e052"; -} -.glyphicon-align-center:before { - content: "\e053"; -} -.glyphicon-align-right:before { - content: "\e054"; -} -.glyphicon-align-justify:before { - content: "\e055"; -} -.glyphicon-list:before { - content: "\e056"; -} -.glyphicon-indent-left:before { - content: "\e057"; -} -.glyphicon-indent-right:before { - content: "\e058"; -} -.glyphicon-facetime-video:before { - content: "\e059"; -} -.glyphicon-picture:before { - content: "\e060"; -} -.glyphicon-map-marker:before { - content: "\e062"; -} -.glyphicon-adjust:before { - content: "\e063"; -} -.glyphicon-tint:before { - content: "\e064"; -} -.glyphicon-edit:before { - content: "\e065"; -} -.glyphicon-share:before { - content: "\e066"; -} -.glyphicon-check:before { - content: "\e067"; -} -.glyphicon-move:before { - content: "\e068"; -} -.glyphicon-step-backward:before { - content: "\e069"; -} -.glyphicon-fast-backward:before { - content: "\e070"; -} -.glyphicon-backward:before { - content: "\e071"; -} -.glyphicon-play:before { - content: "\e072"; -} -.glyphicon-pause:before { - content: "\e073"; -} -.glyphicon-stop:before { - content: "\e074"; -} -.glyphicon-forward:before { - content: "\e075"; -} -.glyphicon-fast-forward:before { - content: "\e076"; -} -.glyphicon-step-forward:before { - content: "\e077"; -} -.glyphicon-eject:before { - content: 
"\e078"; -} -.glyphicon-chevron-left:before { - content: "\e079"; -} -.glyphicon-chevron-right:before { - content: "\e080"; -} -.glyphicon-plus-sign:before { - content: "\e081"; -} -.glyphicon-minus-sign:before { - content: "\e082"; -} -.glyphicon-remove-sign:before { - content: "\e083"; -} -.glyphicon-ok-sign:before { - content: "\e084"; -} -.glyphicon-question-sign:before { - content: "\e085"; -} -.glyphicon-info-sign:before { - content: "\e086"; -} -.glyphicon-screenshot:before { - content: "\e087"; -} -.glyphicon-remove-circle:before { - content: "\e088"; -} -.glyphicon-ok-circle:before { - content: "\e089"; -} -.glyphicon-ban-circle:before { - content: "\e090"; -} -.glyphicon-arrow-left:before { - content: "\e091"; -} -.glyphicon-arrow-right:before { - content: "\e092"; -} -.glyphicon-arrow-up:before { - content: "\e093"; -} -.glyphicon-arrow-down:before { - content: "\e094"; -} -.glyphicon-share-alt:before { - content: "\e095"; -} -.glyphicon-resize-full:before { - content: "\e096"; -} -.glyphicon-resize-small:before { - content: "\e097"; -} -.glyphicon-exclamation-sign:before { - content: "\e101"; -} -.glyphicon-gift:before { - content: "\e102"; -} -.glyphicon-leaf:before { - content: "\e103"; -} -.glyphicon-eye-open:before { - content: "\e105"; -} -.glyphicon-eye-close:before { - content: "\e106"; -} -.glyphicon-warning-sign:before { - content: "\e107"; -} -.glyphicon-plane:before { - content: "\e108"; -} -.glyphicon-random:before { - content: "\e110"; -} -.glyphicon-comment:before { - content: "\e111"; -} -.glyphicon-magnet:before { - content: "\e112"; -} -.glyphicon-chevron-up:before { - content: "\e113"; -} -.glyphicon-chevron-down:before { - content: "\e114"; -} -.glyphicon-retweet:before { - content: "\e115"; -} -.glyphicon-shopping-cart:before { - content: "\e116"; -} -.glyphicon-folder-close:before { - content: "\e117"; -} -.glyphicon-folder-open:before { - content: "\e118"; -} -.glyphicon-resize-vertical:before { - content: "\e119"; -} 
-.glyphicon-resize-horizontal:before { - content: "\e120"; -} -.glyphicon-hdd:before { - content: "\e121"; -} -.glyphicon-bullhorn:before { - content: "\e122"; -} -.glyphicon-certificate:before { - content: "\e124"; -} -.glyphicon-thumbs-up:before { - content: "\e125"; -} -.glyphicon-thumbs-down:before { - content: "\e126"; -} -.glyphicon-hand-right:before { - content: "\e127"; -} -.glyphicon-hand-left:before { - content: "\e128"; -} -.glyphicon-hand-up:before { - content: "\e129"; -} -.glyphicon-hand-down:before { - content: "\e130"; -} -.glyphicon-circle-arrow-right:before { - content: "\e131"; -} -.glyphicon-circle-arrow-left:before { - content: "\e132"; -} -.glyphicon-circle-arrow-up:before { - content: "\e133"; -} -.glyphicon-circle-arrow-down:before { - content: "\e134"; -} -.glyphicon-globe:before { - content: "\e135"; -} -.glyphicon-tasks:before { - content: "\e137"; -} -.glyphicon-filter:before { - content: "\e138"; -} -.glyphicon-fullscreen:before { - content: "\e140"; -} -.glyphicon-dashboard:before { - content: "\e141"; -} -.glyphicon-heart-empty:before { - content: "\e143"; -} -.glyphicon-link:before { - content: "\e144"; -} -.glyphicon-phone:before { - content: "\e145"; -} -.glyphicon-usd:before { - content: "\e148"; -} -.glyphicon-gbp:before { - content: "\e149"; -} -.glyphicon-sort:before { - content: "\e150"; -} -.glyphicon-sort-by-alphabet:before { - content: "\e151"; -} -.glyphicon-sort-by-alphabet-alt:before { - content: "\e152"; -} -.glyphicon-sort-by-order:before { - content: "\e153"; -} -.glyphicon-sort-by-order-alt:before { - content: "\e154"; -} -.glyphicon-sort-by-attributes:before { - content: "\e155"; -} -.glyphicon-sort-by-attributes-alt:before { - content: "\e156"; -} -.glyphicon-unchecked:before { - content: "\e157"; -} -.glyphicon-expand:before { - content: "\e158"; -} -.glyphicon-collapse-down:before { - content: "\e159"; -} -.glyphicon-collapse-up:before { - content: "\e160"; -} -.glyphicon-log-in:before { - content: "\e161"; -} 
-.glyphicon-flash:before { - content: "\e162"; -} -.glyphicon-log-out:before { - content: "\e163"; -} -.glyphicon-new-window:before { - content: "\e164"; -} -.glyphicon-record:before { - content: "\e165"; -} -.glyphicon-save:before { - content: "\e166"; -} -.glyphicon-open:before { - content: "\e167"; -} -.glyphicon-saved:before { - content: "\e168"; -} -.glyphicon-import:before { - content: "\e169"; -} -.glyphicon-export:before { - content: "\e170"; -} -.glyphicon-send:before { - content: "\e171"; -} -.glyphicon-floppy-disk:before { - content: "\e172"; -} -.glyphicon-floppy-saved:before { - content: "\e173"; -} -.glyphicon-floppy-remove:before { - content: "\e174"; -} -.glyphicon-floppy-save:before { - content: "\e175"; -} -.glyphicon-floppy-open:before { - content: "\e176"; -} -.glyphicon-credit-card:before { - content: "\e177"; -} -.glyphicon-transfer:before { - content: "\e178"; -} -.glyphicon-cutlery:before { - content: "\e179"; -} -.glyphicon-header:before { - content: "\e180"; -} -.glyphicon-compressed:before { - content: "\e181"; -} -.glyphicon-earphone:before { - content: "\e182"; -} -.glyphicon-phone-alt:before { - content: "\e183"; -} -.glyphicon-tower:before { - content: "\e184"; -} -.glyphicon-stats:before { - content: "\e185"; -} -.glyphicon-sd-video:before { - content: "\e186"; -} -.glyphicon-hd-video:before { - content: "\e187"; -} -.glyphicon-subtitles:before { - content: "\e188"; -} -.glyphicon-sound-stereo:before { - content: "\e189"; -} -.glyphicon-sound-dolby:before { - content: "\e190"; -} -.glyphicon-sound-5-1:before { - content: "\e191"; -} -.glyphicon-sound-6-1:before { - content: "\e192"; -} -.glyphicon-sound-7-1:before { - content: "\e193"; -} -.glyphicon-copyright-mark:before { - content: "\e194"; -} -.glyphicon-registration-mark:before { - content: "\e195"; -} -.glyphicon-cloud-download:before { - content: "\e197"; -} -.glyphicon-cloud-upload:before { - content: "\e198"; -} -.glyphicon-tree-conifer:before { - content: "\e199"; -} 
-.glyphicon-tree-deciduous:before { - content: "\e200"; -} -.glyphicon-briefcase:before { - content: "\1f4bc"; -} -.glyphicon-calendar:before { - content: "\1f4c5"; -} -.glyphicon-pushpin:before { - content: "\1f4cc"; -} -.glyphicon-paperclip:before { - content: "\1f4ce"; -} -.glyphicon-camera:before { - content: "\1f4f7"; -} -.glyphicon-lock:before { - content: "\1f512"; -} -.glyphicon-bell:before { - content: "\1f514"; -} -.glyphicon-bookmark:before { - content: "\1f516"; -} -.glyphicon-fire:before { - content: "\1f525"; -} -.glyphicon-wrench:before { - content: "\1f527"; -} -.caret { - display: inline-block; - width: 0; - height: 0; - margin-left: 2px; - vertical-align: middle; - border-top: 4px solid #000000; - border-right: 4px solid transparent; - border-left: 4px solid transparent; - border-bottom: 0 dotted; - content: ""; -} -.dropdown { - position: relative; -} -.dropdown-toggle:focus { - outline: 0; -} -.dropdown-menu { - position: absolute; - top: 100%; - left: 0; - z-index: 1000; - display: none; - float: left; - min-width: 160px; - padding: 5px 0; - margin: 2px 0 0; - list-style: none; - font-size: 14px; - background-color: #ffffff; - border: 1px solid #cccccc; - border: 1px solid rgba(0, 0, 0, 0.15); - border-radius: 4px; - -webkit-box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175); - box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175); - background-clip: padding-box; -} -.dropdown-menu.pull-right { - right: 0; - left: auto; -} -.dropdown-menu .divider { - height: 1px; - margin: 9px 0; - overflow: hidden; - background-color: #e5e5e5; -} -.dropdown-menu > li > a { - display: block; - padding: 3px 20px; - clear: both; - font-weight: normal; - line-height: 1.428571429; - color: #333333; - white-space: nowrap; -} -.dropdown-menu > li > a:hover, -.dropdown-menu > li > a:focus { - text-decoration: none; - color: #ffffff; - background-color: #428bca; -} -.dropdown-menu > .active > a, -.dropdown-menu > .active > a:hover, -.dropdown-menu > .active > a:focus { - color: 
#ffffff; - text-decoration: none; - outline: 0; - background-color: #428bca; -} -.dropdown-menu > .disabled > a, -.dropdown-menu > .disabled > a:hover, -.dropdown-menu > .disabled > a:focus { - color: #999999; -} -.dropdown-menu > .disabled > a:hover, -.dropdown-menu > .disabled > a:focus { - text-decoration: none; - background-color: transparent; - background-image: none; - filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); - cursor: not-allowed; -} -.open > .dropdown-menu { - display: block; -} -.open > a { - outline: 0; -} -.dropdown-header { - display: block; - padding: 3px 20px; - font-size: 12px; - line-height: 1.428571429; - color: #999999; -} -.dropdown-backdrop { - position: fixed; - left: 0; - right: 0; - bottom: 0; - top: 0; - z-index: 990; -} -.pull-right > .dropdown-menu { - right: 0; - left: auto; -} -.dropup .caret, -.navbar-fixed-bottom .dropdown .caret { - border-top: 0 dotted; - border-bottom: 4px solid #000000; - content: ""; -} -.dropup .dropdown-menu, -.navbar-fixed-bottom .dropdown .dropdown-menu { - top: auto; - bottom: 100%; - margin-bottom: 1px; -} -@media (min-width: 992px) { - .navbar-right .dropdown-menu { - right: 0; - left: auto; - } -} -.btn-default .caret { - border-top-color: #333333; -} -.btn-primary .caret, -.btn-success .caret, -.btn-warning .caret, -.btn-danger .caret, -.btn-info .caret { - border-top-color: #fff; -} -.dropup .btn-default .caret { - border-bottom-color: #333333; -} -.dropup .btn-primary .caret, -.dropup .btn-success .caret, -.dropup .btn-warning .caret, -.dropup .btn-danger .caret, -.dropup .btn-info .caret { - border-bottom-color: #fff; -} -.btn-group, -.btn-group-vertical { - position: relative; - display: inline-block; - vertical-align: middle; -} -.btn-group > .btn, -.btn-group-vertical > .btn { - position: relative; - float: left; -} -.btn-group > .btn:hover, -.btn-group-vertical > .btn:hover, -.btn-group > .btn:focus, -.btn-group-vertical > .btn:focus, -.btn-group > .btn:active, 
-.btn-group-vertical > .btn:active, -.btn-group > .btn.active, -.btn-group-vertical > .btn.active { - z-index: 2; -} -.btn-group > .btn:focus, -.btn-group-vertical > .btn:focus { - outline: none; -} -.btn-group .btn + .btn, -.btn-group .btn + .btn-group, -.btn-group .btn-group + .btn, -.btn-group .btn-group + .btn-group { - margin-left: -1px; -} -.btn-toolbar:before, -.btn-toolbar:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.btn-toolbar:after { - clear: both; -} -.btn-toolbar:before, -.btn-toolbar:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.btn-toolbar:after { - clear: both; -} -.btn-toolbar .btn-group { - float: left; -} -.btn-toolbar > .btn + .btn, -.btn-toolbar > .btn-group + .btn, -.btn-toolbar > .btn + .btn-group, -.btn-toolbar > .btn-group + .btn-group { - margin-left: 5px; -} -.btn-group > .btn:not(:first-child):not(:last-child):not(.dropdown-toggle) { - border-radius: 0; -} -.btn-group > .btn:first-child { - margin-left: 0; -} -.btn-group > .btn:first-child:not(:last-child):not(.dropdown-toggle) { - border-bottom-right-radius: 0; - border-top-right-radius: 0; -} -.btn-group > .btn:last-child:not(:first-child), -.btn-group > .dropdown-toggle:not(:first-child) { - border-bottom-left-radius: 0; - border-top-left-radius: 0; -} -.btn-group > .btn-group { - float: left; -} -.btn-group > .btn-group:not(:first-child):not(:last-child) > .btn { - border-radius: 0; -} -.btn-group > .btn-group:first-child > .btn:last-child, -.btn-group > .btn-group:first-child > .dropdown-toggle { - border-bottom-right-radius: 0; - border-top-right-radius: 0; -} -.btn-group > .btn-group:last-child > .btn:first-child { - border-bottom-left-radius: 0; - border-top-left-radius: 0; -} -.btn-group .dropdown-toggle:active, -.btn-group.open .dropdown-toggle { - outline: 0; -} -.btn-group-xs > .btn { - padding: 5px 10px; - font-size: 12px; - line-height: 1.5; - border-radius: 3px; - padding: 1px 5px; -} -.btn-group-sm > .btn { - padding: 5px 
10px; - font-size: 12px; - line-height: 1.5; - border-radius: 3px; -} -.btn-group-lg > .btn { - padding: 10px 16px; - font-size: 18px; - line-height: 1.33; - border-radius: 6px; -} -.btn-group > .btn + .dropdown-toggle { - padding-left: 8px; - padding-right: 8px; -} -.btn-group > .btn-lg + .dropdown-toggle { - padding-left: 12px; - padding-right: 12px; -} -.btn-group.open .dropdown-toggle { - -webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125); - box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125); -} -.btn .caret { - margin-left: 0; -} -.btn-lg .caret { - border-width: 5px 5px 0; - border-bottom-width: 0; -} -.dropup .btn-lg .caret { - border-width: 0 5px 5px; -} -.btn-group-vertical > .btn, -.btn-group-vertical > .btn-group { - display: block; - float: none; - width: 100%; - max-width: 100%; -} -.btn-group-vertical > .btn-group:before, -.btn-group-vertical > .btn-group:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.btn-group-vertical > .btn-group:after { - clear: both; -} -.btn-group-vertical > .btn-group:before, -.btn-group-vertical > .btn-group:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.btn-group-vertical > .btn-group:after { - clear: both; -} -.btn-group-vertical > .btn-group > .btn { - float: none; -} -.btn-group-vertical > .btn + .btn, -.btn-group-vertical > .btn + .btn-group, -.btn-group-vertical > .btn-group + .btn, -.btn-group-vertical > .btn-group + .btn-group { - margin-top: -1px; - margin-left: 0; -} -.btn-group-vertical > .btn:not(:first-child):not(:last-child) { - border-radius: 0; -} -.btn-group-vertical > .btn:first-child:not(:last-child) { - border-top-right-radius: 4px; - border-bottom-right-radius: 0; - border-bottom-left-radius: 0; -} -.btn-group-vertical > .btn:last-child:not(:first-child) { - border-bottom-left-radius: 4px; - border-top-right-radius: 0; - border-top-left-radius: 0; -} -.btn-group-vertical > .btn-group:not(:first-child):not(:last-child) > .btn { - border-radius: 0; -} 
-.btn-group-vertical > .btn-group:first-child > .btn:last-child, -.btn-group-vertical > .btn-group:first-child > .dropdown-toggle { - border-bottom-right-radius: 0; - border-bottom-left-radius: 0; -} -.btn-group-vertical > .btn-group:last-child > .btn:first-child { - border-top-right-radius: 0; - border-top-left-radius: 0; -} -.btn-group-justified { - display: table; - width: 100%; - table-layout: fixed; - border-collapse: separate; -} -.btn-group-justified .btn { - float: none; - display: table-cell; - width: 1%; -} -[data-toggle="buttons"] > .btn > input[type="radio"], -[data-toggle="buttons"] > .btn > input[type="checkbox"] { - display: none; -} -.input-group { - position: relative; - display: table; - border-collapse: separate; -} -.input-group.col { - float: none; - padding-left: 0; - padding-right: 0; -} -.input-group .form-control { - width: 100%; - margin-bottom: 0; -} -.input-group-lg > .form-control, -.input-group-lg > .input-group-addon, -.input-group-lg > .input-group-btn > .btn { - height: 45px; - padding: 10px 16px; - font-size: 18px; - line-height: 1.33; - border-radius: 6px; -} -select.input-group-lg > .form-control, -select.input-group-lg > .input-group-addon, -select.input-group-lg > .input-group-btn > .btn { - height: 45px; - line-height: 45px; -} -textarea.input-group-lg > .form-control, -textarea.input-group-lg > .input-group-addon, -textarea.input-group-lg > .input-group-btn > .btn { - height: auto; -} -.input-group-sm > .form-control, -.input-group-sm > .input-group-addon, -.input-group-sm > .input-group-btn > .btn { - height: 30px; - padding: 5px 10px; - font-size: 12px; - line-height: 1.5; - border-radius: 3px; -} -select.input-group-sm > .form-control, -select.input-group-sm > .input-group-addon, -select.input-group-sm > .input-group-btn > .btn { - height: 30px; - line-height: 30px; -} -textarea.input-group-sm > .form-control, -textarea.input-group-sm > .input-group-addon, -textarea.input-group-sm > .input-group-btn > .btn { - height: 
auto; -} -.input-group-addon, -.input-group-btn, -.input-group .form-control { - display: table-cell; -} -.input-group-addon:not(:first-child):not(:last-child), -.input-group-btn:not(:first-child):not(:last-child), -.input-group .form-control:not(:first-child):not(:last-child) { - border-radius: 0; -} -.input-group-addon, -.input-group-btn { - width: 1%; - white-space: nowrap; - vertical-align: middle; -} -.input-group-addon { - padding: 6px 12px; - font-size: 14px; - font-weight: normal; - line-height: 1; - text-align: center; - background-color: #eeeeee; - border: 1px solid #cccccc; - border-radius: 4px; -} -.input-group-addon.input-sm { - padding: 5px 10px; - font-size: 12px; - border-radius: 3px; -} -.input-group-addon.input-lg { - padding: 10px 16px; - font-size: 18px; - border-radius: 6px; -} -.input-group-addon input[type="radio"], -.input-group-addon input[type="checkbox"] { - margin-top: 0; -} -.input-group .form-control:first-child, -.input-group-addon:first-child, -.input-group-btn:first-child > .btn, -.input-group-btn:first-child > .dropdown-toggle, -.input-group-btn:last-child > .btn:not(:last-child):not(.dropdown-toggle) { - border-bottom-right-radius: 0; - border-top-right-radius: 0; -} -.input-group-addon:first-child { - border-right: 0; -} -.input-group .form-control:last-child, -.input-group-addon:last-child, -.input-group-btn:last-child > .btn, -.input-group-btn:last-child > .dropdown-toggle, -.input-group-btn:first-child > .btn:not(:first-child) { - border-bottom-left-radius: 0; - border-top-left-radius: 0; -} -.input-group-addon:last-child { - border-left: 0; -} -.input-group-btn { - position: relative; - white-space: nowrap; -} -.input-group-btn > .btn { - position: relative; -} -.input-group-btn > .btn + .btn { - margin-left: -4px; -} -.input-group-btn > .btn:hover, -.input-group-btn > .btn:active { - z-index: 2; -} -.nav { - margin-bottom: 0; - padding-left: 0; - list-style: none; -} -.nav:before, -.nav:after { - content: " "; - /* 1 */ - - 
display: table; - /* 2 */ - -} -.nav:after { - clear: both; -} -.nav:before, -.nav:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.nav:after { - clear: both; -} -.nav > li { - position: relative; - display: block; -} -.nav > li > a { - position: relative; - display: block; - padding: 10px 15px; -} -.nav > li > a:hover, -.nav > li > a:focus { - text-decoration: none; - background-color: #eeeeee; -} -.nav > li.disabled > a { - color: #999999; -} -.nav > li.disabled > a:hover, -.nav > li.disabled > a:focus { - color: #999999; - text-decoration: none; - background-color: transparent; - cursor: not-allowed; -} -.nav .open > a, -.nav .open > a:hover, -.nav .open > a:focus { - background-color: #eeeeee; - border-color: #428bca; -} -.nav .nav-divider { - height: 1px; - margin: 9px 0; - overflow: hidden; - background-color: #e5e5e5; -} -.nav > li > a > img { - max-width: none; -} -.nav-tabs { - border-bottom: 1px solid #dddddd; -} -.nav-tabs > li { - float: left; - margin-bottom: -1px; -} -.nav-tabs > li > a { - margin-right: 2px; - line-height: 1.428571429; - border: 1px solid transparent; - border-radius: 4px 4px 0 0; -} -.nav-tabs > li > a:hover { - border-color: #eeeeee #eeeeee #dddddd; -} -.nav-tabs > li.active > a, -.nav-tabs > li.active > a:hover, -.nav-tabs > li.active > a:focus { - color: #555555; - background-color: #ffffff; - border: 1px solid #dddddd; - border-bottom-color: transparent; - cursor: default; -} -.nav-tabs.nav-justified { - width: 100%; - border-bottom: 0; -} -.nav-tabs.nav-justified > li { - float: none; -} -.nav-tabs.nav-justified > li > a { - text-align: center; -} -@media (min-width: 768px) { - .nav-tabs.nav-justified > li { - display: table-cell; - width: 1%; - } -} -.nav-tabs.nav-justified > li > a { - border-bottom: 1px solid #dddddd; - margin-right: 0; -} -.nav-tabs.nav-justified > .active > a { - border-bottom-color: #ffffff; -} -.nav-pills > li { - float: left; -} -.nav-pills > li > a { - border-radius: 5px; -} 
-.nav-pills > li + li { - margin-left: 2px; -} -.nav-pills > li.active > a, -.nav-pills > li.active > a:hover, -.nav-pills > li.active > a:focus { - color: #ffffff; - background-color: #428bca; -} -.nav-stacked > li { - float: none; -} -.nav-stacked > li + li { - margin-top: 2px; - margin-left: 0; -} -.nav-justified { - width: 100%; -} -.nav-justified > li { - float: none; -} -.nav-justified > li > a { - text-align: center; -} -@media (min-width: 768px) { - .nav-justified > li { - display: table-cell; - width: 1%; - } -} -.nav-tabs-justified { - border-bottom: 0; -} -.nav-tabs-justified > li > a { - border-bottom: 1px solid #dddddd; - margin-right: 0; -} -.nav-tabs-justified > .active > a { - border-bottom-color: #ffffff; -} -.tabbable:before, -.tabbable:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.tabbable:after { - clear: both; -} -.tabbable:before, -.tabbable:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.tabbable:after { - clear: both; -} -.tab-content > .tab-pane, -.pill-content > .pill-pane { - display: none; -} -.tab-content > .active, -.pill-content > .active { - display: block; -} -.nav .caret { - border-top-color: #428bca; - border-bottom-color: #428bca; -} -.nav a:hover .caret { - border-top-color: #2a6496; - border-bottom-color: #2a6496; -} -.nav-tabs .dropdown-menu { - margin-top: -1px; - border-top-right-radius: 0; - border-top-left-radius: 0; -} -.navbar { - position: relative; - z-index: 1000; - min-height: 50px; - margin-bottom: 20px; - border: 1px solid transparent; -} -.navbar:before, -.navbar:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.navbar:after { - clear: both; -} -.navbar:before, -.navbar:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.navbar:after { - clear: both; -} -@media (min-width: 992px) { - .navbar { - border-radius: 4px; - } -} -.navbar-header:before, -.navbar-header:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - 
-} -.navbar-header:after { - clear: both; -} -.navbar-header:before, -.navbar-header:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.navbar-header:after { - clear: both; -} -@media (min-width: 992px) { - .navbar-header { - float: left; - } -} -.navbar-collapse { - max-height: 340px; - overflow-x: visible; - padding-right: 15px; - padding-left: 15px; - border-top: 1px solid transparent; - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1); - -webkit-overflow-scrolling: touch; -} -.navbar-collapse:before, -.navbar-collapse:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.navbar-collapse:after { - clear: both; -} -.navbar-collapse:before, -.navbar-collapse:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.navbar-collapse:after { - clear: both; -} -.navbar-collapse.in { - overflow-y: auto; -} -@media (min-width: 992px) { - .navbar-collapse { - width: auto; - border-top: 0; - box-shadow: none; - } - .navbar-collapse.collapse { - display: block !important; - height: auto !important; - padding-bottom: 0; - overflow: visible !important; - } - .navbar-collapse.in { - overflow-y: visible; - } - .navbar-collapse .navbar-nav.navbar-left:first-child { - margin-left: -15px; - } - .navbar-collapse .navbar-nav.navbar-right:last-child { - margin-right: -15px; - } - .navbar-collapse .navbar-text:last-child { - margin-right: 0; - } -} -.container > .navbar-header, -.container > .navbar-collapse { - margin-right: -15px; - margin-left: -15px; -} -@media (min-width: 992px) { - .container > .navbar-header, - .container > .navbar-collapse { - margin-right: 0; - margin-left: 0; - } -} -.navbar-static-top { - border-width: 0 0 1px; -} -@media (min-width: 992px) { - .navbar-static-top { - border-radius: 0; - } -} -.navbar-fixed-top, -.navbar-fixed-bottom { - position: fixed; - right: 0; - left: 0; - border-width: 0 0 1px; -} -@media (min-width: 992px) { - .navbar-fixed-top, - .navbar-fixed-bottom { - border-radius: 0; - } -} 
-.navbar-fixed-top { - z-index: 1030; - top: 0; -} -.navbar-fixed-bottom { - bottom: 0; - margin-bottom: 0; -} -.navbar-brand { - float: left; - padding: 15px 15px; - font-size: 18px; - line-height: 20px; -} -.navbar-brand:hover, -.navbar-brand:focus { - text-decoration: none; -} -@media (min-width: 992px) { - .navbar > .container .navbar-brand { - margin-left: -15px; - } -} -.navbar-toggle { - position: relative; - float: right; - margin-right: 15px; - padding: 9px 10px; - margin-top: 8px; - margin-bottom: 8px; - background-color: transparent; - border: 1px solid transparent; - border-radius: 4px; -} -.navbar-toggle .icon-bar { - display: block; - width: 22px; - height: 2px; - border-radius: 1px; -} -.navbar-toggle .icon-bar + .icon-bar { - margin-top: 4px; -} -@media (min-width: 992px) { - .navbar-toggle { - display: none; - } -} -.navbar-nav { - margin: 7.5px -15px; -} -.navbar-nav > li > a { - padding-top: 10px; - padding-bottom: 10px; - line-height: 20px; -} -@media (max-width: 767px) { - .navbar-nav .open .dropdown-menu { - position: static; - float: none; - width: auto; - margin-top: 0; - background-color: transparent; - border: 0; - box-shadow: none; - } - .navbar-nav .open .dropdown-menu > li > a, - .navbar-nav .open .dropdown-menu .dropdown-header { - padding: 5px 15px 5px 25px; - } - .navbar-nav .open .dropdown-menu > li > a { - line-height: 20px; - } - .navbar-nav .open .dropdown-menu > li > a:hover, - .navbar-nav .open .dropdown-menu > li > a:focus { - background-image: none; - } -} -@media (min-width: 992px) { - .navbar-nav { - float: left; - margin: 0; - } - .navbar-nav > li { - float: left; - } - .navbar-nav > li > a { - padding-top: 15px; - padding-bottom: 15px; - } -} -@media (min-width: 992px) { - .navbar-left { - float: left !important; - } - .navbar-right { - float: right !important; - } -} -.navbar-form { - margin-left: -15px; - margin-right: -15px; - padding: 10px 15px; - border-top: 1px solid transparent; - border-bottom: 1px solid 
transparent; - -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1); - margin-top: 8px; - margin-bottom: 8px; -} -@media (min-width: 768px) { - .navbar-form .form-group { - display: inline-block; - margin-bottom: 0; - vertical-align: middle; - } - .navbar-form .form-control { - display: inline-block; - } - .navbar-form .radio, - .navbar-form .checkbox { - display: inline-block; - margin-top: 0; - margin-bottom: 0; - padding-left: 0; - } - .navbar-form .radio input[type="radio"], - .navbar-form .checkbox input[type="checkbox"] { - float: none; - margin-left: 0; - } -} -@media (max-width: 767px) { - .navbar-form .form-group { - margin-bottom: 5px; - } -} -@media (min-width: 992px) { - .navbar-form { - width: auto; - border: 0; - margin-left: 0; - margin-right: 0; - padding-top: 0; - padding-bottom: 0; - -webkit-box-shadow: none; - box-shadow: none; - } -} -.navbar-nav > li > .dropdown-menu { - margin-top: 0; - border-top-right-radius: 0; - border-top-left-radius: 0; -} -.navbar-fixed-bottom .navbar-nav > li > .dropdown-menu { - border-bottom-right-radius: 0; - border-bottom-left-radius: 0; -} -.navbar-nav.pull-right > li > .dropdown-menu, -.navbar-nav > li > .dropdown-menu.pull-right { - left: auto; - right: 0; -} -.navbar-btn { - margin-top: 8px; - margin-bottom: 8px; -} -.navbar-text { - float: left; - margin-top: 15px; - margin-bottom: 15px; -} -@media (min-width: 992px) { - .navbar-text { - margin-left: 15px; - margin-right: 15px; - } -} -.navbar-default { - background-color: #f8f8f8; - border-color: #e7e7e7; -} -.navbar-default .navbar-brand { - color: #777777; -} -.navbar-default .navbar-brand:hover, -.navbar-default .navbar-brand:focus { - color: #5e5e5e; - background-color: transparent; -} -.navbar-default .navbar-text { - color: #777777; -} -.navbar-default .navbar-nav > li > a { - color: #777777; -} -.navbar-default .navbar-nav > 
li > a:hover, -.navbar-default .navbar-nav > li > a:focus { - color: #333333; - background-color: transparent; -} -.navbar-default .navbar-nav > .active > a, -.navbar-default .navbar-nav > .active > a:hover, -.navbar-default .navbar-nav > .active > a:focus { - color: #555555; - background-color: #e7e7e7; -} -.navbar-default .navbar-nav > .disabled > a, -.navbar-default .navbar-nav > .disabled > a:hover, -.navbar-default .navbar-nav > .disabled > a:focus { - color: #cccccc; - background-color: transparent; -} -.navbar-default .navbar-toggle { - border-color: #dddddd; -} -.navbar-default .navbar-toggle:hover, -.navbar-default .navbar-toggle:focus { - background-color: #dddddd; -} -.navbar-default .navbar-toggle .icon-bar { - background-color: #cccccc; -} -.navbar-default .navbar-collapse, -.navbar-default .navbar-form { - border-color: #e6e6e6; -} -.navbar-default .navbar-nav > .dropdown > a:hover .caret, -.navbar-default .navbar-nav > .dropdown > a:focus .caret { - border-top-color: #333333; - border-bottom-color: #333333; -} -.navbar-default .navbar-nav > .open > a, -.navbar-default .navbar-nav > .open > a:hover, -.navbar-default .navbar-nav > .open > a:focus { - background-color: #e7e7e7; - color: #555555; -} -.navbar-default .navbar-nav > .open > a .caret, -.navbar-default .navbar-nav > .open > a:hover .caret, -.navbar-default .navbar-nav > .open > a:focus .caret { - border-top-color: #555555; - border-bottom-color: #555555; -} -.navbar-default .navbar-nav > .dropdown > a .caret { - border-top-color: #777777; - border-bottom-color: #777777; -} -@media (max-width: 767px) { - .navbar-default .navbar-nav .open .dropdown-menu > li > a { - color: #777777; - } - .navbar-default .navbar-nav .open .dropdown-menu > li > a:hover, - .navbar-default .navbar-nav .open .dropdown-menu > li > a:focus { - color: #333333; - background-color: transparent; - } - .navbar-default .navbar-nav .open .dropdown-menu > .active > a, - .navbar-default .navbar-nav .open .dropdown-menu > 
.active > a:hover, - .navbar-default .navbar-nav .open .dropdown-menu > .active > a:focus { - color: #555555; - background-color: #e7e7e7; - } - .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a, - .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:hover, - .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:focus { - color: #cccccc; - background-color: transparent; - } -} -.navbar-default .navbar-link { - color: #777777; -} -.navbar-default .navbar-link:hover { - color: #333333; -} -.navbar-inverse { - background-color: #222222; - border-color: #080808; -} -.navbar-inverse .navbar-brand { - color: #999999; -} -.navbar-inverse .navbar-brand:hover, -.navbar-inverse .navbar-brand:focus { - color: #ffffff; - background-color: transparent; -} -.navbar-inverse .navbar-text { - color: #999999; -} -.navbar-inverse .navbar-nav > li > a { - color: #999999; -} -.navbar-inverse .navbar-nav > li > a:hover, -.navbar-inverse .navbar-nav > li > a:focus { - color: #ffffff; - background-color: transparent; -} -.navbar-inverse .navbar-nav > .active > a, -.navbar-inverse .navbar-nav > .active > a:hover, -.navbar-inverse .navbar-nav > .active > a:focus { - color: #ffffff; - background-color: #080808; -} -.navbar-inverse .navbar-nav > .disabled > a, -.navbar-inverse .navbar-nav > .disabled > a:hover, -.navbar-inverse .navbar-nav > .disabled > a:focus { - color: #444444; - background-color: transparent; -} -.navbar-inverse .navbar-toggle { - border-color: #333333; -} -.navbar-inverse .navbar-toggle:hover, -.navbar-inverse .navbar-toggle:focus { - background-color: #333333; -} -.navbar-inverse .navbar-toggle .icon-bar { - background-color: #ffffff; -} -.navbar-inverse .navbar-collapse, -.navbar-inverse .navbar-form { - border-color: #101010; -} -.navbar-inverse .navbar-nav > .open > a, -.navbar-inverse .navbar-nav > .open > a:hover, -.navbar-inverse .navbar-nav > .open > a:focus { - background-color: #080808; - color: #ffffff; -} -.navbar-inverse 
.navbar-nav > .dropdown > a:hover .caret { - border-top-color: #ffffff; - border-bottom-color: #ffffff; -} -.navbar-inverse .navbar-nav > .dropdown > a .caret { - border-top-color: #999999; - border-bottom-color: #999999; -} -.navbar-inverse .navbar-nav > .open > a .caret, -.navbar-inverse .navbar-nav > .open > a:hover .caret, -.navbar-inverse .navbar-nav > .open > a:focus .caret { - border-top-color: #ffffff; - border-bottom-color: #ffffff; -} -@media (max-width: 767px) { - .navbar-inverse .navbar-nav .open .dropdown-menu > .dropdown-header { - border-color: #080808; - } - .navbar-inverse .navbar-nav .open .dropdown-menu > li > a { - color: #999999; - } - .navbar-inverse .navbar-nav .open .dropdown-menu > li > a:hover, - .navbar-inverse .navbar-nav .open .dropdown-menu > li > a:focus { - color: #ffffff; - background-color: transparent; - } - .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a, - .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a:hover, - .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a:focus { - color: #ffffff; - background-color: #080808; - } - .navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a, - .navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a:hover, - .navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a:focus { - color: #444444; - background-color: transparent; - } -} -.navbar-inverse .navbar-link { - color: #999999; -} -.navbar-inverse .navbar-link:hover { - color: #ffffff; -} -.breadcrumb { - padding: 8px 15px; - margin-bottom: 20px; - list-style: none; - background-color: #f5f5f5; - border-radius: 4px; -} -.breadcrumb > li { - display: inline-block; -} -.breadcrumb > li + li:before { - content: "/\00a0"; - padding: 0 5px; - color: #cccccc; -} -.breadcrumb > .active { - color: #999999; -} -.pagination { - display: inline-block; - padding-left: 0; - margin: 20px 0; - border-radius: 4px; -} -.pagination > li { - display: inline; -} -.pagination > li > a, 
-.pagination > li > span { - position: relative; - float: left; - padding: 6px 12px; - line-height: 1.428571429; - text-decoration: none; - background-color: #ffffff; - border: 1px solid #dddddd; - margin-left: -1px; -} -.pagination > li:first-child > a, -.pagination > li:first-child > span { - margin-left: 0; - border-bottom-left-radius: 4px; - border-top-left-radius: 4px; -} -.pagination > li:last-child > a, -.pagination > li:last-child > span { - border-bottom-right-radius: 4px; - border-top-right-radius: 4px; -} -.pagination > li > a:hover, -.pagination > li > span:hover, -.pagination > li > a:focus, -.pagination > li > span:focus { - background-color: #eeeeee; -} -.pagination > .active > a, -.pagination > .active > span, -.pagination > .active > a:hover, -.pagination > .active > span:hover, -.pagination > .active > a:focus, -.pagination > .active > span:focus { - z-index: 2; - color: #ffffff; - background-color: #428bca; - border-color: #428bca; - cursor: default; -} -.pagination > .disabled > span, -.pagination > .disabled > a, -.pagination > .disabled > a:hover, -.pagination > .disabled > a:focus { - color: #999999; - background-color: #ffffff; - border-color: #dddddd; - cursor: not-allowed; -} -.pagination-lg > li > a, -.pagination-lg > li > span { - padding: 10px 16px; - font-size: 18px; -} -.pagination-lg > li:first-child > a, -.pagination-lg > li:first-child > span { - border-bottom-left-radius: 6px; - border-top-left-radius: 6px; -} -.pagination-lg > li:last-child > a, -.pagination-lg > li:last-child > span { - border-bottom-right-radius: 6px; - border-top-right-radius: 6px; -} -.pagination-sm > li > a, -.pagination-sm > li > span { - padding: 5px 10px; - font-size: 12px; -} -.pagination-sm > li:first-child > a, -.pagination-sm > li:first-child > span { - border-bottom-left-radius: 3px; - border-top-left-radius: 3px; -} -.pagination-sm > li:last-child > a, -.pagination-sm > li:last-child > span { - border-bottom-right-radius: 3px; - 
border-top-right-radius: 3px; -} -.pager { - padding-left: 0; - margin: 20px 0; - list-style: none; - text-align: center; -} -.pager:before, -.pager:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.pager:after { - clear: both; -} -.pager:before, -.pager:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.pager:after { - clear: both; -} -.pager li { - display: inline; -} -.pager li > a, -.pager li > span { - display: inline-block; - padding: 5px 14px; - background-color: #ffffff; - border: 1px solid #dddddd; - border-radius: 15px; -} -.pager li > a:hover, -.pager li > a:focus { - text-decoration: none; - background-color: #eeeeee; -} -.pager .next > a, -.pager .next > span { - float: right; -} -.pager .previous > a, -.pager .previous > span { - float: left; -} -.pager .disabled > a, -.pager .disabled > a:hover, -.pager .disabled > a:focus, -.pager .disabled > span { - color: #999999; - background-color: #ffffff; - cursor: not-allowed; -} -.label { - display: inline; - padding: .2em .6em .3em; - font-size: 75%; - font-weight: bold; - line-height: 1; - color: #ffffff; - text-align: center; - white-space: nowrap; - vertical-align: baseline; - border-radius: .25em; -} -.label[href]:hover, -.label[href]:focus { - color: #ffffff; - text-decoration: none; - cursor: pointer; -} -.label:empty { - display: none; -} -.label-default { - background-color: #999999; -} -.label-default[href]:hover, -.label-default[href]:focus { - background-color: #808080; -} -.label-primary { - background-color: #428bca; -} -.label-primary[href]:hover, -.label-primary[href]:focus { - background-color: #3071a9; -} -.label-success { - background-color: #5cb85c; -} -.label-success[href]:hover, -.label-success[href]:focus { - background-color: #449d44; -} -.label-info { - background-color: #5bc0de; -} -.label-info[href]:hover, -.label-info[href]:focus { - background-color: #31b0d5; -} -.label-warning { - background-color: #f0ad4e; -} 
-.label-warning[href]:hover, -.label-warning[href]:focus { - background-color: #ec971f; -} -.label-danger { - background-color: #d9534f; -} -.label-danger[href]:hover, -.label-danger[href]:focus { - background-color: #c9302c; -} -.badge { - display: inline-block; - min-width: 10px; - padding: 3px 7px; - font-size: 12px; - font-weight: bold; - color: #ffffff; - line-height: 1; - vertical-align: baseline; - white-space: nowrap; - text-align: center; - background-color: #999999; - border-radius: 10px; -} -.badge:empty { - display: none; -} -a.badge:hover, -a.badge:focus { - color: #ffffff; - text-decoration: none; - cursor: pointer; -} -.btn .badge { - position: relative; - top: -1px; -} -a.list-group-item.active > .badge, -.nav-pills > .active > a > .badge { - color: #428bca; - background-color: #ffffff; -} -.nav-pills > li > a > .badge { - margin-left: 3px; -} -.jumbotron { - padding: 30px; - margin-bottom: 30px; - font-size: 21px; - font-weight: 200; - line-height: 2.1428571435; - color: inherit; - background-color: #eeeeee; -} -.jumbotron h1 { - line-height: 1; - color: inherit; -} -.jumbotron p { - line-height: 1.4; -} -.container .jumbotron { - border-radius: 6px; -} -@media screen and (min-width: 768px) { - .jumbotron { - padding-top: 48px; - padding-bottom: 48px; - } - .container .jumbotron { - padding-left: 60px; - padding-right: 60px; - } - .jumbotron h1 { - font-size: 63px; - } -} -.thumbnail { - padding: 4px; - line-height: 1.428571429; - background-color: #ffffff; - border: 1px solid #dddddd; - border-radius: 4px; - -webkit-transition: all 0.2s ease-in-out; - transition: all 0.2s ease-in-out; - display: inline-block; - max-width: 100%; - height: auto; - display: block; -} -.thumbnail > img { - display: block; - max-width: 100%; - height: auto; -} -a.thumbnail:hover, -a.thumbnail:focus { - border-color: #428bca; -} -.thumbnail > img { - margin-left: auto; - margin-right: auto; -} -.thumbnail .caption { - padding: 9px; - color: #333333; -} -.alert { - 
padding: 15px; - margin-bottom: 20px; - border: 1px solid transparent; - border-radius: 4px; -} -.alert h4 { - margin-top: 0; - color: inherit; -} -.alert .alert-link { - font-weight: bold; -} -.alert > p, -.alert > ul { - margin-bottom: 0; -} -.alert > p + p { - margin-top: 5px; -} -.alert-dismissable { - padding-right: 35px; -} -.alert-dismissable .close { - position: relative; - top: -2px; - right: -21px; - color: inherit; -} -.alert-success { - background-color: #dff0d8; - border-color: #d6e9c6; - color: #468847; -} -.alert-success hr { - border-top-color: #c9e2b3; -} -.alert-success .alert-link { - color: #356635; -} -.alert-info { - background-color: #d9edf7; - border-color: #bce8f1; - color: #3a87ad; -} -.alert-info hr { - border-top-color: #a6e1ec; -} -.alert-info .alert-link { - color: #2d6987; -} -.alert-warning { - background-color: #fcf8e3; - border-color: #fbeed5; - color: #c09853; -} -.alert-warning hr { - border-top-color: #f8e5be; -} -.alert-warning .alert-link { - color: #a47e3c; -} -.alert-danger { - background-color: #f2dede; - border-color: #eed3d7; - color: #b94a48; -} -.alert-danger hr { - border-top-color: #e6c1c7; -} -.alert-danger .alert-link { - color: #953b39; -} -@-webkit-keyframes progress-bar-stripes { - from { - background-position: 40px 0; - } - to { - background-position: 0 0; - } -} -@-moz-keyframes progress-bar-stripes { - from { - background-position: 40px 0; - } - to { - background-position: 0 0; - } -} -@-o-keyframes progress-bar-stripes { - from { - background-position: 0 0; - } - to { - background-position: 40px 0; - } -} -@keyframes progress-bar-stripes { - from { - background-position: 40px 0; - } - to { - background-position: 0 0; - } -} -.progress { - overflow: hidden; - height: 20px; - margin-bottom: 20px; - background-color: #f5f5f5; - border-radius: 4px; - -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1); - box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1); -} -.progress-bar { - float: left; - width: 0%; - height: 
100%; - font-size: 12px; - color: #ffffff; - text-align: center; - background-color: #428bca; - -webkit-box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.15); - box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.15); - -webkit-transition: width 0.6s ease; - transition: width 0.6s ease; -} -.progress-striped .progress-bar { - background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); - background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-size: 40px 40px; -} -.progress.active .progress-bar { - -webkit-animation: progress-bar-stripes 2s linear infinite; - -moz-animation: progress-bar-stripes 2s linear infinite; - -ms-animation: progress-bar-stripes 2s linear infinite; - -o-animation: progress-bar-stripes 2s linear infinite; - animation: progress-bar-stripes 2s linear infinite; -} -.progress-bar-success { - background-color: #5cb85c; -} -.progress-striped .progress-bar-success { - background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); - 
background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); -} -.progress-bar-info { - background-color: #5bc0de; -} -.progress-striped .progress-bar-info { - background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); - background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); -} -.progress-bar-warning { - background-color: #f0ad4e; -} -.progress-striped .progress-bar-warning { - background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), 
color-stop(0.75, transparent), to(transparent)); - background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); -} -.progress-bar-danger { - background-color: #d9534f; -} -.progress-striped .progress-bar-danger { - background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); - background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); - background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); -} -.media, -.media-body { - overflow: hidden; - zoom: 1; -} -.media, -.media .media { - margin-top: 15px; -} -.media:first-child { - margin-top: 0; -} -.media-object { - display: block; -} -.media-heading { - margin: 0 0 5px; -} -.media > .pull-left { - margin-right: 10px; -} -.media > .pull-right { - 
margin-left: 10px; -} -.media-list { - padding-left: 0; - list-style: none; -} -.list-group { - margin-bottom: 20px; - padding-left: 0; -} -.list-group-item { - position: relative; - display: block; - padding: 10px 15px; - margin-bottom: -1px; - background-color: #ffffff; - border: 1px solid #dddddd; -} -.list-group-item:first-child { - border-top-right-radius: 4px; - border-top-left-radius: 4px; -} -.list-group-item:last-child { - margin-bottom: 0; - border-bottom-right-radius: 4px; - border-bottom-left-radius: 4px; -} -.list-group-item > .badge { - float: right; -} -.list-group-item > .badge + .badge { - margin-right: 5px; -} -a.list-group-item { - color: #555555; -} -a.list-group-item .list-group-item-heading { - color: #333333; -} -a.list-group-item:hover, -a.list-group-item:focus { - text-decoration: none; - background-color: #f5f5f5; -} -.list-group-item.active, -.list-group-item.active:hover, -.list-group-item.active:focus { - z-index: 2; - color: #ffffff; - background-color: #428bca; - border-color: #428bca; -} -.list-group-item.active .list-group-item-heading, -.list-group-item.active:hover .list-group-item-heading, -.list-group-item.active:focus .list-group-item-heading { - color: inherit; -} -.list-group-item.active .list-group-item-text, -.list-group-item.active:hover .list-group-item-text, -.list-group-item.active:focus .list-group-item-text { - color: #e1edf7; -} -.list-group-item-heading { - margin-top: 0; - margin-bottom: 5px; -} -.list-group-item-text { - margin-bottom: 0; - line-height: 1.3; -} -.panel { - margin-bottom: 20px; - background-color: #ffffff; - border: 1px solid transparent; - border-radius: 4px; - -webkit-box-shadow: 0 1px 1px rgba(0, 0, 0, 0.05); - box-shadow: 0 1px 1px rgba(0, 0, 0, 0.05); -} -.panel-body { - padding: 15px; -} -.panel-body:before, -.panel-body:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.panel-body:after { - clear: both; -} -.panel-body:before, -.panel-body:after { - content: " "; - /* 1 
*/ - - display: table; - /* 2 */ - -} -.panel-body:after { - clear: both; -} -.panel > .list-group { - margin-bottom: 0; -} -.panel > .list-group .list-group-item { - border-width: 1px 0; -} -.panel > .list-group .list-group-item:first-child { - border-top-right-radius: 0; - border-top-left-radius: 0; -} -.panel > .list-group .list-group-item:last-child { - border-bottom: 0; -} -.panel-heading + .list-group .list-group-item:first-child { - border-top-width: 0; -} -.panel > .table { - margin-bottom: 0; -} -.panel > .panel-body + .table { - border-top: 1px solid #dddddd; -} -.panel-heading { - padding: 10px 15px; - border-bottom: 1px solid transparent; - border-top-right-radius: 3px; - border-top-left-radius: 3px; -} -.panel-title { - margin-top: 0; - margin-bottom: 0; - font-size: 16px; -} -.panel-title > a { - color: inherit; -} -.panel-footer { - padding: 10px 15px; - background-color: #f5f5f5; - border-top: 1px solid #dddddd; - border-bottom-right-radius: 3px; - border-bottom-left-radius: 3px; -} -.panel-group .panel { - margin-bottom: 0; - border-radius: 4px; - overflow: hidden; -} -.panel-group .panel + .panel { - margin-top: 5px; -} -.panel-group .panel-heading { - border-bottom: 0; -} -.panel-group .panel-heading + .panel-collapse .panel-body { - border-top: 1px solid #dddddd; -} -.panel-group .panel-footer { - border-top: 0; -} -.panel-group .panel-footer + .panel-collapse .panel-body { - border-bottom: 1px solid #dddddd; -} -.panel-default { - border-color: #dddddd; -} -.panel-default > .panel-heading { - color: #333333; - background-color: #f5f5f5; - border-color: #dddddd; -} -.panel-default > .panel-heading + .panel-collapse .panel-body { - border-top-color: #dddddd; -} -.panel-default > .panel-footer + .panel-collapse .panel-body { - border-bottom-color: #dddddd; -} -.panel-primary { - border-color: #428bca; -} -.panel-primary > .panel-heading { - color: #ffffff; - background-color: #428bca; - border-color: #428bca; -} -.panel-primary > .panel-heading + 
.panel-collapse .panel-body { - border-top-color: #428bca; -} -.panel-primary > .panel-footer + .panel-collapse .panel-body { - border-bottom-color: #428bca; -} -.panel-success { - border-color: #d6e9c6; -} -.panel-success > .panel-heading { - color: #468847; - background-color: #dff0d8; - border-color: #d6e9c6; -} -.panel-success > .panel-heading + .panel-collapse .panel-body { - border-top-color: #d6e9c6; -} -.panel-success > .panel-footer + .panel-collapse .panel-body { - border-bottom-color: #d6e9c6; -} -.panel-warning { - border-color: #fbeed5; -} -.panel-warning > .panel-heading { - color: #c09853; - background-color: #fcf8e3; - border-color: #fbeed5; -} -.panel-warning > .panel-heading + .panel-collapse .panel-body { - border-top-color: #fbeed5; -} -.panel-warning > .panel-footer + .panel-collapse .panel-body { - border-bottom-color: #fbeed5; -} -.panel-danger { - border-color: #eed3d7; -} -.panel-danger > .panel-heading { - color: #b94a48; - background-color: #f2dede; - border-color: #eed3d7; -} -.panel-danger > .panel-heading + .panel-collapse .panel-body { - border-top-color: #eed3d7; -} -.panel-danger > .panel-footer + .panel-collapse .panel-body { - border-bottom-color: #eed3d7; -} -.panel-info { - border-color: #bce8f1; -} -.panel-info > .panel-heading { - color: #3a87ad; - background-color: #d9edf7; - border-color: #bce8f1; -} -.panel-info > .panel-heading + .panel-collapse .panel-body { - border-top-color: #bce8f1; -} -.panel-info > .panel-footer + .panel-collapse .panel-body { - border-bottom-color: #bce8f1; -} -.well { - min-height: 20px; - padding: 19px; - margin-bottom: 20px; - background-color: #f5f5f5; - border: 1px solid #e3e3e3; - border-radius: 4px; - -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05); - box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05); -} -.well blockquote { - border-color: #ddd; - border-color: rgba(0, 0, 0, 0.15); -} -.well-lg { - padding: 24px; - border-radius: 6px; -} -.well-sm { - padding: 9px; - border-radius: 
3px; -} -.close { - float: right; - font-size: 21px; - font-weight: bold; - line-height: 1; - color: #000000; - text-shadow: 0 1px 0 #ffffff; - opacity: 0.2; - filter: alpha(opacity=20); -} -.close:hover, -.close:focus { - color: #000000; - text-decoration: none; - cursor: pointer; - opacity: 0.5; - filter: alpha(opacity=50); -} -button.close { - padding: 0; - cursor: pointer; - background: transparent; - border: 0; - -webkit-appearance: none; -} -.modal-open { - overflow: hidden; -} -body.modal-open, -.modal-open .navbar-fixed-top, -.modal-open .navbar-fixed-bottom { - margin-right: 15px; -} -.modal { - display: none; - overflow: auto; - overflow-y: scroll; - position: fixed; - top: 0; - right: 0; - bottom: 0; - left: 0; - z-index: 1040; -} -.modal.fade .modal-dialog { - -webkit-transform: translate(0, -25%); - -ms-transform: translate(0, -25%); - transform: translate(0, -25%); - -webkit-transition: -webkit-transform 0.3s ease-out; - -moz-transition: -moz-transform 0.3s ease-out; - -o-transition: -o-transform 0.3s ease-out; - transition: transform 0.3s ease-out; -} -.modal.in .modal-dialog { - -webkit-transform: translate(0, 0); - -ms-transform: translate(0, 0); - transform: translate(0, 0); -} -.modal-dialog { - margin-left: auto; - margin-right: auto; - width: auto; - padding: 10px; - z-index: 1050; -} -.modal-content { - position: relative; - background-color: #ffffff; - border: 1px solid #999999; - border: 1px solid rgba(0, 0, 0, 0.2); - border-radius: 6px; - -webkit-box-shadow: 0 3px 9px rgba(0, 0, 0, 0.5); - box-shadow: 0 3px 9px rgba(0, 0, 0, 0.5); - background-clip: padding-box; - outline: none; -} -.modal-backdrop { - position: fixed; - top: 0; - right: 0; - bottom: 0; - left: 0; - z-index: 1030; - background-color: #000000; -} -.modal-backdrop.fade { - opacity: 0; - filter: alpha(opacity=0); -} -.modal-backdrop.in { - opacity: 0.5; - filter: alpha(opacity=50); -} -.modal-header { - padding: 15px; - border-bottom: 1px solid #e5e5e5; - min-height: 
16.428571429px; -} -.modal-header .close { - margin-top: -2px; -} -.modal-title { - margin: 0; - line-height: 1.428571429; -} -.modal-body { - position: relative; - padding: 20px; -} -.modal-footer { - margin-top: 15px; - padding: 19px 20px 20px; - text-align: right; - border-top: 1px solid #e5e5e5; -} -.modal-footer:before, -.modal-footer:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.modal-footer:after { - clear: both; -} -.modal-footer:before, -.modal-footer:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.modal-footer:after { - clear: both; -} -.modal-footer .btn + .btn { - margin-left: 5px; - margin-bottom: 0; -} -.modal-footer .btn-group .btn + .btn { - margin-left: -1px; -} -.modal-footer .btn-block + .btn-block { - margin-left: 0; -} -@media screen and (min-width: 768px) { - .modal-dialog { - left: 50%; - right: auto; - width: 600px; - padding-top: 30px; - padding-bottom: 30px; - } - .modal-content { - -webkit-box-shadow: 0 5px 15px rgba(0, 0, 0, 0.5); - box-shadow: 0 5px 15px rgba(0, 0, 0, 0.5); - } -} -.tooltip { - position: absolute; - z-index: 1030; - display: block; - visibility: visible; - font-size: 12px; - line-height: 1.4; - opacity: 0; - filter: alpha(opacity=0); -} -.tooltip.in { - opacity: 0.9; - filter: alpha(opacity=90); -} -.tooltip.top { - margin-top: -3px; - padding: 5px 0; -} -.tooltip.right { - margin-left: 3px; - padding: 0 5px; -} -.tooltip.bottom { - margin-top: 3px; - padding: 5px 0; -} -.tooltip.left { - margin-left: -3px; - padding: 0 5px; -} -.tooltip-inner { - max-width: 200px; - padding: 3px 8px; - color: #ffffff; - text-align: center; - text-decoration: none; - background-color: #000000; - border-radius: 4px; -} -.tooltip-arrow { - position: absolute; - width: 0; - height: 0; - border-color: transparent; - border-style: solid; -} -.tooltip.top .tooltip-arrow { - bottom: 0; - left: 50%; - margin-left: -5px; - border-width: 5px 5px 0; - border-top-color: #000000; -} -.tooltip.top-left 
.tooltip-arrow { - bottom: 0; - left: 5px; - border-width: 5px 5px 0; - border-top-color: #000000; -} -.tooltip.top-right .tooltip-arrow { - bottom: 0; - right: 5px; - border-width: 5px 5px 0; - border-top-color: #000000; -} -.tooltip.right .tooltip-arrow { - top: 50%; - left: 0; - margin-top: -5px; - border-width: 5px 5px 5px 0; - border-right-color: #000000; -} -.tooltip.left .tooltip-arrow { - top: 50%; - right: 0; - margin-top: -5px; - border-width: 5px 0 5px 5px; - border-left-color: #000000; -} -.tooltip.bottom .tooltip-arrow { - top: 0; - left: 50%; - margin-left: -5px; - border-width: 0 5px 5px; - border-bottom-color: #000000; -} -.tooltip.bottom-left .tooltip-arrow { - top: 0; - left: 5px; - border-width: 0 5px 5px; - border-bottom-color: #000000; -} -.tooltip.bottom-right .tooltip-arrow { - top: 0; - right: 5px; - border-width: 0 5px 5px; - border-bottom-color: #000000; -} -.popover { - position: absolute; - top: 0; - left: 0; - z-index: 1010; - display: none; - max-width: 276px; - padding: 1px; - text-align: left; - background-color: #ffffff; - background-clip: padding-box; - border: 1px solid #cccccc; - border: 1px solid rgba(0, 0, 0, 0.2); - border-radius: 6px; - -webkit-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2); - box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2); - white-space: normal; -} -.popover.top { - margin-top: -10px; -} -.popover.right { - margin-left: 10px; -} -.popover.bottom { - margin-top: 10px; -} -.popover.left { - margin-left: -10px; -} -.popover-title { - margin: 0; - padding: 8px 14px; - font-size: 14px; - font-weight: normal; - line-height: 18px; - background-color: #f7f7f7; - border-bottom: 1px solid #ebebeb; - border-radius: 5px 5px 0 0; -} -.popover-content { - padding: 9px 14px; -} -.popover .arrow, -.popover .arrow:after { - position: absolute; - display: block; - width: 0; - height: 0; - border-color: transparent; - border-style: solid; -} -.popover .arrow { - border-width: 11px; -} -.popover .arrow:after { - border-width: 10px; - 
content: ""; -} -.popover.top .arrow { - left: 50%; - margin-left: -11px; - border-bottom-width: 0; - border-top-color: #999999; - border-top-color: rgba(0, 0, 0, 0.25); - bottom: -11px; -} -.popover.top .arrow:after { - content: " "; - bottom: 1px; - margin-left: -10px; - border-bottom-width: 0; - border-top-color: #ffffff; -} -.popover.right .arrow { - top: 50%; - left: -11px; - margin-top: -11px; - border-left-width: 0; - border-right-color: #999999; - border-right-color: rgba(0, 0, 0, 0.25); -} -.popover.right .arrow:after { - content: " "; - left: 1px; - bottom: -10px; - border-left-width: 0; - border-right-color: #ffffff; -} -.popover.bottom .arrow { - left: 50%; - margin-left: -11px; - border-top-width: 0; - border-bottom-color: #999999; - border-bottom-color: rgba(0, 0, 0, 0.25); - top: -11px; -} -.popover.bottom .arrow:after { - content: " "; - top: 1px; - margin-left: -10px; - border-top-width: 0; - border-bottom-color: #ffffff; -} -.popover.left .arrow { - top: 50%; - right: -11px; - margin-top: -11px; - border-right-width: 0; - border-left-color: #999999; - border-left-color: rgba(0, 0, 0, 0.25); -} -.popover.left .arrow:after { - content: " "; - right: 1px; - border-right-width: 0; - border-left-color: #ffffff; - bottom: -10px; -} -.carousel { - position: relative; -} -.carousel-inner { - position: relative; - overflow: hidden; - width: 100%; -} -.carousel-inner > .item { - display: none; - position: relative; - -webkit-transition: 0.6s ease-in-out left; - transition: 0.6s ease-in-out left; -} -.carousel-inner > .item > img, -.carousel-inner > .item > a > img { - display: block; - max-width: 100%; - height: auto; - line-height: 1; -} -.carousel-inner > .active, -.carousel-inner > .next, -.carousel-inner > .prev { - display: block; -} -.carousel-inner > .active { - left: 0; -} -.carousel-inner > .next, -.carousel-inner > .prev { - position: absolute; - top: 0; - width: 100%; -} -.carousel-inner > .next { - left: 100%; -} -.carousel-inner > .prev { - 
left: -100%; -} -.carousel-inner > .next.left, -.carousel-inner > .prev.right { - left: 0; -} -.carousel-inner > .active.left { - left: -100%; -} -.carousel-inner > .active.right { - left: 100%; -} -.carousel-control { - position: absolute; - top: 0; - left: 0; - bottom: 0; - width: 15%; - opacity: 0.5; - filter: alpha(opacity=50); - font-size: 20px; - color: #ffffff; - text-align: center; - text-shadow: 0 1px 2px rgba(0, 0, 0, 0.6); -} -.carousel-control.left { - background-image: -webkit-gradient(linear, 0% top, 100% top, from(rgba(0, 0, 0, 0.5)), to(rgba(0, 0, 0, 0.0001))); - background-image: -webkit-linear-gradient(left, color-stop(rgba(0, 0, 0, 0.5) 0%), color-stop(rgba(0, 0, 0, 0.0001) 100%)); - background-image: -moz-linear-gradient(left, rgba(0, 0, 0, 0.5) 0%, rgba(0, 0, 0, 0.0001) 100%); - background-image: linear-gradient(to right, rgba(0, 0, 0, 0.5) 0%, rgba(0, 0, 0, 0.0001) 100%); - background-repeat: repeat-x; - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1); -} -.carousel-control.right { - left: auto; - right: 0; - background-image: -webkit-gradient(linear, 0% top, 100% top, from(rgba(0, 0, 0, 0.0001)), to(rgba(0, 0, 0, 0.5))); - background-image: -webkit-linear-gradient(left, color-stop(rgba(0, 0, 0, 0.0001) 0%), color-stop(rgba(0, 0, 0, 0.5) 100%)); - background-image: -moz-linear-gradient(left, rgba(0, 0, 0, 0.0001) 0%, rgba(0, 0, 0, 0.5) 100%); - background-image: linear-gradient(to right, rgba(0, 0, 0, 0.0001) 0%, rgba(0, 0, 0, 0.5) 100%); - background-repeat: repeat-x; - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1); -} -.carousel-control:hover, -.carousel-control:focus { - color: #ffffff; - text-decoration: none; - opacity: 0.9; - filter: alpha(opacity=90); -} -.carousel-control .icon-prev, -.carousel-control .icon-next, -.carousel-control .glyphicon-chevron-left, -.carousel-control 
.glyphicon-chevron-right { - position: absolute; - top: 50%; - left: 50%; - z-index: 5; - display: inline-block; -} -.carousel-control .icon-prev, -.carousel-control .icon-next { - width: 20px; - height: 20px; - margin-top: -10px; - margin-left: -10px; - font-family: serif; -} -.carousel-control .icon-prev:before { - content: '\2039'; -} -.carousel-control .icon-next:before { - content: '\203a'; -} -.carousel-indicators { - position: absolute; - bottom: 10px; - left: 50%; - z-index: 15; - width: 60%; - margin-left: -30%; - padding-left: 0; - list-style: none; - text-align: center; -} -.carousel-indicators li { - display: inline-block; - width: 10px; - height: 10px; - margin: 1px; - text-indent: -999px; - border: 1px solid #ffffff; - border-radius: 10px; - cursor: pointer; -} -.carousel-indicators .active { - margin: 0; - width: 12px; - height: 12px; - background-color: #ffffff; -} -.carousel-caption { - position: absolute; - left: 15%; - right: 15%; - bottom: 20px; - z-index: 10; - padding-top: 20px; - padding-bottom: 20px; - color: #ffffff; - text-align: center; - text-shadow: 0 1px 2px rgba(0, 0, 0, 0.6); -} -.carousel-caption .btn { - text-shadow: none; -} -@media screen and (min-width: 768px) { - .carousel-control .icon-prev, - .carousel-control .icon-next { - width: 30px; - height: 30px; - margin-top: -15px; - margin-left: -15px; - font-size: 30px; - } - .carousel-caption { - left: 20%; - right: 20%; - padding-bottom: 30px; - } - .carousel-indicators { - bottom: 20px; - } -} -.clearfix:before, -.clearfix:after { - content: " "; - /* 1 */ - - display: table; - /* 2 */ - -} -.clearfix:after { - clear: both; -} -.pull-right { - float: right !important; -} -.pull-left { - float: left !important; -} -.hide { - display: none !important; -} -.show { - display: block !important; -} -.invisible { - visibility: hidden; -} -.text-hide { - font: 0/0 a; - color: transparent; - text-shadow: none; - background-color: transparent; - border: 0; -} -.affix { - position: fixed; 
-} -@-ms-viewport { - width: device-width; -} -@media screen and (max-width: 400px) { - @-ms-viewport { - width: 320px; - } -} -.hidden { - display: none !important; - visibility: hidden !important; -} -.visible-xs { - display: none !important; -} -tr.visible-xs { - display: none !important; -} -th.visible-xs, -td.visible-xs { - display: none !important; -} -@media (max-width: 767px) { - .visible-xs { - display: block !important; - } - tr.visible-xs { - display: table-row !important; - } - th.visible-xs, - td.visible-xs { - display: table-cell !important; - } -} -@media (min-width: 768px) and (max-width: 991px) { - .visible-xs.visible-sm { - display: block !important; - } - tr.visible-xs.visible-sm { - display: table-row !important; - } - th.visible-xs.visible-sm, - td.visible-xs.visible-sm { - display: table-cell !important; - } -} -@media (min-width: 992px) and (max-width: 1199px) { - .visible-xs.visible-md { - display: block !important; - } - tr.visible-xs.visible-md { - display: table-row !important; - } - th.visible-xs.visible-md, - td.visible-xs.visible-md { - display: table-cell !important; - } -} -@media (min-width: 1200px) { - .visible-xs.visible-lg { - display: block !important; - } - tr.visible-xs.visible-lg { - display: table-row !important; - } - th.visible-xs.visible-lg, - td.visible-xs.visible-lg { - display: table-cell !important; - } -} -.visible-sm { - display: none !important; -} -tr.visible-sm { - display: none !important; -} -th.visible-sm, -td.visible-sm { - display: none !important; -} -@media (max-width: 767px) { - .visible-sm.visible-xs { - display: block !important; - } - tr.visible-sm.visible-xs { - display: table-row !important; - } - th.visible-sm.visible-xs, - td.visible-sm.visible-xs { - display: table-cell !important; - } -} -@media (min-width: 768px) and (max-width: 991px) { - .visible-sm { - display: block !important; - } - tr.visible-sm { - display: table-row !important; - } - th.visible-sm, - td.visible-sm { - display: table-cell 
!important; - } -} -@media (min-width: 992px) and (max-width: 1199px) { - .visible-sm.visible-md { - display: block !important; - } - tr.visible-sm.visible-md { - display: table-row !important; - } - th.visible-sm.visible-md, - td.visible-sm.visible-md { - display: table-cell !important; - } -} -@media (min-width: 1200px) { - .visible-sm.visible-lg { - display: block !important; - } - tr.visible-sm.visible-lg { - display: table-row !important; - } - th.visible-sm.visible-lg, - td.visible-sm.visible-lg { - display: table-cell !important; - } -} -.visible-md { - display: none !important; -} -tr.visible-md { - display: none !important; -} -th.visible-md, -td.visible-md { - display: none !important; -} -@media (max-width: 767px) { - .visible-md.visible-xs { - display: block !important; - } - tr.visible-md.visible-xs { - display: table-row !important; - } - th.visible-md.visible-xs, - td.visible-md.visible-xs { - display: table-cell !important; - } -} -@media (min-width: 768px) and (max-width: 991px) { - .visible-md.visible-sm { - display: block !important; - } - tr.visible-md.visible-sm { - display: table-row !important; - } - th.visible-md.visible-sm, - td.visible-md.visible-sm { - display: table-cell !important; - } -} -@media (min-width: 992px) and (max-width: 1199px) { - .visible-md { - display: block !important; - } - tr.visible-md { - display: table-row !important; - } - th.visible-md, - td.visible-md { - display: table-cell !important; - } -} -@media (min-width: 1200px) { - .visible-md.visible-lg { - display: block !important; - } - tr.visible-md.visible-lg { - display: table-row !important; - } - th.visible-md.visible-lg, - td.visible-md.visible-lg { - display: table-cell !important; - } -} -.visible-lg { - display: none !important; -} -tr.visible-lg { - display: none !important; -} -th.visible-lg, -td.visible-lg { - display: none !important; -} -@media (max-width: 767px) { - .visible-lg.visible-xs { - display: block !important; - } - tr.visible-lg.visible-xs { 
- display: table-row !important; - } - th.visible-lg.visible-xs, - td.visible-lg.visible-xs { - display: table-cell !important; - } -} -@media (min-width: 768px) and (max-width: 991px) { - .visible-lg.visible-sm { - display: block !important; - } - tr.visible-lg.visible-sm { - display: table-row !important; - } - th.visible-lg.visible-sm, - td.visible-lg.visible-sm { - display: table-cell !important; - } -} -@media (min-width: 992px) and (max-width: 1199px) { - .visible-lg.visible-md { - display: block !important; - } - tr.visible-lg.visible-md { - display: table-row !important; - } - th.visible-lg.visible-md, - td.visible-lg.visible-md { - display: table-cell !important; - } -} -@media (min-width: 1200px) { - .visible-lg { - display: block !important; - } - tr.visible-lg { - display: table-row !important; - } - th.visible-lg, - td.visible-lg { - display: table-cell !important; - } -} -.hidden-xs { - display: block !important; -} -tr.hidden-xs { - display: table-row !important; -} -th.hidden-xs, -td.hidden-xs { - display: table-cell !important; -} -@media (max-width: 767px) { - .hidden-xs { - display: none !important; - } - tr.hidden-xs { - display: none !important; - } - th.hidden-xs, - td.hidden-xs { - display: none !important; - } -} -@media (min-width: 768px) and (max-width: 991px) { - .hidden-xs.hidden-sm { - display: none !important; - } - tr.hidden-xs.hidden-sm { - display: none !important; - } - th.hidden-xs.hidden-sm, - td.hidden-xs.hidden-sm { - display: none !important; - } -} -@media (min-width: 992px) and (max-width: 1199px) { - .hidden-xs.hidden-md { - display: none !important; - } - tr.hidden-xs.hidden-md { - display: none !important; - } - th.hidden-xs.hidden-md, - td.hidden-xs.hidden-md { - display: none !important; - } -} -@media (min-width: 1200px) { - .hidden-xs.hidden-lg { - display: none !important; - } - tr.hidden-xs.hidden-lg { - display: none !important; - } - th.hidden-xs.hidden-lg, - td.hidden-xs.hidden-lg { - display: none !important; - 
} -} -.hidden-sm { - display: block !important; -} -tr.hidden-sm { - display: table-row !important; -} -th.hidden-sm, -td.hidden-sm { - display: table-cell !important; -} -@media (max-width: 767px) { - .hidden-sm.hidden-xs { - display: none !important; - } - tr.hidden-sm.hidden-xs { - display: none !important; - } - th.hidden-sm.hidden-xs, - td.hidden-sm.hidden-xs { - display: none !important; - } -} -@media (min-width: 768px) and (max-width: 991px) { - .hidden-sm { - display: none !important; - } - tr.hidden-sm { - display: none !important; - } - th.hidden-sm, - td.hidden-sm { - display: none !important; - } -} -@media (min-width: 992px) and (max-width: 1199px) { - .hidden-sm.hidden-md { - display: none !important; - } - tr.hidden-sm.hidden-md { - display: none !important; - } - th.hidden-sm.hidden-md, - td.hidden-sm.hidden-md { - display: none !important; - } -} -@media (min-width: 1200px) { - .hidden-sm.hidden-lg { - display: none !important; - } - tr.hidden-sm.hidden-lg { - display: none !important; - } - th.hidden-sm.hidden-lg, - td.hidden-sm.hidden-lg { - display: none !important; - } -} -.hidden-md { - display: block !important; -} -tr.hidden-md { - display: table-row !important; -} -th.hidden-md, -td.hidden-md { - display: table-cell !important; -} -@media (max-width: 767px) { - .hidden-md.hidden-xs { - display: none !important; - } - tr.hidden-md.hidden-xs { - display: none !important; - } - th.hidden-md.hidden-xs, - td.hidden-md.hidden-xs { - display: none !important; - } -} -@media (min-width: 768px) and (max-width: 991px) { - .hidden-md.hidden-sm { - display: none !important; - } - tr.hidden-md.hidden-sm { - display: none !important; - } - th.hidden-md.hidden-sm, - td.hidden-md.hidden-sm { - display: none !important; - } -} -@media (min-width: 992px) and (max-width: 1199px) { - .hidden-md { - display: none !important; - } - tr.hidden-md { - display: none !important; - } - th.hidden-md, - td.hidden-md { - display: none !important; - } -} -@media 
(min-width: 1200px) { - .hidden-md.hidden-lg { - display: none !important; - } - tr.hidden-md.hidden-lg { - display: none !important; - } - th.hidden-md.hidden-lg, - td.hidden-md.hidden-lg { - display: none !important; - } -} -.hidden-lg { - display: block !important; -} -tr.hidden-lg { - display: table-row !important; -} -th.hidden-lg, -td.hidden-lg { - display: table-cell !important; -} -@media (max-width: 767px) { - .hidden-lg.hidden-xs { - display: none !important; - } - tr.hidden-lg.hidden-xs { - display: none !important; - } - th.hidden-lg.hidden-xs, - td.hidden-lg.hidden-xs { - display: none !important; - } -} -@media (min-width: 768px) and (max-width: 991px) { - .hidden-lg.hidden-sm { - display: none !important; - } - tr.hidden-lg.hidden-sm { - display: none !important; - } - th.hidden-lg.hidden-sm, - td.hidden-lg.hidden-sm { - display: none !important; - } -} -@media (min-width: 992px) and (max-width: 1199px) { - .hidden-lg.hidden-md { - display: none !important; - } - tr.hidden-lg.hidden-md { - display: none !important; - } - th.hidden-lg.hidden-md, - td.hidden-lg.hidden-md { - display: none !important; - } -} -@media (min-width: 1200px) { - .hidden-lg { - display: none !important; - } - tr.hidden-lg { - display: none !important; - } - th.hidden-lg, - td.hidden-lg { - display: none !important; - } -} -.visible-print { - display: none !important; -} -tr.visible-print { - display: none !important; -} -th.visible-print, -td.visible-print { - display: none !important; -} -@media print { - .visible-print { - display: block !important; - } - tr.visible-print { - display: table-row !important; - } - th.visible-print, - td.visible-print { - display: table-cell !important; - } - .hidden-print { - display: none !important; - } - tr.hidden-print { - display: none !important; - } - th.hidden-print, - td.hidden-print { - display: none !important; - } -} diff --git a/docs/reference/themes/mongodb/static/css/mongodb-docs.css 
b/docs/reference/themes/mongodb/static/css/mongodb-docs.css deleted file mode 100644 index 659bbac9305..00000000000 --- a/docs/reference/themes/mongodb/static/css/mongodb-docs.css +++ /dev/null @@ -1,1453 +0,0 @@ -/* - * nature.css_t - * ~~~~~~~~~~~~ - * - * Sphinx stylesheet -- nature theme. - * - * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ -@import url("basic.css"); - -@import url(http://fonts.googleapis.com/css?family=Source+Code+Pro:300,500,700); - -/* -- page layout ----------------------------------------------------------- */ - -#baselineOverlay { - display: none; - position: fixed; - top: 0; - left: 0; - right: 0; - bottom: 0; - width: 100%; - height: 100%; - z-index: 2000; - opacity: 0.5; - background: url(../img/24px-baseline-overlay.png) 0 0 repeat; -} - -body { - font-family: "PT Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; - margin: 0; - padding: 0; - color: #494747; - -webkit-font-smoothing: antialiased; -} - -hr { border: 1px solid #B1B4B6; } - -.content { - font-size: 16px; - line-height: 24px; -} - -div.body { - overflow-x: auto; -} - -div.related { - color: #fff; - background-color: #402817; - margin-bottom: 0.9em; - font-size:90%; -} - -div.related a { - color: #E2F3CC; -} - -div.related ul { - padding: 0 0 0; -} - -div.footer-nav div.related ul { - padding: 5px 10px 0; -} - -.figure img { - width: 100%; -} - - -p.searchtip { font-size: 93%; } - -/* -- body styles ----------------------------------------------------------- */ - -a { - color: #006cbc; - text-decoration: none; -} - -a:hover { - text-decoration: underline; -} - -div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { - font-family: "PT Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; - font-weight: normal; - line-height: 24px; - color: #313030; - margin: 0; - padding: 0; -} - -div.body h2, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { - cursor: 
pointer; -} - -div.body h1 { - border-top: none; - font-size: 36px; - line-height: 48px; - padding: 0; -} -div.body h2 { - font-size: 24px; -} -div.body h3 { - font-size: 18px; - font-weight: bold; - margin-bottom: 0; -} - -div.body h4 { - font-size: 16px; - font-weight: bold; - margin-bottom: 0; -} -div.body h5 { - font-size: 16px; -} -div.body h6 { - font-size: 16px; - font-weight: 300; - } - -div.body h1 + p, div.body h2 + p, div.body h3 + p, div.body h4 + p, -div.body h1 + dl, div.body h2 + dl, div.body h3 + dl, div.body h4 + dl { - padding: 0; - text-indent:0; -} - -div.body h1 + ul, div.body h2 + ul, div.body h3 + ul, div.body h4 + ul, -div.section h1 + ul, div.section h2 + ul, div.section h3 + ul, div.section h4 + ul{ - padding-top:0; - margin-top: 0.3em; - text-indent:0; - margin-left: -1.5em !important; -} - -a.headerlink { - - color: #c60f0f; - font-size: 0.8em; - padding: 0 4px 0 4px; - text-decoration: none; - - -} - -a.headerlink:hover { - background-color: #c60f0f; - color: white; -} - -div.body li { - padding-top: .2em; -} -div.highlight{ background-color: white; } -dl.binary { display: none; } -div.topic { background-color: #eee; } - -table.docutils > div.admonition-example { - background-color:inherit; -} - -div.admonition-example pre { - background-color: #FBFBFB; -} - -div.seealso { - padding-bottom: 7px; -} - -pre { - padding: 24px 12px; - color: #222; - margin: 24px 0; -} - -tt { - color: #000; - font-family: 'Source Code Pro', monospace; -} - -a>tt { - color: #006cbc; -} - -tt.xref, a tt { - font-weight: normal; -} - - -.viewcode-back { font-family: helvetica,arial,sans-serif; } - -div.viewcode-block:target { - background-color: #f4debf; - border-top: 1px solid #ac9; - border-bottom: 1px solid #ac9; -} - -/* -p.first, p.last { - margin-bottom: 0 !important; -} -*/ - -table.docutils td { padding: 1px 8px 1em 5px; } - -table.docutils.field-list ul.first.last.simple>li { padding-top: 1em; } -table.docutils.field-list ul.first.last.simple>li>p { 
padding-top: 1em; } - -div.highlight-javascript>div.highlight>pre>span.hll { background-color: transparent; } -div.highlight-javascript>div.highlight>pre>span.hll>span.nx { font-weight: bold; } - -table.footnote { - font-size: 0.95em; - line-height: 1.3em; -} - -div td pre { - border: none; - -webkit-box-shadow: none; - -moz-box-shadow: none; - margin: 0px; - padding-top: 0px; - padding-right: 8px; - background-color: transparent; -} - -table.docutils { - margin: 24px 0; - font-size: 14px; - line-height: 24px; -} - -table.docutils td { padding: 11px 5px 12px; } - -table.docutils > thead th.head { - padding: 0 5px 12px; -} - -table.docutils td, table.docutils th { - border-color: #ebebed; -} - -table.index-table td { - text-align: left; - border-bottom: none; - border-right: 1px solid #ebebed; - padding-left: 25px; - padding-right: 0px; - padding-bottom: 0px; - padding-top: 5px; -} - -table.index-table td:last-child { - border-right: 0; -} - -table.index-table tr.row-even td p { - line-height: 24px; - margin-bottom: 10px; -} - -table.index-table tr.row-odd td { - text-align: center; - border-bottom: none; - border-right: none; - padding-left: 15px; - padding-right: 15px; - max-height:5px; - padding-top: 0px; -} - -table.index-table { - margin: auto; - width: 98%; /* hack to prevent horizontal scrolling in index.html */ -} - -table.index-table > thead th.head { - text-align: left; - padding: 0 0 5px 24px; - border-bottom: none; -} - -div#mongodb ul { - list-style: none; - padding-left:0px; - padding-right:1em; -} -div#mongodb ul.simple { - list-style:circle; - margin-top: 0px; - padding-left:0px; -} - -div.related li.right { - float: right; - margin-right: 5px; - margin-top: 85px; -} - -#main-db { - padding-top: 2.5em; - padding-bottom: 2em; - background-image: url(../img/back-body.png); - background-repeat: repeat-x; -} - -div#top-right { - position: absolute; - top: -3px; - right: 10px; - *z-index: 10; -} - -div#top-right ul#header-menu-bar { - margin-top: 0; - 
padding-left: 20px; - height: 38px; - background-image: url(../img/trans-user-left.png); - background-repeat: no-repeat; -} - -div#top-right div.user-right { - float: right; - height: 38px; - min-width: 186px; - margin: 0; - padding: 0 20px 0 0; - font-size: 10pt; - background-image: url(../img/trans-user-right.png); - background-position: top right; - background-repeat: no-repeat; -} - -div#top-right div.user-right li.normal { - float: left; - padding: 8px 1em 0 0; - height: 38px; - background-image: url(../img/trans-user-back.png); - background-repeat: repeat-x; - list-style-type: none; - font-size: 10pt; - color: #c48c55; -} - -div#top-right div.user-right a, div#top-right div.user-right a:visited { - border-right: 1px solid #663f12; - padding-right: 1em; - color: #aa814d; - font-weight: normal; -} - -div#top-right div.user-right li.last, div#top-right div.user-right a.last, div#top-right div.user-right a.last:visited { - padding-right: 0; - border-right: none; -} - -.quick-search { - margin: 0; - padding: 0; -} - -#quick-search input { float: right; } - -.quick-search input { - border: none; - margin: 0; - padding: 0; -} - -input.blank-search, .quick-search .placeholded { - color: #666; -} - -input.blur { color: #666; } - -input[type="text"]:disabled { color: #333; } - -#quick-search-query { width: 13em; } - -input#quick-search-query { - -webkit-appearance:none; - background-color: transparent !important; - position: relative; - top: 7px; - width: 230px; - border: none; - font-size: 73%; - color: #dbcaaf; -} - -input#quick-search-query:active { border:none !important; } - -#quick-search .placeholded { color: #dbcaaf; } - -input.blur { color: #666; } - -input[type="text"]:disabled { color: #333; } - -#quick-search-query { width: 13em; } - -#home-nav { - padding: 0; - background-color: #f6f4cd; -} - -#home-nav a:active, #home-nav a:focus { outline: none; } - -div.split ul.home-nav { - margin: 0; - height: 62px; - padding: 15px 0 4px 0; - width: 980px; -} - 
-div.split ul.home-nav li { - float: left; - margin: 0; - padding: 0; - list-style-type: none; - list-style-image: none; -} - -ul.home-nav li.docs { width: 276px; } - -ul.home-nav li.docs a, ul.home-nav li.docs a:visited { - float: left; - background: url(http://www.mongodb.org/static/images/home_nav.png) -11px -63px; - width: 209px; - height: 54px; -} - -ul.home-nav li.docs a:hover { - float: left; - background: url(http://www.mongodb.org/static/images/home_nav.png) -11px 0; - width: 209px; - height: 54px; -} - -ul.home-nav li.try { width: 238px; } - -ul.home-nav li.try a, ul.home-nav li.try a:visited { - float: left; - background: url(http://www.mongodb.org/static/images/home_nav.png) -274px -63px; - width: 176px; - height: 54px; -} - -ul.home-nav li.try a:hover { - float: left; - background: url(http://www.mongodb.org/static/images/home_nav.png) -274px 0; - width: 176px; - height: 54px; -} - -ul.home-nav li.downloads { width: 245px; } - -ul.home-nav li.downloads a, ul.home-nav li.downloads a:visited { - float: left; - background: url(http://www.mongodb.org/static/images/home_nav.png) -497px -63px; - width: 185px; - height: 54px; -} - -ul.home-nav li.downloads a:hover { - float: left; - background: url(http://www.mongodb.org/static/images/home_nav.png) -497px 0; - width: 185px; - height: 54px; -} - -ul.home-nav li.drivers { - width: 192px; -} - -ul.home-nav li.drivers a, ul.home-nav li.drivers a:visited { - float: left; - background: url(http://www.mongodb.org/static/images/home_nav.png) right -63px no-repeat; - width: 194px; - height: 54px; -} - -ul.home-nav li.drivers a:hover { - float: left; - background: url(http://www.mongodb.org/static/images/home_nav.png) right 0 no-repeat; - width: 194px; - height: 54px; -} - -ul.home-nav li .hidden { visibility: hidden; } - -div#docs-header { width: 950px; } - -div#main-db.hidden { display: none; } - -tt,tt span.pre { - font-family: 'Source Code Pro', monospace; -} -tt.descname, -tt.descclassname { - font-size: 16px; - 
font-weight: 300; -} - -dl.method dt { - margin-bottom: 24px; -} - -dl.method > dt big { - font-family: 'Source Code Pro', monospace; - font-weight: 300; - font-size: 16px; -} - -dl.method > dt em { - font-family: 'Source Code Pro', monospace; - font-weight: 700; - font-style: normal; -} -div.section > h1 + dl.dbcommand > dt {display: none;} -div.section > h1 + dl.operator > dt {display: none;} -div.section > h1 + dl.method > dt {display: none;} -div.section > h1 + dl.pipeline > dt {display: none;} -div.section > h1 + dl.group > dt {display: none;} -div.section > h1 + dl.expression > dt {display: none;} -div.section > h1 + dl.projection > dt {display: none;} - -.body p { - margin: 24px 0; -} - -.body p.caption { - margin-top: 0; -} - -.body li>p { - margin: 0; -} - -div.highlight pre { - background: #f5f6f7 url(../img/code-block-bg.png) 0 0 repeat; - border-radius: 0; - border: none; - border-left: 5px solid #494747; - font-family: 'Source Code Pro', monospace; - font-size: 14px; - line-height: 24px; - overflow: auto; - word-wrap: normal; - white-space: pre; -} - -@media (-webkit-min-device-pixel-ratio: 2), -(min-resolution: 192dpi) { - div.highlight pre { - background: #f5f6f7 url(../img/code-block-bg@2x.png) 0 0 repeat; - background-size: 12px 12px; - } -} - -table.docutils td div.highlight pre { - background: none; - border: none; - padding: 0; -} - -div.section { - margin-top: 48px; -} - -h2 + div.section, -h3 + div.section, -h4 + div.section { - margin-top: 0; -} - -div.section > h2, div.section > h3, div.section > h4 { - margin: 24px 0; -} - -div.section > h3, -div.section > h4 { - margin-bottom: 0; -} - -.body h3 + p, -.body h4 + p { - margin-top: 0; -} - -#btnv { - padding-top: 2em; - padding-bottom: 2.5em; - display: table; - width: 100%; -} - -#btnv .btn-prev-text, -#btnv .btn-next-text, -#btnv .btn-arrow-left, -#btnv .btn-arrow-right { - display: table-cell; - vertical-align: middle; -} - -#btnv .btn-arrow-left, -#btnv .btn-next-text { - text-align: 
right; -} - -#btnv .btn-arrow-right, -#btnv .btn-prev-text { - text-align: left; -} - -#btnv .btn-prev-text>span, #btnv .btn-next-text>span { - display: inline-block; - width: 270px; -} - -table.docutils tbody tr td div.highlight pre { background-color: inherit; } - -/* somehow, powershell commands starting with a directory reference does not include a div.highlight */ - -div.highlight-powershell pre { background-color: #F5F5F5; } - -div.body li > p.first { margin-bottom: 0; } -h4#vnd {padding:0;margin:0;line-height:1.5;padding-left:10px;} -div.body div.bc { - background: white; - cursor: default; - margin-bottom: 24px; -} -div.body div.bc .fa-home { - font-size: 24px; - margin-top: 2px; -} -div.body div.bc li.jr { float: right; display:none; } -div.body div.bc ul { padding:0; margin:0 } -div.body div.bc ul li { list-style: none; font-size: 13px; display:inline; } -div.body div.bc ul li a { color: #006cbc; } -div.dc span.bcpoint { font-weight: bold; font-size:0.9em; color: #343434; } -div.body div.contents { max-width: 30%; float: right; padding: 0 0.5em 0.5em; margin-left: 1em; } -div.body div.contents.long-toc { max-width: 100%; float: none; margin-left: 0; } -div.body div.contents li { padding-top: .25em; } -div.body div.contents ul { margin: 0; padding-left: 1.5em; } -div.hidden { display: none; } - -#vn { - font-size: 0.85em; -} - -.document .body .edit-link, -.document .body .edit-link:hover { - position: relative; - float: right; - bottom: -9px; - text-decoration: none; - font-size: 24px; - color: #313030; -} - -a > em { font-style: normal; } - -div.versionadded > p > span, div.versionchanged > p > span, div.deprecated > p > span { - font-style: italic; -} - -/* Content */ -.content { - box-sizing: border-box; - width: 1092px; -} - -.content .main-column { - margin-top: 72px; - margin-left: 293px; - min-height: 600px; - width: 800px; - padding: 0 30px; - - transition: margin-left 0.4s cubic-bezier(.02,.01,.47,1); - -moz-transition: margin-left 0.4s 
cubic-bezier(.02,.01,.47,1); - -webkit-transition: margin-left 0.4s cubic-bezier(.02,.01,.47,1); -} - -#see-also > p { - margin-top: 12.5px; -} - - -/* Header styles */ - -#header-db { - position: fixed; - -webkit-transform: translateZ(0); /* hack around Chrome bug: http://stackoverflow.com/questions/11258877/fixed-element-disappears-in-chrome */ - height: 50px; - top: 0; - left: 0; - width: 100%; - font-size: 31px; - background-color: #3b291f; - color: white; - margin: 0; - padding: 0; - z-index: 100; -} - -#header-db .header-content { - width: 1385px; - padding-left: 12px; - - transition: width 0.4s cubic-bezier(.02,.01,.47,1); - -moz-transition: width 0.4s cubic-bezier(.02,.01,.47,1); - -webkit-transition: width 0.4s cubic-bezier(.02,.01,.47,1); -} - -#header-db .nav-items { - display: inline-block; -} - -#header-db .nav-items > a { - font-size: 14px; -} - -#header-db a { - color: white; -} - -#header-db .logo img { - height: 36px; - vertical-align: top; -} -#header-db .nav-items > a:not(:last-child) { - margin-right: 15px; -} - -div.gsc-control-cse-en, div.gsc-control-cse { padding: 0 !important; } - -form.gsc-search-box { - background-color: rgba(255,255,255,0.3); - border-radius: 6px; - border: 1px solid #3b2920; -} - -.gsc-search-box.gsc-search-box-tools .gsc-search-box .gsc-input { - padding: 0; -} - -div.gsc-input-box, -.gsc-search-box .gsc-input>input:hover, .gsc-input-box-hover, -.gsc-search-box .gsc-input>input:focus, .gsc-input-box-focus { - border: 0; - background: transparent; - box-shadow: none; -} - -/* [name] selector is a hack to override google's stylsheet */ -.gsc-input input.gsc-input[name="search"] { - background-color: transparent !important; - color: white; - font-weight: 300; - font-size: 15px; - height: 1.3em !important; -} - -.gsc-input input.gsc-input::-webkit-input-placeholder, -.gsc-input input.gsc-input:-moz-input-placeholder, -.gsc-input input.gsc-input::-moz-input-placeholder, -.gsc-input input.gsc-input:-ms-input-placeholder { - 
color: #CCC; -} - -/* Clear search button */ -.gsib_b { - display: none; -} - -/* [title] selector is a hack to override google's stylesheet */ -input.gsc-search-button[title], input.gsc-search-button:hover[title], input.gsc-search-button:focus[title] { - border-color: transparent; - background-color: transparent; - padding: 0 8px; - box-sizing: content-box; - -moz-box-sizing: content-box; - -webkit-box-sizing: content-box; -} - -#header-db .logo { - padding: 5px 0; -} - -#header-db .search-db { - transition: width 0.3s linear; - -webkit-transition: width 0.3s linear; - -moz-transition: width 0.3s linear; - -o-transition: width 0.3s linear; - margin-left: 45px; - margin-top: 11px; - display: inline-block; -} - -#header-db .search-db.narrow { - width: 98px; -} - -#header-db .search-db.wide { - width: 226px; -} - -/* Footer styles */ -.footer { - width:auto; - font-size:80%; - border:none; - padding: 20px 0; -} - -.footer .copyright { - text-align: center; -} - -.footer p { - margin: 1em 0; - padding: 0; - line-height:1.5; -} - -.footer a { - color: #989898; - text-decoration: underline; -} - -.footer h3 { - margin-top: 10px; - font-size: 16px; -} - -.footer .box { - background-color: #f5f6f7; - min-height: 20px; - padding: 15px; - margin-bottom: 20px; - text-align: left; -} - -.footer .box a { - color: #333; - text-decoration: none; -} - -.footer .box a:hover { - text-decoration: underline; -} - -.footer ul { - list-style-type: none; - padding-left: 20px; -} - -.footer .section-0 { - padding-left: 100px; -} - -.footer .section-1 { - padding-left: 46px; -} - -.footer .section-2 { - padding-left: 64px; -} - -.footer .section-3 { - padding-left: 49px; -} - -@media (max-width: 992px) { - .footer .section-0, - .footer .section-1, - .footer .section-2, - .footer .section-3 { - padding-left: 15px; - } -} - -.social { - height: 21px; - position: absolute; - right: 0; - bottom: 43px; -} - -.right-column .wrapper { - min-height: 100%; - position: relative; - padding-bottom: 
100px; -} - -.right-column .social .img { - background-repeat: no-repeat; - background-size: 21px 21px; - width: 21px; - height: 21px; - display: inline-block; -} - -.right-column .social .twitter-icon .img { - background-image: url(../img/social-twitter.png); -} - -.right-column .social .youtube-icon .img { - background-image: url(../img/social-youtube.png); -} - -.right-column .social .facebook-icon .img { - background-image: url(../img/social-facebook.png); -} - -.right-column .social .gplus-icon .img { - background-image: url(../img/social-gplus.png); -} - -@media only screen and (-webkit-min-device-pixel-ratio: 2) { - .right-column .social .twitter-icon .img { - background-image: url(../img/social-twitter@2x.png); - } - - .right-column .social .youtube-icon .img { - background-image: url(../img/social-youtube@2x.png); - } - - .right-column .social .facebook-icon .img { - background-image: url(../img/social-facebook@2x.png); - } - - .right-column .social .gplus-icon .img { - background-image: url(../img/social-gplus@2x.png); - } -} - -/* Numbered steps directive */ -.sequence-block { - display: table; - width: 100%; - table-layout: fixed; - margin: 25px 0; -} - -.sequence-block > .bullet-block { - display: table-cell; - padding-right: 10px; - width: 2.5em; -} - -.sequence-block .sequence-step { - text-align: center; - background-color: #333; - color: white; - width: 24px; - font-weight: bold; - height: 24px; - border-radius: 12px; -} - -.sequence-block > .section { - display: table-cell; -} - -.sequence-block > .section > h4 { - margin: 0; -} - -/* Collapsible sidebar styles */ -.sidebar { - position: fixed; - -webkit-transform: translateZ(0); /* hack around Chrome bug: http://stackoverflow.com/questions/11258877/fixed-element-disappears-in-chrome */ - background-color: #f5f6f7; - width: 293px; - height: auto; - top: 50px; - bottom: 0; - left: 0; - overflow: auto; - font-size: 0.85em; - z-index: 100; - - transition: left 0.4s cubic-bezier(.02,.01,.47,1); - 
-moz-transition: left 0.4s cubic-bezier(.02,.01,.47,1); - -webkit-transition: left 0.4s cubic-bezier(.02,.01,.47,1); -} - -div.ssidebar { - font-size: 14px; - height: 100%; -} - -div.ssidebarwrapper { - padding: 0; /* overwrites basic.css rules */ - padding-top: 24px; - padding-bottom: 130px; /* Extra padding for the footer items at the bottom */ - min-height: 100%; - position: relative; -} - -div.ssidebar h3 { - padding: 0 12px; - font-size: 14px; - line-height: 24px; - font-weight: bold; - margin: -3px 0 15px 0; -} - -div.ssidebar h3 a.index-link { - text-transform: uppercase; -} - -div.ssidebar h3 a.showlink { - color: #003594; -} - -.ssidebarwrapper > ul > .toctree-l1 { - padding: 11px 0; - line-height: 24px; - - border-top: 1px solid #ebebed; -} - -.ssidebarwrapper > ul > .toctree-l1.current { - background-color: #fff; - border-right: 1px solid #f5f6f7; -} - -.ssidebarwrapper li.toctree-l1 ul > li > a { - line-height: 24px; - display: inline-block; - width: 100%; -} - -.ssidebarwrapper > ul > .toctree-l1:last-child { - border-bottom: 1px solid #ebebed; -} - -.ssidebarwrapper .toctree-l1 li.current.selected-item { - background-color: rgb( 110, 124, 159 ); -} -.ssidebarwrapper .toctree-l1 li.current.selected-item > a, -.ssidebarwrapper .toctree-l1 li.current.selected-item > a > tt { - color: white; -} - -.ssidebarwrapper .toctree-l1 li.current.selected-item > ul { - background-color: white; - color: #333; -} - -.ssidebarwrapper > ul > .toctree-l1 > a { - font-size: 18px; - line-height: 24px; - padding: 0 12px; - width: 100%; - display: inline-block; -} - -.ssidebarwrapper .mms-hosted-search { - margin-top: 15px; -} - -.ssidebarwrapper .nav-footer { - padding: 50px 12px 12px 12px; - position: absolute; - bottom: 48px; -} - -div.ssidebar li.toctree-l2 > a { - text-indent: -12px; - padding-left: 37px; -} -div.ssidebar li.toctree-l3 > a { - text-indent: -12px; - padding-left: 48px; -} -div.ssidebar li.toctree-l4 > a { - text-indent: -12px; - padding-left: 60px; -} 
-div.ssidebar li.toctree-l5 > a { - text-indent: -12px; - padding-left: 72px; -} - -div.ssidebar p { - color: #333; - margin: 12px 0 5px 12px; - padding: 0 12px; -} - -div.ssidebar form { margin-top: 5px; } - -div.ssidebar p.logo { - color: #888; - padding: 0px; - margin: -18px 5px 5px; -} - -div.ssidebar ul ul, -div.ssidebar ul.want-points { - list-style: none outside none; - margin-left: 0; -} - -div.ssidebar ul { - margin: 10px; - padding: 0; - color: #000; -} - -div.ssidebar ul.extra-contents { - margin-top: -1px; -} - -div.ssidebar a { color: #333; } - -div.ssidebar input { - border: 1px solid #ccc; - font-family: sans-serif; - font-size: 1em; -} -div.ssidebar input[type=text]{ - margin-left: 20px; - width: 11em !important; -} -div.ssidebar input[type="submit"] { - text-align: center; - width: 4.5em !important; -} - -div.ssidebar tt.literal .pre { - font-weight: normal; -} - -div.ssidebarwrapper ul { - margin: 12px 0 0 0; - padding: 0; -} -div.ssidebarwrapper ul li a tt span { - font-size:inherit; -} -div.ssidebarwrapper div.idxcontents { - margin-left: 8px; - font-size: 88%; -} - -/* Options panel */ -.option-popup { - position: fixed; - -webkit-transform: translateZ(0); /* hack around Chrome bug: http://stackoverflow.com/questions/11258877/fixed-element-disappears-in-chrome */ - width: 293px; - height: 303px; - bottom: 0; - left: 0; - box-shadow: 0 -2px 3px rgba(0,0,0,0.15); - z-index: 100; - - transition: bottom 300ms cubic-bezier(.02,.01,.47,1); - -moz-transition: bottom 300ms cubic-bezier(.02,.01,.47,1); - -webkit-transition: bottom 300ms cubic-bezier(.02,.01,.47,1); -} - -.option-popup.closed { - bottom: -255px; -} - -.option-popup .option-header { - cursor: pointer; - height: 50px; - background-color: rgb(49, 48, 48); - color: rgb(160, 159, 158); - padding: 15px 14px; - font-size: 15px; - font-weight: bold; -} - -.option-popup .option-header > .fa-gear { - font-size: 18px; - margin-right: 10px; -} - -.option-popup .option-header > .fa-angle-up, 
-.option-popup .option-header > .fa-angle-down { - font-size: 18px; -} - -.option-popup .option-body { - height: 253px; - padding: 22px 16px; - background-color: rgb(73, 71, 71); - color: rgb(191, 191, 190); - font-size: 13px; -} - -.option-popup .option-body > ul { - padding: 0; -} - -.option-popup > .option-body > ul > li { - list-style-type: none; - padding-bottom: 20px; -} - -.option-popup .formats-list, -.option-popup .contribute-list { - margin: 0; - padding: 0; -} - -.option-popup .formats-list li, -.option-popup .contribute-list li { - display: inline; - margin-right: 30px; -} - -.option-body > ul > li > label { - color: rgb( 191, 191, 190 ); - line-height: 1.714; - font-weight: normal; -} - -.option-popup .formats-list > li > a, -.option-popup .contribute-list > li > a { - color: rgb( 255, 255, 255 ); - font-weight: bold; - line-height: 0.914; -} - -/* Right column TOC */ - -.content .right-column { - width: 292px; - padding: 0; - position: fixed; - -webkit-transform: translateZ(0); /* hack around Chrome bug: http://stackoverflow.com/questions/11258877/fixed-element-disappears-in-chrome */ - left: 1093px; - top: 72px; - bottom: 0; - overflow-y: auto; - overflow-x: hidden; -} - -.right-column .toc { - padding: 24px 20px; - background-color: rgb( 245, 246, 247 ); - margin-bottom: 24px; -} - -.right-column .toc .toc-header{ - font-size: 14px; - font-weight: bold; -} - -.right-column .toc a { - font-size: 13px; - text-indent: -11px; -} - -/*hack for correct line height*/ -.right-column .toc li { - line-height: 23px; -} -.right-column .toc li a { - line-height: 24px; -} -/*end hack*/ - -.right-column .toc li { - list-style: none; -} - -.right-column .toc ul { - padding: 0; - margin: 0; -} - -.right-column .toc > ul > li > a { - display: none; -} - -/* hack to hide toc items that are more than 2 levels deep */ -.right-column .toc > ul > li > ul > li > ul > li > ul { - display: none; -} - -.right-column .toc > ul > li > ul { - padding-left: 1px; -} - 
-.right-column .toc > ul > li > ul > li > ul { - padding-left: 13px; -} - -.right-column .toc li > a:before { - content: '\2022'; - color: #333; - opacity: 0.5; - display: inline-block; -} - -/* Admonition styles */ -div.admonition { - margin: 24px 0; - width: auto; - max-width: 100%; - padding: 2px 12px 22px 12px; - border-left: 5px solid transparent; -} - -.admonition .admonition-title { - margin-bottom: 0; - font-size: 12px; - font-weight: bold; - text-transform: uppercase; - line-height: 24px; -} - -.admonition .admonition-title:after { - content: ":"; - font-weight:900; -} - -.admonition > p { - margin: 0 0 12.5px 0; -} - -.admonition > p.last { - margin-bottom: 0; -} - -.admonition.admonition-platform-support, -.admonition.tip, -.admonition.admonition-tip, -.admonition.note { - background-color: #edf4e8; - border-color: #6ba442; -} - -.admonition.admonition-platform-support .admonition-title, -.admonition.tip .admonition-title, -.admonition.note .admonition-title { - color: #89b668; -} - -.admonition.important { - background-color: #fff2d5; - border-color: #ffb618; -} -.admonition.important .admonition-title { - color: #ffb618; -} - -/*.admonition.tip, .admonition-tip { - background-color: #a6c88e; - border-color: #507b32; -}*/ - -.admonition.admonition-optional { - background-color: inherit; - border: 1px solid #DCDCDC; -} - -.admonition.admonition-example { - background-color: #F5F5F5; - color: #000000; -} - -.admonition.warning { - background-color: #fae6e5; - border-color: #ed271c; -} - -.admonition.warning .admonition-title { - color: #ed271c; -} - -div.admonition pre { - margin: 10px 0; -} - -dd > div.admonition { - margin-left: 0; -} - -p > div.admonition { - margin-left:0; -} - -li > div.admonition { - margin-left:0; -} - -div.admonition.note table.docutils tr:last-child td { - border-bottom: 0; -} - -@media (max-width: 1393px) { - #header-db .header-content { - width: 1068px; - } - - .content .right-column { - display: none; - } -} - 
-.expand-toc-icon { - display: none; -} - -@media (max-width: 1093px) { - .expand-toc-icon { - display: block; - padding-top: 10px; - padding-right: 10px; - color: white; - } - - .expand-toc-icon:hover, - .expand-toc-icon:active { - color: white; - text-decoration: none; - } - - #header-db .header-content { - width: 775px; - padding-left: 25px; - } - - .sidebar { - left: -293px; - box-shadow: 0 0 13px rgba(0,0,0,0.3); - } - - .sidebar.reveal { - left: 0; - } - - .content .main-column { - margin-left: 0; - } -} diff --git a/docs/reference/themes/mongodb/static/css/overrides.css b/docs/reference/themes/mongodb/static/css/overrides.css deleted file mode 100644 index 31a0096ae77..00000000000 --- a/docs/reference/themes/mongodb/static/css/overrides.css +++ /dev/null @@ -1,157 +0,0 @@ -.toggle-nav .fa { - cursor: pointer; - display: inline-block; - font-size: 20px; -} - -.logo { - margin-left: 10px; -} - -.sidebar-menu .fa { - margin: auto 10px; -} - -.sidebar-closed .content .main-column { - margin-left: 5px; -} - -.jsEnabled pre { padding: 0px; overflow: auto; word-wrap: normal; white-space: nowrap;} -.jsEnabled pre code { padding: 24px 12px; overflow: auto; white-space: pre;} - -.body blockquote { - background-color: #edf4e8; - border-color: #6ba442; -} -.body blockquote strong { - margin-bottom: 0; - font-size: 12px; - font-weight: bold; - text-transform: uppercase; - line-height: 24px; - background-color: #edf4e8; - border-color: #6ba442; -} -.body blockquote p { margin: 0px; } - - -.sidebar-menu li.toctree-l1 > ul { - display: none; -} -.sidebar-menu li.toctree-l1 > ul.current { - display: block; -} - -.sidebar-menu ul ul { - margin: 0; -} - -.right-column .toc { - display: none; - padding: 12px 20px; -} -.right-column .toc .toc-header { - font-size: 12px; - font-weight: bold; - text-transform: uppercase; - padding-bottom: 10px; -} -.right-column .toc #TableOfContents li > ul { - padding-left: 5px; -} - -.body div.admonition { - padding: 2px 12px 12px 12px; -} - 
-.body div.admonition h5 { - font-weight: 800; - text-transform: uppercase; -} - -a code { - color: #006cbc; -} - -code { - background-color: #f5f6f7; - color: #494747; -} - -a code { - color: #006cbc; -} - -#search { - visibility: hidden; - display: inline-block; - background-color: rgba(255,255,255,0.3); - border-radius: 6px; - border: 1px solid #3b2920; - font-weight: 300; - font-size: 15px; - padding: 2px; -} - -#search label { - padding-right: 5px; -} - -#search input[name="searchQuery"] { - background-color: transparent !important; - color: white; - border: none; - padding: 2px 0 0 4px; - outline: none; -} - -.jsEnabled #search { - visibility: visible; -} - - -@media (min-width: 815px) { - .nav-items a { - display: inline-block !important; - } - .nav-items { padding-right: 5px; } -} - -#header-db .nav-items > a { - display: none; -} - -@media (max-width: 1093px) { - #header-db .header-content { - width: auto; - padding-left: 25px; - } -} - -@media (max-width: 1393px) { - #header-db .header-content { - width: auto; - } -} - -.sidebar-closed #sidebar { - margin-left: -270px; -} - -.sidebar-closed-winSize #optionsVersionsPopup { - visibility: hidden; - left: -293px; -} - - -.sidebar-closed #optionsVersionsPopup { - left: -293px; -} - -#optionsVersionsPopup { - left: 0px; - -webkit-transform: translateZ(0); - transition: left 0.4s cubic-bezier(.02,.01,.47,1); - -moz-transition: left 0.4s cubic-bezier(.02,.01,.47,1); - -webkit-transition: left 0.4s cubic-bezier(.02,.01,.47,1); -} diff --git a/docs/reference/themes/mongodb/static/css/reset.css b/docs/reference/themes/mongodb/static/css/reset.css deleted file mode 100644 index 78d0b6e37c5..00000000000 --- a/docs/reference/themes/mongodb/static/css/reset.css +++ /dev/null @@ -1,46 +0,0 @@ -/* http://meyerweb.com/eric/tools/css/reset/ - v2.0 | 20110126 - License: none (public domain) -*/ - -html, body, div, span, applet, object, iframe, -h1, h2, h3, h4, h5, h6, p, blockquote, pre, -a, abbr, acronym, address, big, 
cite, code, -del, dfn, em, img, ins, kbd, q, s, samp, -small, strike, strong, sub, sup, tt, var, -b, u, i, center, -dl, dt, dd, ol, ul, li, -fieldset, form, label, legend, -table, caption, tbody, tfoot, thead, tr, th, td, -article, aside, canvas, details, embed, -figure, figcaption, footer, header, hgroup, -menu, nav, output, ruby, section, summary, -time, mark, audio, video { - font-size: 100%; - font: inherit; - vertical-align: baseline; -} -/* HTML5 display-role reset for older browsers */ -article, aside, details, figcaption, figure, -footer, header, hgroup, menu, nav, section { - display: block; -} -body { - line-height: 1; -} -ol, ul { - list-style: none; -} -blockquote, q { - quotes: none; -} -blockquote:before, blockquote:after, -q:before, q:after { - content: ''; - content: none; -} -table { - border-collapse: collapse; - border-spacing: 0; -} - diff --git a/docs/reference/themes/mongodb/static/favicon.ico b/docs/reference/themes/mongodb/static/favicon.ico deleted file mode 100644 index 1cb9531f4b7..00000000000 Binary files a/docs/reference/themes/mongodb/static/favicon.ico and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/24px-baseline-overlay.png b/docs/reference/themes/mongodb/static/img/24px-baseline-overlay.png deleted file mode 100644 index 9aa62dcdd83..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/24px-baseline-overlay.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/back-body.png b/docs/reference/themes/mongodb/static/img/back-body.png deleted file mode 100644 index 0eb9bc5e960..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/back-body.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/code-block-bg.png b/docs/reference/themes/mongodb/static/img/code-block-bg.png deleted file mode 100644 index aecf24d6d20..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/code-block-bg.png and /dev/null differ diff --git 
a/docs/reference/themes/mongodb/static/img/code-block-bg@2x.png b/docs/reference/themes/mongodb/static/img/code-block-bg@2x.png deleted file mode 100644 index c0c4d806bc1..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/code-block-bg@2x.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/favicon.png b/docs/reference/themes/mongodb/static/img/favicon.png deleted file mode 100644 index f9f54468dd5..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/favicon.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/gray.png b/docs/reference/themes/mongodb/static/img/gray.png deleted file mode 100755 index 3807691d3fd..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/gray.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/logo-mongodb-header.png b/docs/reference/themes/mongodb/static/img/logo-mongodb-header.png deleted file mode 100755 index fa2dccfa620..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/logo-mongodb-header.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/social-facebook.png b/docs/reference/themes/mongodb/static/img/social-facebook.png deleted file mode 100644 index 4a8e6cf831a..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/social-facebook.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/social-facebook@2x.png b/docs/reference/themes/mongodb/static/img/social-facebook@2x.png deleted file mode 100644 index dcbd4074814..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/social-facebook@2x.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/social-gplus.png b/docs/reference/themes/mongodb/static/img/social-gplus.png deleted file mode 100644 index efbac7d18c9..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/social-gplus.png and /dev/null differ diff --git 
a/docs/reference/themes/mongodb/static/img/social-gplus@2x.png b/docs/reference/themes/mongodb/static/img/social-gplus@2x.png deleted file mode 100644 index 45f130c1a3f..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/social-gplus@2x.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/social-twitter.png b/docs/reference/themes/mongodb/static/img/social-twitter.png deleted file mode 100644 index 05f534c47e0..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/social-twitter.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/social-twitter@2x.png b/docs/reference/themes/mongodb/static/img/social-twitter@2x.png deleted file mode 100644 index e84e6c0d332..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/social-twitter@2x.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/social-youtube.png b/docs/reference/themes/mongodb/static/img/social-youtube.png deleted file mode 100644 index 1cc3167b5b3..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/social-youtube.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/social-youtube@2x.png b/docs/reference/themes/mongodb/static/img/social-youtube@2x.png deleted file mode 100644 index 033e628ce65..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/social-youtube@2x.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/trans-user-back.png b/docs/reference/themes/mongodb/static/img/trans-user-back.png deleted file mode 100644 index 388216cfcc4..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/trans-user-back.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/trans-user-left.png b/docs/reference/themes/mongodb/static/img/trans-user-left.png deleted file mode 100644 index a96245e706d..00000000000 Binary files 
a/docs/reference/themes/mongodb/static/img/trans-user-left.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/img/trans-user-right.png b/docs/reference/themes/mongodb/static/img/trans-user-right.png deleted file mode 100644 index e7069e161d7..00000000000 Binary files a/docs/reference/themes/mongodb/static/img/trans-user-right.png and /dev/null differ diff --git a/docs/reference/themes/mongodb/static/js/doctools.js b/docs/reference/themes/mongodb/static/js/doctools.js deleted file mode 100644 index 8d7a5fcb5d5..00000000000 --- a/docs/reference/themes/mongodb/static/js/doctools.js +++ /dev/null @@ -1,224 +0,0 @@ -/* - * doctools.js - * ~~~~~~~~~~~ - * - * Sphinx JavaScript utilities for all documentation. - * - * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ - -/** - * select a different prefix for underscore - */ -$u = _.noConflict(); - -/** - * make the code below compatible with browsers without - * an installed firebug like debugger -if (!window.console || !console.firebug) { - var names = ["log", "debug", "info", "warn", "error", "assert", "dir", - "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", - "profile", "profileEnd"]; - window.console = {}; - for (var i = 0; i < names.length; ++i) - window.console[names[i]] = function() {}; -} - */ - -/** - * small helper function to urldecode strings - */ -jQuery.urldecode = function(x) { - return decodeURIComponent(x).replace(/\+/g, ' '); -} - -/** - * small helper function to urlencode strings - */ -jQuery.urlencode = encodeURIComponent; - -/** - * This function returns the parsed url parameters of the - * current request. Multiple values per key are supported, - * it will always return arrays of strings for the value parts. 
- */ -jQuery.getQueryParameters = function(s) { - if (typeof s == 'undefined') - s = document.location.search; - var parts = s.substr(s.indexOf('?') + 1).split('&'); - var result = {}; - for (var i = 0; i < parts.length; i++) { - var tmp = parts[i].split('=', 2); - var key = jQuery.urldecode(tmp[0]); - var value = jQuery.urldecode(tmp[1]); - if (key in result) - result[key].push(value); - else - result[key] = [value]; - } - return result; -}; - -/** - * highlight a given string on a jquery object by wrapping it in - * span elements with the given class name. - */ -jQuery.fn.highlightText = function(text, className) { - function highlight(node) { - if (node.nodeType == 3) { - var val = node.nodeValue; - var pos = val.toLowerCase().indexOf(text); - if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { - var span = document.createElement("span"); - span.className = className; - span.appendChild(document.createTextNode(val.substr(pos, text.length))); - node.parentNode.insertBefore(span, node.parentNode.insertBefore( - document.createTextNode(val.substr(pos + text.length)), - node.nextSibling)); - node.nodeValue = val.substr(0, pos); - } - } - else if (!jQuery(node).is("button, select, textarea")) { - jQuery.each(node.childNodes, function() { - highlight(this); - }); - } - } - return this.each(function() { - highlight(this); - }); -}; - -/** - * Small JavaScript module for the documentation. - */ -var Documentation = { - - init : function() { - this.highlightSearchWords(); - this.initIndexTable(); - }, - - /** - * i18n support - */ - TRANSLATIONS : {}, - PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, - LOCALE : 'unknown', - - // gettext and ngettext don't access this so that the functions - // can safely bound to a different name (_ = Documentation.gettext) - gettext : function(string) { - var translated = Documentation.TRANSLATIONS[string]; - if (typeof translated == 'undefined') - return string; - return (typeof translated == 'string') ? 
translated : translated[0]; - }, - - ngettext : function(singular, plural, n) { - var translated = Documentation.TRANSLATIONS[singular]; - if (typeof translated == 'undefined') - return (n == 1) ? singular : plural; - return translated[Documentation.PLURALEXPR(n)]; - }, - - addTranslations : function(catalog) { - for (var key in catalog.messages) - this.TRANSLATIONS[key] = catalog.messages[key]; - this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); - this.LOCALE = catalog.locale; - }, - - /** - * add context elements like header anchor links - */ - addContextElements : function() { - $('div[id] > :header:first').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this headline')). - appendTo(this); - }); - $('dt[id]').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this definition')). - appendTo(this); - }); - }, - - /** - * highlight the search words provided in the url in the text - */ - highlightSearchWords : function() { - var params = $.getQueryParameters(); - var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; - if (terms.length) { - var body = $('div.body'); - window.setTimeout(function() { - $.each(terms, function() { - body.highlightText(this.toLowerCase(), 'highlighted'); - }); - }, 10); - $('') - .appendTo($('#searchbox')); - } - }, - - /** - * init the domain index toggle buttons - */ - initIndexTable : function() { - var togglers = $('img.toggler').click(function() { - var src = $(this).attr('src'); - var idnum = $(this).attr('id').substr(7); - $('tr.cg-' + idnum).toggle(); - if (src.substr(-9) == 'minus.png') - $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); - else - $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); - }).css('display', ''); - if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { - togglers.click(); - } - }, - - /** - * helper function to hide the search marks again - */ - hideSearchWords : function() { - $('#searchbox .highlight-link').fadeOut(300); - $('span.highlighted').removeClass('highlighted'); - }, - - /** - * make the url absolute - */ - makeURL : function(relativeURL) { - return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; - }, - - /** - * get the current relative url - */ - getCurrentURL : function() { - var path = document.location.pathname; - var parts = path.split(/\//); - $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { - if (this == '..') - parts.pop(); - }); - var url = parts.join('/'); - return path.substring(url.lastIndexOf('/') + 1, path.length - 1); - } -}; - -// quick alias for translations -_ = Documentation.gettext; - -$(document).ready(function() { - Documentation.init(); -}); diff --git a/docs/reference/themes/mongodb/static/js/jquery.js b/docs/reference/themes/mongodb/static/js/jquery.js deleted file mode 100644 index bfb2376d2ae..00000000000 --- a/docs/reference/themes/mongodb/static/js/jquery.js +++ /dev/null @@ -1,9789 +0,0 @@ -/*! 
- * jQuery JavaScript Library v1.10.2 - * http://jquery.com/ - * - * Includes Sizzle.js - * http://sizzlejs.com/ - * - * Copyright 2005, 2013 jQuery Foundation, Inc. and other contributors - * Released under the MIT license - * http://jquery.org/license - * - * Date: 2013-07-03T13:48Z - */ -(function( window, undefined ) { - -// Can't do this because several apps including ASP.NET trace -// the stack via arguments.caller.callee and Firefox dies if -// you try to trace through "use strict" call chains. (#13335) -// Support: Firefox 18+ -//"use strict"; -var - // The deferred used on DOM ready - readyList, - - // A central reference to the root jQuery(document) - rootjQuery, - - // Support: IE<10 - // For `typeof xmlNode.method` instead of `xmlNode.method !== undefined` - core_strundefined = typeof undefined, - - // Use the correct document accordingly with window argument (sandbox) - location = window.location, - document = window.document, - docElem = document.documentElement, - - // Map over jQuery in case of overwrite - _jQuery = window.jQuery, - - // Map over the $ in case of overwrite - _$ = window.$, - - // [[Class]] -> type pairs - class2type = {}, - - // List of deleted data cache ids, so we can reuse them - core_deletedIds = [], - - core_version = "1.10.2", - - // Save a reference to some core methods - core_concat = core_deletedIds.concat, - core_push = core_deletedIds.push, - core_slice = core_deletedIds.slice, - core_indexOf = core_deletedIds.indexOf, - core_toString = class2type.toString, - core_hasOwn = class2type.hasOwnProperty, - core_trim = core_version.trim, - - // Define a local copy of jQuery - jQuery = function( selector, context ) { - // The jQuery object is actually just the init constructor 'enhanced' - return new jQuery.fn.init( selector, context, rootjQuery ); - }, - - // Used for matching numbers - core_pnum = /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source, - - // Used for splitting on whitespace - core_rnotwhite = /\S+/g, - - // Make sure 
we trim BOM and NBSP (here's looking at you, Safari 5.0 and IE) - rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, - - // A simple way to check for HTML strings - // Prioritize #id over to avoid XSS via location.hash (#9521) - // Strict HTML recognition (#11290: must start with <) - rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/, - - // Match a standalone tag - rsingleTag = /^<(\w+)\s*\/?>(?:<\/\1>|)$/, - - // JSON RegExp - rvalidchars = /^[\],:{}\s]*$/, - rvalidbraces = /(?:^|:|,)(?:\s*\[)+/g, - rvalidescape = /\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g, - rvalidtokens = /"[^"\\\r\n]*"|true|false|null|-?(?:\d+\.|)\d+(?:[eE][+-]?\d+|)/g, - - // Matches dashed string for camelizing - rmsPrefix = /^-ms-/, - rdashAlpha = /-([\da-z])/gi, - - // Used by jQuery.camelCase as callback to replace() - fcamelCase = function( all, letter ) { - return letter.toUpperCase(); - }, - - // The ready event handler - completed = function( event ) { - - // readyState === "complete" is good enough for us to call the dom ready in oldIE - if ( document.addEventListener || event.type === "load" || document.readyState === "complete" ) { - detach(); - jQuery.ready(); - } - }, - // Clean-up method for dom ready events - detach = function() { - if ( document.addEventListener ) { - document.removeEventListener( "DOMContentLoaded", completed, false ); - window.removeEventListener( "load", completed, false ); - - } else { - document.detachEvent( "onreadystatechange", completed ); - window.detachEvent( "onload", completed ); - } - }; - -jQuery.fn = jQuery.prototype = { - // The current version of jQuery being used - jquery: core_version, - - constructor: jQuery, - init: function( selector, context, rootjQuery ) { - var match, elem; - - // HANDLE: $(""), $(null), $(undefined), $(false) - if ( !selector ) { - return this; - } - - // Handle HTML strings - if ( typeof selector === "string" ) { - if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) { - 
// Assume that strings that start and end with <> are HTML and skip the regex check - match = [ null, selector, null ]; - - } else { - match = rquickExpr.exec( selector ); - } - - // Match html or make sure no context is specified for #id - if ( match && (match[1] || !context) ) { - - // HANDLE: $(html) -> $(array) - if ( match[1] ) { - context = context instanceof jQuery ? context[0] : context; - - // scripts is true for back-compat - jQuery.merge( this, jQuery.parseHTML( - match[1], - context && context.nodeType ? context.ownerDocument || context : document, - true - ) ); - - // HANDLE: $(html, props) - if ( rsingleTag.test( match[1] ) && jQuery.isPlainObject( context ) ) { - for ( match in context ) { - // Properties of context are called as methods if possible - if ( jQuery.isFunction( this[ match ] ) ) { - this[ match ]( context[ match ] ); - - // ...and otherwise set as attributes - } else { - this.attr( match, context[ match ] ); - } - } - } - - return this; - - // HANDLE: $(#id) - } else { - elem = document.getElementById( match[2] ); - - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - if ( elem && elem.parentNode ) { - // Handle the case where IE and Opera return items - // by name instead of ID - if ( elem.id !== match[2] ) { - return rootjQuery.find( selector ); - } - - // Otherwise, we inject the element directly into the jQuery object - this.length = 1; - this[0] = elem; - } - - this.context = document; - this.selector = selector; - return this; - } - - // HANDLE: $(expr, $(...)) - } else if ( !context || context.jquery ) { - return ( context || rootjQuery ).find( selector ); - - // HANDLE: $(expr, context) - // (which is just equivalent to: $(context).find(expr) - } else { - return this.constructor( context ).find( selector ); - } - - // HANDLE: $(DOMElement) - } else if ( selector.nodeType ) { - this.context = this[0] = selector; - this.length = 1; - return this; - - // HANDLE: 
$(function) - // Shortcut for document ready - } else if ( jQuery.isFunction( selector ) ) { - return rootjQuery.ready( selector ); - } - - if ( selector.selector !== undefined ) { - this.selector = selector.selector; - this.context = selector.context; - } - - return jQuery.makeArray( selector, this ); - }, - - // Start with an empty selector - selector: "", - - // The default length of a jQuery object is 0 - length: 0, - - toArray: function() { - return core_slice.call( this ); - }, - - // Get the Nth element in the matched element set OR - // Get the whole matched element set as a clean array - get: function( num ) { - return num == null ? - - // Return a 'clean' array - this.toArray() : - - // Return just the object - ( num < 0 ? this[ this.length + num ] : this[ num ] ); - }, - - // Take an array of elements and push it onto the stack - // (returning the new matched element set) - pushStack: function( elems ) { - - // Build a new jQuery matched element set - var ret = jQuery.merge( this.constructor(), elems ); - - // Add the old object onto the stack (as a reference) - ret.prevObject = this; - ret.context = this.context; - - // Return the newly-formed element set - return ret; - }, - - // Execute a callback for every element in the matched set. - // (You can seed the arguments with an array of args, but this is - // only used internally.) - each: function( callback, args ) { - return jQuery.each( this, callback, args ); - }, - - ready: function( fn ) { - // Add the callback - jQuery.ready.promise().done( fn ); - - return this; - }, - - slice: function() { - return this.pushStack( core_slice.apply( this, arguments ) ); - }, - - first: function() { - return this.eq( 0 ); - }, - - last: function() { - return this.eq( -1 ); - }, - - eq: function( i ) { - var len = this.length, - j = +i + ( i < 0 ? len : 0 ); - return this.pushStack( j >= 0 && j < len ? 
[ this[j] ] : [] ); - }, - - map: function( callback ) { - return this.pushStack( jQuery.map(this, function( elem, i ) { - return callback.call( elem, i, elem ); - })); - }, - - end: function() { - return this.prevObject || this.constructor(null); - }, - - // For internal use only. - // Behaves like an Array's method, not like a jQuery method. - push: core_push, - sort: [].sort, - splice: [].splice -}; - -// Give the init function the jQuery prototype for later instantiation -jQuery.fn.init.prototype = jQuery.fn; - -jQuery.extend = jQuery.fn.extend = function() { - var src, copyIsArray, copy, name, options, clone, - target = arguments[0] || {}, - i = 1, - length = arguments.length, - deep = false; - - // Handle a deep copy situation - if ( typeof target === "boolean" ) { - deep = target; - target = arguments[1] || {}; - // skip the boolean and the target - i = 2; - } - - // Handle case when target is a string or something (possible in deep copy) - if ( typeof target !== "object" && !jQuery.isFunction(target) ) { - target = {}; - } - - // extend jQuery itself if only one argument is passed - if ( length === i ) { - target = this; - --i; - } - - for ( ; i < length; i++ ) { - // Only deal with non-null/undefined values - if ( (options = arguments[ i ]) != null ) { - // Extend the base object - for ( name in options ) { - src = target[ name ]; - copy = options[ name ]; - - // Prevent never-ending loop - if ( target === copy ) { - continue; - } - - // Recurse if we're merging plain objects or arrays - if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) { - if ( copyIsArray ) { - copyIsArray = false; - clone = src && jQuery.isArray(src) ? src : []; - - } else { - clone = src && jQuery.isPlainObject(src) ? 
src : {}; - } - - // Never move original objects, clone them - target[ name ] = jQuery.extend( deep, clone, copy ); - - // Don't bring in undefined values - } else if ( copy !== undefined ) { - target[ name ] = copy; - } - } - } - } - - // Return the modified object - return target; -}; - -jQuery.extend({ - // Unique for each copy of jQuery on the page - // Non-digits removed to match rinlinejQuery - expando: "jQuery" + ( core_version + Math.random() ).replace( /\D/g, "" ), - - noConflict: function( deep ) { - if ( window.$ === jQuery ) { - window.$ = _$; - } - - if ( deep && window.jQuery === jQuery ) { - window.jQuery = _jQuery; - } - - return jQuery; - }, - - // Is the DOM ready to be used? Set to true once it occurs. - isReady: false, - - // A counter to track how many items to wait for before - // the ready event fires. See #6781 - readyWait: 1, - - // Hold (or release) the ready event - holdReady: function( hold ) { - if ( hold ) { - jQuery.readyWait++; - } else { - jQuery.ready( true ); - } - }, - - // Handle when the DOM is ready - ready: function( wait ) { - - // Abort if there are pending holds or we're already ready - if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { - return; - } - - // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443). - if ( !document.body ) { - return setTimeout( jQuery.ready ); - } - - // Remember that the DOM is ready - jQuery.isReady = true; - - // If a normal DOM Ready event fired, decrement, and wait if need be - if ( wait !== true && --jQuery.readyWait > 0 ) { - return; - } - - // If there are functions bound, to execute - readyList.resolveWith( document, [ jQuery ] ); - - // Trigger any bound ready events - if ( jQuery.fn.trigger ) { - jQuery( document ).trigger("ready").off("ready"); - } - }, - - // See test/unit/core.js for details concerning isFunction. - // Since version 1.3, DOM methods and functions like alert - // aren't supported. They return false on IE (#2968). 
- isFunction: function( obj ) { - return jQuery.type(obj) === "function"; - }, - - isArray: Array.isArray || function( obj ) { - return jQuery.type(obj) === "array"; - }, - - isWindow: function( obj ) { - /* jshint eqeqeq: false */ - return obj != null && obj == obj.window; - }, - - isNumeric: function( obj ) { - return !isNaN( parseFloat(obj) ) && isFinite( obj ); - }, - - type: function( obj ) { - if ( obj == null ) { - return String( obj ); - } - return typeof obj === "object" || typeof obj === "function" ? - class2type[ core_toString.call(obj) ] || "object" : - typeof obj; - }, - - isPlainObject: function( obj ) { - var key; - - // Must be an Object. - // Because of IE, we also have to check the presence of the constructor property. - // Make sure that DOM nodes and window objects don't pass through, as well - if ( !obj || jQuery.type(obj) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) { - return false; - } - - try { - // Not own constructor property must be Object - if ( obj.constructor && - !core_hasOwn.call(obj, "constructor") && - !core_hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) { - return false; - } - } catch ( e ) { - // IE8,9 Will throw exceptions on certain host objects #9897 - return false; - } - - // Support: IE<9 - // Handle iteration over inherited properties before own properties. - if ( jQuery.support.ownLast ) { - for ( key in obj ) { - return core_hasOwn.call( obj, key ); - } - } - - // Own properties are enumerated firstly, so to speed up, - // if last one is own, then all properties are own. 
- for ( key in obj ) {} - - return key === undefined || core_hasOwn.call( obj, key ); - }, - - isEmptyObject: function( obj ) { - var name; - for ( name in obj ) { - return false; - } - return true; - }, - - error: function( msg ) { - throw new Error( msg ); - }, - - // data: string of html - // context (optional): If specified, the fragment will be created in this context, defaults to document - // keepScripts (optional): If true, will include scripts passed in the html string - parseHTML: function( data, context, keepScripts ) { - if ( !data || typeof data !== "string" ) { - return null; - } - if ( typeof context === "boolean" ) { - keepScripts = context; - context = false; - } - context = context || document; - - var parsed = rsingleTag.exec( data ), - scripts = !keepScripts && []; - - // Single tag - if ( parsed ) { - return [ context.createElement( parsed[1] ) ]; - } - - parsed = jQuery.buildFragment( [ data ], context, scripts ); - if ( scripts ) { - jQuery( scripts ).remove(); - } - return jQuery.merge( [], parsed.childNodes ); - }, - - parseJSON: function( data ) { - // Attempt to parse using the native JSON parser first - if ( window.JSON && window.JSON.parse ) { - return window.JSON.parse( data ); - } - - if ( data === null ) { - return data; - } - - if ( typeof data === "string" ) { - - // Make sure leading/trailing whitespace is removed (IE can't handle it) - data = jQuery.trim( data ); - - if ( data ) { - // Make sure the incoming data is actual JSON - // Logic borrowed from http://json.org/json2.js - if ( rvalidchars.test( data.replace( rvalidescape, "@" ) - .replace( rvalidtokens, "]" ) - .replace( rvalidbraces, "")) ) { - - return ( new Function( "return " + data ) )(); - } - } - } - - jQuery.error( "Invalid JSON: " + data ); - }, - - // Cross-browser xml parsing - parseXML: function( data ) { - var xml, tmp; - if ( !data || typeof data !== "string" ) { - return null; - } - try { - if ( window.DOMParser ) { // Standard - tmp = new DOMParser(); - xml 
= tmp.parseFromString( data , "text/xml" ); - } else { // IE - xml = new ActiveXObject( "Microsoft.XMLDOM" ); - xml.async = "false"; - xml.loadXML( data ); - } - } catch( e ) { - xml = undefined; - } - if ( !xml || !xml.documentElement || xml.getElementsByTagName( "parsererror" ).length ) { - jQuery.error( "Invalid XML: " + data ); - } - return xml; - }, - - noop: function() {}, - - // Evaluates a script in a global context - // Workarounds based on findings by Jim Driscoll - // http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context - globalEval: function( data ) { - if ( data && jQuery.trim( data ) ) { - // We use execScript on Internet Explorer - // We use an anonymous function so that context is window - // rather than jQuery in Firefox - ( window.execScript || function( data ) { - window[ "eval" ].call( window, data ); - } )( data ); - } - }, - - // Convert dashed to camelCase; used by the css and data modules - // Microsoft forgot to hump their vendor prefix (#9572) - camelCase: function( string ) { - return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); - }, - - nodeName: function( elem, name ) { - return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); - }, - - // args is for internal usage only - each: function( obj, callback, args ) { - var value, - i = 0, - length = obj.length, - isArray = isArraylike( obj ); - - if ( args ) { - if ( isArray ) { - for ( ; i < length; i++ ) { - value = callback.apply( obj[ i ], args ); - - if ( value === false ) { - break; - } - } - } else { - for ( i in obj ) { - value = callback.apply( obj[ i ], args ); - - if ( value === false ) { - break; - } - } - } - - // A special, fast, case for the most common use of each - } else { - if ( isArray ) { - for ( ; i < length; i++ ) { - value = callback.call( obj[ i ], i, obj[ i ] ); - - if ( value === false ) { - break; - } - } - } else { - for ( i in obj ) { - value = callback.call( obj[ i ], i, obj[ i ] ); 
- - if ( value === false ) { - break; - } - } - } - } - - return obj; - }, - - // Use native String.trim function wherever possible - trim: core_trim && !core_trim.call("\uFEFF\xA0") ? - function( text ) { - return text == null ? - "" : - core_trim.call( text ); - } : - - // Otherwise use our own trimming functionality - function( text ) { - return text == null ? - "" : - ( text + "" ).replace( rtrim, "" ); - }, - - // results is for internal usage only - makeArray: function( arr, results ) { - var ret = results || []; - - if ( arr != null ) { - if ( isArraylike( Object(arr) ) ) { - jQuery.merge( ret, - typeof arr === "string" ? - [ arr ] : arr - ); - } else { - core_push.call( ret, arr ); - } - } - - return ret; - }, - - inArray: function( elem, arr, i ) { - var len; - - if ( arr ) { - if ( core_indexOf ) { - return core_indexOf.call( arr, elem, i ); - } - - len = arr.length; - i = i ? i < 0 ? Math.max( 0, len + i ) : i : 0; - - for ( ; i < len; i++ ) { - // Skip accessing in sparse arrays - if ( i in arr && arr[ i ] === elem ) { - return i; - } - } - } - - return -1; - }, - - merge: function( first, second ) { - var l = second.length, - i = first.length, - j = 0; - - if ( typeof l === "number" ) { - for ( ; j < l; j++ ) { - first[ i++ ] = second[ j ]; - } - } else { - while ( second[j] !== undefined ) { - first[ i++ ] = second[ j++ ]; - } - } - - first.length = i; - - return first; - }, - - grep: function( elems, callback, inv ) { - var retVal, - ret = [], - i = 0, - length = elems.length; - inv = !!inv; - - // Go through the array, only saving the items - // that pass the validator function - for ( ; i < length; i++ ) { - retVal = !!callback( elems[ i ], i ); - if ( inv !== retVal ) { - ret.push( elems[ i ] ); - } - } - - return ret; - }, - - // arg is for internal usage only - map: function( elems, callback, arg ) { - var value, - i = 0, - length = elems.length, - isArray = isArraylike( elems ), - ret = []; - - // Go through the array, translating each of the 
items to their - if ( isArray ) { - for ( ; i < length; i++ ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret[ ret.length ] = value; - } - } - - // Go through every key on the object, - } else { - for ( i in elems ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret[ ret.length ] = value; - } - } - } - - // Flatten any nested arrays - return core_concat.apply( [], ret ); - }, - - // A global GUID counter for objects - guid: 1, - - // Bind a function to a context, optionally partially applying any - // arguments. - proxy: function( fn, context ) { - var args, proxy, tmp; - - if ( typeof context === "string" ) { - tmp = fn[ context ]; - context = fn; - fn = tmp; - } - - // Quick check to determine if target is callable, in the spec - // this throws a TypeError, but we will just return undefined. - if ( !jQuery.isFunction( fn ) ) { - return undefined; - } - - // Simulated bind - args = core_slice.call( arguments, 2 ); - proxy = function() { - return fn.apply( context || this, args.concat( core_slice.call( arguments ) ) ); - }; - - // Set the guid of unique handler to the same of original handler, so it can be removed - proxy.guid = fn.guid = fn.guid || jQuery.guid++; - - return proxy; - }, - - // Multifunctional method to get and set values of a collection - // The value/s can optionally be executed if it's a function - access: function( elems, fn, key, value, chainable, emptyGet, raw ) { - var i = 0, - length = elems.length, - bulk = key == null; - - // Sets many values - if ( jQuery.type( key ) === "object" ) { - chainable = true; - for ( i in key ) { - jQuery.access( elems, fn, i, key[i], true, emptyGet, raw ); - } - - // Sets one value - } else if ( value !== undefined ) { - chainable = true; - - if ( !jQuery.isFunction( value ) ) { - raw = true; - } - - if ( bulk ) { - // Bulk operations run against the entire set - if ( raw ) { - fn.call( elems, value ); - fn = null; - - // ...except when executing function 
values - } else { - bulk = fn; - fn = function( elem, key, value ) { - return bulk.call( jQuery( elem ), value ); - }; - } - } - - if ( fn ) { - for ( ; i < length; i++ ) { - fn( elems[i], key, raw ? value : value.call( elems[i], i, fn( elems[i], key ) ) ); - } - } - } - - return chainable ? - elems : - - // Gets - bulk ? - fn.call( elems ) : - length ? fn( elems[0], key ) : emptyGet; - }, - - now: function() { - return ( new Date() ).getTime(); - }, - - // A method for quickly swapping in/out CSS properties to get correct calculations. - // Note: this method belongs to the css module but it's needed here for the support module. - // If support gets modularized, this method should be moved back to the css module. - swap: function( elem, options, callback, args ) { - var ret, name, - old = {}; - - // Remember the old values, and insert the new ones - for ( name in options ) { - old[ name ] = elem.style[ name ]; - elem.style[ name ] = options[ name ]; - } - - ret = callback.apply( elem, args || [] ); - - // Revert the old values - for ( name in options ) { - elem.style[ name ] = old[ name ]; - } - - return ret; - } -}); - -jQuery.ready.promise = function( obj ) { - if ( !readyList ) { - - readyList = jQuery.Deferred(); - - // Catch cases where $(document).ready() is called after the browser event has already occurred. 
- // we once tried to use readyState "interactive" here, but it caused issues like the one - // discovered by ChrisS here: http://bugs.jquery.com/ticket/12282#comment:15 - if ( document.readyState === "complete" ) { - // Handle it asynchronously to allow scripts the opportunity to delay ready - setTimeout( jQuery.ready ); - - // Standards-based browsers support DOMContentLoaded - } else if ( document.addEventListener ) { - // Use the handy event callback - document.addEventListener( "DOMContentLoaded", completed, false ); - - // A fallback to window.onload, that will always work - window.addEventListener( "load", completed, false ); - - // If IE event model is used - } else { - // Ensure firing before onload, maybe late but safe also for iframes - document.attachEvent( "onreadystatechange", completed ); - - // A fallback to window.onload, that will always work - window.attachEvent( "onload", completed ); - - // If IE and not a frame - // continually check to see if the document is ready - var top = false; - - try { - top = window.frameElement == null && document.documentElement; - } catch(e) {} - - if ( top && top.doScroll ) { - (function doScrollCheck() { - if ( !jQuery.isReady ) { - - try { - // Use the trick by Diego Perini - // http://javascript.nwbox.com/IEContentLoaded/ - top.doScroll("left"); - } catch(e) { - return setTimeout( doScrollCheck, 50 ); - } - - // detach all dom ready events - detach(); - - // and execute any waiting functions - jQuery.ready(); - } - })(); - } - } - } - return readyList.promise( obj ); -}; - -// Populate the class2type map -jQuery.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function(i, name) { - class2type[ "[object " + name + "]" ] = name.toLowerCase(); -}); - -function isArraylike( obj ) { - var length = obj.length, - type = jQuery.type( obj ); - - if ( jQuery.isWindow( obj ) ) { - return false; - } - - if ( obj.nodeType === 1 && length ) { - return true; - } - - return type === "array" || 
type !== "function" && - ( length === 0 || - typeof length === "number" && length > 0 && ( length - 1 ) in obj ); -} - -// All jQuery objects should point back to these -rootjQuery = jQuery(document); -/*! - * Sizzle CSS Selector Engine v1.10.2 - * http://sizzlejs.com/ - * - * Copyright 2013 jQuery Foundation, Inc. and other contributors - * Released under the MIT license - * http://jquery.org/license - * - * Date: 2013-07-03 - */ -(function( window, undefined ) { - -var i, - support, - cachedruns, - Expr, - getText, - isXML, - compile, - outermostContext, - sortInput, - - // Local document vars - setDocument, - document, - docElem, - documentIsHTML, - rbuggyQSA, - rbuggyMatches, - matches, - contains, - - // Instance-specific data - expando = "sizzle" + -(new Date()), - preferredDoc = window.document, - dirruns = 0, - done = 0, - classCache = createCache(), - tokenCache = createCache(), - compilerCache = createCache(), - hasDuplicate = false, - sortOrder = function( a, b ) { - if ( a === b ) { - hasDuplicate = true; - return 0; - } - return 0; - }, - - // General-purpose constants - strundefined = typeof undefined, - MAX_NEGATIVE = 1 << 31, - - // Instance methods - hasOwn = ({}).hasOwnProperty, - arr = [], - pop = arr.pop, - push_native = arr.push, - push = arr.push, - slice = arr.slice, - // Use a stripped-down indexOf if we can't use a native one - indexOf = arr.indexOf || function( elem ) { - var i = 0, - len = this.length; - for ( ; i < len; i++ ) { - if ( this[i] === elem ) { - return i; - } - } - return -1; - }, - - booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped", - - // Regular expressions - - // Whitespace characters http://www.w3.org/TR/css3-selectors/#whitespace - whitespace = "[\\x20\\t\\r\\n\\f]", - // http://www.w3.org/TR/css3-syntax/#characters - characterEncoding = "(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+", - - // Loosely modeled on CSS identifier characters - // An 
unquoted value should be a CSS identifier http://www.w3.org/TR/css3-selectors/#attribute-selectors - // Proper syntax: http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier - identifier = characterEncoding.replace( "w", "w#" ), - - // Acceptable operators http://www.w3.org/TR/selectors/#attribute-selectors - attributes = "\\[" + whitespace + "*(" + characterEncoding + ")" + whitespace + - "*(?:([*^$|!~]?=)" + whitespace + "*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|(" + identifier + ")|)|)" + whitespace + "*\\]", - - // Prefer arguments quoted, - // then not containing pseudos/brackets, - // then attribute selectors/non-parenthetical expressions, - // then anything else - // These preferences are here to reduce the number of selectors - // needing tokenize in the PSEUDO preFilter - pseudos = ":(" + characterEncoding + ")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|" + attributes.replace( 3, 8 ) + ")*)|.*)\\)|)", - - // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter - rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ), - - rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), - rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + "*" ), - - rsibling = new RegExp( whitespace + "*[+~]" ), - rattributeQuotes = new RegExp( "=" + whitespace + "*([^\\]'\"]*)" + whitespace + "*\\]", "g" ), - - rpseudo = new RegExp( pseudos ), - ridentifier = new RegExp( "^" + identifier + "$" ), - - matchExpr = { - "ID": new RegExp( "^#(" + characterEncoding + ")" ), - "CLASS": new RegExp( "^\\.(" + characterEncoding + ")" ), - "TAG": new RegExp( "^(" + characterEncoding.replace( "w", "w*" ) + ")" ), - "ATTR": new RegExp( "^" + attributes ), - "PSEUDO": new RegExp( "^" + pseudos ), - "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + whitespace + - "*(even|odd|(([+-]|)(\\d*)n|)" + 
whitespace + "*(?:([+-]|)" + whitespace + - "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), - "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), - // For use in libraries implementing .is() - // We use this for POS matching in `select` - "needsContext": new RegExp( "^" + whitespace + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + - whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) - }, - - rnative = /^[^{]+\{\s*\[native \w/, - - // Easily-parseable/retrievable ID or TAG or CLASS selectors - rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, - - rinputs = /^(?:input|select|textarea|button)$/i, - rheader = /^h\d$/i, - - rescape = /'|\\/g, - - // CSS escapes http://www.w3.org/TR/CSS21/syndata.html#escaped-characters - runescape = new RegExp( "\\\\([\\da-f]{1,6}" + whitespace + "?|(" + whitespace + ")|.)", "ig" ), - funescape = function( _, escaped, escapedWhitespace ) { - var high = "0x" + escaped - 0x10000; - // NaN means non-codepoint - // Support: Firefox - // Workaround erroneous numeric interpretation of +"0x" - return high !== high || escapedWhitespace ? - escaped : - // BMP codepoint - high < 0 ? - String.fromCharCode( high + 0x10000 ) : - // Supplemental Plane codepoint (surrogate pair) - String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); - }; - -// Optimize for push.apply( _, NodeList ) -try { - push.apply( - (arr = slice.call( preferredDoc.childNodes )), - preferredDoc.childNodes - ); - // Support: Android<4.0 - // Detect silently failing push.apply - arr[ preferredDoc.childNodes.length ].nodeType; -} catch ( e ) { - push = { apply: arr.length ? 
- - // Leverage slice if possible - function( target, els ) { - push_native.apply( target, slice.call(els) ); - } : - - // Support: IE<9 - // Otherwise append directly - function( target, els ) { - var j = target.length, - i = 0; - // Can't trust NodeList.length - while ( (target[j++] = els[i++]) ) {} - target.length = j - 1; - } - }; -} - -function Sizzle( selector, context, results, seed ) { - var match, elem, m, nodeType, - // QSA vars - i, groups, old, nid, newContext, newSelector; - - if ( ( context ? context.ownerDocument || context : preferredDoc ) !== document ) { - setDocument( context ); - } - - context = context || document; - results = results || []; - - if ( !selector || typeof selector !== "string" ) { - return results; - } - - if ( (nodeType = context.nodeType) !== 1 && nodeType !== 9 ) { - return []; - } - - if ( documentIsHTML && !seed ) { - - // Shortcuts - if ( (match = rquickExpr.exec( selector )) ) { - // Speed-up: Sizzle("#ID") - if ( (m = match[1]) ) { - if ( nodeType === 9 ) { - elem = context.getElementById( m ); - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - if ( elem && elem.parentNode ) { - // Handle the case where IE, Opera, and Webkit return items - // by name instead of ID - if ( elem.id === m ) { - results.push( elem ); - return results; - } - } else { - return results; - } - } else { - // Context is not a document - if ( context.ownerDocument && (elem = context.ownerDocument.getElementById( m )) && - contains( context, elem ) && elem.id === m ) { - results.push( elem ); - return results; - } - } - - // Speed-up: Sizzle("TAG") - } else if ( match[2] ) { - push.apply( results, context.getElementsByTagName( selector ) ); - return results; - - // Speed-up: Sizzle(".CLASS") - } else if ( (m = match[3]) && support.getElementsByClassName && context.getElementsByClassName ) { - push.apply( results, context.getElementsByClassName( m ) ); - return results; - } - } - - // QSA 
path - if ( support.qsa && (!rbuggyQSA || !rbuggyQSA.test( selector )) ) { - nid = old = expando; - newContext = context; - newSelector = nodeType === 9 && selector; - - // qSA works strangely on Element-rooted queries - // We can work around this by specifying an extra ID on the root - // and working up from there (Thanks to Andrew Dupont for the technique) - // IE 8 doesn't work on object elements - if ( nodeType === 1 && context.nodeName.toLowerCase() !== "object" ) { - groups = tokenize( selector ); - - if ( (old = context.getAttribute("id")) ) { - nid = old.replace( rescape, "\\$&" ); - } else { - context.setAttribute( "id", nid ); - } - nid = "[id='" + nid + "'] "; - - i = groups.length; - while ( i-- ) { - groups[i] = nid + toSelector( groups[i] ); - } - newContext = rsibling.test( selector ) && context.parentNode || context; - newSelector = groups.join(","); - } - - if ( newSelector ) { - try { - push.apply( results, - newContext.querySelectorAll( newSelector ) - ); - return results; - } catch(qsaError) { - } finally { - if ( !old ) { - context.removeAttribute("id"); - } - } - } - } - } - - // All others - return select( selector.replace( rtrim, "$1" ), context, results, seed ); -} - -/** - * Create key-value caches of limited size - * @returns {Function(string, Object)} Returns the Object data after storing it on itself with - * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) - * deleting the oldest entry - */ -function createCache() { - var keys = []; - - function cache( key, value ) { - // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) - if ( keys.push( key += " " ) > Expr.cacheLength ) { - // Only keep the most recent entries - delete cache[ keys.shift() ]; - } - return (cache[ key ] = value); - } - return cache; -} - -/** - * Mark a function for special use by Sizzle - * @param {Function} fn The function to mark - */ -function markFunction( fn ) { - fn[ expando ] = true; 
- return fn; -} - -/** - * Support testing using an element - * @param {Function} fn Passed the created div and expects a boolean result - */ -function assert( fn ) { - var div = document.createElement("div"); - - try { - return !!fn( div ); - } catch (e) { - return false; - } finally { - // Remove from its parent by default - if ( div.parentNode ) { - div.parentNode.removeChild( div ); - } - // release memory in IE - div = null; - } -} - -/** - * Adds the same handler for all of the specified attrs - * @param {String} attrs Pipe-separated list of attributes - * @param {Function} handler The method that will be applied - */ -function addHandle( attrs, handler ) { - var arr = attrs.split("|"), - i = attrs.length; - - while ( i-- ) { - Expr.attrHandle[ arr[i] ] = handler; - } -} - -/** - * Checks document order of two siblings - * @param {Element} a - * @param {Element} b - * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b - */ -function siblingCheck( a, b ) { - var cur = b && a, - diff = cur && a.nodeType === 1 && b.nodeType === 1 && - ( ~b.sourceIndex || MAX_NEGATIVE ) - - ( ~a.sourceIndex || MAX_NEGATIVE ); - - // Use IE sourceIndex if available on both nodes - if ( diff ) { - return diff; - } - - // Check if b follows a - if ( cur ) { - while ( (cur = cur.nextSibling) ) { - if ( cur === b ) { - return -1; - } - } - } - - return a ? 
1 : -1; -} - -/** - * Returns a function to use in pseudos for input types - * @param {String} type - */ -function createInputPseudo( type ) { - return function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && elem.type === type; - }; -} - -/** - * Returns a function to use in pseudos for buttons - * @param {String} type - */ -function createButtonPseudo( type ) { - return function( elem ) { - var name = elem.nodeName.toLowerCase(); - return (name === "input" || name === "button") && elem.type === type; - }; -} - -/** - * Returns a function to use in pseudos for positionals - * @param {Function} fn - */ -function createPositionalPseudo( fn ) { - return markFunction(function( argument ) { - argument = +argument; - return markFunction(function( seed, matches ) { - var j, - matchIndexes = fn( [], seed.length, argument ), - i = matchIndexes.length; - - // Match elements found at the specified indexes - while ( i-- ) { - if ( seed[ (j = matchIndexes[i]) ] ) { - seed[j] = !(matches[j] = seed[j]); - } - } - }); - }); -} - -/** - * Detect xml - * @param {Element|Object} elem An element or a document - */ -isXML = Sizzle.isXML = function( elem ) { - // documentElement is verified for cases where it doesn't yet exist - // (such as loading iframes in IE - #4833) - var documentElement = elem && (elem.ownerDocument || elem).documentElement; - return documentElement ? documentElement.nodeName !== "HTML" : false; -}; - -// Expose support vars for convenience -support = Sizzle.support = {}; - -/** - * Sets document-related variables once based on the current document - * @param {Element|Object} [doc] An element or document object to use to set the document - * @returns {Object} Returns the current document - */ -setDocument = Sizzle.setDocument = function( node ) { - var doc = node ? 
node.ownerDocument || node : preferredDoc, - parent = doc.defaultView; - - // If no document and documentElement is available, return - if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) { - return document; - } - - // Set our document - document = doc; - docElem = doc.documentElement; - - // Support tests - documentIsHTML = !isXML( doc ); - - // Support: IE>8 - // If iframe document is assigned to "document" variable and if iframe has been reloaded, - // IE will throw "permission denied" error when accessing "document" variable, see jQuery #13936 - // IE6-8 do not support the defaultView property so parent will be undefined - if ( parent && parent.attachEvent && parent !== parent.top ) { - parent.attachEvent( "onbeforeunload", function() { - setDocument(); - }); - } - - /* Attributes - ---------------------------------------------------------------------- */ - - // Support: IE<8 - // Verify that getAttribute really returns attributes and not properties (excepting IE8 booleans) - support.attributes = assert(function( div ) { - div.className = "i"; - return !div.getAttribute("className"); - }); - - /* getElement(s)By* - ---------------------------------------------------------------------- */ - - // Check if getElementsByTagName("*") returns only elements - support.getElementsByTagName = assert(function( div ) { - div.appendChild( doc.createComment("") ); - return !div.getElementsByTagName("*").length; - }); - - // Check if getElementsByClassName can be trusted - support.getElementsByClassName = assert(function( div ) { - div.innerHTML = "
    "; - - // Support: Safari<4 - // Catch class over-caching - div.firstChild.className = "i"; - // Support: Opera<10 - // Catch gEBCN failure to find non-leading classes - return div.getElementsByClassName("i").length === 2; - }); - - // Support: IE<10 - // Check if getElementById returns elements by name - // The broken getElementById methods don't pick up programatically-set names, - // so use a roundabout getElementsByName test - support.getById = assert(function( div ) { - docElem.appendChild( div ).id = expando; - return !doc.getElementsByName || !doc.getElementsByName( expando ).length; - }); - - // ID find and filter - if ( support.getById ) { - Expr.find["ID"] = function( id, context ) { - if ( typeof context.getElementById !== strundefined && documentIsHTML ) { - var m = context.getElementById( id ); - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - return m && m.parentNode ? [m] : []; - } - }; - Expr.filter["ID"] = function( id ) { - var attrId = id.replace( runescape, funescape ); - return function( elem ) { - return elem.getAttribute("id") === attrId; - }; - }; - } else { - // Support: IE6/7 - // getElementById is not reliable as a find shortcut - delete Expr.find["ID"]; - - Expr.filter["ID"] = function( id ) { - var attrId = id.replace( runescape, funescape ); - return function( elem ) { - var node = typeof elem.getAttributeNode !== strundefined && elem.getAttributeNode("id"); - return node && node.value === attrId; - }; - }; - } - - // Tag - Expr.find["TAG"] = support.getElementsByTagName ? 
- function( tag, context ) { - if ( typeof context.getElementsByTagName !== strundefined ) { - return context.getElementsByTagName( tag ); - } - } : - function( tag, context ) { - var elem, - tmp = [], - i = 0, - results = context.getElementsByTagName( tag ); - - // Filter out possible comments - if ( tag === "*" ) { - while ( (elem = results[i++]) ) { - if ( elem.nodeType === 1 ) { - tmp.push( elem ); - } - } - - return tmp; - } - return results; - }; - - // Class - Expr.find["CLASS"] = support.getElementsByClassName && function( className, context ) { - if ( typeof context.getElementsByClassName !== strundefined && documentIsHTML ) { - return context.getElementsByClassName( className ); - } - }; - - /* QSA/matchesSelector - ---------------------------------------------------------------------- */ - - // QSA and matchesSelector support - - // matchesSelector(:active) reports false when true (IE9/Opera 11.5) - rbuggyMatches = []; - - // qSa(:focus) reports false when true (Chrome 21) - // We allow this because of a bug in IE8/9 that throws an error - // whenever `document.activeElement` is accessed on an iframe - // So, we allow :focus to pass through QSA all the time to avoid the IE error - // See http://bugs.jquery.com/ticket/13378 - rbuggyQSA = []; - - if ( (support.qsa = rnative.test( doc.querySelectorAll )) ) { - // Build QSA regex - // Regex strategy adopted from Diego Perini - assert(function( div ) { - // Select is set to empty string on purpose - // This is to test IE's treatment of not explicitly - // setting a boolean content attribute, - // since its presence should be enough - // http://bugs.jquery.com/ticket/12359 - div.innerHTML = ""; - - // Support: IE8 - // Boolean attributes and "value" are not treated correctly - if ( !div.querySelectorAll("[selected]").length ) { - rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" ); - } - - // Webkit/Opera - :checked should return selected option elements - // 
http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked - // IE8 throws error here and will not see later tests - if ( !div.querySelectorAll(":checked").length ) { - rbuggyQSA.push(":checked"); - } - }); - - assert(function( div ) { - - // Support: Opera 10-12/IE8 - // ^= $= *= and empty values - // Should not select anything - // Support: Windows 8 Native Apps - // The type attribute is restricted during .innerHTML assignment - var input = doc.createElement("input"); - input.setAttribute( "type", "hidden" ); - div.appendChild( input ).setAttribute( "t", "" ); - - if ( div.querySelectorAll("[t^='']").length ) { - rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" ); - } - - // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) - // IE8 throws error here and will not see later tests - if ( !div.querySelectorAll(":enabled").length ) { - rbuggyQSA.push( ":enabled", ":disabled" ); - } - - // Opera 10-11 does not throw on post-comma invalid pseudos - div.querySelectorAll("*,:x"); - rbuggyQSA.push(",.*:"); - }); - } - - if ( (support.matchesSelector = rnative.test( (matches = docElem.webkitMatchesSelector || - docElem.mozMatchesSelector || - docElem.oMatchesSelector || - docElem.msMatchesSelector) )) ) { - - assert(function( div ) { - // Check to see if it's possible to do matchesSelector - // on a disconnected node (IE 9) - support.disconnectedMatch = matches.call( div, "div" ); - - // This should fail with an exception - // Gecko does not error, returns false instead - matches.call( div, "[s!='']:x" ); - rbuggyMatches.push( "!=", pseudos ); - }); - } - - rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join("|") ); - rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join("|") ); - - /* Contains - ---------------------------------------------------------------------- */ - - // Element contains another - // Purposefully does not implement inclusive descendent - // As in, an element does not contain itself - 
contains = rnative.test( docElem.contains ) || docElem.compareDocumentPosition ? - function( a, b ) { - var adown = a.nodeType === 9 ? a.documentElement : a, - bup = b && b.parentNode; - return a === bup || !!( bup && bup.nodeType === 1 && ( - adown.contains ? - adown.contains( bup ) : - a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 - )); - } : - function( a, b ) { - if ( b ) { - while ( (b = b.parentNode) ) { - if ( b === a ) { - return true; - } - } - } - return false; - }; - - /* Sorting - ---------------------------------------------------------------------- */ - - // Document order sorting - sortOrder = docElem.compareDocumentPosition ? - function( a, b ) { - - // Flag for duplicate removal - if ( a === b ) { - hasDuplicate = true; - return 0; - } - - var compare = b.compareDocumentPosition && a.compareDocumentPosition && a.compareDocumentPosition( b ); - - if ( compare ) { - // Disconnected nodes - if ( compare & 1 || - (!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) { - - // Choose the first element that is related to our preferred document - if ( a === doc || contains(preferredDoc, a) ) { - return -1; - } - if ( b === doc || contains(preferredDoc, b) ) { - return 1; - } - - // Maintain original order - return sortInput ? - ( indexOf.call( sortInput, a ) - indexOf.call( sortInput, b ) ) : - 0; - } - - return compare & 4 ? -1 : 1; - } - - // Not directly comparable, sort on existence of method - return a.compareDocumentPosition ? -1 : 1; - } : - function( a, b ) { - var cur, - i = 0, - aup = a.parentNode, - bup = b.parentNode, - ap = [ a ], - bp = [ b ]; - - // Exit early if the nodes are identical - if ( a === b ) { - hasDuplicate = true; - return 0; - - // Parentless nodes are either documents or disconnected - } else if ( !aup || !bup ) { - return a === doc ? -1 : - b === doc ? 1 : - aup ? -1 : - bup ? 1 : - sortInput ? 
- ( indexOf.call( sortInput, a ) - indexOf.call( sortInput, b ) ) : - 0; - - // If the nodes are siblings, we can do a quick check - } else if ( aup === bup ) { - return siblingCheck( a, b ); - } - - // Otherwise we need full lists of their ancestors for comparison - cur = a; - while ( (cur = cur.parentNode) ) { - ap.unshift( cur ); - } - cur = b; - while ( (cur = cur.parentNode) ) { - bp.unshift( cur ); - } - - // Walk down the tree looking for a discrepancy - while ( ap[i] === bp[i] ) { - i++; - } - - return i ? - // Do a sibling check if the nodes have a common ancestor - siblingCheck( ap[i], bp[i] ) : - - // Otherwise nodes in our document sort first - ap[i] === preferredDoc ? -1 : - bp[i] === preferredDoc ? 1 : - 0; - }; - - return doc; -}; - -Sizzle.matches = function( expr, elements ) { - return Sizzle( expr, null, null, elements ); -}; - -Sizzle.matchesSelector = function( elem, expr ) { - // Set document vars if needed - if ( ( elem.ownerDocument || elem ) !== document ) { - setDocument( elem ); - } - - // Make sure that attribute selectors are quoted - expr = expr.replace( rattributeQuotes, "='$1']" ); - - if ( support.matchesSelector && documentIsHTML && - ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && - ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { - - try { - var ret = matches.call( elem, expr ); - - // IE 9's matchesSelector returns false on disconnected nodes - if ( ret || support.disconnectedMatch || - // As well, disconnected nodes are said to be in a document - // fragment in IE 9 - elem.document && elem.document.nodeType !== 11 ) { - return ret; - } - } catch(e) {} - } - - return Sizzle( expr, document, null, [elem] ).length > 0; -}; - -Sizzle.contains = function( context, elem ) { - // Set document vars if needed - if ( ( context.ownerDocument || context ) !== document ) { - setDocument( context ); - } - return contains( context, elem ); -}; - -Sizzle.attr = function( elem, name ) { - // Set document vars if needed - if ( ( 
elem.ownerDocument || elem ) !== document ) { - setDocument( elem ); - } - - var fn = Expr.attrHandle[ name.toLowerCase() ], - // Don't get fooled by Object.prototype properties (jQuery #13807) - val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? - fn( elem, name, !documentIsHTML ) : - undefined; - - return val === undefined ? - support.attributes || !documentIsHTML ? - elem.getAttribute( name ) : - (val = elem.getAttributeNode(name)) && val.specified ? - val.value : - null : - val; -}; - -Sizzle.error = function( msg ) { - throw new Error( "Syntax error, unrecognized expression: " + msg ); -}; - -/** - * Document sorting and removing duplicates - * @param {ArrayLike} results - */ -Sizzle.uniqueSort = function( results ) { - var elem, - duplicates = [], - j = 0, - i = 0; - - // Unless we *know* we can detect duplicates, assume their presence - hasDuplicate = !support.detectDuplicates; - sortInput = !support.sortStable && results.slice( 0 ); - results.sort( sortOrder ); - - if ( hasDuplicate ) { - while ( (elem = results[i++]) ) { - if ( elem === results[ i ] ) { - j = duplicates.push( i ); - } - } - while ( j-- ) { - results.splice( duplicates[ j ], 1 ); - } - } - - return results; -}; - -/** - * Utility function for retrieving the text value of an array of DOM nodes - * @param {Array|Element} elem - */ -getText = Sizzle.getText = function( elem ) { - var node, - ret = "", - i = 0, - nodeType = elem.nodeType; - - if ( !nodeType ) { - // If no nodeType, this is expected to be an array - for ( ; (node = elem[i]); i++ ) { - // Do not traverse comment nodes - ret += getText( node ); - } - } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { - // Use textContent for elements - // innerText usage removed for consistency of new lines (see #11153) - if ( typeof elem.textContent === "string" ) { - return elem.textContent; - } else { - // Traverse its children - for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { - ret += getText( 
elem ); - } - } - } else if ( nodeType === 3 || nodeType === 4 ) { - return elem.nodeValue; - } - // Do not include comment or processing instruction nodes - - return ret; -}; - -Expr = Sizzle.selectors = { - - // Can be adjusted by the user - cacheLength: 50, - - createPseudo: markFunction, - - match: matchExpr, - - attrHandle: {}, - - find: {}, - - relative: { - ">": { dir: "parentNode", first: true }, - " ": { dir: "parentNode" }, - "+": { dir: "previousSibling", first: true }, - "~": { dir: "previousSibling" } - }, - - preFilter: { - "ATTR": function( match ) { - match[1] = match[1].replace( runescape, funescape ); - - // Move the given value to match[3] whether quoted or unquoted - match[3] = ( match[4] || match[5] || "" ).replace( runescape, funescape ); - - if ( match[2] === "~=" ) { - match[3] = " " + match[3] + " "; - } - - return match.slice( 0, 4 ); - }, - - "CHILD": function( match ) { - /* matches from matchExpr["CHILD"] - 1 type (only|nth|...) - 2 what (child|of-type) - 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) - 4 xn-component of xn+y argument ([+-]?\d*n|) - 5 sign of xn-component - 6 x of xn-component - 7 sign of y-component - 8 y of y-component - */ - match[1] = match[1].toLowerCase(); - - if ( match[1].slice( 0, 3 ) === "nth" ) { - // nth-* requires argument - if ( !match[3] ) { - Sizzle.error( match[0] ); - } - - // numeric x and y parameters for Expr.filter.CHILD - // remember that false/true cast respectively to 0/1 - match[4] = +( match[4] ? 
match[5] + (match[6] || 1) : 2 * ( match[3] === "even" || match[3] === "odd" ) ); - match[5] = +( ( match[7] + match[8] ) || match[3] === "odd" ); - - // other types prohibit arguments - } else if ( match[3] ) { - Sizzle.error( match[0] ); - } - - return match; - }, - - "PSEUDO": function( match ) { - var excess, - unquoted = !match[5] && match[2]; - - if ( matchExpr["CHILD"].test( match[0] ) ) { - return null; - } - - // Accept quoted arguments as-is - if ( match[3] && match[4] !== undefined ) { - match[2] = match[4]; - - // Strip excess characters from unquoted arguments - } else if ( unquoted && rpseudo.test( unquoted ) && - // Get excess from tokenize (recursively) - (excess = tokenize( unquoted, true )) && - // advance to the next closing parenthesis - (excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) { - - // excess is a negative index - match[0] = match[0].slice( 0, excess ); - match[2] = unquoted.slice( 0, excess ); - } - - // Return only captures needed by the pseudo filter method (type and argument) - return match.slice( 0, 3 ); - } - }, - - filter: { - - "TAG": function( nodeNameSelector ) { - var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); - return nodeNameSelector === "*" ? 
- function() { return true; } : - function( elem ) { - return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; - }; - }, - - "CLASS": function( className ) { - var pattern = classCache[ className + " " ]; - - return pattern || - (pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) && - classCache( className, function( elem ) { - return pattern.test( typeof elem.className === "string" && elem.className || typeof elem.getAttribute !== strundefined && elem.getAttribute("class") || "" ); - }); - }, - - "ATTR": function( name, operator, check ) { - return function( elem ) { - var result = Sizzle.attr( elem, name ); - - if ( result == null ) { - return operator === "!="; - } - if ( !operator ) { - return true; - } - - result += ""; - - return operator === "=" ? result === check : - operator === "!=" ? result !== check : - operator === "^=" ? check && result.indexOf( check ) === 0 : - operator === "*=" ? check && result.indexOf( check ) > -1 : - operator === "$=" ? check && result.slice( -check.length ) === check : - operator === "~=" ? ( " " + result + " " ).indexOf( check ) > -1 : - operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : - false; - }; - }, - - "CHILD": function( type, what, argument, first, last ) { - var simple = type.slice( 0, 3 ) !== "nth", - forward = type.slice( -4 ) !== "last", - ofType = what === "of-type"; - - return first === 1 && last === 0 ? - - // Shortcut for :nth-*(n) - function( elem ) { - return !!elem.parentNode; - } : - - function( elem, context, xml ) { - var cache, outerCache, node, diff, nodeIndex, start, - dir = simple !== forward ? "nextSibling" : "previousSibling", - parent = elem.parentNode, - name = ofType && elem.nodeName.toLowerCase(), - useCache = !xml && !ofType; - - if ( parent ) { - - // :(first|last|only)-(child|of-type) - if ( simple ) { - while ( dir ) { - node = elem; - while ( (node = node[ dir ]) ) { - if ( ofType ? 
node.nodeName.toLowerCase() === name : node.nodeType === 1 ) { - return false; - } - } - // Reverse direction for :only-* (if we haven't yet done so) - start = dir = type === "only" && !start && "nextSibling"; - } - return true; - } - - start = [ forward ? parent.firstChild : parent.lastChild ]; - - // non-xml :nth-child(...) stores cache data on `parent` - if ( forward && useCache ) { - // Seek `elem` from a previously-cached index - outerCache = parent[ expando ] || (parent[ expando ] = {}); - cache = outerCache[ type ] || []; - nodeIndex = cache[0] === dirruns && cache[1]; - diff = cache[0] === dirruns && cache[2]; - node = nodeIndex && parent.childNodes[ nodeIndex ]; - - while ( (node = ++nodeIndex && node && node[ dir ] || - - // Fallback to seeking `elem` from the start - (diff = nodeIndex = 0) || start.pop()) ) { - - // When found, cache indexes on `parent` and break - if ( node.nodeType === 1 && ++diff && node === elem ) { - outerCache[ type ] = [ dirruns, nodeIndex, diff ]; - break; - } - } - - // Use previously-cached element index if available - } else if ( useCache && (cache = (elem[ expando ] || (elem[ expando ] = {}))[ type ]) && cache[0] === dirruns ) { - diff = cache[1]; - - // xml :nth-child(...) or :nth-last-child(...) or :nth(-last)?-of-type(...) - } else { - // Use the same loop as above to seek `elem` from the start - while ( (node = ++nodeIndex && node && node[ dir ] || - (diff = nodeIndex = 0) || start.pop()) ) { - - if ( ( ofType ? 
node.nodeName.toLowerCase() === name : node.nodeType === 1 ) && ++diff ) { - // Cache the index of each encountered element - if ( useCache ) { - (node[ expando ] || (node[ expando ] = {}))[ type ] = [ dirruns, diff ]; - } - - if ( node === elem ) { - break; - } - } - } - } - - // Incorporate the offset, then check against cycle size - diff -= last; - return diff === first || ( diff % first === 0 && diff / first >= 0 ); - } - }; - }, - - "PSEUDO": function( pseudo, argument ) { - // pseudo-class names are case-insensitive - // http://www.w3.org/TR/selectors/#pseudo-classes - // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters - // Remember that setFilters inherits from pseudos - var args, - fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || - Sizzle.error( "unsupported pseudo: " + pseudo ); - - // The user may use createPseudo to indicate that - // arguments are needed to create the filter function - // just as Sizzle does - if ( fn[ expando ] ) { - return fn( argument ); - } - - // But maintain support for old signatures - if ( fn.length > 1 ) { - args = [ pseudo, pseudo, "", argument ]; - return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? - markFunction(function( seed, matches ) { - var idx, - matched = fn( seed, argument ), - i = matched.length; - while ( i-- ) { - idx = indexOf.call( seed, matched[i] ); - seed[ idx ] = !( matches[ idx ] = matched[i] ); - } - }) : - function( elem ) { - return fn( elem, 0, args ); - }; - } - - return fn; - } - }, - - pseudos: { - // Potentially complex pseudos - "not": markFunction(function( selector ) { - // Trim the selector passed to compile - // to avoid treating leading and trailing - // spaces as combinators - var input = [], - results = [], - matcher = compile( selector.replace( rtrim, "$1" ) ); - - return matcher[ expando ] ? 
- markFunction(function( seed, matches, context, xml ) { - var elem, - unmatched = matcher( seed, null, xml, [] ), - i = seed.length; - - // Match elements unmatched by `matcher` - while ( i-- ) { - if ( (elem = unmatched[i]) ) { - seed[i] = !(matches[i] = elem); - } - } - }) : - function( elem, context, xml ) { - input[0] = elem; - matcher( input, null, xml, results ); - return !results.pop(); - }; - }), - - "has": markFunction(function( selector ) { - return function( elem ) { - return Sizzle( selector, elem ).length > 0; - }; - }), - - "contains": markFunction(function( text ) { - return function( elem ) { - return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1; - }; - }), - - // "Whether an element is represented by a :lang() selector - // is based solely on the element's language value - // being equal to the identifier C, - // or beginning with the identifier C immediately followed by "-". - // The matching of C against the element's language value is performed case-insensitively. - // The identifier C does not have to be a valid language name." - // http://www.w3.org/TR/selectors/#lang-pseudo - "lang": markFunction( function( lang ) { - // lang value must be a valid identifier - if ( !ridentifier.test(lang || "") ) { - Sizzle.error( "unsupported lang: " + lang ); - } - lang = lang.replace( runescape, funescape ).toLowerCase(); - return function( elem ) { - var elemLang; - do { - if ( (elemLang = documentIsHTML ? 
- elem.lang : - elem.getAttribute("xml:lang") || elem.getAttribute("lang")) ) { - - elemLang = elemLang.toLowerCase(); - return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; - } - } while ( (elem = elem.parentNode) && elem.nodeType === 1 ); - return false; - }; - }), - - // Miscellaneous - "target": function( elem ) { - var hash = window.location && window.location.hash; - return hash && hash.slice( 1 ) === elem.id; - }, - - "root": function( elem ) { - return elem === docElem; - }, - - "focus": function( elem ) { - return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex); - }, - - // Boolean properties - "enabled": function( elem ) { - return elem.disabled === false; - }, - - "disabled": function( elem ) { - return elem.disabled === true; - }, - - "checked": function( elem ) { - // In CSS3, :checked should return both checked and selected elements - // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked - var nodeName = elem.nodeName.toLowerCase(); - return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected); - }, - - "selected": function( elem ) { - // Accessing this property makes selected-by-default - // options in Safari work properly - if ( elem.parentNode ) { - elem.parentNode.selectedIndex; - } - - return elem.selected === true; - }, - - // Contents - "empty": function( elem ) { - // http://www.w3.org/TR/selectors/#empty-pseudo - // :empty is only affected by element nodes and content nodes(including text(3), cdata(4)), - // not comment, processing instructions, or others - // Thanks to Diego Perini for the nodeName shortcut - // Greater than "@" means alpha characters (specifically not starting with "#" or "?") - for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { - if ( elem.nodeName > "@" || elem.nodeType === 3 || elem.nodeType === 4 ) { - return false; - } - } - return true; - }, - - "parent": function( elem 
) { - return !Expr.pseudos["empty"]( elem ); - }, - - // Element/input types - "header": function( elem ) { - return rheader.test( elem.nodeName ); - }, - - "input": function( elem ) { - return rinputs.test( elem.nodeName ); - }, - - "button": function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && elem.type === "button" || name === "button"; - }, - - "text": function( elem ) { - var attr; - // IE6 and 7 will map elem.type to 'text' for new HTML5 types (search, etc) - // use getAttribute instead to test this case - return elem.nodeName.toLowerCase() === "input" && - elem.type === "text" && - ( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === elem.type ); - }, - - // Position-in-collection - "first": createPositionalPseudo(function() { - return [ 0 ]; - }), - - "last": createPositionalPseudo(function( matchIndexes, length ) { - return [ length - 1 ]; - }), - - "eq": createPositionalPseudo(function( matchIndexes, length, argument ) { - return [ argument < 0 ? argument + length : argument ]; - }), - - "even": createPositionalPseudo(function( matchIndexes, length ) { - var i = 0; - for ( ; i < length; i += 2 ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "odd": createPositionalPseudo(function( matchIndexes, length ) { - var i = 1; - for ( ; i < length; i += 2 ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "lt": createPositionalPseudo(function( matchIndexes, length, argument ) { - var i = argument < 0 ? argument + length : argument; - for ( ; --i >= 0; ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "gt": createPositionalPseudo(function( matchIndexes, length, argument ) { - var i = argument < 0 ? 
argument + length : argument; - for ( ; ++i < length; ) { - matchIndexes.push( i ); - } - return matchIndexes; - }) - } -}; - -Expr.pseudos["nth"] = Expr.pseudos["eq"]; - -// Add button/input type pseudos -for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { - Expr.pseudos[ i ] = createInputPseudo( i ); -} -for ( i in { submit: true, reset: true } ) { - Expr.pseudos[ i ] = createButtonPseudo( i ); -} - -// Easy API for creating new setFilters -function setFilters() {} -setFilters.prototype = Expr.filters = Expr.pseudos; -Expr.setFilters = new setFilters(); - -function tokenize( selector, parseOnly ) { - var matched, match, tokens, type, - soFar, groups, preFilters, - cached = tokenCache[ selector + " " ]; - - if ( cached ) { - return parseOnly ? 0 : cached.slice( 0 ); - } - - soFar = selector; - groups = []; - preFilters = Expr.preFilter; - - while ( soFar ) { - - // Comma and first run - if ( !matched || (match = rcomma.exec( soFar )) ) { - if ( match ) { - // Don't consume trailing commas as valid - soFar = soFar.slice( match[0].length ) || soFar; - } - groups.push( tokens = [] ); - } - - matched = false; - - // Combinators - if ( (match = rcombinators.exec( soFar )) ) { - matched = match.shift(); - tokens.push({ - value: matched, - // Cast descendant combinators to space - type: match[0].replace( rtrim, " " ) - }); - soFar = soFar.slice( matched.length ); - } - - // Filters - for ( type in Expr.filter ) { - if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] || - (match = preFilters[ type ]( match ))) ) { - matched = match.shift(); - tokens.push({ - value: matched, - type: type, - matches: match - }); - soFar = soFar.slice( matched.length ); - } - } - - if ( !matched ) { - break; - } - } - - // Return the length of the invalid excess - // if we're just parsing - // Otherwise, throw an error or return tokens - return parseOnly ? - soFar.length : - soFar ? 
- Sizzle.error( selector ) : - // Cache the tokens - tokenCache( selector, groups ).slice( 0 ); -} - -function toSelector( tokens ) { - var i = 0, - len = tokens.length, - selector = ""; - for ( ; i < len; i++ ) { - selector += tokens[i].value; - } - return selector; -} - -function addCombinator( matcher, combinator, base ) { - var dir = combinator.dir, - checkNonElements = base && dir === "parentNode", - doneName = done++; - - return combinator.first ? - // Check against closest ancestor/preceding element - function( elem, context, xml ) { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - return matcher( elem, context, xml ); - } - } - } : - - // Check against all ancestor/preceding elements - function( elem, context, xml ) { - var data, cache, outerCache, - dirkey = dirruns + " " + doneName; - - // We can't set arbitrary data on XML nodes, so they don't benefit from dir caching - if ( xml ) { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - if ( matcher( elem, context, xml ) ) { - return true; - } - } - } - } else { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - outerCache = elem[ expando ] || (elem[ expando ] = {}); - if ( (cache = outerCache[ dir ]) && cache[0] === dirkey ) { - if ( (data = cache[1]) === true || data === cachedruns ) { - return data === true; - } - } else { - cache = outerCache[ dir ] = [ dirkey ]; - cache[1] = matcher( elem, context, xml ) || cachedruns; - if ( cache[1] === true ) { - return true; - } - } - } - } - } - }; -} - -function elementMatcher( matchers ) { - return matchers.length > 1 ? 
- function( elem, context, xml ) { - var i = matchers.length; - while ( i-- ) { - if ( !matchers[i]( elem, context, xml ) ) { - return false; - } - } - return true; - } : - matchers[0]; -} - -function condense( unmatched, map, filter, context, xml ) { - var elem, - newUnmatched = [], - i = 0, - len = unmatched.length, - mapped = map != null; - - for ( ; i < len; i++ ) { - if ( (elem = unmatched[i]) ) { - if ( !filter || filter( elem, context, xml ) ) { - newUnmatched.push( elem ); - if ( mapped ) { - map.push( i ); - } - } - } - } - - return newUnmatched; -} - -function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { - if ( postFilter && !postFilter[ expando ] ) { - postFilter = setMatcher( postFilter ); - } - if ( postFinder && !postFinder[ expando ] ) { - postFinder = setMatcher( postFinder, postSelector ); - } - return markFunction(function( seed, results, context, xml ) { - var temp, i, elem, - preMap = [], - postMap = [], - preexisting = results.length, - - // Get initial elements from seed or context - elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ), - - // Prefilter to get matcher input, preserving a map for seed-results synchronization - matcherIn = preFilter && ( seed || !selector ) ? - condense( elems, preMap, preFilter, context, xml ) : - elems, - - matcherOut = matcher ? - // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, - postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
- - // ...intermediate processing is necessary - [] : - - // ...otherwise use results directly - results : - matcherIn; - - // Find primary matches - if ( matcher ) { - matcher( matcherIn, matcherOut, context, xml ); - } - - // Apply postFilter - if ( postFilter ) { - temp = condense( matcherOut, postMap ); - postFilter( temp, [], context, xml ); - - // Un-match failing elements by moving them back to matcherIn - i = temp.length; - while ( i-- ) { - if ( (elem = temp[i]) ) { - matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem); - } - } - } - - if ( seed ) { - if ( postFinder || preFilter ) { - if ( postFinder ) { - // Get the final matcherOut by condensing this intermediate into postFinder contexts - temp = []; - i = matcherOut.length; - while ( i-- ) { - if ( (elem = matcherOut[i]) ) { - // Restore matcherIn since elem is not yet a final match - temp.push( (matcherIn[i] = elem) ); - } - } - postFinder( null, (matcherOut = []), temp, xml ); - } - - // Move matched elements from seed to results to keep them synchronized - i = matcherOut.length; - while ( i-- ) { - if ( (elem = matcherOut[i]) && - (temp = postFinder ? indexOf.call( seed, elem ) : preMap[i]) > -1 ) { - - seed[temp] = !(results[temp] = elem); - } - } - } - - // Add elements to results, through postFinder if defined - } else { - matcherOut = condense( - matcherOut === results ? - matcherOut.splice( preexisting, matcherOut.length ) : - matcherOut - ); - if ( postFinder ) { - postFinder( null, results, matcherOut, xml ); - } else { - push.apply( results, matcherOut ); - } - } - }); -} - -function matcherFromTokens( tokens ) { - var checkContext, matcher, j, - len = tokens.length, - leadingRelative = Expr.relative[ tokens[0].type ], - implicitRelative = leadingRelative || Expr.relative[" "], - i = leadingRelative ? 
1 : 0, - - // The foundational matcher ensures that elements are reachable from top-level context(s) - matchContext = addCombinator( function( elem ) { - return elem === checkContext; - }, implicitRelative, true ), - matchAnyContext = addCombinator( function( elem ) { - return indexOf.call( checkContext, elem ) > -1; - }, implicitRelative, true ), - matchers = [ function( elem, context, xml ) { - return ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( - (checkContext = context).nodeType ? - matchContext( elem, context, xml ) : - matchAnyContext( elem, context, xml ) ); - } ]; - - for ( ; i < len; i++ ) { - if ( (matcher = Expr.relative[ tokens[i].type ]) ) { - matchers = [ addCombinator(elementMatcher( matchers ), matcher) ]; - } else { - matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches ); - - // Return special upon seeing a positional matcher - if ( matcher[ expando ] ) { - // Find the next relative operator (if any) for proper handling - j = ++i; - for ( ; j < len; j++ ) { - if ( Expr.relative[ tokens[j].type ] ) { - break; - } - } - return setMatcher( - i > 1 && elementMatcher( matchers ), - i > 1 && toSelector( - // If the preceding token was a descendant combinator, insert an implicit any-element `*` - tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? 
"*" : "" }) - ).replace( rtrim, "$1" ), - matcher, - i < j && matcherFromTokens( tokens.slice( i, j ) ), - j < len && matcherFromTokens( (tokens = tokens.slice( j )) ), - j < len && toSelector( tokens ) - ); - } - matchers.push( matcher ); - } - } - - return elementMatcher( matchers ); -} - -function matcherFromGroupMatchers( elementMatchers, setMatchers ) { - // A counter to specify which element is currently being matched - var matcherCachedRuns = 0, - bySet = setMatchers.length > 0, - byElement = elementMatchers.length > 0, - superMatcher = function( seed, context, xml, results, expandContext ) { - var elem, j, matcher, - setMatched = [], - matchedCount = 0, - i = "0", - unmatched = seed && [], - outermost = expandContext != null, - contextBackup = outermostContext, - // We must always have either seed elements or context - elems = seed || byElement && Expr.find["TAG"]( "*", expandContext && context.parentNode || context ), - // Use integer dirruns iff this is the outermost matcher - dirrunsUnique = (dirruns += contextBackup == null ? 
1 : Math.random() || 0.1); - - if ( outermost ) { - outermostContext = context !== document && context; - cachedruns = matcherCachedRuns; - } - - // Add elements passing elementMatchers directly to results - // Keep `i` a string if there are no elements so `matchedCount` will be "00" below - for ( ; (elem = elems[i]) != null; i++ ) { - if ( byElement && elem ) { - j = 0; - while ( (matcher = elementMatchers[j++]) ) { - if ( matcher( elem, context, xml ) ) { - results.push( elem ); - break; - } - } - if ( outermost ) { - dirruns = dirrunsUnique; - cachedruns = ++matcherCachedRuns; - } - } - - // Track unmatched elements for set filters - if ( bySet ) { - // They will have gone through all possible matchers - if ( (elem = !matcher && elem) ) { - matchedCount--; - } - - // Lengthen the array for every element, matched or not - if ( seed ) { - unmatched.push( elem ); - } - } - } - - // Apply set filters to unmatched elements - matchedCount += i; - if ( bySet && i !== matchedCount ) { - j = 0; - while ( (matcher = setMatchers[j++]) ) { - matcher( unmatched, setMatched, context, xml ); - } - - if ( seed ) { - // Reintegrate element matches to eliminate the need for sorting - if ( matchedCount > 0 ) { - while ( i-- ) { - if ( !(unmatched[i] || setMatched[i]) ) { - setMatched[i] = pop.call( results ); - } - } - } - - // Discard index placeholder values to get only actual matches - setMatched = condense( setMatched ); - } - - // Add matches to results - push.apply( results, setMatched ); - - // Seedless set matches succeeding multiple successful matchers stipulate sorting - if ( outermost && !seed && setMatched.length > 0 && - ( matchedCount + setMatchers.length ) > 1 ) { - - Sizzle.uniqueSort( results ); - } - } - - // Override manipulation of globals by nested matchers - if ( outermost ) { - dirruns = dirrunsUnique; - outermostContext = contextBackup; - } - - return unmatched; - }; - - return bySet ? 
- markFunction( superMatcher ) : - superMatcher; -} - -compile = Sizzle.compile = function( selector, group /* Internal Use Only */ ) { - var i, - setMatchers = [], - elementMatchers = [], - cached = compilerCache[ selector + " " ]; - - if ( !cached ) { - // Generate a function of recursive functions that can be used to check each element - if ( !group ) { - group = tokenize( selector ); - } - i = group.length; - while ( i-- ) { - cached = matcherFromTokens( group[i] ); - if ( cached[ expando ] ) { - setMatchers.push( cached ); - } else { - elementMatchers.push( cached ); - } - } - - // Cache the compiled function - cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) ); - } - return cached; -}; - -function multipleContexts( selector, contexts, results ) { - var i = 0, - len = contexts.length; - for ( ; i < len; i++ ) { - Sizzle( selector, contexts[i], results ); - } - return results; -} - -function select( selector, context, results, seed ) { - var i, tokens, token, type, find, - match = tokenize( selector ); - - if ( !seed ) { - // Try to minimize operations if there is only one group - if ( match.length === 1 ) { - - // Take a shortcut and set the context if the root selector is an ID - tokens = match[0] = match[0].slice( 0 ); - if ( tokens.length > 2 && (token = tokens[0]).type === "ID" && - support.getById && context.nodeType === 9 && documentIsHTML && - Expr.relative[ tokens[1].type ] ) { - - context = ( Expr.find["ID"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0]; - if ( !context ) { - return results; - } - selector = selector.slice( tokens.shift().value.length ); - } - - // Fetch a seed set for right-to-left matching - i = matchExpr["needsContext"].test( selector ) ? 
0 : tokens.length; - while ( i-- ) { - token = tokens[i]; - - // Abort if we hit a combinator - if ( Expr.relative[ (type = token.type) ] ) { - break; - } - if ( (find = Expr.find[ type ]) ) { - // Search, expanding context for leading sibling combinators - if ( (seed = find( - token.matches[0].replace( runescape, funescape ), - rsibling.test( tokens[0].type ) && context.parentNode || context - )) ) { - - // If seed is empty or no tokens remain, we can return early - tokens.splice( i, 1 ); - selector = seed.length && toSelector( tokens ); - if ( !selector ) { - push.apply( results, seed ); - return results; - } - - break; - } - } - } - } - } - - // Compile and execute a filtering function - // Provide `match` to avoid retokenization if we modified the selector above - compile( selector, match )( - seed, - context, - !documentIsHTML, - results, - rsibling.test( selector ) - ); - return results; -} - -// One-time assignments - -// Sort stability -support.sortStable = expando.split("").sort( sortOrder ).join("") === expando; - -// Support: Chrome<14 -// Always assume duplicates if they aren't passed to the comparison function -support.detectDuplicates = hasDuplicate; - -// Initialize against the default document -setDocument(); - -// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27) -// Detached nodes confoundingly follow *each other* -support.sortDetached = assert(function( div1 ) { - // Should return 1, but returns 4 (following) - return div1.compareDocumentPosition( document.createElement("div") ) & 1; -}); - -// Support: IE<8 -// Prevent attribute/property "interpolation" -// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx -if ( !assert(function( div ) { - div.innerHTML = ""; - return div.firstChild.getAttribute("href") === "#" ; -}) ) { - addHandle( "type|href|height|width", function( elem, name, isXML ) { - if ( !isXML ) { - return elem.getAttribute( name, name.toLowerCase() === "type" ? 
1 : 2 ); - } - }); -} - -// Support: IE<9 -// Use defaultValue in place of getAttribute("value") -if ( !support.attributes || !assert(function( div ) { - div.innerHTML = ""; - div.firstChild.setAttribute( "value", "" ); - return div.firstChild.getAttribute( "value" ) === ""; -}) ) { - addHandle( "value", function( elem, name, isXML ) { - if ( !isXML && elem.nodeName.toLowerCase() === "input" ) { - return elem.defaultValue; - } - }); -} - -// Support: IE<9 -// Use getAttributeNode to fetch booleans when getAttribute lies -if ( !assert(function( div ) { - return div.getAttribute("disabled") == null; -}) ) { - addHandle( booleans, function( elem, name, isXML ) { - var val; - if ( !isXML ) { - return (val = elem.getAttributeNode( name )) && val.specified ? - val.value : - elem[ name ] === true ? name.toLowerCase() : null; - } - }); -} - -jQuery.find = Sizzle; -jQuery.expr = Sizzle.selectors; -jQuery.expr[":"] = jQuery.expr.pseudos; -jQuery.unique = Sizzle.uniqueSort; -jQuery.text = Sizzle.getText; -jQuery.isXMLDoc = Sizzle.isXML; -jQuery.contains = Sizzle.contains; - - -})( window ); -// String to Object options format cache -var optionsCache = {}; - -// Convert String-formatted options into Object-formatted ones and store in cache -function createOptions( options ) { - var object = optionsCache[ options ] = {}; - jQuery.each( options.match( core_rnotwhite ) || [], function( _, flag ) { - object[ flag ] = true; - }); - return object; -} - -/* - * Create a callback list using the following parameters: - * - * options: an optional list of space-separated options that will change how - * the callback list behaves or a more traditional option object - * - * By default a callback list will act like an event callback list and can be - * "fired" multiple times. 
- * - * Possible options: - * - * once: will ensure the callback list can only be fired once (like a Deferred) - * - * memory: will keep track of previous values and will call any callback added - * after the list has been fired right away with the latest "memorized" - * values (like a Deferred) - * - * unique: will ensure a callback can only be added once (no duplicate in the list) - * - * stopOnFalse: interrupt callings when a callback returns false - * - */ -jQuery.Callbacks = function( options ) { - - // Convert options from String-formatted to Object-formatted if needed - // (we check in cache first) - options = typeof options === "string" ? - ( optionsCache[ options ] || createOptions( options ) ) : - jQuery.extend( {}, options ); - - var // Flag to know if list is currently firing - firing, - // Last fire value (for non-forgettable lists) - memory, - // Flag to know if list was already fired - fired, - // End of the loop when firing - firingLength, - // Index of currently firing callback (modified by remove if needed) - firingIndex, - // First callback to fire (used internally by add and fireWith) - firingStart, - // Actual callback list - list = [], - // Stack of fire calls for repeatable lists - stack = !options.once && [], - // Fire callbacks - fire = function( data ) { - memory = options.memory && data; - fired = true; - firingIndex = firingStart || 0; - firingStart = 0; - firingLength = list.length; - firing = true; - for ( ; list && firingIndex < firingLength; firingIndex++ ) { - if ( list[ firingIndex ].apply( data[ 0 ], data[ 1 ] ) === false && options.stopOnFalse ) { - memory = false; // To prevent further calls using add - break; - } - } - firing = false; - if ( list ) { - if ( stack ) { - if ( stack.length ) { - fire( stack.shift() ); - } - } else if ( memory ) { - list = []; - } else { - self.disable(); - } - } - }, - // Actual Callbacks object - self = { - // Add a callback or a collection of callbacks to the list - add: function() { - if ( list 
) { - // First, we save the current length - var start = list.length; - (function add( args ) { - jQuery.each( args, function( _, arg ) { - var type = jQuery.type( arg ); - if ( type === "function" ) { - if ( !options.unique || !self.has( arg ) ) { - list.push( arg ); - } - } else if ( arg && arg.length && type !== "string" ) { - // Inspect recursively - add( arg ); - } - }); - })( arguments ); - // Do we need to add the callbacks to the - // current firing batch? - if ( firing ) { - firingLength = list.length; - // With memory, if we're not firing then - // we should call right away - } else if ( memory ) { - firingStart = start; - fire( memory ); - } - } - return this; - }, - // Remove a callback from the list - remove: function() { - if ( list ) { - jQuery.each( arguments, function( _, arg ) { - var index; - while( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { - list.splice( index, 1 ); - // Handle firing indexes - if ( firing ) { - if ( index <= firingLength ) { - firingLength--; - } - if ( index <= firingIndex ) { - firingIndex--; - } - } - } - }); - } - return this; - }, - // Check if a given callback is in the list. - // If no argument is given, return whether or not list has callbacks attached. - has: function( fn ) { - return fn ? jQuery.inArray( fn, list ) > -1 : !!( list && list.length ); - }, - // Remove all callbacks from the list - empty: function() { - list = []; - firingLength = 0; - return this; - }, - // Have the list do nothing anymore - disable: function() { - list = stack = memory = undefined; - return this; - }, - // Is it disabled? - disabled: function() { - return !list; - }, - // Lock the list in its current state - lock: function() { - stack = undefined; - if ( !memory ) { - self.disable(); - } - return this; - }, - // Is it locked? 
- locked: function() { - return !stack; - }, - // Call all callbacks with the given context and arguments - fireWith: function( context, args ) { - if ( list && ( !fired || stack ) ) { - args = args || []; - args = [ context, args.slice ? args.slice() : args ]; - if ( firing ) { - stack.push( args ); - } else { - fire( args ); - } - } - return this; - }, - // Call all the callbacks with the given arguments - fire: function() { - self.fireWith( this, arguments ); - return this; - }, - // To know if the callbacks have already been called at least once - fired: function() { - return !!fired; - } - }; - - return self; -}; -jQuery.extend({ - - Deferred: function( func ) { - var tuples = [ - // action, add listener, listener list, final state - [ "resolve", "done", jQuery.Callbacks("once memory"), "resolved" ], - [ "reject", "fail", jQuery.Callbacks("once memory"), "rejected" ], - [ "notify", "progress", jQuery.Callbacks("memory") ] - ], - state = "pending", - promise = { - state: function() { - return state; - }, - always: function() { - deferred.done( arguments ).fail( arguments ); - return this; - }, - then: function( /* fnDone, fnFail, fnProgress */ ) { - var fns = arguments; - return jQuery.Deferred(function( newDefer ) { - jQuery.each( tuples, function( i, tuple ) { - var action = tuple[ 0 ], - fn = jQuery.isFunction( fns[ i ] ) && fns[ i ]; - // deferred[ done | fail | progress ] for forwarding actions to newDefer - deferred[ tuple[1] ](function() { - var returned = fn && fn.apply( this, arguments ); - if ( returned && jQuery.isFunction( returned.promise ) ) { - returned.promise() - .done( newDefer.resolve ) - .fail( newDefer.reject ) - .progress( newDefer.notify ); - } else { - newDefer[ action + "With" ]( this === promise ? newDefer.promise() : this, fn ? 
[ returned ] : arguments ); - } - }); - }); - fns = null; - }).promise(); - }, - // Get a promise for this deferred - // If obj is provided, the promise aspect is added to the object - promise: function( obj ) { - return obj != null ? jQuery.extend( obj, promise ) : promise; - } - }, - deferred = {}; - - // Keep pipe for back-compat - promise.pipe = promise.then; - - // Add list-specific methods - jQuery.each( tuples, function( i, tuple ) { - var list = tuple[ 2 ], - stateString = tuple[ 3 ]; - - // promise[ done | fail | progress ] = list.add - promise[ tuple[1] ] = list.add; - - // Handle state - if ( stateString ) { - list.add(function() { - // state = [ resolved | rejected ] - state = stateString; - - // [ reject_list | resolve_list ].disable; progress_list.lock - }, tuples[ i ^ 1 ][ 2 ].disable, tuples[ 2 ][ 2 ].lock ); - } - - // deferred[ resolve | reject | notify ] - deferred[ tuple[0] ] = function() { - deferred[ tuple[0] + "With" ]( this === deferred ? promise : this, arguments ); - return this; - }; - deferred[ tuple[0] + "With" ] = list.fireWith; - }); - - // Make the deferred a promise - promise.promise( deferred ); - - // Call given func if any - if ( func ) { - func.call( deferred, deferred ); - } - - // All done! - return deferred; - }, - - // Deferred helper - when: function( subordinate /* , ..., subordinateN */ ) { - var i = 0, - resolveValues = core_slice.call( arguments ), - length = resolveValues.length, - - // the count of uncompleted subordinates - remaining = length !== 1 || ( subordinate && jQuery.isFunction( subordinate.promise ) ) ? length : 0, - - // the master Deferred. If resolveValues consist of only a single Deferred, just use that. - deferred = remaining === 1 ? subordinate : jQuery.Deferred(), - - // Update function for both resolve and progress values - updateFunc = function( i, contexts, values ) { - return function( value ) { - contexts[ i ] = this; - values[ i ] = arguments.length > 1 ? 
core_slice.call( arguments ) : value; - if( values === progressValues ) { - deferred.notifyWith( contexts, values ); - } else if ( !( --remaining ) ) { - deferred.resolveWith( contexts, values ); - } - }; - }, - - progressValues, progressContexts, resolveContexts; - - // add listeners to Deferred subordinates; treat others as resolved - if ( length > 1 ) { - progressValues = new Array( length ); - progressContexts = new Array( length ); - resolveContexts = new Array( length ); - for ( ; i < length; i++ ) { - if ( resolveValues[ i ] && jQuery.isFunction( resolveValues[ i ].promise ) ) { - resolveValues[ i ].promise() - .done( updateFunc( i, resolveContexts, resolveValues ) ) - .fail( deferred.reject ) - .progress( updateFunc( i, progressContexts, progressValues ) ); - } else { - --remaining; - } - } - } - - // if we're not waiting on anything, resolve the master - if ( !remaining ) { - deferred.resolveWith( resolveContexts, resolveValues ); - } - - return deferred.promise(); - } -}); -jQuery.support = (function( support ) { - - var all, a, input, select, fragment, opt, eventName, isSupported, i, - div = document.createElement("div"); - - // Setup - div.setAttribute( "className", "t" ); - div.innerHTML = "
    a"; - - // Finish early in limited (non-browser) environments - all = div.getElementsByTagName("*") || []; - a = div.getElementsByTagName("a")[ 0 ]; - if ( !a || !a.style || !all.length ) { - return support; - } - - // First batch of tests - select = document.createElement("select"); - opt = select.appendChild( document.createElement("option") ); - input = div.getElementsByTagName("input")[ 0 ]; - - a.style.cssText = "top:1px;float:left;opacity:.5"; - - // Test setAttribute on camelCase class. If it works, we need attrFixes when doing get/setAttribute (ie6/7) - support.getSetAttribute = div.className !== "t"; - - // IE strips leading whitespace when .innerHTML is used - support.leadingWhitespace = div.firstChild.nodeType === 3; - - // Make sure that tbody elements aren't automatically inserted - // IE will insert them into empty tables - support.tbody = !div.getElementsByTagName("tbody").length; - - // Make sure that link elements get serialized correctly by innerHTML - // This requires a wrapper element in IE - support.htmlSerialize = !!div.getElementsByTagName("link").length; - - // Get the style information from getAttribute - // (IE uses .cssText instead) - support.style = /top/.test( a.getAttribute("style") ); - - // Make sure that URLs aren't manipulated - // (IE normalizes it by default) - support.hrefNormalized = a.getAttribute("href") === "/a"; - - // Make sure that element opacity exists - // (IE uses filter instead) - // Use a regex to work around a WebKit issue. See #5145 - support.opacity = /^0.5/.test( a.style.opacity ); - - // Verify style float existence - // (IE uses styleFloat instead of cssFloat) - support.cssFloat = !!a.style.cssFloat; - - // Check the default checkbox/radio value ("" on WebKit; "on" elsewhere) - support.checkOn = !!input.value; - - // Make sure that a selected-by-default option has a working selected property. 
- // (WebKit defaults to false instead of true, IE too, if it's in an optgroup) - support.optSelected = opt.selected; - - // Tests for enctype support on a form (#6743) - support.enctype = !!document.createElement("form").enctype; - - // Makes sure cloning an html5 element does not cause problems - // Where outerHTML is undefined, this still works - support.html5Clone = document.createElement("nav").cloneNode( true ).outerHTML !== "<:nav>"; - - // Will be defined later - support.inlineBlockNeedsLayout = false; - support.shrinkWrapBlocks = false; - support.pixelPosition = false; - support.deleteExpando = true; - support.noCloneEvent = true; - support.reliableMarginRight = true; - support.boxSizingReliable = true; - - // Make sure checked status is properly cloned - input.checked = true; - support.noCloneChecked = input.cloneNode( true ).checked; - - // Make sure that the options inside disabled selects aren't marked as disabled - // (WebKit marks them as disabled) - select.disabled = true; - support.optDisabled = !opt.disabled; - - // Support: IE<9 - try { - delete div.test; - } catch( e ) { - support.deleteExpando = false; - } - - // Check if we can trust getAttribute("value") - input = document.createElement("input"); - input.setAttribute( "value", "" ); - support.input = input.getAttribute( "value" ) === ""; - - // Check if an input maintains its value after becoming a radio - input.value = "t"; - input.setAttribute( "type", "radio" ); - support.radioValue = input.value === "t"; - - // #11217 - WebKit loses check when the name is after the checked attribute - input.setAttribute( "checked", "t" ); - input.setAttribute( "name", "t" ); - - fragment = document.createDocumentFragment(); - fragment.appendChild( input ); - - // Check if a disconnected checkbox will retain its checked - // value of true after appended to the DOM (IE6/7) - support.appendChecked = input.checked; - - // WebKit doesn't clone checked state correctly in fragments - support.checkClone = 
fragment.cloneNode( true ).cloneNode( true ).lastChild.checked; - - // Support: IE<9 - // Opera does not clone events (and typeof div.attachEvent === undefined). - // IE9-10 clones events bound via attachEvent, but they don't trigger with .click() - if ( div.attachEvent ) { - div.attachEvent( "onclick", function() { - support.noCloneEvent = false; - }); - - div.cloneNode( true ).click(); - } - - // Support: IE<9 (lack submit/change bubble), Firefox 17+ (lack focusin event) - // Beware of CSP restrictions (https://developer.mozilla.org/en/Security/CSP) - for ( i in { submit: true, change: true, focusin: true }) { - div.setAttribute( eventName = "on" + i, "t" ); - - support[ i + "Bubbles" ] = eventName in window || div.attributes[ eventName ].expando === false; - } - - div.style.backgroundClip = "content-box"; - div.cloneNode( true ).style.backgroundClip = ""; - support.clearCloneStyle = div.style.backgroundClip === "content-box"; - - // Support: IE<9 - // Iteration over object's inherited properties before its own. 
- for ( i in jQuery( support ) ) { - break; - } - support.ownLast = i !== "0"; - - // Run tests that need a body at doc ready - jQuery(function() { - var container, marginDiv, tds, - divReset = "padding:0;margin:0;border:0;display:block;box-sizing:content-box;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;", - body = document.getElementsByTagName("body")[0]; - - if ( !body ) { - // Return for frameset docs that don't have a body - return; - } - - container = document.createElement("div"); - container.style.cssText = "border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px"; - - body.appendChild( container ).appendChild( div ); - - // Support: IE8 - // Check if table cells still have offsetWidth/Height when they are set - // to display:none and there are still other visible table cells in a - // table row; if so, offsetWidth/Height are not reliable for use when - // determining if an element has been hidden directly using - // display:none (it is still safe to use offsets if a parent element is - // hidden; don safety goggles and see bug #4512 for more information). - div.innerHTML = "
    t
    "; - tds = div.getElementsByTagName("td"); - tds[ 0 ].style.cssText = "padding:0;margin:0;border:0;display:none"; - isSupported = ( tds[ 0 ].offsetHeight === 0 ); - - tds[ 0 ].style.display = ""; - tds[ 1 ].style.display = "none"; - - // Support: IE8 - // Check if empty table cells still have offsetWidth/Height - support.reliableHiddenOffsets = isSupported && ( tds[ 0 ].offsetHeight === 0 ); - - // Check box-sizing and margin behavior. - div.innerHTML = ""; - div.style.cssText = "box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;"; - - // Workaround failing boxSizing test due to offsetWidth returning wrong value - // with some non-1 values of body zoom, ticket #13543 - jQuery.swap( body, body.style.zoom != null ? { zoom: 1 } : {}, function() { - support.boxSizing = div.offsetWidth === 4; - }); - - // Use window.getComputedStyle because jsdom on node.js will break without it. - if ( window.getComputedStyle ) { - support.pixelPosition = ( window.getComputedStyle( div, null ) || {} ).top !== "1%"; - support.boxSizingReliable = ( window.getComputedStyle( div, null ) || { width: "4px" } ).width === "4px"; - - // Check if div with explicit width and no margin-right incorrectly - // gets computed margin-right based on width of container. 
(#3333) - // Fails in WebKit before Feb 2011 nightlies - // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right - marginDiv = div.appendChild( document.createElement("div") ); - marginDiv.style.cssText = div.style.cssText = divReset; - marginDiv.style.marginRight = marginDiv.style.width = "0"; - div.style.width = "1px"; - - support.reliableMarginRight = - !parseFloat( ( window.getComputedStyle( marginDiv, null ) || {} ).marginRight ); - } - - if ( typeof div.style.zoom !== core_strundefined ) { - // Support: IE<8 - // Check if natively block-level elements act like inline-block - // elements when setting their display to 'inline' and giving - // them layout - div.innerHTML = ""; - div.style.cssText = divReset + "width:1px;padding:1px;display:inline;zoom:1"; - support.inlineBlockNeedsLayout = ( div.offsetWidth === 3 ); - - // Support: IE6 - // Check if elements with layout shrink-wrap their children - div.style.display = "block"; - div.innerHTML = "
    "; - div.firstChild.style.width = "5px"; - support.shrinkWrapBlocks = ( div.offsetWidth !== 3 ); - - if ( support.inlineBlockNeedsLayout ) { - // Prevent IE 6 from affecting layout for positioned elements #11048 - // Prevent IE from shrinking the body in IE 7 mode #12869 - // Support: IE<8 - body.style.zoom = 1; - } - } - - body.removeChild( container ); - - // Null elements to avoid leaks in IE - container = div = tds = marginDiv = null; - }); - - // Null elements to avoid leaks in IE - all = select = fragment = opt = a = input = null; - - return support; -})({}); - -var rbrace = /(?:\{[\s\S]*\}|\[[\s\S]*\])$/, - rmultiDash = /([A-Z])/g; - -function internalData( elem, name, data, pvt /* Internal Use Only */ ){ - if ( !jQuery.acceptData( elem ) ) { - return; - } - - var ret, thisCache, - internalKey = jQuery.expando, - - // We have to handle DOM nodes and JS objects differently because IE6-7 - // can't GC object references properly across the DOM-JS boundary - isNode = elem.nodeType, - - // Only DOM nodes need the global jQuery cache; JS object data is - // attached directly to the object so GC can occur automatically - cache = isNode ? jQuery.cache : elem, - - // Only defining an ID for JS objects if its cache already exists allows - // the code to shortcut on the same path as a DOM node with no cache - id = isNode ? 
elem[ internalKey ] : elem[ internalKey ] && internalKey; - - // Avoid doing any more work than we need to when trying to get data on an - // object that has no data at all - if ( (!id || !cache[id] || (!pvt && !cache[id].data)) && data === undefined && typeof name === "string" ) { - return; - } - - if ( !id ) { - // Only DOM nodes need a new unique ID for each element since their data - // ends up in the global cache - if ( isNode ) { - id = elem[ internalKey ] = core_deletedIds.pop() || jQuery.guid++; - } else { - id = internalKey; - } - } - - if ( !cache[ id ] ) { - // Avoid exposing jQuery metadata on plain JS objects when the object - // is serialized using JSON.stringify - cache[ id ] = isNode ? {} : { toJSON: jQuery.noop }; - } - - // An object can be passed to jQuery.data instead of a key/value pair; this gets - // shallow copied over onto the existing cache - if ( typeof name === "object" || typeof name === "function" ) { - if ( pvt ) { - cache[ id ] = jQuery.extend( cache[ id ], name ); - } else { - cache[ id ].data = jQuery.extend( cache[ id ].data, name ); - } - } - - thisCache = cache[ id ]; - - // jQuery data() is stored in a separate object inside the object's internal data - // cache in order to avoid key collisions between internal data and user-defined - // data. 
- if ( !pvt ) { - if ( !thisCache.data ) { - thisCache.data = {}; - } - - thisCache = thisCache.data; - } - - if ( data !== undefined ) { - thisCache[ jQuery.camelCase( name ) ] = data; - } - - // Check for both converted-to-camel and non-converted data property names - // If a data property was specified - if ( typeof name === "string" ) { - - // First Try to find as-is property data - ret = thisCache[ name ]; - - // Test for null|undefined property data - if ( ret == null ) { - - // Try to find the camelCased property - ret = thisCache[ jQuery.camelCase( name ) ]; - } - } else { - ret = thisCache; - } - - return ret; -} - -function internalRemoveData( elem, name, pvt ) { - if ( !jQuery.acceptData( elem ) ) { - return; - } - - var thisCache, i, - isNode = elem.nodeType, - - // See jQuery.data for more information - cache = isNode ? jQuery.cache : elem, - id = isNode ? elem[ jQuery.expando ] : jQuery.expando; - - // If there is already no cache entry for this object, there is no - // purpose in continuing - if ( !cache[ id ] ) { - return; - } - - if ( name ) { - - thisCache = pvt ? cache[ id ] : cache[ id ].data; - - if ( thisCache ) { - - // Support array or space separated string names for data keys - if ( !jQuery.isArray( name ) ) { - - // try the string as a key before any manipulation - if ( name in thisCache ) { - name = [ name ]; - } else { - - // split the camel cased version by spaces unless a key with the spaces exists - name = jQuery.camelCase( name ); - if ( name in thisCache ) { - name = [ name ]; - } else { - name = name.split(" "); - } - } - } else { - // If "name" is an array of keys... - // When data is initially created, via ("key", "val") signature, - // keys will be converted to camelCase. - // Since there is no way to tell _how_ a key was added, remove - // both plain key and camelCase key. #12786 - // This will only penalize the array argument path. 
- name = name.concat( jQuery.map( name, jQuery.camelCase ) ); - } - - i = name.length; - while ( i-- ) { - delete thisCache[ name[i] ]; - } - - // If there is no data left in the cache, we want to continue - // and let the cache object itself get destroyed - if ( pvt ? !isEmptyDataObject(thisCache) : !jQuery.isEmptyObject(thisCache) ) { - return; - } - } - } - - // See jQuery.data for more information - if ( !pvt ) { - delete cache[ id ].data; - - // Don't destroy the parent cache unless the internal data object - // had been the only thing left in it - if ( !isEmptyDataObject( cache[ id ] ) ) { - return; - } - } - - // Destroy the cache - if ( isNode ) { - jQuery.cleanData( [ elem ], true ); - - // Use delete when supported for expandos or `cache` is not a window per isWindow (#10080) - /* jshint eqeqeq: false */ - } else if ( jQuery.support.deleteExpando || cache != cache.window ) { - /* jshint eqeqeq: true */ - delete cache[ id ]; - - // When all else fails, null - } else { - cache[ id ] = null; - } -} - -jQuery.extend({ - cache: {}, - - // The following elements throw uncatchable exceptions if you - // attempt to add expando properties to them. - noData: { - "applet": true, - "embed": true, - // Ban all objects except for Flash (which handle expandos) - "object": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" - }, - - hasData: function( elem ) { - elem = elem.nodeType ? jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ]; - return !!elem && !isEmptyDataObject( elem ); - }, - - data: function( elem, name, data ) { - return internalData( elem, name, data ); - }, - - removeData: function( elem, name ) { - return internalRemoveData( elem, name ); - }, - - // For internal use only. 
- _data: function( elem, name, data ) { - return internalData( elem, name, data, true ); - }, - - _removeData: function( elem, name ) { - return internalRemoveData( elem, name, true ); - }, - - // A method for determining if a DOM node can handle the data expando - acceptData: function( elem ) { - // Do not set data on non-element because it will not be cleared (#8335). - if ( elem.nodeType && elem.nodeType !== 1 && elem.nodeType !== 9 ) { - return false; - } - - var noData = elem.nodeName && jQuery.noData[ elem.nodeName.toLowerCase() ]; - - // nodes accept data unless otherwise specified; rejection can be conditional - return !noData || noData !== true && elem.getAttribute("classid") === noData; - } -}); - -jQuery.fn.extend({ - data: function( key, value ) { - var attrs, name, - data = null, - i = 0, - elem = this[0]; - - // Special expections of .data basically thwart jQuery.access, - // so implement the relevant behavior ourselves - - // Gets all values - if ( key === undefined ) { - if ( this.length ) { - data = jQuery.data( elem ); - - if ( elem.nodeType === 1 && !jQuery._data( elem, "parsedAttrs" ) ) { - attrs = elem.attributes; - for ( ; i < attrs.length; i++ ) { - name = attrs[i].name; - - if ( name.indexOf("data-") === 0 ) { - name = jQuery.camelCase( name.slice(5) ); - - dataAttr( elem, name, data[ name ] ); - } - } - jQuery._data( elem, "parsedAttrs", true ); - } - } - - return data; - } - - // Sets multiple values - if ( typeof key === "object" ) { - return this.each(function() { - jQuery.data( this, key ); - }); - } - - return arguments.length > 1 ? - - // Sets one value - this.each(function() { - jQuery.data( this, key, value ); - }) : - - // Gets one value - // Try to fetch any internally stored data first - elem ? 
dataAttr( elem, key, jQuery.data( elem, key ) ) : null; - }, - - removeData: function( key ) { - return this.each(function() { - jQuery.removeData( this, key ); - }); - } -}); - -function dataAttr( elem, key, data ) { - // If nothing was found internally, try to fetch any - // data from the HTML5 data-* attribute - if ( data === undefined && elem.nodeType === 1 ) { - - var name = "data-" + key.replace( rmultiDash, "-$1" ).toLowerCase(); - - data = elem.getAttribute( name ); - - if ( typeof data === "string" ) { - try { - data = data === "true" ? true : - data === "false" ? false : - data === "null" ? null : - // Only convert to a number if it doesn't change the string - +data + "" === data ? +data : - rbrace.test( data ) ? jQuery.parseJSON( data ) : - data; - } catch( e ) {} - - // Make sure we set the data so it isn't changed later - jQuery.data( elem, key, data ); - - } else { - data = undefined; - } - } - - return data; -} - -// checks a cache object for emptiness -function isEmptyDataObject( obj ) { - var name; - for ( name in obj ) { - - // if the public data object is empty, the private is still empty - if ( name === "data" && jQuery.isEmptyObject( obj[name] ) ) { - continue; - } - if ( name !== "toJSON" ) { - return false; - } - } - - return true; -} -jQuery.extend({ - queue: function( elem, type, data ) { - var queue; - - if ( elem ) { - type = ( type || "fx" ) + "queue"; - queue = jQuery._data( elem, type ); - - // Speed up dequeue by getting out quickly if this is just a lookup - if ( data ) { - if ( !queue || jQuery.isArray(data) ) { - queue = jQuery._data( elem, type, jQuery.makeArray(data) ); - } else { - queue.push( data ); - } - } - return queue || []; - } - }, - - dequeue: function( elem, type ) { - type = type || "fx"; - - var queue = jQuery.queue( elem, type ), - startLength = queue.length, - fn = queue.shift(), - hooks = jQuery._queueHooks( elem, type ), - next = function() { - jQuery.dequeue( elem, type ); - }; - - // If the fx queue is 
dequeued, always remove the progress sentinel - if ( fn === "inprogress" ) { - fn = queue.shift(); - startLength--; - } - - if ( fn ) { - - // Add a progress sentinel to prevent the fx queue from being - // automatically dequeued - if ( type === "fx" ) { - queue.unshift( "inprogress" ); - } - - // clear up the last queue stop function - delete hooks.stop; - fn.call( elem, next, hooks ); - } - - if ( !startLength && hooks ) { - hooks.empty.fire(); - } - }, - - // not intended for public consumption - generates a queueHooks object, or returns the current one - _queueHooks: function( elem, type ) { - var key = type + "queueHooks"; - return jQuery._data( elem, key ) || jQuery._data( elem, key, { - empty: jQuery.Callbacks("once memory").add(function() { - jQuery._removeData( elem, type + "queue" ); - jQuery._removeData( elem, key ); - }) - }); - } -}); - -jQuery.fn.extend({ - queue: function( type, data ) { - var setter = 2; - - if ( typeof type !== "string" ) { - data = type; - type = "fx"; - setter--; - } - - if ( arguments.length < setter ) { - return jQuery.queue( this[0], type ); - } - - return data === undefined ? - this : - this.each(function() { - var queue = jQuery.queue( this, type, data ); - - // ensure a hooks for this queue - jQuery._queueHooks( this, type ); - - if ( type === "fx" && queue[0] !== "inprogress" ) { - jQuery.dequeue( this, type ); - } - }); - }, - dequeue: function( type ) { - return this.each(function() { - jQuery.dequeue( this, type ); - }); - }, - // Based off of the plugin by Clint Helfers, with permission. - // http://blindsignals.com/index.php/2009/07/jquery-delay/ - delay: function( time, type ) { - time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; - type = type || "fx"; - - return this.queue( type, function( next, hooks ) { - var timeout = setTimeout( next, time ); - hooks.stop = function() { - clearTimeout( timeout ); - }; - }); - }, - clearQueue: function( type ) { - return this.queue( type || "fx", [] ); - }, - // Get a promise resolved when queues of a certain type - // are emptied (fx is the type by default) - promise: function( type, obj ) { - var tmp, - count = 1, - defer = jQuery.Deferred(), - elements = this, - i = this.length, - resolve = function() { - if ( !( --count ) ) { - defer.resolveWith( elements, [ elements ] ); - } - }; - - if ( typeof type !== "string" ) { - obj = type; - type = undefined; - } - type = type || "fx"; - - while( i-- ) { - tmp = jQuery._data( elements[ i ], type + "queueHooks" ); - if ( tmp && tmp.empty ) { - count++; - tmp.empty.add( resolve ); - } - } - resolve(); - return defer.promise( obj ); - } -}); -var nodeHook, boolHook, - rclass = /[\t\r\n\f]/g, - rreturn = /\r/g, - rfocusable = /^(?:input|select|textarea|button|object)$/i, - rclickable = /^(?:a|area)$/i, - ruseDefault = /^(?:checked|selected)$/i, - getSetAttribute = jQuery.support.getSetAttribute, - getSetInput = jQuery.support.input; - -jQuery.fn.extend({ - attr: function( name, value ) { - return jQuery.access( this, jQuery.attr, name, value, arguments.length > 1 ); - }, - - removeAttr: function( name ) { - return this.each(function() { - jQuery.removeAttr( this, name ); - }); - }, - - prop: function( name, value ) { - return jQuery.access( this, jQuery.prop, name, value, arguments.length > 1 ); - }, - - removeProp: function( name ) { - name = jQuery.propFix[ name ] || name; - return this.each(function() { - // try/catch handles cases where IE balks (such as removing a property on window) - try { - this[ name ] = undefined; - delete this[ name ]; - } catch( e ) {} - }); - }, - - addClass: function( value ) { - var classes, elem, cur, clazz, j, - i = 0, - len = 
this.length, - proceed = typeof value === "string" && value; - - if ( jQuery.isFunction( value ) ) { - return this.each(function( j ) { - jQuery( this ).addClass( value.call( this, j, this.className ) ); - }); - } - - if ( proceed ) { - // The disjunction here is for better compressibility (see removeClass) - classes = ( value || "" ).match( core_rnotwhite ) || []; - - for ( ; i < len; i++ ) { - elem = this[ i ]; - cur = elem.nodeType === 1 && ( elem.className ? - ( " " + elem.className + " " ).replace( rclass, " " ) : - " " - ); - - if ( cur ) { - j = 0; - while ( (clazz = classes[j++]) ) { - if ( cur.indexOf( " " + clazz + " " ) < 0 ) { - cur += clazz + " "; - } - } - elem.className = jQuery.trim( cur ); - - } - } - } - - return this; - }, - - removeClass: function( value ) { - var classes, elem, cur, clazz, j, - i = 0, - len = this.length, - proceed = arguments.length === 0 || typeof value === "string" && value; - - if ( jQuery.isFunction( value ) ) { - return this.each(function( j ) { - jQuery( this ).removeClass( value.call( this, j, this.className ) ); - }); - } - if ( proceed ) { - classes = ( value || "" ).match( core_rnotwhite ) || []; - - for ( ; i < len; i++ ) { - elem = this[ i ]; - // This expression is here for better compressibility (see addClass) - cur = elem.nodeType === 1 && ( elem.className ? - ( " " + elem.className + " " ).replace( rclass, " " ) : - "" - ); - - if ( cur ) { - j = 0; - while ( (clazz = classes[j++]) ) { - // Remove *all* instances - while ( cur.indexOf( " " + clazz + " " ) >= 0 ) { - cur = cur.replace( " " + clazz + " ", " " ); - } - } - elem.className = value ? jQuery.trim( cur ) : ""; - } - } - } - - return this; - }, - - toggleClass: function( value, stateVal ) { - var type = typeof value; - - if ( typeof stateVal === "boolean" && type === "string" ) { - return stateVal ? 
this.addClass( value ) : this.removeClass( value ); - } - - if ( jQuery.isFunction( value ) ) { - return this.each(function( i ) { - jQuery( this ).toggleClass( value.call(this, i, this.className, stateVal), stateVal ); - }); - } - - return this.each(function() { - if ( type === "string" ) { - // toggle individual class names - var className, - i = 0, - self = jQuery( this ), - classNames = value.match( core_rnotwhite ) || []; - - while ( (className = classNames[ i++ ]) ) { - // check each className given, space separated list - if ( self.hasClass( className ) ) { - self.removeClass( className ); - } else { - self.addClass( className ); - } - } - - // Toggle whole class name - } else if ( type === core_strundefined || type === "boolean" ) { - if ( this.className ) { - // store className if set - jQuery._data( this, "__className__", this.className ); - } - - // If the element has a class name or if we're passed "false", - // then remove the whole classname (if there was one, the above saved it). - // Otherwise bring back whatever was previously saved (if anything), - // falling back to the empty string if nothing was stored. - this.className = this.className || value === false ? "" : jQuery._data( this, "__className__" ) || ""; - } - }); - }, - - hasClass: function( selector ) { - var className = " " + selector + " ", - i = 0, - l = this.length; - for ( ; i < l; i++ ) { - if ( this[i].nodeType === 1 && (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) >= 0 ) { - return true; - } - } - - return false; - }, - - val: function( value ) { - var ret, hooks, isFunction, - elem = this[0]; - - if ( !arguments.length ) { - if ( elem ) { - hooks = jQuery.valHooks[ elem.type ] || jQuery.valHooks[ elem.nodeName.toLowerCase() ]; - - if ( hooks && "get" in hooks && (ret = hooks.get( elem, "value" )) !== undefined ) { - return ret; - } - - ret = elem.value; - - return typeof ret === "string" ? 
- // handle most common string cases - ret.replace(rreturn, "") : - // handle cases where value is null/undef or number - ret == null ? "" : ret; - } - - return; - } - - isFunction = jQuery.isFunction( value ); - - return this.each(function( i ) { - var val; - - if ( this.nodeType !== 1 ) { - return; - } - - if ( isFunction ) { - val = value.call( this, i, jQuery( this ).val() ); - } else { - val = value; - } - - // Treat null/undefined as ""; convert numbers to string - if ( val == null ) { - val = ""; - } else if ( typeof val === "number" ) { - val += ""; - } else if ( jQuery.isArray( val ) ) { - val = jQuery.map(val, function ( value ) { - return value == null ? "" : value + ""; - }); - } - - hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; - - // If set returns undefined, fall back to normal setting - if ( !hooks || !("set" in hooks) || hooks.set( this, val, "value" ) === undefined ) { - this.value = val; - } - }); - } -}); - -jQuery.extend({ - valHooks: { - option: { - get: function( elem ) { - // Use proper attribute retrieval(#6932, #12072) - var val = jQuery.find.attr( elem, "value" ); - return val != null ? - val : - elem.text; - } - }, - select: { - get: function( elem ) { - var value, option, - options = elem.options, - index = elem.selectedIndex, - one = elem.type === "select-one" || index < 0, - values = one ? null : [], - max = one ? index + 1 : options.length, - i = index < 0 ? - max : - one ? index : 0; - - // Loop through all the selected options - for ( ; i < max; i++ ) { - option = options[ i ]; - - // oldIE doesn't update selected after form reset (#2551) - if ( ( option.selected || i === index ) && - // Don't return options that are disabled or in a disabled optgroup - ( jQuery.support.optDisabled ? 
!option.disabled : option.getAttribute("disabled") === null ) && - ( !option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" ) ) ) { - - // Get the specific value for the option - value = jQuery( option ).val(); - - // We don't need an array for one selects - if ( one ) { - return value; - } - - // Multi-Selects return an array - values.push( value ); - } - } - - return values; - }, - - set: function( elem, value ) { - var optionSet, option, - options = elem.options, - values = jQuery.makeArray( value ), - i = options.length; - - while ( i-- ) { - option = options[ i ]; - if ( (option.selected = jQuery.inArray( jQuery(option).val(), values ) >= 0) ) { - optionSet = true; - } - } - - // force browsers to behave consistently when non-matching value is set - if ( !optionSet ) { - elem.selectedIndex = -1; - } - return values; - } - } - }, - - attr: function( elem, name, value ) { - var hooks, ret, - nType = elem.nodeType; - - // don't get/set attributes on text, comment and attribute nodes - if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - // Fallback to prop when attributes are not supported - if ( typeof elem.getAttribute === core_strundefined ) { - return jQuery.prop( elem, name, value ); - } - - // All attributes are lowercase - // Grab necessary hook if one is defined - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - name = name.toLowerCase(); - hooks = jQuery.attrHooks[ name ] || - ( jQuery.expr.match.bool.test( name ) ? 
boolHook : nodeHook ); - } - - if ( value !== undefined ) { - - if ( value === null ) { - jQuery.removeAttr( elem, name ); - - } else if ( hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) { - return ret; - - } else { - elem.setAttribute( name, value + "" ); - return value; - } - - } else if ( hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ) { - return ret; - - } else { - ret = jQuery.find.attr( elem, name ); - - // Non-existent attributes return null, we normalize to undefined - return ret == null ? - undefined : - ret; - } - }, - - removeAttr: function( elem, value ) { - var name, propName, - i = 0, - attrNames = value && value.match( core_rnotwhite ); - - if ( attrNames && elem.nodeType === 1 ) { - while ( (name = attrNames[i++]) ) { - propName = jQuery.propFix[ name ] || name; - - // Boolean attributes get special treatment (#10870) - if ( jQuery.expr.match.bool.test( name ) ) { - // Set corresponding property to false - if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) { - elem[ propName ] = false; - // Support: IE<9 - // Also clear defaultChecked/defaultSelected (if appropriate) - } else { - elem[ jQuery.camelCase( "default-" + name ) ] = - elem[ propName ] = false; - } - - // See #9699 for explanation of this approach (setting first, then removal) - } else { - jQuery.attr( elem, name, "" ); - } - - elem.removeAttribute( getSetAttribute ? 
name : propName ); - } - } - }, - - attrHooks: { - type: { - set: function( elem, value ) { - if ( !jQuery.support.radioValue && value === "radio" && jQuery.nodeName(elem, "input") ) { - // Setting the type on a radio button after the value resets the value in IE6-9 - // Reset value to default in case type is set after value during creation - var val = elem.value; - elem.setAttribute( "type", value ); - if ( val ) { - elem.value = val; - } - return value; - } - } - } - }, - - propFix: { - "for": "htmlFor", - "class": "className" - }, - - prop: function( elem, name, value ) { - var ret, hooks, notxml, - nType = elem.nodeType; - - // don't get/set properties on text, comment and attribute nodes - if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); - - if ( notxml ) { - // Fix name and attach hooks - name = jQuery.propFix[ name ] || name; - hooks = jQuery.propHooks[ name ]; - } - - if ( value !== undefined ) { - return hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ? - ret : - ( elem[ name ] = value ); - - } else { - return hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ? - ret : - elem[ name ]; - } - }, - - propHooks: { - tabIndex: { - get: function( elem ) { - // elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set - // http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ - // Use proper attribute retrieval(#12072) - var tabindex = jQuery.find.attr( elem, "tabindex" ); - - return tabindex ? - parseInt( tabindex, 10 ) : - rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ? 
- 0 : - -1; - } - } - } -}); - -// Hooks for boolean attributes -boolHook = { - set: function( elem, value, name ) { - if ( value === false ) { - // Remove boolean attributes when set to false - jQuery.removeAttr( elem, name ); - } else if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) { - // IE<8 needs the *property* name - elem.setAttribute( !getSetAttribute && jQuery.propFix[ name ] || name, name ); - - // Use defaultChecked and defaultSelected for oldIE - } else { - elem[ jQuery.camelCase( "default-" + name ) ] = elem[ name ] = true; - } - - return name; - } -}; -jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( i, name ) { - var getter = jQuery.expr.attrHandle[ name ] || jQuery.find.attr; - - jQuery.expr.attrHandle[ name ] = getSetInput && getSetAttribute || !ruseDefault.test( name ) ? - function( elem, name, isXML ) { - var fn = jQuery.expr.attrHandle[ name ], - ret = isXML ? - undefined : - /* jshint eqeqeq: false */ - (jQuery.expr.attrHandle[ name ] = undefined) != - getter( elem, name, isXML ) ? - - name.toLowerCase() : - null; - jQuery.expr.attrHandle[ name ] = fn; - return ret; - } : - function( elem, name, isXML ) { - return isXML ? - undefined : - elem[ jQuery.camelCase( "default-" + name ) ] ? 
- name.toLowerCase() : - null; - }; -}); - -// fix oldIE attroperties -if ( !getSetInput || !getSetAttribute ) { - jQuery.attrHooks.value = { - set: function( elem, value, name ) { - if ( jQuery.nodeName( elem, "input" ) ) { - // Does not return so that setAttribute is also used - elem.defaultValue = value; - } else { - // Use nodeHook if defined (#1954); otherwise setAttribute is fine - return nodeHook && nodeHook.set( elem, value, name ); - } - } - }; -} - -// IE6/7 do not support getting/setting some attributes with get/setAttribute -if ( !getSetAttribute ) { - - // Use this for any attribute in IE6/7 - // This fixes almost every IE6/7 issue - nodeHook = { - set: function( elem, value, name ) { - // Set the existing or create a new attribute node - var ret = elem.getAttributeNode( name ); - if ( !ret ) { - elem.setAttributeNode( - (ret = elem.ownerDocument.createAttribute( name )) - ); - } - - ret.value = value += ""; - - // Break association with cloned elements by also using setAttribute (#9646) - return name === "value" || value === elem.getAttribute( name ) ? - value : - undefined; - } - }; - jQuery.expr.attrHandle.id = jQuery.expr.attrHandle.name = jQuery.expr.attrHandle.coords = - // Some attributes are constructed with empty-string values when not defined - function( elem, name, isXML ) { - var ret; - return isXML ? - undefined : - (ret = elem.getAttributeNode( name )) && ret.value !== "" ? - ret.value : - null; - }; - jQuery.valHooks.button = { - get: function( elem, name ) { - var ret = elem.getAttributeNode( name ); - return ret && ret.specified ? - ret.value : - undefined; - }, - set: nodeHook.set - }; - - // Set contenteditable to false on removals(#10429) - // Setting to empty string throws an error as an invalid value - jQuery.attrHooks.contenteditable = { - set: function( elem, value, name ) { - nodeHook.set( elem, value === "" ? 
false : value, name ); - } - }; - - // Set width and height to auto instead of 0 on empty string( Bug #8150 ) - // This is for removals - jQuery.each([ "width", "height" ], function( i, name ) { - jQuery.attrHooks[ name ] = { - set: function( elem, value ) { - if ( value === "" ) { - elem.setAttribute( name, "auto" ); - return value; - } - } - }; - }); -} - - -// Some attributes require a special call on IE -// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx -if ( !jQuery.support.hrefNormalized ) { - // href/src property should get the full normalized URL (#10299/#12915) - jQuery.each([ "href", "src" ], function( i, name ) { - jQuery.propHooks[ name ] = { - get: function( elem ) { - return elem.getAttribute( name, 4 ); - } - }; - }); -} - -if ( !jQuery.support.style ) { - jQuery.attrHooks.style = { - get: function( elem ) { - // Return undefined in the case of empty string - // Note: IE uppercases css property names, but if we were to .toLowerCase() - // .cssText, that would destroy case senstitivity in URL's, like in "background" - return elem.style.cssText || undefined; - }, - set: function( elem, value ) { - return ( elem.style.cssText = value + "" ); - } - }; -} - -// Safari mis-reports the default selected property of an option -// Accessing the parent's selectedIndex property fixes it -if ( !jQuery.support.optSelected ) { - jQuery.propHooks.selected = { - get: function( elem ) { - var parent = elem.parentNode; - - if ( parent ) { - parent.selectedIndex; - - // Make sure that it also works with optgroups, see #5701 - if ( parent.parentNode ) { - parent.parentNode.selectedIndex; - } - } - return null; - } - }; -} - -jQuery.each([ - "tabIndex", - "readOnly", - "maxLength", - "cellSpacing", - "cellPadding", - "rowSpan", - "colSpan", - "useMap", - "frameBorder", - "contentEditable" -], function() { - jQuery.propFix[ this.toLowerCase() ] = this; -}); - -// IE6/7 call enctype encoding -if ( !jQuery.support.enctype ) { - jQuery.propFix.enctype = 
"encoding"; -} - -// Radios and checkboxes getter/setter -jQuery.each([ "radio", "checkbox" ], function() { - jQuery.valHooks[ this ] = { - set: function( elem, value ) { - if ( jQuery.isArray( value ) ) { - return ( elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0 ); - } - } - }; - if ( !jQuery.support.checkOn ) { - jQuery.valHooks[ this ].get = function( elem ) { - // Support: Webkit - // "" is returned instead of "on" if a value isn't specified - return elem.getAttribute("value") === null ? "on" : elem.value; - }; - } -}); -var rformElems = /^(?:input|select|textarea)$/i, - rkeyEvent = /^key/, - rmouseEvent = /^(?:mouse|contextmenu)|click/, - rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, - rtypenamespace = /^([^.]*)(?:\.(.+)|)$/; - -function returnTrue() { - return true; -} - -function returnFalse() { - return false; -} - -function safeActiveElement() { - try { - return document.activeElement; - } catch ( err ) { } -} - -/* - * Helper functions for managing events -- not part of the public interface. - * Props to Dean Edwards' addEvent library for many of the ideas. 
- */ -jQuery.event = { - - global: {}, - - add: function( elem, types, handler, data, selector ) { - var tmp, events, t, handleObjIn, - special, eventHandle, handleObj, - handlers, type, namespaces, origType, - elemData = jQuery._data( elem ); - - // Don't attach events to noData or text/comment nodes (but allow plain objects) - if ( !elemData ) { - return; - } - - // Caller can pass in an object of custom data in lieu of the handler - if ( handler.handler ) { - handleObjIn = handler; - handler = handleObjIn.handler; - selector = handleObjIn.selector; - } - - // Make sure that the handler has a unique ID, used to find/remove it later - if ( !handler.guid ) { - handler.guid = jQuery.guid++; - } - - // Init the element's event structure and main handler, if this is the first - if ( !(events = elemData.events) ) { - events = elemData.events = {}; - } - if ( !(eventHandle = elemData.handle) ) { - eventHandle = elemData.handle = function( e ) { - // Discard the second event of a jQuery.event.trigger() and - // when an event is called after a page has unloaded - return typeof jQuery !== core_strundefined && (!e || jQuery.event.triggered !== e.type) ? - jQuery.event.dispatch.apply( eventHandle.elem, arguments ) : - undefined; - }; - // Add elem as a property of the handle fn to prevent a memory leak with IE non-native events - eventHandle.elem = elem; - } - - // Handle multiple events separated by a space - types = ( types || "" ).match( core_rnotwhite ) || [""]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[t] ) || []; - type = origType = tmp[1]; - namespaces = ( tmp[2] || "" ).split( "." ).sort(); - - // There *must* be a type, no attaching namespace-only handlers - if ( !type ) { - continue; - } - - // If event changes its type, use the special event handlers for the changed type - special = jQuery.event.special[ type ] || {}; - - // If selector defined, determine special event api type, otherwise given type - type = ( selector ? 
special.delegateType : special.bindType ) || type; - - // Update special based on newly reset type - special = jQuery.event.special[ type ] || {}; - - // handleObj is passed to all event handlers - handleObj = jQuery.extend({ - type: type, - origType: origType, - data: data, - handler: handler, - guid: handler.guid, - selector: selector, - needsContext: selector && jQuery.expr.match.needsContext.test( selector ), - namespace: namespaces.join(".") - }, handleObjIn ); - - // Init the event handler queue if we're the first - if ( !(handlers = events[ type ]) ) { - handlers = events[ type ] = []; - handlers.delegateCount = 0; - - // Only use addEventListener/attachEvent if the special events handler returns false - if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { - // Bind the global event handler to the element - if ( elem.addEventListener ) { - elem.addEventListener( type, eventHandle, false ); - - } else if ( elem.attachEvent ) { - elem.attachEvent( "on" + type, eventHandle ); - } - } - } - - if ( special.add ) { - special.add.call( elem, handleObj ); - - if ( !handleObj.handler.guid ) { - handleObj.handler.guid = handler.guid; - } - } - - // Add to the element's handler list, delegates in front - if ( selector ) { - handlers.splice( handlers.delegateCount++, 0, handleObj ); - } else { - handlers.push( handleObj ); - } - - // Keep track of which events have ever been used, for event optimization - jQuery.event.global[ type ] = true; - } - - // Nullify elem to prevent memory leaks in IE - elem = null; - }, - - // Detach an event or set of events from an element - remove: function( elem, types, handler, selector, mappedTypes ) { - var j, handleObj, tmp, - origCount, t, events, - special, handlers, type, - namespaces, origType, - elemData = jQuery.hasData( elem ) && jQuery._data( elem ); - - if ( !elemData || !(events = elemData.events) ) { - return; - } - - // Once for each type.namespace in types; type may be omitted - 
types = ( types || "" ).match( core_rnotwhite ) || [""]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[t] ) || []; - type = origType = tmp[1]; - namespaces = ( tmp[2] || "" ).split( "." ).sort(); - - // Unbind all events (on this namespace, if provided) for the element - if ( !type ) { - for ( type in events ) { - jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); - } - continue; - } - - special = jQuery.event.special[ type ] || {}; - type = ( selector ? special.delegateType : special.bindType ) || type; - handlers = events[ type ] || []; - tmp = tmp[2] && new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" ); - - // Remove matching events - origCount = j = handlers.length; - while ( j-- ) { - handleObj = handlers[ j ]; - - if ( ( mappedTypes || origType === handleObj.origType ) && - ( !handler || handler.guid === handleObj.guid ) && - ( !tmp || tmp.test( handleObj.namespace ) ) && - ( !selector || selector === handleObj.selector || selector === "**" && handleObj.selector ) ) { - handlers.splice( j, 1 ); - - if ( handleObj.selector ) { - handlers.delegateCount--; - } - if ( special.remove ) { - special.remove.call( elem, handleObj ); - } - } - } - - // Remove generic event handler if we removed something and no more handlers exist - // (avoids potential for endless recursion during removal of special event handlers) - if ( origCount && !handlers.length ) { - if ( !special.teardown || special.teardown.call( elem, namespaces, elemData.handle ) === false ) { - jQuery.removeEvent( elem, type, elemData.handle ); - } - - delete events[ type ]; - } - } - - // Remove the expando if it's no longer used - if ( jQuery.isEmptyObject( events ) ) { - delete elemData.handle; - - // removeData also checks for emptiness and clears the expando if empty - // so use it instead of delete - jQuery._removeData( elem, "events" ); - } - }, - - trigger: function( event, data, elem, onlyHandlers ) { - var handle, ontype, cur, 
- bubbleType, special, tmp, i, - eventPath = [ elem || document ], - type = core_hasOwn.call( event, "type" ) ? event.type : event, - namespaces = core_hasOwn.call( event, "namespace" ) ? event.namespace.split(".") : []; - - cur = tmp = elem = elem || document; - - // Don't do events on text and comment nodes - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - // focus/blur morphs to focusin/out; ensure we're not firing them right now - if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { - return; - } - - if ( type.indexOf(".") >= 0 ) { - // Namespaced trigger; create a regexp to match event type in handle() - namespaces = type.split("."); - type = namespaces.shift(); - namespaces.sort(); - } - ontype = type.indexOf(":") < 0 && "on" + type; - - // Caller can pass in a jQuery.Event object, Object, or just an event type string - event = event[ jQuery.expando ] ? - event : - new jQuery.Event( type, typeof event === "object" && event ); - - // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) - event.isTrigger = onlyHandlers ? 2 : 3; - event.namespace = namespaces.join("."); - event.namespace_re = event.namespace ? - new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" ) : - null; - - // Clean up the event in case it is being reused - event.result = undefined; - if ( !event.target ) { - event.target = elem; - } - - // Clone any incoming data and prepend the event, creating the handler arg list - data = data == null ? 
- [ event ] : - jQuery.makeArray( data, [ event ] ); - - // Allow special events to draw outside the lines - special = jQuery.event.special[ type ] || {}; - if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { - return; - } - - // Determine event propagation path in advance, per W3C events spec (#9951) - // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) - if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) { - - bubbleType = special.delegateType || type; - if ( !rfocusMorph.test( bubbleType + type ) ) { - cur = cur.parentNode; - } - for ( ; cur; cur = cur.parentNode ) { - eventPath.push( cur ); - tmp = cur; - } - - // Only add window if we got to document (e.g., not plain obj or detached DOM) - if ( tmp === (elem.ownerDocument || document) ) { - eventPath.push( tmp.defaultView || tmp.parentWindow || window ); - } - } - - // Fire handlers on the event path - i = 0; - while ( (cur = eventPath[i++]) && !event.isPropagationStopped() ) { - - event.type = i > 1 ? - bubbleType : - special.bindType || type; - - // jQuery handler - handle = ( jQuery._data( cur, "events" ) || {} )[ event.type ] && jQuery._data( cur, "handle" ); - if ( handle ) { - handle.apply( cur, data ); - } - - // Native handler - handle = ontype && cur[ ontype ]; - if ( handle && jQuery.acceptData( cur ) && handle.apply && handle.apply( cur, data ) === false ) { - event.preventDefault(); - } - } - event.type = type; - - // If nobody prevented the default action, do it now - if ( !onlyHandlers && !event.isDefaultPrevented() ) { - - if ( (!special._default || special._default.apply( eventPath.pop(), data ) === false) && - jQuery.acceptData( elem ) ) { - - // Call a native DOM method on the target with the same name name as the event. - // Can't use an .isFunction() check here because IE6/7 fails that test. 
- // Don't do default actions on window, that's where global variables be (#6170) - if ( ontype && elem[ type ] && !jQuery.isWindow( elem ) ) { - - // Don't re-trigger an onFOO event when we call its FOO() method - tmp = elem[ ontype ]; - - if ( tmp ) { - elem[ ontype ] = null; - } - - // Prevent re-triggering of the same event, since we already bubbled it above - jQuery.event.triggered = type; - try { - elem[ type ](); - } catch ( e ) { - // IE<9 dies on focus/blur to hidden element (#1486,#12518) - // only reproducible on winXP IE8 native, not IE9 in IE8 mode - } - jQuery.event.triggered = undefined; - - if ( tmp ) { - elem[ ontype ] = tmp; - } - } - } - } - - return event.result; - }, - - dispatch: function( event ) { - - // Make a writable jQuery.Event from the native event object - event = jQuery.event.fix( event ); - - var i, ret, handleObj, matched, j, - handlerQueue = [], - args = core_slice.call( arguments ), - handlers = ( jQuery._data( this, "events" ) || {} )[ event.type ] || [], - special = jQuery.event.special[ event.type ] || {}; - - // Use the fix-ed jQuery.Event rather than the (read-only) native event - args[0] = event; - event.delegateTarget = this; - - // Call the preDispatch hook for the mapped type, and let it bail if desired - if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { - return; - } - - // Determine handlers - handlerQueue = jQuery.event.handlers.call( this, event, handlers ); - - // Run delegates first; they may want to stop propagation beneath us - i = 0; - while ( (matched = handlerQueue[ i++ ]) && !event.isPropagationStopped() ) { - event.currentTarget = matched.elem; - - j = 0; - while ( (handleObj = matched.handlers[ j++ ]) && !event.isImmediatePropagationStopped() ) { - - // Triggered event must either 1) have no namespace, or - // 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace). 
- if ( !event.namespace_re || event.namespace_re.test( handleObj.namespace ) ) { - - event.handleObj = handleObj; - event.data = handleObj.data; - - ret = ( (jQuery.event.special[ handleObj.origType ] || {}).handle || handleObj.handler ) - .apply( matched.elem, args ); - - if ( ret !== undefined ) { - if ( (event.result = ret) === false ) { - event.preventDefault(); - event.stopPropagation(); - } - } - } - } - } - - // Call the postDispatch hook for the mapped type - if ( special.postDispatch ) { - special.postDispatch.call( this, event ); - } - - return event.result; - }, - - handlers: function( event, handlers ) { - var sel, handleObj, matches, i, - handlerQueue = [], - delegateCount = handlers.delegateCount, - cur = event.target; - - // Find delegate handlers - // Black-hole SVG instance trees (#13180) - // Avoid non-left-click bubbling in Firefox (#3861) - if ( delegateCount && cur.nodeType && (!event.button || event.type !== "click") ) { - - /* jshint eqeqeq: false */ - for ( ; cur != this; cur = cur.parentNode || this ) { - /* jshint eqeqeq: true */ - - // Don't check non-elements (#13208) - // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) - if ( cur.nodeType === 1 && (cur.disabled !== true || event.type !== "click") ) { - matches = []; - for ( i = 0; i < delegateCount; i++ ) { - handleObj = handlers[ i ]; - - // Don't conflict with Object.prototype properties (#13203) - sel = handleObj.selector + " "; - - if ( matches[ sel ] === undefined ) { - matches[ sel ] = handleObj.needsContext ? 
- jQuery( sel, this ).index( cur ) >= 0 : - jQuery.find( sel, this, null, [ cur ] ).length; - } - if ( matches[ sel ] ) { - matches.push( handleObj ); - } - } - if ( matches.length ) { - handlerQueue.push({ elem: cur, handlers: matches }); - } - } - } - } - - // Add the remaining (directly-bound) handlers - if ( delegateCount < handlers.length ) { - handlerQueue.push({ elem: this, handlers: handlers.slice( delegateCount ) }); - } - - return handlerQueue; - }, - - fix: function( event ) { - if ( event[ jQuery.expando ] ) { - return event; - } - - // Create a writable copy of the event object and normalize some properties - var i, prop, copy, - type = event.type, - originalEvent = event, - fixHook = this.fixHooks[ type ]; - - if ( !fixHook ) { - this.fixHooks[ type ] = fixHook = - rmouseEvent.test( type ) ? this.mouseHooks : - rkeyEvent.test( type ) ? this.keyHooks : - {}; - } - copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props; - - event = new jQuery.Event( originalEvent ); - - i = copy.length; - while ( i-- ) { - prop = copy[ i ]; - event[ prop ] = originalEvent[ prop ]; - } - - // Support: IE<9 - // Fix target property (#1925) - if ( !event.target ) { - event.target = originalEvent.srcElement || document; - } - - // Support: Chrome 23+, Safari? - // Target should not be a text node (#504, #13143) - if ( event.target.nodeType === 3 ) { - event.target = event.target.parentNode; - } - - // Support: IE<9 - // For mouse/key events, metaKey==false if it's undefined (#3368, #11328) - event.metaKey = !!event.metaKey; - - return fixHook.filter ? 
fixHook.filter( event, originalEvent ) : event; - }, - - // Includes some event props shared by KeyEvent and MouseEvent - props: "altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "), - - fixHooks: {}, - - keyHooks: { - props: "char charCode key keyCode".split(" "), - filter: function( event, original ) { - - // Add which for key events - if ( event.which == null ) { - event.which = original.charCode != null ? original.charCode : original.keyCode; - } - - return event; - } - }, - - mouseHooks: { - props: "button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "), - filter: function( event, original ) { - var body, eventDoc, doc, - button = original.button, - fromElement = original.fromElement; - - // Calculate pageX/Y if missing and clientX/Y available - if ( event.pageX == null && original.clientX != null ) { - eventDoc = event.target.ownerDocument || document; - doc = eventDoc.documentElement; - body = eventDoc.body; - - event.pageX = original.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && body.clientLeft || 0 ); - event.pageY = original.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && body.clientTop || 0 ); - } - - // Add relatedTarget, if necessary - if ( !event.relatedTarget && fromElement ) { - event.relatedTarget = fromElement === event.target ? original.toElement : fromElement; - } - - // Add which for click: 1 === left; 2 === middle; 3 === right - // Note: button is not normalized, so don't use it - if ( !event.which && button !== undefined ) { - event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 
2 : 0 ) ) ); - } - - return event; - } - }, - - special: { - load: { - // Prevent triggered image.load events from bubbling to window.load - noBubble: true - }, - focus: { - // Fire native event if possible so blur/focus sequence is correct - trigger: function() { - if ( this !== safeActiveElement() && this.focus ) { - try { - this.focus(); - return false; - } catch ( e ) { - // Support: IE<9 - // If we error on focus to hidden element (#1486, #12518), - // let .trigger() run the handlers - } - } - }, - delegateType: "focusin" - }, - blur: { - trigger: function() { - if ( this === safeActiveElement() && this.blur ) { - this.blur(); - return false; - } - }, - delegateType: "focusout" - }, - click: { - // For checkbox, fire native event so checked state will be right - trigger: function() { - if ( jQuery.nodeName( this, "input" ) && this.type === "checkbox" && this.click ) { - this.click(); - return false; - } - }, - - // For cross-browser consistency, don't fire native .click() on links - _default: function( event ) { - return jQuery.nodeName( event.target, "a" ); - } - }, - - beforeunload: { - postDispatch: function( event ) { - - // Even when returnValue equals to undefined Firefox will still show alert - if ( event.result !== undefined ) { - event.originalEvent.returnValue = event.result; - } - } - } - }, - - simulate: function( type, elem, event, bubble ) { - // Piggyback on a donor event to simulate a different one. - // Fake originalEvent to avoid donor's stopPropagation, but if the - // simulated event prevents default then we do the same on the donor. - var e = jQuery.extend( - new jQuery.Event(), - event, - { - type: type, - isSimulated: true, - originalEvent: {} - } - ); - if ( bubble ) { - jQuery.event.trigger( e, null, elem ); - } else { - jQuery.event.dispatch.call( elem, e ); - } - if ( e.isDefaultPrevented() ) { - event.preventDefault(); - } - } -}; - -jQuery.removeEvent = document.removeEventListener ? 
- function( elem, type, handle ) { - if ( elem.removeEventListener ) { - elem.removeEventListener( type, handle, false ); - } - } : - function( elem, type, handle ) { - var name = "on" + type; - - if ( elem.detachEvent ) { - - // #8545, #7054, preventing memory leaks for custom events in IE6-8 - // detachEvent needed property on element, by name of that event, to properly expose it to GC - if ( typeof elem[ name ] === core_strundefined ) { - elem[ name ] = null; - } - - elem.detachEvent( name, handle ); - } - }; - -jQuery.Event = function( src, props ) { - // Allow instantiation without the 'new' keyword - if ( !(this instanceof jQuery.Event) ) { - return new jQuery.Event( src, props ); - } - - // Event object - if ( src && src.type ) { - this.originalEvent = src; - this.type = src.type; - - // Events bubbling up the document may have been marked as prevented - // by a handler lower down the tree; reflect the correct value. - this.isDefaultPrevented = ( src.defaultPrevented || src.returnValue === false || - src.getPreventDefault && src.getPreventDefault() ) ? 
returnTrue : returnFalse; - - // Event type - } else { - this.type = src; - } - - // Put explicitly provided properties onto the event object - if ( props ) { - jQuery.extend( this, props ); - } - - // Create a timestamp if incoming event doesn't have one - this.timeStamp = src && src.timeStamp || jQuery.now(); - - // Mark it as fixed - this[ jQuery.expando ] = true; -}; - -// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding -// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html -jQuery.Event.prototype = { - isDefaultPrevented: returnFalse, - isPropagationStopped: returnFalse, - isImmediatePropagationStopped: returnFalse, - - preventDefault: function() { - var e = this.originalEvent; - - this.isDefaultPrevented = returnTrue; - if ( !e ) { - return; - } - - // If preventDefault exists, run it on the original event - if ( e.preventDefault ) { - e.preventDefault(); - - // Support: IE - // Otherwise set the returnValue property of the original event to false - } else { - e.returnValue = false; - } - }, - stopPropagation: function() { - var e = this.originalEvent; - - this.isPropagationStopped = returnTrue; - if ( !e ) { - return; - } - // If stopPropagation exists, run it on the original event - if ( e.stopPropagation ) { - e.stopPropagation(); - } - - // Support: IE - // Set the cancelBubble property of the original event to true - e.cancelBubble = true; - }, - stopImmediatePropagation: function() { - this.isImmediatePropagationStopped = returnTrue; - this.stopPropagation(); - } -}; - -// Create mouseenter/leave events using mouseover/out and event-time checks -jQuery.each({ - mouseenter: "mouseover", - mouseleave: "mouseout" -}, function( orig, fix ) { - jQuery.event.special[ orig ] = { - delegateType: fix, - bindType: fix, - - handle: function( event ) { - var ret, - target = this, - related = event.relatedTarget, - handleObj = event.handleObj; - - // For mousenter/leave call the handler if related 
is outside the target. - // NB: No relatedTarget if the mouse left/entered the browser window - if ( !related || (related !== target && !jQuery.contains( target, related )) ) { - event.type = handleObj.origType; - ret = handleObj.handler.apply( this, arguments ); - event.type = fix; - } - return ret; - } - }; -}); - -// IE submit delegation -if ( !jQuery.support.submitBubbles ) { - - jQuery.event.special.submit = { - setup: function() { - // Only need this for delegated form submit events - if ( jQuery.nodeName( this, "form" ) ) { - return false; - } - - // Lazy-add a submit handler when a descendant form may potentially be submitted - jQuery.event.add( this, "click._submit keypress._submit", function( e ) { - // Node name check avoids a VML-related crash in IE (#9807) - var elem = e.target, - form = jQuery.nodeName( elem, "input" ) || jQuery.nodeName( elem, "button" ) ? elem.form : undefined; - if ( form && !jQuery._data( form, "submitBubbles" ) ) { - jQuery.event.add( form, "submit._submit", function( event ) { - event._submit_bubble = true; - }); - jQuery._data( form, "submitBubbles", true ); - } - }); - // return undefined since we don't need an event listener - }, - - postDispatch: function( event ) { - // If form was submitted by the user, bubble the event up the tree - if ( event._submit_bubble ) { - delete event._submit_bubble; - if ( this.parentNode && !event.isTrigger ) { - jQuery.event.simulate( "submit", this.parentNode, event, true ); - } - } - }, - - teardown: function() { - // Only need this for delegated form submit events - if ( jQuery.nodeName( this, "form" ) ) { - return false; - } - - // Remove delegated handlers; cleanData eventually reaps submit handlers attached above - jQuery.event.remove( this, "._submit" ); - } - }; -} - -// IE change delegation and checkbox/radio fix -if ( !jQuery.support.changeBubbles ) { - - jQuery.event.special.change = { - - setup: function() { - - if ( rformElems.test( this.nodeName ) ) { - // IE doesn't fire change 
on a check/radio until blur; trigger it on click - // after a propertychange. Eat the blur-change in special.change.handle. - // This still fires onchange a second time for check/radio after blur. - if ( this.type === "checkbox" || this.type === "radio" ) { - jQuery.event.add( this, "propertychange._change", function( event ) { - if ( event.originalEvent.propertyName === "checked" ) { - this._just_changed = true; - } - }); - jQuery.event.add( this, "click._change", function( event ) { - if ( this._just_changed && !event.isTrigger ) { - this._just_changed = false; - } - // Allow triggered, simulated change events (#11500) - jQuery.event.simulate( "change", this, event, true ); - }); - } - return false; - } - // Delegated event; lazy-add a change handler on descendant inputs - jQuery.event.add( this, "beforeactivate._change", function( e ) { - var elem = e.target; - - if ( rformElems.test( elem.nodeName ) && !jQuery._data( elem, "changeBubbles" ) ) { - jQuery.event.add( elem, "change._change", function( event ) { - if ( this.parentNode && !event.isSimulated && !event.isTrigger ) { - jQuery.event.simulate( "change", this.parentNode, event, true ); - } - }); - jQuery._data( elem, "changeBubbles", true ); - } - }); - }, - - handle: function( event ) { - var elem = event.target; - - // Swallow native change events from checkbox/radio, we already triggered them above - if ( this !== elem || event.isSimulated || event.isTrigger || (elem.type !== "radio" && elem.type !== "checkbox") ) { - return event.handleObj.handler.apply( this, arguments ); - } - }, - - teardown: function() { - jQuery.event.remove( this, "._change" ); - - return !rformElems.test( this.nodeName ); - } - }; -} - -// Create "bubbling" focus and blur events -if ( !jQuery.support.focusinBubbles ) { - jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) { - - // Attach a single capturing handler while someone wants focusin/focusout - var attaches = 0, - handler = function( event ) { - 
jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ), true ); - }; - - jQuery.event.special[ fix ] = { - setup: function() { - if ( attaches++ === 0 ) { - document.addEventListener( orig, handler, true ); - } - }, - teardown: function() { - if ( --attaches === 0 ) { - document.removeEventListener( orig, handler, true ); - } - } - }; - }); -} - -jQuery.fn.extend({ - - on: function( types, selector, data, fn, /*INTERNAL*/ one ) { - var type, origFn; - - // Types can be a map of types/handlers - if ( typeof types === "object" ) { - // ( types-Object, selector, data ) - if ( typeof selector !== "string" ) { - // ( types-Object, data ) - data = data || selector; - selector = undefined; - } - for ( type in types ) { - this.on( type, selector, data, types[ type ], one ); - } - return this; - } - - if ( data == null && fn == null ) { - // ( types, fn ) - fn = selector; - data = selector = undefined; - } else if ( fn == null ) { - if ( typeof selector === "string" ) { - // ( types, selector, fn ) - fn = data; - data = undefined; - } else { - // ( types, data, fn ) - fn = data; - data = selector; - selector = undefined; - } - } - if ( fn === false ) { - fn = returnFalse; - } else if ( !fn ) { - return this; - } - - if ( one === 1 ) { - origFn = fn; - fn = function( event ) { - // Can use an empty set, since event contains the info - jQuery().off( event ); - return origFn.apply( this, arguments ); - }; - // Use same guid so caller can remove using origFn - fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); - } - return this.each( function() { - jQuery.event.add( this, types, fn, data, selector ); - }); - }, - one: function( types, selector, data, fn ) { - return this.on( types, selector, data, fn, 1 ); - }, - off: function( types, selector, fn ) { - var handleObj, type; - if ( types && types.preventDefault && types.handleObj ) { - // ( event ) dispatched jQuery.Event - handleObj = types.handleObj; - jQuery( types.delegateTarget ).off( - 
handleObj.namespace ? handleObj.origType + "." + handleObj.namespace : handleObj.origType, - handleObj.selector, - handleObj.handler - ); - return this; - } - if ( typeof types === "object" ) { - // ( types-object [, selector] ) - for ( type in types ) { - this.off( type, selector, types[ type ] ); - } - return this; - } - if ( selector === false || typeof selector === "function" ) { - // ( types [, fn] ) - fn = selector; - selector = undefined; - } - if ( fn === false ) { - fn = returnFalse; - } - return this.each(function() { - jQuery.event.remove( this, types, fn, selector ); - }); - }, - - trigger: function( type, data ) { - return this.each(function() { - jQuery.event.trigger( type, data, this ); - }); - }, - triggerHandler: function( type, data ) { - var elem = this[0]; - if ( elem ) { - return jQuery.event.trigger( type, data, elem, true ); - } - } -}); -var isSimple = /^.[^:#\[\.,]*$/, - rparentsprev = /^(?:parents|prev(?:Until|All))/, - rneedsContext = jQuery.expr.match.needsContext, - // methods guaranteed to produce a unique set when starting from a unique set - guaranteedUnique = { - children: true, - contents: true, - next: true, - prev: true - }; - -jQuery.fn.extend({ - find: function( selector ) { - var i, - ret = [], - self = this, - len = self.length; - - if ( typeof selector !== "string" ) { - return this.pushStack( jQuery( selector ).filter(function() { - for ( i = 0; i < len; i++ ) { - if ( jQuery.contains( self[ i ], this ) ) { - return true; - } - } - }) ); - } - - for ( i = 0; i < len; i++ ) { - jQuery.find( selector, self[ i ], ret ); - } - - // Needed because $( selector, context ) becomes $( context ).find( selector ) - ret = this.pushStack( len > 1 ? jQuery.unique( ret ) : ret ); - ret.selector = this.selector ? 
this.selector + " " + selector : selector; - return ret; - }, - - has: function( target ) { - var i, - targets = jQuery( target, this ), - len = targets.length; - - return this.filter(function() { - for ( i = 0; i < len; i++ ) { - if ( jQuery.contains( this, targets[i] ) ) { - return true; - } - } - }); - }, - - not: function( selector ) { - return this.pushStack( winnow(this, selector || [], true) ); - }, - - filter: function( selector ) { - return this.pushStack( winnow(this, selector || [], false) ); - }, - - is: function( selector ) { - return !!winnow( - this, - - // If this is a positional/relative selector, check membership in the returned set - // so $("p:first").is("p:last") won't return true for a doc with two "p". - typeof selector === "string" && rneedsContext.test( selector ) ? - jQuery( selector ) : - selector || [], - false - ).length; - }, - - closest: function( selectors, context ) { - var cur, - i = 0, - l = this.length, - ret = [], - pos = rneedsContext.test( selectors ) || typeof selectors !== "string" ? - jQuery( selectors, context || this.context ) : - 0; - - for ( ; i < l; i++ ) { - for ( cur = this[i]; cur && cur !== context; cur = cur.parentNode ) { - // Always skip document fragments - if ( cur.nodeType < 11 && (pos ? - pos.index(cur) > -1 : - - // Don't pass non-elements to Sizzle - cur.nodeType === 1 && - jQuery.find.matchesSelector(cur, selectors)) ) { - - cur = ret.push( cur ); - break; - } - } - } - - return this.pushStack( ret.length > 1 ? jQuery.unique( ret ) : ret ); - }, - - // Determine the position of an element within - // the matched set of elements - index: function( elem ) { - - // No argument, return index in parent - if ( !elem ) { - return ( this[0] && this[0].parentNode ) ? 
this.first().prevAll().length : -1; - } - - // index in selector - if ( typeof elem === "string" ) { - return jQuery.inArray( this[0], jQuery( elem ) ); - } - - // Locate the position of the desired element - return jQuery.inArray( - // If it receives a jQuery object, the first element is used - elem.jquery ? elem[0] : elem, this ); - }, - - add: function( selector, context ) { - var set = typeof selector === "string" ? - jQuery( selector, context ) : - jQuery.makeArray( selector && selector.nodeType ? [ selector ] : selector ), - all = jQuery.merge( this.get(), set ); - - return this.pushStack( jQuery.unique(all) ); - }, - - addBack: function( selector ) { - return this.add( selector == null ? - this.prevObject : this.prevObject.filter(selector) - ); - } -}); - -function sibling( cur, dir ) { - do { - cur = cur[ dir ]; - } while ( cur && cur.nodeType !== 1 ); - - return cur; -} - -jQuery.each({ - parent: function( elem ) { - var parent = elem.parentNode; - return parent && parent.nodeType !== 11 ? parent : null; - }, - parents: function( elem ) { - return jQuery.dir( elem, "parentNode" ); - }, - parentsUntil: function( elem, i, until ) { - return jQuery.dir( elem, "parentNode", until ); - }, - next: function( elem ) { - return sibling( elem, "nextSibling" ); - }, - prev: function( elem ) { - return sibling( elem, "previousSibling" ); - }, - nextAll: function( elem ) { - return jQuery.dir( elem, "nextSibling" ); - }, - prevAll: function( elem ) { - return jQuery.dir( elem, "previousSibling" ); - }, - nextUntil: function( elem, i, until ) { - return jQuery.dir( elem, "nextSibling", until ); - }, - prevUntil: function( elem, i, until ) { - return jQuery.dir( elem, "previousSibling", until ); - }, - siblings: function( elem ) { - return jQuery.sibling( ( elem.parentNode || {} ).firstChild, elem ); - }, - children: function( elem ) { - return jQuery.sibling( elem.firstChild ); - }, - contents: function( elem ) { - return jQuery.nodeName( elem, "iframe" ) ? 
- elem.contentDocument || elem.contentWindow.document : - jQuery.merge( [], elem.childNodes ); - } -}, function( name, fn ) { - jQuery.fn[ name ] = function( until, selector ) { - var ret = jQuery.map( this, fn, until ); - - if ( name.slice( -5 ) !== "Until" ) { - selector = until; - } - - if ( selector && typeof selector === "string" ) { - ret = jQuery.filter( selector, ret ); - } - - if ( this.length > 1 ) { - // Remove duplicates - if ( !guaranteedUnique[ name ] ) { - ret = jQuery.unique( ret ); - } - - // Reverse order for parents* and prev-derivatives - if ( rparentsprev.test( name ) ) { - ret = ret.reverse(); - } - } - - return this.pushStack( ret ); - }; -}); - -jQuery.extend({ - filter: function( expr, elems, not ) { - var elem = elems[ 0 ]; - - if ( not ) { - expr = ":not(" + expr + ")"; - } - - return elems.length === 1 && elem.nodeType === 1 ? - jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : [] : - jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { - return elem.nodeType === 1; - })); - }, - - dir: function( elem, dir, until ) { - var matched = [], - cur = elem[ dir ]; - - while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) { - if ( cur.nodeType === 1 ) { - matched.push( cur ); - } - cur = cur[dir]; - } - return matched; - }, - - sibling: function( n, elem ) { - var r = []; - - for ( ; n; n = n.nextSibling ) { - if ( n.nodeType === 1 && n !== elem ) { - r.push( n ); - } - } - - return r; - } -}); - -// Implement the identical functionality for filter and not -function winnow( elements, qualifier, not ) { - if ( jQuery.isFunction( qualifier ) ) { - return jQuery.grep( elements, function( elem, i ) { - /* jshint -W018 */ - return !!qualifier.call( elem, i, elem ) !== not; - }); - - } - - if ( qualifier.nodeType ) { - return jQuery.grep( elements, function( elem ) { - return ( elem === qualifier ) !== not; - }); - - } - - if ( typeof qualifier === "string" ) { - if ( 
isSimple.test( qualifier ) ) { - return jQuery.filter( qualifier, elements, not ); - } - - qualifier = jQuery.filter( qualifier, elements ); - } - - return jQuery.grep( elements, function( elem ) { - return ( jQuery.inArray( elem, qualifier ) >= 0 ) !== not; - }); -} -function createSafeFragment( document ) { - var list = nodeNames.split( "|" ), - safeFrag = document.createDocumentFragment(); - - if ( safeFrag.createElement ) { - while ( list.length ) { - safeFrag.createElement( - list.pop() - ); - } - } - return safeFrag; -} - -var nodeNames = "abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|" + - "header|hgroup|mark|meter|nav|output|progress|section|summary|time|video", - rinlinejQuery = / jQuery\d+="(?:null|\d+)"/g, - rnoshimcache = new RegExp("<(?:" + nodeNames + ")[\\s/>]", "i"), - rleadingWhitespace = /^\s+/, - rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi, - rtagName = /<([\w:]+)/, - rtbody = /\s*$/g, - - // We have to close these tags to support XHTML (#13200) - wrapMap = { - option: [ 1, "" ], - legend: [ 1, "
    ", "
    " ], - area: [ 1, "", "" ], - param: [ 1, "", "" ], - thead: [ 1, "", "
    " ], - tr: [ 2, "", "
    " ], - col: [ 2, "", "
    " ], - td: [ 3, "", "
    " ], - - // IE6-8 can't serialize link, script, style, or any html5 (NoScope) tags, - // unless wrapped in a div with non-breaking characters in front of it. - _default: jQuery.support.htmlSerialize ? [ 0, "", "" ] : [ 1, "X
    ", "
    " ] - }, - safeFragment = createSafeFragment( document ), - fragmentDiv = safeFragment.appendChild( document.createElement("div") ); - -wrapMap.optgroup = wrapMap.option; -wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; -wrapMap.th = wrapMap.td; - -jQuery.fn.extend({ - text: function( value ) { - return jQuery.access( this, function( value ) { - return value === undefined ? - jQuery.text( this ) : - this.empty().append( ( this[0] && this[0].ownerDocument || document ).createTextNode( value ) ); - }, null, value, arguments.length ); - }, - - append: function() { - return this.domManip( arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.appendChild( elem ); - } - }); - }, - - prepend: function() { - return this.domManip( arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.insertBefore( elem, target.firstChild ); - } - }); - }, - - before: function() { - return this.domManip( arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this ); - } - }); - }, - - after: function() { - return this.domManip( arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this.nextSibling ); - } - }); - }, - - // keepData is for internal use only--do not document - remove: function( selector, keepData ) { - var elem, - elems = selector ? 
jQuery.filter( selector, this ) : this, - i = 0; - - for ( ; (elem = elems[i]) != null; i++ ) { - - if ( !keepData && elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem ) ); - } - - if ( elem.parentNode ) { - if ( keepData && jQuery.contains( elem.ownerDocument, elem ) ) { - setGlobalEval( getAll( elem, "script" ) ); - } - elem.parentNode.removeChild( elem ); - } - } - - return this; - }, - - empty: function() { - var elem, - i = 0; - - for ( ; (elem = this[i]) != null; i++ ) { - // Remove element nodes and prevent memory leaks - if ( elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem, false ) ); - } - - // Remove any remaining nodes - while ( elem.firstChild ) { - elem.removeChild( elem.firstChild ); - } - - // If this is a select, ensure that it displays empty (#12336) - // Support: IE<9 - if ( elem.options && jQuery.nodeName( elem, "select" ) ) { - elem.options.length = 0; - } - } - - return this; - }, - - clone: function( dataAndEvents, deepDataAndEvents ) { - dataAndEvents = dataAndEvents == null ? false : dataAndEvents; - deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; - - return this.map( function () { - return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); - }); - }, - - html: function( value ) { - return jQuery.access( this, function( value ) { - var elem = this[0] || {}, - i = 0, - l = this.length; - - if ( value === undefined ) { - return elem.nodeType === 1 ? 
- elem.innerHTML.replace( rinlinejQuery, "" ) : - undefined; - } - - // See if we can take a shortcut and just use innerHTML - if ( typeof value === "string" && !rnoInnerhtml.test( value ) && - ( jQuery.support.htmlSerialize || !rnoshimcache.test( value ) ) && - ( jQuery.support.leadingWhitespace || !rleadingWhitespace.test( value ) ) && - !wrapMap[ ( rtagName.exec( value ) || ["", ""] )[1].toLowerCase() ] ) { - - value = value.replace( rxhtmlTag, "<$1>" ); - - try { - for (; i < l; i++ ) { - // Remove element nodes and prevent memory leaks - elem = this[i] || {}; - if ( elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem, false ) ); - elem.innerHTML = value; - } - } - - elem = 0; - - // If using innerHTML throws an exception, use the fallback method - } catch(e) {} - } - - if ( elem ) { - this.empty().append( value ); - } - }, null, value, arguments.length ); - }, - - replaceWith: function() { - var - // Snapshot the DOM in case .domManip sweeps something relevant into its fragment - args = jQuery.map( this, function( elem ) { - return [ elem.nextSibling, elem.parentNode ]; - }), - i = 0; - - // Make the changes, replacing each context element with the new content - this.domManip( arguments, function( elem ) { - var next = args[ i++ ], - parent = args[ i++ ]; - - if ( parent ) { - // Don't use the snapshot next if it has moved (#13810) - if ( next && next.parentNode !== parent ) { - next = this.nextSibling; - } - jQuery( this ).remove(); - parent.insertBefore( elem, next ); - } - // Allow new content to include elements from the context set - }, true ); - - // Force removal if there was no new content (e.g., from empty arguments) - return i ? 
this : this.remove(); - }, - - detach: function( selector ) { - return this.remove( selector, true ); - }, - - domManip: function( args, callback, allowIntersection ) { - - // Flatten any nested arrays - args = core_concat.apply( [], args ); - - var first, node, hasScripts, - scripts, doc, fragment, - i = 0, - l = this.length, - set = this, - iNoClone = l - 1, - value = args[0], - isFunction = jQuery.isFunction( value ); - - // We can't cloneNode fragments that contain checked, in WebKit - if ( isFunction || !( l <= 1 || typeof value !== "string" || jQuery.support.checkClone || !rchecked.test( value ) ) ) { - return this.each(function( index ) { - var self = set.eq( index ); - if ( isFunction ) { - args[0] = value.call( this, index, self.html() ); - } - self.domManip( args, callback, allowIntersection ); - }); - } - - if ( l ) { - fragment = jQuery.buildFragment( args, this[ 0 ].ownerDocument, false, !allowIntersection && this ); - first = fragment.firstChild; - - if ( fragment.childNodes.length === 1 ) { - fragment = first; - } - - if ( first ) { - scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); - hasScripts = scripts.length; - - // Use the original fragment for the last item instead of the first because it can end up - // being emptied incorrectly in certain situations (#8070). 
- for ( ; i < l; i++ ) { - node = fragment; - - if ( i !== iNoClone ) { - node = jQuery.clone( node, true, true ); - - // Keep references to cloned scripts for later restoration - if ( hasScripts ) { - jQuery.merge( scripts, getAll( node, "script" ) ); - } - } - - callback.call( this[i], node, i ); - } - - if ( hasScripts ) { - doc = scripts[ scripts.length - 1 ].ownerDocument; - - // Reenable scripts - jQuery.map( scripts, restoreScript ); - - // Evaluate executable scripts on first document insertion - for ( i = 0; i < hasScripts; i++ ) { - node = scripts[ i ]; - if ( rscriptType.test( node.type || "" ) && - !jQuery._data( node, "globalEval" ) && jQuery.contains( doc, node ) ) { - - if ( node.src ) { - // Hope ajax is available... - jQuery._evalUrl( node.src ); - } else { - jQuery.globalEval( ( node.text || node.textContent || node.innerHTML || "" ).replace( rcleanScript, "" ) ); - } - } - } - } - - // Fix #11809: Avoid leaking memory - fragment = first = null; - } - } - - return this; - } -}); - -// Support: IE<8 -// Manipulating tables requires a tbody -function manipulationTarget( elem, content ) { - return jQuery.nodeName( elem, "table" ) && - jQuery.nodeName( content.nodeType === 1 ? content : content.firstChild, "tr" ) ? 
- - elem.getElementsByTagName("tbody")[0] || - elem.appendChild( elem.ownerDocument.createElement("tbody") ) : - elem; -} - -// Replace/restore the type attribute of script elements for safe DOM manipulation -function disableScript( elem ) { - elem.type = (jQuery.find.attr( elem, "type" ) !== null) + "/" + elem.type; - return elem; -} -function restoreScript( elem ) { - var match = rscriptTypeMasked.exec( elem.type ); - if ( match ) { - elem.type = match[1]; - } else { - elem.removeAttribute("type"); - } - return elem; -} - -// Mark scripts as having already been evaluated -function setGlobalEval( elems, refElements ) { - var elem, - i = 0; - for ( ; (elem = elems[i]) != null; i++ ) { - jQuery._data( elem, "globalEval", !refElements || jQuery._data( refElements[i], "globalEval" ) ); - } -} - -function cloneCopyEvent( src, dest ) { - - if ( dest.nodeType !== 1 || !jQuery.hasData( src ) ) { - return; - } - - var type, i, l, - oldData = jQuery._data( src ), - curData = jQuery._data( dest, oldData ), - events = oldData.events; - - if ( events ) { - delete curData.handle; - curData.events = {}; - - for ( type in events ) { - for ( i = 0, l = events[ type ].length; i < l; i++ ) { - jQuery.event.add( dest, type, events[ type ][ i ] ); - } - } - } - - // make the cloned public data object a copy from the original - if ( curData.data ) { - curData.data = jQuery.extend( {}, curData.data ); - } -} - -function fixCloneNodeIssues( src, dest ) { - var nodeName, e, data; - - // We do not need to do anything for non-Elements - if ( dest.nodeType !== 1 ) { - return; - } - - nodeName = dest.nodeName.toLowerCase(); - - // IE6-8 copies events bound via attachEvent when using cloneNode. 
- if ( !jQuery.support.noCloneEvent && dest[ jQuery.expando ] ) { - data = jQuery._data( dest ); - - for ( e in data.events ) { - jQuery.removeEvent( dest, e, data.handle ); - } - - // Event data gets referenced instead of copied if the expando gets copied too - dest.removeAttribute( jQuery.expando ); - } - - // IE blanks contents when cloning scripts, and tries to evaluate newly-set text - if ( nodeName === "script" && dest.text !== src.text ) { - disableScript( dest ).text = src.text; - restoreScript( dest ); - - // IE6-10 improperly clones children of object elements using classid. - // IE10 throws NoModificationAllowedError if parent is null, #12132. - } else if ( nodeName === "object" ) { - if ( dest.parentNode ) { - dest.outerHTML = src.outerHTML; - } - - // This path appears unavoidable for IE9. When cloning an object - // element in IE9, the outerHTML strategy above is not sufficient. - // If the src has innerHTML and the destination does not, - // copy the src.innerHTML into the dest.innerHTML. #10324 - if ( jQuery.support.html5Clone && ( src.innerHTML && !jQuery.trim(dest.innerHTML) ) ) { - dest.innerHTML = src.innerHTML; - } - - } else if ( nodeName === "input" && manipulation_rcheckableType.test( src.type ) ) { - // IE6-8 fails to persist the checked state of a cloned checkbox - // or radio button. 
Worse, IE6-7 fail to give the cloned element - // a checked appearance if the defaultChecked value isn't also set - - dest.defaultChecked = dest.checked = src.checked; - - // IE6-7 get confused and end up setting the value of a cloned - // checkbox/radio button to an empty string instead of "on" - if ( dest.value !== src.value ) { - dest.value = src.value; - } - - // IE6-8 fails to return the selected option to the default selected - // state when cloning options - } else if ( nodeName === "option" ) { - dest.defaultSelected = dest.selected = src.defaultSelected; - - // IE6-8 fails to set the defaultValue to the correct value when - // cloning other types of input fields - } else if ( nodeName === "input" || nodeName === "textarea" ) { - dest.defaultValue = src.defaultValue; - } -} - -jQuery.each({ - appendTo: "append", - prependTo: "prepend", - insertBefore: "before", - insertAfter: "after", - replaceAll: "replaceWith" -}, function( name, original ) { - jQuery.fn[ name ] = function( selector ) { - var elems, - i = 0, - ret = [], - insert = jQuery( selector ), - last = insert.length - 1; - - for ( ; i <= last; i++ ) { - elems = i === last ? this : this.clone(true); - jQuery( insert[i] )[ original ]( elems ); - - // Modern browsers can apply jQuery collections as arrays, but oldIE needs a .get() - core_push.apply( ret, elems.get() ); - } - - return this.pushStack( ret ); - }; -}); - -function getAll( context, tag ) { - var elems, elem, - i = 0, - found = typeof context.getElementsByTagName !== core_strundefined ? context.getElementsByTagName( tag || "*" ) : - typeof context.querySelectorAll !== core_strundefined ? 
context.querySelectorAll( tag || "*" ) : - undefined; - - if ( !found ) { - for ( found = [], elems = context.childNodes || context; (elem = elems[i]) != null; i++ ) { - if ( !tag || jQuery.nodeName( elem, tag ) ) { - found.push( elem ); - } else { - jQuery.merge( found, getAll( elem, tag ) ); - } - } - } - - return tag === undefined || tag && jQuery.nodeName( context, tag ) ? - jQuery.merge( [ context ], found ) : - found; -} - -// Used in buildFragment, fixes the defaultChecked property -function fixDefaultChecked( elem ) { - if ( manipulation_rcheckableType.test( elem.type ) ) { - elem.defaultChecked = elem.checked; - } -} - -jQuery.extend({ - clone: function( elem, dataAndEvents, deepDataAndEvents ) { - var destElements, node, clone, i, srcElements, - inPage = jQuery.contains( elem.ownerDocument, elem ); - - if ( jQuery.support.html5Clone || jQuery.isXMLDoc(elem) || !rnoshimcache.test( "<" + elem.nodeName + ">" ) ) { - clone = elem.cloneNode( true ); - - // IE<=8 does not properly clone detached, unknown element nodes - } else { - fragmentDiv.innerHTML = elem.outerHTML; - fragmentDiv.removeChild( clone = fragmentDiv.firstChild ); - } - - if ( (!jQuery.support.noCloneEvent || !jQuery.support.noCloneChecked) && - (elem.nodeType === 1 || elem.nodeType === 11) && !jQuery.isXMLDoc(elem) ) { - - // We eschew Sizzle here for performance reasons: http://jsperf.com/getall-vs-sizzle/2 - destElements = getAll( clone ); - srcElements = getAll( elem ); - - // Fix all IE cloning issues - for ( i = 0; (node = srcElements[i]) != null; ++i ) { - // Ensure that the destination node is not null; Fixes #9587 - if ( destElements[i] ) { - fixCloneNodeIssues( node, destElements[i] ); - } - } - } - - // Copy the events from the original to the clone - if ( dataAndEvents ) { - if ( deepDataAndEvents ) { - srcElements = srcElements || getAll( elem ); - destElements = destElements || getAll( clone ); - - for ( i = 0; (node = srcElements[i]) != null; i++ ) { - cloneCopyEvent( node, 
destElements[i] ); - } - } else { - cloneCopyEvent( elem, clone ); - } - } - - // Preserve script evaluation history - destElements = getAll( clone, "script" ); - if ( destElements.length > 0 ) { - setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); - } - - destElements = srcElements = node = null; - - // Return the cloned set - return clone; - }, - - buildFragment: function( elems, context, scripts, selection ) { - var j, elem, contains, - tmp, tag, tbody, wrap, - l = elems.length, - - // Ensure a safe fragment - safe = createSafeFragment( context ), - - nodes = [], - i = 0; - - for ( ; i < l; i++ ) { - elem = elems[ i ]; - - if ( elem || elem === 0 ) { - - // Add nodes directly - if ( jQuery.type( elem ) === "object" ) { - jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem ); - - // Convert non-html into a text node - } else if ( !rhtml.test( elem ) ) { - nodes.push( context.createTextNode( elem ) ); - - // Convert html into DOM nodes - } else { - tmp = tmp || safe.appendChild( context.createElement("div") ); - - // Deserialize a standard representation - tag = ( rtagName.exec( elem ) || ["", ""] )[1].toLowerCase(); - wrap = wrapMap[ tag ] || wrapMap._default; - - tmp.innerHTML = wrap[1] + elem.replace( rxhtmlTag, "<$1>" ) + wrap[2]; - - // Descend through wrappers to the right content - j = wrap[0]; - while ( j-- ) { - tmp = tmp.lastChild; - } - - // Manually add leading whitespace removed by IE - if ( !jQuery.support.leadingWhitespace && rleadingWhitespace.test( elem ) ) { - nodes.push( context.createTextNode( rleadingWhitespace.exec( elem )[0] ) ); - } - - // Remove IE's autoinserted from table fragments - if ( !jQuery.support.tbody ) { - - // String was a , *may* have spurious - elem = tag === "table" && !rtbody.test( elem ) ? - tmp.firstChild : - - // String was a bare or - wrap[1] === "
    " && !rtbody.test( elem ) ? - tmp : - 0; - - j = elem && elem.childNodes.length; - while ( j-- ) { - if ( jQuery.nodeName( (tbody = elem.childNodes[j]), "tbody" ) && !tbody.childNodes.length ) { - elem.removeChild( tbody ); - } - } - } - - jQuery.merge( nodes, tmp.childNodes ); - - // Fix #12392 for WebKit and IE > 9 - tmp.textContent = ""; - - // Fix #12392 for oldIE - while ( tmp.firstChild ) { - tmp.removeChild( tmp.firstChild ); - } - - // Remember the top-level container for proper cleanup - tmp = safe.lastChild; - } - } - } - - // Fix #11356: Clear elements from fragment - if ( tmp ) { - safe.removeChild( tmp ); - } - - // Reset defaultChecked for any radios and checkboxes - // about to be appended to the DOM in IE 6/7 (#8060) - if ( !jQuery.support.appendChecked ) { - jQuery.grep( getAll( nodes, "input" ), fixDefaultChecked ); - } - - i = 0; - while ( (elem = nodes[ i++ ]) ) { - - // #4087 - If origin and destination elements are the same, and this is - // that element, do not do anything - if ( selection && jQuery.inArray( elem, selection ) !== -1 ) { - continue; - } - - contains = jQuery.contains( elem.ownerDocument, elem ); - - // Append to fragment - tmp = getAll( safe.appendChild( elem ), "script" ); - - // Preserve script evaluation history - if ( contains ) { - setGlobalEval( tmp ); - } - - // Capture executables - if ( scripts ) { - j = 0; - while ( (elem = tmp[ j++ ]) ) { - if ( rscriptType.test( elem.type || "" ) ) { - scripts.push( elem ); - } - } - } - } - - tmp = null; - - return safe; - }, - - cleanData: function( elems, /* internal */ acceptData ) { - var elem, type, id, data, - i = 0, - internalKey = jQuery.expando, - cache = jQuery.cache, - deleteExpando = jQuery.support.deleteExpando, - special = jQuery.event.special; - - for ( ; (elem = elems[i]) != null; i++ ) { - - if ( acceptData || jQuery.acceptData( elem ) ) { - - id = elem[ internalKey ]; - data = id && cache[ id ]; - - if ( data ) { - if ( data.events ) { - for ( type in 
data.events ) { - if ( special[ type ] ) { - jQuery.event.remove( elem, type ); - - // This is a shortcut to avoid jQuery.event.remove's overhead - } else { - jQuery.removeEvent( elem, type, data.handle ); - } - } - } - - // Remove cache only if it was not already removed by jQuery.event.remove - if ( cache[ id ] ) { - - delete cache[ id ]; - - // IE does not allow us to delete expando properties from nodes, - // nor does it have a removeAttribute function on Document nodes; - // we must handle all of these cases - if ( deleteExpando ) { - delete elem[ internalKey ]; - - } else if ( typeof elem.removeAttribute !== core_strundefined ) { - elem.removeAttribute( internalKey ); - - } else { - elem[ internalKey ] = null; - } - - core_deletedIds.push( id ); - } - } - } - } - }, - - _evalUrl: function( url ) { - return jQuery.ajax({ - url: url, - type: "GET", - dataType: "script", - async: false, - global: false, - "throws": true - }); - } -}); -jQuery.fn.extend({ - wrapAll: function( html ) { - if ( jQuery.isFunction( html ) ) { - return this.each(function(i) { - jQuery(this).wrapAll( html.call(this, i) ); - }); - } - - if ( this[0] ) { - // The elements to wrap the target around - var wrap = jQuery( html, this[0].ownerDocument ).eq(0).clone(true); - - if ( this[0].parentNode ) { - wrap.insertBefore( this[0] ); - } - - wrap.map(function() { - var elem = this; - - while ( elem.firstChild && elem.firstChild.nodeType === 1 ) { - elem = elem.firstChild; - } - - return elem; - }).append( this ); - } - - return this; - }, - - wrapInner: function( html ) { - if ( jQuery.isFunction( html ) ) { - return this.each(function(i) { - jQuery(this).wrapInner( html.call(this, i) ); - }); - } - - return this.each(function() { - var self = jQuery( this ), - contents = self.contents(); - - if ( contents.length ) { - contents.wrapAll( html ); - - } else { - self.append( html ); - } - }); - }, - - wrap: function( html ) { - var isFunction = jQuery.isFunction( html ); - - return 
this.each(function(i) { - jQuery( this ).wrapAll( isFunction ? html.call(this, i) : html ); - }); - }, - - unwrap: function() { - return this.parent().each(function() { - if ( !jQuery.nodeName( this, "body" ) ) { - jQuery( this ).replaceWith( this.childNodes ); - } - }).end(); - } -}); -var iframe, getStyles, curCSS, - ralpha = /alpha\([^)]*\)/i, - ropacity = /opacity\s*=\s*([^)]*)/, - rposition = /^(top|right|bottom|left)$/, - // swappable if display is none or starts with table except "table", "table-cell", or "table-caption" - // see here for display values: https://developer.mozilla.org/en-US/docs/CSS/display - rdisplayswap = /^(none|table(?!-c[ea]).+)/, - rmargin = /^margin/, - rnumsplit = new RegExp( "^(" + core_pnum + ")(.*)$", "i" ), - rnumnonpx = new RegExp( "^(" + core_pnum + ")(?!px)[a-z%]+$", "i" ), - rrelNum = new RegExp( "^([+-])=(" + core_pnum + ")", "i" ), - elemdisplay = { BODY: "block" }, - - cssShow = { position: "absolute", visibility: "hidden", display: "block" }, - cssNormalTransform = { - letterSpacing: 0, - fontWeight: 400 - }, - - cssExpand = [ "Top", "Right", "Bottom", "Left" ], - cssPrefixes = [ "Webkit", "O", "Moz", "ms" ]; - -// return a css property mapped to a potentially vendor prefixed property -function vendorPropName( style, name ) { - - // shortcut for names that are not vendor prefixed - if ( name in style ) { - return name; - } - - // check for vendor prefixed names - var capName = name.charAt(0).toUpperCase() + name.slice(1), - origName = name, - i = cssPrefixes.length; - - while ( i-- ) { - name = cssPrefixes[ i ] + capName; - if ( name in style ) { - return name; - } - } - - return origName; -} - -function isHidden( elem, el ) { - // isHidden might be called from jQuery#filter function; - // in that case, element will be second argument - elem = el || elem; - return jQuery.css( elem, "display" ) === "none" || !jQuery.contains( elem.ownerDocument, elem ); -} - -function showHide( elements, show ) { - var display, elem, 
hidden, - values = [], - index = 0, - length = elements.length; - - for ( ; index < length; index++ ) { - elem = elements[ index ]; - if ( !elem.style ) { - continue; - } - - values[ index ] = jQuery._data( elem, "olddisplay" ); - display = elem.style.display; - if ( show ) { - // Reset the inline display of this element to learn if it is - // being hidden by cascaded rules or not - if ( !values[ index ] && display === "none" ) { - elem.style.display = ""; - } - - // Set elements which have been overridden with display: none - // in a stylesheet to whatever the default browser style is - // for such an element - if ( elem.style.display === "" && isHidden( elem ) ) { - values[ index ] = jQuery._data( elem, "olddisplay", css_defaultDisplay(elem.nodeName) ); - } - } else { - - if ( !values[ index ] ) { - hidden = isHidden( elem ); - - if ( display && display !== "none" || !hidden ) { - jQuery._data( elem, "olddisplay", hidden ? display : jQuery.css( elem, "display" ) ); - } - } - } - } - - // Set the display of most of the elements in a second loop - // to avoid the constant reflow - for ( index = 0; index < length; index++ ) { - elem = elements[ index ]; - if ( !elem.style ) { - continue; - } - if ( !show || elem.style.display === "none" || elem.style.display === "" ) { - elem.style.display = show ? values[ index ] || "" : "none"; - } - } - - return elements; -} - -jQuery.fn.extend({ - css: function( name, value ) { - return jQuery.access( this, function( elem, name, value ) { - var len, styles, - map = {}, - i = 0; - - if ( jQuery.isArray( name ) ) { - styles = getStyles( elem ); - len = name.length; - - for ( ; i < len; i++ ) { - map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); - } - - return map; - } - - return value !== undefined ? 
- jQuery.style( elem, name, value ) : - jQuery.css( elem, name ); - }, name, value, arguments.length > 1 ); - }, - show: function() { - return showHide( this, true ); - }, - hide: function() { - return showHide( this ); - }, - toggle: function( state ) { - if ( typeof state === "boolean" ) { - return state ? this.show() : this.hide(); - } - - return this.each(function() { - if ( isHidden( this ) ) { - jQuery( this ).show(); - } else { - jQuery( this ).hide(); - } - }); - } -}); - -jQuery.extend({ - // Add in style property hooks for overriding the default - // behavior of getting and setting a style property - cssHooks: { - opacity: { - get: function( elem, computed ) { - if ( computed ) { - // We should always get a number back from opacity - var ret = curCSS( elem, "opacity" ); - return ret === "" ? "1" : ret; - } - } - } - }, - - // Don't automatically add "px" to these possibly-unitless properties - cssNumber: { - "columnCount": true, - "fillOpacity": true, - "fontWeight": true, - "lineHeight": true, - "opacity": true, - "order": true, - "orphans": true, - "widows": true, - "zIndex": true, - "zoom": true - }, - - // Add in properties whose names you wish to fix before - // setting or getting the value - cssProps: { - // normalize float css property - "float": jQuery.support.cssFloat ? 
"cssFloat" : "styleFloat" - }, - - // Get and set the style property on a DOM Node - style: function( elem, name, value, extra ) { - // Don't set styles on text and comment nodes - if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { - return; - } - - // Make sure that we're working with the right name - var ret, type, hooks, - origName = jQuery.camelCase( name ), - style = elem.style; - - name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( style, origName ) ); - - // gets hook for the prefixed version - // followed by the unprefixed version - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // Check if we're setting a value - if ( value !== undefined ) { - type = typeof value; - - // convert relative number strings (+= or -=) to relative numbers. #7345 - if ( type === "string" && (ret = rrelNum.exec( value )) ) { - value = ( ret[1] + 1 ) * ret[2] + parseFloat( jQuery.css( elem, name ) ); - // Fixes bug #9237 - type = "number"; - } - - // Make sure that NaN and null values aren't set. 
See: #7116 - if ( value == null || type === "number" && isNaN( value ) ) { - return; - } - - // If a number was passed in, add 'px' to the (except for certain CSS properties) - if ( type === "number" && !jQuery.cssNumber[ origName ] ) { - value += "px"; - } - - // Fixes #8908, it can be done more correctly by specifing setters in cssHooks, - // but it would mean to define eight (for every problematic property) identical functions - if ( !jQuery.support.clearCloneStyle && value === "" && name.indexOf("background") === 0 ) { - style[ name ] = "inherit"; - } - - // If a hook was provided, use that value, otherwise just set the specified value - if ( !hooks || !("set" in hooks) || (value = hooks.set( elem, value, extra )) !== undefined ) { - - // Wrapped to prevent IE from throwing errors when 'invalid' values are provided - // Fixes bug #5509 - try { - style[ name ] = value; - } catch(e) {} - } - - } else { - // If a hook was provided get the non-computed value from there - if ( hooks && "get" in hooks && (ret = hooks.get( elem, false, extra )) !== undefined ) { - return ret; - } - - // Otherwise just get the value from the style object - return style[ name ]; - } - }, - - css: function( elem, name, extra, styles ) { - var num, val, hooks, - origName = jQuery.camelCase( name ); - - // Make sure that we're working with the right name - name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( elem.style, origName ) ); - - // gets hook for the prefixed version - // followed by the unprefixed version - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // If a hook was provided get the computed value from there - if ( hooks && "get" in hooks ) { - val = hooks.get( elem, true, extra ); - } - - // Otherwise, if a way to get the computed value exists, use that - if ( val === undefined ) { - val = curCSS( elem, name, styles ); - } - - //convert "normal" to computed value - if ( val === "normal" && name in cssNormalTransform ) { - 
val = cssNormalTransform[ name ]; - } - - // Return, converting to number if forced or a qualifier was provided and val looks numeric - if ( extra === "" || extra ) { - num = parseFloat( val ); - return extra === true || jQuery.isNumeric( num ) ? num || 0 : val; - } - return val; - } -}); - -// NOTE: we've included the "window" in window.getComputedStyle -// because jsdom on node.js will break without it. -if ( window.getComputedStyle ) { - getStyles = function( elem ) { - return window.getComputedStyle( elem, null ); - }; - - curCSS = function( elem, name, _computed ) { - var width, minWidth, maxWidth, - computed = _computed || getStyles( elem ), - - // getPropertyValue is only needed for .css('filter') in IE9, see #12537 - ret = computed ? computed.getPropertyValue( name ) || computed[ name ] : undefined, - style = elem.style; - - if ( computed ) { - - if ( ret === "" && !jQuery.contains( elem.ownerDocument, elem ) ) { - ret = jQuery.style( elem, name ); - } - - // A tribute to the "awesome hack by Dean Edwards" - // Chrome < 17 and Safari 5.0 uses "computed value" instead of "used value" for margin-right - // Safari 5.1.7 (at least) returns percentage for a larger set of values, but width seems to be reliably pixels - // this is against the CSSOM draft spec: http://dev.w3.org/csswg/cssom/#resolved-values - if ( rnumnonpx.test( ret ) && rmargin.test( name ) ) { - - // Remember the original values - width = style.width; - minWidth = style.minWidth; - maxWidth = style.maxWidth; - - // Put in the new values to get a computed value out - style.minWidth = style.maxWidth = style.width = ret; - ret = computed.width; - - // Revert the changed values - style.width = width; - style.minWidth = minWidth; - style.maxWidth = maxWidth; - } - } - - return ret; - }; -} else if ( document.documentElement.currentStyle ) { - getStyles = function( elem ) { - return elem.currentStyle; - }; - - curCSS = function( elem, name, _computed ) { - var left, rs, rsLeft, - computed = _computed 
|| getStyles( elem ), - ret = computed ? computed[ name ] : undefined, - style = elem.style; - - // Avoid setting ret to empty string here - // so we don't default to auto - if ( ret == null && style && style[ name ] ) { - ret = style[ name ]; - } - - // From the awesome hack by Dean Edwards - // http://erik.eae.net/archives/2007/07/27/18.54.15/#comment-102291 - - // If we're not dealing with a regular pixel number - // but a number that has a weird ending, we need to convert it to pixels - // but not position css attributes, as those are proportional to the parent element instead - // and we can't measure the parent instead because it might trigger a "stacking dolls" problem - if ( rnumnonpx.test( ret ) && !rposition.test( name ) ) { - - // Remember the original values - left = style.left; - rs = elem.runtimeStyle; - rsLeft = rs && rs.left; - - // Put in the new values to get a computed value out - if ( rsLeft ) { - rs.left = elem.currentStyle.left; - } - style.left = name === "fontSize" ? "1em" : ret; - ret = style.pixelLeft + "px"; - - // Revert the changed values - style.left = left; - if ( rsLeft ) { - rs.left = rsLeft; - } - } - - return ret === "" ? "auto" : ret; - }; -} - -function setPositiveNumber( elem, value, subtract ) { - var matches = rnumsplit.exec( value ); - return matches ? - // Guard against undefined "subtract", e.g., when used as in cssHooks - Math.max( 0, matches[ 1 ] - ( subtract || 0 ) ) + ( matches[ 2 ] || "px" ) : - value; -} - -function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) { - var i = extra === ( isBorderBox ? "border" : "content" ) ? - // If we already have the right measurement, avoid augmentation - 4 : - // Otherwise initialize for horizontal or vertical properties - name === "width" ? 
1 : 0, - - val = 0; - - for ( ; i < 4; i += 2 ) { - // both box models exclude margin, so add it if we want it - if ( extra === "margin" ) { - val += jQuery.css( elem, extra + cssExpand[ i ], true, styles ); - } - - if ( isBorderBox ) { - // border-box includes padding, so remove it if we want content - if ( extra === "content" ) { - val -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - } - - // at this point, extra isn't border nor margin, so remove border - if ( extra !== "margin" ) { - val -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } else { - // at this point, extra isn't content, so add padding - val += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - - // at this point, extra isn't content nor padding, so add border - if ( extra !== "padding" ) { - val += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } - } - - return val; -} - -function getWidthOrHeight( elem, name, extra ) { - - // Start with offset property, which is equivalent to the border-box value - var valueIsBorderBox = true, - val = name === "width" ? elem.offsetWidth : elem.offsetHeight, - styles = getStyles( elem ), - isBorderBox = jQuery.support.boxSizing && jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; - - // some non-html elements return undefined for offsetWidth, so check for null/undefined - // svg - https://bugzilla.mozilla.org/show_bug.cgi?id=649285 - // MathML - https://bugzilla.mozilla.org/show_bug.cgi?id=491668 - if ( val <= 0 || val == null ) { - // Fall back to computed then uncomputed css if necessary - val = curCSS( elem, name, styles ); - if ( val < 0 || val == null ) { - val = elem.style[ name ]; - } - - // Computed unit is not pixels. Stop here and return. 
- if ( rnumnonpx.test(val) ) { - return val; - } - - // we need the check for style in case a browser which returns unreliable values - // for getComputedStyle silently falls back to the reliable elem.style - valueIsBorderBox = isBorderBox && ( jQuery.support.boxSizingReliable || val === elem.style[ name ] ); - - // Normalize "", auto, and prepare for extra - val = parseFloat( val ) || 0; - } - - // use the active box-sizing model to add/subtract irrelevant styles - return ( val + - augmentWidthOrHeight( - elem, - name, - extra || ( isBorderBox ? "border" : "content" ), - valueIsBorderBox, - styles - ) - ) + "px"; -} - -// Try to determine the default display value of an element -function css_defaultDisplay( nodeName ) { - var doc = document, - display = elemdisplay[ nodeName ]; - - if ( !display ) { - display = actualDisplay( nodeName, doc ); - - // If the simple way fails, read from inside an iframe - if ( display === "none" || !display ) { - // Use the already-created iframe if possible - iframe = ( iframe || - jQuery("

    Original thanks to the Apache Commons project (ws.apache.org) for this code.